author | John Wiegley <johnw@newartisans.com> | 2009-10-31 00:55:56 -0400
committer | John Wiegley <johnw@newartisans.com> | 2009-10-31 00:55:56 -0400
commit | a2cb549b1dff9024e3f700203e424e496b25fd91 (patch)
tree | 1c03e9eb6f649a1446021dfb2a5d93a697f81feb
parent | 349fc5d175bc5c4acbc478b4d78c12dc507c4f58 (diff)
parent | a88a4c55b2b11d58d3b9e49bf785be42afe63510 (diff)
Merge branch 'next'
-rw-r--r-- | Makefile.am | 2
-rw-r--r-- | README-1ST | 61
-rwxr-xr-x | acprep | 216
-rw-r--r--[-rwxr-xr-x] | autogen.sh | 0
-rw-r--r-- | configure.ac | 32
-rw-r--r-- | lisp/ledger.el | 2
-rw-r--r-- | src/account.cc | 2
-rw-r--r-- | src/account.h | 20
-rw-r--r-- | src/amount.cc | 101
-rw-r--r-- | src/amount.h | 10
-rw-r--r-- | src/annotate.h | 46
-rw-r--r-- | src/archive.cc | 247
-rw-r--r-- | src/archive.h | 105
-rw-r--r-- | src/balance.cc | 6
-rw-r--r-- | src/balance.h | 14
-rw-r--r-- | src/chain.cc | 14
-rw-r--r-- | src/commodity.h | 83
-rw-r--r-- | src/derive.cc | 4
-rw-r--r-- | src/emacs.cc | 6
-rw-r--r-- | src/error.cc | 8
-rw-r--r-- | src/expr.h | 18
-rw-r--r-- | src/flags.h | 22
-rw-r--r-- | src/global.cc | 14
-rw-r--r-- | src/interactive.cc | 6
-rw-r--r-- | src/item.cc | 32
-rw-r--r-- | src/item.h | 97
-rw-r--r-- | src/journal.cc | 26
-rw-r--r-- | src/journal.h | 83
-rw-r--r-- | src/mask.h | 19
-rw-r--r-- | src/op.cc | 6
-rw-r--r-- | src/op.h | 28
-rw-r--r-- | src/output.cc | 4
-rw-r--r-- | src/pool.h | 18
-rw-r--r-- | src/post.h | 17
-rw-r--r-- | src/predicate.h | 13
-rw-r--r-- | src/py_value.cc | 6
-rw-r--r-- | src/report.cc | 4
-rw-r--r-- | src/scope.h | 66
-rw-r--r-- | src/session.cc | 132
-rw-r--r-- | src/session.h | 11
-rw-r--r-- | src/stats.cc | 2
-rw-r--r-- | src/system.hh.in | 69
-rw-r--r-- | src/textual.cc | 54
-rw-r--r-- | src/times.cc | 122
-rw-r--r-- | src/times.h | 88
-rw-r--r-- | src/utils.h | 12
-rw-r--r-- | src/value.cc | 22
-rw-r--r-- | src/value.h | 92
-rw-r--r-- | src/xact.cc | 4
-rw-r--r-- | src/xact.h | 69
-rw-r--r-- | tools/Makefile.am | 520
-rwxr-xr-x | tools/autogen.sh | 1491
-rw-r--r-- | tools/configure.ac | 391
53 files changed, 4095 insertions, 442 deletions
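
Most of the new lines wire Boost.Serialization into the core types: the new src/archive.cc and src/archive.h implement a binary cache of the parsed journal, and serialize() members are added to account_t, amount_t, item_t, journal_t and friends, all guarded by HAVE_BOOST_SERIALIZATION, which configure.ac now probes for. For orientation, here is a minimal standalone sketch of that serialization pattern; the struct, field names, and "cache.bin" below are illustrative only and do not appear in the patch:

    // Minimal sketch of the Boost.Serialization pattern this commit adds.
    // The struct, field names, and "cache.bin" are illustrative only.
    #include <cstddef>
    #include <fstream>
    #include <string>
    #include <boost/archive/binary_oarchive.hpp>
    #include <boost/archive/binary_iarchive.hpp>
    #include <boost/serialization/string.hpp>

    struct fileinfo {
      std::string filename;
      std::size_t size;

      // Boost.Serialization calls this one member template for both saving
      // and loading; the patch adds analogous serialize() members to
      // journal_t, account_t, amount_t, item_t, etc.
      template <class Archive>
      void serialize(Archive& ar, const unsigned int /* version */) {
        ar & filename;
        ar & size;
      }
    };

    int main() {
      {
        std::ofstream out("cache.bin", std::ios::binary);
        boost::archive::binary_oarchive oa(out);
        fileinfo info = { "sample.dat", 1024 };
        oa << info;                 // write the binary archive
      }
      {
        std::ifstream in("cache.bin", std::ios::binary);
        boost::archive::binary_iarchive ia(in);
        fileinfo info;
        ia >> info;                 // read it back
      }
      return 0;
    }

archive_t::save() and archive_t::load() in src/archive.cc do essentially this over the whole journal, with an additional header (version number plus the list of source files with their sizes and modtimes) so that should_load() can tell when the cache is stale.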
diff --git a/Makefile.am b/Makefile.am index d4494fa7..13ea4e7f 100644 --- a/Makefile.am +++ b/Makefile.am @@ -59,6 +59,7 @@ libledger_data_la_SOURCES = \ src/timelog.cc \ src/textual.cc \ src/journal.cc \ + src/archive.cc \ src/account.cc \ src/xact.cc \ src/post.cc \ @@ -118,6 +119,7 @@ pkginclude_HEADERS = \ src/xact.h \ src/account.h \ src/journal.h \ + src/archive.h \ src/timelog.h \ src/iterators.h \ src/compare.h \ diff --git a/README-1ST b/README-1ST new file mode 100644 index 00000000..cb62accf --- /dev/null +++ b/README-1ST @@ -0,0 +1,61 @@ + + README FIRST!!! + +=============================================================================== + +To build this code after doing a Git clone, run: + + $ ./acprep update + +If you try to configure/build on your own, you are almost certainly going to +run into problems. In future, you can run this command again and it will keep +you updated with the very latest version. + +=============================================================================== + + F.A.Q. + + ---------------------------------------------------------------------- + + - Q: The build fails saying it can't find utf8.h + + A: You didn't run ./acprep update. + + ---------------------------------------------------------------------- + + - Q: Configure fails saying it can't find boost_regex + + A: Look in config.log and search for "boost_regex", then scroll down a bit + until you see the exact compile error. Usually it's failing because + your include directory is different from anything acprep is expecting to + see. It could also be failing because your Boost libraries have a + custom "suffix" on them. + + Let's say your Boost was installed in ~/boost, and every library has the + suffix '-xgcc42'. This is what you would run: + + CPPFLAGS=-I$HOME/boost acprep --boost=xgcc42 update + + ---------------------------------------------------------------------- + + - Q: Configure fails saying it can't find MPFR + + A: You need MPFR version 2.4.0 or higher. This version does not come with + most Debian distributions, so you will need to build it. + + ---------------------------------------------------------------------- + + - Q: Something else fails, or Ledger crashes on startup + + A: This, I am most interested in hearing about. Please e-mail me a copy of + config.log and your build log to <johnw@newartisans.com>. Also, if + Ledger is crashing, try running it under gdb like so: + + $ gdb ledger + (gdb) run <ARGS TO LEDGER> + ... runs till crash ... + (gdb) bt + + Send me that backtrace output, and the output from "ledger --version". 
+ + ---------------------------------------------------------------------- @@ -293,7 +293,7 @@ class PrepareBuild(CommandLineApp): else: cmd = 'config' - self.log.debug('Invoking primary phase: ' + cmd) + self.log.info('Invoking primary phase: ' + cmd) PrepareBuild.__dict__['phase_' + cmd](self, *args) ######################################################################### @@ -364,7 +364,7 @@ class PrepareBuild(CommandLineApp): def ensure(self, dirname): if not exists(dirname): - self.log.debug('Making directory: ' + dirname) + self.log.info('Making directory: ' + dirname) os.makedirs(dirname) elif not isdir(dirname): self.log.error('Directory is not a directory: ' + dirname) @@ -396,11 +396,11 @@ class PrepareBuild(CommandLineApp): return False def phase_products(self, *args): - self.log.debug('Executing phase: products') + self.log.info('Executing phase: products') print self.products_directory() def phase_info(self, *args): - self.log.debug('Executing phase: info') + self.log.info('Executing phase: info') environ, conf_args = self.configure_environment() @@ -433,7 +433,7 @@ class PrepareBuild(CommandLineApp): self.log.debug(' %s' % arg) def phase_sloc(self, *args): - self.log.debug('Executing phase: sloc') + self.log.info('Executing phase: sloc') self.execute('sloccount', 'src', 'python', 'lisp', 'test') ######################################################################### @@ -441,13 +441,13 @@ class PrepareBuild(CommandLineApp): ######################################################################### def phase_gettext(self, *args): - self.log.debug('Executing phase: gettext') + self.log.info('Executing phase: gettext') # configure the template files assert exists('po') and isdir('po') if not exists(join('po', 'Makevars')): assert exists(join('po', 'Makevars.template')) - self.log.debug('Moving po/Makevars.template -> po/Makevars') + self.log.info('Moving po/Makevars.template -> po/Makevars') os.rename(join('po', 'Makevars.template'), join('po', 'Makevars')) @@ -460,22 +460,36 @@ class PrepareBuild(CommandLineApp): POTFILES_in.close() def phase_version(self, *args): - self.log.debug('Executing phase: version') + self.log.info('Executing phase: version') version_m4 = open('version.m4', 'w') version_m4.write("m4_define([VERSION_NUMBER], [%s])" % self.current_version()) version_m4.close() def phase_autogen(self, *args): - self.log.debug('Executing phase: autogen') - self.execute('sh', 'autogen.sh') + self.log.info('Executing phase: autogen') + + if not exists('autogen.sh') or \ + self.isnewer('tools/autogen.sh', 'autogen.sh'): + shutil.copyfile('tools/autogen.sh', 'autogen.sh') + + self.execute('sh', 'tools/autogen.sh') def phase_aclocal(self, *args): - self.log.debug('Executing phase: aclocal') + self.log.info('Executing phase: aclocal') self.execute('aclocal', '-I', 'm4') def phase_autoconf(self, *args): - self.log.debug('Executing phase: autoconf') + self.log.info('Executing phase: autoconf') + + if not exists('configure.ac') or \ + self.isnewer('tools/configure.ac', 'configure.ac'): + shutil.copyfile('tools/configure.ac', 'configure.ac') + + if not exists('Makefile.am') or \ + self.isnewer('tools/Makefile.am', 'Makefile.am'): + shutil.copyfile('tools/Makefile.am', 'Makefile.am') + reason = self.need_to_prepare_autotools() if reason: self.log.info('autogen.sh must be run ' + reason) @@ -491,13 +505,13 @@ class PrepareBuild(CommandLineApp): ######################################################################### def phase_submodule(self, *args): - self.log.debug('Executing phase: 
submodule') + self.log.info('Executing phase: submodule') if exists('.git') and isdir('.git'): self.execute('git', 'submodule', 'init') self.execute('git', 'submodule', 'update') def phase_pull(self, *args): - self.log.debug('Executing phase: pull') + self.log.info('Executing phase: pull') if not exists('.git') and not isdir('.git'): self.log.error("This is not a Git clone.") sys.exit(1) @@ -509,7 +523,7 @@ class PrepareBuild(CommandLineApp): ######################################################################### def phase_dependencies(self, *args): - self.log.debug('Executing phase: dependencies') + self.log.info('Executing phase: dependencies') self.log.info("Installing Ledger's build dependencies ...") @@ -581,7 +595,7 @@ class PrepareBuild(CommandLineApp): self.execute(*packages) def phase_buildlibs(self, *args): - self.log.debug('Executing phase: buildlibs') + self.log.info('Executing phase: buildlibs') try: os.chdir('lib') @@ -645,12 +659,12 @@ class PrepareBuild(CommandLineApp): entries.sort() for entry in entries: if re.search('boost_regex', entry): - self.log.debug('Found a Boost library: ' + entry) + self.log.info('Found a Boost library: ' + entry) match = re.match('libboost_regex([^.]*)\.(a|so|dylib)', entry) if match: suffix = match.group(1) - self.log.debug('Found Boost suffix => ' + suffix) + self.log.info('Found Boost suffix => ' + suffix) self.envvars['BOOST_HOME'] = dirname(path) return suffix else: @@ -660,20 +674,20 @@ class PrepareBuild(CommandLineApp): def locate_boost(self): if self.envvars['BOOST_SUFFIX']: - self.log.debug(("Not looking for Boost, since " + + self.log.info(("Not looking for Boost, since " + "a suffix of '%s' was given") % self.envvars['BOOST_SUFFIX']) else: suffix = None for path in ['/usr/local/lib', '/opt/local/lib', '/sw/lib', '/usr/lib']: - self.log.debug('Looking for Boost in %s...' % path) + self.log.info('Looking for Boost in %s...' 
% path) suffix = self.locate_boost_in_dir(path) if suffix is not None: - self.log.debug('Boost is located here:') - self.log.debug('BOOST_HOME => ' + + self.log.info('Boost is located here:') + self.log.info('BOOST_HOME => ' + self.envvars['BOOST_HOME']) - self.log.debug('BOOST_SUFFIX => ' + suffix) + self.log.info('BOOST_SUFFIX => ' + suffix) break if suffix is None: self.log.error("Boost could not be found.") @@ -695,7 +709,7 @@ class PrepareBuild(CommandLineApp): '/sw/include']: if exists(path) and isdir(path) and \ path != '/usr/include': - self.log.debug('Noticing include directory => ' + path) + self.log.info('Noticing include directory => ' + path) self.sys_include_dirs.append(path) # Each of these becomes '-L<name>' @@ -707,17 +721,19 @@ class PrepareBuild(CommandLineApp): '/opt/local/lib', '/sw/lib']: if exists(path) and isdir(path): - self.log.debug('Noticing library directory => ' + path) + self.log.info('Noticing library directory => ' + path) self.sys_library_dirs.append(path) def setup_for_johnw(self): # jww (2009-03-09): Some peculiarities specific to my system - if exists('/usr/local/stow/cppunit/include'): - self.sys_include_dirs.insert(0, '/usr/local/stow/cppunit/include') - self.sys_library_dirs.insert(0, '/usr/local/stow/cppunit/lib') + if exists('/Users/johnw/Dropbox/Accounts/ledger.dat'): + if exists('/usr/local/stow/cppunit/include'): + self.sys_include_dirs.insert(0, '/usr/local/stow/cppunit/include') + self.sys_library_dirs.insert(0, '/usr/local/stow/cppunit/lib') self.CXXFLAGS.append('-march=nocona') self.CXXFLAGS.append('-msse3') + self.CPPFLAGS.append('-D_GLIBCXX_FULLY_DYNAMIC_STRING=1') self.options.use_glibcxx_debug = True @@ -726,7 +742,7 @@ class PrepareBuild(CommandLineApp): system = self.get_stdout('uname', '-s') - self.log.debug('System type is => ' + system) + self.log.info('System type is => ' + system) # These options are global defaults at the moment #self.option_warn() @@ -796,7 +812,7 @@ class PrepareBuild(CommandLineApp): self.log.error('Unknown build flavor "%s"' % self.current_flavor) sys.exit(1) - self.log.debug('Setting up build flavor => ' + self.current_flavor) + self.log.info('Setting up build flavor => ' + self.current_flavor) PrepareBuild.__dict__['setup_flavor_' + self.current_flavor](self) self.setup_flags() @@ -927,23 +943,23 @@ class PrepareBuild(CommandLineApp): ######################################################################### def setup_flavor_default(self): - if exists('/usr/local/lib/libboost_regex-xgcc44-s-1_40.a'): - self.envvars['BOOST_HOME'] = '/usr/local' - self.envvars['BOOST_SUFFIX'] = '-xgcc44-s-1_40' - self.log.debug('Setting BOOST_SUFFIX => %s' % - self.envvars['BOOST_SUFFIX']) - - self.sys_include_dirs.append('/usr/local/include/boost-1_40') - - elif exists('/opt/local/lib/libboost_regex.a'): + if exists('/opt/local/lib/libboost_regex.a'): self.envvars['BOOST_HOME'] = '/opt/local' self.envvars['BOOST_SUFFIX'] = '' - self.log.debug('Setting BOOST_SUFFIX => %s' % + self.log.info('Setting BOOST_SUFFIX => %s' % self.envvars['BOOST_SUFFIX']) self.sys_include_dirs.append('/opt/local/include/boost') + elif exists('/usr/local/lib/libboost_regex-xgcc44-s-1_40.a'): + self.envvars['BOOST_HOME'] = '/usr/local' + self.envvars['BOOST_SUFFIX'] = '-xgcc44-s-1_40' + self.log.info('Setting BOOST_SUFFIX => %s' % + self.envvars['BOOST_SUFFIX']) + + self.sys_include_dirs.append('/usr/local/include/boost-1_40') + def setup_flavor_debug(self): self.configure_args.append('--enable-debug') @@ -960,64 +976,65 @@ class 
PrepareBuild(CommandLineApp): self.sys_include_dirs.remove('/usr/local/stow/cppunit/include') self.sys_library_dirs.remove('/usr/local/stow/cppunit/lib') - self.sys_include_dirs.insert(0, '/usr/local/stow/cppunit-debug/include') - self.sys_library_dirs.insert(0, '/usr/local/stow/cppunit-debug/lib') + if exists('/usr/local/stow/cppunit-debug/include'): + self.sys_include_dirs.insert(0, '/usr/local/stow/cppunit-debug/include') + self.sys_library_dirs.insert(0, '/usr/local/stow/cppunit-debug/lib') - if exists('/usr/local/lib/libboost_regex-xgcc44-sd-1_40.a'): - self.envvars['BOOST_HOME'] = '/usr/local' - self.envvars['BOOST_SUFFIX'] = '-xgcc44-sd-1_40' - self.log.debug('Setting BOOST_SUFFIX => %s' % - self.envvars['BOOST_SUFFIX']) - - self.sys_include_dirs.append('/usr/local/include/boost-1_40') - - elif exists('/opt/local/lib/libboost_regex-d.a'): + if exists('/opt/local/lib/libboost_regex-d.a'): self.envvars['BOOST_HOME'] = '/opt/local' self.envvars['BOOST_SUFFIX'] = '-d' - self.log.debug('Setting BOOST_SUFFIX => %s' % - self.envvars['BOOST_SUFFIX']) + self.log.info('Setting BOOST_SUFFIX => %s' % + self.envvars['BOOST_SUFFIX']) self.sys_include_dirs.append('/opt/local/include/boost') - else: - if exists('/usr/local/lib/libboost_regex-xgcc44-s-1_40.a'): + + elif exists('/usr/local/lib/libboost_regex-xgcc44-sd-1_40.a'): self.envvars['BOOST_HOME'] = '/usr/local' - self.envvars['BOOST_SUFFIX'] = '-xgcc44-s-1_40' - self.log.debug('Setting BOOST_SUFFIX => %s' % - self.envvars['BOOST_SUFFIX']) + self.envvars['BOOST_SUFFIX'] = '-xgcc44-sd-1_40' + self.log.info('Setting BOOST_SUFFIX => %s' % + self.envvars['BOOST_SUFFIX']) self.sys_include_dirs.append('/usr/local/include/boost-1_40') - - elif exists('/opt/local/lib/libboost_regex.a'): + else: + if exists('/opt/local/lib/libboost_regex.a'): self.envvars['BOOST_HOME'] = '/opt/local' self.envvars['BOOST_SUFFIX'] = '' - self.log.debug('Setting BOOST_SUFFIX => %s' % + self.log.info('Setting BOOST_SUFFIX => %s' % self.envvars['BOOST_SUFFIX']) self.sys_include_dirs.append('/opt/local/include/boost') + elif exists('/usr/local/lib/libboost_regex-xgcc44-s-1_40.a'): + self.envvars['BOOST_HOME'] = '/usr/local' + self.envvars['BOOST_SUFFIX'] = '-xgcc44-s-1_40' + self.log.info('Setting BOOST_SUFFIX => %s' % + self.envvars['BOOST_SUFFIX']) + + self.sys_include_dirs.append('/usr/local/include/boost-1_40') + def setup_flavor_opt(self): self.CXXFLAGS.append('-O3') self.CXXFLAGS.append('-fomit-frame-pointer') - if exists('/usr/local/lib/libboost_regex-xgcc44-s-1_40.a'): - self.envvars['BOOST_HOME'] = '/usr/local' - self.envvars['BOOST_SUFFIX'] = '-xgcc44-s-1_40' - self.log.debug('Setting BOOST_SUFFIX => %s' % - self.envvars['BOOST_SUFFIX']) - - self.sys_include_dirs.append('/usr/local/include/boost-1_40') - - elif exists('/opt/local/lib/libboost_regex.a'): + if exists('/opt/local/lib/libboost_regex.a'): self.envvars['BOOST_HOME'] = '/opt/local' self.envvars['BOOST_SUFFIX'] = '' - self.log.debug('Setting BOOST_SUFFIX => %s' % - self.envvars['BOOST_SUFFIX']) + self.log.info('Setting BOOST_SUFFIX => %s' % + self.envvars['BOOST_SUFFIX']) self.sys_include_dirs.append('/opt/local/include/boost') + elif exists('/usr/local/lib/libboost_regex-xgcc44-s-1_40.a'): + self.envvars['BOOST_HOME'] = '/usr/local' + self.envvars['BOOST_SUFFIX'] = '-xgcc44-s-1_40' + self.log.info('Setting BOOST_SUFFIX => %s' % + self.envvars['BOOST_SUFFIX']) + + self.sys_include_dirs.append('/usr/local/include/boost-1_40') + def setup_flavor_gcov(self): self.CXXFLAGS.append('-g') 
self.CXXFLAGS.append('-fprofile-arcs') @@ -1038,7 +1055,7 @@ class PrepareBuild(CommandLineApp): """Alter the Makefile so that it's not nearly so verbose. This makes errors and warnings much easier to spot.""" - self.log.debug('Executing phase: patch') + self.log.info('Executing phase: patch') if exists('Makefile'): self.log.debug('Patching generated Makefile') @@ -1108,7 +1125,7 @@ class PrepareBuild(CommandLineApp): return False def phase_configure(self, *args): - self.log.debug('Executing phase: configure') + self.log.info('Executing phase: configure') self.configured = True @@ -1130,7 +1147,10 @@ class PrepareBuild(CommandLineApp): self.log.debug('configure args => ' + str(conf_args)) configure = Popen(conf_args, shell=False, env=environ) - configure.wait() + retcode = configure.wait() + if retcode < 0: + self.log.error("Child was terminated by signal", -retcode) + sys.exit(1) if not self.options.no_patch: self.phase_patch() @@ -1145,7 +1165,7 @@ class PrepareBuild(CommandLineApp): os.chdir(self.source_dir) def phase_config(self, *args): - self.log.debug('Executing phase: config') + self.log.info('Executing phase: config') self.phase_submodule() self.phase_autoconf() self.phase_configure(*args) @@ -1157,7 +1177,7 @@ class PrepareBuild(CommandLineApp): ######################################################################### def phase_make(self, *args): - self.log.debug('Executing phase: make') + self.log.info('Executing phase: make') config_args = [] make_args = [] @@ -1182,15 +1202,12 @@ class PrepareBuild(CommandLineApp): self.log.debug('Changing directory to ' + build_dir) os.chdir(build_dir) - self.log.debug('make args => ' + str(make_args)) - - configure = Popen(['make'] + make_args, shell=False) - configure.wait() + self.execute(*(['make'] + make_args)) finally: os.chdir(self.source_dir) def phase_update(self, *args): - self.log.debug('Executing phase: update') + self.log.info('Executing phase: update') self.phase_pull() self.phase_make(*args) @@ -1199,15 +1216,15 @@ class PrepareBuild(CommandLineApp): ######################################################################### def phase_clean(self, *args): - self.log.debug('Executing phase: clean') + self.log.info('Executing phase: clean') self.phase_make('clean') def phase_distclean(self, *args): - self.log.debug('Executing phase: distclean') + self.log.info('Executing phase: distclean') self.phase_make('distclean') def phase_gitclean(self, *args): - self.log.debug('Executing phase: gitclean') + self.log.info('Executing phase: gitclean') self.execute('git', 'clean', '-dfx') ######################################################################### @@ -1238,7 +1255,7 @@ class PrepareBuild(CommandLineApp): '@loader_path/' + base, dest_file) def phase_bindmg(self, *args): - self.log.debug('Executing phase: bindmg') + self.log.info('Executing phase: bindmg') self.phase_make() @@ -1263,7 +1280,7 @@ class PrepareBuild(CommandLineApp): shutil.rmtree(tempdir) def phase_upload(self, *args): - self.log.debug('Executing phase: upload') + self.log.info('Executing phase: upload') self.phase_bindmg() @@ -1295,7 +1312,7 @@ class PrepareBuild(CommandLineApp): shutil.rmtree(self.build_directory()) def phase_distcheck(self, *args): - self.log.debug('Executing phase: distcheck') + self.log.info('Executing phase: distcheck') self.configure_flavor('default', False) @@ -1320,7 +1337,7 @@ class PrepareBuild(CommandLineApp): self.phase_make(*make_args) def phase_rsync(self, *args): - self.log.debug('Executing phase: rsync') + self.log.info('Executing 
phase: rsync') source_copy_dir = join(self.ensure(self.products_directory()), 'ledger-proof') @@ -1332,32 +1349,28 @@ class PrepareBuild(CommandLineApp): self.source_dir = source_copy_dir def phase_proof(self, *args): - self.log.debug('Executing phase: proof') + self.log.info('Executing phase: proof') + + self.phase_makeall(*args) self.log.info('=== Copying source tree ===') self.phase_rsync() + self.phase_makeall(*args) + self.configure_flavor('opt') - self.log.info('=== Building opt ===') - self.phase_make(*args) self.log.info('=== Testing opt ===') self.phase_make('fullcheck') self.configure_flavor('gcov') - self.log.info('=== Building gcov ===') - self.phase_make(*args) self.log.info('=== Testing gcov ===') self.phase_make('check') self.configure_flavor('debug') - self.log.info('=== Building debug ===') - self.phase_make(*args) self.log.info('=== Testing debug ===') self.phase_make('fullcheck') self.configure_flavor('default') - self.log.info('=== Building default ===') - self.phase_make(*args) self.log.info('=== Testing default ===') self.phase_make('fullcheck') @@ -1365,7 +1378,7 @@ class PrepareBuild(CommandLineApp): self.phase_distcheck() def phase_makeall(self, *args): - self.log.debug('Executing phase: makeall') + self.log.info('Executing phase: makeall') self.configure_flavor('opt', False) @@ -1403,11 +1416,6 @@ class PrepareBuild(CommandLineApp): self.configure_flavor('default', False) - def phase_do_all(self, *args): - self.log.debug('Executing phase: do_all') - self.phase_makeall(*args) - self.phase_proof(*args) - ######################################################################### # Help # ######################################################################### diff --git a/autogen.sh b/autogen.sh index a8b63eff..a8b63eff 100755..100644 --- a/autogen.sh +++ b/autogen.sh diff --git a/configure.ac b/configure.ac index 69fe0e1e..22b4b96a 100644 --- a/configure.ac +++ b/configure.ac @@ -280,6 +280,38 @@ else AC_MSG_FAILURE("Could not find boost_iostreams library (set CPPFLAGS and LDFLAGS?)") fi +# check for boost_serialization +AC_CACHE_CHECK( + [if boost_serialization is available], + [boost_serialization_cpplib_avail_cv_], + [boost_serialization_save_libs=$LIBS + LIBS="-lboost_serialization$BOOST_SUFFIX -lboost_system$BOOST_SUFFIX $LIBS" + AC_LANG_PUSH(C++) + AC_LINK_IFELSE( + [AC_LANG_PROGRAM( + [[#include <boost/archive/binary_oarchive.hpp> + #include <iostream> + struct foo { + int a; + template<class Archive> + void serialize(Archive & ar, const unsigned int) { + ar & a; + } + };]], + [[boost::archive::binary_oarchive oa(std::cout); + foo x; + oa << x;]])], + [boost_serialization_cpplib_avail_cv_=true], + [boost_serialization_cpplib_avail_cv_=false]) + AC_LANG_POP + LIBS=$boost_serialization_save_libs]) + +if [test x$boost_serialization_cpplib_avail_cv_ = xtrue ]; then + AC_DEFINE([HAVE_BOOST_SERIALIZATION], [1], [Whether Boost.Serialization is available]) + LIBS="-lboost_serialization$BOOST_SUFFIX $LIBS" +fi +AM_CONDITIONAL(HAVE_BOOST_SERIALIZATION, test x$boost_serialization_cpplib_avail_cv_ = xtrue) + # check for Python AM_PATH_PYTHON(2.4,, :) if [test "$PYTHON" != :]; then diff --git a/lisp/ledger.el b/lisp/ledger.el index be730fd9..8e4de270 100644 --- a/lisp/ledger.el +++ b/lisp/ledger.el @@ -4,7 +4,7 @@ ;; Emacs Lisp Archive Entry ;; Filename: ledger.el -;; Version: 2.7 +;; Version: 3.0 ;; Date: Fri 18-Jul-2008 ;; Keywords: data ;; Author: John Wiegley (johnw AT gnu DOT org) diff --git a/src/account.cc b/src/account.cc index 57b66d86..52a6b436 100644 --- 
a/src/account.cc +++ b/src/account.cc @@ -458,7 +458,7 @@ void account_t::xdata_t::details_t::update(post_t& post, posts_virtuals_count++; if (gather_all) - filenames.insert(post.pathname); + filenames.insert(post.pos->pathname); date_t date = post.date(); diff --git a/src/account.h b/src/account.h index 8c276c8a..9dc467bc 100644 --- a/src/account.h +++ b/src/account.h @@ -232,6 +232,26 @@ public: return xdata_ && xdata_->has_flags(flags); } std::size_t children_with_flags(xdata_t::flags_t flags) const; + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & boost::serialization::base_object<supports_flags<> >(*this); + ar & boost::serialization::base_object<scope_t>(*this); + ar & parent; + ar & name; + ar & note; + ar & depth; + ar & accounts; + ar & posts; + ar & _fullname; + } +#endif // HAVE_BOOST_SERIALIZATION }; std::ostream& operator<<(std::ostream& out, const account_t& account); diff --git a/src/amount.cc b/src/amount.cc index a6788af9..3ac47c59 100644 --- a/src/amount.cc +++ b/src/amount.cc @@ -56,25 +56,25 @@ struct amount_t::bigint_t : public supports_flags<> mpq_t val; precision_t prec; - uint_least16_t ref; + uint_least16_t refc; #define MP(bigint) ((bigint)->val) - bigint_t() : prec(0), ref(1) { + bigint_t() : prec(0), refc(1) { TRACE_CTOR(bigint_t, ""); mpq_init(val); } bigint_t(const bigint_t& other) : supports_flags<>(static_cast<uint_least8_t> (other.flags() & ~BIGINT_BULK_ALLOC)), - prec(other.prec), ref(1) { + prec(other.prec), refc(1) { TRACE_CTOR(bigint_t, "copy"); mpq_init(val); mpq_set(val, other.val); } ~bigint_t() { TRACE_DTOR(bigint_t); - assert(ref == 0); + assert(refc == 0); mpq_clear(val); } @@ -83,8 +83,8 @@ struct amount_t::bigint_t : public supports_flags<> DEBUG("ledger.validate", "amount_t::bigint_t: prec > 128"); return false; } - if (ref > 16535) { - DEBUG("ledger.validate", "amount_t::bigint_t: ref > 16535"); + if (refc > 16535) { + DEBUG("ledger.validate", "amount_t::bigint_t: refc > 16535"); return false; } if (flags() & ~(BIGINT_BULK_ALLOC | BIGINT_KEEP_PREC)) { @@ -94,6 +94,20 @@ struct amount_t::bigint_t : public supports_flags<> } return true; } + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive& ar, const unsigned int /* version */) + { + ar & boost::serialization::base_object<supports_flags<> >(*this); + ar & val; + ar & prec; + ar & refc; + } +#endif // HAVE_BOOST_SERIALIZATION }; shared_ptr<commodity_pool_t> amount_t::current_pool; @@ -147,8 +161,8 @@ void amount_t::_copy(const amount_t& amt) } else { quantity = amt.quantity; DEBUG("amounts.refs", - quantity << " ref++, now " << (quantity->ref + 1)); - quantity->ref++; + quantity << " refc++, now " << (quantity->refc + 1)); + quantity->refc++; } } commodity_ = amt.commodity_; @@ -160,7 +174,7 @@ void amount_t::_dup() { VERIFY(valid()); - if (quantity->ref > 1) { + if (quantity->refc > 1) { bigint_t * q = new bigint_t(*quantity); _release(); quantity = q; @@ -184,9 +198,9 @@ void amount_t::_release() { VERIFY(valid()); - DEBUG("amounts.refs", quantity << " ref--, now " << (quantity->ref - 1)); + DEBUG("amounts.refs", quantity << " refc--, now " << (quantity->refc - 1)); - if (--quantity->ref == 0) { + if (--quantity->refc == 0) { if (quantity->has_flags(BIGINT_BULK_ALLOC)) quantity->~bigint_t(); else @@ -920,7 +934,7 @@ bool 
amount_t::parse(std::istream& in, const parse_flags_t& flags) quantity = new bigint_t; safe_holder.reset(quantity); } - else if (quantity->ref > 1) { + else if (quantity->refc > 1) { _release(); quantity = new bigint_t; safe_holder.reset(quantity); @@ -1095,8 +1109,8 @@ bool amount_t::valid() const return false; } - if (quantity->ref == 0) { - DEBUG("ledger.validate", "amount_t: quantity->ref == 0"); + if (quantity->refc == 0) { + DEBUG("ledger.validate", "amount_t: quantity->refc == 0"); return false; } } @@ -1107,4 +1121,63 @@ bool amount_t::valid() const return true; } +#if defined(HAVE_BOOST_SERIALIZATION) + +template<class Archive> +void amount_t::serialize(Archive& ar, const unsigned int /* version */) +{ + ar & current_pool; + ar & is_initialized; + ar & quantity; + ar & commodity_; +} + +#endif // HAVE_BOOST_SERIALIZATION + } // namespace ledger + +#if defined(HAVE_BOOST_SERIALIZATION) +namespace boost { +namespace serialization { + +template <class Archive> +void serialize(Archive& ar, MP_INT& mpz, const unsigned int /* version */) +{ + ar & mpz._mp_alloc; + ar & mpz._mp_size; + ar & mpz._mp_d; +} + +template <class Archive> +void serialize(Archive& ar, MP_RAT& mpq, const unsigned int /* version */) +{ + ar & mpq._mp_num; + ar & mpq._mp_den; +} + +template <class Archive> +void serialize(Archive& ar, long unsigned int& integer, + const unsigned int /* version */) +{ + ar & make_binary_object(&integer, sizeof(long unsigned int)); +} + +} // namespace serialization +} // namespace boost + +BOOST_CLASS_EXPORT(ledger::annotated_commodity_t) + +template void boost::serialization::serialize(boost::archive::binary_oarchive&, + MP_INT&, const unsigned int); +template void boost::serialization::serialize(boost::archive::binary_iarchive&, + MP_RAT&, const unsigned int); +template void boost::serialization::serialize(boost::archive::binary_iarchive&, + long unsigned int&, + const unsigned int); + +template void ledger::amount_t::serialize(boost::archive::binary_oarchive&, + const unsigned int); +template void ledger::amount_t::serialize(boost::archive::binary_iarchive&, + const unsigned int); + +#endif // HAVE_BOOST_SERIALIZATION diff --git a/src/amount.h b/src/amount.h index b3c632af..f7d6986e 100644 --- a/src/amount.h +++ b/src/amount.h @@ -691,6 +691,16 @@ public: bool valid() const; +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */); +#endif // HAVE_BOOST_SERIALIZATION + /*@}*/ }; diff --git a/src/annotate.h b/src/annotate.h index d98f7ef6..17c8a637 100644 --- a/src/annotate.h +++ b/src/annotate.h @@ -98,6 +98,21 @@ struct annotation_t : public supports_flags<>, assert(*this); return true; } + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & boost::serialization::base_object<supports_flags<> >(*this); + ar & price; + ar & date; + ar & tag; + } +#endif // HAVE_BOOST_SERIALIZATION }; struct keep_details_t @@ -136,6 +151,21 @@ struct keep_details_t return keep_price || keep_date || keep_tag; } bool keep_any(const commodity_t& comm) const; + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. 
*/ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & keep_price; + ar & keep_date; + ar & keep_tag; + ar & only_actuals; + } +#endif // HAVE_BOOST_SERIALIZATION }; inline std::ostream& operator<<(std::ostream& out, @@ -183,6 +213,22 @@ public: virtual commodity_t& strip_annotations(const keep_details_t& what_to_keep); virtual void write_annotations(std::ostream& out) const; + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + explicit annotated_commodity_t() : ptr(NULL) {} + + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & boost::serialization::base_object<commodity_t>(*this); + ar & ptr; + ar & details; + } +#endif // HAVE_BOOST_SERIALIZATION }; inline annotated_commodity_t& diff --git a/src/archive.cc b/src/archive.cc new file mode 100644 index 00000000..d631651f --- /dev/null +++ b/src/archive.cc @@ -0,0 +1,247 @@ +/* + * Copyright (c) 2003-2009, John Wiegley. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * - Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * - Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * - Neither the name of New Artisans LLC nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +#include <system.hh> + +#if defined(HAVE_BOOST_SERIALIZATION) + +#include "archive.h" +#include "amount.h" +#include "commodity.h" +#include "pool.h" +#include "scope.h" +#include "account.h" +#include "post.h" +#include "xact.h" + +#define ARCHIVE_VERSION 0x03000001 + +//BOOST_IS_ABSTRACT(ledger::scope_t) +BOOST_CLASS_EXPORT(ledger::scope_t) +BOOST_CLASS_EXPORT(ledger::child_scope_t) +BOOST_CLASS_EXPORT(ledger::symbol_scope_t) +BOOST_CLASS_EXPORT(ledger::call_scope_t) +BOOST_CLASS_EXPORT(ledger::account_t) +BOOST_CLASS_EXPORT(ledger::item_t) +BOOST_CLASS_EXPORT(ledger::post_t) +BOOST_CLASS_EXPORT(ledger::xact_base_t) +BOOST_CLASS_EXPORT(ledger::xact_t) +BOOST_CLASS_EXPORT(ledger::auto_xact_t) +BOOST_CLASS_EXPORT(ledger::period_xact_t) + +template void ledger::journal_t::serialize(boost::archive::binary_oarchive&, + const unsigned int); +template void ledger::journal_t::serialize(boost::archive::binary_iarchive&, + const unsigned int); +namespace ledger { + +void archive_t::read_header() +{ + if (exists(file)) { + // Open the stream, read the version number and the list of sources + ifstream stream(file, std::ios::binary); + boost::archive::binary_iarchive iarchive(stream); + + DEBUG("archive.journal", "Reading header from archive"); + iarchive >> *this; + + DEBUG("archive.journal", + "Version number: " << std::hex << version << std::dec); + DEBUG("archive.journal", "Number of sources: " << sources.size()); + + foreach (const journal_t::fileinfo_t& i, sources) + DEBUG("archive.journal", "Loaded source: " << *i.filename); + } +} + +bool archive_t::should_load(const std::list<path>& data_files) +{ + std::size_t found = 0; + + DEBUG("archive.journal", "Should the archive be loaded?"); + + if (! exists(file)) { + DEBUG("archive.journal", "No, it does not exist"); + return false; + } + + if (version != ARCHIVE_VERSION) { + DEBUG("archive.journal", "No, it fails the version check"); + return false; + } + + if (data_files.empty()) { + DEBUG("archive.journal", "No, there were no data files!"); + return false; + } + + if (sources.empty()) { + DEBUG("archive.journal", "No, there were no sources!"); + return false; + } + + if (data_files.size() != sources.size()) { + DEBUG("archive.journal", "No, number of sources doesn't match: " + << data_files.size() << " != " << sources.size()); + return false; + } + + foreach (const path& p, data_files) { + DEBUG("archive.journal", "Scanning for data file: " << p); + + if (! exists(p)) { + DEBUG("archive.journal", "No, an input source no longer exists: " << p); + return false; + } + + foreach (const journal_t::fileinfo_t& i, sources) { + assert(! i.from_stream); + assert(i.filename); + + DEBUG("archive.journal", "Comparing against source file: " << *i.filename); + + if (*i.filename == p) { + if (! 
exists(*i.filename)) { + DEBUG("archive.journal", + "No, a referent source no longer exists: " << *i.filename); + return false; + } + + if (i.modtime != posix_time::from_time_t(last_write_time(p))) { + DEBUG("archive.journal", "No, a source's modtime has changed: " << p); + return false; + } + + if (i.size != file_size(p)) { + DEBUG("archive.journal", "No, a source's size has changed: " << p); + return false; + } + + found++; + } + } + } + + if (found != data_files.size()) { + DEBUG("archive.journal", "No, not every source's name matched"); + return false; + } + + DEBUG("archive.journal", "Yes, it should be loaded!"); + return true; +} + +bool archive_t::should_save(shared_ptr<journal_t> journal) +{ + std::list<path> data_files; + + DEBUG("archive.journal", "Should the archive be saved?"); + + if (journal->was_loaded) { + DEBUG("archive.journal", "No, it's one we loaded before"); + return false; + } + + if (journal->sources.empty()) { + DEBUG("archive.journal", "No, there were no sources!"); + return false; + } + + foreach (const journal_t::fileinfo_t& i, journal->sources) { + if (i.from_stream) { + DEBUG("archive.journal", "No, one source was from a stream"); + return false; + } + + if (! exists(*i.filename)) { + DEBUG("archive.journal", + "No, a source no longer exists: " << *i.filename); + return false; + } + + data_files.push_back(*i.filename); + } + + if (should_load(data_files)) { + DEBUG("archive.journal", "No, because it's still loadable"); + return false; + } + + DEBUG("archive.journal", "Yes, it should be saved!"); + return true; +} + +void archive_t::save(shared_ptr<journal_t> journal) +{ + INFO_START(archive, "Saved journal file cache"); + + ofstream archive(file, std::ios::binary); + boost::archive::binary_oarchive oa(archive); + + version = ARCHIVE_VERSION; + sources = journal->sources; + + foreach (const journal_t::fileinfo_t& i, sources) + DEBUG("archive.journal", "Saving source: " << *i.filename); + + DEBUG("archive.journal", + "Creating archive with version " << std::hex << version << std::dec); + oa << *this; + + DEBUG("archive.journal", + "Archiving journal with " << sources.size() << " sources"); + oa << *journal; + + INFO_FINISH(archive); +} + +bool archive_t::load(shared_ptr<journal_t> journal) +{ + INFO_START(archive, "Read cached journal file"); + + ifstream stream(file, std::ios::binary); + boost::archive::binary_iarchive iarchive(stream); + + // Skip past the archive header, it was already read in before + archive_t temp; + iarchive >> temp; + + iarchive >> *journal.get(); + journal->was_loaded = true; + + INFO_FINISH(archive); + + return true; +} + +} // namespace ledger + +#endif // HAVE_BOOST_SERIALIZATION diff --git a/src/archive.h b/src/archive.h new file mode 100644 index 00000000..77272dbe --- /dev/null +++ b/src/archive.h @@ -0,0 +1,105 @@ +/* + * Copyright (c) 2003-2009, John Wiegley. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * - Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * - Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. 
+ * + * - Neither the name of New Artisans LLC nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +/** + * @defgroup report Reporting + */ + +/** + * @file archive.h + * @author John Wiegley + * + * @ingroup report + * + * @brief Brief + * + * Long. + */ +#ifndef _ARCHIVE_H +#define _ARCHIVE_H + +#include "journal.h" + +namespace ledger { + +/** + * @brief Brief + * + * Long. + */ +class archive_t +{ + path file; + uint32_t version; + + std::list<journal_t::fileinfo_t> sources; + +public: + archive_t() { + TRACE_CTOR(archive_t, ""); + } + archive_t(const path& _file) + : file(_file), version(0) { + TRACE_CTOR(archive_t, "const path&"); + } + archive_t(const archive_t& ar) + : file(ar.file), version(0) { + TRACE_CTOR(archive_t, "copy"); + } + ~archive_t() { + TRACE_DTOR(archive_t); + } + + void read_header(); + + bool should_load(const std::list<path>& data_files); + bool should_save(shared_ptr<journal_t> journal); + + void save(shared_ptr<journal_t> journal); + bool load(shared_ptr<journal_t> journal); + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & version; + ar & sources; + } +#endif // HAVE_BOOST_SERIALIZATION +}; + +} // namespace ledger + +#endif // _ARCHIVE_H diff --git a/src/balance.cc b/src/balance.cc index 86352fa2..37628bd2 100644 --- a/src/balance.cc +++ b/src/balance.cc @@ -228,12 +228,14 @@ balance_t::commodity_amount(const optional<const commodity_t&>& commodity) const return temp.commodity_amount(commodity); throw_(amount_error, - _("Requested amount of a balance with multiple commodities: %1") << temp); + _("Requested amount of a balance with multiple commodities: %1") + << temp); } #endif } else if (amounts.size() > 0) { - amounts_map::const_iterator i = amounts.find(&*commodity); + amounts_map::const_iterator i = + amounts.find(const_cast<commodity_t *>(&*commodity)); if (i != amounts.end()) return i->second; } diff --git a/src/balance.h b/src/balance.h index fe8afe2b..5452510b 100644 --- a/src/balance.h +++ b/src/balance.h @@ -80,7 +80,7 @@ class balance_t multiplicative<balance_t, long> > > > > > > > > > > > > > { public: - typedef std::map<const commodity_t *, amount_t> amounts_map; + typedef std::map<commodity_t *, amount_t> amounts_map; amounts_map amounts; @@ -523,6 +523,18 @@ public: } return true; } + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. 
*/ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & amounts; + } +#endif // HAVE_BOOST_SERIALIZATION }; inline std::ostream& operator<<(std::ostream& out, const balance_t& bal) { diff --git a/src/chain.cc b/src/chain.cc index 018b3812..d7d1460b 100644 --- a/src/chain.cc +++ b/src/chain.cc @@ -223,33 +223,33 @@ post_handler_ptr chain_post_handlers(report_t& report, if (report.HANDLED(set_account_)) handler.reset(new transfer_details(handler, transfer_details::SET_ACCOUNT, - report.session.master.get(), + report.session.journal->master, report.HANDLER(set_account_).str(), report)); else if (report.HANDLED(set_payee_)) handler.reset(new transfer_details(handler, transfer_details::SET_PAYEE, - report.session.master.get(), + report.session.journal->master, report.HANDLER(set_payee_).str(), report)); else if (report.HANDLED(comm_as_payee)) handler.reset(new transfer_details(handler, transfer_details::SET_PAYEE, - report.session.master.get(), + report.session.journal->master, expr_t("commodity"), report)); else if (report.HANDLED(code_as_payee)) handler.reset(new transfer_details(handler, transfer_details::SET_PAYEE, - report.session.master.get(), + report.session.journal->master, expr_t("code"), report)); else if (report.HANDLED(payee_as_account)) handler.reset(new transfer_details(handler, transfer_details::SET_ACCOUNT, - report.session.master.get(), + report.session.journal->master, expr_t("payee"), report)); else if (report.HANDLED(comm_as_account)) handler.reset(new transfer_details(handler, transfer_details::SET_ACCOUNT, - report.session.master.get(), + report.session.journal->master, expr_t("commodity"), report)); else if (report.HANDLED(code_as_account)) handler.reset(new transfer_details(handler, transfer_details::SET_ACCOUNT, - report.session.master.get(), + report.session.journal->master, expr_t("code"), report)); return handler; diff --git a/src/commodity.h b/src/commodity.h index 5d73f4e8..d91fce85 100644 --- a/src/commodity.h +++ b/src/commodity.h @@ -62,6 +62,19 @@ struct price_point_t { datetime_t when; amount_t price; + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & when; + ar & price; + } +#endif // HAVE_BOOST_SERIALIZATION }; /** @@ -78,7 +91,7 @@ class commodity_t public: class base_t : public noncopyable, public supports_flags<uint_least16_t> { - base_t(); + base_t() {} public: typedef std::map<const datetime_t, amount_t> history_map; @@ -100,6 +113,18 @@ public: , const int indent = 0 #endif ) const; + +#if defined(HAVE_BOOST_SERIALIZATION) + private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & prices; + } +#endif // HAVE_BOOST_SERIALIZATION }; typedef std::map<commodity_t *, history_t> history_by_commodity_map; @@ -126,6 +151,18 @@ public: optional<history_t&> history(const optional<commodity_t&>& commodity = none); + +#if defined(HAVE_BOOST_SERIALIZATION) + private: + /** Serialization. 
*/ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & histories; + } +#endif // HAVE_BOOST_SERIALIZATION }; #define COMMODITY_STYLE_DEFAULTS 0x000 @@ -158,6 +195,25 @@ public: ~base_t() { TRACE_DTOR(base_t); } + +#if defined(HAVE_BOOST_SERIALIZATION) + private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & boost::serialization::base_object<supports_flags<uint_least16_t> >(*this); + ar & symbol; + ar & precision; + ar & name; + ar & note; + ar & varied_history; + ar & smaller; + ar & larger; + } +#endif // HAVE_BOOST_SERIALIZATION }; public: @@ -330,6 +386,31 @@ public: } bool valid() const; + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + supports_flags<uint_least16_t> temp_flags; + +protected: + explicit commodity_t() + : delegates_flags<uint_least16_t>(temp_flags), parent_(NULL), + annotated(false) {} + +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & boost::serialization::base_object<delegates_flags<uint_least16_t> >(*this); + ar & base; + ar & parent_; + ar & qualified_symbol; + ar & mapping_key_; + ar & annotated; + } +#endif // HAVE_BOOST_SERIALIZATION }; inline std::ostream& operator<<(std::ostream& out, const commodity_t& comm) { diff --git a/src/derive.cc b/src/derive.cc index 2fe5754f..ef2d1e51 100644 --- a/src/derive.cc +++ b/src/derive.cc @@ -252,7 +252,7 @@ namespace { if (tmpl.payee_mask.match((*j)->payee)) { matching = *j; DEBUG("derive.xact", - "Found payee match: transaction on line " << (*j)->beg_line); + "Found payee match: transaction on line " << (*j)->pos->beg_line); break; } } @@ -332,7 +332,7 @@ namespace { if (post.account_mask->match(x->account->fullname())) { new_post.reset(new post_t(*x)); DEBUG("derive.xact", - "Founding posting from line " << x->beg_line); + "Founding posting from line " << x->pos->beg_line); break; } } diff --git a/src/emacs.cc b/src/emacs.cc index 57054690..24d3f1c1 100644 --- a/src/emacs.cc +++ b/src/emacs.cc @@ -40,8 +40,8 @@ namespace ledger { void format_emacs_posts::write_xact(xact_t& xact) { - out << "\"" << xact.pathname << "\" " - << (xact.beg_line + 1) << " "; + out << "\"" << xact.pos->pathname << "\" " + << (xact.pos->beg_line + 1) << " "; tm when = gregorian::to_tm(xact.date()); std::time_t date = std::mktime(&when); // jww (2008-04-20): Is this GMT or local? 
@@ -77,7 +77,7 @@ void format_emacs_posts::operator()(post_t& post) out << "\n"; } - out << " (" << (post.beg_line + 1) << " "; + out << " (" << (post.pos->beg_line + 1) << " "; out << "\"" << post.reported_account()->fullname() << "\" \"" << post.amount << "\""; diff --git a/src/error.cc b/src/error.cc index 70759b08..d5abe4de 100644 --- a/src/error.cc +++ b/src/error.cc @@ -96,10 +96,10 @@ string source_context(const path& file, ifstream in(file); in.seekg(pos, std::ios::beg); - scoped_array<char> buf(new char[len + 1]); - in.read(buf.get(), len); - assert(in.gcount() == len); - buf[len] = '\0'; + scoped_array<char> buf(new char[static_cast<std::size_t>(len) + 1]); + in.read(buf.get(), static_cast<std::streamsize>(len)); + assert(in.gcount() == static_cast<std::streamsize>(len)); + buf[static_cast<std::size_t>(len)] = '\0'; bool first = true; for (char * p = std::strtok(buf.get(), "\n"); @@ -103,7 +103,7 @@ public: expr_t(const string& _str, const uint_least8_t flags = 0); expr_t(std::istream& in, const uint_least8_t flags = 0); - virtual ~expr_t() throw(); + ~expr_t() throw(); expr_t& operator=(const expr_t& _expr); expr_t& operator=(const string& _expr) { @@ -163,6 +163,22 @@ public: void dump(std::ostream& out) const; static value_t eval(const string& _expr, scope_t& scope); + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & ptr; + ar & context; + ar & str; + if (Archive::is_loading::value) + compiled = false; + } +#endif // HAVE_BOOST_SERIALIZATION }; std::ostream& operator<<(std::ostream& out, const expr_t& expr); diff --git a/src/flags.h b/src/flags.h index 21607fc2..33935556 100644 --- a/src/flags.h +++ b/src/flags.h @@ -99,6 +99,17 @@ public: void drop_flags(const flags_t arg) { _flags = static_cast<T>(static_cast<U>(_flags) & static_cast<U>(~arg)); } + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) + { + ar & _flags; + } +#endif // HAVE_BOOST_SERIALIZATION }; /** @@ -201,6 +212,17 @@ public: void drop_flags(const flags_t arg) { _flags.drop_flags(arg); } + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) + { + ar & _flags; + } +#endif // HAVE_BOOST_SERIALIZATION }; #endif // _FLAGS_H diff --git a/src/global.cc b/src/global.cc index 8204bf69..9ba2a357 100644 --- a/src/global.cc +++ b/src/global.cc @@ -435,17 +435,18 @@ void global_scope_t::normalize_report_options(const string& verb) item_t::use_effective_date = (rep.HANDLED(effective) && ! 
rep.HANDLED(actual_dates)); - rep.session.commodity_pool->keep_base = rep.HANDLED(base); - rep.session.commodity_pool->get_quotes = rep.session.HANDLED(download); + rep.session.journal->commodity_pool->keep_base = rep.HANDLED(base); + rep.session.journal->commodity_pool->get_quotes = rep.session.HANDLED(download); if (rep.session.HANDLED(price_exp_)) - rep.session.commodity_pool->quote_leeway = + rep.session.journal->commodity_pool->quote_leeway = rep.session.HANDLER(price_exp_).value.as_long(); if (rep.session.HANDLED(price_db_)) - rep.session.commodity_pool->price_db = rep.session.HANDLER(price_db_).str(); + rep.session.journal->commodity_pool->price_db = + rep.session.HANDLER(price_db_).str(); else - rep.session.commodity_pool->price_db = none; + rep.session.journal->commodity_pool->price_db = none; if (rep.HANDLED(date_format_)) set_date_format(rep.HANDLER(date_format_).str().c_str()); @@ -542,7 +543,8 @@ void global_scope_t::normalize_report_options(const string& verb) if (! rep.HANDLER(date_width_).specified) rep.HANDLER(date_width_) - .on_with(none, format_date(CURRENT_DATE(), FMT_PRINTED).length()); + .on_with(none, static_cast<long>(format_date(CURRENT_DATE(), + FMT_PRINTED).length())); long date_width = rep.HANDLER(date_width_).value.to_long(); long payee_width = (rep.HANDLER(payee_width_).specified ? diff --git a/src/interactive.cc b/src/interactive.cc index d2d6256b..61273f06 100644 --- a/src/interactive.cc +++ b/src/interactive.cc @@ -118,9 +118,9 @@ void interactive_t::verify_arguments() const label = _("any value"); wrong_arg = false; break; - case 'P': - label = _("a pointer"); - wrong_arg = ! next_arg->is_pointer(); + case '^': + label = _("a scope"); + wrong_arg = ! next_arg->is_scope(); break; case 'S': label = _("a sequence"); diff --git a/src/item.cc b/src/item.cc index c4db7a51..631423a9 100644 --- a/src/item.cc +++ b/src/item.cc @@ -187,7 +187,7 @@ namespace { } value_t get_actual(item_t& item) { - return ! item.has_flags(ITEM_GENERATED); + return ! item.has_flags(ITEM_GENERATED | ITEM_TEMP); } value_t get_date(item_t& item) { @@ -224,23 +224,26 @@ namespace { } value_t get_pathname(item_t& item) { - return string_value(item.pathname.string()); + if (item.pos) + return string_value(item.pos->pathname.string()); + else + return string_value(empty_string); } value_t get_beg_pos(item_t& item) { - return long(item.beg_pos); + return item.pos ? long(item.pos->beg_pos) : 0L; } value_t get_beg_line(item_t& item) { - return long(item.beg_line); + return item.pos ? long(item.pos->beg_line) : 0L; } value_t get_end_pos(item_t& item) { - return long(item.end_pos); + return item.pos ? long(item.pos->end_pos) : 0L; } value_t get_end_line(item_t& item) { - return long(item.end_line); + return item.pos ? long(item.pos->end_line) : 0L; } value_t get_depth(item_t&) { @@ -397,12 +400,13 @@ bool item_t::valid() const void print_item(std::ostream& out, const item_t& item, const string& prefix) { - out << source_context(item.pathname, item.beg_pos, item.end_pos, prefix); + out << source_context(item.pos->pathname, item.pos->beg_pos, + item.pos->end_pos, prefix); } string item_context(const item_t& item, const string& desc) { - std::streamoff len = item.end_pos - item.beg_pos; + std::streamoff len = item.pos->end_pos - item.pos->beg_pos; if (! 
len) return _("<no item context>"); @@ -411,18 +415,18 @@ string item_context(const item_t& item, const string& desc) std::ostringstream out; - if (item.pathname == path("/dev/stdin")) { + if (item.pos->pathname == path("/dev/stdin")) { out << desc << _(" from standard input:"); return out.str(); } - out << desc << _(" from \"") << item.pathname.string() << "\""; + out << desc << _(" from \"") << item.pos->pathname.string() << "\""; - if (item.beg_line != item.end_line) - out << _(", lines ") << item.beg_line << "-" - << item.end_line << ":\n"; + if (item.pos->beg_line != item.pos->end_line) + out << _(", lines ") << item.pos->beg_line << "-" + << item.pos->end_line << ":\n"; else - out << _(", line ") << item.beg_line << ":\n"; + out << _(", line ") << item.pos->beg_line << ":\n"; print_item(out, item, "> "); @@ -50,6 +50,53 @@ namespace ledger { +struct position_t +{ + path pathname; + istream_pos_type beg_pos; + std::size_t beg_line; + istream_pos_type end_pos; + std::size_t end_line; + + position_t() : beg_pos(0), beg_line(0), end_pos(0), end_line(0) { + TRACE_CTOR(position_t, ""); + } + position_t(const position_t& pos) { + TRACE_CTOR(position_t, "copy"); + *this = pos; + } + ~position_t() throw() { + TRACE_DTOR(position_t); + } + + position_t& operator=(const position_t& pos) { + if (this != &pos) { + pathname = pos.pathname; + beg_pos = pos.beg_pos; + beg_line = pos.beg_line; + end_pos = pos.end_pos; + end_line = pos.end_line; + } + return *this; + } + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & pathname; + ar & beg_pos; + ar & beg_line; + ar & end_pos; + ar & end_line; + } +#endif // HAVE_BOOST_SERIALIZATION +}; + /** * @brief Brief * @@ -61,28 +108,21 @@ public: #define ITEM_NORMAL 0x00 // no flags at all, a basic posting // jww (2009-10-27): I'm not consistent on the difference between these two. #define ITEM_GENERATED 0x01 // posting was not found in a journal -#define ITEM_TEMP 0x02 // posting is a temporary object +#define ITEM_TEMP 0x02 // posting is a managed temporary enum state_t { UNCLEARED = 0, CLEARED, PENDING }; - state_t _state; - - optional<date_t> _date; - optional<date_t> _date_eff; - optional<string> note; - typedef std::map<string, optional<string> > string_map; - optional<string_map> metadata; - path pathname; - istream_pos_type beg_pos; - std::size_t beg_line; - istream_pos_type end_pos; - std::size_t end_line; + state_t _state; + optional<date_t> _date; + optional<date_t> _date_eff; + optional<string> note; + optional<position_t> pos; + optional<string_map> metadata; item_t(flags_t _flags = ITEM_NORMAL, const optional<string>& _note = none) - : supports_flags<>(_flags), _state(UNCLEARED), note(_note), - beg_pos(0), beg_line(0), end_pos(0), end_line(0) + : supports_flags<>(_flags), _state(UNCLEARED), note(_note) { TRACE_CTOR(item_t, "flags_t, const string&"); } @@ -102,14 +142,8 @@ public: _date = item._date; _date_eff = item._date_eff; - note = item.note; - - pathname = item.pathname; - beg_pos = item.beg_pos; - beg_line = item.beg_line; - end_pos = item.end_pos; - end_line = item.end_line; + pos = item.pos; } virtual bool operator==(const item_t& xact) { @@ -158,6 +192,25 @@ public: virtual expr_t::ptr_op_t lookup(const string& name); bool valid() const; + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. 
*/ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & boost::serialization::base_object<supports_flags<> >(*this); + ar & boost::serialization::base_object<scope_t>(*this); + ar & _state; + ar & _date; + ar & _date_eff; + ar & note; + ar & metadata; + ar & pos; + } +#endif // HAVE_BOOST_SERIALIZATION }; value_t get_comment(item_t& item); diff --git a/src/journal.cc b/src/journal.cc index fdb49e24..7dbc2907 100644 --- a/src/journal.cc +++ b/src/journal.cc @@ -32,11 +32,34 @@ #include <system.hh> #include "journal.h" +#include "amount.h" +#include "commodity.h" +#include "pool.h" #include "xact.h" #include "account.h" namespace ledger { +journal_t::journal_t() + : master(new account_t), was_loaded(false), + commodity_pool(new commodity_pool_t) +{ + TRACE_CTOR(journal_t, ""); + + // Add time commodity conversions, so that timelog's may be parsed + // in terms of seconds, but reported as minutes or hours. + if (commodity_t * commodity = commodity_pool->create("s")) + commodity->add_flags(COMMODITY_BUILTIN | COMMODITY_NOMARKET); + else + assert(false); + + // Add a "percentile" commodity + if (commodity_t * commodity = commodity_pool->create("%")) + commodity->add_flags(COMMODITY_BUILTIN | COMMODITY_NOMARKET); + else + assert(false); +} + journal_t::~journal_t() { TRACE_DTOR(journal_t); @@ -52,6 +75,9 @@ journal_t::~journal_t() foreach (period_xact_t * xact, period_xacts) checked_delete(xact); + + checked_delete(master); + commodity_pool.reset(); } void journal_t::add_account(account_t * acct) diff --git a/src/journal.h b/src/journal.h index 43309590..88a225c5 100644 --- a/src/journal.h +++ b/src/journal.h @@ -48,11 +48,11 @@ #include "utils.h" #include "hooks.h" +#include "times.h" namespace ledger { -typedef std::list<path> paths_list; - +class commodity_pool_t; class xact_t; class auto_xact_t; class xact_finalizer_t; @@ -72,18 +72,60 @@ typedef std::list<period_xact_t *> period_xacts_list; class journal_t : public noncopyable { public: - account_t * master; - account_t * basket; - xacts_list xacts; - - auto_xacts_list auto_xacts; - period_xacts_list period_xacts; - + struct fileinfo_t + { + optional<path> filename; + uintmax_t size; + datetime_t modtime; + bool from_stream; + + fileinfo_t() : size(0), from_stream(true) { + TRACE_CTOR(journal_t::fileinfo_t, ""); + } + fileinfo_t(const path& _filename) + : filename(_filename), from_stream(false) { + TRACE_CTOR(journal_t::fileinfo_t, "const path&"); + size = file_size(*filename); + modtime = posix_time::from_time_t(last_write_time(*filename)); + } + fileinfo_t(const fileinfo_t& info) + : filename(info.filename), size(info.size), + modtime(info.modtime), from_stream(info.from_stream) + { + TRACE_CTOR(journal_t::fileinfo_t, "copy"); + } + ~fileinfo_t() throw() { + TRACE_DTOR(journal_t::fileinfo_t); + } + +#if defined(HAVE_BOOST_SERIALIZATION) + private: + /** Serialization. 
*/ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & filename; + ar & size; + ar & modtime; + ar & from_stream; + } +#endif // HAVE_BOOST_SERIALIZATION + }; + + account_t * master; + account_t * basket; + xacts_list xacts; + auto_xacts_list auto_xacts; + period_xacts_list period_xacts; + std::list<fileinfo_t> sources; + bool was_loaded; + + shared_ptr<commodity_pool_t> commodity_pool; hooks_t<xact_finalizer_t, xact_t> xact_finalize_hooks; - journal_t(account_t * _master = NULL) : master(_master) { - TRACE_CTOR(journal_t, ""); - } + journal_t(); ~journal_t(); // These four methods are delegated to the current session, since all @@ -110,6 +152,23 @@ public: bool strict = false); bool valid() const; + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & master; + ar & basket; + ar & xacts; + ar & auto_xacts; + ar & period_xacts; + ar & sources; + } +#endif // HAVE_BOOST_SERIALIZATION }; } // namespace ledger @@ -94,6 +94,25 @@ public: } return true; } + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + string temp; + if (Archive::is_loading::value) { + ar & temp; + *this = temp; + } else { + temp = expr.str(); + ar & temp; + } + } +#endif // HAVE_BOOST_SERIALIZATION }; inline std::ostream& operator<<(std::ostream& out, const mask_t& mask) { @@ -172,12 +172,12 @@ value_t expr_t::op_t::calc(scope_t& scope, ptr_op_t * locus, const int depth) left()->left() && left()->left()->is_function()) { call_scope_t call_args(scope); if (value_t obj = left()->left()->as_function()(call_args)) { - if (obj.is_pointer()) { - if (obj.as_pointer_lval<scope_t>() == NULL) { + if (obj.is_scope()) { + if (obj.as_scope() == NULL) { throw_(calc_error, _("Left operand of . operator is NULL")); } else { - scope_t& objscope(obj.as_ref_lval<scope_t>()); + scope_t& objscope(*obj.as_scope()); if (ptr_op_t member = objscope.lookup(right()->as_ident())) { result = member->calc(objscope, NULL, depth + 1); break; @@ -192,6 +192,7 @@ public: } ptr_op_t& left() { + assert(kind > TERMINALS || kind == IDENT); return left_; } const ptr_op_t& left() const { @@ -289,6 +290,33 @@ public: static ptr_op_t wrap_value(const value_t& val); static ptr_op_t wrap_functor(const function_t& fobj); + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & refc; + ar & kind; + if (Archive::is_loading::value || ! left_ || left_->kind != FUNCTION) { + ar & left_; + } else { + ptr_op_t temp_op; + ar & temp_op; + } + if (Archive::is_loading::value || kind == VALUE || kind == IDENT || + (kind > UNARY_OPERATORS && + (! has_right() || ! 
right()->is_function()))) { + ar & data; + } else { + variant<ptr_op_t, value_t, string, function_t> temp_data; + ar & temp_data; + } + } +#endif // HAVE_BOOST_SERIALIZATION }; inline expr_t::ptr_op_t diff --git a/src/output.cc b/src/output.cc index 371319bd..e2bbb7ec 100644 --- a/src/output.cc +++ b/src/output.cc @@ -203,7 +203,7 @@ void format_accounts::flush() disp_pred.predicate.parse(report.HANDLER(display_).str()); } - mark_accounts(*report.session.master, report.HANDLED(flat)); + mark_accounts(*report.session.journal->master, report.HANDLED(flat)); std::size_t displayed = 0; @@ -212,7 +212,7 @@ void format_accounts::flush() if (displayed > 1 && ! report.HANDLED(no_total) && ! report.HANDLED(percent)) { - bind_scope_t bound_scope(report, *report.session.master); + bind_scope_t bound_scope(report, *report.session.journal->master); separator_format.format(out, bound_scope); total_line_format.format(out, bound_scope); } @@ -134,6 +134,24 @@ public: parse_price_expression(const std::string& str, const bool add_prices = true, const optional<datetime_t>& moment = none); + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & commodities; + ar & null_commodity; + ar & default_commodity; + ar & keep_base; + ar & price_db; + ar & quote_leeway; + ar & get_quotes; + } +#endif // HAVE_BOOST_SERIALIZATION }; } // namespace ledger @@ -205,6 +205,23 @@ public: } friend class xact_t; + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & boost::serialization::base_object<item_t>(*this); + ar & xact; + ar & account; + ar & amount; + ar & cost; + ar & assigned_amount; + } +#endif // HAVE_BOOST_SERIALIZATION }; } // namespace ledger diff --git a/src/predicate.h b/src/predicate.h index 555fac05..5e900234 100644 --- a/src/predicate.h +++ b/src/predicate.h @@ -94,6 +94,19 @@ public: throw; } } + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & predicate; + ar & what_to_keep; + } +#endif // HAVE_BOOST_SERIALIZATION }; class query_lexer_t diff --git a/src/py_value.cc b/src/py_value.cc index 8e579104..9aa4984e 100644 --- a/src/py_value.cc +++ b/src/py_value.cc @@ -45,8 +45,8 @@ BOOST_PYTHON_MEMBER_FUNCTION_OVERLOADS(value_overloads, value, 0, 2) namespace { expr_t py_value_getattr(const value_t& value, const string& name) { - if (value.is_pointer()) { - if (scope_t * scope = value.as_pointer<scope_t>()) + if (value.is_scope()) { + if (scope_t * scope = value.as_scope()) return expr_t(scope->lookup(name), scope); } throw_(value_error, _("Cannot lookup attributes in %1") << value.label()); @@ -283,7 +283,7 @@ void export_value() .value("BALANCE", value_t::BALANCE) .value("STRING", value_t::STRING) .value("SEQUENCE", value_t::SEQUENCE) - .value("POINTER", value_t::POINTER) + .value("SCOPE", value_t::SCOPE) ; scope().attr("NULL_VALUE") = NULL_VALUE; diff --git a/src/report.cc b/src/report.cc index 8628cac7..bc0680d1 100644 --- a/src/report.cc +++ b/src/report.cc @@ -87,11 +87,11 @@ void report_t::accounts_report(acct_handler_ptr handler) scoped_ptr<accounts_iterator> iter; if (! 
HANDLED(sort_)) { - iter.reset(new basic_accounts_iterator(*session.master)); + iter.reset(new basic_accounts_iterator(*session.journal->master)); } else { expr_t sort_expr(HANDLER(sort_).str()); sort_expr.set_context(this); - iter.reset(new sorted_accounts_iterator(*session.master.get(), + iter.reset(new sorted_accounts_iterator(*session.journal->master, sort_expr, HANDLED(flat))); } diff --git a/src/scope.h b/src/scope.h index 2539074e..fc330ba0 100644 --- a/src/scope.h +++ b/src/scope.h @@ -67,6 +67,16 @@ public: virtual void define(const string&, expr_t::ptr_op_t) {} virtual expr_t::ptr_op_t lookup(const string& name) = 0; + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive &, const unsigned int /* version */) {} +#endif // HAVE_BOOST_SERIALIZATION }; /** @@ -100,6 +110,19 @@ public: return parent->lookup(name); return NULL; } + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & boost::serialization::base_object<scope_t>(*this); + ar & parent; + } +#endif // HAVE_BOOST_SERIALIZATION }; /** @@ -127,6 +150,19 @@ public: virtual void define(const string& name, expr_t::ptr_op_t def); virtual expr_t::ptr_op_t lookup(const string& name); + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & boost::serialization::base_object<child_scope_t>(*this); + ar & symbols; + } +#endif // HAVE_BOOST_SERIALIZATION }; /** @@ -138,8 +174,6 @@ class call_scope_t : public child_scope_t { value_t args; - call_scope_t(); - public: explicit call_scope_t(scope_t& _parent) : child_scope_t(_parent) { TRACE_CTOR(call_scope_t, "scope_t&"); @@ -182,6 +216,21 @@ public: bool empty() const { return args.size() == 0; } + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + explicit call_scope_t() {} + + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & boost::serialization::base_object<child_scope_t>(*this); + ar & args; + } +#endif // HAVE_BOOST_SERIALIZATION }; /** @@ -215,6 +264,19 @@ public: return def; return child_scope_t::lookup(name); } + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & boost::serialization::base_object<child_scope_t>(*this); + ar & grandchild; + } +#endif // HAVE_BOOST_SERIALIZATION }; /** diff --git a/src/session.cc b/src/session.cc index ea9ae180..b7fdf275 100644 --- a/src/session.cc +++ b/src/session.cc @@ -32,13 +32,12 @@ #include <system.hh> #include "session.h" -#include "commodity.h" -#include "pool.h" #include "xact.h" #include "account.h" #include "journal.h" #include "iterators.h" #include "filters.h" +#include "archive.h" namespace ledger { @@ -46,7 +45,7 @@ void set_session_context(session_t * session) { if (session) { times_initialize(); - amount_t::initialize(session->commodity_pool); + amount_t::initialize(session->journal->commodity_pool); // jww (2009-02-04): Is amount_t the right place for parse_conversion to // happen? 
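
The set_session_context() hunk above, together with the global.cc, output.cc, report.cc and stats.cc hunks elsewhere in this commit, makes the same mechanical change throughout: the commodity pool and the master account are now owned by journal_t instead of session_t, so callers reach both through session.journal. Below is a minimal sketch of the new access path; it is not part of the commit, the helper function and its arguments are purely illustrative, and only the member names and headers are taken from the diff:

    #include <system.hh>      // ledger sources include the system header first
    #include "session.h"
    #include "journal.h"
    #include "account.h"
    #include "pool.h"

    // After this commit session_t keeps only a shared_ptr<journal_t>; the
    // journal constructs its own commodity_pool_t and master account_t.
    void configure_pricing(ledger::session_t& session, bool download)
    {
      ledger::journal_t& journal = *session.journal;

      // Pool settings shown in the global.cc hunk earlier in this diff:
      journal.commodity_pool->keep_base  = false;
      journal.commodity_pool->get_quotes = download;
      journal.commodity_pool->price_db   = ledger::path(".pricedb");

      // The account tree moved the same way (see the report.cc hunk):
      ledger::account_t * master = journal.master;
      (void) master;
    }
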
@@ -64,12 +63,8 @@ void set_session_context(session_t * session) session_t::session_t() : flush_on_next_data_file(false), - current_year(CURRENT_DATE().year()), - - commodity_pool(new commodity_pool_t), - master(new account_t), - journal(new journal_t(master.get())) + journal(new journal_t) { TRACE_CTOR(session_t, ""); @@ -77,19 +72,6 @@ session_t::session_t() HANDLER(price_db_).on(none, (path(home_var) / ".pricedb").string()); else HANDLER(price_db_).on(none, path("./.pricedb").string()); - - // Add time commodity conversions, so that timelog's may be parsed - // in terms of seconds, but reported as minutes or hours. - if (commodity_t * commodity = commodity_pool->create("s")) - commodity->add_flags(COMMODITY_BUILTIN | COMMODITY_NOMARKET); - else - assert(false); - - // Add a "percentile" commodity - if (commodity_t * commodity = commodity_pool->create("%")) - commodity->add_flags(COMMODITY_BUILTIN | COMMODITY_NOMARKET); - else - assert(false); } std::size_t session_t::read_journal(std::istream& in, @@ -123,6 +105,9 @@ std::size_t session_t::read_journal(const path& pathname, std::size_t session_t::read_data(const string& master_account) { + bool populated_data_files = false; + bool populated_price_db = false; + if (HANDLER(file_).data_files.empty()) { path file; if (const char * home_var = std::getenv("HOME")) @@ -132,6 +117,8 @@ std::size_t session_t::read_data(const string& master_account) HANDLER(file_).data_files.push_back(file); else throw_(parse_error, "No journal file was specified (please use -f)"); + + populated_data_files = true; } std::size_t xact_count = 0; @@ -140,43 +127,75 @@ std::size_t session_t::read_data(const string& master_account) if (! master_account.empty()) acct = journal->find_account(master_account); - if (HANDLED(price_db_)) { - path price_db_path = resolve_path(HANDLER(price_db_).str()); - if (exists(price_db_path) && read_journal(price_db_path) > 0) - throw_(parse_error, _("Transactions not allowed in price history file")); - } - - foreach (const path& pathname, HANDLER(file_).data_files) { - path filename = resolve_path(pathname); - if (filename == "-") { - // To avoid problems with stdin and pipes, etc., we read the entire - // file in beforehand into a memory buffer, and then parcel it out - // from there. - std::ostringstream buffer; - - while (std::cin.good() && ! std::cin.eof()) { - char line[8192]; - std::cin.read(line, 8192); - std::streamsize count = std::cin.gcount(); - buffer.write(line, count); - } - buffer.flush(); + optional<path> price_db_path; + if (HANDLED(price_db_)) + price_db_path = resolve_path(HANDLER(price_db_).str()); - std::istringstream buf_in(buffer.str()); + optional<archive_t> cache; + if (HANDLED(cache_) && master_account.empty()) { + cache = archive_t(HANDLED(cache_).str()); + cache->read_header(); - xact_count += read_journal(buf_in, "/dev/stdin", acct); + if (price_db_path) { + HANDLER(file_).data_files.push_back(*price_db_path); + populated_price_db = true; } - else if (exists(filename)) { - xact_count += read_journal(filename, acct); + } + + if (! 
(cache && + cache->should_load(HANDLER(file_).data_files) && + cache->load(journal))) { + if (price_db_path) { + if (exists(*price_db_path) && read_journal(*price_db_path) > 0) + throw_(parse_error, _("Transactions not allowed in price history file")); + journal->sources.push_back(journal_t::fileinfo_t(*price_db_path)); + HANDLER(file_).data_files.remove(*price_db_path); } - else { - throw_(parse_error, _("Could not read journal file '%1'") << filename); + + foreach (const path& pathname, HANDLER(file_).data_files) { + path filename = resolve_path(pathname); + if (filename == "-") { + // To avoid problems with stdin and pipes, etc., we read the entire + // file in beforehand into a memory buffer, and then parcel it out + // from there. + std::ostringstream buffer; + + while (std::cin.good() && ! std::cin.eof()) { + char line[8192]; + std::cin.read(line, 8192); + std::streamsize count = std::cin.gcount(); + buffer.write(line, count); + } + buffer.flush(); + + std::istringstream buf_in(buffer.str()); + + xact_count += read_journal(buf_in, "/dev/stdin", acct); + journal->sources.push_back(journal_t::fileinfo_t()); + } + else if (exists(filename)) { + xact_count += read_journal(filename, acct); + journal->sources.push_back(journal_t::fileinfo_t(filename)); + } + else { + throw_(parse_error, _("Could not read journal file '%1'") << filename); + } } + + assert(xact_count == journal->xacts.size()); + + if (cache && cache->should_save(journal)) + cache->save(journal); } + if (populated_data_files) + HANDLER(file_).data_files.clear(); + else if (populated_price_db) + HANDLER(file_).data_files.remove(*price_db_path); + VERIFY(journal->valid()); - return xact_count; + return journal->xacts.size(); } void session_t::read_journal_files() @@ -200,14 +219,10 @@ void session_t::read_journal_files() void session_t::close_journal_files() { journal.reset(); - master.reset(); - commodity_pool.reset(); amount_t::shutdown(); - commodity_pool.reset(new commodity_pool_t); - amount_t::initialize(commodity_pool); - master.reset(new account_t); - journal.reset(new journal_t(master.get())); + journal.reset(new journal_t); + amount_t::initialize(journal->commodity_pool); } void session_t::clean_posts() @@ -224,9 +239,9 @@ void session_t::clean_posts(xact_t& xact) void session_t::clean_accounts() { - basic_accounts_iterator acct_walker(*master); + basic_accounts_iterator acct_walker(*journal->master); pass_down_accounts(acct_handler_ptr(new clear_account_xdata), acct_walker); - master->clear_xdata(); + journal->master->clear_xdata(); } option_t<session_t> * session_t::lookup_option(const char * p) @@ -241,6 +256,9 @@ option_t<session_t> * session_t::lookup_option(const char * p) case 'a': OPT_(account_); // -a break; + case 'c': + OPT(cache_); + break; case 'd': OPT(download); // -Q break; diff --git a/src/session.h b/src/session.h index 4955053d..c1e0243b 100644 --- a/src/session.h +++ b/src/session.h @@ -66,12 +66,9 @@ class session_t : public symbol_scope_t friend void set_session_context(session_t * session); public: - bool flush_on_next_data_file; - date_t::year_type current_year; - - shared_ptr<commodity_pool_t> commodity_pool; - scoped_ptr<account_t> master; - scoped_ptr<journal_t> journal; + bool flush_on_next_data_file; + date_t::year_type current_year; + shared_ptr<journal_t> journal; explicit session_t(); virtual ~session_t() { @@ -106,6 +103,7 @@ public: void report_options(std::ostream& out) { HANDLER(account_).report(out); + HANDLER(cache_).report(out); HANDLER(download).report(out); 
HANDLER(file_).report(out); HANDLER(input_date_format_).report(out); @@ -123,6 +121,7 @@ public: */ OPTION(session_t, account_); // -a + OPTION(session_t, cache_); OPTION(session_t, download); // -Q OPTION__ diff --git a/src/stats.cc b/src/stats.cc index 5bb97fd1..6f0e21f4 100644 --- a/src/stats.cc +++ b/src/stats.cc @@ -46,7 +46,7 @@ value_t report_statistics(call_scope_t& args) std::ostream& out(report.output_stream); const account_t::xdata_t::details_t& - statistics(report.session.master->family_details(true)); + statistics(report.session.journal->master->family_details(true)); if (! is_valid(statistics.earliest_post) && ! is_valid(statistics.latest_post)) diff --git a/src/system.hh.in b/src/system.hh.in index 07598173..4a7dc55f 100644 --- a/src/system.hh.in +++ b/src/system.hh.in @@ -138,7 +138,6 @@ typedef std::ostream::pos_type ostream_pos_type; #include <boost/algorithm/string/classification.hpp> #include <boost/algorithm/string/predicate.hpp> -#include <boost/any.hpp> #include <boost/bind.hpp> #include <boost/cast.hpp> #include <boost/current_function.hpp> @@ -168,6 +167,74 @@ typedef std::ostream::pos_type ostream_pos_type; #include <boost/variant.hpp> #include <boost/version.hpp> +#if defined(HAVE_BOOST_SERIALIZATION) + +#include <boost/archive/binary_iarchive.hpp> +#include <boost/archive/binary_oarchive.hpp> + +#include <boost/serialization/base_object.hpp> +#include <boost/serialization/binary_object.hpp> +#include <boost/serialization/optional.hpp> +#include <boost/serialization/shared_ptr.hpp> +#include <boost/serialization/variant.hpp> +#include <boost/serialization/utility.hpp> +#include <boost/serialization/export.hpp> +#include <boost/serialization/level.hpp> +#include <boost/serialization/string.hpp> +#include <boost/serialization/vector.hpp> +#include <boost/serialization/deque.hpp> +#include <boost/serialization/list.hpp> +#include <boost/serialization/map.hpp> + +#include <boost/date_time/posix_time/time_serialize.hpp> +#include <boost/date_time/gregorian/greg_serialize.hpp> + +namespace boost { +namespace serialization { + +template <class Archive> +void serialize(Archive& ar, boost::filesystem::path& p, const unsigned int) +{ + std::string s; + if (Archive::is_saving::value) + s = p.string(); + + ar & s; + + if (Archive::is_loading::value) + p = s; +} + +template <class Archive, class T> +void serialize(Archive& ar, boost::intrusive_ptr<T>& ptr, const unsigned int) +{ + if (Archive::is_saving::value) { + T * p = ptr.get(); + ar & p; + } + else if (Archive::is_loading::value) { + T * p; + ar & p; + ptr.reset(p); + } +} + +template <class Archive, class T> +void serialize(Archive&, boost::function<T>&, const unsigned int) +{ +} + +template <class Archive> +void serialize(Archive& ar, istream_pos_type& pos, const unsigned int) +{ + ar & make_binary_object(&pos, sizeof(istream_pos_type)); +} + +} // namespace serialization +} // namespace boost + +#endif // HAVE_BOOST_SERIALIZATION + #if defined(HAVE_BOOST_PYTHON) #include <boost/python.hpp> diff --git a/src/textual.cc b/src/textual.cc index f05499df..9375ea4f 100644 --- a/src/textual.cc +++ b/src/textual.cc @@ -525,11 +525,12 @@ void instance_t::automated_xact_directive(char * line) journal.auto_xacts.push_back(ae.get()); - ae->pathname = pathname; - ae->beg_pos = pos; - ae->beg_line = lnum; - ae->end_pos = curr_pos; - ae->end_line = linenum; + ae->pos = position_t(); + ae->pos->pathname = pathname; + ae->pos->beg_pos = pos; + ae->pos->beg_line = lnum; + ae->pos->end_pos = curr_pos; + ae->pos->end_line = linenum; 
ae.release(); } @@ -565,11 +566,12 @@ void instance_t::period_xact_directive(char * line) journal.period_xacts.push_back(pe.get()); - pe->pathname = pathname; - pe->beg_pos = pos; - pe->beg_line = lnum; - pe->end_pos = curr_pos; - pe->end_line = linenum; + pe->pos = position_t(); + pe->pos->pathname = pathname; + pe->pos->beg_pos = pos; + pe->pos->beg_line = lnum; + pe->pos->end_pos = curr_pos; + pe->pos->end_line = linenum; pe.release(); } else { @@ -778,10 +780,11 @@ post_t * instance_t::parse_post(char * line, std::auto_ptr<post_t> post(new post_t); - post->xact = xact; // this could be NULL - post->pathname = pathname; - post->beg_pos = line_beg_pos; - post->beg_line = linenum; + post->xact = xact; // this could be NULL + post->pos = position_t(); + post->pos->pathname = pathname; + post->pos->beg_pos = line_beg_pos; + post->pos->beg_line = linenum; char buf[MAX_LINE + 1]; std::strcpy(buf, line); @@ -1056,8 +1059,8 @@ post_t * instance_t::parse_post(char * line, _("Unexpected char '%1' (Note: inline math requires parentheses)") << *next); - post->end_pos = curr_pos; - post->end_line = linenum; + post->pos->end_pos = curr_pos; + post->pos->end_line = linenum; if (! tag_stack.empty()) { foreach (const string& tag, tag_stack) @@ -1107,9 +1110,10 @@ xact_t * instance_t::parse_xact(char * line, std::auto_ptr<xact_t> xact(new xact_t); - xact->pathname = pathname; - xact->beg_pos = line_beg_pos; - xact->beg_line = linenum; + xact->pos = position_t(); + xact->pos->pathname = pathname; + xact->pos->beg_pos = line_beg_pos; + xact->pos->beg_line = linenum; bool reveal_context = true; @@ -1189,8 +1193,8 @@ xact_t * instance_t::parse_xact(char * line, // This is a trailing note, and possibly a metadata info tag item->append_note(p + 1, current_year); - item->end_pos = curr_pos; - item->end_line++; + item->pos->end_pos = curr_pos; + item->pos->end_line++; } else { reveal_context = false; @@ -1216,8 +1220,8 @@ xact_t * instance_t::parse_xact(char * line, } } - xact->end_pos = curr_pos; - xact->end_line = linenum; + xact->pos->end_pos = curr_pos; + xact->pos->end_line = linenum; if (! 
tag_stack.empty()) { foreach (const string& tag, tag_stack) @@ -1232,8 +1236,8 @@ xact_t * instance_t::parse_xact(char * line, catch (const std::exception& err) { if (reveal_context) { add_error_context(_("While parsing transaction:")); - add_error_context(source_context(xact->pathname, - xact->beg_pos, curr_pos, "> ")); + add_error_context(source_context(xact->pos->pathname, + xact->pos->beg_pos, curr_pos, "> ")); } throw; } diff --git a/src/times.cc b/src/times.cc index 7b6eb6e8..667f65a3 100644 --- a/src/times.cc +++ b/src/times.cc @@ -314,50 +314,18 @@ date_t parse_date(const char * str, optional<date_t::year_type> current_year) return parse_date_mask(str, current_year, saw_year); } -date_t date_interval_t::add_duration(const date_t& date, - const duration_t& duration) -{ - if (duration.type() == typeid(gregorian::days)) - return date + boost::get<gregorian::days>(duration); - else if (duration.type() == typeid(gregorian::weeks)) - return date + boost::get<gregorian::weeks>(duration); - else if (duration.type() == typeid(gregorian::months)) - return date + boost::get<gregorian::months>(duration); - else - assert(duration.type() == typeid(gregorian::years)); - return date + boost::get<gregorian::years>(duration); -} - -date_t date_interval_t::subtract_duration(const date_t& date, - const duration_t& duration) -{ - if (duration.type() == typeid(gregorian::days)) - return date - boost::get<gregorian::days>(duration); - else if (duration.type() == typeid(gregorian::weeks)) - return date - boost::get<gregorian::weeks>(duration); - else if (duration.type() == typeid(gregorian::months)) - return date - boost::get<gregorian::months>(duration); - else - assert(duration.type() == typeid(gregorian::years)); - return date - boost::get<gregorian::years>(duration); -} - std::ostream& operator<<(std::ostream& out, const date_interval_t::duration_t& duration) { - if (duration.type() == typeid(gregorian::days)) - out << boost::get<gregorian::days>(duration).days() - << " day(s)"; - else if (duration.type() == typeid(gregorian::weeks)) - out << (boost::get<gregorian::weeks>(duration).days() / 7) - << " week(s)"; - else if (duration.type() == typeid(gregorian::months)) - out << boost::get<gregorian::months>(duration).number_of_months() - << " month(s)"; + if (duration.quantum == date_interval_t::duration_t::DAYS) + out << duration.length << " day(s)"; + else if (duration.quantum == date_interval_t::duration_t::WEEKS) + out << duration.length << " week(s)"; + else if (duration.quantum == date_interval_t::duration_t::MONTHS) + out << duration.length << " month(s)"; else { - assert(duration.type() == typeid(gregorian::years)); - out << boost::get<gregorian::years>(duration).number_of_years() - << " year(s)"; + assert(duration.quantum == date_interval_t::duration_t::YEARS); + out << duration.length << " year(s)"; } return out; } @@ -365,7 +333,7 @@ std::ostream& operator<<(std::ostream& out, void date_interval_t::resolve_end() { if (start && ! end_of_duration) { - end_of_duration = add_duration(*start, *duration); + end_of_duration = duration->add(*start); DEBUG("times.interval", "stabilize: end_of_duration = " << *end_of_duration); } @@ -383,7 +351,7 @@ void date_interval_t::resolve_end() } if (start && ! next) { - next = add_duration(*start, *skip_duration); + next = skip_duration->add(*start); DEBUG("times.interval", "stabilize: next set to: " << *next); } @@ -423,8 +391,8 @@ void date_interval_t::stabilize(const optional<date_t>& date) date_t when = start ? 
*start : *date; - if (duration->type() == typeid(gregorian::months) || - duration->type() == typeid(gregorian::years)) { + if (duration->quantum == duration_t::MONTHS || + duration->quantum == duration_t::YEARS) { DEBUG("times.interval", "stabilize: monthly or yearly duration"); start = date_t(when.year(), gregorian::Jan, 1); @@ -433,7 +401,7 @@ void date_interval_t::stabilize(const optional<date_t>& date) start = date_t(when - gregorian::days(400)); - if (duration->type() == typeid(gregorian::weeks)) { + if (duration->quantum == duration_t::WEEKS) { // Move it to a Sunday while (start->day_of_week() != start_of_week) *start += gregorian::days(1); @@ -540,8 +508,8 @@ bool date_interval_t::find_period(const date_t& date) return true; } - scan = add_duration(scan, *skip_duration); - end_of_scan = add_duration(scan, *duration); + scan = skip_duration->add(scan); + end_of_scan = duration->add(scan); } return false; @@ -565,7 +533,7 @@ date_interval_t& date_interval_t::operator++() } else { start = *next; - end_of_duration = add_duration(*start, *duration); + end_of_duration = duration->add(*start); } next = none; @@ -634,15 +602,15 @@ namespace { assert(look_for_start || look_for_end); if (word == _("year")) { - duration = gregorian::years(1); + duration = date_interval_t::duration_t(date_interval_t::duration_t::YEARS, 1); start = gregorian::date(start.year(), 1, 1); } else if (word == _("month")) { - duration = gregorian::months(1); + duration = date_interval_t::duration_t(date_interval_t::duration_t::MONTHS, 1); start = gregorian::date(start.year(), start.month(), 1); } else if (word == _("today") || word == _("day")) { - duration = gregorian::days(1); + duration = date_interval_t::duration_t(date_interval_t::duration_t::DAYS, 1); } else { parse_specifier = true; @@ -651,15 +619,15 @@ namespace { if (parse_specifier) parse_inclusion_specifier(word, &start, &end); else - end = date_interval_t::add_duration(start, *duration); + end = duration->add(start); if (type == _("last") && duration) { - start = date_interval_t::subtract_duration(start, *duration); - end = date_interval_t::subtract_duration(end, *duration); + start = duration->subtract(start); + end = duration->subtract(end); } else if (type == _("next") && duration) { - start = date_interval_t::add_duration(start, *duration); - end = date_interval_t::add_duration(end, *duration); + start = duration->add(start); + end = duration->add(end); } if (look_for_start && is_valid(start)) interval.start = start; @@ -683,41 +651,41 @@ void date_interval_t::parse(std::istream& in) int quantity = lexical_cast<int>(word); read_lower_word(in, word); if (word == _("days")) - duration = gregorian::days(quantity); + duration = duration_t(duration_t::DAYS, quantity); else if (word == _("weeks")) - duration = gregorian::weeks(quantity); + duration = duration_t(duration_t::WEEKS, quantity); else if (word == _("months")) - duration = gregorian::months(quantity); + duration = duration_t(duration_t::MONTHS, quantity); else if (word == _("quarters")) - duration = gregorian::months(3 * quantity); + duration = duration_t(duration_t::MONTHS, 3 * quantity); else if (word == _("years")) - duration = gregorian::years(quantity); + duration = duration_t(duration_t::YEARS, quantity); } else if (word == _("day")) - duration = gregorian::days(1); + duration = duration_t(duration_t::DAYS, 1); else if (word == _("week")) - duration = gregorian::weeks(1); + duration = duration_t(duration_t::WEEKS, 1); else if (word == _("month")) - duration = gregorian::months(1); + duration 
= duration_t(duration_t::MONTHS, 1); else if (word == _("quarter")) - duration = gregorian::months(3); + duration = duration_t(duration_t::MONTHS, 3); else if (word == _("year")) - duration = gregorian::years(1); + duration = duration_t(duration_t::YEARS, 1); } else if (word == _("daily")) - duration = gregorian::days(1); + duration = duration_t(duration_t::DAYS, 1); else if (word == _("weekly")) - duration = gregorian::weeks(1); + duration = duration_t(duration_t::WEEKS, 1); else if (word == _("biweekly")) - duration = gregorian::weeks(2); + duration = duration_t(duration_t::WEEKS, 2); else if (word == _("monthly")) - duration = gregorian::months(1); + duration = duration_t(duration_t::MONTHS, 1); else if (word == _("bimonthly")) - duration = gregorian::months(2); + duration = duration_t(duration_t::MONTHS, 2); else if (word == _("quarterly")) - duration = gregorian::months(3); + duration = duration_t(duration_t::MONTHS, 3); else if (word == _("yearly")) - duration = gregorian::years(1); + duration = duration_t(duration_t::YEARS, 1); else if (word == _("this") || word == _("last") || word == _("next") || word == _("today")) { parse_date_words(in, word, *this); @@ -760,17 +728,17 @@ void date_interval_t::parse(std::istream& in) if (wday) { while (start->day_of_week() != *wday) - *start -= gregorian::days(1); + *start = duration_t(duration_t::DAYS, 1).subtract(*start); if (! end) - end = *start + gregorian::days(1); + end = duration_t(duration_t::DAYS, 1).add(*start); } else { bool overwrite_end = false; if (year) { start = date_t(*year, 1, 1); if (! end) { - end = *start + gregorian::years(1); + end = duration_t(duration_t::YEARS, 1).add(*start); overwrite_end = true; } } @@ -778,7 +746,7 @@ void date_interval_t::parse(std::istream& in) if (mon) { start = date_t(start->year(), *mon, 1); if (! 
end || overwrite_end) - end = *start + gregorian::months(1); + end = duration_t(duration_t::MONTHS, 1).add(*start); } } } diff --git a/src/times.h b/src/times.h index 69e3af51..db83d175 100644 --- a/src/times.h +++ b/src/times.h @@ -119,10 +119,71 @@ void set_input_date_format(const char * format); class date_interval_t : public equality_comparable<date_interval_t> { public: - typedef variant<gregorian::days, - gregorian::weeks, - gregorian::months, - gregorian::years> duration_t; + struct duration_t + { + enum skip_quantum_t { + DAYS, WEEKS, MONTHS, YEARS + } quantum; + int length; + + duration_t() : quantum(DAYS), length(0) { + TRACE_CTOR(date_interval_t::duration_t, ""); + } + duration_t(skip_quantum_t _quantum, int _length) + : quantum(_quantum), length(_length) { + TRACE_CTOR(date_interval_t::duration_t, "skip_quantum_t, int"); + } + duration_t(const duration_t& dur) + : quantum(dur.quantum), length(dur.length) { + TRACE_CTOR(date_interval_t::duration_t, "copy"); + } + ~duration_t() throw() { + TRACE_DTOR(date_interval_t::duration_t); + } + + date_t add(const date_t& date) const { + switch (quantum) { + case DAYS: + return date + gregorian::days(length); + case WEEKS: + return date + gregorian::weeks(length); + case MONTHS: + return date + gregorian::months(length); + case YEARS: + return date + gregorian::years(length); + default: + assert(0); return date_t(); + } + } + + date_t subtract(const date_t& date) const { + switch (quantum) { + case DAYS: + return date - gregorian::days(length); + case WEEKS: + return date - gregorian::weeks(length); + case MONTHS: + return date - gregorian::months(length); + case YEARS: + return date - gregorian::years(length); + default: + assert(0); return date_t(); + } + } + +#if defined(HAVE_BOOST_SERIALIZATION) + private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & quantum; + ar & length; + } +#endif // HAVE_BOOST_SERIALIZATION + }; static date_t add_duration(const date_t& date, const duration_t& duration); @@ -196,6 +257,25 @@ public: } date_interval_t& operator++(); + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & start; + ar & aligned; + ar & skip_duration; + ar & factor; + ar & next; + ar & duration; + ar & end_of_duration; + ar & end; + } +#endif // HAVE_BOOST_SERIALIZATION }; void times_initialize(); diff --git a/src/utils.h b/src/utils.h index e3ae5dda..c662acbe 100644 --- a/src/utils.h +++ b/src/utils.h @@ -178,6 +178,18 @@ public: string(const char * str, size_type x); string(const char * str, size_type x, size_type y); ~string() throw(); + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & boost::serialization::base_object<std::string>(*this); + } +#endif // HAVE_BOOST_SERIALIZATION }; inline string operator+(const string& __lhs, const string& __rhs) diff --git a/src/value.cc b/src/value.cc index a6bbb2fb..a3a05b6c 100644 --- a/src/value.cc +++ b/src/value.cc @@ -113,8 +113,8 @@ value_t::operator bool() const } } return false; - case POINTER: - return ! 
as_any_pointer().empty(); + case SCOPE: + return as_scope() != NULL; default: break; } @@ -1206,8 +1206,8 @@ bool value_t::is_realzero() const case SEQUENCE: return as_sequence().empty(); - case POINTER: - return as_any_pointer().empty(); + case SCOPE: + return as_scope() == NULL; default: throw_(value_error, _("Cannot determine if %1 is really zero") << label()); @@ -1235,8 +1235,8 @@ bool value_t::is_zero() const case SEQUENCE: return as_sequence().empty(); - case POINTER: - return as_any_pointer().empty(); + case SCOPE: + return as_scope() == NULL; default: throw_(value_error, _("Cannot determine if %1 is zero") << label()); @@ -1474,7 +1474,7 @@ value_t value_t::strip_annotations(const keep_details_t& what_to_keep) const case DATE: case STRING: case MASK: - case POINTER: + case SCOPE: return *this; case SEQUENCE: { @@ -1579,8 +1579,8 @@ void value_t::print(std::ostream& out, break; } - case POINTER: - out << "<POINTER>"; + case SCOPE: + out << "<SCOPE>"; break; default: @@ -1647,8 +1647,8 @@ void value_t::dump(std::ostream& out, const bool relaxed) const out << '/' << as_mask() << '/'; break; - case POINTER: - out << boost::unsafe_any_cast<const void *>(&as_any_pointer()); + case SCOPE: + out << as_scope(); break; case SEQUENCE: { diff --git a/src/value.h b/src/value.h index 31850894..3c5ce286 100644 --- a/src/value.h +++ b/src/value.h @@ -56,6 +56,8 @@ namespace ledger { DECLARE_EXCEPTION(value_error, std::runtime_error); +class scope_t; + /** * @class value_t * @@ -107,7 +109,7 @@ public: STRING, // a string object MASK, // a regular expression mask SEQUENCE, // a vector of value_t objects - POINTER // an opaque pointer of any type + SCOPE // a pointer to a scope }; private: @@ -134,7 +136,7 @@ private: string, // STRING mask_t, // MASK sequence_t *, // SEQUENCE - boost::any // POINTER + scope_t * // SCOPE > data; type_t type; @@ -225,6 +227,20 @@ private: data = false; type = VOID; } + +#if defined(HAVE_BOOST_SERIALIZATION) + private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & data; + ar & type; + ar & refc; + } +#endif // HAVE_BOOST_SERIALIZATION }; /** @@ -332,10 +348,9 @@ public: set_sequence(val); } - template <typename T> - explicit value_t(T * item) { - TRACE_CTOR(value_t, "T *"); - set_pointer(item); + explicit value_t(scope_t * item) { + TRACE_CTOR(value_t, "scope_t *"); + set_scope(item); } /** @@ -687,50 +702,19 @@ public: } /** - * Dealing with pointers is bit involved because we actually deal - * with typed pointers. For example, if you call as_pointer it - * returns a boost::any object, but if you use as_pointer<void>, - * then it returns a void *. The latter form only succeeds if the - * stored pointers was assigned to the value as a void*, otherwise - * it throws an exception. + * Dealing with scope pointers. 
*/ - bool is_pointer() const { - return is_type(POINTER); - } - boost::any& as_any_pointer_lval() { - VERIFY(is_pointer()); - _dup(); - return boost::get<boost::any>(storage->data); - } - template <typename T> - T * as_pointer_lval() { - return any_cast<T *>(as_any_pointer_lval()); - } - template <typename T> - T& as_ref_lval() { - return *as_pointer_lval<T>(); - } - const boost::any& as_any_pointer() const { - VERIFY(is_pointer()); - return boost::get<boost::any>(storage->data); - } - template <typename T> - T * as_pointer() const { - return any_cast<T *>(as_any_pointer()); + bool is_scope() const { + return is_type(SCOPE); } - template <typename T> - T& as_ref() const { - return *as_pointer<T>(); + scope_t * as_scope() const { + VERIFY(is_scope()); + return boost::get<scope_t *>(storage->data); } - void set_any_pointer(const boost::any& val) { - set_type(POINTER); + void set_scope(scope_t * val) { + set_type(SCOPE); storage->data = val; } - template <typename T> - void set_pointer(T * val) { - set_type(POINTER); - storage->data = boost::any(val); - } /** * Data conversion methods. These methods convert a value object to @@ -902,8 +886,8 @@ public: return _("a regexp"); case SEQUENCE: return _("a sequence"); - case POINTER: - return _("a pointer"); + case SCOPE: + return _("a scope"); default: assert(false); break; @@ -926,6 +910,20 @@ public: * Debugging methods. */ bool valid() const; + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & true_value; + ar & false_value; + ar & storage; + } +#endif // HAVE_BOOST_SERIALIZATION }; #define NULL_VALUE (value_t()) diff --git a/src/xact.cc b/src/xact.cc index 9f118ec2..be8c0214 100644 --- a/src/xact.cc +++ b/src/xact.cc @@ -499,7 +499,7 @@ void auto_xact_t::extend_xact(xact_base_t& xact, bool post_handler) IF_DEBUG("xact.extend") { DEBUG("xact.extend", - "Initial post on line " << initial_post->beg_line << ": " + "Initial post on line " << initial_post->pos->beg_line << ": " << "amount " << initial_post->amount << " (precision " << initial_post->amount.precision() << ")"); @@ -509,7 +509,7 @@ void auto_xact_t::extend_xact(xact_base_t& xact, bool post_handler) #endif DEBUG("xact.extend", - "Posting on line " << post->beg_line << ": " + "Posting on line " << post->pos->beg_line << ": " << "amount " << post->amount << ", amt " << amt << " (precision " << post->amount.precision() << " != " << amt.precision() << ")"); @@ -80,6 +80,20 @@ public: virtual bool finalize(); virtual bool valid() const = 0; + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & boost::serialization::base_object<item_t>(*this); + ar & journal; + ar & posts; + } +#endif // HAVE_BOOST_SERIALIZATION }; /** @@ -111,6 +125,20 @@ public: virtual expr_t::ptr_op_t lookup(const string& name); virtual bool valid() const; + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. 
*/ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & boost::serialization::base_object<xact_base_t>(*this); + ar & code; + ar & payee; + } +#endif // HAVE_BOOST_SERIALIZATION }; /** @@ -154,6 +182,19 @@ public: virtual bool valid() const { return true; } + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & boost::serialization::base_object<xact_base_t>(*this); + ar & predicate; + } +#endif // HAVE_BOOST_SERIALIZATION }; /** @@ -180,6 +221,18 @@ struct auto_xact_finalizer_t : public xact_finalizer_t } virtual bool operator()(xact_t& xact, bool post); + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & journal; + } +#endif // HAVE_BOOST_SERIALIZATION }; /** @@ -205,7 +258,7 @@ class period_xact_t : public xact_base_t TRACE_CTOR(period_xact_t, "const string&"); } - virtual ~period_xact_t() throw() { + virtual ~period_xact_t() { TRACE_DTOR(period_xact_t); } @@ -218,6 +271,20 @@ class period_xact_t : public xact_base_t #endif return true; } + +#if defined(HAVE_BOOST_SERIALIZATION) +private: + /** Serialization. */ + + friend class boost::serialization::access; + + template<class Archive> + void serialize(Archive & ar, const unsigned int /* version */) { + ar & boost::serialization::base_object<xact_base_t>(*this); + ar & period; + ar & period_string; + } +#endif // HAVE_BOOST_SERIALIZATION }; /** diff --git a/tools/Makefile.am b/tools/Makefile.am new file mode 100644 index 00000000..13ea4e7f --- /dev/null +++ b/tools/Makefile.am @@ -0,0 +1,520 @@ +VERSION = 3.0 +ACLOCAL_AMFLAGS = -I m4 +dist_man_MANS = doc/ledger.1 +SUBDIRS = po intl +EXTRA_DIST = autogen.sh config.rpath contrib src/system.hh.in +DISTCLEANFILES = .timestamp + +lib_LTLIBRARIES = \ + libledger_report.la \ + libledger_data.la \ + libledger_expr.la \ + libledger_math.la \ + libledger_util.la + +lib_cppflags = -I$(srcdir)/src -I$(srcdir)/lib \ + -I$(srcdir)/lib/utfcpp/source + +libledger_util_la_SOURCES = \ + src/stream.cc \ + src/mask.cc \ + src/times.cc \ + src/error.cc \ + src/utils.cc \ + src/accum.cc \ + lib/sha1.cpp + +libledger_util_la_CPPFLAGS = $(lib_cppflags) +libledger_util_la_LDFLAGS = -release $(VERSION).0 + +libledger_math_la_SOURCES = \ + src/value.cc \ + src/balance.cc \ + src/quotes.cc \ + src/pool.cc \ + src/annotate.cc \ + src/commodity.cc \ + src/amount.cc + +libledger_math_la_CPPFLAGS = $(lib_cppflags) +libledger_math_la_LDFLAGS = -release $(VERSION).0 + +libledger_expr_la_SOURCES = \ + src/option.cc \ + src/format.cc \ + src/predicate.cc \ + src/scope.cc \ + src/interactive.cc \ + src/expr.cc \ + src/op.cc \ + src/parser.cc \ + src/token.cc + +libledger_expr_la_CPPFLAGS = $(lib_cppflags) +libledger_expr_la_LDFLAGS = -release $(VERSION).0 + +libledger_data_la_SOURCES = \ + src/compare.cc \ + src/iterators.cc \ + src/timelog.cc \ + src/textual.cc \ + src/journal.cc \ + src/archive.cc \ + src/account.cc \ + src/xact.cc \ + src/post.cc \ + src/item.cc + +libledger_data_la_CPPFLAGS = $(lib_cppflags) +libledger_data_la_LDFLAGS = -release $(VERSION).0 + +libledger_report_la_SOURCES = \ + src/stats.cc \ + src/generate.cc \ + src/derive.cc \ + src/emacs.cc \ + src/output.cc \ + 
src/precmd.cc \ + src/chain.cc \ + src/filters.cc \ + src/temps.cc \ + src/report.cc \ + src/session.cc + +libledger_report_la_CPPFLAGS = $(lib_cppflags) +libledger_report_la_LDFLAGS = -release $(VERSION).0 + +pkginclude_HEADERS = \ + src/utils.h \ + src/flags.h \ + src/hooks.h \ + src/error.h \ + src/times.h \ + src/mask.h \ + src/stream.h \ + src/pstream.h \ + src/unistring.h \ + src/accum.h \ + \ + src/amount.h \ + src/commodity.h \ + src/annotate.h \ + src/pool.h \ + src/quotes.h \ + src/balance.h \ + src/value.h \ + \ + src/token.h \ + src/parser.h \ + src/op.h \ + src/expr.h \ + src/scope.h \ + src/interactive.h \ + src/predicate.h \ + src/format.h \ + src/option.h \ + \ + src/item.h \ + src/post.h \ + src/xact.h \ + src/account.h \ + src/journal.h \ + src/archive.h \ + src/timelog.h \ + src/iterators.h \ + src/compare.h \ + \ + src/session.h \ + src/report.h \ + src/filters.h \ + src/temps.h \ + src/chain.h \ + src/precmd.h \ + src/derive.h \ + src/generate.h \ + src/stats.h \ + src/output.h \ + src/emacs.h \ + \ + src/global.h \ + \ + src/pyinterp.h \ + \ + lib/sha1.h \ + lib/gettext.h \ + \ + lib/utfcpp/source/utf8.h \ + lib/utfcpp/source/utf8/checked.h \ + lib/utfcpp/source/utf8/core.h \ + lib/utfcpp/source/utf8/unchecked.h + +nodist_libledger_util_la_SOURCES = system.hh + +BUILT_SOURCES = system.hh +CLEANFILES = system.hh + +system.hh: src/system.hh.in + cp -p $< $@ + +if USE_PCH +nodist_libledger_util_la_SOURCES += system.hh.gch + +BUILT_SOURCES += system.hh.gch +CLEANFILES += system.hh.gch + +system.hh.gch: system.hh + $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ + $(lib_cppflags) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) \ + -o $@ $< +endif + +###################################################################### + +bin_PROGRAMS = ledger + +ledger_CPPFLAGS = $(lib_cppflags) +if HAVE_BOOST_PYTHON +ledger_CPPFLAGS += -I$(srcdir)/python +endif +ledger_SOURCES = src/main.cc src/global.cc +ledger_LDADD = $(LIBOBJS) $(lib_LTLIBRARIES) $(INTLLIBS) +ledger_LDFLAGS = -static + +info_TEXINFOS = doc/ledger.texi + +dist_lisp_LISP = lisp/ledger.el lisp/timeclock.el +ELCFILES = +DISTCLEANFILES += ledger.elc timeclock.elc + +all_sources = $(libledger_util_la_SOURCES) \ + $(libledger_math_la_SOURCES) \ + $(libledger_expr_la_SOURCES) \ + $(libledger_data_la_SOURCES) \ + $(libledger_report_la_SOURCES) \ + $(libledger_python_la_SOURCES) \ + src/pyledger.cc + +all_files = $(all_sources) $(pkginclude_HEADERS) + +###################################################################### + +if HAVE_BOOST_PYTHON + +lib_LTLIBRARIES += libledger_python.la + +libledger_python_la_SOURCES = \ + src/pyutils.h \ + src/pyfstream.h \ + src/py_amount.cc \ + src/py_balance.cc \ + src/py_chain.cc \ + src/py_commodity.cc \ + src/py_expr.cc \ + src/py_flags.cc \ + src/py_format.cc \ + src/py_global.cc \ + src/py_item.cc \ + src/py_journal.cc \ + src/py_post.cc \ + src/py_report.cc \ + src/py_scope.cc \ + src/py_session.cc \ + src/py_timelog.cc \ + src/py_times.cc \ + src/py_utils.cc \ + src/py_value.cc \ + src/py_xact.cc \ + src/pyinterp.cc + +libledger_python_la_CPPFLAGS = $(lib_cppflags) -I$(srcdir)/python + +pyexec_LTLIBRARIES = ledger.la + +ledger_la_CPPFLAGS = $(libledger_python_la_CPPFLAGS) +ledger_la_SOURCES = src/pyledger.cc +ledger_la_DEPENDENCIES = $(lib_LTLIBRARIES) +ledger_la_LDFLAGS = -avoid-version -module +ledger_la_LIBADD = $(LIBOBJS) $(lib_LTLIBRARIES) $(INTLLIBS) + +pkgpython_PYTHON = python/__init__.py \ + python/hello.py \ + python/server.py + +endif + 
+###################################################################### + +TESTS = + +if HAVE_PYTHON +TESTS += RegressTests BaselineTests ConfirmTests GenerateTests +endif + +if HAVE_CPPUNIT +TESTS += \ + UtilTests \ + MathTests \ + ExprTests \ + DataTests \ + ReportTests +endif + +if HAVE_BOOST_PYTHON +TESTS += PyUnitTests +endif + +check_PROGRAMS = $(TESTS) + +UtilTests_SOURCES = \ + test/UnitTests.cc \ + test/UnitTests.h \ + test/UtilTests.cc \ + test/unit/t_utils.cc \ + test/unit/t_utils.h \ + test/unit/t_times.cc \ + test/unit/t_times.h + +UtilTests_CPPFLAGS = -I$(srcdir)/test $(lib_cppflags) +UtilTests_LDADD = libledger_util.la -lcppunit + +MathTests_SOURCES = \ + test/UnitTests.cc \ + test/UnitTests.h \ + test/MathTests.cc \ + test/unit/t_commodity.cc \ + test/unit/t_commodity.h \ + test/unit/t_amount.cc \ + test/unit/t_amount.h \ + test/unit/t_balance.cc \ + test/unit/t_balance.h + +MathTests_CPPFLAGS = -I$(srcdir)/test $(lib_cppflags) +MathTests_LDADD = libledger_math.la $(UtilTests_LDADD) + +ExprTests_SOURCES = \ + test/UnitTests.cc \ + test/UnitTests.h \ + test/ExprTests.cc \ + test/unit/t_expr.cc \ + test/unit/t_expr.h + +ExprTests_CPPFLAGS = -I$(srcdir)/test $(lib_cppflags) +ExprTests_LDADD = libledger_expr.la $(MathTests_LDADD) + +DataTests_SOURCES = \ + test/UnitTests.cc \ + test/UnitTests.h \ + test/DataTests.cc + +DataTests_CPPFLAGS = -I$(srcdir)/test $(lib_cppflags) +DataTests_LDADD = libledger_data.la $(ExprTests_LDADD) + +ReportTests_SOURCES = \ + test/UnitTests.cc \ + test/UnitTests.h \ + test/ReportTests.cc + +ReportTests_CPPFLAGS = -I$(srcdir)/test $(lib_cppflags) +ReportTests_LDADD = libledger_report.la $(DataTests_LDADD) + +all_tests_sources = \ + $(UtilTests_SOURCES) \ + $(MathTests_SOURCES) \ + $(ExprTests_SOURCES) \ + $(DataTests_SOURCES) \ + $(ReportTests_SOURCES) + +PyUnitTests_SOURCES = test/PyUnitTests.py + +all_py_tests_sources = \ + $(patsubst test/unit/%.cc,$(top_builddir)/test/python/%.py, \ + $(filter test/unit/t_%.cc,$(all_tests_sources))) + +test/python/%.py: test/unit/%.cc test/convert.py + $(PYTHON) $(srcdir)/test/convert.py $< $@ + +test/python/UnitTests.py: $(all_py_tests_sources) + @echo "from unittest import TextTestRunner, TestSuite" > $@ + @for file in $$(ls $(srcdir)/test/unit/*.cc); do \ + base=$$(basename $$file); \ + base=$$(echo $$base | sed 's/\.cc//'); \ + echo "import $$base" >> $@; \ + done + @echo "suites = [" >> $@ + @for file in $$(ls $(srcdir)/test/unit/*.cc); do \ + base=$$(basename $$file); \ + base=$$(echo $$base | sed 's/\.cc//'); \ + echo " $$base.suite()," >> $@; \ + done + @echo "]" >> $@ + @echo "TextTestRunner().run(TestSuite(suites))" >> $@ + +ledger_python = $(top_builddir)/ledger$(EXEEXT) python + +ESC_python=`echo "$(ledger_python)" | sed 's/\//\\\\\//g'` +ESC_srcdir=`echo "$(srcdir)" | sed 's/\//\\\\\//g'` +ESC_builddir=`echo "$(top_builddir)" | sed 's/\//\\\\\//g'` +ESC_distdir=`echo "$(distdir)" | sed 's/\//\\\\\//g'` + +# jww (2007-05-10): This rule will not be triggered on systems that +# define an EXEEXT. 
+PyUnitTests: test/PyUnitTests.py test/python/UnitTests.py + @cat $(srcdir)/test/PyUnitTests.py \ + | sed "s/%python%/$(ESC_python)/" \ + | sed "s/%srcdir%/$(ESC_srcdir)/g" \ + | sed "s/%builddir%/$(ESC_builddir)/g" > $@ + chmod 755 $@ + +RegressTests_SOURCES = test/RegressTests.py + +EXTRA_DIST += test/regress test/convert.py test/LedgerHarness.py + +RegressTests: $(srcdir)/test/RegressTests.py + echo "$(PYTHON) $(srcdir)/test/RegressTests.py $(top_builddir)/ledger$(EXEEXT) $(srcdir)/test/regress \"\$$@\"" > $@ + chmod 755 $@ + +BaselineTests_SOURCES = test/RegressTests.py + +EXTRA_DIST += test/baseline + +BaselineTests: $(srcdir)/test/RegressTests.py + echo "$(PYTHON) $(srcdir)/test/RegressTests.py $(top_builddir)/ledger$(EXEEXT) $(srcdir)/test/baseline \"\$$@\"" > $@ + chmod 755 $@ + +ConfirmTests_SOURCES = test/ConfirmTests.py + +EXTRA_DIST += test/input + +test/input/mondo.dat: test/input/standard.dat + @for i in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 ; do \ + for j in 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 ; do \ + cat $< >> $@; \ + done; \ + done + +ConfirmTests: $(srcdir)/test/ConfirmTests.py + echo "$(PYTHON) $(srcdir)/test/ConfirmTests.py $(top_builddir)/ledger$(EXEEXT) $(srcdir)/test/input \"\$$@\"" > $@ + chmod 755 $@ + +GenerateTests_SOURCES = test/GenerateTests.py + +GenerateTests: $(srcdir)/test/GenerateTests.py + echo "$(PYTHON) $(srcdir)/test/GenerateTests.py $(top_builddir)/ledger$(EXEEXT) 1 20 \"\$$@\"" > $@ + chmod 755 $@ + +FULLCHECK=$(srcdir)/test/fullcheck.sh + +if HAVE_CPPUNIT +cppunittests: check + @sh $(FULLCHECK) $(top_builddir)/UtilTests$(EXEEXT) --verify \ + 2>&1 | grep -v '^GuardMalloc:' + @sh $(FULLCHECK) $(top_builddir)/MathTests$(EXEEXT) --verify \ + 2>&1 | grep -v '^GuardMalloc:' + @sh $(FULLCHECK) $(top_builddir)/ExprTests$(EXEEXT) --verify \ + 2>&1 | grep -v '^GuardMalloc:' + @sh $(FULLCHECK) $(top_builddir)/DataTests$(EXEEXT) --verify \ + 2>&1 | grep -v '^GuardMalloc:' + @sh $(FULLCHECK) $(top_builddir)/ReportTests$(EXEEXT) --verify \ + 2>&1 | grep -v '^GuardMalloc:' +else +cppunittests: check + @test 1 -eq 1 +endif + +fullcheck: cppunittests + @$(top_builddir)/RegressTests --verify + @$(top_builddir)/BaselineTests --verify + @$(top_builddir)/ConfirmTests --verify + @$(top_builddir)/GenerateTests --verify + @$(top_builddir)/RegressTests --gmalloc + @$(top_builddir)/BaselineTests --gmalloc +# @$(top_builddir)/ConfirmTests --gmalloc +# @$(top_builddir)/GenerateTests --gmalloc + +###################################################################### + +EXTRA_DIST += doc/README doc/LICENSE doc/NEWS doc/ledger.pdf +if USE_DOXYGEN +EXTRA_DIST += doc/Doxyfile doc/refman.pdf +endif + +DISTCLEANFILES += doc/ledger.info doc/ledger.pdf +if USE_DOXYGEN +DISTCLEANFILES += Doxyfile.gen doc/Doxyfile.bak doc/refman.pdf +endif + +if USE_DOXYGEN +dist-hook-doxygen: + find $(distdir)/doc -name .dirstamp -delete + rm -fr $(distdir)/doc/latex \ + $(distdir)/doc/Doxyfile.bak \ + $(distdir)/doc/Doxyfile.gen + cp -pR doc/html $(distdir)/doc +else +dist-hook-doxygen: + @test 1 -eq 1 +endif + +dist-hook: dist-hook-doxygen + find $(distdir) -name .DS_Store -delete + find $(distdir) -name .localized -delete + rm -f $(distdir)/README.textile + cp -p $(srcdir)/doc/README $(distdir)/README + +if USE_DOXYGEN +distclean-local-doxygen: + rm -fr doc/html doc/latex + rm -f doc/refman.pdf +else +distclean-local-doxygen: + @test 1 -eq 1 +endif + +distclean-local: distclean-local-doxygen + rm -fr test/python + +if USE_DOXYGEN +ESC_top_builddir=`cd $(top_builddir); pwd | sed 's/\//\\\\\//g'` 
+ +Doxyfile.gen: doc/Doxyfile + cat $< | sed "s/%srcdir%/$(ESC_srcdir)/g" \ + | sed "s/%builddir%/$(ESC_top_builddir)/g" > $@ + +doc/html/index.html: Doxyfile.gen $(all_files) + BUILD_DIR=`cd $(top_builddir); pwd`; \ + (cd $(srcdir); doxygen $$BUILD_DIR/Doxyfile.gen) + +# The intention with the following rules is that all of the Doxygen +# documentation (both HTML and PDF) is built locally before distcheck is +# run, since it's quite possible that the user will not have a complete +# TeX + Doxygen + dot environment on their own system. + +doc/refman.pdf: doc/html/index.html + (cd doc/latex && make) + cp doc/latex/refman.pdf $@ + +docs: pdf doc/refman.pdf +else +docs: pdf +endif + +libs: + @echo Building dependency libs and installing in /usr/local/stow ... + git submodule update --init + (cd lib; make) + +report: all + -rm -fr build + lcov -d $(shell pwd) --zerocounters + -mkdir doc/report + lcov -c -i -d $(shell pwd) -o doc/report/ledger_base.info + make fullcheck + lcov -c -d $(shell pwd) --checksum -o doc/report/ledger_test.info + lcov -a doc/report/ledger_base.info \ + -a doc/report/ledger_test.info -o doc/report/ledger_total.info + lcov --extract doc/report/ledger_total.info '*src/ledger/*' \ + -o doc/report/ledger_cov.info + genhtml -o doc/report doc/report/ledger_cov.info + @echo Coverage reported generated\; now open doc/report/index.html + +# Makefile.am ends here diff --git a/tools/autogen.sh b/tools/autogen.sh new file mode 100755 index 00000000..a8b63eff --- /dev/null +++ b/tools/autogen.sh @@ -0,0 +1,1491 @@ +#!/bin/sh +# a u t o g e n . s h +# +# Copyright (c) 2005-2007 United States Government as represented by +# the U.S. Army Research Laboratory. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following +# disclaimer in the documentation and/or other materials provided +# with the distribution. +# +# 3. The name of the author may not be used to endorse or promote +# products derived from this software without specific prior written +# permission. +# +# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS +# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY +# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +### +# +# Script for automatically preparing the sources for compilation by +# performing the myrid of necessary steps. The script attempts to +# detect proper version support, and outputs warnings about particular +# systems that have autotool peculiarities. 
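For orientation, the preparation this script automates (and that autoreconf performs in one shot) boils down to roughly the following sequence. This is only a sketch of the manual fallback steps the script itself lists later in its manual_autogen function, with -I m4 added only when an m4/ directory exists:

    aclocal -I m4
    libtoolize --automake -c -f      # or glibtoolize on some systems
    aclocal -I m4
    autoconf -f
    autoheader
    automake -a -c -f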
+# +# Basically, if everything is set up and installed correctly, the +# script will validate that minimum versions of the GNU Build System +# tools are installed, account for several common configuration +# issues, and then simply run autoreconf for you. +# +# If autoreconf fails, which can happen for many valid configurations, +# this script proceeds to run manual preparation steps effectively +# providing a POSIX shell script (mostly complete) reimplementation of +# autoreconf. +# +# The AUTORECONF, AUTOCONF, AUTOMAKE, LIBTOOLIZE, ACLOCAL, AUTOHEADER +# environment variables and corresponding _OPTIONS variables (e.g. +# AUTORECONF_OPTIONS) may be used to override the default automatic +# detection behaviors. Similarly the _VERSION variables will override +# the minimum required version numbers. +# +# Examples: +# +# To obtain help on usage: +# ./autogen.sh --help +# +# To obtain verbose output: +# ./autogen.sh --verbose +# +# To skip autoreconf and prepare manually: +# AUTORECONF=false ./autogen.sh +# +# To verbosely try running with an older (unsupported) autoconf: +# AUTOCONF_VERSION=2.50 ./autogen.sh --verbose +# +# Author: Christopher Sean Morrison <morrison@brlcad.org> +# +###################################################################### + +# set to minimum acceptible version of autoconf +if [ "x$AUTOCONF_VERSION" = "x" ] ; then + AUTOCONF_VERSION=2.52 +fi +# set to minimum acceptible version of automake +if [ "x$AUTOMAKE_VERSION" = "x" ] ; then + AUTOMAKE_VERSION=1.6.0 +fi +# set to minimum acceptible version of libtool +if [ "x$LIBTOOL_VERSION" = "x" ] ; then + LIBTOOL_VERSION=1.4.2 +fi + + +################## +# ident function # +################## +ident ( ) { + # extract copyright from header + __copyright="`grep Copyright $AUTOGEN_SH | head -${HEAD_N}1 | awk '{print $4}'`" + if [ "x$__copyright" = "x" ] ; then + __copyright="`date +%Y`" + fi + + # extract version from CVS Id string + __id="$Id: autogen.sh,v 14.97 2007/06/18 22:25:02 brlcad Exp $" + __version="`echo $__id | sed 's/.*\([0-9][0-9][0-9][0-9]\)[-\/]\([0-9][0-9]\)[-\/]\([0-9][0-9]\).*/\1\2\3/'`" + if [ "x$__version" = "x" ] ; then + __version="" + fi + + echo "autogen.sh build preparation script by Christopher Sean Morrison" + echo "revised 3-clause BSD-style license, copyright (c) $__copyright" + echo "script version $__version, ISO/IEC 9945 POSIX shell script" +} + + +################## +# USAGE FUNCTION # +################## +usage ( ) { + echo "Usage: $AUTOGEN_SH [-h|--help] [-v|--verbose] [-q|--quiet] [--version]" + echo " --help Help on $NAME_OF_AUTOGEN usage" + echo " --verbose Verbose progress output" + echo " --quiet Quiet suppressed progress output" + echo " --version Only perform GNU Build System version checks" + echo + echo "Description: This script will validate that minimum versions of the" + echo "GNU Build System tools are installed and then run autoreconf for you." + echo "Should autoreconf fail, manual preparation steps will be run" + echo "potentially accounting for several common preparation issues. The" + + echo "AUTORECONF, AUTOCONF, AUTOMAKE, LIBTOOLIZE, ACLOCAL, AUTOHEADER," + echo "PROJECT, & CONFIGURE environment variables and corresponding _OPTIONS" + echo "variables (e.g. AUTORECONF_OPTIONS) may be used to override the" + echo "default automatic detection behavior." 
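As an aside on the minimum-version variables set at the top of the script: version_check (defined below) does not compare version strings lexically. It first normalizes them to dotted digit fields and then compares major, minor, and patch numerically, which is what lets automake 1.10 satisfy a 1.6.0 minimum and lets suffixed versions like 1.4-p6 be handled at all. The normalization step can be reproduced directly (illustrative input):

    $ echo "1.4-p6." | sed 's/[^0-9]/./g' | sed 's/\.\././g'
    1.4.6.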
+ echo + + ident + + return 0 +} + + +########################## +# VERSION_ERROR FUNCTION # +########################## +version_error ( ) { + if [ "x$1" = "x" ] ; then + echo "INTERNAL ERROR: version_error was not provided a version" + exit 1 + fi + if [ "x$2" = "x" ] ; then + echo "INTERNAL ERROR: version_error was not provided an application name" + exit 1 + fi + $ECHO + $ECHO "ERROR: To prepare the ${PROJECT} build system from scratch," + $ECHO " at least version $1 of $2 must be installed." + $ECHO + $ECHO "$NAME_OF_AUTOGEN does not need to be run on the same machine that will" + $ECHO "run configure or make. Either the GNU Autotools will need to be installed" + $ECHO "or upgraded on this system, or $NAME_OF_AUTOGEN must be run on the source" + $ECHO "code on another system and then transferred to here. -- Cheers!" + $ECHO +} + +########################## +# VERSION_CHECK FUNCTION # +########################## +version_check ( ) { + if [ "x$1" = "x" ] ; then + echo "INTERNAL ERROR: version_check was not provided a minimum version" + exit 1 + fi + _min="$1" + if [ "x$2" = "x" ] ; then + echo "INTERNAL ERROR: version check was not provided a comparison version" + exit 1 + fi + _cur="$2" + + # needed to handle versions like 1.10 and 1.4-p6 + _min="`echo ${_min}. | sed 's/[^0-9]/./g' | sed 's/\.\././g'`" + _cur="`echo ${_cur}. | sed 's/[^0-9]/./g' | sed 's/\.\././g'`" + + _min_major="`echo $_min | cut -d. -f1`" + _min_minor="`echo $_min | cut -d. -f2`" + _min_patch="`echo $_min | cut -d. -f3`" + + _cur_major="`echo $_cur | cut -d. -f1`" + _cur_minor="`echo $_cur | cut -d. -f2`" + _cur_patch="`echo $_cur | cut -d. -f3`" + + if [ "x$_min_major" = "x" ] ; then + _min_major=0 + fi + if [ "x$_min_minor" = "x" ] ; then + _min_minor=0 + fi + if [ "x$_min_patch" = "x" ] ; then + _min_patch=0 + fi + if [ "x$_cur_minor" = "x" ] ; then + _cur_major=0 + fi + if [ "x$_cur_minor" = "x" ] ; then + _cur_minor=0 + fi + if [ "x$_cur_patch" = "x" ] ; then + _cur_patch=0 + fi + + $VERBOSE_ECHO "Checking if ${_cur_major}.${_cur_minor}.${_cur_patch} is greater than ${_min_major}.${_min_minor}.${_min_patch}" + + if [ $_min_major -lt $_cur_major ] ; then + return 0 + elif [ $_min_major -eq $_cur_major ] ; then + if [ $_min_minor -lt $_cur_minor ] ; then + return 0 + elif [ $_min_minor -eq $_cur_minor ] ; then + if [ $_min_patch -lt $_cur_patch ] ; then + return 0 + elif [ $_min_patch -eq $_cur_patch ] ; then + return 0 + fi + fi + fi + return 1 +} + + +###################################### +# LOCATE_CONFIGURE_TEMPLATE FUNCTION # +###################################### +locate_configure_template ( ) { + _pwd="`pwd`" + if test -f "./configure.ac" ; then + echo "./configure.ac" + elif test -f "./configure.in" ; then + echo "./configure.in" + elif test -f "$_pwd/configure.ac" ; then + echo "$_pwd/configure.ac" + elif test -f "$_pwd/configure.in" ; then + echo "$_pwd/configure.in" + elif test -f "$PATH_TO_AUTOGEN/configure.ac" ; then + echo "$PATH_TO_AUTOGEN/configure.ac" + elif test -f "$PATH_TO_AUTOGEN/configure.in" ; then + echo "$PATH_TO_AUTOGEN/configure.in" + fi +} + + +################## +# argument check # +################## +ARGS="$*" +PATH_TO_AUTOGEN="`dirname $0`" +NAME_OF_AUTOGEN="`basename $0`" +AUTOGEN_SH="$PATH_TO_AUTOGEN/$NAME_OF_AUTOGEN" + +LIBTOOL_M4="${PATH_TO_AUTOGEN}/misc/libtool.m4" + +if [ "x$HELP" = "x" ] ; then + HELP=no +fi +if [ "x$QUIET" = "x" ] ; then + QUIET=no +fi +if [ "x$VERBOSE" = "x" ] ; then + VERBOSE=no +fi +if [ "x$VERSION_ONLY" = "x" ] ; then + VERSION_ONLY=no +fi +if [ 
"x$AUTORECONF_OPTIONS" = "x" ] ; then + AUTORECONF_OPTIONS="-i -f" +fi +if [ "x$AUTOCONF_OPTIONS" = "x" ] ; then + AUTOCONF_OPTIONS="-f" +fi +if [ "x$AUTOMAKE_OPTIONS" = "x" ] ; then + AUTOMAKE_OPTIONS="-a -c -f" +fi +ALT_AUTOMAKE_OPTIONS="-a -c" +if [ "x$LIBTOOLIZE_OPTIONS" = "x" ] ; then + LIBTOOLIZE_OPTIONS="--automake -c -f" +fi +ALT_LIBTOOLIZE_OPTIONS="--automake --copy --force" +if [ "x$ACLOCAL_OPTIONS" = "x" ] ; then + ACLOCAL_OPTIONS="" +fi +if [ "x$AUTOHEADER_OPTIONS" = "x" ] ; then + AUTOHEADER_OPTIONS="" +fi +for arg in $ARGS ; do + case "x$arg" in + x--help) HELP=yes ;; + x-[hH]) HELP=yes ;; + x--quiet) QUIET=yes ;; + x-[qQ]) QUIET=yes ;; + x--verbose) VERBOSE=yes ;; + x-[vV]) VERBOSE=yes ;; + x--version) VERSION_ONLY=yes ;; + *) + echo "Unknown option: $arg" + echo + usage + exit 1 + ;; + esac +done + + +##################### +# environment check # +##################### + +# sanity check before recursions potentially begin +if [ ! -f "$AUTOGEN_SH" ] ; then + echo "INTERNAL ERROR: $AUTOGEN_SH does not exist" + if [ ! "x$0" = "x$AUTOGEN_SH" ] ; then + echo "INTERNAL ERROR: dirname/basename inconsistency: $0 != $AUTOGEN_SH" + fi + exit 1 +fi + +# force locale setting to C so things like date output as expected +LC_ALL=C + +# commands that this script expects +for __cmd in echo head tail pwd ; do + echo "test" | $__cmd > /dev/null 2>&1 + if [ $? != 0 ] ; then + echo "INTERNAL ERROR: '${__cmd}' command is required" + exit 2 + fi +done +echo "test" | grep "test" > /dev/null 2>&1 +if test ! x$? = x0 ; then + echo "INTERNAL ERROR: grep command is required" + exit 1 +fi +echo "test" | sed "s/test/test/" > /dev/null 2>&1 +if test ! x$? = x0 ; then + echo "INTERNAL ERROR: sed command is required" + exit 1 +fi + + +# determine the behavior of echo +case `echo "testing\c"; echo 1,2,3`,`echo -n testing; echo 1,2,3` in + *c*,-n*) ECHO_N= ECHO_C=' +' ECHO_T=' ' ;; + *c*,* ) ECHO_N=-n ECHO_C= ECHO_T= ;; + *) ECHO_N= ECHO_C='\c' ECHO_T= ;; +esac + +# determine the behavior of head +case "x`echo 'head' | head -n 1 2>&1`" in + *xhead*) HEAD_N="n " ;; + *) HEAD_N="" ;; +esac + +# determine the behavior of tail +case "x`echo 'tail' | tail -n 1 2>&1`" in + *xtail*) TAIL_N="n " ;; + *) TAIL_N="" ;; +esac + +VERBOSE_ECHO=: +ECHO=: +if [ "x$QUIET" = "xyes" ] ; then + if [ "x$VERBOSE" = "xyes" ] ; then + echo "Verbose output quelled by quiet option. Further output disabled." + fi +else + ECHO=echo + if [ "x$VERBOSE" = "xyes" ] ; then + echo "Verbose output enabled" + VERBOSE_ECHO=echo + fi +fi + + +# allow a recursive run to disable further recursions +if [ "x$RUN_RECURSIVE" = "x" ] ; then + RUN_RECURSIVE=yes +fi + + +################################################ +# check for help arg and bypass version checks # +################################################ +if [ "x`echo $ARGS | sed 's/.*[hH][eE][lL][pP].*/help/'`" = "xhelp" ] ; then + HELP=yes +fi +if [ "x$HELP" = "xyes" ] ; then + usage + $ECHO "---" + $ECHO "Help was requested. No preparation or configuration will be performed." + exit 0 +fi + + +####################### +# set up signal traps # +####################### +untrap_abnormal ( ) { + for sig in 1 2 13 15; do + trap - $sig + done +} + +# do this cleanup whenever we exit. 
+trap ' + # start from the root + if test -d "$START_PATH" ; then + cd "$START_PATH" + fi + + # restore/delete backup files + if test "x$PFC_INIT" = "x1" ; then + recursive_restore + fi +' 0 + +# trap SIGHUP (1), SIGINT (2), SIGPIPE (13), SIGTERM (15) +for sig in 1 2 13 15; do + trap ' + $ECHO "" + $ECHO "Aborting $NAME_OF_AUTOGEN: caught signal '$sig'" + + # start from the root + if test -d "$START_PATH" ; then + cd "$START_PATH" + fi + + # clean up on abnormal exit + $VERBOSE_ECHO "rm -rf autom4te.cache" + rm -rf autom4te.cache + + if test -f "acinclude.m4.$$.backup" ; then + $VERBOSE_ECHO "cat acinclude.m4.$$.backup > acinclude.m4" + chmod u+w acinclude.m4 + cat acinclude.m4.$$.backup > acinclude.m4 + + $VERBOSE_ECHO "rm -f acinclude.m4.$$.backup" + rm -f acinclude.m4.$$.backup + fi + + { (exit 1); exit 1; } +' $sig +done + + +############################# +# look for a configure file # +############################# +if [ "x$CONFIGURE" = "x" ] ; then + CONFIGURE="`locate_configure_template`" + if [ ! "x$CONFIGURE" = "x" ] ; then + $VERBOSE_ECHO "Found a configure template: $CONFIGURE" + fi +else + $ECHO "Using CONFIGURE environment variable override: $CONFIGURE" +fi +if [ "x$CONFIGURE" = "x" ] ; then + if [ "x$VERSION_ONLY" = "xyes" ] ; then + CONFIGURE=/dev/null + else + $ECHO + $ECHO "A configure.ac or configure.in file could not be located implying" + $ECHO "that the GNU Build System is at least not used in this directory. In" + $ECHO "any case, there is nothing to do here without one of those files." + $ECHO + $ECHO "ERROR: No configure.in or configure.ac file found in `pwd`" + exit 1 + fi +fi + +#################### +# get project name # +#################### +if [ "x$PROJECT" = "x" ] ; then + PROJECT="`grep AC_INIT $CONFIGURE | grep -v '.*#.*AC_INIT' | tail -${TAIL_N}1 | sed 's/^[ ]*AC_INIT(\([^,)]*\).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`" + if [ "x$PROJECT" = "xAC_INIT" ] ; then + # projects might be using the older/deprecated arg-less AC_INIT .. look for AM_INIT_AUTOMAKE instead + PROJECT="`grep AM_INIT_AUTOMAKE $CONFIGURE | grep -v '.*#.*AM_INIT_AUTOMAKE' | tail -${TAIL_N}1 | sed 's/^[ ]*AM_INIT_AUTOMAKE(\([^,)]*\).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`" + fi + if [ "x$PROJECT" = "xAM_INIT_AUTOMAKE" ] ; then + PROJECT="project" + fi + if [ "x$PROJECT" = "x" ] ; then + PROJECT="project" + fi +else + $ECHO "Using PROJECT environment variable override: $PROJECT" +fi +$ECHO "Preparing the $PROJECT build system...please wait" +$ECHO + + +######################## +# check for autoreconf # +######################## +HAVE_AUTORECONF=no +if [ "x$AUTORECONF" = "x" ] ; then + for AUTORECONF in autoreconf ; do + $VERBOSE_ECHO "Checking autoreconf version: $AUTORECONF --version" + $AUTORECONF --version > /dev/null 2>&1 + if [ $? = 0 ] ; then + HAVE_AUTORECONF=yes + break + fi + done +else + HAVE_AUTORECONF=yes + $ECHO "Using AUTORECONF environment variable override: $AUTORECONF" +fi + + +########################## +# autoconf version check # +########################## +_acfound=no +if [ "x$AUTOCONF" = "x" ] ; then + for AUTOCONF in autoconf ; do + $VERBOSE_ECHO "Checking autoconf version: $AUTOCONF --version" + $AUTOCONF --version > /dev/null 2>&1 + if [ $? = 0 ] ; then + _acfound=yes + break + fi + done +else + _acfound=yes + $ECHO "Using AUTOCONF environment variable override: $AUTOCONF" +fi + +_report_error=no +if [ ! "x$_acfound" = "xyes" ] ; then + $ECHO "ERROR: Unable to locate GNU Autoconf." 
+ _report_error=yes +else + _version="`$AUTOCONF --version | head -${HEAD_N}1 | sed 's/[^0-9]*\([0-9\.][0-9\.]*\)/\1/'`" + if [ "x$_version" = "x" ] ; then + _version="0.0.0" + fi + $ECHO "Found GNU Autoconf version $_version" + version_check "$AUTOCONF_VERSION" "$_version" + if [ $? -ne 0 ] ; then + _report_error=yes + fi +fi +if [ "x$_report_error" = "xyes" ] ; then + version_error "$AUTOCONF_VERSION" "GNU Autoconf" + exit 1 +fi + + +########################## +# automake version check # +########################## +_amfound=no +if [ "x$AUTOMAKE" = "x" ] ; then + for AUTOMAKE in automake ; do + $VERBOSE_ECHO "Checking automake version: $AUTOMAKE --version" + $AUTOMAKE --version > /dev/null 2>&1 + if [ $? = 0 ] ; then + _amfound=yes + break + fi + done +else + _amfound=yes + $ECHO "Using AUTOMAKE environment variable override: $AUTOMAKE" +fi + + +_report_error=no +if [ ! "x$_amfound" = "xyes" ] ; then + $ECHO + $ECHO "ERROR: Unable to locate GNU Automake." + _report_error=yes +else + _version="`$AUTOMAKE --version | head -${HEAD_N}1 | sed 's/[^0-9]*\([0-9\.][0-9\.]*\)/\1/'`" + if [ "x$_version" = "x" ] ; then + _version="0.0.0" + fi + $ECHO "Found GNU Automake version $_version" + version_check "$AUTOMAKE_VERSION" "$_version" + if [ $? -ne 0 ] ; then + _report_error=yes + fi +fi +if [ "x$_report_error" = "xyes" ] ; then + version_error "$AUTOMAKE_VERSION" "GNU Automake" + exit 1 +fi + + +######################## +# check for libtoolize # +######################## +HAVE_LIBTOOLIZE=yes +HAVE_ALT_LIBTOOLIZE=no +_ltfound=no +if [ "x$LIBTOOLIZE" = "x" ] ; then + LIBTOOLIZE=libtoolize + $VERBOSE_ECHO "Checking libtoolize version: $LIBTOOLIZE --version" + $LIBTOOLIZE --version > /dev/null 2>&1 + if [ ! $? = 0 ] ; then + HAVE_LIBTOOLIZE=no + $ECHO + if [ "x$HAVE_AUTORECONF" = "xno" ] ; then + $ECHO "Warning: libtoolize does not appear to be available." + else + $ECHO "Warning: libtoolize does not appear to be available. This means that" + $ECHO "the automatic build preparation via autoreconf will probably not work." + $ECHO "Preparing the build by running each step individually, however, should" + $ECHO "work and will be done automatically for you if autoreconf fails." + fi + + # look for some alternates + for tool in glibtoolize libtoolize15 libtoolize14 libtoolize13 ; do + $VERBOSE_ECHO "Checking libtoolize alternate: $tool --version" + _glibtoolize="`$tool --version > /dev/null 2>&1`" + if [ $? = 0 ] ; then + $VERBOSE_ECHO "Found $tool --version" + _glti="`which $tool`" + if [ "x$_glti" = "x" ] ; then + $VERBOSE_ECHO "Cannot find $tool with which" + continue; + fi + if test ! -f "$_glti" ; then + $VERBOSE_ECHO "Cannot use $tool, $_glti is not a file" + continue; + fi + _gltidir="`dirname $_glti`" + if [ "x$_gltidir" = "x" ] ; then + $VERBOSE_ECHO "Cannot find $tool path with dirname of $_glti" + continue; + fi + if test ! -d "$_gltidir" ; then + $VERBOSE_ECHO "Cannot use $tool, $_gltidir is not a directory" + continue; + fi + HAVE_ALT_LIBTOOLIZE=yes + LIBTOOLIZE="$tool" + $ECHO + $ECHO "Fortunately, $tool was found which means that your system may simply" + $ECHO "have a non-standard or incomplete GNU Autotools install. If you have" + $ECHO "sufficient system access, it may be possible to quell this warning by" + $ECHO "running:" + $ECHO + sudo -V > /dev/null 2>&1 + if [ $? 
= 0 ] ; then + $ECHO " sudo ln -s $_glti $_gltidir/libtoolize" + $ECHO + else + $ECHO " ln -s $_glti $_gltidir/libtoolize" + $ECHO + $ECHO "Run that as root or with proper permissions to the $_gltidir directory" + $ECHO + fi + _ltfound=yes + break + fi + done + else + _ltfound=yes + fi +else + _ltfound=yes + $ECHO "Using LIBTOOLIZE environment variable override: $LIBTOOLIZE" +fi + + +############################ +# libtoolize version check # +############################ +_report_error=no +if [ ! "x$_ltfound" = "xyes" ] ; then + $ECHO + $ECHO "ERROR: Unable to locate GNU Libtool." + _report_error=yes +else + _version="`$LIBTOOLIZE --version | head -${HEAD_N}1 | sed 's/[^0-9]*\([0-9\.][0-9\.]*\)/\1/'`" + if [ "x$_version" = "x" ] ; then + _version="0.0.0" + fi + $ECHO "Found GNU Libtool version $_version" + version_check "$LIBTOOL_VERSION" "$_version" + if [ $? -ne 0 ] ; then + _report_error=yes + fi +fi +if [ "x$_report_error" = "xyes" ] ; then + version_error "$LIBTOOL_VERSION" "GNU Libtool" + exit 1 +fi + + +##################### +# check for aclocal # +##################### +if [ "x$ACLOCAL" = "x" ] ; then + for ACLOCAL in aclocal ; do + $VERBOSE_ECHO "Checking aclocal version: $ACLOCAL --version" + $ACLOCAL --version > /dev/null 2>&1 + if [ $? = 0 ] ; then + break + fi + done +else + $ECHO "Using ACLOCAL environment variable override: $ACLOCAL" +fi + + +######################## +# check for autoheader # +######################## +if [ "x$AUTOHEADER" = "x" ] ; then + for AUTOHEADER in autoheader ; do + $VERBOSE_ECHO "Checking autoheader version: $AUTOHEADER --version" + $AUTOHEADER --version > /dev/null 2>&1 + if [ $? = 0 ] ; then + break + fi + done +else + $ECHO "Using AUTOHEADER environment variable override: $AUTOHEADER" +fi + + +######################### +# check if version only # +######################### +$VERBOSE_ECHO "Checking whether to only output version information" +if [ "x$VERSION_ONLY" = "xyes" ] ; then + $ECHO + ident + $ECHO "---" + $ECHO "Version requested. No preparation or configuration will be performed." + exit 0 +fi + + +################################# +# PROTECT_FROM_CLOBBER FUNCTION # +################################# +protect_from_clobber ( ) { + PFC_INIT=1 + + # protect COPYING & INSTALL from overwrite by automake. the + # automake force option will (inappropriately) ignore the existing + # contents of a COPYING and/or INSTALL files (depending on the + # version) instead of just forcing *missing* files like it does + # for AUTHORS, NEWS, and README. this is broken but extremely + # prevalent behavior, so we protect against it by keeping a backup + # of the file that can later be restored. 
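The protection is nothing more than a PID-stamped copy that is put back later if automake rewrote the file. A stripped-down sketch of the same pattern outside the script (cmp stands in for the content comparison the functions below perform with cat):

    cp -p COPYING COPYING.$$.backup
    automake --add-missing --copy --force
    if ! cmp -s COPYING COPYING.$$.backup ; then
        mv COPYING.$$.backup COPYING     # automake clobbered it; put the original back
    else
        rm -f COPYING.$$.backup
    fi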
+ + if test -f COPYING ; then + if test -f COPYING.$$.protect_from_automake.backup ; then + $VERBOSE_ECHO "Already backed up COPYING in `pwd`" + else + $VERBOSE_ECHO "Backing up COPYING in `pwd`" + $VERBOSE_ECHO "cp -p COPYING COPYING.$$.protect_from_automake.backup" + cp -p COPYING COPYING.$$.protect_from_automake.backup + fi + fi + if test -f INSTALL ; then + if test -f INSTALL.$$.protect_from_automake.backup ; then + $VERBOSE_ECHO "Already backed up INSTALL in `pwd`" + else + $VERBOSE_ECHO "Backing up INSTALL in `pwd`" + $VERBOSE_ECHO "cp -p INSTALL INSTALL.$$.protect_from_automake.backup" + cp -p INSTALL INSTALL.$$.protect_from_automake.backup + fi + fi +} + + +############################## +# RECURSIVE_PROTECT FUNCTION # +############################## +recursive_protect ( ) { + + # for projects using recursive configure, run the build + # preparation steps for the subdirectories. this function assumes + # START_PATH was set to pwd before recursion begins so that + # relative paths work. + + # git 'r done, protect COPYING and INSTALL from being clobbered + protect_from_clobber + + if test -d autom4te.cache ; then + $VERBOSE_ECHO "Found an autom4te.cache directory, deleting it" + $VERBOSE_ECHO "rm -rf autom4te.cache" + rm -rf autom4te.cache + fi + + # find configure template + _configure="`locate_configure_template`" + if [ "x$_configure" = "x" ] ; then + return + fi + # $VERBOSE_ECHO "Looking for configure template found `pwd`/$_configure" + + # look for subdirs + # $VERBOSE_ECHO "Looking for subdirs in `pwd`" + _det_config_subdirs="`grep AC_CONFIG_SUBDIRS $_configure | grep -v '.*#.*AC_CONFIG_SUBDIRS' | sed 's/^[ ]*AC_CONFIG_SUBDIRS(\(.*\)).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`" + CHECK_DIRS="" + for dir in $_det_config_subdirs ; do + if test -d "`pwd`/$dir" ; then + CHECK_DIRS="$CHECK_DIRS \"`pwd`/$dir\"" + fi + done + + # process subdirs + if [ ! "x$CHECK_DIRS" = "x" ] ; then + $VERBOSE_ECHO "Recursively scanning the following directories:" + $VERBOSE_ECHO " $CHECK_DIRS" + for dir in $CHECK_DIRS ; do + $VERBOSE_ECHO "Protecting files from automake in $dir" + cd "$START_PATH" + eval "cd $dir" + + # recursively git 'r done + recursive_protect + done + fi +} # end of recursive_protect + + +############################# +# RESTORE_CLOBBERED FUNCION # +############################# +restore_clobbered ( ) { + + # The automake (and autoreconf by extension) -f/--force-missing + # option may overwrite COPYING and INSTALL even if they do exist. + # Here we restore the files if necessary. 
+ + spacer=no + + # COPYING + if test -f COPYING.$$.protect_from_automake.backup ; then + if test -f COPYING ; then + # compare entire content, restore if needed + if test "x`cat COPYING`" != "x`cat COPYING.$$.protect_from_automake.backup`" ; then + if test "x$spacer" = "xno" ; then + $VERBOSE_ECHO + spacer=yes + fi + # restore the backup + $VERBOSE_ECHO "Restoring COPYING from backup (automake -f likely clobbered it)" + $VERBOSE_ECHO "rm -f COPYING" + rm -f COPYING + $VERBOSE_ECHO "mv COPYING.$$.protect_from_automake.backup COPYING" + mv COPYING.$$.protect_from_automake.backup COPYING + fi # check contents + elif test -f COPYING.$$.protect_from_automake.backup ; then + $VERBOSE_ECHO "mv COPYING.$$.protect_from_automake.backup COPYING" + mv COPYING.$$.protect_from_automake.backup COPYING + fi # -f COPYING + + # just in case + $VERBOSE_ECHO "rm -f COPYING.$$.protect_from_automake.backup" + rm -f COPYING.$$.protect_from_automake.backup + fi # -f COPYING.$$.protect_from_automake.backup + + # INSTALL + if test -f INSTALL.$$.protect_from_automake.backup ; then + if test -f INSTALL ; then + # compare entire content, restore if needed + if test "x`cat INSTALL`" != "x`cat INSTALL.$$.protect_from_automake.backup`" ; then + if test "x$spacer" = "xno" ; then + $VERBOSE_ECHO + spacer=yes + fi + # restore the backup + $VERBOSE_ECHO "Restoring INSTALL from backup (automake -f likely clobbered it)" + $VERBOSE_ECHO "rm -f INSTALL" + rm -f INSTALL + $VERBOSE_ECHO "mv INSTALL.$$.protect_from_automake.backup INSTALL" + mv INSTALL.$$.protect_from_automake.backup INSTALL + fi # check contents + elif test -f INSTALL.$$.protect_from_automake.backup ; then + $VERBOSE_ECHO "mv INSTALL.$$.protect_from_automake.backup INSTALL" + mv INSTALL.$$.protect_from_automake.backup INSTALL + fi # -f INSTALL + + # just in case + $VERBOSE_ECHO "rm -f INSTALL.$$.protect_from_automake.backup" + rm -f INSTALL.$$.protect_from_automake.backup + fi # -f INSTALL.$$.protect_from_automake.backup + + CONFIGURE="`locate_configure_template`" + if [ "x$CONFIGURE" = "x" ] ; then + return + fi + + _aux_dir="`grep AC_CONFIG_AUX_DIR $CONFIGURE | grep -v '.*#.*AC_CONFIG_AUX_DIR' | tail -${TAIL_N}1 | sed 's/^[ ]*AC_CONFIG_AUX_DIR(\(.*\)).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`" + if test ! -d "$_aux_dir" ; then + _aux_dir=. + fi + + for file in config.guess config.sub ltmain.sh ; do + if test -f "${_aux_dir}/${file}" ; then + $VERBOSE_ECHO "rm -f \"${_aux_dir}/${file}.backup\"" + rm -f "${_aux_dir}/${file}.backup" + fi + done +} # end of restore_clobbered + + +############################## +# RECURSIVE_RESTORE FUNCTION # +############################## +recursive_restore ( ) { + + # restore COPYING and INSTALL from backup if they were clobbered + # for each directory recursively. + + # git 'r undone + restore_clobbered + + # find configure template + _configure="`locate_configure_template`" + if [ "x$_configure" = "x" ] ; then + return + fi + + # look for subdirs + _det_config_subdirs="`grep AC_CONFIG_SUBDIRS $_configure | grep -v '.*#.*AC_CONFIG_SUBDIRS' | sed 's/^[ ]*AC_CONFIG_SUBDIRS(\(.*\)).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`" + CHECK_DIRS="" + for dir in $_det_config_subdirs ; do + if test -d "`pwd`/$dir" ; then + CHECK_DIRS="$CHECK_DIRS \"`pwd`/$dir\"" + fi + done + + # process subdirs + if [ ! 
"x$CHECK_DIRS" = "x" ] ; then + $VERBOSE_ECHO "Recursively scanning the following directories:" + $VERBOSE_ECHO " $CHECK_DIRS" + for dir in $CHECK_DIRS ; do + $VERBOSE_ECHO "Checking files for automake damage in $dir" + cd "$START_PATH" + eval "cd $dir" + + # recursively git 'r undone + recursive_restore + done + fi +} # end of recursive_restore + + +####################### +# INITIALIZE FUNCTION # +####################### +initialize ( ) { + + # this routine performs a variety of directory-specific + # initializations. some are sanity checks, some are preventive, + # and some are necessary setup detection. + # + # this function sets: + # CONFIGURE + # SEARCH_DIRS + # CONFIG_SUBDIRS + + ################################## + # check for a configure template # + ################################## + CONFIGURE="`locate_configure_template`" + if [ "x$CONFIGURE" = "x" ] ; then + $ECHO + $ECHO "A configure.ac or configure.in file could not be located implying" + $ECHO "that the GNU Build System is at least not used in this directory. In" + $ECHO "any case, there is nothing to do here without one of those files." + $ECHO + $ECHO "ERROR: No configure.in or configure.ac file found in `pwd`" + exit 1 + fi + + ##################### + # detect an aux dir # + ##################### + _aux_dir="`grep AC_CONFIG_AUX_DIR $CONFIGURE | grep -v '.*#.*AC_CONFIG_AUX_DIR' | tail -${TAIL_N}1 | sed 's/^[ ]*AC_CONFIG_AUX_DIR(\(.*\)).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`" + if test ! -d "$_aux_dir" ; then + _aux_dir=. + else + $VERBOSE_ECHO "Detected auxillary directory: $_aux_dir" + fi + + ################################ + # detect a recursive configure # + ################################ + CONFIG_SUBDIRS="" + _det_config_subdirs="`grep AC_CONFIG_SUBDIRS $CONFIGURE | grep -v '.*#.*AC_CONFIG_SUBDIRS' | sed 's/^[ ]*AC_CONFIG_SUBDIRS(\(.*\)).*/\1/' | sed 's/.*\[\(.*\)\].*/\1/'`" + for dir in $_det_config_subdirs ; do + if test -d "`pwd`/$dir" ; then + $VERBOSE_ECHO "Detected recursive configure directory: `pwd`/$dir" + CONFIG_SUBDIRS="$CONFIG_SUBDIRS `pwd`/$dir" + fi + done + + ########################################## + # make sure certain required files exist # + ########################################## + #for file in AUTHORS COPYING ChangeLog INSTALL NEWS README ; do + # if test ! 
-f $file ; then + # $VERBOSE_ECHO "Touching ${file} since it does not exist" + # touch $file + # fi + #done + + ################################################## + # make sure certain generated files do not exist # + ################################################## + for file in config.guess config.sub ltmain.sh ; do + if test -f "${_aux_dir}/${file}" ; then + $VERBOSE_ECHO "mv -f \"${_aux_dir}/${file}\" \"${_aux_dir}/${file}.backup\"" + mv -f "${_aux_dir}/${file}" "${_aux_dir}/${file}.backup" + fi + done + + ############################ + # search alternate m4 dirs # + ############################ + SEARCH_DIRS="" + for dir in m4 ; do + if [ -d $dir ] ; then + $VERBOSE_ECHO "Found extra aclocal search directory: $dir" + SEARCH_DIRS="$SEARCH_DIRS -I $dir" + fi + done + + ###################################### + # remove any previous build products # + ###################################### + if test -d autom4te.cache ; then + $VERBOSE_ECHO "Found an autom4te.cache directory, deleting it" + $VERBOSE_ECHO "rm -rf autom4te.cache" + rm -rf autom4te.cache + fi +# tcl/tk (and probably others) have a customized aclocal.m4, so can't delete it +# if test -f aclocal.m4 ; then +# $VERBOSE_ECHO "Found an aclocal.m4 file, deleting it" +# $VERBOSE_ECHO "rm -f aclocal.m4" +# rm -f aclocal.m4 +# fi + +} # end of initialize() + + +############## +# initialize # +############## + +# stash path +START_PATH="`pwd`" + +# Before running autoreconf or manual steps, some prep detection work +# is necessary or useful. Only needs to occur once per directory, but +# does need to traverse the entire subconfigure hierarchy to protect +# files from being clobbered even by autoreconf. +recursive_protect + +# start from where we started +cd "$START_PATH" + +# get ready to process +initialize + + +############################################ +# prepare build via autoreconf or manually # +############################################ +reconfigure_manually=no +if [ "x$HAVE_AUTORECONF" = "xyes" ] ; then + $ECHO + $ECHO $ECHO_N "Automatically preparing build ... $ECHO_C" + + $VERBOSE_ECHO "$AUTORECONF $SEARCH_DIRS $AUTORECONF_OPTIONS" + autoreconf_output="`$AUTORECONF $SEARCH_DIRS $AUTORECONF_OPTIONS 2>&1`" + ret=$? + $VERBOSE_ECHO "$autoreconf_output" + + if [ ! $ret = 0 ] ; then + if [ "x$HAVE_ALT_LIBTOOLIZE" = "xyes" ] ; then + if [ ! "x`echo \"$autoreconf_output\" | grep libtoolize | grep \"No such file or directory\"`" = "x" ] ; then + $ECHO + $ECHO "Warning: autoreconf failed but due to what is usually a common libtool" + $ECHO "misconfiguration issue. This problem is encountered on systems that" + $ECHO "have installed libtoolize under a different name without providing a" + $ECHO "symbolic link or without setting the LIBTOOLIZE environment variable." + $ECHO + $ECHO "Restarting the preparation steps with LIBTOOLIZE set to $LIBTOOLIZE" + + export LIBTOOLIZE + RUN_RECURSIVE=no + export RUN_RECURSIVE + untrap_abnormal + + $VERBOSE_ECHO sh $AUTOGEN_SH "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9" + sh "$AUTOGEN_SH" "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9" + exit $? 
+ fi + fi + + $ECHO "Warning: $AUTORECONF failed" + + if test -f ltmain.sh ; then + $ECHO "libtoolize being run by autoreconf is not creating ltmain.sh in the auxillary directory like it should" + fi + + $ECHO "Attempting to run the preparation steps individually" + reconfigure_manually=yes + fi +else + reconfigure_manually=yes +fi + + +############################ +# LIBTOOL_FAILURE FUNCTION # +############################ +libtool_failure ( ) { + + # libtool is rather error-prone in comparison to the other + # autotools and this routine attempts to compensate for some + # common failures. the output after a libtoolize failure is + # parsed for an error related to AC_PROG_LIBTOOL and if found, we + # attempt to inject a project-provided libtool.m4 file. + + _autoconf_output="$1" + + if [ "x$RUN_RECURSIVE" = "xno" ] ; then + # we already tried the libtool.m4, don't try again + return 1 + fi + + if test -f "$LIBTOOL_M4" ; then + found_libtool="`$ECHO $_autoconf_output | grep AC_PROG_LIBTOOL`" + if test ! "x$found_libtool" = "x" ; then + if test -f acinclude.m4 ; then + rm -f acinclude.m4.$$.backup + $VERBOSE_ECHO "cat acinclude.m4 > acinclude.m4.$$.backup" + cat acinclude.m4 > acinclude.m4.$$.backup + fi + $VERBOSE_ECHO "cat \"$LIBTOOL_M4\" >> acinclude.m4" + chmod u+w acinclude.m4 + cat "$LIBTOOL_M4" >> acinclude.m4 + + # don't keep doing this + RUN_RECURSIVE=no + export RUN_RECURSIVE + untrap_abnormal + + $ECHO + $ECHO "Restarting the preparation steps with libtool macros in acinclude.m4" + $VERBOSE_ECHO sh $AUTOGEN_SH "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9" + sh "$AUTOGEN_SH" "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9" + exit $? + fi + fi +} + + +########################### +# MANUAL_AUTOGEN FUNCTION # +########################### +manual_autogen ( ) { + + ################################################## + # Manual preparation steps taken are as follows: # + # aclocal [-I m4] # + # libtoolize --automake -c -f # + # aclocal [-I m4] # + # autoconf -f # + # autoheader # + # automake -a -c -f # + ################################################## + + ########### + # aclocal # + ########### + $VERBOSE_ECHO "$ACLOCAL $SEARCH_DIRS $ACLOCAL_OPTIONS" + aclocal_output="`$ACLOCAL $SEARCH_DIRS $ACLOCAL_OPTIONS 2>&1`" + ret=$? + $VERBOSE_ECHO "$aclocal_output" + if [ ! $ret = 0 ] ; then $ECHO "ERROR: $ACLOCAL failed" && exit 2 ; fi + + ############## + # libtoolize # + ############## + need_libtoolize=no + for feature in AC_PROG_LIBTOOL LT_INIT ; do + $VERBOSE_ECHO "Searching for $feature in $CONFIGURE" + found="`grep \"^$feature.*\" $CONFIGURE`" + if [ ! "x$found" = "x" ] ; then + need_libtoolize=yes + break + fi + done + if [ "x$need_libtoolize" = "xyes" ] ; then + if [ "x$HAVE_LIBTOOLIZE" = "xyes" ] ; then + $VERBOSE_ECHO "$LIBTOOLIZE $LIBTOOLIZE_OPTIONS" + libtoolize_output="`$LIBTOOLIZE $LIBTOOLIZE_OPTIONS 2>&1`" + ret=$? + $VERBOSE_ECHO "$libtoolize_output" + + if [ ! $ret = 0 ] ; then $ECHO "ERROR: $LIBTOOLIZE failed" && exit 2 ; fi + else + if [ "x$HAVE_ALT_LIBTOOLIZE" = "xyes" ] ; then + $VERBOSE_ECHO "$LIBTOOLIZE $ALT_LIBTOOLIZE_OPTIONS" + libtoolize_output="`$LIBTOOLIZE $ALT_LIBTOOLIZE_OPTIONS 2>&1`" + ret=$? + $VERBOSE_ECHO "$libtoolize_output" + + if [ ! $ret = 0 ] ; then $ECHO "ERROR: $LIBTOOLIZE failed" && exit 2 ; fi + fi + fi + + ########### + # aclocal # + ########### + # re-run again as instructed by libtoolize + $VERBOSE_ECHO "$ACLOCAL $SEARCH_DIRS $ACLOCAL_OPTIONS" + aclocal_output="`$ACLOCAL $SEARCH_DIRS $ACLOCAL_OPTIONS 2>&1`" + ret=$? 
+ $VERBOSE_ECHO "$aclocal_output" + + # libtoolize might put ltmain.sh in the wrong place + if test -f ltmain.sh ; then + if test ! -f "${_aux_dir}/ltmain.sh" ; then + $ECHO + $ECHO "Warning: $LIBTOOLIZE is creating ltmain.sh in the wrong directory" + $ECHO + $ECHO "Fortunately, the problem can be worked around by simply copying the" + $ECHO "file to the appropriate location (${_aux_dir}/). This has been done for you." + $ECHO + $VERBOSE_ECHO "cp -p ltmain.sh \"${_aux_dir}/ltmain.sh\"" + cp -p ltmain.sh "${_aux_dir}/ltmain.sh" + $ECHO $ECHO_N "Continuing build preparation ... $ECHO_C" + fi + fi # ltmain.sh + fi # need_libtoolize + + ############ + # autoconf # + ############ + $VERBOSE_ECHO + $VERBOSE_ECHO "$AUTOCONF $AUTOCONF_OPTIONS" + autoconf_output="`$AUTOCONF $AUTOCONF_OPTIONS 2>&1`" + ret=$? + $VERBOSE_ECHO "$autoconf_output" + + if [ ! $ret = 0 ] ; then + # retry without the -f and check for usage of macros that are too new + ac2_59_macros="AC_C_RESTRICT AC_INCLUDES_DEFAULT AC_LANG_ASSERT AC_LANG_WERROR AS_SET_CATFILE" + ac2_55_macros="AC_COMPILER_IFELSE AC_FUNC_MBRTOWC AC_HEADER_STDBOOL AC_LANG_CONFTEST AC_LANG_SOURCE AC_LANG_PROGRAM AC_LANG_CALL AC_LANG_FUNC_TRY_LINK AC_MSG_FAILURE AC_PREPROC_IFELSE" + ac2_54_macros="AC_C_BACKSLASH_A AC_CONFIG_LIBOBJ_DIR AC_GNU_SOURCE AC_PROG_EGREP AC_PROG_FGREP AC_REPLACE_FNMATCH AC_FUNC_FNMATCH_GNU AC_FUNC_REALLOC AC_TYPE_MBSTATE_T" + + macros_to_search="" + ac_major="`echo ${AUTOCONF_VERSION}. | cut -d. -f1 | sed 's/[^0-9]//g'`" + ac_minor="`echo ${AUTOCONF_VERSION}. | cut -d. -f2 | sed 's/[^0-9]//g'`" + + if [ $ac_major -lt 2 ] ; then + macros_to_search="$ac2_59_macros $ac2_55_macros $ac2_54_macros" + else + if [ $ac_minor -lt 54 ] ; then + macros_to_search="$ac2_59_macros $ac2_55_macros $ac2_54_macros" + elif [ $ac_minor -lt 55 ] ; then + macros_to_search="$ac2_59_macros $ac2_55_macros" + elif [ $ac_minor -lt 59 ] ; then + macros_to_search="$ac2_59_macros" + fi + fi + + configure_ac_macros=__none__ + for feature in $macros_to_search ; do + $VERBOSE_ECHO "Searching for $feature in $CONFIGURE" + found="`grep \"^$feature.*\" $CONFIGURE`" + if [ ! "x$found" = "x" ] ; then + if [ "x$configure_ac_macros" = "x__none__" ] ; then + configure_ac_macros="$feature" + else + configure_ac_macros="$feature $configure_ac_macros" + fi + fi + done + if [ ! "x$configure_ac_macros" = "x__none__" ] ; then + $ECHO + $ECHO "Warning: Unsupported macros were found in $CONFIGURE" + $ECHO + $ECHO "The `echo $CONFIGURE | basename` file was scanned in order to determine if any" + $ECHO "unsupported macros are used that exceed the minimum version" + $ECHO "settings specified within this file. As such, the following macros" + $ECHO "should be removed from configure.ac or the version numbers in this" + $ECHO "file should be increased:" + $ECHO + $ECHO "$configure_ac_macros" + $ECHO + $ECHO $ECHO_N "Ignorantly continuing build preparation ... $ECHO_C" + fi + + ################### + # autoconf, retry # + ################### + $VERBOSE_ECHO + $VERBOSE_ECHO "$AUTOCONF" + autoconf_output="`$AUTOCONF 2>&1`" + ret=$? + $VERBOSE_ECHO "$autoconf_output" + + if [ ! 
$ret = 0 ] ; then + # test if libtool is busted + libtool_failure "$autoconf_output" + + # let the user know what went wrong + cat <<EOF +$autoconf_output +EOF + $ECHO "ERROR: $AUTOCONF failed" + exit 2 + else + # autoconf sans -f and possibly sans unsupported options succeed so warn verbosely + $ECHO + $ECHO "Warning: autoconf seems to have succeeded by removing the following options:" + $ECHO " AUTOCONF_OPTIONS=\"$AUTOCONF_OPTIONS\"" + $ECHO + $ECHO "Removing those options should not be necessary and indicate some other" + $ECHO "problem with the build system. The build preparation is highly suspect" + $ECHO "and may result in configuration or compilation errors. Consider" + if [ "x$VERBOSE_ECHO" = "x:" ] ; then + $ECHO "rerunning the build preparation with verbose output enabled." + $ECHO " $AUTOGEN_SH --verbose" + else + $ECHO "reviewing the minimum GNU Autotools version settings contained in" + $ECHO "this script along with the macros being used in your `echo $CONFIGURE | basename` file." + fi + $ECHO + $ECHO $ECHO_N "Continuing build preparation ... $ECHO_C" + fi # autoconf ret = 0 + fi # autoconf ret = 0 + + ############## + # autoheader # + ############## + need_autoheader=no + for feature in AM_CONFIG_HEADER AC_CONFIG_HEADER ; do + $VERBOSE_ECHO "Searching for $feature in $CONFIGURE" + found="`grep \"^$feature.*\" $CONFIGURE`" + if [ ! "x$found" = "x" ] ; then + need_autoheader=yes + break + fi + done + if [ "x$need_autoheader" = "xyes" ] ; then + $VERBOSE_ECHO "$AUTOHEADER $AUTOHEADER_OPTIONS" + autoheader_output="`$AUTOHEADER $AUTOHEADER_OPTIONS 2>&1`" + ret=$? + $VERBOSE_ECHO "$autoheader_output" + if [ ! $ret = 0 ] ; then $ECHO "ERROR: $AUTOHEADER failed" && exit 2 ; fi + fi # need_autoheader + + ############ + # automake # + ############ + need_automake=no + for feature in AM_INIT_AUTOMAKE ; do + $VERBOSE_ECHO "Searching for $feature in $CONFIGURE" + found="`grep \"^$feature.*\" $CONFIGURE`" + if [ ! "x$found" = "x" ] ; then + need_automake=yes + break + fi + done + + if [ "x$need_automake" = "xyes" ] ; then + $VERBOSE_ECHO "$AUTOMAKE $AUTOMAKE_OPTIONS" + automake_output="`$AUTOMAKE $AUTOMAKE_OPTIONS 2>&1`" + ret=$? + $VERBOSE_ECHO "$automake_output" + + if [ ! $ret = 0 ] ; then + + ################### + # automake, retry # + ################### + $VERBOSE_ECHO + $VERBOSE_ECHO "$AUTOMAKE $ALT_AUTOMAKE_OPTIONS" + # retry without the -f + automake_output="`$AUTOMAKE $ALT_AUTOMAKE_OPTIONS 2>&1`" + ret=$? + $VERBOSE_ECHO "$automake_output" + + if [ ! $ret = 0 ] ; then + # test if libtool is busted + libtool_failure "$automake_output" + + # let the user know what went wrong + cat <<EOF +$automake_output +EOF + $ECHO "ERROR: $AUTOMAKE failed" + exit 2 + fi # automake retry + fi # automake ret = 0 + fi # need_automake +} # end of manual_autogen + + +##################################### +# RECURSIVE_MANUAL_AUTOGEN FUNCTION # +##################################### +recursive_manual_autogen ( ) { + + # run the build preparation steps manually for this directory + manual_autogen + + # for projects using recursive configure, run the build + # preparation steps for the subdirectories. + if [ ! 
"x$CONFIG_SUBDIRS" = "x" ] ; then + $VERBOSE_ECHO "Recursively configuring the following directories:" + $VERBOSE_ECHO " $CONFIG_SUBDIRS" + for dir in $CONFIG_SUBDIRS ; do + $VERBOSE_ECHO "Processing recursive configure in $dir" + cd "$START_PATH" + cd "$dir" + + # new directory, prepare + initialize + + # run manual steps for the subdir and any others below + recursive_manual_autogen + done + fi +} + + +################################ +# run manual preparation steps # +################################ +if [ "x$reconfigure_manually" = "xyes" ] ; then + $ECHO + $ECHO $ECHO_N "Preparing build ... $ECHO_C" + + recursive_manual_autogen +fi + + +######################### +# restore and summarize # +######################### +cd "$START_PATH" + +# restore COPYING and INSTALL from backup if necessary +recursive_restore + +# make sure we end up with a configure script +config_ac="`locate_configure_template`" +config="`echo $config_ac | sed 's/\.ac$//' | sed 's/\.in$//'`" +if [ "x$config" = "x" ] ; then + $VERBOSE_ECHO "Could not locate the configure template (from `pwd`)" +fi + +# summarize +$ECHO "done" +$ECHO +if test "x$config" = "x" -o ! -f "$config" ; then + $ECHO "WARNING: The $PROJECT build system should now be prepared but there" + $ECHO "does not seem to be a resulting configure file. This is unexpected" + $ECHO "and likely the result of an error. You should run $NAME_OF_AUTOGEN" + $ECHO "with the --verbose option to get more details on a potential" + $ECHO "misconfiguration." +else + $ECHO "The $PROJECT build system is now prepared. To build here, run:" + $ECHO " $config" + $ECHO " make" +fi + + +# Local Variables: +# mode: sh +# tab-width: 8 +# sh-basic-offset: 4 +# sh-indentation: 4 +# indent-tabs-mode: t +# End: +# ex: shiftwidth=4 tabstop=8 diff --git a/tools/configure.ac b/tools/configure.ac new file mode 100644 index 00000000..22b4b96a --- /dev/null +++ b/tools/configure.ac @@ -0,0 +1,391 @@ +# -*- Autoconf -*- +# Process this file with autoconf to produce a configure script. + +AC_PREREQ(2.61) + +m4_include([version.m4]) + +AC_INIT([ledger],[VERSION_NUMBER],[johnw@newartisans.com]) + +AC_CONFIG_AUX_DIR([.]) +AM_INIT_AUTOMAKE([dist-bzip2 foreign]) +AC_CONFIG_MACRO_DIR([m4]) + +AC_CONFIG_SRCDIR([src/main.cc]) +AC_CONFIG_HEADER([config.h]) + +# Checks for programs. 
+AC_USE_SYSTEM_EXTENSIONS +AC_PROG_CXX +AC_PROG_MAKE_SET +AC_PROG_LIBTOOL +AM_GNU_GETTEXT +AM_GNU_GETTEXT_VERSION([0.17]) + +# Checks for emacs lisp path +AM_PATH_LISPDIR + +# Check for options +AC_ARG_ENABLE(debug, + [ --enable-debug Turn on debugging], + [case "${enableval}" in + yes) debug=true ;; + no) debug=false ;; + *) AC_MSG_ERROR(bad value ${enableval} for --enable-debug) ;; + esac],[debug=false]) + +if [ test x$debug = xtrue ]; then + AC_DEFINE([DEBUG_MODE], [1], [Whether debugging is enabled]) +fi +AM_CONDITIONAL(DEBUG, test x$debug = xtrue) + +AC_ARG_ENABLE(pch, + [ --enable-pch Use GCC 4.x pre-compiled headers], + [case "${enableval}" in + yes) pch=true ;; + no) pch=false ;; + *) AC_MSG_ERROR(bad value ${enableval} for --enable-pch) ;; + esac],[pch=false]) + +if [ test x$pch = xtrue ]; then + AC_DEFINE([USE_PCH], [1], [Whether pre-compiled headers are being used]) +fi +AM_CONDITIONAL(USE_PCH, test x$pch = xtrue) + +AC_ARG_ENABLE(doxygen, + [ --enable-doxygen Turns on generation of code documentation], + [case "${enableval}" in + yes) doxygen=true ;; + no) doxygen=false ;; + *) AC_MSG_ERROR(bad value ${enableval} for --enable-doxygen) ;; + esac],[doxygen=false]) + +AM_CONDITIONAL(USE_DOXYGEN, test x$doxygen = xtrue) + +AC_ARG_WITH(boost-suffix, + [ --with-boost-suffix=X Append X to the Boost library names], + [BOOST_SUFFIX="${withval}"], + [BOOST_SUFFIX=""]) + +AC_SUBST([BOOST_SUFFIX], $BOOST_SUFFIX) + +# check if UNIX pipes are available +AC_CACHE_CHECK( + [if pipes can be used], + [pipes_avail_cv_], + [AC_LANG_PUSH(C++) + AC_LINK_IFELSE( + [AC_LANG_PROGRAM( + [[#include <sys/types.h> + #include <sys/wait.h> + #include <unistd.h> + #include <stdlib.h> + #include <string.h> + #include <stdio.h>]], + [[int status, pfd[2]; + status = pipe(pfd); + status = fork(); + if (status < 0) { + ; + } else if (status == 0) { + char *arg0 = NULL; + + status = dup2(pfd[0], STDIN_FILENO); + + close(pfd[1]); + close(pfd[0]); + + execlp("", arg0, (char *)0); + perror("execl"); + exit(1); + } else { + close(pfd[0]); + }]])], + [pipes_avail_cv_=true], + [pipes_avail_cv_=false]) + AC_LANG_POP]) + +if [test x$pipes_avail_cv_ = xtrue ]; then + AC_DEFINE([HAVE_UNIX_PIPES], [1], [Whether UNIX pipes are available]) +fi + +# check for gmp +AC_CACHE_CHECK( + [if GMP is available], + [libgmp_avail_cv_], + [libgmp_save_libs=$LIBS + LIBS="-lgmp $LIBS" + AC_LANG_PUSH(C++) + AC_LINK_IFELSE([AC_LANG_PROGRAM([[#include <gmp.h>]], [[mpz_t bar; + mpz_init(bar); + mpz_clear(bar);]])],[libgmp_avail_cv_=true],[libgmp_avail_cv_=false]) + AC_LANG_POP + LIBS=$libgmp_save_libs]) + +if [test x$libgmp_avail_cv_ = xtrue ]; then + LIBS="-lgmp $LIBS" +else + AC_MSG_FAILURE("Could not find gmp library (set CPPFLAGS and LDFLAGS?)") +fi + +# check for mpfr +AC_CACHE_CHECK( + [if MPFR is available], + [libmpfr_avail_cv_], + [libmpfr_save_libs=$LIBS + LIBS="-lmpfr $LIBS" + AC_LANG_PUSH(C++) + AC_LINK_IFELSE([AC_LANG_PROGRAM([[#include <mpfr.h>]], [[mpfr_t bar; + mpfr_init(bar); + char * buf = NULL; + mpfr_asprintf(&buf, "%Rf", bar); + mpfr_clear(bar);]])],[libmpfr_avail_cv_=true],[libmpfr_avail_cv_=false]) + AC_LANG_POP + LIBS=$libmpfr_save_libs]) + +if [test x$libmpfr_avail_cv_ = xtrue ]; then + LIBS="-lmpfr $LIBS" +else + AC_MSG_FAILURE("Could not find mpfr library 2.4.0 or higher (set CPPFLAGS and LDFLAGS?)") +fi + +# check for edit +AC_CACHE_CHECK( + [if libedit is available], + [libedit_avail_cv_], + [libedit_save_libs=$LIBS + LIBS="-ledit $LIBS" + AC_LANG_PUSH(C++) + AC_LINK_IFELSE([AC_LANG_PROGRAM( + [[#include <stdlib.h> + 
#include <stdio.h> + #include <editline/readline.h>]], + [[rl_readline_name = const_cast<char *>("foo"); + char * line = readline(const_cast<char *>("foo: ")); + free(line);]])],[libedit_avail_cv_=true],[libedit_avail_cv_=false]) + AC_LANG_POP + LIBS=$libedit_save_libs]) + +if [test x$libedit_avail_cv_ = xtrue ]; then + LIBS="-ledit $LIBS" + AC_DEFINE([HAVE_LIBEDIT], [1], [If the libedit library is available]) +fi + +# check for boost_regex +AC_CACHE_CHECK( + [if boost_regex is available], + [boost_regex_avail_cv_], + [boost_regex_save_libs=$LIBS + LIBS="-lboost_regex$BOOST_SUFFIX $LIBS" + AC_LANG_PUSH(C++) + AC_LINK_IFELSE( + [AC_LANG_PROGRAM( + [[#include <boost/regex.hpp>]], + [[boost::regex foo_regexp("Hello, world!");]])], + [boost_regex_avail_cv_=true], + [boost_regex_avail_cv_=false]) + AC_LANG_POP + LIBS=$boost_regex_save_libs]) + +if [test x$boost_regex_avail_cv_ = xtrue ]; then + LIBS="-lboost_regex$BOOST_SUFFIX $LIBS" +else + AC_MSG_FAILURE("Could not find boost_regex library (set CPPFLAGS and LDFLAGS?)") +fi + +# check for boost_date_time +AC_CACHE_CHECK( + [if boost_date_time is available], + [boost_date_time_cpplib_avail_cv_], + [boost_date_time_save_libs=$LIBS + LIBS="-lboost_date_time$BOOST_SUFFIX $LIBS" + AC_LANG_PUSH(C++) + AC_LINK_IFELSE( + [AC_LANG_PROGRAM( + [[#include <boost/date_time/posix_time/posix_time.hpp> + #include <boost/date_time/gregorian/gregorian.hpp> + #include <boost/date_time/local_time_adjustor.hpp> + #include <boost/date_time/time_duration.hpp> + + using namespace boost::posix_time; + using namespace boost::date_time; + + #include <ctime> + + inline ptime time_to_system_local(const ptime& when) { + struct std::tm tm_gmt = to_tm(when); + return from_time_t(mktime(&tm_gmt)); + }]], + [[ptime t10 = ptime(boost::gregorian::from_string("2007-01-15"), + ptime::time_duration_type()); + + ptime t12 = time_to_system_local(t10); + + return t10 != t12;]])], + [boost_date_time_cpplib_avail_cv_=true], + [boost_date_time_cpplib_avail_cv_=false]) + AC_LANG_POP + LIBS=$boost_date_time_save_libs]) + +if [test x$boost_date_time_cpplib_avail_cv_ = xtrue ]; then + LIBS="-lboost_date_time$BOOST_SUFFIX $LIBS" +else + AC_MSG_FAILURE("Could not find boost_date_time library (set CPPFLAGS and LDFLAGS?)") +fi + +# check for boost_filesystem +AC_CACHE_CHECK( + [if boost_filesystem is available], + [boost_filesystem_cpplib_avail_cv_], + [boost_filesystem_save_libs=$LIBS + LIBS="-lboost_filesystem$BOOST_SUFFIX -lboost_system$BOOST_SUFFIX $LIBS" + AC_LANG_PUSH(C++) + AC_LINK_IFELSE( + [AC_LANG_PROGRAM( + [[#include <boost/filesystem/path.hpp>]], + [[boost::filesystem::path this_path("Hello");]])], + [boost_filesystem_cpplib_avail_cv_=true], + [boost_filesystem_cpplib_avail_cv_=false]) + AC_LANG_POP + LIBS=$boost_filesystem_save_libs]) + +if [test x$boost_filesystem_cpplib_avail_cv_ = xtrue ]; then + LIBS="-lboost_filesystem$BOOST_SUFFIX -lboost_system$BOOST_SUFFIX $LIBS" +else + AC_MSG_FAILURE("Could not find boost_filesystem library (set CPPFLAGS and LDFLAGS?)") +fi + +# check for boost_iostreams +AC_CACHE_CHECK( + [if boost_iostreams is available], + [boost_iostreams_cpplib_avail_cv_], + [boost_iostreams_save_libs=$LIBS + LIBS="-lboost_iostreams$BOOST_SUFFIX -lboost_system$BOOST_SUFFIX $LIBS" + AC_LANG_PUSH(C++) + AC_LINK_IFELSE( + [AC_LANG_PROGRAM( + [[#include <boost/iostreams/device/file_descriptor.hpp> + #include <boost/iostreams/stream.hpp>]], + [[namespace io = boost::iostreams; + typedef io::stream<io::file_descriptor_sink> ofdstream; + ofdstream outstream(1);]])], + 
[boost_iostreams_cpplib_avail_cv_=true], + [boost_iostreams_cpplib_avail_cv_=false]) + AC_LANG_POP + LIBS=$boost_iostreams_save_libs]) + +if [test x$boost_iostreams_cpplib_avail_cv_ = xtrue ]; then + LIBS="-lboost_iostreams$BOOST_SUFFIX -lboost_system$BOOST_SUFFIX $LIBS" +else + AC_MSG_FAILURE("Could not find boost_iostreams library (set CPPFLAGS and LDFLAGS?)") +fi + +# check for boost_serialization +AC_CACHE_CHECK( + [if boost_serialization is available], + [boost_serialization_cpplib_avail_cv_], + [boost_serialization_save_libs=$LIBS + LIBS="-lboost_serialization$BOOST_SUFFIX -lboost_system$BOOST_SUFFIX $LIBS" + AC_LANG_PUSH(C++) + AC_LINK_IFELSE( + [AC_LANG_PROGRAM( + [[#include <boost/archive/binary_oarchive.hpp> + #include <iostream> + struct foo { + int a; + template<class Archive> + void serialize(Archive & ar, const unsigned int) { + ar & a; + } + };]], + [[boost::archive::binary_oarchive oa(std::cout); + foo x; + oa << x;]])], + [boost_serialization_cpplib_avail_cv_=true], + [boost_serialization_cpplib_avail_cv_=false]) + AC_LANG_POP + LIBS=$boost_serialization_save_libs]) + +if [test x$boost_serialization_cpplib_avail_cv_ = xtrue ]; then + AC_DEFINE([HAVE_BOOST_SERIALIZATION], [1], [Whether Boost.Serialization is available]) + LIBS="-lboost_serialization$BOOST_SUFFIX $LIBS" +fi +AM_CONDITIONAL(HAVE_BOOST_SERIALIZATION, test x$boost_serialization_cpplib_avail_cv_ = xtrue) + +# check for Python +AM_PATH_PYTHON(2.4,, :) +if [test "$PYTHON" != :]; then + AM_CONDITIONAL(HAVE_PYTHON, true) + AC_CACHE_CHECK( + [if boost_python is available], + [boost_python_cpplib_avail_cv_], + [boost_python_save_libs=$LIBS + LIBS="-lboost_python$BOOST_SUFFIX -lpython$PYTHON_VERSION $LIBS" + AC_LANG_PUSH(C++) + AC_LINK_IFELSE( + [AC_LANG_PROGRAM( + [[#include <boost/python.hpp> + using namespace boost::python; + class foo {}; + BOOST_PYTHON_MODULE(samp) { + class_< foo > ("foo") ; + }]], + [[return 0]])], + [boost_python_cpplib_avail_cv_=true], + [boost_python_cpplib_avail_cv_=false]) + AC_LANG_POP + LIBS=$boost_python_save_libs]) + + if [ test x$boost_python_cpplib_avail_cv_ = xtrue ]; then + AC_DEFINE([HAVE_BOOST_PYTHON], [1], [Whether Boost.Python is available]) + LIBS="-lboost_python$BOOST_SUFFIX -lpython$PYTHON_VERSION $LIBS" + fi +else + AM_CONDITIONAL(HAVE_PYTHON, false) +fi +AM_CONDITIONAL(HAVE_BOOST_PYTHON, test x$boost_python_cpplib_avail_cv_ = xtrue) + +# check for CppUnit +AC_CACHE_CHECK( + [if cppunit is available], + [cppunit_avail_cv_], + [cppunit_save_libs=$LIBS + LIBS="-lcppunit $LIBS" + AC_LANG_PUSH(C++) + AC_LINK_IFELSE( + [AC_LANG_PROGRAM( + [[#include <cppunit/CompilerOutputter.h> + #include <cppunit/TestResult.h> + #include <cppunit/TestResultCollector.h> + #include <cppunit/TestRunner.h> + #include <cppunit/TextTestProgressListener.h> + #include <cppunit/BriefTestProgressListener.h> + #include <cppunit/XmlOutputter.h> + #include <cppunit/extensions/TestFactoryRegistry.h>]], + [[CPPUNIT_NS::TestResult controller; + CPPUNIT_NS::TestResultCollector result;]])], + [cppunit_avail_cv_=true], + [cppunit_avail_cv_=false]) + AC_LANG_POP + LIBS=$cppunit_save_libs]) + +AM_CONDITIONAL(HAVE_CPPUNIT, test x$cppunit_avail_cv_ = xtrue) + +# Checks for header files. +AC_HEADER_STDC +AC_HEADER_STAT +AC_CHECK_HEADERS([langinfo.h]) + +# Checks for typedefs, structures, and compiler characteristics. +AC_TYPE_SIZE_T +AC_STRUCT_TM + +# Checks for library functions. 
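Each AC_DEFINE in the checks above ends up as a #define in the generated config.h, which is what the sources test at compile time; the function checks below contribute HAVE_ACCESS, HAVE_REALPATH, and so on in the same way. Purely for illustration, a successful run might leave entries such as:

    $ grep -E 'HAVE_UNIX_PIPES|HAVE_LIBEDIT|HAVE_BOOST_SERIALIZATION' config.h
    #define HAVE_UNIX_PIPES 1
    #define HAVE_LIBEDIT 1
    #define HAVE_BOOST_SERIALIZATION 1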
+#AC_FUNC_MKTIME +#AC_FUNC_STAT +#AC_FUNC_STRFTIME +AC_CHECK_FUNCS([access realpath getpwuid getpwnam isatty]) + +# Prepare the Makefiles +AC_CONFIG_FILES([Makefile po/Makefile.in intl/Makefile]) +AC_OUTPUT
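Taken together, the switches this configure.ac defines can be exercised in one go. The invocation below is purely illustrative (the Boost suffix, include path, and library path are hypothetical) and assumes the test wrappers from the accompanying Makefile.am:

    $ ./configure --enable-debug --enable-doxygen \
          --with-boost-suffix=-xgcc42 \
          CPPFLAGS=-I$HOME/boost LDFLAGS=-L$HOME/boost/lib
    $ make
    $ make fullcheck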