diff --git a/.gitignore b/.gitignore index 3f3494c..20c6226 100644 --- a/.gitignore +++ b/.gitignore @@ -1,24 +1,5 @@ -*.lo -*.la +.vscode/ +build/ *.o *.so -.deps -.libs -aclocal.m4 -autom4te.cache -configure -Makefile -Makefile.in -INSTALL -m4 -py-compile -config.* -src/psftest -test/test_psfdataset -missing -ltmain.sh -libtool -libpsf.pc -libpsf-uninstalled.pc -depcomp *~ diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 0000000..69aba94 --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,25 @@ +# https://stackoverflow.com/questions/17511496/how-to-create-a-shared-library-with-cmake +# https://github.com/giuliopaci/cmake-tutorial/blob/master/CMakeLists.txt + +cmake_minimum_required(VERSION 3.12) + +project(libpsf VERSION 0.3 + DESCRIPTION "Load Cadence Spectre PSF simulation data" + LANGUAGES CXX) + +option(WITH_PYTHON "Build python bindings" ON) + +set(CMAKE_CXX_STANDARD 11) +set(CMAKE_CXX_STANDARD_REQUIRED True) +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + +if(CMAKE_PROJECT_NAME STREQUAL PROJECT_NAME) + include(CTest) +endif() + +add_subdirectory(src) +add_subdirectory(test) + +if(WITH_PYTHON) + add_subdirectory(bindings) +endif() \ No newline at end of file diff --git a/ChangeLog b/ChangeLog deleted file mode 100644 index e69de29..0000000 diff --git a/Makefile.am b/Makefile.am deleted file mode 100644 index 6791395..0000000 --- a/Makefile.am +++ /dev/null @@ -1,14 +0,0 @@ -if ENABLE_TESTS - MAYBE_TEST = test -endif - -SUBDIRS = \ - src \ - include \ - bindings \ - $(MAYBE_TEST) - -pkgconfigdir=$(libdir)/pkgconfig -pkgconfig_DATA=libpsf.pc - -README: README.rst diff --git a/NEWS b/NEWS deleted file mode 100644 index e69de29..0000000 diff --git a/README.rst b/README.rst index 818b05c..38a27a5 100644 --- a/README.rst +++ b/README.rst @@ -3,46 +3,44 @@ libpsf is a c++ library that reads Cadence PSF waveform files Install ======= -Install prerequisits +Install prerequisites -------------------- +If building without python binding, only cmake and boost are required -On a debian based system you can run the following to install the +- On a debian based system you can run the following to install the packages needed to build libpsf: -sudo apt-get install autoconf automake libtool libboost-all-dev python-numpy-dev + $ sudo apt-get install cmake libboost-all-dev python-numpy-dev cython cppunit + +- Otherwise conda can be used to install the following packages: + + $ conda install python numpy cython cmake + + Then install boost libraries and set + + $ export BOOST_LOC= Build and install ----------------- -To build and install the library:: +- From root directory, create build directory - ./autogen.sh - make - sudo make install + $ mkdir build && cd build +- Run cmake configuration -To build the python extension with conda:: - - conda install python=3.7 numpy automake libtool cython + $ cmake .. -DCMAKE_INSTALL_PREFIX=$CONDA_PREFIX -DCMAKE_BUILD_TYPE=RELEASE -DWITH_PYTHON=ON - # link python3.7 to python3.7m - ln -s $CONDA_PREFIX/lib/libpython3.7m.so $CONDA_PREFIX/lib/libpython3.7.so + `CONDA_PREFIX` is the destination where you want libpsf to be installed + To build without the python binding, just set `-DWITH_PYTHON=OFF` +- Build + + $ make +- To run tests, [cppunit](https://www.freedesktop.org/wiki/Software/cppunit) is required. - - ./autogen.sh - ./configure --prefix=$CONDA_PREFIX --with-python - # make errors out with "cannot find the library 'libpsf.la'" so build libpsf.la first - cd src - make libpsf.la - cd .. 
- make install - cd bindings/python - python setup.py install - - -Running the tests ------------------ -Install cppunit, then compile and run the tests in the test dir:: + $ ctest + + `ctest --verbose` to see individual test result outputs + +- Install - sudo apt-get install libcppunit-dev - cd test - make - ./test_psfdataset + $ make install + diff --git a/autogen.sh b/autogen.sh deleted file mode 100755 index 6dec92d..0000000 --- a/autogen.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -e -test -n "$srcdir" || srcdir=`dirname "$0"` -test -n "$srcdir" || srcdir=. - -autoreconf --force --install --verbose "$srcdir" -Im4 -test -n "$NOCONFIGURE" || "$srcdir/configure" "$@" diff --git a/bindings/CMakeLists.txt b/bindings/CMakeLists.txt new file mode 100644 index 0000000..8e5f91a --- /dev/null +++ b/bindings/CMakeLists.txt @@ -0,0 +1 @@ +add_subdirectory(python) diff --git a/bindings/Makefile.am b/bindings/Makefile.am deleted file mode 100644 index 99ec3a9..0000000 --- a/bindings/Makefile.am +++ /dev/null @@ -1,6 +0,0 @@ -if WITH_PYTHONBINDINGS -MAYBE_PYTHONBINDINGS = python -endif - -SUBDIRS = $(MAYBE_PYTHONBINDINGS) - diff --git a/bindings/python/.gitignore b/bindings/python/.gitignore new file mode 100644 index 0000000..5fa7484 --- /dev/null +++ b/bindings/python/.gitignore @@ -0,0 +1,3 @@ +libpsf.cpp +libpsf.h +*.pyc diff --git a/bindings/python/CMakeLists.txt b/bindings/python/CMakeLists.txt new file mode 100644 index 0000000..77b0973 --- /dev/null +++ b/bindings/python/CMakeLists.txt @@ -0,0 +1,46 @@ +find_package(Python COMPONENTS Interpreter Development) +find_program(CYTHON_EXECUTABLE NAMES cython cython3 + HINTS ${_python_path}) +# find_program(Python REQUIRED COMPONENTS Interpreter Development NumPy) +# https://bloerg.net/posts/cmake-and-distutils/ +if( Python_EXECUTABLE AND CYTHON_EXECUTABLE) + MESSAGE( STATUS "numpy headers found at: ${Python_NumPy_INCLUDE_DIRS}") + MESSAGE( STATUS "cython found at: ${CYTHON_EXECUTABLE}") + + # set variables for setup.py.in + get_target_property(LIBPSF_BUILD_DIR psf BINARY_DIR) + get_target_property(LIBPSF_INCLUDE psf INCLUDE_DIRECTORIES) + + set(SETUP_PY_IN "${CMAKE_CURRENT_SOURCE_DIR}/setup.py.in") + set(SETUP_PY "${CMAKE_CURRENT_BINARY_DIR}/setup.py") + set(DEPS setup.py.in psfpython.h psfpython.cc libpsf.pyx cpp_defs.pxd) + set(OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/build/timestamp") + configure_file(${SETUP_PY_IN} ${SETUP_PY}) + # build commands + add_custom_command(OUTPUT ${OUTPUT} + COMMAND ${Python_EXECUTABLE} ${SETUP_PY} build_ext --inplace + COMMAND ${Python_EXECUTABLE} ${SETUP_PY} bdist_wheel + COMMAND ${CMAKE_COMMAND} -E touch ${OUTPUT} + DEPENDS ${DEPS}) + + add_custom_target(python_binding ALL DEPENDS ${OUTPUT}) + add_dependencies(python_binding psf) + # install binding + install(CODE "execute_process(COMMAND ${Python_EXECUTABLE} ${SETUP_PY} install)") + + + install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/dist + DESTINATION ${CMAKE_CURRENT_SOURCE_DIR}) + + # tests + if(CMAKE_PROJECT_NAME STREQUAL PROJECT_NAME AND BUILD_TESTING) + add_test(NAME python_test + COMMAND ${Python_EXECUTABLE} -m unittest test_psfdataset.py -v + WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/tests") + set_tests_properties(python_test PROPERTIES + ENVIRONMENT PYTHONPATH=${CMAKE_CURRENT_BINARY_DIR}:$ENV{PYTHONPATH}) + endif() + +else() + MESSAGE( WARNING "python/numpy/cython include not found, skipping python bindings") +endif() diff --git a/bindings/python/Makefile.am b/bindings/python/Makefile.am deleted file mode 100644 index 6da057b..0000000 --- 
a/bindings/python/Makefile.am +++ /dev/null @@ -1,17 +0,0 @@ -EXTRA_DIST = setup.py - -if WITH_PYTHONBINDINGS - -all-local: - $(PYTHON) setup.py build_ext --include-dirs=../../include:@BOOST_ROOT_PATH@/include --library-dirs=../../src/.libs:@BOOST_ROOT_PATH@/lib - -install-exec-local: - $(PYTHON) setup.py install --prefix=$(prefix) --root=$(DESTDIR) - -uninstall-local: - rm -rf $(DESTDIR)$(libdir)/python*/*-packages/*libpsf* - -clean-local: - $(PYTHON) setup.py clean --all - -endif \ No newline at end of file diff --git a/bindings/python/dist/libpsf-0.0.1-cp36-cp36m-linux_x86_64.whl b/bindings/python/dist/libpsf-0.0.1-cp36-cp36m-linux_x86_64.whl deleted file mode 100644 index 0b0f611..0000000 Binary files a/bindings/python/dist/libpsf-0.0.1-cp36-cp36m-linux_x86_64.whl and /dev/null differ diff --git a/bindings/python/dist/libpsf-0.0.1-cp37-cp37m-linux_x86_64.whl b/bindings/python/dist/libpsf-0.0.1-cp37-cp37m-linux_x86_64.whl deleted file mode 100644 index c7d0b3d..0000000 Binary files a/bindings/python/dist/libpsf-0.0.1-cp37-cp37m-linux_x86_64.whl and /dev/null differ diff --git a/bindings/python/dist/libpsf-0.3-cp37-cp37m-linux_x86_64.whl b/bindings/python/dist/libpsf-0.3-cp37-cp37m-linux_x86_64.whl new file mode 100644 index 0000000..764bf82 Binary files /dev/null and b/bindings/python/dist/libpsf-0.3-cp37-cp37m-linux_x86_64.whl differ diff --git a/bindings/python/setup.py b/bindings/python/setup.py.in similarity index 59% rename from bindings/python/setup.py rename to bindings/python/setup.py.in index 81fce68..041fb6e 100644 --- a/bindings/python/setup.py +++ b/bindings/python/setup.py.in @@ -1,14 +1,13 @@ #!/usr/bin/env python -import os, sys +import os import numpy -from sysconfig import get_paths # BEFORE importing setuptools, remove MANIFEST. Otherwise it may not be # properly updated when the contents of directories change (true for distutils, # not sure about setuptools). 
if os.path.exists('MANIFEST'): - os.remove('MANIFEST') + os.remove('MANIFEST') from setuptools import setup, Extension from Cython.Build import cythonize @@ -19,31 +18,33 @@ except: long_description = '' -root_include = os.path.abspath(os.path.join(get_paths()['include'], '..')) +numpy_includes = [numpy.get_include()] +python_includes = "${Python_INCLUDE_DIRS}".split(":") +psf_includes = "${LIBPSF_INCLUDE}".split() +psf_lib_dir = "${LIBPSF_BUILD_DIR}" # https://stackoverflow.com/questions/4597228/how-to-statically-link-a-library-when-compiling-a-python-module-extension -lib_dir = os.path.abspath(os.path.join(get_paths()['stdlib'], '..')) -if 'bdist_wheel' in sys.argv: - static_libraries = ['psf'] - extra_objects = ['{}/lib{}.a'.format(lib_dir, l) for l in static_libraries] - libraries = [] -else: - extra_objects = [] - libraries = ['psf'] +static_libraries = ['psf'] +extra_objects = [ + '{}/lib{}_static.a'.format(psf_lib_dir, l) for l in static_libraries] +libraries = [] +extra_link_args=[] libpsf_ext = Extension( name="libpsf", - sources = ["libpsf.pyx", "psfpython.cc"], - extra_objects = extra_objects, - libraries = ["psf"], - include_dirs = [root_include, numpy.get_include() ], + sources=["${CMAKE_CURRENT_SOURCE_DIR}/libpsf.pyx", + "${CMAKE_CURRENT_SOURCE_DIR}/psfpython.cc"], + extra_objects=extra_objects, + libraries=libraries, + include_dirs=psf_includes + numpy_includes + python_includes, + extra_link_args=extra_link_args, ) setup( name="libpsf", ext_modules=cythonize([libpsf_ext]), - version="0.0.1", + version="${CMAKE_PROJECT_VERSION}", description="library to read Cadence PSF output", install_requires=['numpy>=1.10.0'], test_suite="tests", @@ -52,7 +53,7 @@ long_description=long_description, long_description_content_type="text/markdown", license="GNU Lesser General Public License v3.0", - keywords=["cadence","spectre","virtuoso","circtuit", "simulation", + keywords=["cadence", "spectre", "virtuoso", "circtuit", "simulation", "waveform", "circuit simulation"], zip_safe=False ) diff --git a/bindings/python/tests/__init__.py b/bindings/python/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/bindings/python/tests/test_psfdataset.py b/bindings/python/tests/test_psfdataset.py index 1f50894..08e9b59 100644 --- a/bindings/python/tests/test_psfdataset.py +++ b/bindings/python/tests/test_psfdataset.py @@ -55,6 +55,6 @@ def test_is_swept(self): def test_get_signal_properties(self): - with self.assertRaises(libpsf.NotFound) as context: + with self.assertRaises(libpsf.NotFound): self.psf.get_signal_properties("PSUP") diff --git a/configure.ac b/configure.ac deleted file mode 100644 index 3c4a092..0000000 --- a/configure.ac +++ /dev/null @@ -1,98 +0,0 @@ -# -*- Autoconf -*- -# Process this file with autoconf to produce a configure script. - -AC_PREREQ([2.59]) -AC_INIT([libpsf],[0.2],[henrik@johome.net]) -AC_CONFIG_MACRO_DIR([m4]) -AM_INIT_AUTOMAKE([1.9 -Wall no-define]) - -AC_LANG([C++]) - -dnl Checks for programs. -AC_PROG_CXX -AC_PROG_AWK -AC_PROG_CC -AC_PROG_CPP -AC_PROG_LIBTOOL -AC_PROG_INSTALL -AC_PROG_LN_S -AC_PROG_MAKE_SET -AC_PROG_RANLIB -AM_PROG_LIBTOOL - -dnl Checks for libraries. - -# Checks for header files. -AC_CHECK_HEADERS([arpa/inet.h fcntl.h stdint.h stdlib.h string.h unistd.h]) - -dnl Checks for typedefs, structures, and compiler characteristics. 
-AC_HEADER_STDBOOL -AC_TYPE_INT32_T -AC_TYPE_INT8_T -AC_TYPE_OFF_T -AC_TYPE_UINT32_T -AC_TYPE_UINT64_T - -dnl Check existence of tr1::unordered_map -AC_CHECK_HEADERS([tr1/unordered_map]) - -dnl Checks for boost -AX_BOOST_BASE([1.32.0]) -if test "$succeeded" != "yes" ; then - echo "Error: You need to install the boost library!" - exit -fi - -dnl check to build python bindings -AC_ARG_WITH(python, [AS_HELP_STRING([--with-python], [compile with Python bindings])], - with_python=$withval, with_python=no) - -if test $with_python = yes; then - AX_PYTHON_DEVEL - - AX_BOOST_PYTHON - - dnl Checks for boost python and numpy - AM_PATH_PYTHON(2.5, have_python=true, have_python=false) - - dnl Check for numpy - AC_MSG_CHECKING([for Numpy include directory]) - - CPPFLAGS_SAVE=$CPPFLAGS - CPPFLAGS=$PYTHON_CPPFLAGS $CPPFLAGS - NUMPY_INCLUDE_DIR=`echo "import numpy; print numpy.get_include()" | $PYTHON - 2>/dev/null` - AC_SUBST(NUMPY_INCLUDE_DIR) - AC_CHECK_HEADER([${NUMPY_INCLUDE_DIR}/numpy/arrayobject.h], - [NUMPY_HEADER=yes], - [AC_MSG_WARN([Numpy extension header not found])], - [#include "Python.h"]) - - AC_SUBST(NUMPY_HEADER) - CPPFLAGS_SAVE=$CPPFLAGS -fi - - -AM_CONDITIONAL(WITH_PYTHONBINDINGS, test ! -z "$BOOST_PYTHON_LIB" -a "x$NUMPY_HEADER" == "xyes" ) - -dnl Checks for library functions. -AC_FUNC_MMAP - -dnl Include libtool to build shared libraries -dnl LT_INIT - -dnl Check if we should build tests -AC_ARG_ENABLE([tests], AS_HELP_STRING([--enable-tests], [Enable tests])) - -AM_CONDITIONAL([ENABLE_TESTS], [test "$enable_tests" = yes]) - -AC_CONFIG_FILES([Makefile - include/Makefile - src/Makefile - bindings/Makefile - bindings/python/Makefile - test/Makefile - libpsf.pc - libpsf-uninstalled.pc - ]) -AC_OUTPUT - diff --git a/doc/format.txt b/doc/format.txt new file mode 100644 index 0000000..ddac0ba --- /dev/null +++ b/doc/format.txt @@ -0,0 +1,15 @@ +PSF + +Size: Last four bytes (Big-endian) + +nsections = (size - datasize - 12) / 8 + +const char *toc = buf + size - 12 - nsections*8; + +#define SECTION_HEADER 0 +#define SECTION_TYPE 1 +#define SECTION_SWEEP 2 +#define SECTION_TRACE 3 +#define SECTION_VALUE 4 + + diff --git a/include/Makefile.am b/include/Makefile.am deleted file mode 100644 index d147cc9..0000000 --- a/include/Makefile.am +++ /dev/null @@ -1 +0,0 @@ -include_HEADERS = psfdata.h psf.h diff --git a/install-sh b/install-sh deleted file mode 100755 index 8175c64..0000000 --- a/install-sh +++ /dev/null @@ -1,518 +0,0 @@ -#!/bin/sh -# install - install a program, script, or datafile - -scriptversion=2018-03-11.20; # UTC - -# This originates from X11R5 (mit/util/scripts/install.sh), which was -# later released in X11R6 (xc/config/util/install.sh) with the -# following copyright and license. -# -# Copyright (C) 1994 X Consortium -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to -# deal in the Software without restriction, including without limitation the -# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or -# sell copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN -# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC- -# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -# -# Except as contained in this notice, the name of the X Consortium shall not -# be used in advertising or otherwise to promote the sale, use or other deal- -# ings in this Software without prior written authorization from the X Consor- -# tium. -# -# -# FSF changes to this file are in the public domain. -# -# Calling this script install-sh is preferred over install.sh, to prevent -# 'make' implicit rules from creating a file called install from it -# when there is no Makefile. -# -# This script is compatible with the BSD install script, but was written -# from scratch. - -tab=' ' -nl=' -' -IFS=" $tab$nl" - -# Set DOITPROG to "echo" to test this script. - -doit=${DOITPROG-} -doit_exec=${doit:-exec} - -# Put in absolute file names if you don't have them in your path; -# or use environment vars. - -chgrpprog=${CHGRPPROG-chgrp} -chmodprog=${CHMODPROG-chmod} -chownprog=${CHOWNPROG-chown} -cmpprog=${CMPPROG-cmp} -cpprog=${CPPROG-cp} -mkdirprog=${MKDIRPROG-mkdir} -mvprog=${MVPROG-mv} -rmprog=${RMPROG-rm} -stripprog=${STRIPPROG-strip} - -posix_mkdir= - -# Desired mode of installed file. -mode=0755 - -chgrpcmd= -chmodcmd=$chmodprog -chowncmd= -mvcmd=$mvprog -rmcmd="$rmprog -f" -stripcmd= - -src= -dst= -dir_arg= -dst_arg= - -copy_on_change=false -is_target_a_directory=possibly - -usage="\ -Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE - or: $0 [OPTION]... SRCFILES... DIRECTORY - or: $0 [OPTION]... -t DIRECTORY SRCFILES... - or: $0 [OPTION]... -d DIRECTORIES... - -In the 1st form, copy SRCFILE to DSTFILE. -In the 2nd and 3rd, copy all SRCFILES to DIRECTORY. -In the 4th, create DIRECTORIES. - -Options: - --help display this help and exit. - --version display version info and exit. - - -c (ignored) - -C install only if different (preserve the last data modification time) - -d create directories instead of installing files. - -g GROUP $chgrpprog installed files to GROUP. - -m MODE $chmodprog installed files to MODE. - -o USER $chownprog installed files to USER. - -s $stripprog installed files. - -t DIRECTORY install into DIRECTORY. - -T report an error if DSTFILE is a directory. - -Environment variables override the default commands: - CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG - RMPROG STRIPPROG -" - -while test $# -ne 0; do - case $1 in - -c) ;; - - -C) copy_on_change=true;; - - -d) dir_arg=true;; - - -g) chgrpcmd="$chgrpprog $2" - shift;; - - --help) echo "$usage"; exit $?;; - - -m) mode=$2 - case $mode in - *' '* | *"$tab"* | *"$nl"* | *'*'* | *'?'* | *'['*) - echo "$0: invalid mode: $mode" >&2 - exit 1;; - esac - shift;; - - -o) chowncmd="$chownprog $2" - shift;; - - -s) stripcmd=$stripprog;; - - -t) - is_target_a_directory=always - dst_arg=$2 - # Protect names problematic for 'test' and other utilities. 
- case $dst_arg in - -* | [=\(\)!]) dst_arg=./$dst_arg;; - esac - shift;; - - -T) is_target_a_directory=never;; - - --version) echo "$0 $scriptversion"; exit $?;; - - --) shift - break;; - - -*) echo "$0: invalid option: $1" >&2 - exit 1;; - - *) break;; - esac - shift -done - -# We allow the use of options -d and -T together, by making -d -# take the precedence; this is for compatibility with GNU install. - -if test -n "$dir_arg"; then - if test -n "$dst_arg"; then - echo "$0: target directory not allowed when installing a directory." >&2 - exit 1 - fi -fi - -if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then - # When -d is used, all remaining arguments are directories to create. - # When -t is used, the destination is already specified. - # Otherwise, the last argument is the destination. Remove it from $@. - for arg - do - if test -n "$dst_arg"; then - # $@ is not empty: it contains at least $arg. - set fnord "$@" "$dst_arg" - shift # fnord - fi - shift # arg - dst_arg=$arg - # Protect names problematic for 'test' and other utilities. - case $dst_arg in - -* | [=\(\)!]) dst_arg=./$dst_arg;; - esac - done -fi - -if test $# -eq 0; then - if test -z "$dir_arg"; then - echo "$0: no input file specified." >&2 - exit 1 - fi - # It's OK to call 'install-sh -d' without argument. - # This can happen when creating conditional directories. - exit 0 -fi - -if test -z "$dir_arg"; then - if test $# -gt 1 || test "$is_target_a_directory" = always; then - if test ! -d "$dst_arg"; then - echo "$0: $dst_arg: Is not a directory." >&2 - exit 1 - fi - fi -fi - -if test -z "$dir_arg"; then - do_exit='(exit $ret); exit $ret' - trap "ret=129; $do_exit" 1 - trap "ret=130; $do_exit" 2 - trap "ret=141; $do_exit" 13 - trap "ret=143; $do_exit" 15 - - # Set umask so as not to create temps with too-generous modes. - # However, 'strip' requires both read and write access to temps. - case $mode in - # Optimize common cases. - *644) cp_umask=133;; - *755) cp_umask=22;; - - *[0-7]) - if test -z "$stripcmd"; then - u_plus_rw= - else - u_plus_rw='% 200' - fi - cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;; - *) - if test -z "$stripcmd"; then - u_plus_rw= - else - u_plus_rw=,u+rw - fi - cp_umask=$mode$u_plus_rw;; - esac -fi - -for src -do - # Protect names problematic for 'test' and other utilities. - case $src in - -* | [=\(\)!]) src=./$src;; - esac - - if test -n "$dir_arg"; then - dst=$src - dstdir=$dst - test -d "$dstdir" - dstdir_status=$? - else - - # Waiting for this to be detected by the "$cpprog $src $dsttmp" command - # might cause directories to be created, which would be especially bad - # if $src (and thus $dsttmp) contains '*'. - if test ! -f "$src" && test ! -d "$src"; then - echo "$0: $src does not exist." >&2 - exit 1 - fi - - if test -z "$dst_arg"; then - echo "$0: no destination specified." >&2 - exit 1 - fi - dst=$dst_arg - - # If destination is a directory, append the input filename. - if test -d "$dst"; then - if test "$is_target_a_directory" = never; then - echo "$0: $dst_arg: Is a directory" >&2 - exit 1 - fi - dstdir=$dst - dstbase=`basename "$src"` - case $dst in - */) dst=$dst$dstbase;; - *) dst=$dst/$dstbase;; - esac - dstdir_status=0 - else - dstdir=`dirname "$dst"` - test -d "$dstdir" - dstdir_status=$? - fi - fi - - case $dstdir in - */) dstdirslash=$dstdir;; - *) dstdirslash=$dstdir/;; - esac - - obsolete_mkdir_used=false - - if test $dstdir_status != 0; then - case $posix_mkdir in - '') - # Create intermediate dirs using mode 755 as modified by the umask. 
- # This is like FreeBSD 'install' as of 1997-10-28. - umask=`umask` - case $stripcmd.$umask in - # Optimize common cases. - *[2367][2367]) mkdir_umask=$umask;; - .*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;; - - *[0-7]) - mkdir_umask=`expr $umask + 22 \ - - $umask % 100 % 40 + $umask % 20 \ - - $umask % 10 % 4 + $umask % 2 - `;; - *) mkdir_umask=$umask,go-w;; - esac - - # With -d, create the new directory with the user-specified mode. - # Otherwise, rely on $mkdir_umask. - if test -n "$dir_arg"; then - mkdir_mode=-m$mode - else - mkdir_mode= - fi - - posix_mkdir=false - case $umask in - *[123567][0-7][0-7]) - # POSIX mkdir -p sets u+wx bits regardless of umask, which - # is incompatible with FreeBSD 'install' when (umask & 300) != 0. - ;; - *) - # Note that $RANDOM variable is not portable (e.g. dash); Use it - # here however when possible just to lower collision chance. - tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$ - - trap 'ret=$?; rmdir "$tmpdir/a/b" "$tmpdir/a" "$tmpdir" 2>/dev/null; exit $ret' 0 - - # Because "mkdir -p" follows existing symlinks and we likely work - # directly in world-writeable /tmp, make sure that the '$tmpdir' - # directory is successfully created first before we actually test - # 'mkdir -p' feature. - if (umask $mkdir_umask && - $mkdirprog $mkdir_mode "$tmpdir" && - exec $mkdirprog $mkdir_mode -p -- "$tmpdir/a/b") >/dev/null 2>&1 - then - if test -z "$dir_arg" || { - # Check for POSIX incompatibilities with -m. - # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or - # other-writable bit of parent directory when it shouldn't. - # FreeBSD 6.1 mkdir -m -p sets mode of existing directory. - test_tmpdir="$tmpdir/a" - ls_ld_tmpdir=`ls -ld "$test_tmpdir"` - case $ls_ld_tmpdir in - d????-?r-*) different_mode=700;; - d????-?--*) different_mode=755;; - *) false;; - esac && - $mkdirprog -m$different_mode -p -- "$test_tmpdir" && { - ls_ld_tmpdir_1=`ls -ld "$test_tmpdir"` - test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1" - } - } - then posix_mkdir=: - fi - rmdir "$tmpdir/a/b" "$tmpdir/a" "$tmpdir" - else - # Remove any dirs left behind by ancient mkdir implementations. - rmdir ./$mkdir_mode ./-p ./-- "$tmpdir" 2>/dev/null - fi - trap '' 0;; - esac;; - esac - - if - $posix_mkdir && ( - umask $mkdir_umask && - $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir" - ) - then : - else - - # The umask is ridiculous, or mkdir does not conform to POSIX, - # or it failed possibly due to a race condition. Create the - # directory the slow way, step by step, checking for races as we go. - - case $dstdir in - /*) prefix='/';; - [-=\(\)!]*) prefix='./';; - *) prefix='';; - esac - - oIFS=$IFS - IFS=/ - set -f - set fnord $dstdir - shift - set +f - IFS=$oIFS - - prefixes= - - for d - do - test X"$d" = X && continue - - prefix=$prefix$d - if test -d "$prefix"; then - prefixes= - else - if $posix_mkdir; then - (umask=$mkdir_umask && - $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break - # Don't fail if two instances are running concurrently. - test -d "$prefix" || exit 1 - else - case $prefix in - *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;; - *) qprefix=$prefix;; - esac - prefixes="$prefixes '$qprefix'" - fi - fi - prefix=$prefix/ - done - - if test -n "$prefixes"; then - # Don't fail if two instances are running concurrently. 
- (umask $mkdir_umask && - eval "\$doit_exec \$mkdirprog $prefixes") || - test -d "$dstdir" || exit 1 - obsolete_mkdir_used=true - fi - fi - fi - - if test -n "$dir_arg"; then - { test -z "$chowncmd" || $doit $chowncmd "$dst"; } && - { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } && - { test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false || - test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1 - else - - # Make a couple of temp file names in the proper directory. - dsttmp=${dstdirslash}_inst.$$_ - rmtmp=${dstdirslash}_rm.$$_ - - # Trap to clean up those temp files at exit. - trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0 - - # Copy the file name to the temp name. - (umask $cp_umask && $doit_exec $cpprog "$src" "$dsttmp") && - - # and set any options; do chmod last to preserve setuid bits. - # - # If any of these fail, we abort the whole thing. If we want to - # ignore errors from any of these, just make sure not to ignore - # errors from the above "$doit $cpprog $src $dsttmp" command. - # - { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } && - { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } && - { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } && - { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } && - - # If -C, don't bother to copy if it wouldn't change the file. - if $copy_on_change && - old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` && - new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` && - set -f && - set X $old && old=:$2:$4:$5:$6 && - set X $new && new=:$2:$4:$5:$6 && - set +f && - test "$old" = "$new" && - $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1 - then - rm -f "$dsttmp" - else - # Rename the file to the real destination. - $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null || - - # The rename failed, perhaps because mv can't rename something else - # to itself, or perhaps because mv is so ancient that it does not - # support -f. - { - # Now remove or move aside any old file at destination location. - # We try this two ways since rm can't unlink itself on some - # systems and the destination file might be busy for other - # reasons. In this case, the final cleanup might fail but the new - # file should still install successfully. - { - test ! -f "$dst" || - $doit $rmcmd -f "$dst" 2>/dev/null || - { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null && - { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; } - } || - { echo "$0: cannot unlink or rename $dst" >&2 - (exit 1); exit 1 - } - } && - - # Now rename the file to the real destination. 
- $doit $mvcmd "$dsttmp" "$dst" - } - fi || exit 1 - - trap '' 0 - fi -done - -# Local variables: -# eval: (add-hook 'before-save-hook 'time-stamp) -# time-stamp-start: "scriptversion=" -# time-stamp-format: "%:y-%02m-%02d.%02H" -# time-stamp-time-zone: "UTC0" -# time-stamp-end: "; # UTC" -# End: diff --git a/libpsf-uninstalled.pc.in b/libpsf-uninstalled.pc.in deleted file mode 100644 index e4cba00..0000000 --- a/libpsf-uninstalled.pc.in +++ /dev/null @@ -1,12 +0,0 @@ -prefix= -exec_prefix= -libdir=src -includedir=include - -Name: @PACKAGE@ -Description: C++ PSF waveform reader library -Version: @VERSION@ -Requires: -Conflicts: -Libs: ${pcfiledir}/${libdir}/libpsf.la -Cflags: -I${pcfiledir}/${includedir} diff --git a/libpsf.pc.in b/libpsf.pc.in deleted file mode 100644 index 29c6e61..0000000 --- a/libpsf.pc.in +++ /dev/null @@ -1,12 +0,0 @@ -prefix=@prefix@ -exec_prefix=@prefix@ -libdir=@exec_prefix@/lib -includedir=@prefix@/include - -Name: @PACKAGE@ -Description: C++ PSF waveform reader library -Version: @VERSION@ -Requires: -Conflicts: -Libs: -L${libdir} -lpsf -Cflags: -I${includedir} diff --git a/m4/ax_boost_base.m4 b/m4/ax_boost_base.m4 deleted file mode 100644 index 5894d0c..0000000 --- a/m4/ax_boost_base.m4 +++ /dev/null @@ -1,207 +0,0 @@ -##### http://autoconf-archive.cryp.to/ax_boost_base.html -# -# SYNOPSIS -# -# AX_BOOST_BASE([MINIMUM-VERSION]) -# -# DESCRIPTION -# -# Test for the Boost C++ libraries of a particular version (or newer) -# -# If no path to the installed boost library is given the macro -# searchs under /usr, /usr/local, /opt and /opt/local and evaluates -# the $BOOST_ROOT environment variable. Further documentation is -# available at . -# -# This macro calls: -# -# AC_SUBST(BOOST_CPPFLAGS) / AC_SUBST(BOOST_LDFLAGS) -# -# And sets: -# -# HAVE_BOOST -# -# LAST MODIFICATION -# -# 2007-07-28 -# Modified for use in Thrift -# -# COPYLEFT -# -# Copyright (c) 2007 Thomas Porschberg -# -# Copying and distribution of this file, with or without -# modification, are permitted in any medium without royalty provided -# the copyright notice and this notice are preserved. 
- -AC_DEFUN([AX_BOOST_BASE], -[ -AC_ARG_WITH([boost], - AS_HELP_STRING([--with-boost@<:@=DIR@:>@], [use boost (default is yes) - it is possible to specify the root directory for boost (optional)]), - [ - if test "$withval" = "no"; then - want_boost="no" - elif test "$withval" = "yes"; then - want_boost="yes" - ac_boost_path="" - else - want_boost="yes" - ac_boost_path="$withval" - fi - ], - [want_boost="yes"]) - -if test "x$want_boost" = "xyes"; then - boost_lib_version_req=ifelse([$1], ,1.20.0,$1) - boost_lib_version_req_shorten=`expr $boost_lib_version_req : '\([[0-9]]*\.[[0-9]]*\)'` - boost_lib_version_req_major=`expr $boost_lib_version_req : '\([[0-9]]*\)'` - boost_lib_version_req_minor=`expr $boost_lib_version_req : '[[0-9]]*\.\([[0-9]]*\)'` - boost_lib_version_req_sub_minor=`expr $boost_lib_version_req : '[[0-9]]*\.[[0-9]]*\.\([[0-9]]*\)'` - if test "x$boost_lib_version_req_sub_minor" = "x" ; then - boost_lib_version_req_sub_minor="0" - fi - WANT_BOOST_VERSION=`expr $boost_lib_version_req_major \* 100000 \+ $boost_lib_version_req_minor \* 100 \+ $boost_lib_version_req_sub_minor` - AC_MSG_CHECKING(for boostlib >= $boost_lib_version_req) - succeeded=no - - dnl first we check the system location for boost libraries - dnl this location ist chosen if boost libraries are installed with the --layout=system option - dnl or if you install boost with RPM - if test "$ac_boost_path" != ""; then - BOOST_LDFLAGS="-L$ac_boost_path/lib" - BOOST_CPPFLAGS="-I$ac_boost_path/include" - BOOST_ROOT_PATH="$ac_boost_path" - else - for ac_boost_path_tmp in /usr /usr/local /opt /opt/local ; do - if test -d "$ac_boost_path_tmp/include/boost" && test -r "$ac_boost_path_tmp/include/boost"; then - BOOST_LDFLAGS="-L$ac_boost_path_tmp/lib" - BOOST_CPPFLAGS="-I$ac_boost_path_tmp/include" - BOOST_ROOT_PATH="$ac_boost_path_tmp" - break; - fi - done - fi - - CPPFLAGS_SAVED="$CPPFLAGS" - CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS" - export CPPFLAGS - - LDFLAGS_SAVED="$LDFLAGS" - LDFLAGS="$LDFLAGS $BOOST_LDFLAGS" - export LDFLAGS - - export BOOST_ROOT_PATH - - AC_LANG_PUSH(C++) - AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[ - @%:@include - ]], [[ - #if BOOST_VERSION >= $WANT_BOOST_VERSION - // Everything is okay - #else - # error Boost version is too old - #endif - ]])],[ - AC_MSG_RESULT(yes) - succeeded=yes - found_system=yes - ],[ - ]) - AC_LANG_POP([C++]) - - - - dnl if we found no boost with system layout we search for boost libraries - dnl built and installed without the --layout=system option or for a staged(not installed) version - if test "x$succeeded" != "xyes"; then - _version=0 - if test "$ac_boost_path" != ""; then - BOOST_LDFLAGS="-L$ac_boost_path/lib" - if test -d "$ac_boost_path" && test -r "$ac_boost_path"; then - for i in `ls -d $ac_boost_path/include/boost-* 2>/dev/null`; do - _version_tmp=`echo $i | sed "s#$ac_boost_path##" | sed 's/\/include\/boost-//' | sed 's/_/./'` - V_CHECK=`expr $_version_tmp \> $_version` - if test "$V_CHECK" = "1" ; then - _version=$_version_tmp - fi - VERSION_UNDERSCORE=`echo $_version | sed 's/\./_/'` - BOOST_CPPFLAGS="-I$ac_boost_path/include/boost-$VERSION_UNDERSCORE" - done - fi - else - for ac_boost_path in /usr /usr/local /opt /opt/local ; do - if test -d "$ac_boost_path" && test -r "$ac_boost_path"; then - for i in `ls -d $ac_boost_path/include/boost-* 2>/dev/null`; do - _version_tmp=`echo $i | sed "s#$ac_boost_path##" | sed 's/\/include\/boost-//' | sed 's/_/./'` - V_CHECK=`expr $_version_tmp \> $_version` - if test "$V_CHECK" = "1" ; then - _version=$_version_tmp - 
best_path=$ac_boost_path - fi - done - fi - done - - VERSION_UNDERSCORE=`echo $_version | sed 's/\./_/'` - BOOST_CPPFLAGS="-I$best_path/include/boost-$VERSION_UNDERSCORE" - BOOST_LDFLAGS="-L$best_path/lib" - BOOST_ROOT_PATH="$best_path" - - if test "x$BOOST_ROOT" != "x"; then - if test -d "$BOOST_ROOT" && test -r "$BOOST_ROOT" && test -d "$BOOST_ROOT/stage/lib" && test -r "$BOOST_ROOT/stage/lib"; then - version_dir=`expr //$BOOST_ROOT : '.*/\(.*\)'` - stage_version=`echo $version_dir | sed 's/boost_//' | sed 's/_/./g'` - stage_version_shorten=`expr $stage_version : '\([[0-9]]*\.[[0-9]]*\)'` - V_CHECK=`expr $stage_version_shorten \>\= $_version` - if test "$V_CHECK" = "1" ; then - AC_MSG_NOTICE(We will use a staged boost library from $BOOST_ROOT) - BOOST_CPPFLAGS="-I$BOOST_ROOT" - BOOST_LDFLAGS="-L$BOOST_ROOT/stage/lib" - BOOST_ROOT_PATH="$BOOST_ROOT" - fi - fi - fi - fi - - CPPFLAGS="$CPPFLAGS $BOOST_CPPFLAGS" - export CPPFLAGS - LDFLAGS="$LDFLAGS $BOOST_LDFLAGS" - export LDFLAGS - export BOOST_ROOT_PATH - - AC_LANG_PUSH(C++) - AC_COMPILE_IFELSE([AC_LANG_PROGRAM([[ - @%:@include - ]], [[ - #if BOOST_VERSION >= $WANT_BOOST_VERSION - // Everything is okay - #else - # error Boost version is too old - #endif - ]])],[ - AC_MSG_RESULT(yes) - succeeded=yes - found_system=yes - ],[ - ]) - AC_LANG_POP([C++]) - fi - - if test "$succeeded" != "yes" ; then - if test "$_version" = "0" ; then - AC_MSG_WARN([[We could not detect the boost libraries (version $boost_lib_version_req_shorten or higher). If you have a staged boost library (still not installed) please specify \$BOOST_ROOT in your environment and do not give a PATH to --with-boost option. If you are sure you have boost installed, then check your version number looking in . See http://randspringer.de/boost for more documentation.]]) - else - AC_MSG_WARN([Your boost libraries seems to old (version $_version).]) - fi - else - AC_SUBST(BOOST_CPPFLAGS) - AC_SUBST(BOOST_LDFLAGS) - AC_SUBST(BOOST_ROOT_PATH) - AC_DEFINE(HAVE_BOOST,,[define if the Boost library is available]) - fi - - CPPFLAGS="$CPPFLAGS_SAVED" - LDFLAGS="$LDFLAGS_SAVED" -fi - -]) diff --git a/m4/ax_boost_python.m4 b/m4/ax_boost_python.m4 deleted file mode 100644 index bc1152e..0000000 --- a/m4/ax_boost_python.m4 +++ /dev/null @@ -1,89 +0,0 @@ -# =========================================================================== -# http://autoconf-archive.cryp.to/ax_boost_python.html -# =========================================================================== -# -# SYNOPSIS -# -# AX_BOOST_PYTHON -# -# DESCRIPTION -# -# This macro checks to see if the Boost.Python library is installed. It -# also attempts to guess the currect library name using several attempts. -# It tries to build the library name using a user supplied name or suffix -# and then just the raw library. -# -# If the library is found, HAVE_BOOST_PYTHON is defined and -# BOOST_PYTHON_LIB is set to the name of the library. -# -# This macro calls AC_SUBST(BOOST_PYTHON_LIB). -# -# In order to ensure that the Python headers are specified on the include -# path, this macro requires AX_PYTHON to be called. -# -# LAST MODIFICATION -# -# 2008-04-12 -# -# COPYLEFT -# -# Copyright (c) 2008 Michael Tindal -# -# This program is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by the -# Free Software Foundation; either version 2 of the License, or (at your -# option) any later version. 
-# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -# Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program. If not, see . -# -# As a special exception, the respective Autoconf Macro's copyright owner -# gives unlimited permission to copy, distribute and modify the configure -# scripts that are the output of Autoconf when processing the Macro. You -# need not follow the terms of the GNU General Public License when using -# or distributing such scripts, even though portions of the text of the -# Macro appear in them. The GNU General Public License (GPL) does govern -# all other use of the material that constitutes the Autoconf Macro. -# -# This special exception to the GPL applies to versions of the Autoconf -# Macro released by the Autoconf Macro Archive. When you make and -# distribute a modified version of the Autoconf Macro, you may extend this -# special exception to the GPL to apply to your modified version as well. - -AC_DEFUN([AX_BOOST_PYTHON], -[AC_REQUIRE([AX_PYTHON_DEVEL])dnl -AC_CACHE_CHECK(whether the Boost::Python library is available, -ac_cv_boost_python, -[AC_LANG_SAVE - AC_LANG_CPLUSPLUS - CPPFLAGS_SAVE=$CPPFLAGS - CPPFLAGS="$PYTHON_CPPFLAGS $BOOST_CPPFLAGS $CPPFLAGS" - - AC_COMPILE_IFELSE(AC_LANG_PROGRAM([[ - #include - using namespace boost::python; - BOOST_PYTHON_MODULE(test) { throw "Boost::Python test."; }]], - [[return 0;]]), - ac_cv_boost_python=yes, ac_cv_boost_python=no) - AC_LANG_RESTORE - CPPFLAGS="$CPPFLAGS_SAVE" -]) -if test "$ac_cv_boost_python" = "yes"; then - AC_DEFINE(HAVE_BOOST_PYTHON,,[define if the Boost::Python library is available]) - ax_python_lib=boost_python - AC_ARG_WITH([boost-python],AS_HELP_STRING([--with-boost-python],[specify the boost python library or suffix to use]), - [if test "x$with_boost_python" != "xno"; then - ax_python_lib=$with_boost_python - ax_boost_python_lib=boost_python-$with_boost_python - fi]) - for ax_lib in $ax_boost_python_lib boost_python boost_python-mt boost_python-mt-py2.5 boost_python-mt-py2.6; do - AC_CHECK_LIB($ax_lib, exit, [BOOST_PYTHON_LIB=$ax_lib break], [], [$PYTHON_LDFLAGS]) - done - AC_SUBST(BOOST_PYTHON_LIB) -fi -])dnl diff --git a/m4/ax_python.m4 b/m4/ax_python.m4 deleted file mode 100644 index 18216f9..0000000 --- a/m4/ax_python.m4 +++ /dev/null @@ -1,110 +0,0 @@ -# =========================================================================== -# http://autoconf-archive.cryp.to/ax_python.html -# =========================================================================== -# -# SYNOPSIS -# -# AX_PYTHON -# -# DESCRIPTION -# -# This macro does a complete Python development environment check. -# -# It recurses through several python versions (from 2.1 to 2.5 in this -# version), looking for an executable. When it finds an executable, it -# looks to find the header files and library. -# -# It sets PYTHON_BIN to the name of the python executable, -# PYTHON_INCLUDE_DIR to the directory holding the header files, and -# PYTHON_LIB to the name of the Python library. -# -# This macro calls AC_SUBST on PYTHON_BIN (via AC_CHECK_PROG), -# PYTHON_INCLUDE_DIR and PYTHON_LIB. 
-# -# LAST MODIFICATION -# -# 2008-04-12 -# -# COPYLEFT -# -# Copyright (c) 2008 Michael Tindal -# -# This program is free software; you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by the -# Free Software Foundation; either version 2 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -# Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program. If not, see . -# -# As a special exception, the respective Autoconf Macro's copyright owner -# gives unlimited permission to copy, distribute and modify the configure -# scripts that are the output of Autoconf when processing the Macro. You -# need not follow the terms of the GNU General Public License when using -# or distributing such scripts, even though portions of the text of the -# Macro appear in them. The GNU General Public License (GPL) does govern -# all other use of the material that constitutes the Autoconf Macro. -# -# This special exception to the GPL applies to versions of the Autoconf -# Macro released by the Autoconf Macro Archive. When you make and -# distribute a modified version of the Autoconf Macro, you may extend this -# special exception to the GPL to apply to your modified version as well. - -AC_DEFUN([AX_PYTHON], -[AC_MSG_CHECKING(for python build information) -AC_MSG_RESULT([]) - -AC_ARG_VAR(PYTHONBINARY, [Python binary]) - -if test x$PYTHONBINARY == x; then - PYTHONBINARY=python -fi - -AC_PATH_PROG([ax_python_bin], $PYTHONBINARY) - -ax_python_bin=$PYTHONBINARY - -if test x$ax_python_bin != x; then - python_inc_dir=`$ax_python_bin -c "import distutils.sysconfig; print(distutils.sysconfig.get_python_inc())"` - python_lib_dir=`$ax_python_bin -c "import distutils.sysconfig; print(distutils.sysconfig.get_config_vars('LIBDIR')[[0]])"` - python_library=`$ax_python_bin -c "import re, distutils.sysconfig; print(re.match(\"lib(.+)\.so.*\",distutils.sysconfig.get_config_vars('LDLIBRARY')[[0]]).groups(0)[[0]])"` - - AC_CHECK_LIB([$python_library], main, ax_python_lib=$python_library, ax_python_lib=no) - AC_CHECK_HEADER([$python_inc_dir/Python.h], ax_python_header=$python_inc_dir, ax_python_header=no) - - AC_MSG_RESULT([python_bin: $python_bin]) - AC_MSG_RESULT([python_inc_dir: $python_inc_dir]) - AC_MSG_RESULT([python_lib_dir: $python_lib_dir]) - AC_MSG_RESULT([python_library: $python_library]) -fi - - -if test x$ax_python_bin = x; then - ax_python_bin=no -fi -if test x$ax_python_header = x; then - ax_python_header=no -fi -if test x$ax_python_lib = x; then - ax_python_lib=no -fi - -AC_MSG_RESULT([ results of the Python check:]) -AC_MSG_RESULT([ Binary: $ax_python_bin]) -AC_MSG_RESULT([ Library: $ax_python_lib]) -AC_MSG_RESULT([ Include Dir: $ax_python_header]) - -if test x$ax_python_header != xno; then - PYTHON_INCLUDE_DIR=$ax_python_header - AC_SUBST(PYTHON_INCLUDE_DIR) -fi -if test x$ax_python_lib != xno; then - PYTHON_LIB=$ax_python_lib - AC_SUBST(PYTHON_LIB) -fi -])dnl diff --git a/m4/ax_python_devel.m4 b/m4/ax_python_devel.m4 deleted file mode 100644 index a62b860..0000000 --- a/m4/ax_python_devel.m4 +++ /dev/null @@ -1,325 +0,0 @@ -# =========================================================================== -# http://www.gnu.org/software/autoconf-archive/ax_python_devel.html -# 
=========================================================================== -# -# SYNOPSIS -# -# AX_PYTHON_DEVEL([version]) -# -# DESCRIPTION -# -# Note: Defines as a precious variable "PYTHON_VERSION". Don't override it -# in your configure.ac. -# -# This macro checks for Python and tries to get the include path to -# 'Python.h'. It provides the $(PYTHON_CPPFLAGS) and $(PYTHON_LDFLAGS) -# output variables. It also exports $(PYTHON_EXTRA_LIBS) and -# $(PYTHON_EXTRA_LDFLAGS) for embedding Python in your code. -# -# You can search for some particular version of Python by passing a -# parameter to this macro, for example ">= '2.3.1'", or "== '2.4'". Please -# note that you *have* to pass also an operator along with the version to -# match, and pay special attention to the single quotes surrounding the -# version number. Don't use "PYTHON_VERSION" for this: that environment -# variable is declared as precious and thus reserved for the end-user. -# -# This macro should work for all versions of Python >= 2.1.0. As an end -# user, you can disable the check for the python version by setting the -# PYTHON_NOVERSIONCHECK environment variable to something else than the -# empty string. -# -# If you need to use this macro for an older Python version, please -# contact the authors. We're always open for feedback. -# -# LICENSE -# -# Copyright (c) 2009 Sebastian Huber -# Copyright (c) 2009 Alan W. Irwin -# Copyright (c) 2009 Rafael Laboissiere -# Copyright (c) 2009 Andrew Collier -# Copyright (c) 2009 Matteo Settenvini -# Copyright (c) 2009 Horst Knorr -# -# This program is free software: you can redistribute it and/or modify it -# under the terms of the GNU General Public License as published by the -# Free Software Foundation, either version 3 of the License, or (at your -# option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General -# Public License for more details. -# -# You should have received a copy of the GNU General Public License along -# with this program. If not, see . -# -# As a special exception, the respective Autoconf Macro's copyright owner -# gives unlimited permission to copy, distribute and modify the configure -# scripts that are the output of Autoconf when processing the Macro. You -# need not follow the terms of the GNU General Public License when using -# or distributing such scripts, even though portions of the text of the -# Macro appear in them. The GNU General Public License (GPL) does govern -# all other use of the material that constitutes the Autoconf Macro. -# -# This special exception to the GPL applies to versions of the Autoconf -# Macro released by the Autoconf Archive. When you make and distribute a -# modified version of the Autoconf Macro, you may extend this special -# exception to the GPL to apply to your modified version as well. - -#serial 8 - -AU_ALIAS([AC_PYTHON_DEVEL], [AX_PYTHON_DEVEL]) -AC_DEFUN([AX_PYTHON_DEVEL],[ - # - # Allow the use of a (user set) custom python version - # - AC_ARG_VAR([PYTHON_VERSION],[The installed Python - version to use, for example '2.3'. 
This string - will be appended to the Python interpreter - canonical name.]) - - AC_PATH_PROG([PYTHON],[python[$PYTHON_VERSION]]) - if test -z "$PYTHON"; then - AC_MSG_ERROR([Cannot find python$PYTHON_VERSION in your system path]) - PYTHON_VERSION="" - fi - - # - # Check for a version of Python >= 2.1.0 - # - AC_MSG_CHECKING([for a version of Python >= '2.1.0']) - ac_supports_python_ver=`$PYTHON -c "import sys; \ - ver = sys.version.split ()[[0]]; \ - print (ver >= '2.1.0')"` - if test "$ac_supports_python_ver" != "True"; then - if test -z "$PYTHON_NOVERSIONCHECK"; then - AC_MSG_RESULT([no]) - AC_MSG_FAILURE([ -This version of the AC@&t@_PYTHON_DEVEL macro -doesn't work properly with versions of Python before -2.1.0. You may need to re-run configure, setting the -variables PYTHON_CPPFLAGS, PYTHON_LDFLAGS, PYTHON_SITE_PKG, -PYTHON_EXTRA_LIBS and PYTHON_EXTRA_LDFLAGS by hand. -Moreover, to disable this check, set PYTHON_NOVERSIONCHECK -to something else than an empty string. -]) - else - AC_MSG_RESULT([skip at user request]) - fi - else - AC_MSG_RESULT([yes]) - fi - - # - # if the macro parameter ``version'' is set, honour it - # - if test -n "$1"; then - AC_MSG_CHECKING([for a version of Python $1]) - ac_supports_python_ver=`$PYTHON -c "import sys; \ - ver = sys.version.split ()[[0]]; \ - print (ver $1)"` - if test "$ac_supports_python_ver" = "True"; then - AC_MSG_RESULT([yes]) - else - AC_MSG_RESULT([no]) - AC_MSG_ERROR([this package requires Python $1. -If you have it installed, but it isn't the default Python -interpreter in your system path, please pass the PYTHON_VERSION -variable to configure. See ``configure --help'' for reference. -]) - PYTHON_VERSION="" - fi - fi - - # - # Check if you have distutils, else fail - # - AC_MSG_CHECKING([for the distutils Python package]) - ac_distutils_result=`$PYTHON -c "import distutils" 2>&1` - if test -z "$ac_distutils_result"; then - AC_MSG_RESULT([yes]) - else - AC_MSG_RESULT([no]) - AC_MSG_ERROR([cannot import Python module "distutils". -Please check your Python installation. The error was: -$ac_distutils_result]) - PYTHON_VERSION="" - fi - - # - # Check for Python include path - # - AC_MSG_CHECKING([for Python include path]) - if test -z "$PYTHON_CPPFLAGS"; then - python_path=`$PYTHON -c "import distutils.sysconfig; \ - print (distutils.sysconfig.get_python_inc ());"` - if test -n "${python_path}"; then - python_path="-I$python_path" - fi - PYTHON_CPPFLAGS=$python_path - fi - AC_MSG_RESULT([$PYTHON_CPPFLAGS]) - AC_SUBST([PYTHON_CPPFLAGS]) - - # - # Check for Python library path - # - AC_MSG_CHECKING([for Python library path]) - if test -z "$PYTHON_LDFLAGS"; then - # (makes two attempts to ensure we've got a version number - # from the interpreter) - ac_python_version=`cat<]], - [[Py_Initialize();]]) - ],[pythonexists=yes],[pythonexists=no]) - AC_LANG_POP([C]) - # turn back to default flags - CPPFLAGS="$ac_save_CPPFLAGS" - LIBS="$ac_save_LIBS" - - AC_MSG_RESULT([$pythonexists]) - - if test ! "x$pythonexists" = "xyes"; then - AC_MSG_FAILURE([ - Could not link test program to Python. Maybe the main Python library has been - installed in some non-standard library path. If so, pass it to configure, - via the LDFLAGS environment variable. - Example: ./configure LDFLAGS="-L/usr/non-standard-path/python/lib" - ============================================================================ - ERROR! - You probably have to install the development version of the Python package - for your distribution. The exact name of this package varies among them. 
- ============================================================================ - ]) - PYTHON_VERSION="" - fi - - # - # all done! - # -]) diff --git a/m4/ax_python_module.m4 b/m4/ax_python_module.m4 deleted file mode 100644 index bd70a06..0000000 --- a/m4/ax_python_module.m4 +++ /dev/null @@ -1,49 +0,0 @@ -# =========================================================================== -# http://www.gnu.org/software/autoconf-archive/ax_python_module.html -# =========================================================================== -# -# SYNOPSIS -# -# AX_PYTHON_MODULE(modname[, fatal]) -# -# DESCRIPTION -# -# Checks for Python module. -# -# If fatal is non-empty then absence of a module will trigger an error. -# -# LICENSE -# -# Copyright (c) 2008 Andrew Collier -# -# Copying and distribution of this file, with or without modification, are -# permitted in any medium without royalty provided the copyright notice -# and this notice are preserved. This file is offered as-is, without any -# warranty. - -#serial 5 - -AU_ALIAS([AC_PYTHON_MODULE], [AX_PYTHON_MODULE]) -AC_DEFUN([AX_PYTHON_MODULE],[ - if test -z $PYTHON; - then - PYTHON="python" - fi - PYTHON_NAME=`basename $PYTHON` - AC_MSG_CHECKING($PYTHON_NAME module: $1) - $PYTHON -c "import $1" 2>/dev/null - if test $? -eq 0; - then - AC_MSG_RESULT(yes) - eval AS_TR_CPP(HAVE_PYMOD_$1)=yes - else - AC_MSG_RESULT(no) - eval AS_TR_CPP(HAVE_PYMOD_$1)=no - # - if test -n "$2" - then - AC_MSG_ERROR(failed to find required module $1) - exit 1 - fi - fi -]) diff --git a/spkg-install b/spkg-install deleted file mode 100755 index 8c9a339..0000000 --- a/spkg-install +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/sh - -if [ "$SPKG_LOCAL" = "" ]; then - echo "SPKG_LOCAL undefined ... exiting"; - echo "Maybe run 'qsnake --shell'?" 
- exit 1 -fi - -set -e - -unset RM -unset MAKEFLAGS - -./autogen.sh --prefix="$SPKG_LOCAL" --with-boost=$SPKG_LOCAL --with-python -make -make install diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt new file mode 100644 index 0000000..069eecd --- /dev/null +++ b/src/CMakeLists.txt @@ -0,0 +1,35 @@ + +set(CMAKE_POSITION_INDEPENDENT_CODE ON) +include(GNUInstallDirs) + +set( BOOST_ROOT $ENV{BOOST_LOC} CACHE PATH "Boost library path" ) +FIND_PACKAGE( Boost COMPONENTS program_options REQUIRED ) + +include_directories(${BOOST_INCLUDE_DIRS}) +#include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../include) + +file(GLOB PSF_SOURCE_FILES "*.cc") +file(GLOB PSF_INCLUDE_FILES "${CMAKE_CURRENT_SOURCE_DIR}/../include/*.h") +set(PSF_INCLUDE "${CMAKE_CURRENT_SOURCE_DIR}/../include") + +# dynamic library +add_library(psf SHARED ${PSF_SOURCE_FILES}) +set_target_properties(psf PROPERTIES VERSION ${PROJECT_VERSION}) +set_target_properties(psf PROPERTIES SOVERSION 1) +set_target_properties(psf PROPERTIES PUBLIC_HEADER "${PSF_INCLUDE_FILES}") +set_target_properties(psf PROPERTIES INSTALL_DIR "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}") +target_include_directories(psf PUBLIC ${PSF_INCLUDE}) + +# static library +add_library(psf_static STATIC ${PSF_SOURCE_FILES}) +target_include_directories(psf_static PRIVATE ${PSF_INCLUDE}) + +configure_file(libpsf.pc.in libpsf.pc @ONLY) + + +install(TARGETS psf psf_static + ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} + LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} + PUBLIC_HEADER DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) + +install(FILES ${CMAKE_BINARY_DIR}/src/libpsf.pc DESTINATION ${CMAKE_INSTALL_LIBDIR}/pkgconfig) diff --git a/src/Makefile.am b/src/Makefile.am deleted file mode 100644 index c538e7f..0000000 --- a/src/Makefile.am +++ /dev/null @@ -1,20 +0,0 @@ -lib_LTLIBRARIES = libpsf.la - -bin_PROGRAMS = psftest psftestraw -psftest_SOURCES = psftest.cc -psftest_CXXFLAGS = -I../include ${BOOST_CPPFLAGS} -psftest_LDFLAGS = libpsf.la - -psftestraw_SOURCES = psftest.cc -psftestraw_CXXFLAGS = -I../include ${BOOST_CPPFLAGS} -psftestraw_LDFLAGS = libpsf.la - -libpsf_la_SOURCES = psf.cc psfdata.cc psfproperty.cc psfchunk.cc \ - psfcontainer.cc psfindexedcontainer.cc psfgroup.cc psffile.cc \ - psftype.cc psfstruct.cc psfsections.cc psftrace.cc \ - psfnonsweepvalue.cc psfsweepvalue.cc psfpropertyblock.cc - -libpsf_la_CXXFLAGS = \ - -I../include -fPIC - - diff --git a/src/libpsf.pc.in b/src/libpsf.pc.in new file mode 100644 index 0000000..9b15f62 --- /dev/null +++ b/src/libpsf.pc.in @@ -0,0 +1,12 @@ +prefix=@CMAKE_INSTALL_PREFIX@ +exec_prefix=@CMAKE_INSTALL_PREFIX@ +libdir=${exec_prefix}/@CMAKE_INSTALL_LIBDIR@ +includedir=${prefix}/@CMAKE_INSTALL_INCLUDEDIR@ + +Name: @PROJECT_NAME@ +Description: @PROJECT_DESCRIPTION@ +Version: @PROJECT_VERSION@ + +Requires: +Libs: -L${libdir} -lmylib +Cflags: -I${includedir} diff --git a/src/psffile.cc b/src/psffile.cc index 54af581..85d6aa6 100644 --- a/src/psffile.cc +++ b/src/psffile.cc @@ -20,47 +20,30 @@ PSFFile::PSFFile(std::string filename) : } PSFFile::~PSFFile() { - if(m_header) - delete(m_header); - if(m_types) - delete(m_types); - if(m_sweeps) - delete(m_sweeps); - if(m_traces) - delete(m_traces); - if(m_sweepvalues) - delete(m_sweepvalues); - if(m_nonsweepvalues) - delete(m_nonsweepvalues); + if (m_header) + delete (m_header); + if (m_types) + delete (m_types); + if (m_sweeps) + delete (m_sweeps); + if (m_traces) + delete (m_traces); + if (m_sweepvalues) + delete (m_sweepvalues); + if (m_nonsweepvalues) + delete (m_nonsweepvalues); 
close(); } -void PSFFile::deserialize(const char *buf, int size) { - // Last word contains the size of the data - uint32_t datasize; - datasize = GET_INT32(buf+size-4); - - // Read section index table - - std::map<int, Section> sections; - - int section_num = 0; +SectionMap PSFFile::load_sections(const char *buf, int size){ + std::vector<Section>
sections; uint32_t section_offset = 4; - Section section; - - std::vector all_sections = { - HeaderSection::type, - TypeSection::type, - SweepSection::type, - TraceSection::type, - ValueSectionSweep::type, - ValueSectionNonSweep::type - }; + int section_num = 0; while ( section_offset < size ){ + Section section; uint32_t section_type = GET_INT32(buf + section_offset); - if ( !(std::find(all_sections.begin(), all_sections.end(), section_type) != - all_sections.end()) ) + if ( ! (section_type == HeaderSection::type)) break; section.n = section_num; section.offset = section_offset; @@ -68,44 +51,111 @@ void PSFFile::deserialize(const char *buf, int size) { uint32_t section_end = GET_INT32(buf + section_offset + 4); section.size = section_end - section_offset; - sections[section_num] = section; + sections.push_back(section); section_num++; section_offset = section_end; } + if (sections.size() < 3){ + throw InvalidFileError(); + } m_header = new HeaderSection(); - m_header->deserialize(buf + sections[SECTION_HEADER].offset, sections[SECTION_HEADER].offset); + m_header->deserialize(buf + sections[SECTION_HEADER].offset, + sections[SECTION_HEADER].offset); + + int num_sweep_points = 0; + bool has_sweep = get_header_properties().hasprop("PSF sweep points"); + if (has_sweep) + num_sweep_points = get_header_properties().find("PSF sweep points"); + + if (num_sweep_points == 0) + sections[2].n = SECTION_VALUE; + + SectionMap section_map; + for (auto section: sections) + section_map[section.n] = section; + + return section_map; +} + +SectionMap PSFFile::load_table_of_contents(const char *buf, int size) { + // Last word contains the size of the data + uint32_t datasize; + datasize = GET_INT32(buf + size - 4); + + int nsections = (size - datasize - 12) / 8; + int lastoffset = 0, lastsectionnum = -1; + const char *toc = buf + size - 12 - nsections * 8; + + SectionMap section_map; + + for (int i = 0; i < nsections; i++) { + Section section; + section.n = GET_INT32(toc + 8 * i); + section.offset = GET_INT32(toc + 8 * i + 4); + + if (i > 0) + section_map[lastsectionnum].size = section.offset - lastoffset; + + if (i == nsections - 1) + section.size = size - section.offset; + + section_map[section.n] = section; + + lastoffset = section.offset; + lastsectionnum = section.n; + } + + m_header = new HeaderSection(); + m_header->deserialize(buf + section_map[SECTION_HEADER].offset, + section_map[SECTION_HEADER].offset); + + return section_map; +} + +void PSFFile::deserialize(const char *buf, int size) { + // Read section index table + SectionMap sections; + if (is_done()) { + sections = load_table_of_contents(buf, size); + } else { + sections = load_sections(buf, size); + } // Read types if (sections.find(SECTION_TYPE) != sections.end()) { - m_types = new TypeSection(); - m_types->deserialize(buf + sections[SECTION_TYPE].offset, sections[SECTION_TYPE].offset); + m_types = new TypeSection(); + m_types->deserialize(buf + sections[SECTION_TYPE].offset, + sections[SECTION_TYPE].offset); } // Read sweeps - if (sections.find(SECTION_SWEEP) != sections.end()) { - m_sweeps = new SweepSection(this); - m_sweeps->deserialize(buf + sections[SECTION_SWEEP].offset, sections[SECTION_SWEEP].offset); + if (sections.find(SECTION_SWEEP) != sections.end()) { + m_sweeps = new SweepSection(this); + m_sweeps->deserialize(buf + sections[SECTION_SWEEP].offset, + sections[SECTION_SWEEP].offset); } // Read traces - if (sections.find(SECTION_TRACE) != sections.end()) { - m_traces = new TraceSection(this); - m_traces->deserialize(buf + 
+    if (sections.find(SECTION_TRACE) != sections.end()) {
+        m_traces = new TraceSection(this);
+        m_traces->deserialize(buf + sections[SECTION_TRACE].offset,
+                              sections[SECTION_TRACE].offset);
     }
 
     // Read values
-    if (sections.find(SECTION_VALUE) != sections.end()) {
-        if(m_sweeps != NULL) {
-            m_sweepvalues = new ValueSectionSweep(this);
-            m_sweepvalues->deserialize(buf + sections[SECTION_VALUE].offset, sections[SECTION_VALUE].offset);
-        } else {
-            m_nonsweepvalues = new ValueSectionNonSweep(this);
-            m_nonsweepvalues->deserialize(buf + sections[SECTION_VALUE].offset, sections[SECTION_VALUE].offset);
-        }
+    if (sections.find(SECTION_VALUE) != sections.end()) {
+        if (m_sweeps != NULL) {
+            m_sweepvalues = new ValueSectionSweep(this);
+            m_sweepvalues->deserialize(buf + sections[SECTION_VALUE].offset,
+                                       sections[SECTION_VALUE].offset);
+        } else {
+            m_nonsweepvalues = new ValueSectionNonSweep(this);
+            m_nonsweepvalues->deserialize(buf + sections[SECTION_VALUE].offset,
+                                          sections[SECTION_VALUE].offset);
+        }
     }
-
 }
 
 void PSFFile::open() {
@@ -117,12 +167,8 @@ void PSFFile::open() {
     m_size = lseek(m_fd, 0, SEEK_END);
 
     m_buffer = (char *)mmap(0, m_size, PROT_READ, MAP_SHARED, m_fd, 0);
-
-    bool valid = validate();
-    if(validate())
+    deserialize((const char *)m_buffer, m_size);
-    else
-        throw InvalidFileError();
 }
 
 void PSFFile::close() {
@@ -149,13 +195,8 @@ bool PSFFile::is_done() const {
     clarissa[8]=0;
 
     return !strcmp(clarissa, "Clarissa");
-}
-
-bool PSFFile::validate () const {
-    return true;
 }
-
 NameList PSFFile::get_param_names() const {
     if (m_sweeps != NULL)
         return m_sweeps->get_names();
diff --git a/src/psfinternal.h b/src/psfinternal.h
index df466f9..83943b5 100644
--- a/src/psfinternal.h
+++ b/src/psfinternal.h
@@ -64,6 +64,7 @@ typedef std::vector Filter;
 typedef std::vector NameList;
 typedef std::vector SweepValueList;
 typedef std::map PropertyMap;
+typedef std::map<int, Section> SectionMap;
 #ifdef HAVE_TR1_UNORDERED_MAP
 typedef std::tr1::unordered_map TraceIDOffsetMap;
 typedef std::tr1::unordered_map NameIndexMap;
@@ -628,6 +629,8 @@ class PSFFile {
     std::string m_filename;
 
 private:
+    SectionMap load_sections(const char *buf, int size);
+    SectionMap load_table_of_contents(const char *buf, int size);
     void deserialize(const char *buf, int size);
 
     int m_fd;
diff --git a/src/psfreader.cc b/src/psfreader.cc
deleted file mode 100644
index eeb24fe..0000000
--- a/src/psfreader.cc
+++ /dev/null
@@ -1,48 +0,0 @@
-#include "psf.h"
-#include "psfdata.h"
-#include "psfinternal.h"
-
-#include <iostream>
-
-
-Chunk * ValueSectionNonSweep::child_factory(int chunktype) {
-    if(NonSweepValue::ischunk(chunktype))
-        return new NonSweepValue(psf);
-    else {
-        std::cerr << "Unexpected chunktype: " << chunktype << std::endl;
-        throw IncorrectChunk(chunktype);
-    }
-}
-
-PSFData* ValueSectionNonSweep::get_value(std::string name) {
-    return dynamic_cast<NonSweepValue &>(get_child(name)).get_value();
-}
-
-int NonSweepValue::deserialize(const char *buf) {
-    const char *startbuf = buf;
-
-    buf += Chunk::deserialize(buf);
-
-    buf += id.deserialize(buf);
-    buf += name.deserialize(buf);
-    buf += valuetypeid.deserialize(buf);
-
-    value = psf->types->get_typedef(valuetypeid.value).get_data_object();
-
-    buf += value->deserialize(buf);
-
-    // Read optional properties
-    while(true) {
-        int chunktype = GET_INT32(buf);
-
-        if(Property::ischunk(chunktype)) {
-            Property prop;
-            buf += prop.deserialize(buf);
-            properties.push_back(prop);
-        } else
-            break;
-    }
-
-    return buf - startbuf;
-}
-
diff --git a/src/psftestraw.cc b/src/psftestraw.cc
deleted file mode 100644
index 817df91..0000000
--- a/src/psftestraw.cc
+++ /dev/null
@@ -1,27 +0,0 @@
-#include "psf.h"
-
-#include <iostream>
-
-int main(int argc, char *argv[]) {
-    char * filename = argv[1];
-
-    try{
-        PSFDataSet data(filename);
-        data.open();
-
-
-        PSFDoubleVector* vout = (PSFDoubleVector *) data.get_signal_vector("i(vvdd)");
-
-        // for (auto prop: data.get_header_properties()){
-        //     std::cout << prop.first << ": \t" << *prop.second << std::endl;
-        // }
-
-        // for (auto i = vout->begin(); i != vout->end(); ++i)
-        //     std::cout << *i << ' ';
-
-        std::cout << "\nNumber of time points = " << vout->size() << std::endl;
-        data.close();
-    }catch (IncorrectChunk &exc){
-        std::cerr << "Exception caught " << exc.what() << "\n";
-    }
-}
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
new file mode 100644
index 0000000..cacb360
--- /dev/null
+++ b/test/CMakeLists.txt
@@ -0,0 +1,25 @@
+add_executable(psftestraw psftestraw.cc)
+target_include_directories(psftestraw PRIVATE $)
+target_link_libraries(psftestraw psf)
+
+# https://github.com/dlaperriere/cmake_cppunit/blob/master/cppunit/cmake_modules/FindCPPUNIT.cmake
+if(CMAKE_PROJECT_NAME STREQUAL PROJECT_NAME AND BUILD_TESTING)
+
+    FIND_PATH(CPPUNIT_INCLUDE_DIR cppunit/TestCase.h PATHS ${CMAKE_INSTALL_INCLUDEDIR})
+
+    if( CPPUNIT_INCLUDE_DIR )
+        MESSAGE( STATUS "cppunit found at: ${CPPUNIT_INCLUDE_DIR}")
+        FIND_LIBRARY(CPPUNIT_LIBRARY cppunit ${CPPUNIT_INCLUDE_DIR}/../lib)
+
+        add_executable(test_psfdataset test_psfdataset.cc)
+        target_link_libraries( test_psfdataset psf ${CPPUNIT_LIBRARY} ${CMAKE_DL_LIBS} )
+        target_include_directories(test_psfdataset PRIVATE $)
+        target_include_directories(test_psfdataset PRIVATE ${CPPUNIT_INCLUDE_DIR})
+
+        add_test(NAME psfdataset COMMAND test_psfdataset WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}")
+
+    else( CPPUNIT_INCLUDE_DIR )
+        MESSAGE( WARNING "cppunit not found: skipping unit tests")
+    endif()
+
+endif()
\ No newline at end of file
diff --git a/test/Makefile.am b/test/Makefile.am
deleted file mode 100644
index aa7b1d3..0000000
--- a/test/Makefile.am
+++ /dev/null
@@ -1,8 +0,0 @@
-bin_PROGRAMS = test_psfdataset
-test_psfdataset_SOURCES = test_psfdataset.cc
-test_psfdataset_CXXFLAGS = -I../include ${BOOST_CPPFLAGS}
-test_psfdataset_LDFLAGS = -L../src -lpsf
-#test_psfdataset_LDFLAGS += -lcppunit -ldl
-test_psfdataset_LDFLAGS += -ldl
-
-
diff --git a/test/data/tran.tran b/test/data/tran.tran
new file mode 100644
index 0000000..55a7a6e
Binary files /dev/null and b/test/data/tran.tran differ
diff --git a/test/psftestraw.cc b/test/psftestraw.cc
new file mode 100644
index 0000000..a5ba8b6
--- /dev/null
+++ b/test/psftestraw.cc
@@ -0,0 +1,34 @@
+#include "psf.h"
+
+#include <iostream>
+
+int main(int argc, char *argv[]) {
+    char * filename = argv[1];
+
+    try{
+        PSFDataSet data(filename);
+        data.open();
+
+
+        for (auto prop: data.get_header_properties()){
+            std::cout << prop.first << ": \t" << *prop.second << std::endl;
+        }
+
+        for (auto prop: data.get_signal_names()){
+            std::cout << prop << std::endl;
+        }
+
+        std::cout << "vin = " << data.get_signal_scalar("vin") << std::endl;
+        std::cout << "vout = " << data.get_signal_scalar("vout") << std::endl;
+
+        // PSFDoubleVector* vout = (PSFDoubleVector *) data.get_signal_vector("vin");
+
+        // for (auto i = vout->begin(); i != vout->end(); ++i)
+        //     std::cout << *i << ' ';
+
+        // std::cout << "\nNumber of time points = " << vout->size() << std::endl;
+        data.close();
+    }catch (IncorrectChunk &exc){
+        std::cerr << "Exception caught " << exc.what() << "\n";
+    }
+}
diff --git a/test/test_psfdataset.cc b/test/test_psfdataset.cc
index 1fd0f4c..82cad90 100644
--- a/test/test_psfdataset.cc
+++ b/test/test_psfdataset.cc
@@ -71,12 +71,12 @@ class TestPSFDataSet : public CPPUNIT_NS::TestCase {
     void test_open_psfascii();
 
 private:
-    std::auto_ptr<PSFDataSet> m_dcop_ds, m_tran_ds;
+    std::unique_ptr<PSFDataSet> m_dcop_ds, m_tran_ds;
 };
 
 void TestPSFDataSet::setUp() {
-    m_dcop_ds = std::auto_ptr<PSFDataSet>(new PSFDataSet("data/dcOp.dc"));
-    m_tran_ds = std::auto_ptr<PSFDataSet>(new PSFDataSet("data/tran.tran"));
+    m_dcop_ds = std::unique_ptr<PSFDataSet>(new PSFDataSet("data/dcOp.dc"));
+    m_tran_ds = std::unique_ptr<PSFDataSet>(new PSFDataSet("data/tran.tran"));
 }
 
 // DCOP data set tests
@@ -123,7 +123,7 @@ void TestPSFDataSet::test_tran_get_nsweeps() {
 
 void TestPSFDataSet::test_tran_get_sweep_npoints() {
     // test tran
-    CPPUNIT_ASSERT_EQUAL(m_tran_ds->get_sweep_npoints(), 24942);
+    CPPUNIT_ASSERT_EQUAL(m_tran_ds->get_sweep_npoints(), 76);
}
 
 void TestPSFDataSet::test_tran_get_sweep_values() {
@@ -131,8 +131,8 @@ void TestPSFDataSet::test_tran_get_sweep_values() {
     stringvector_t names = m_tran_ds->get_signal_names();
 
     // Get signal vectors
-    for(stringvector_iter_t name_iter = names.begin(); name_iter != names.end(); name_iter++) {
-        const PSFVector* datavector = m_tran_ds->get_signal_vector("in");
+    for(auto name: names) {
+        const PSFVector* datavector = m_tran_ds->get_signal_vector(name);
         delete(datavector);
     }
 }