Compare commits
124 Commits
proxyface.
...
master
SHA1 of the 124 commits in this comparison:

2fd7c9ddc1, a55edf1f53, f5db57dd54, 7976af7ae0, db3c9c803d, 182a840112, 1903f3bc98, 1cd48dc8fb,
8090e269c6, ed44b3f321, a1f5a5551f, 55a4f7160e, 59201fd920, 15204811b7, ace7775c71, 36186932f2,
eafeb966d2, 4160481382, 2225e46a3b, a71a30824b, 1c6965a891, 3616424576, fa1f9e757f, 0c095ff2b8,
d797b4d9be, e0f4b004d1, 1cbcd41dbe, fe905b97f9, 38d5f118cc, 723ed15fab, dc486d3597, b6008dfa7f,
b99980c84d, e0786e1eba, 7be5e08627, ec28e9ed3c, 589dc56a1a, c6d35bb70e, d3744a8c51, 79eabef896,
28f5e1c1b5, f0440aa08d, 6ebf3f996e, 7b8315eb2c, 09062acd8a, 2e37a0634d, 1767ffb39d, cb8217aa61,
af99ea712a, 5967ca836f, da6a45880f, 5653a7eb66, 12e1e88b74, 1d82b57238, 378d8f7f26, edd53ea893,
76d283b4db, 40ae220671, 2ab9080dc3, ff210745f0, 70b2da34cd, 79cb4c41ba, fb78ec1507, bcc66d48d4,
7b5b8bd4ae, 0bbe8d9171, 2585b95ff1, 40cc025971, 6b1bdb8d85, d7f4952012, 50baa546a2, fd931786fd,
1afcd433e2, ba27dee9fa, 9eff34e7fd, 0bac931e76, 789b5dcaba, 234ecb3ad1, b9ed82a10b, 22815c914c,
22d3685846, 4228168aba, 4e61bf0846, 1ee683812c, 3dc73a5e94, b7a28239d7, ba0779fe0b, 6b74c744d9,
f038e6a03e, 9f77c5ed28, 412cc42f75, 730abe7ee2, ab0cbcd43b, 10710251f9, 60cb6b7f8d, bcce590acc,
a9bfc20a1b, 70fd26bcbd, 1b93a16f53, 0bc4881e75, 5619a9688b, 00ba31aa98, a0c134287c, 4a4a2b3f93,
527a4dbca3, cc80e214d7, df9684f492, b283cc88c1, 6944b919b9, fec3c4471b, 165a03bb29, a0208dabec,
82286e52eb, 55972403fa, 26dc507d68, d760542ebb, dcab643014, 8a04f708cb, c61d901f32, 1d96325b1c,
c1f1264812, 179f8f98c6, 0f46d7ae7a, 7176effe41
136
INSTALL
Normal file
@@ -0,0 +1,136 @@
|
||||
Install Software
|
||||
================
|
||||
|
||||
There are prebuilt software packages in the repositories; look in
|
||||
https://repository.mrw.sh for your operating system.
|
||||
|
||||
Note: The PublicKey changed on November 1st, 2015; please import the new
|
||||
key.
|
||||
|
||||
Linux
|
||||
-----
|
||||
|
||||
There are package repositories for most distributions at https://repository.mrw.sh.
|
||||
|
||||
### Ubuntu and Debian
|
||||
|
||||
To accept my signature, install my key:
|
||||
|
||||
wget -O- https://repository.mrw.sh/PublicKey \
|
||||
| sudo apt-key add -
|
||||
|
||||
You can install all software using your package manager (`apt`); first install the repository:
|
||||
|
||||
sudo apt-get install -y wget software-properties-common apt-transport-https
|
||||
sudo apt-add-repository https://repository.mrw.sh
|
||||
sudo apt-get update -y
|
||||
Then you can install any project you wish, e.g. [`webtester`](https://mrw.sh/development/webtester):
|
||||
|
||||
sudo apt-get install webtester -y
|
||||
|
||||
### OpenSUSE
|
||||
|
||||
To accept my signature, install my key:
|
||||
|
||||
wget https://repository.mrw.sh/PublicKey
|
||||
rpm --import PublicKey
|
||||
|
||||
You can use your package manager (`zypper` or _YaST_). First install the repository:
|
||||
|
||||
zypper ar https://repository.mrw.sh/opensuse/marc-waeckerlin.repo
|
||||
|
||||
Then install packages from the repository, e.g. [`webtester`](https://mrw.sh/development/webtester):
|
||||
|
||||
zypper install webtester
|
||||
|
||||
In case of trouble: Check if your version of OpenSUSE is supported,
|
||||
use `lsb_release -rs` to get the version of your OpenSUSE:
|
||||
https://repository.mrw.sh/opensuse
|
||||
|
||||
### Fedora
|
||||
|
||||
To accept my signature, install my key:
|
||||
|
||||
wget https://repository.mrw.sh/PublicKey
|
||||
rpm --import PublicKey
|
||||
|
||||
You can use your package manager (`dnf`). First install the repository:
|
||||
|
||||
dnf install 'dnf-command(config-manager)'
|
||||
dnf config-manager --add-repo https://repository.mrw.sh/fedora/marc-waeckerlin.repo
|
||||
|
||||
Then install packages from the repository, e.g. [`webtester`](https://mrw.sh/development/webtester):
|
||||
|
||||
dnf install webtester
|
||||
|
||||
In case of trouble: Check if your version of Fedora is supported, use `lsb_release -rs` to get the version of your Fedora: https://repository.mrw.sh/fedora
|
||||
|
||||
### CentOS
|
||||
|
||||
To accept my signature, install my key:
|
||||
|
||||
wget https://repository.mrw.sh/PublicKey
|
||||
rpm --import PublicKey
|
||||
|
||||
You can use your package manager (`yum`). First install the repository:
|
||||
|
||||
yum install wget
|
||||
wget -O/etc/yum.repos.d/marc-waeckerlin.repo https://repository.mrw.sh/centos/marc-waeckerlin.repo
|
||||
|
||||
Then install packages from the repository, e.g. [`webtester`](https://mrw.sh/development/webtester):
|
||||
|
||||
yum install webtester
|
||||
|
||||
In case of trouble: Check if your version of CentOS is supported, use `lsb_release -rs` to get the version of your CentOS: https://repository.mrw.sh/centos
|
||||
|
||||
### Mageia
|
||||
|
||||
To accept my signature, install my key:
|
||||
|
||||
wget https://repository.mrw.sh/PublicKey
|
||||
rpm --import PublicKey
|
||||
|
||||
You can use your package manager (`dnf`). First install the repository:
|
||||
|
||||
dnf install 'dnf-command(config-manager)'
|
||||
dnf config-manager --add-repo https://repository.mrw.sh/mageia/marc-waeckerlin.repo
|
||||
|
||||
Then install packages from the repository, e.g. [`webtester`](https://mrw.sh/development/webtester):
|
||||
|
||||
dnf install webtester
|
||||
|
||||
### Other Linux Distributions
|
||||
|
||||
Either use the nearest possible distribution, or use alien to convert from another distribution to your preferred package format, or compile the project yourself (see below).
|
||||
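For example, on an RPM-based system you could convert a downloaded Debian package with alien (the package file name below is only illustrative):

    sudo alien --to-rpm webtester_1.0.2_amd64.deb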
|
||||
Windows
|
||||
-------
|
||||
|
||||
Download from https://repository.mrw.sh/windows
|
||||
|
||||
MacOSX
|
||||
------
|
||||
|
||||
You need e.g. MacPorts to compile. Install the following dependencies:
|
||||
|
||||
sudo port install subversion svn2cl doxygen graphviz cppunit libtool boost log4cxx qt5-mac
|
||||
|
||||
Fix libtool-bug:
|
||||
|
||||
sudo ln -s /opt/local/bin/glibtoolize /opt/local/bin/libtoolize
|
||||
|
||||
Compile from Source
|
||||
-------------------
|
||||
|
||||
For all unsupported operating systems, including MacOSX.
|
||||
|
||||
To compile, please download the tar-sources from: https://repository.mrw.sh/sources
|
||||
|
||||
Then untar the package and use the common commands, e.g. for version `1.0.2` of a project named `project`, that means:
|
||||
|
||||
tar xzf project-1.0.2.tar.gz
|
||||
cd project-1.0.2
|
||||
./bootstrap.sh
|
||||
./configure
|
||||
make
|
||||
sudo make install
|
2
NEWS
@@ -1 +1 @@
|
||||
See: https://dev.marc.waeckerlin.org/projects/proxyface
|
||||
See: https://mrw.sh/libraries/proxyface
|
||||
|
6
README
@@ -1,6 +0,0 @@
|
||||
Implements a Proxy detection (WPAD) interface for Linux, Mac OSX and
|
||||
Windows. Offers a GUI for manual proxy settings and automatic WPAD
|
||||
detection. Makes use of http://code.google.com/p/libproxy/ on Linux
|
||||
and Mac OSX, and uses WinHTTP on Windows. The GUI is based on QT4.
|
||||
|
||||
For more details, see: https://dev.marc.waeckerlin.org/projects/proxyface
|
21
README.md
Normal file
@@ -0,0 +1,21 @@
|
||||
Platform Independent Interface for Network Proxies
|
||||
==================================================
|
||||
|
||||
Implements a Proxy detection (WPAD) interface for Linux, Mac OSX and Windows. Offers a GUI for manual proxy settings and automatic WPAD detection. Makes use of http://code.google.com/p/libproxy/ on Linux and Mac OSX, and uses WinHTTP on Windows. The GUI is based on Qt.
|
||||
|
||||

|
||||
|
||||
|
||||
In the Web
|
||||
----------
|
||||
|
||||
- [Official project page](https://mrw.sh/libraries/proxyface)
|
||||
- [Full Doxygen library documentation](https://doc.mrw.sh/proxyface)
|
||||
- [Official repositories](https://repository.mrw.sh)
|
||||
- [Download and installation instructions](https://mrw.sh/doc/mrw.sh/src/branch/master/installation.md)
|
||||
|
||||
|
||||
Missing a Feature, Found a Bug
|
||||
------------------------------
|
||||
|
||||
You are missing a feature, or an implementation is too incomplete for the purpose you need it? Or you even found a bug? Just register and open an issue [on the project management page](https://mrw.sh/libraries/proxyface/issues).
|
8
autogen.sh
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/bin/bash -e
|
||||
if test -n "svn" -a -d .svn -a -e -x /usr/bin/svn2cl; then
|
||||
svn2cl
|
||||
fi
|
||||
aclocal
|
||||
libtoolize --force
|
||||
automake -a
|
||||
autoconf
|
295
ax_check_qt.m4
Normal file
@@ -0,0 +1,295 @@
|
||||
# SYNOPSIS
|
||||
#
|
||||
# Check if a module exists:
|
||||
# AX_CHECK_QT([qt_prefix], [list-of-qt-modules], [optional-modules] [flags])
|
||||
#
|
||||
# Abort if a module does not exist:
|
||||
# AX_REQUIRE_QT([qt_prefix], [list-of-qt-modules], [optional-modules] [flags])
|
||||
#
|
||||
# DESCRIPTIONS
|
||||
#
|
||||
# qt_prefix
|
||||
#
|
||||
# Each call to AX_CHECK_QT should have a different prefix
|
||||
# value (with a few exceptions discussed later on). This value,
|
||||
# usually provided in uppercase, is used as prefix to the
|
||||
# variables holding the compiler flags and libraries reported by
|
||||
# pkg-config.
|
||||
#
|
||||
# For instance, if your prefix was to be FOO you'll be provided
|
||||
# two variables FOO_CFLAGS and FOO_LIBS.
|
||||
#
|
||||
# This will also be used as message during the configure checks:
|
||||
# checking for FOO....
|
||||
#
|
||||
# list-of-modules
|
||||
#
|
||||
# A single call to the macro can check for the presence of one or
|
||||
# more qt modules; you'll see later how to make good use of this
|
||||
# feature. Each entry in the list can have a version comparison
|
||||
# specifier, with the same syntax as the Requires keyword in the
|
||||
# data files themselves.
|
||||
#
|
||||
# optional-modules
|
||||
#
|
||||
# Optional list of more, optional modules, e.g. modules that
|
||||
# exist only in Qt5, but not in Qt4, such as QtWidgets or
|
||||
# QtWebKitWidgets
|
||||
#
|
||||
# flags
|
||||
#
|
||||
# Optional flags, space-separated, from this list:
|
||||
#
|
||||
# manualflags
|
||||
#
|
||||
# CXXFLAGS, CPPFLAGS and LIBS variables are not
|
||||
# automatically expanded, but you need to add the
|
||||
# qt_prefix_CXXFLAGS, qt_prefix_CPPFLAGS and qt_prefix_LIBS
|
||||
# variables manually where you need them. This is useful,
|
||||
# if some build targets need a feature and some don't.
|
||||
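#
# Usage sketch (the prefix and module names here are illustrative, they
# are not taken from this project's configure.ac):
#
#   AX_REQUIRE_QT([QT], [QtCore QtGui QtNetwork], [QtWidgets])
#   AX_CHECK_QT([QTWEBKIT], [QtWebKit], [], [manualflags])
#
# The first call aborts configure if a required module is missing and adds
# the detected flags to CPPFLAGS, CXXFLAGS and LIBS; the second one only
# sets QTWEBKIT_CPPFLAGS, QTWEBKIT_CXXFLAGS and QTWEBKIT_LIBS because of
# the manualflags flag.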
|
||||
|
||||
AC_DEFUN([AX_CXX_QT_TOOL], [
|
||||
PKG_PROG_PKG_CONFIG
|
||||
if test -z "${HAVE_$1}"; then
|
||||
HAVE_$1=1
|
||||
AC_MSG_CHECKING([for $2])
|
||||
AC_ARG_VAR([$1], [path to Qt tool $2])
|
||||
for package in Qt5Core QtCore; do
|
||||
if test -x "${$1}"; then
|
||||
break
|
||||
fi
|
||||
tool=$(${PKG_CONFIG} --variable=$2_location $package 2> /dev/null)
|
||||
if test -x "${tool}"; then
|
||||
$1="${tool}"
|
||||
break
|
||||
fi
|
||||
tool=$(${PKG_CONFIG} --variable=host_bins $package 2> /dev/null)
|
||||
if test -n "$tool"; then
|
||||
for name in $2 $2-qt5 $2-qt4; do
|
||||
if test -x "${tool}/${name}"; then
|
||||
$1="${tool}/${name}"
|
||||
break
|
||||
fi
|
||||
done
|
||||
fi
|
||||
done
|
||||
if ! test -x "${$1}"; then
|
||||
if which "$2" > /dev/null; then
|
||||
$1=$2
|
||||
elif which "$2-qt5" > /dev/null; then
|
||||
$1=$2-qt5
|
||||
elif which "$2" > /dev/null; then
|
||||
$1=$2
|
||||
elif which "$2-qt4" > /dev/null; then
|
||||
$1=$2-qt4
|
||||
else
|
||||
HAVE_$1=0
|
||||
unset $1
|
||||
fi
|
||||
fi
|
||||
AC_SUBST($1)
|
||||
AM_CONDITIONAL(HAVE_$1, test $HAVE_[$1] -eq 1)
|
||||
if test $HAVE_$1 -eq 1; then
|
||||
AC_MSG_RESULT([$$1])
|
||||
else
|
||||
AC_MSG_RESULT([not found])
|
||||
fi
|
||||
fi
|
||||
])
|
||||
|
||||
AC_DEFUN([AX_CXX_QT_TOOLS], [
|
||||
AX_CXX_QT_TOOL(QMAKE, qmake)
|
||||
AX_CXX_QT_TOOL(MOC, moc)
|
||||
AX_CXX_QT_TOOL(UIC, uic)
|
||||
AX_CXX_QT_TOOL(RCC, rcc)
|
||||
AX_CXX_QT_TOOL(LUPDATE, lupdate)
|
||||
AX_CXX_QT_TOOL(LRELEASE, lrelease)
|
||||
])
|
||||
|
||||
AC_DEFUN([AX_CHECK_QT], [
|
||||
qt_modules="$2"
|
||||
qt_modules_optional="$3"
|
||||
qt_flags="$4"
|
||||
AX_CXX_QT_TOOLS
|
||||
HAVE_$1=0
|
||||
PKG_PROG_PKG_CONFIG
|
||||
PKG_CHECK_MODULES([$1]5, [${qt_modules//Qt/Qt5}], [
|
||||
HAVE_$1=1
|
||||
AC_DEFINE([HAVE_$1])
|
||||
QTDIR=$(${PKG_CONFIG} --variable=prefix Qt5Core)
|
||||
qt_host_bins=$(${PKG_CONFIG} --variable=host_bins Qt5Core)
|
||||
qt_libdir=$(${PKG_CONFIG} --variable=libdir Qt5Core)
|
||||
if test -d "${qt_libdir}" -a -d "${qt_libdir}/plugins"; then
|
||||
QT_PLUGIN_PATH="${qt_libdir}/plugins"
|
||||
elif test -d "${qt_libdir}/qt5" -a -d "${qt_libdir}/qt5/plugins"; then
|
||||
QT_PLUGIN_PATH="${qt_libdir}/qt5/plugins"
|
||||
elif test -d "${qt_host_bins}" -a -d "${qt_host_bins}/../plugins"; then
|
||||
QT_PLUGIN_PATH="${qt_host_bins}/../plugins"
|
||||
elif test -d "${QTDIR}/plugins; then
|
||||
QT_PLUGIN_PATH="${QTDIR}/plugins"
|
||||
elif test -d "${QTDIR}/share/qt5/plugins; then
|
||||
QT_PLUGIN_PATH="${QTDIR}/share/qt5/plugins"
|
||||
fi
|
||||
MOC_FLAGS+=" -DHAVE_$1=1 ${[$1]5_CFLAGS}"
|
||||
[$1]_CPPFLAGS="${[$1]5_CFLAGS}"
|
||||
[$1]_CXXFLAGS="${[$1]5_CFLAGS}"
|
||||
[$1]_LIBS="${[$1]5_LIBS}"
|
||||
AC_SUBST([$1]_CPPFLAGS)
|
||||
AC_SUBST([$1]_CXXFLAGS)
|
||||
if test "${qt_flags/manualflags/}" = "${qt_flags}"; then
|
||||
CPPFLAGS+=" ${[$1]_CPPFLAGS}"
|
||||
CXXFLAGS+=" ${[$1]_CXXFLAGS}"
|
||||
LIBS+=" ${[$1]_LIBS}"
|
||||
AC_MSG_NOTICE([Adding flags for $1])
|
||||
else
|
||||
AC_MSG_NOTICE([To enable $1, add $1_CPPFLAGS, $1_CXXFLAGS and $1_LIBS])
|
||||
fi
|
||||
PKG_REQUIREMENTS+=" ${qt_modules//Qt/Qt5}"
|
||||
if test -n "${qt_modules_optional}"; then
|
||||
PKG_CHECK_MODULES([$1]5_OPTIONAL, [${qt_modules_optional//Qt/Qt5}], [
|
||||
MOC_FLAGS+=" ${[$1]5_OPTIONAL_CFLAGS}"
|
||||
[$1]_CPPFLAGS+=" ${[$1]5_OPTIONAL_CFLAGS}"
|
||||
[$1]_CXXFLAGS+=" ${[$1]5_OPTIONAL_CFLAGS}"
|
||||
[$1]_LIBS+=" ${[$1]5_OPTIONAL_LIBS}"
|
||||
AC_SUBST([$1]_CPPFLAGS)
|
||||
AC_SUBST([$1]_CXXFLAGS)
|
||||
if test "${qt_flags/manualflags/}" = "${qt_flags}"; then
|
||||
CPPFLAGS+=" ${[$1]5_OPTIONAL_CFLAGS}"
|
||||
CXXFLAGS+=" ${[$1]5_OPTIONAL_CFLAGS}"
|
||||
LIBS+=" ${[$1]5_OPTIONAL_LIBS}"
|
||||
AC_MSG_NOTICE([Adding flags for $1])
|
||||
else
|
||||
AC_MSG_NOTICE([To enable $1, add $1_CPPFLAGS, $1_CXXFLAGS and $1_LIBS])
|
||||
fi
|
||||
PKG_REQUIREMENTS+=" ${qt_modules_optional//Qt/Qt5}"
|
||||
], [
|
||||
AC_MSG_NOTICE([Not found: ${qt_modules_optional//Qt/Qt5}])
|
||||
])
|
||||
fi
|
||||
], [
|
||||
PKG_CHECK_MODULES([$1], [${qt_modules}], [
|
||||
HAVE_$1=1
|
||||
AC_DEFINE([HAVE_$1])
|
||||
QTDIR=$(${PKG_CONFIG} --variable=prefix QtCore)
|
||||
qt_host_bins=$(${PKG_CONFIG} --variable=host_bins QtCore)
|
||||
qt_libdir=$(${PKG_CONFIG} --variable=libdir QtCore)
|
||||
if test -d "${qt_libdir}" -a -d "${qt_libdir}/plugins"; then
|
||||
QT_PLUGIN_PATH="${qt_libdir}/plugins"
|
||||
elif test -d "${qt_libdir}/qt5" -a -d "${qt_libdir}/qt5/plugins"; then
|
||||
QT_PLUGIN_PATH="${qt_libdir}/qt5/plugins"
|
||||
elif test -d "${qt_host_bins}" -a -d "${qt_host_bins}/../plugins"; then
|
||||
QT_PLUGIN_PATH="${qt_host_bins}/../plugins"
|
||||
elif test -d "${QTDIR}/plugins; then
|
||||
QT_PLUGIN_PATH="${QTDIR}/plugins"
|
||||
elif test -d "${QTDIR}/share/qt5/plugins; then
|
||||
QT_PLUGIN_PATH="${QTDIR}/share/qt5/plugins"
|
||||
fi
|
||||
MOC_FLAGS+=" -DHAVE_$1=1 ${$1_CFLAGS}"
|
||||
[$1]_CPPFLAGS="${[$1]_CFLAGS}"
|
||||
[$1]_CXXFLAGS="${[$1]_CFLAGS}"
|
||||
AC_SUBST([$1]_CPPFLAGS)
|
||||
AC_SUBST([$1]_CXXFLAGS)
|
||||
if test "${qt_flags/manualflags/}" = "${qt_flags}"; then
|
||||
CPPFLAGS+=" ${[$1]_CPPFLAGS}"
|
||||
CXXFLAGS+=" ${[$1]_CXXFLAGS}"
|
||||
LIBS+=" ${[$1]_LIBS}"
|
||||
AC_MSG_NOTICE([Adding flags for $1])
|
||||
else
|
||||
AC_MSG_NOTICE([To enable $1, add $1_CPPFLAGS, $1_CXXFLAGS and $1_LIBS])
|
||||
fi
|
||||
PKG_REQUIREMENTS+=" ${qt_modules}"
|
||||
if test -n "$3"; then
|
||||
PKG_CHECK_MODULES($1_OPTIONAL, [${qt_modules_optional}], [
|
||||
MOC_FLAGS+="${$1_OPTIONAL_CFLAGS}"
|
||||
[$1]_CPPFLAGS+=" ${$1_OPTIONAL_CFLAGS}"
|
||||
[$1]_CXXFLAGS+=" ${$1_OPTIONAL_CFLAGS}"
|
||||
[$1]_LIBS+=" ${$1_OPTIONAL_LIBS}"
|
||||
AC_SUBST([$1]_CPPFLAGS)
|
||||
AC_SUBST([$1]_CXXFLAGS)
|
||||
if test "${qt_flags/manualflags/}" = "${qt_flags}"; then
|
||||
CPPFLAGS+=" ${$1_OPTIONAL_CFLAGS}"
|
||||
CXXFLAGS+=" ${$1_OPTIONAL_CFLAGS}"
|
||||
LIBS+=" ${$1_OPTIONAL_LIBS}"
|
||||
AC_MSG_NOTICE([Adding flags for $1])
|
||||
else
|
||||
AC_MSG_NOTICE([To enable $1, add $1_CPPFLAGS, $1_CXXFLAGS and $1_LIBS])
|
||||
fi
|
||||
PKG_REQUIREMENTS+=" ${qt_modules_optional}"
|
||||
], [
|
||||
AC_MSG_NOTICE([Not found: ${qt_modules_optional}])
|
||||
])
|
||||
fi
|
||||
], [HAVE_$1=0])
|
||||
])
|
||||
AM_CONDITIONAL(HAVE_$1, test $HAVE_[$1] -eq 1)
|
||||
AX_CHECK_VALID_CXX_FLAG([-fPIC -fPIE], [position independent code flag])
|
||||
if test -n "${MINGW}"; then
|
||||
AX_CHECK_VALID_CXX_FLAG([-Wl,-subsystem,windows], [windows console flag])
|
||||
fi
|
||||
test "x$prefix" = xNONE && prefix=$ac_default_prefix
|
||||
AC_ARG_WITH([qt-plugin-path],
|
||||
[AS_HELP_STRING([--with-qt-plugin-path=PATH],
|
||||
[define a different qt plugin path, current @<:@default=check@:>@])],
|
||||
[QT_PLUGIN_PATH=$with_qt_plugin_path],
|
||||
[])
|
||||
AC_MSG_NOTICE([Qt Plugins are installed to ${QT_PLUGIN_PATH}])
|
||||
AC_SUBST(QTDIR)
|
||||
AC_SUBST(QT_PLUGIN_PATH)
|
||||
AC_SUBST(CPPFLAGS)
|
||||
AC_SUBST(MOC_FLAGS)
|
||||
AC_SUBST(CXXFLAGS)
|
||||
AC_SUBST(PKG_REQUIREMENTS)
|
||||
AX_ADDITIONAL_QT_RULES_HACK='
|
||||
#### Begin: Appended by $0
|
||||
|
||||
LANGUAGE_FILE_BASE ?= translations
|
||||
|
||||
ui_%.hxx: %.ui
|
||||
$(UIC) -o [$][@] $<
|
||||
|
||||
moc_%.cxx: %.hxx
|
||||
$(MOC) $(MOC_FLAGS) -o [$][@] $<
|
||||
|
||||
qrc_%.cxx: %.qrc
|
||||
$(RCC) -o [$][@] -name ${<:%.qrc=%} $<
|
||||
|
||||
#%.qrc: %
|
||||
# cwd=$$(pwd) && cd $< && $(RCC) -project -o $${cwd}/[$][@]
|
||||
|
||||
%.qm: %.ts
|
||||
${LRELEASE} $< -qm [$][@]
|
||||
|
||||
%.ts: ${LANGUAGE_FILES}
|
||||
${LUPDATE} -no-obsolete \
|
||||
-target-language [$]{@:${LANGUAGE_FILE_BASE}_%.ts=%} \
|
||||
[$][^] \
|
||||
-ts [$][@]
|
||||
|
||||
#### End: $0
|
||||
'
|
||||
])
|
||||
|
||||
AC_DEFUN([AX_REQUIRE_QT], [
|
||||
AX_CHECK_QT([$1], [$2], [$3], [$4])
|
||||
if ! test "$HAVE_$1" -eq 1; then
|
||||
AC_MSG_ERROR([Required Qt modules not found: $2])
|
||||
fi
|
||||
])
|
||||
|
||||
|
||||
# Omit Qt Keywords
|
||||
# AX_QT_NO_KEYWORDS
|
||||
AC_DEFUN([AX_QT_NO_KEYWORDS], [
|
||||
CPPFLAGS+=" -DQT_NO_KEYWORDS"
|
||||
])
|
||||
|
||||
AC_DEFUN([AX_INIT_QT], [
|
||||
if test -n "${AX_ADDITIONAL_QT_RULES_HACK}"; then
|
||||
for f in $(find test examples src -name makefile.in); do
|
||||
test -f "$f" && cat >> "$f" <<EOF
|
||||
${AX_ADDITIONAL_QT_RULES_HACK}
|
||||
EOF
|
||||
done
|
||||
fi
|
||||
])
|
948
ax_cxx_compile_stdcxx.m4
Normal file
@@ -0,0 +1,948 @@
|
||||
# ===========================================================================
|
||||
# https://www.gnu.org/software/autoconf-archive/ax_cxx_compile_stdcxx.html
|
||||
# ===========================================================================
|
||||
#
|
||||
# SYNOPSIS
|
||||
#
|
||||
# AX_CXX_COMPILE_STDCXX(VERSION, [ext|noext], [mandatory|optional])
|
||||
#
|
||||
# DESCRIPTION
|
||||
#
|
||||
# Check for baseline language coverage in the compiler for the specified
|
||||
# version of the C++ standard. If necessary, add switches to CXX and
|
||||
# CXXCPP to enable support. VERSION may be '11' (for the C++11 standard)
|
||||
# or '14' (for the C++14 standard).
|
||||
#
|
||||
# The second argument, if specified, indicates whether you insist on an
|
||||
# extended mode (e.g. -std=gnu++11) or a strict conformance mode (e.g.
|
||||
# -std=c++11). If neither is specified, you get whatever works, with
|
||||
# preference for an extended mode.
|
||||
#
|
||||
# The third argument, if specified 'mandatory' or if left unspecified,
|
||||
# indicates that baseline support for the specified C++ standard is
|
||||
# required and that the macro should error out if no mode with that
|
||||
# support is found. If specified 'optional', then configuration proceeds
|
||||
# regardless, after defining HAVE_CXX${VERSION} if and only if a
|
||||
# supporting mode is found.
|
||||
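#
# Example invocation (illustrative, not taken from this project):
#
#   AX_CXX_COMPILE_STDCXX(14, [noext], [mandatory])
#
# which makes configure fail unless the compiler accepts a strict C++14
# mode such as -std=c++14.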
#
|
||||
# LICENSE
|
||||
#
|
||||
# Copyright (c) 2008 Benjamin Kosnik <bkoz@redhat.com>
|
||||
# Copyright (c) 2012 Zack Weinberg <zackw@panix.com>
|
||||
# Copyright (c) 2013 Roy Stogner <roystgnr@ices.utexas.edu>
|
||||
# Copyright (c) 2014, 2015 Google Inc.; contributed by Alexey Sokolov <sokolov@google.com>
|
||||
# Copyright (c) 2015 Paul Norman <penorman@mac.com>
|
||||
# Copyright (c) 2015 Moritz Klammler <moritz@klammler.eu>
|
||||
# Copyright (c) 2016, 2018 Krzesimir Nowak <qdlacz@gmail.com>
|
||||
#
|
||||
# Copying and distribution of this file, with or without modification, are
|
||||
# permitted in any medium without royalty provided the copyright notice
|
||||
# and this notice are preserved. This file is offered as-is, without any
|
||||
# warranty.
|
||||
|
||||
#serial 10
|
||||
|
||||
dnl This macro is based on the code from the AX_CXX_COMPILE_STDCXX_11 macro
|
||||
dnl (serial version number 13).
|
||||
|
||||
AC_DEFUN([AX_CXX_COMPILE_STDCXX], [dnl
|
||||
m4_if([$1], [11], [ax_cxx_compile_alternatives="11 0x"],
|
||||
[$1], [14], [ax_cxx_compile_alternatives="14 1y"],
|
||||
[$1], [17], [ax_cxx_compile_alternatives="17 1z"],
|
||||
[m4_fatal([invalid first argument `$1' to AX_CXX_COMPILE_STDCXX])])dnl
|
||||
m4_if([$2], [], [],
|
||||
[$2], [ext], [],
|
||||
[$2], [noext], [],
|
||||
[m4_fatal([invalid second argument `$2' to AX_CXX_COMPILE_STDCXX])])dnl
|
||||
m4_if([$3], [], [ax_cxx_compile_cxx$1_required=true],
|
||||
[$3], [mandatory], [ax_cxx_compile_cxx$1_required=true],
|
||||
[$3], [optional], [ax_cxx_compile_cxx$1_required=false],
|
||||
[m4_fatal([invalid third argument `$3' to AX_CXX_COMPILE_STDCXX])])
|
||||
AC_LANG_PUSH([C++])dnl
|
||||
ac_success=no
|
||||
|
||||
m4_if([$2], [noext], [], [dnl
|
||||
if test x$ac_success = xno; then
|
||||
for alternative in ${ax_cxx_compile_alternatives}; do
|
||||
switch="-std=gnu++${alternative}"
|
||||
cachevar=AS_TR_SH([ax_cv_cxx_compile_cxx$1_$switch])
|
||||
AC_CACHE_CHECK(whether $CXX supports C++$1 features with $switch,
|
||||
$cachevar,
|
||||
[ac_save_CXX="$CXX"
|
||||
CXX="$CXX $switch"
|
||||
AC_COMPILE_IFELSE([AC_LANG_SOURCE([_AX_CXX_COMPILE_STDCXX_testbody_$1])],
|
||||
[eval $cachevar=yes],
|
||||
[eval $cachevar=no])
|
||||
CXX="$ac_save_CXX"])
|
||||
if eval test x\$$cachevar = xyes; then
|
||||
CXX="$CXX $switch"
|
||||
if test -n "$CXXCPP" ; then
|
||||
CXXCPP="$CXXCPP $switch"
|
||||
fi
|
||||
ac_success=yes
|
||||
break
|
||||
fi
|
||||
done
|
||||
fi])
|
||||
|
||||
m4_if([$2], [ext], [], [dnl
|
||||
if test x$ac_success = xno; then
|
||||
dnl HP's aCC needs +std=c++11 according to:
|
||||
dnl http://h21007.www2.hp.com/portal/download/files/unprot/aCxx/PDF_Release_Notes/769149-001.pdf
|
||||
dnl Cray's crayCC needs "-h std=c++11"
|
||||
for alternative in ${ax_cxx_compile_alternatives}; do
|
||||
for switch in -std=c++${alternative} +std=c++${alternative} "-h std=c++${alternative}"; do
|
||||
cachevar=AS_TR_SH([ax_cv_cxx_compile_cxx$1_$switch])
|
||||
AC_CACHE_CHECK(whether $CXX supports C++$1 features with $switch,
|
||||
$cachevar,
|
||||
[ac_save_CXX="$CXX"
|
||||
CXX="$CXX $switch"
|
||||
AC_COMPILE_IFELSE([AC_LANG_SOURCE([_AX_CXX_COMPILE_STDCXX_testbody_$1])],
|
||||
[eval $cachevar=yes],
|
||||
[eval $cachevar=no])
|
||||
CXX="$ac_save_CXX"])
|
||||
if eval test x\$$cachevar = xyes; then
|
||||
CXX="$CXX $switch"
|
||||
if test -n "$CXXCPP" ; then
|
||||
CXXCPP="$CXXCPP $switch"
|
||||
fi
|
||||
ac_success=yes
|
||||
break
|
||||
fi
|
||||
done
|
||||
if test x$ac_success = xyes; then
|
||||
break
|
||||
fi
|
||||
done
|
||||
fi])
|
||||
AC_LANG_POP([C++])
|
||||
if test x$ax_cxx_compile_cxx$1_required = xtrue; then
|
||||
if test x$ac_success = xno; then
|
||||
AC_MSG_ERROR([*** A compiler with support for C++$1 language features is required.])
|
||||
fi
|
||||
fi
|
||||
if test x$ac_success = xno; then
|
||||
HAVE_CXX$1=0
|
||||
AC_MSG_NOTICE([No compiler with C++$1 support was found])
|
||||
else
|
||||
HAVE_CXX$1=1
|
||||
AC_DEFINE(HAVE_CXX$1,1,
|
||||
[define if the compiler supports basic C++$1 syntax])
|
||||
fi
|
||||
AC_SUBST(HAVE_CXX$1)
|
||||
])
|
||||
|
||||
|
||||
dnl Test body for checking C++11 support
|
||||
|
||||
m4_define([_AX_CXX_COMPILE_STDCXX_testbody_11],
|
||||
_AX_CXX_COMPILE_STDCXX_testbody_new_in_11
|
||||
)
|
||||
|
||||
|
||||
dnl Test body for checking C++14 support
|
||||
|
||||
m4_define([_AX_CXX_COMPILE_STDCXX_testbody_14],
|
||||
_AX_CXX_COMPILE_STDCXX_testbody_new_in_11
|
||||
_AX_CXX_COMPILE_STDCXX_testbody_new_in_14
|
||||
)
|
||||
|
||||
m4_define([_AX_CXX_COMPILE_STDCXX_testbody_17],
|
||||
_AX_CXX_COMPILE_STDCXX_testbody_new_in_11
|
||||
_AX_CXX_COMPILE_STDCXX_testbody_new_in_14
|
||||
_AX_CXX_COMPILE_STDCXX_testbody_new_in_17
|
||||
)
|
||||
|
||||
dnl Tests for new features in C++11
|
||||
|
||||
m4_define([_AX_CXX_COMPILE_STDCXX_testbody_new_in_11], [[
|
||||
|
||||
// If the compiler admits that it is not ready for C++11, why torture it?
|
||||
// Hopefully, this will speed up the test.
|
||||
|
||||
#ifndef __cplusplus
|
||||
|
||||
#error "This is not a C++ compiler"
|
||||
|
||||
#elif __cplusplus < 201103L
|
||||
|
||||
#error "This is not a C++11 compiler"
|
||||
|
||||
#else
|
||||
|
||||
namespace cxx11
|
||||
{
|
||||
|
||||
namespace test_static_assert
|
||||
{
|
||||
|
||||
template <typename T>
|
||||
struct check
|
||||
{
|
||||
static_assert(sizeof(int) <= sizeof(T), "not big enough");
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
namespace test_final_override
|
||||
{
|
||||
|
||||
struct Base
|
||||
{
|
||||
virtual void f() {}
|
||||
};
|
||||
|
||||
struct Derived : public Base
|
||||
{
|
||||
virtual void f() override {}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
namespace test_double_right_angle_brackets
|
||||
{
|
||||
|
||||
template < typename T >
|
||||
struct check {};
|
||||
|
||||
typedef check<void> single_type;
|
||||
typedef check<check<void>> double_type;
|
||||
typedef check<check<check<void>>> triple_type;
|
||||
typedef check<check<check<check<void>>>> quadruple_type;
|
||||
|
||||
}
|
||||
|
||||
namespace test_decltype
|
||||
{
|
||||
|
||||
int
|
||||
f()
|
||||
{
|
||||
int a = 1;
|
||||
decltype(a) b = 2;
|
||||
return a + b;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_type_deduction
|
||||
{
|
||||
|
||||
template < typename T1, typename T2 >
|
||||
struct is_same
|
||||
{
|
||||
static const bool value = false;
|
||||
};
|
||||
|
||||
template < typename T >
|
||||
struct is_same<T, T>
|
||||
{
|
||||
static const bool value = true;
|
||||
};
|
||||
|
||||
template < typename T1, typename T2 >
|
||||
auto
|
||||
add(T1 a1, T2 a2) -> decltype(a1 + a2)
|
||||
{
|
||||
return a1 + a2;
|
||||
}
|
||||
|
||||
int
|
||||
test(const int c, volatile int v)
|
||||
{
|
||||
static_assert(is_same<int, decltype(0)>::value == true, "");
|
||||
static_assert(is_same<int, decltype(c)>::value == false, "");
|
||||
static_assert(is_same<int, decltype(v)>::value == false, "");
|
||||
auto ac = c;
|
||||
auto av = v;
|
||||
auto sumi = ac + av + 'x';
|
||||
auto sumf = ac + av + 1.0;
|
||||
static_assert(is_same<int, decltype(ac)>::value == true, "");
|
||||
static_assert(is_same<int, decltype(av)>::value == true, "");
|
||||
static_assert(is_same<int, decltype(sumi)>::value == true, "");
|
||||
static_assert(is_same<int, decltype(sumf)>::value == false, "");
|
||||
static_assert(is_same<int, decltype(add(c, v))>::value == true, "");
|
||||
return (sumf > 0.0) ? sumi : add(c, v);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_noexcept
|
||||
{
|
||||
|
||||
int f() { return 0; }
|
||||
int g() noexcept { return 0; }
|
||||
|
||||
static_assert(noexcept(f()) == false, "");
|
||||
static_assert(noexcept(g()) == true, "");
|
||||
|
||||
}
|
||||
|
||||
namespace test_constexpr
|
||||
{
|
||||
|
||||
template < typename CharT >
|
||||
unsigned long constexpr
|
||||
strlen_c_r(const CharT *const s, const unsigned long acc) noexcept
|
||||
{
|
||||
return *s ? strlen_c_r(s + 1, acc + 1) : acc;
|
||||
}
|
||||
|
||||
template < typename CharT >
|
||||
unsigned long constexpr
|
||||
strlen_c(const CharT *const s) noexcept
|
||||
{
|
||||
return strlen_c_r(s, 0UL);
|
||||
}
|
||||
|
||||
static_assert(strlen_c("") == 0UL, "");
|
||||
static_assert(strlen_c("1") == 1UL, "");
|
||||
static_assert(strlen_c("example") == 7UL, "");
|
||||
static_assert(strlen_c("another\0example") == 7UL, "");
|
||||
|
||||
}
|
||||
|
||||
namespace test_rvalue_references
|
||||
{
|
||||
|
||||
template < int N >
|
||||
struct answer
|
||||
{
|
||||
static constexpr int value = N;
|
||||
};
|
||||
|
||||
answer<1> f(int&) { return answer<1>(); }
|
||||
answer<2> f(const int&) { return answer<2>(); }
|
||||
answer<3> f(int&&) { return answer<3>(); }
|
||||
|
||||
void
|
||||
test()
|
||||
{
|
||||
int i = 0;
|
||||
const int c = 0;
|
||||
static_assert(decltype(f(i))::value == 1, "");
|
||||
static_assert(decltype(f(c))::value == 2, "");
|
||||
static_assert(decltype(f(0))::value == 3, "");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_uniform_initialization
|
||||
{
|
||||
|
||||
struct test
|
||||
{
|
||||
static const int zero {};
|
||||
static const int one {1};
|
||||
};
|
||||
|
||||
static_assert(test::zero == 0, "");
|
||||
static_assert(test::one == 1, "");
|
||||
|
||||
}
|
||||
|
||||
namespace test_lambdas
|
||||
{
|
||||
|
||||
void
|
||||
test1()
|
||||
{
|
||||
auto lambda1 = [](){};
|
||||
auto lambda2 = lambda1;
|
||||
lambda1();
|
||||
lambda2();
|
||||
}
|
||||
|
||||
int
|
||||
test2()
|
||||
{
|
||||
auto a = [](int i, int j){ return i + j; }(1, 2);
|
||||
auto b = []() -> int { return '0'; }();
|
||||
auto c = [=](){ return a + b; }();
|
||||
auto d = [&](){ return c; }();
|
||||
auto e = [a, &b](int x) mutable {
|
||||
const auto identity = [](int y){ return y; };
|
||||
for (auto i = 0; i < a; ++i)
|
||||
a += b--;
|
||||
return x + identity(a + b);
|
||||
}(0);
|
||||
return a + b + c + d + e;
|
||||
}
|
||||
|
||||
int
|
||||
test3()
|
||||
{
|
||||
const auto nullary = [](){ return 0; };
|
||||
const auto unary = [](int x){ return x; };
|
||||
using nullary_t = decltype(nullary);
|
||||
using unary_t = decltype(unary);
|
||||
const auto higher1st = [](nullary_t f){ return f(); };
|
||||
const auto higher2nd = [unary](nullary_t f1){
|
||||
return [unary, f1](unary_t f2){ return f2(unary(f1())); };
|
||||
};
|
||||
return higher1st(nullary) + higher2nd(nullary)(unary);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_variadic_templates
|
||||
{
|
||||
|
||||
template <int...>
|
||||
struct sum;
|
||||
|
||||
template <int N0, int... N1toN>
|
||||
struct sum<N0, N1toN...>
|
||||
{
|
||||
static constexpr auto value = N0 + sum<N1toN...>::value;
|
||||
};
|
||||
|
||||
template <>
|
||||
struct sum<>
|
||||
{
|
||||
static constexpr auto value = 0;
|
||||
};
|
||||
|
||||
static_assert(sum<>::value == 0, "");
|
||||
static_assert(sum<1>::value == 1, "");
|
||||
static_assert(sum<23>::value == 23, "");
|
||||
static_assert(sum<1, 2>::value == 3, "");
|
||||
static_assert(sum<5, 5, 11>::value == 21, "");
|
||||
static_assert(sum<2, 3, 5, 7, 11, 13>::value == 41, "");
|
||||
|
||||
}
|
||||
|
||||
// http://stackoverflow.com/questions/13728184/template-aliases-and-sfinae
|
||||
// Clang 3.1 fails with headers of libstd++ 4.8.3 when using std::function
|
||||
// because of this.
|
||||
namespace test_template_alias_sfinae
|
||||
{
|
||||
|
||||
struct foo {};
|
||||
|
||||
template<typename T>
|
||||
using member = typename T::member_type;
|
||||
|
||||
template<typename T>
|
||||
void func(...) {}
|
||||
|
||||
template<typename T>
|
||||
void func(member<T>*) {}
|
||||
|
||||
void test();
|
||||
|
||||
void test() { func<foo>(0); }
|
||||
|
||||
}
|
||||
|
||||
} // namespace cxx11
|
||||
|
||||
#endif // __cplusplus >= 201103L
|
||||
|
||||
]])
|
||||
|
||||
|
||||
dnl Tests for new features in C++14
|
||||
|
||||
m4_define([_AX_CXX_COMPILE_STDCXX_testbody_new_in_14], [[
|
||||
|
||||
// If the compiler admits that it is not ready for C++14, why torture it?
|
||||
// Hopefully, this will speed up the test.
|
||||
|
||||
#ifndef __cplusplus
|
||||
|
||||
#error "This is not a C++ compiler"
|
||||
|
||||
#elif __cplusplus < 201402L
|
||||
|
||||
#error "This is not a C++14 compiler"
|
||||
|
||||
#else
|
||||
|
||||
namespace cxx14
|
||||
{
|
||||
|
||||
namespace test_polymorphic_lambdas
|
||||
{
|
||||
|
||||
int
|
||||
test()
|
||||
{
|
||||
const auto lambda = [](auto&&... args){
|
||||
const auto istiny = [](auto x){
|
||||
return (sizeof(x) == 1UL) ? 1 : 0;
|
||||
};
|
||||
const int aretiny[] = { istiny(args)... };
|
||||
return aretiny[0];
|
||||
};
|
||||
return lambda(1, 1L, 1.0f, '1');
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_binary_literals
|
||||
{
|
||||
|
||||
constexpr auto ivii = 0b0000000000101010;
|
||||
static_assert(ivii == 42, "wrong value");
|
||||
|
||||
}
|
||||
|
||||
namespace test_generalized_constexpr
|
||||
{
|
||||
|
||||
template < typename CharT >
|
||||
constexpr unsigned long
|
||||
strlen_c(const CharT *const s) noexcept
|
||||
{
|
||||
auto length = 0UL;
|
||||
for (auto p = s; *p; ++p)
|
||||
++length;
|
||||
return length;
|
||||
}
|
||||
|
||||
static_assert(strlen_c("") == 0UL, "");
|
||||
static_assert(strlen_c("x") == 1UL, "");
|
||||
static_assert(strlen_c("test") == 4UL, "");
|
||||
static_assert(strlen_c("another\0test") == 7UL, "");
|
||||
|
||||
}
|
||||
|
||||
namespace test_lambda_init_capture
|
||||
{
|
||||
|
||||
int
|
||||
test()
|
||||
{
|
||||
auto x = 0;
|
||||
const auto lambda1 = [a = x](int b){ return a + b; };
|
||||
const auto lambda2 = [a = lambda1(x)](){ return a; };
|
||||
return lambda2();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_digit_separators
|
||||
{
|
||||
|
||||
constexpr auto ten_million = 100'000'000;
|
||||
static_assert(ten_million == 100000000, "");
|
||||
|
||||
}
|
||||
|
||||
namespace test_return_type_deduction
|
||||
{
|
||||
|
||||
auto f(int& x) { return x; }
|
||||
decltype(auto) g(int& x) { return x; }
|
||||
|
||||
template < typename T1, typename T2 >
|
||||
struct is_same
|
||||
{
|
||||
static constexpr auto value = false;
|
||||
};
|
||||
|
||||
template < typename T >
|
||||
struct is_same<T, T>
|
||||
{
|
||||
static constexpr auto value = true;
|
||||
};
|
||||
|
||||
int
|
||||
test()
|
||||
{
|
||||
auto x = 0;
|
||||
static_assert(is_same<int, decltype(f(x))>::value, "");
|
||||
static_assert(is_same<int&, decltype(g(x))>::value, "");
|
||||
return x;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
} // namespace cxx14
|
||||
|
||||
#endif // __cplusplus >= 201402L
|
||||
|
||||
]])
|
||||
|
||||
|
||||
dnl Tests for new features in C++17
|
||||
|
||||
m4_define([_AX_CXX_COMPILE_STDCXX_testbody_new_in_17], [[
|
||||
|
||||
// If the compiler admits that it is not ready for C++17, why torture it?
|
||||
// Hopefully, this will speed up the test.
|
||||
|
||||
#ifndef __cplusplus
|
||||
|
||||
#error "This is not a C++ compiler"
|
||||
|
||||
#elif __cplusplus < 201703L
|
||||
|
||||
#error "This is not a C++17 compiler"
|
||||
|
||||
#else
|
||||
|
||||
#include <initializer_list>
|
||||
#include <utility>
|
||||
#include <type_traits>
|
||||
|
||||
namespace cxx17
|
||||
{
|
||||
|
||||
namespace test_constexpr_lambdas
|
||||
{
|
||||
|
||||
constexpr int foo = [](){return 42;}();
|
||||
|
||||
}
|
||||
|
||||
namespace test::nested_namespace::definitions
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
namespace test_fold_expression
|
||||
{
|
||||
|
||||
template<typename... Args>
|
||||
int multiply(Args... args)
|
||||
{
|
||||
return (args * ... * 1);
|
||||
}
|
||||
|
||||
template<typename... Args>
|
||||
bool all(Args... args)
|
||||
{
|
||||
return (args && ...);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_extended_static_assert
|
||||
{
|
||||
|
||||
static_assert (true);
|
||||
|
||||
}
|
||||
|
||||
namespace test_auto_brace_init_list
|
||||
{
|
||||
|
||||
auto foo = {5};
|
||||
auto bar {5};
|
||||
|
||||
static_assert(std::is_same<std::initializer_list<int>, decltype(foo)>::value);
|
||||
static_assert(std::is_same<int, decltype(bar)>::value);
|
||||
}
|
||||
|
||||
namespace test_typename_in_template_template_parameter
|
||||
{
|
||||
|
||||
template<template<typename> typename X> struct D;
|
||||
|
||||
}
|
||||
|
||||
namespace test_fallthrough_nodiscard_maybe_unused_attributes
|
||||
{
|
||||
|
||||
int f1()
|
||||
{
|
||||
return 42;
|
||||
}
|
||||
|
||||
[[nodiscard]] int f2()
|
||||
{
|
||||
[[maybe_unused]] auto unused = f1();
|
||||
|
||||
switch (f1())
|
||||
{
|
||||
case 17:
|
||||
f1();
|
||||
[[fallthrough]];
|
||||
case 42:
|
||||
f1();
|
||||
}
|
||||
return f1();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_extended_aggregate_initialization
|
||||
{
|
||||
|
||||
struct base1
|
||||
{
|
||||
int b1, b2 = 42;
|
||||
};
|
||||
|
||||
struct base2
|
||||
{
|
||||
base2() {
|
||||
b3 = 42;
|
||||
}
|
||||
int b3;
|
||||
};
|
||||
|
||||
struct derived : base1, base2
|
||||
{
|
||||
int d;
|
||||
};
|
||||
|
||||
derived d1 {{1, 2}, {}, 4}; // full initialization
|
||||
derived d2 {{}, {}, 4}; // value-initialized bases
|
||||
|
||||
}
|
||||
|
||||
namespace test_general_range_based_for_loop
|
||||
{
|
||||
|
||||
struct iter
|
||||
{
|
||||
int i;
|
||||
|
||||
int& operator* ()
|
||||
{
|
||||
return i;
|
||||
}
|
||||
|
||||
const int& operator* () const
|
||||
{
|
||||
return i;
|
||||
}
|
||||
|
||||
iter& operator++()
|
||||
{
|
||||
++i;
|
||||
return *this;
|
||||
}
|
||||
};
|
||||
|
||||
struct sentinel
|
||||
{
|
||||
int i;
|
||||
};
|
||||
|
||||
bool operator== (const iter& i, const sentinel& s)
|
||||
{
|
||||
return i.i == s.i;
|
||||
}
|
||||
|
||||
bool operator!= (const iter& i, const sentinel& s)
|
||||
{
|
||||
return !(i == s);
|
||||
}
|
||||
|
||||
struct range
|
||||
{
|
||||
iter begin() const
|
||||
{
|
||||
return {0};
|
||||
}
|
||||
|
||||
sentinel end() const
|
||||
{
|
||||
return {5};
|
||||
}
|
||||
};
|
||||
|
||||
void f()
|
||||
{
|
||||
range r {};
|
||||
|
||||
for (auto i : r)
|
||||
{
|
||||
[[maybe_unused]] auto v = i;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_lambda_capture_asterisk_this_by_value
|
||||
{
|
||||
|
||||
struct t
|
||||
{
|
||||
int i;
|
||||
int foo()
|
||||
{
|
||||
return [*this]()
|
||||
{
|
||||
return i;
|
||||
}();
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
namespace test_enum_class_construction
|
||||
{
|
||||
|
||||
enum class byte : unsigned char
|
||||
{};
|
||||
|
||||
byte foo {42};
|
||||
|
||||
}
|
||||
|
||||
namespace test_constexpr_if
|
||||
{
|
||||
|
||||
template <bool cond>
|
||||
int f ()
|
||||
{
|
||||
if constexpr(cond)
|
||||
{
|
||||
return 13;
|
||||
}
|
||||
else
|
||||
{
|
||||
return 42;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_selection_statement_with_initializer
|
||||
{
|
||||
|
||||
int f()
|
||||
{
|
||||
return 13;
|
||||
}
|
||||
|
||||
int f2()
|
||||
{
|
||||
if (auto i = f(); i > 0)
|
||||
{
|
||||
return 3;
|
||||
}
|
||||
|
||||
switch (auto i = f(); i + 4)
|
||||
{
|
||||
case 17:
|
||||
return 2;
|
||||
|
||||
default:
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_template_argument_deduction_for_class_templates
|
||||
{
|
||||
|
||||
template <typename T1, typename T2>
|
||||
struct pair
|
||||
{
|
||||
pair (T1 p1, T2 p2)
|
||||
: m1 {p1},
|
||||
m2 {p2}
|
||||
{}
|
||||
|
||||
T1 m1;
|
||||
T2 m2;
|
||||
};
|
||||
|
||||
void f()
|
||||
{
|
||||
[[maybe_unused]] auto p = pair{13, 42u};
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
namespace test_non_type_auto_template_parameters
|
||||
{
|
||||
|
||||
template <auto n>
|
||||
struct B
|
||||
{};
|
||||
|
||||
B<5> b1;
|
||||
B<'a'> b2;
|
||||
|
||||
}
|
||||
|
||||
namespace test_structured_bindings
|
||||
{
|
||||
|
||||
int arr[2] = { 1, 2 };
|
||||
std::pair<int, int> pr = { 1, 2 };
|
||||
|
||||
auto f1() -> int(&)[2]
|
||||
{
|
||||
return arr;
|
||||
}
|
||||
|
||||
auto f2() -> std::pair<int, int>&
|
||||
{
|
||||
return pr;
|
||||
}
|
||||
|
||||
struct S
|
||||
{
|
||||
int x1 : 2;
|
||||
volatile double y1;
|
||||
};
|
||||
|
||||
S f3()
|
||||
{
|
||||
return {};
|
||||
}
|
||||
|
||||
auto [ x1, y1 ] = f1();
|
||||
auto& [ xr1, yr1 ] = f1();
|
||||
auto [ x2, y2 ] = f2();
|
||||
auto& [ xr2, yr2 ] = f2();
|
||||
const auto [ x3, y3 ] = f3();
|
||||
|
||||
}
|
||||
|
||||
namespace test_exception_spec_type_system
|
||||
{
|
||||
|
||||
struct Good {};
|
||||
struct Bad {};
|
||||
|
||||
void g1() noexcept;
|
||||
void g2();
|
||||
|
||||
template<typename T>
|
||||
Bad
|
||||
f(T*, T*);
|
||||
|
||||
template<typename T1, typename T2>
|
||||
Good
|
||||
f(T1*, T2*);
|
||||
|
||||
static_assert (std::is_same_v<Good, decltype(f(g1, g2))>);
|
||||
|
||||
}
|
||||
|
||||
namespace test_inline_variables
|
||||
{
|
||||
|
||||
template<class T> void f(T)
|
||||
{}
|
||||
|
||||
template<class T> inline T g(T)
|
||||
{
|
||||
return T{};
|
||||
}
|
||||
|
||||
template<> inline void f<>(int)
|
||||
{}
|
||||
|
||||
template<> int g<>(int)
|
||||
{
|
||||
return 5;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
} // namespace cxx17
|
||||
|
||||
#endif // __cplusplus < 201703L
|
||||
|
||||
]])
|
231
ax_cxx_compile_stdcxx_11.m4
Normal file
@@ -0,0 +1,231 @@
|
||||
# ============================================================================
|
||||
# http://www.gnu.org/software/autoconf-archive/ax_cxx_compile_stdcxx_11.html
|
||||
# ============================================================================
|
||||
#
|
||||
# SYNOPSIS
|
||||
#
|
||||
# AX_REQUIRE_STDCXX_11
|
||||
# AX_REQUIRE_STDCXX_14
|
||||
# AX_CXX_COMPILE_STDCXX_11([ext|noext],[mandatory|optional])
|
||||
# AX_CXX_COMPILE_STDCXX_14([ext|noext],[mandatory|optional])
|
||||
#
|
||||
# DESCRIPTION
|
||||
#
|
||||
# Check for baseline language coverage in the compiler for the C++11
|
||||
# or C++14 standard; if necessary, add switches to CXXFLAGS to
|
||||
# enable support.
|
||||
#
|
||||
# The first argument, if specified, indicates whether you insist on an
|
||||
# extended mode (e.g. -std=gnu++11) or a strict conformance mode (e.g.
|
||||
# -std=c++11). If neither is specified, you get whatever works, with
|
||||
# preference for an extended mode.
|
||||
#
|
||||
# The second argument, if specified 'mandatory' or if left unspecified,
|
||||
# indicates that baseline C++11 support is required and that the macro
|
||||
# should error out if no mode with that support is found. If specified
|
||||
# 'optional', then configuration proceeds regardless, after defining
|
||||
# HAVE_CXX11 if and only if a supporting mode is found.
|
||||
#
|
||||
# LICENSE
|
||||
#
|
||||
# Copyright (c) 2008 Benjamin Kosnik <bkoz@redhat.com>
|
||||
# Copyright (c) 2012 Zack Weinberg <zackw@panix.com>
|
||||
# Copyright (c) 2013 Roy Stogner <roystgnr@ices.utexas.edu>
|
||||
#
|
||||
# Copying and distribution of this file, with or without modification, are
|
||||
# permitted in any medium without royalty provided the copyright notice
|
||||
# and this notice are preserved. This file is offered as-is, without any
|
||||
# warranty.
|
||||
|
||||
#serial 3
|
||||
|
||||
m4_define([_AX_CXX_COMPILE_STDCXX_11_testbody], [
|
||||
template <typename T>
|
||||
struct check
|
||||
{
|
||||
static_assert(sizeof(int) <= sizeof(T), "not big enough");
|
||||
};
|
||||
|
||||
typedef check<check<bool>> right_angle_brackets;
|
||||
|
||||
int a;
|
||||
decltype(a) b;
|
||||
|
||||
typedef check<int> check_type;
|
||||
check_type c;
|
||||
check_type&& cr = static_cast<check_type&&>(c);
|
||||
|
||||
auto d = a;
|
||||
])
|
||||
|
||||
AC_DEFUN([AX_CXX_COMPILE_STDCXX_11], [dnl
|
||||
m4_if([$1], [], [],
|
||||
[$1], [ext], [],
|
||||
[$1], [noext], [],
|
||||
[m4_fatal([invalid argument `$1' to AX_CXX_COMPILE_STDCXX_11])])dnl
|
||||
m4_if([$2], [], [ax_cxx_compile_cxx11_required=true],
|
||||
[$2], [mandatory], [ax_cxx_compile_cxx11_required=true],
|
||||
[$2], [optional], [ax_cxx_compile_cxx11_required=false],
|
||||
[m4_fatal([invalid second argument `$2' to AX_CXX_COMPILE_STDCXX_11])])dnl
|
||||
AC_LANG_PUSH([C++])dnl
|
||||
ac_success=no
|
||||
AC_CACHE_CHECK(whether $CXX supports C++11 features by default,
|
||||
ax_cv_cxx_compile_cxx11,
|
||||
[AC_COMPILE_IFELSE([AC_LANG_SOURCE([_AX_CXX_COMPILE_STDCXX_11_testbody])],
|
||||
[ax_cv_cxx_compile_cxx11=yes],
|
||||
[ax_cv_cxx_compile_cxx11=no])])
|
||||
if test x$ax_cv_cxx_compile_cxx11 = xyes; then
|
||||
ac_success=yes
|
||||
fi
|
||||
|
||||
m4_if([$1], [noext], [], [dnl
|
||||
if test x$ac_success = xno; then
|
||||
for switch in -std=gnu++11 -std=gnu++0x; do
|
||||
cachevar=AS_TR_SH([ax_cv_cxx_compile_cxx11_$switch])
|
||||
AC_CACHE_CHECK(whether $CXX supports C++11 features with $switch,
|
||||
$cachevar,
|
||||
[ac_save_CXXFLAGS="$CXXFLAGS"
|
||||
CXXFLAGS="$CXXFLAGS $switch"
|
||||
AC_COMPILE_IFELSE([AC_LANG_SOURCE([_AX_CXX_COMPILE_STDCXX_11_testbody])],
|
||||
[eval $cachevar=yes],
|
||||
[eval $cachevar=no])
|
||||
CXXFLAGS="$ac_save_CXXFLAGS"])
|
||||
if eval test x\$$cachevar = xyes; then
|
||||
CXXFLAGS="$CXXFLAGS $switch"
|
||||
ac_success=yes
|
||||
break
|
||||
fi
|
||||
done
|
||||
fi])
|
||||
|
||||
m4_if([$1], [ext], [], [dnl
|
||||
if test x$ac_success = xno; then
|
||||
for switch in -std=c++11 -std=c++0x; do
|
||||
cachevar=AS_TR_SH([ax_cv_cxx_compile_cxx11_$switch])
|
||||
AC_CACHE_CHECK(whether $CXX supports C++11 features with $switch,
|
||||
$cachevar,
|
||||
[ac_save_CXXFLAGS="$CXXFLAGS"
|
||||
CXXFLAGS="$CXXFLAGS $switch"
|
||||
AC_COMPILE_IFELSE([AC_LANG_SOURCE([_AX_CXX_COMPILE_STDCXX_11_testbody])],
|
||||
[eval $cachevar=yes],
|
||||
[eval $cachevar=no])
|
||||
CXXFLAGS="$ac_save_CXXFLAGS"])
|
||||
if eval test x\$$cachevar = xyes; then
|
||||
CXXFLAGS="$CXXFLAGS $switch"
|
||||
ac_success=yes
|
||||
break
|
||||
fi
|
||||
done
|
||||
fi])
|
||||
AC_LANG_POP([C++])
|
||||
if test x$ax_cxx_compile_cxx11_required = xtrue; then
|
||||
if test x$ac_success = xno; then
|
||||
AC_MSG_ERROR([*** A compiler with support for C++11 language features is required.])
|
||||
fi
|
||||
else
|
||||
if test x$ac_success = xno; then
|
||||
HAVE_CXX11=0
|
||||
AC_MSG_NOTICE([No compiler with C++11 support was found])
|
||||
else
|
||||
HAVE_CXX11=1
|
||||
AC_DEFINE(HAVE_CXX11,1,
|
||||
[define if the compiler supports basic C++11 syntax])
|
||||
fi
|
||||
|
||||
AC_SUBST(HAVE_CXX11)
|
||||
fi
|
||||
])
|
||||
|
||||
AC_DEFUN([AX_CXX_COMPILE_STDCXX_14], [dnl
|
||||
m4_if([$1], [], [],
|
||||
[$1], [ext], [],
|
||||
[$1], [noext], [],
|
||||
[m4_fatal([invalid argument `$1' to AX_CXX_COMPILE_STDCXX_14])])dnl
|
||||
m4_if([$2], [], [ax_cxx_compile_cxx14_required=true],
|
||||
[$2], [mandatory], [ax_cxx_compile_cxx14_required=true],
|
||||
[$2], [optional], [ax_cxx_compile_cxx14_required=false],
|
||||
[m4_fatal([invalid second argument `$2' to AX_CXX_COMPILE_STDCXX_14])])dnl
|
||||
AC_LANG_PUSH([C++])dnl
|
||||
ac_success=no
|
||||
AC_CACHE_CHECK(whether $CXX supports C++14 features by default,
|
||||
ax_cv_cxx_compile_cxx14,
|
||||
[AC_COMPILE_IFELSE([AC_LANG_SOURCE([_AX_CXX_COMPILE_STDCXX_11_testbody])],
|
||||
[ax_cv_cxx_compile_cxx14=yes],
|
||||
[ax_cv_cxx_compile_cxx14=no])])
|
||||
if test x$ax_cv_cxx_compile_cxx14 = xyes; then
|
||||
ac_success=yes
|
||||
fi
|
||||
|
||||
m4_if([$1], [noext], [], [dnl
|
||||
if test x$ac_success = xno; then
|
||||
for switch in -std=gnu++14 -std=gnu++0y; do
|
||||
cachevar=AS_TR_SH([ax_cv_cxx_compile_cxx14_$switch])
|
||||
AC_CACHE_CHECK(whether $CXX supports C++14 features with $switch,
|
||||
$cachevar,
|
||||
[ac_save_CXXFLAGS="$CXXFLAGS"
|
||||
CXXFLAGS="$CXXFLAGS $switch"
|
||||
AC_COMPILE_IFELSE([AC_LANG_SOURCE([_AX_CXX_COMPILE_STDCXX_11_testbody])],
|
||||
[eval $cachevar=yes],
|
||||
[eval $cachevar=no])
|
||||
CXXFLAGS="$ac_save_CXXFLAGS"])
|
||||
if eval test x\$$cachevar = xyes; then
|
||||
CXXFLAGS="$CXXFLAGS $switch"
|
||||
ac_success=yes
|
||||
break
|
||||
fi
|
||||
done
|
||||
fi])
|
||||
|
||||
m4_if([$1], [ext], [], [dnl
|
||||
if test x$ac_success = xno; then
|
||||
for switch in -std=c++14 -std=c++0y; do
|
||||
cachevar=AS_TR_SH([ax_cv_cxx_compile_cxx14_$switch])
|
||||
AC_CACHE_CHECK(whether $CXX supports C++14 features with $switch,
|
||||
$cachevar,
|
||||
[ac_save_CXXFLAGS="$CXXFLAGS"
|
||||
CXXFLAGS="$CXXFLAGS $switch"
|
||||
AC_COMPILE_IFELSE([AC_LANG_SOURCE([_AX_CXX_COMPILE_STDCXX_11_testbody])],
|
||||
[eval $cachevar=yes],
|
||||
[eval $cachevar=no])
|
||||
CXXFLAGS="$ac_save_CXXFLAGS"])
|
||||
if eval test x\$$cachevar = xyes; then
|
||||
CXXFLAGS="$CXXFLAGS $switch"
|
||||
ac_success=yes
|
||||
break
|
||||
fi
|
||||
done
|
||||
fi])
|
||||
AC_LANG_POP([C++])
|
||||
if test x$ax_cxx_compile_cxx14_required = xtrue; then
|
||||
if test x$ac_success = xno; then
|
||||
AC_MSG_ERROR([*** A compiler with support for C++14 language features is required.])
|
||||
fi
|
||||
else
|
||||
if test x$ac_success = xno; then
|
||||
HAVE_CXX14=0
|
||||
AC_MSG_NOTICE([No compiler with C++14 support was found])
|
||||
AX_CXX_COMPILE_STDCXX_11([$1], [optional])
|
||||
else
|
||||
HAVE_CXX11=1
|
||||
HAVE_CXX14=1
|
||||
AC_DEFINE(HAVE_CXX14,1,
|
||||
[define if the compiler supports basic C++14 syntax])
|
||||
AC_DEFINE(HAVE_CXX11,1,
|
||||
[define if the compiler supports basic C++11 syntax])
|
||||
fi
|
||||
AC_SUBST(HAVE_CXX11)
|
||||
AC_SUBST(HAVE_CXX14)
|
||||
fi
|
||||
])
|
||||
|
||||
AC_DEFUN([AX_REQUIRE_STDCXX_11], [
|
||||
if test x${HAVE_CXX11} != x1; then
|
||||
AC_MSG_ERROR([*** A compiler with support for C++11 language features is required.])
|
||||
fi
|
||||
])
|
||||
|
||||
AC_DEFUN([AX_REQUIRE_STDCXX_14], [
|
||||
if test x${HAVE_CXX14} != x1; then
|
||||
AC_MSG_ERROR([*** A compiler with support for C++14 language features is required.])
|
||||
fi
|
||||
])
|
1238
ax_init_standard_project.m4
Normal file
@@ -0,0 +1,1238 @@
|
||||
## @id $Id: ax_init_standard_project.m4 204 2016-09-29 18:29:53Z marc $
|
||||
|
||||
## 1 2 3 4 5 6 7 8
|
||||
## 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
# m4_esyscmd_s does not exist on centos 5 and 6
|
||||
m4_define([mrw_esyscmd_s], [m4_normalize(m4_esyscmd([$1]))])
|
||||
|
||||
# define least version number from subversion's revision number:
|
||||
# it is taken modulo 256 due to a bug on Apple's MacOSX
|
||||
m4_define(x_least, m4_ifdef([x_least_fix], [x_least_fix],
|
||||
m4_ifdef([x_least_diff],
|
||||
mrw_esyscmd_s([
|
||||
VCS_REVISION="ERROR-UNDEFINED-REVISION-to-be-built-in-subdirectory-of-checkout"
|
||||
for path in . .. ../.. ../../..; do
|
||||
if test -d ${path}/.svn; then
|
||||
(cd $path; svn upgrade 1>&2 > /dev/null || true)
|
||||
VCS_REVISION=$(LANG= svn info $path | sed -n 's/Last Changed Rev: //p')
|
||||
if test -n "${VCS_REVISION}"; then break; fi
|
||||
elif test -d ${path}/.git; then
|
||||
VCS_REVISION=$(cd ${path} > /dev/null 2>/dev/null; git rev-list --all --count)
|
||||
if test -n "${VCS_REVISION}"; then break; fi
|
||||
fi
|
||||
done
|
||||
echo $ECHO_N $(($VCS_REVISION))
|
||||
]), mrw_esyscmd_s([
|
||||
VCS_REVISION="ERROR-UNDEFINED-REVISION-to-be-built-in-subdirectory-of-checkout"
|
||||
for path in . .. ../.. ../../..; do
|
||||
if test -d ${path}/.svn; then
|
||||
(cd $path; svn upgrade 1>&2 > /dev/null || true)
|
||||
VCS_REVISION=$(LANG= svn info $path | sed -n 's/Last Changed Rev: //p')
|
||||
if test -n "${VCS_REVISION}"; then break; fi
|
||||
elif test -d ${path}/.git; then
|
||||
VCS_REVISION=$(cd ${path} > /dev/null 2>/dev/null; git rev-list --all --count)
|
||||
if test -n "${VCS_REVISION}"; then break; fi
|
||||
fi
|
||||
done
|
||||
# Mac does not support LEAST > 255
|
||||
echo $ECHO_N $(($VCS_REVISION%256))]))))
|
||||
|
||||
# define version number from subversion's revision number:
|
||||
# it is taken modulo 256 due to a bug on Apple's MacOSX
|
||||
# add to x_minor if revision number is > 256
|
||||
m4_define(x_minor_diff, m4_ifdef([x_least_fix], 0, mrw_esyscmd_s([
|
||||
VCS_REVISION="ERROR-UNDEFINED-REVISION-to-be-built-in-subdirectory-of-checkout"
|
||||
for path in . .. ../.. ../../..; do
|
||||
if test -d ${path}/.svn; then
|
||||
(cd $path; svn upgrade 1>&2 > /dev/null || true)
|
||||
VCS_REVISION=$(LANG= svn info $path | sed -n 's/Last Changed Rev: //p')
|
||||
if test -n "${VCS_REVISION}"; then break; fi
|
||||
elif test -d ${path}/.git; then
|
||||
VCS_REVISION=$(cd ${path} > /dev/null 2>/dev/null; git rev-list --all --count)
|
||||
if test -n "${VCS_REVISION}"; then break; fi
|
||||
fi;
|
||||
done
|
||||
# Mac does not support LEAST > 255
|
||||
echo $ECHO_N $(($VCS_REVISION/256))])))
|
||||
|
||||
# setup version number
|
||||
m4_define(x_version, [x_major.m4_ifdef([x_least_diff], x_minor, m4_eval(x_minor+x_minor_diff)).m4_eval(m4_ifdef([x_least_diff], [x_least-x_least_diff], [x_least]))])
|
||||
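# Example (assumed numbers): with x_major = 1, x_minor = 0 and VCS revision 515,
# x_least becomes 515 % 256 = 3 and x_minor_diff becomes 515 / 256 = 2, so
# x_version expands to 1.2.3.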
|
||||
## bugreport mail address is taken from <user@host> in first line of AUTHORS
|
||||
m4_define(x_bugreport, mrw_esyscmd_s([
|
||||
head -1 AUTHORS | \
|
||||
sed -n 's,.*<\([-_.a-z0-9A-Z]*@[-_.a-z0-9A-Z]*\)>.*,\1,gp'
|
||||
]))
|
||||
|
||||
m4_include(ax_check_qt.m4)
|
||||
|
||||
AC_ALIAS([AC_DEFINE_DIR], [AX_DEFINE_DIR])
|
||||
AC_DEFUN([AX_DEFINE_DIR], [
|
||||
prefix_NONE=
|
||||
exec_prefix_NONE=
|
||||
test "x$prefix" = xNONE && prefix_NONE=yes && prefix=$ac_default_prefix
|
||||
test "x$exec_prefix" = xNONE && exec_prefix_NONE=yes && exec_prefix=$prefix
|
||||
dnl In Autoconf 2.60, ${datadir} refers to ${datarootdir}, which in turn
|
||||
dnl refers to ${prefix}. Thus we have to use `eval' twice.
|
||||
eval ax_define_dir="\"[$]$2\""
|
||||
eval ax_define_dir="\"$ax_define_dir\""
|
||||
AC_SUBST($1, "$ax_define_dir")
|
||||
AC_DEFINE_UNQUOTED($1, "$ax_define_dir", [$3])
|
||||
test "$prefix_NONE" && prefix=NONE
|
||||
test "$exec_prefix_NONE" && exec_prefix=NONE
|
||||
])
|
||||
|
||||
# add target dependencies to an existing makefile.in
|
||||
# - parameters:
|
||||
# $1 = existing target
|
||||
# $2 = new dependency for that target
|
||||
# $3 = filename of makefile.in
|
||||
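# - example (illustrative target, dependency and file names):
#   AX_ADD_MAKEFILE_TARGET_DEP([all], [doc], [src/makefile.in])
#   adds "doc" as a dependency of the "all" target in src/makefile.in and
#   also registers it under .PHONY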
AC_DEFUN([AX_ADD_MAKEFILE_TARGET_DEP], [
|
||||
sh_add_makefile_target_dep() {
|
||||
sed -i -e ':a;/^'${1}':.*\\$/{N;s/\\\n//;ta};s/^'"${1}"':.*$/& '"${2}"'/' "${srcdir}/${3}"
|
||||
if ! egrep -q "${1}:.* ${2}" "${srcdir}/${3}"; then
|
||||
echo "${1}: ${2}" >> "${srcdir}/${3}"
|
||||
fi
|
||||
}
|
||||
sh_add_makefile_target_dep "$1" "$2" "$3"
|
||||
if test "$1" != ".PHONY"; then
|
||||
sh_add_makefile_target_dep ".PHONY" "$2" "$3"
|
||||
fi
|
||||
])
|
||||
|
||||
# Same as AC_SUBST, but adds a -Dname="value" option to CPPFLAGS and
|
||||
# not only a @name@ replacement, but also a @name_ENCODED@ one to be
|
||||
# used in code.
|
||||
# - parameters:
|
||||
# $1 = variable name
|
||||
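# - example (illustrative variable name and value): after
#     VERSION_INFO="1.2.3"
#     AX_SUBST(VERSION_INFO)
#   the compiler sees -DVERSION_INFO="1.2.3" via AM_CPPFLAGS, and *.in files
#   may reference @VERSION_INFO@ or @VERSION_INFO_ENCODED@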
AC_DEFUN([AX_SUBST], [
|
||||
[$1]_ENCODED=$(echo "${$1}" | awk 1 ORS='\\n' | sed 's,\\n$,,')
|
||||
[$1]_ENCODED=${[$1]_ENCODED//\"/\\\"}
|
||||
[$1]_ENCODED=${[$1]_ENCODED//\'/\'\"\'\"\'}
|
||||
[$1]_ENCODED=${[$1]_ENCODED//#/\\#}
|
||||
AM_CPPFLAGS+=" '-D$1=\"${[$1]_ENCODED}\"'"
|
||||
AC_SUBST([$1])
|
||||
AC_SUBST([$1]_ENCODED)
|
||||
AC_SUBST(AM_CPPFLAGS)
|
||||
])
|
||||
|
||||
# must be called on the right position in configure.ac
|
||||
#
|
||||
# configure.ac must start with:
|
||||
#
|
||||
# m4_define(x_package_name, YOUR_PACKAGE_NAME) # project's name
|
||||
# m4_define(x_major, MAJOR_NUMBER) # project's major version
|
||||
# m4_define(x_minor, MINOR_NUMBER) # project's minor version
|
||||
# m4_include(ax_init_standard_project.m4)
|
||||
# AC_INIT(x_package_name, x_version, x_bugreport, x_package_name)
|
||||
# AM_INIT_AUTOMAKE([1.9 tar-pax parallel-tests color-tests])
|
||||
# AX_INIT_STANDARD_PROJECT
|
||||
#
|
||||
# change nothing except YOUR_PACKAGE_NAME, MAJOR_NUMBER and MINOR_NUMBER
|
||||
#
|
||||
# configures the basic environment
|
||||
AC_DEFUN([AX_INIT_STANDARD_PROJECT], [
|
||||
PREFIX=$(test "$prefix" = NONE && prefix=$ac_default_prefix; eval echo "${prefix}")
|
||||
AX_SUBST(PREFIX)
|
||||
SYSCONFDIR=$(test "$prefix" = NONE && prefix=$ac_default_prefix; eval echo "${sysconfdir}")
|
||||
AX_SUBST(SYSCONFDIR)
|
||||
PKGSYSCONFDIR=$(test "$prefix" = NONE && prefix=$ac_default_prefix; eval echo "${SYSCONFDIR}/${PACKAGE_NAME}")
|
||||
AX_SUBST(PKGSYSCONFDIR)
|
||||
DATADIR=$(test "$prefix" = NONE && prefix=$ac_default_prefix; eval echo "${datadir}")
|
||||
AX_SUBST(DATADIR)
|
||||
PKGDATADIR=$(test "$prefix" = NONE && prefix=$ac_default_prefix; eval echo "${DATADIR}/${PACKAGE_NAME}")
|
||||
AX_SUBST(PKGDATADIR)
|
||||
LOCALSTATEDIR=$(test "$prefix" = NONE && prefix=$ac_default_prefix; eval echo "${localstatedir}")
|
||||
AX_SUBST(LOCALSTATEDIR)
|
||||
AC_MSG_CHECKING([target platform])
|
||||
UNIX=1
|
||||
MINGW=
|
||||
MACOSX=
|
||||
for h in ${target} ${target_os} ${host} ${host_os} \
|
||||
${build} ${build_os} $(uname -s 2> /dev/null); do
|
||||
p="$h is generic Unix"
|
||||
case "$h" in
|
||||
(*mingw*)
|
||||
UNIX=; MINGW=1; p="MinGW"; break;;
|
||||
(*Darwin*|*darwin*|*rhapsody*|*macosx*)
|
||||
UNIX=; MACOSX=1; p="MacOSX"; break;;
|
||||
esac
|
||||
done
|
||||
AC_MSG_RESULT($p)
|
||||
AM_CONDITIONAL(UNIX, test "$UNIX" = "1")
|
||||
AM_CONDITIONAL(MINGW, test "$MINGW" = "1")
|
||||
AM_CONDITIONAL(MACOSX, test "$MACOSX" = "1")
|
||||
AX_SUBST(UNIX)
|
||||
AX_SUBST(MINGW)
|
||||
AX_SUBST(MACOSX)
|
||||
AM_CPPFLAGS+=" '-DMAKE_STRING(X)=\#X' '-DNAMESPACE=${PACKAGE_TARNAME//[^a-zA-Z0-9]/_}'"
|
||||
AX_SUBST(NUMBERS)
|
||||
AX_SUBST(HOME)
|
||||
if test -f README.md; then
|
||||
README_FILE=README.md
|
||||
README=$(tail -n +3 README.md)
|
||||
DESCRIPTION=$(head -1 README.md | sed 's,^#\+ *,,;s, *#\+$,,')
|
||||
else
|
||||
README_FILE=README
|
||||
README=$(tail -n +3 README)
|
||||
DESCRIPTION=$(head -1 README)
|
||||
fi
|
||||
README_ESCAPED=$(echo "$README" | sed ':a;N;$!ba;s/\n/\\n/g;s,",\\",g')
|
||||
if which pandoc 2>&1 > /dev/null; then
|
||||
README_HTML=$(echo "$README" | pandoc -f markdown_github -t html | sed ':a;N;$!ba;s,\\\(.\),\\\\<span>\1</span>,g;s/\n/\\n/g;s,",\\",g;s, ,\ \ ,g')
|
||||
else
|
||||
README_HTML="${README}"
|
||||
fi
|
||||
AX_SUBST(README_FILE)
|
||||
AX_SUBST(README)
|
||||
_AM_SUBST_NOTMAKE([README])
|
||||
AX_SUBST(README_ESCAPED)
|
||||
_AM_SUBST_NOTMAKE([README_ESCAPED])
|
||||
AX_SUBST(README_HTML)
|
||||
_AM_SUBST_NOTMAKE([README_HTML])
|
||||
AX_SUBST(DESCRIPTION)
|
||||
_AM_SUBST_NOTMAKE([DESCRIPTION])
|
||||
LICENSE=$(echo $(head -1 COPYING))
|
||||
AX_SUBST(LICENSE)
|
||||
COPYING=$(<COPYING)
|
||||
AX_SUBST(COPYING)
|
||||
_AM_SUBST_NOTMAKE([COPYING])
|
||||
CHANGELOG=$(<ChangeLog)
|
||||
AC_SUBST(CHANGELOG)
|
||||
_AM_SUBST_NOTMAKE([CHANGELOG])
|
||||
DEB_CHANGELOG=$(sed '/^[[^\t]]/{h;N;d};s,\t, ,g;/^ \* /{s,,,;H;g;s,^, * ,;s,\n\([[^ ]]*\) *, \1\n ,}' ChangeLog)
|
||||
if test -z "$DEB_CHANGELOG"; then
|
||||
DEB_CHANGELOG=" * see file ChangeLog and project management web site"
|
||||
fi
|
||||
AC_SUBST(DEB_CHANGELOG)
|
||||
_AM_SUBST_NOTMAKE([DEB_CHANGELOG])
|
||||
AUTHOR=$(head -1 AUTHORS)
|
||||
AUTHOR_NAME=$(echo $AUTHOR | sed 's, *[[<(]].*$,,')
|
||||
AUTHOR_URL=$(echo $AUTHOR | sed 's,.*(\(http[[^)]]*\)).*,\1,')
|
||||
AUTHOR_MAIL=$(echo $AUTHOR | sed 's,.*<\(.*@.*\)>.*,\1,')
|
||||
PACKAGER=$(gpg -K --display-charset utf-8 --lock-never 2>/dev/null | sed -n 's,uid *\(\[[ultimate\]] *\)\?,,p' | head -1)
|
||||
if test -z "${PACKAGER}"; then
|
||||
PACKAGER="$AUTHOR"
|
||||
fi
|
||||
AX_SUBST(AUTHOR)
|
||||
_AM_SUBST_NOTMAKE([AUTHOR])
|
||||
AX_SUBST(AUTHOR_NAME)
|
||||
AX_SUBST(AUTHOR_URL)
|
||||
AX_SUBST(AUTHOR_MAIL)
|
||||
AX_SUBST(PACKAGER)
|
||||
PROJECT_URL="${PROJECT_URL:-${AUTHOR_URL}}"
|
||||
for path in . .. ../.. ../../..; do
|
||||
if test -d ${path}/.svn; then
|
||||
PROJECT_URL="$(LANG= svn info | sed -n 's,^Repository Root: ,,p')"
|
||||
break
|
||||
elif test -d ${path}/.git; then
|
||||
PROJECT_URL="$(git remote get-url origin)"
|
||||
break
|
||||
fi
|
||||
done
|
||||
SOURCE_DOWNLOAD="${SOURCE_DOWNLOAD:-${PROJECT_URL}}"
|
||||
AX_SUBST(PROJECT_URL)
|
||||
AX_SUBST(SOURCE_DOWNLOAD)
|
||||
VENDOR=$((lsb_release -is 2>/dev/null || echo unknown) | tr ' ' '_')
|
||||
AX_SUBST(VENDOR)
|
||||
DISTRO=$(lsb_release -sc 2>/dev/null || uname -s 2>/dev/null)
|
||||
if test "${DISTRO}" = "n/a"; then
|
||||
DISTRO="${VENDOR}_$(lsb_release -sr 2>/dev/null | tr ' ' '_')"
|
||||
fi
|
||||
AX_SUBST(DISTRO)
|
||||
ARCH=$((@<:@@<:@ $(uname -sm) =~ 64 @:>@@:>@ && echo amd64) || (@<:@@<:@ $(uname -sm) =~ 'i?86' @:>@@:>@ && echo i386 || uname -sm))
|
||||
AX_SUBST(ARCH)
|
||||
DISTRIBUTOR=$(lsb_release -si 2>/dev/null || uname -s 2>/dev/null)
|
||||
case "${DISTRIBUTOR// /-}" in
|
||||
(Ubuntu) UBUNTU=1; AX_SUBST(UBUNTU);;
|
||||
(Debian) DEBIAN=1; AX_SUBST(DEBIAN);;
|
||||
(SUSE-LINUX) SUSE=1; AX_SUBST(SUSE);;
|
||||
(Fedora) FEDORA=1; AX_SUBST(FEDORA);;
|
||||
(Centos) CENTOS=1; AX_SUBST(CENTOS);;
|
||||
esac
|
||||
AX_SUBST(DISTRIBUTOR)
|
||||
BUILD_NUMBER=${BUILD_NUMBER:-1}
|
||||
AX_SUBST(BUILD_NUMBER)
|
||||
BUILD_DATE=$(LANG= date +"%a, %d %b %Y %H:%M:%S %z")
|
||||
AX_SUBST(BUILD_DATE)
|
||||
if test -f "${PACKAGE_NAME}.desktop.in"; then
|
||||
PACKAGE_DESKTOP="${PACKAGE_NAME}.desktop"
|
||||
fi
|
||||
AX_SUBST(PACKAGE_DESKTOP)
|
||||
if test -f "${PACKAGE_NAME}-logo.png"; then
|
||||
PACKAGE_LOGO="${PACKAGE_NAME}-logo.png"
|
||||
fi
|
||||
AX_SUBST(PACKAGE_LOGO)
|
||||
if test -f "${PACKAGE_NAME}-icon.svg"; then
|
||||
PACKAGE_ICON="${PACKAGE_NAME}-icon.svg"
|
||||
elif test -f "${PACKAGE_NAME}-icon.png"; then
|
||||
PACKAGE_ICON="${PACKAGE_NAME}-icon.png"
|
||||
elif test -f "${PACKAGE_NAME}.svg"; then
|
||||
PACKAGE_ICON="${PACKAGE_NAME}.svg"
|
||||
elif test -f "${PACKAGE_NAME}.png"; then
|
||||
PACKAGE_ICON="${PACKAGE_NAME}.png"
|
||||
fi
|
||||
AX_SUBST(PACKAGE_ICON)
|
||||
|
||||
AC_ARG_ENABLE(pedantic,
|
||||
[AS_HELP_STRING([--enable-pedantic],
|
||||
[enable all warnings and checks, abort on warnings])],
|
||||
[have_pedantic="$enableval"; test "$enableval" = "yes" && \
|
||||
AM_CXXFLAGS="${AM_CXXFLAGS:-} -pedantic-errors -Wall -W -Wfloat-equal -Wundef -Wendif-labels -Wpointer-arith -Wcast-align -Wwrite-strings -Wconversion -Wsign-compare -Wmissing-format-attribute -Wno-multichar -Wpacked -Wredundant-decls -Werror -Wshadow -Wcast-qual -Wno-ctor-dtor-privacy"])
|
||||
dnl problem in libs: -Wshadow -Wcast-qual
|
||||
dnl auto.hpp: -Wno-ctor-dtor-privacy (removed)
|
||||
AM_CONDITIONAL(PEDANTIC, test "$enableval" = "yes")
|
||||
if test "$have_pedantic" == "yes"; then
|
||||
AC_MSG_NOTICE([Pedantic compile mode enabled!
|
||||
- all warnings for GNU g++ are enabled
|
||||
- all warnings result in an error
|
||||
- doxygen warnings are treated as error too]); fi
|
||||
|
||||
AC_ARG_ENABLE(debug,
|
||||
[AS_HELP_STRING([--enable-debug],
|
||||
[compile for debugger])],
|
||||
[have_debug="$enableval"], [have_debug="no"])
|
||||
AM_CONDITIONAL(DEBUG, test "$have_debug" = "yes")
|
||||
if test "$have_debug" == "yes"; then
|
||||
AC_MSG_NOTICE([Debug compile mode enabled]);
|
||||
AM_CPPFLAGS="${AM_CPPFLAGS} -DDEBUG"
|
||||
AM_CXXFLAGS="${AM_CXXFLAGS:-} -ggdb3 -O0"
|
||||
AM_LDFLAGS="${AM_LDFLAGS} -ggdb3 -O0"
|
||||
else
|
||||
AM_CPPFLAGS="${AM_CPPFLAGS} -DQT_NO_DEBUG_OUTPUT -DQT_NO_DEBUG"
|
||||
fi
|
||||
|
||||
AC_ARG_WITH(gcov,
|
||||
[AS_HELP_STRING([--with-gcov=FILE],
|
||||
[enable gcov, set gcov file (defaults to gcov)])],
|
||||
[GCOV="$enableval"], [GCOV="no"])
|
||||
AM_CONDITIONAL(COVERAGE, test "$GCOV" != "no")
|
||||
if test "$GCOV" != "no"; then
|
||||
if test "$GCOV" == "yes"; then
|
||||
GCOV=gcov
|
||||
fi
|
||||
AC_CHECK_PROG(has_gcov, [$GCOV], [yes], [no])
|
||||
if test "$has_gcov" != "yes"; then
|
||||
AC_MSG_ERROR([gcov: program $GCOV not found])
|
||||
fi
|
||||
AC_MSG_NOTICE([Coverage tests enabled, using ${GCOV}]);
|
||||
AM_CXXFLAGS="${AM_CXXFLAGS:-} -O0 --coverage -fprofile-arcs -ftest-coverage"
|
||||
AM_LDFLAGS="${AM_LDFLAGS} -O0 --coverage -fprofile-arcs"
|
||||
AX_SUBST(GCOV)
|
||||
fi
|
||||
|
||||
if test -f ${PACKAGE_NAME}.desktop.in; then
|
||||
AC_CONFIG_FILES([${PACKAGE_NAME}.desktop])
|
||||
fi
|
||||
|
||||
AC_CONFIG_FILES([makefile])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([clean-am], [clean-standard-project-targets], [makefile.in])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([distclean-am], [distclean-standard-project-targets], [makefile.in])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([maintainer-clean-am], [maintainer-clean-standard-project-targets], [makefile.in])
|
||||
test -f makefile.in && cat >> makefile.in <<EOF
|
||||
#### Begin: Appended by $0
|
||||
EXTRA_DIST += bootstrap.sh ax_init_standard_project.m4 ax_cxx_compile_stdcxx.m4 \
|
||||
ax_check_qt.m4 resolve-debbuilddeps.sh resolve-rpmbuilddeps.sh \
|
||||
build-resource-file.sh mac-create-app-bundle.sh
|
||||
|
||||
clean-standard-project-targets:
|
||||
-rm -rf \${PACKAGE_NAME}-\${PACKAGE_VERSION}
|
||||
-rm \${PACKAGE_TARNAME}-\${PACKAGE_VERSION}.tar.gz
|
||||
distclean-standard-project-targets:
|
||||
-rm -r autom4te.cache
|
||||
-rm aclocal.m4 config.guess config.sub configure depcomp compile install-sh ltmain.sh makefile missing mkinstalldirs test-driver
|
||||
maintainer-clean-standard-project-targets:
|
||||
-rm makefile.in
|
||||
#### End: $0
|
||||
EOF
|
||||
])
|
||||
|
||||
# use this in configure.ac to support C++
|
||||
AC_DEFUN([AX_USE_CXX], [
|
||||
m4_include(ax_cxx_compile_stdcxx.m4)
|
||||
AC_LANG(C++)
|
||||
AX_CXX_COMPILE_STDCXX([17], [noext], [optional])
|
||||
AC_PROG_CXX
|
||||
AC_PROG_CPP
|
||||
|
||||
AC_CONFIG_FILES([src/makefile])
|
||||
|
||||
AM_CPPFLAGS+=' -I ${top_srcdir}/src -I ${top_builddir}/src -I ${srcdir} -I ${builddir}'
|
||||
AM_LDFLAGS+=' -L ${top_srcdir}/src -L ${top_builddir}/src'
|
||||
|
||||
# Get rid of those stupid -g -O2 options!
|
||||
CXXFLAGS="${CXXFLAGS//-g -O2/}"
|
||||
CFLAGS="${CFLAGS//-g -O2/}"
|
||||
|
||||
# pass compile flags to make distcheck
|
||||
AM_DISTCHECK_CONFIGURE_FLAGS="CXXFLAGS='${CXXFLAGS}' CPPFLAGS='${CPPFLAGS}' CFLAGS='${CFLAGS}' LDFLAGS='${LDFLAGS}'"
|
||||
AC_SUBST(AM_DISTCHECK_CONFIGURE_FLAGS)
|
||||
|
||||
AC_SUBST(AM_CXXFLAGS)
|
||||
AC_SUBST(AM_CPPFLAGS)
|
||||
AC_SUBST(AM_LDFLAGS)
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([maintainer-clean-am], [maintainer-clean-cxx-targets], [src/makefile.in])
|
||||
test -f src/makefile.in && cat >> src/makefile.in <<EOF
|
||||
#### Begin: Appended by $0
|
||||
%.app: %
|
||||
-rm -r [\$][@]
|
||||
\$(MAKE) DESTDIR=[\$][\$](pwd)/tmp install
|
||||
QTDIR="\${QTDIR}" \
|
||||
QT_PLUGINS="\${QT_PLUGINS}" \
|
||||
QT_PLUGIN_PATH="\${QT_PLUGIN_PATH}" \
|
||||
\${top_builddir}/mac-create-app-bundle.sh \
|
||||
[\$][@] [\$][<] [\$][\$](pwd)/tmp[\$]{prefix}
|
||||
-rm -rf tmp
|
||||
|
||||
maintainer-clean-cxx-targets:
|
||||
-rm makefile.in
|
||||
#### End: $0
|
||||
EOF
|
||||
])
|
||||
|
||||
# require specific C++ version
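#
# e.g. a project that needs at least C++14 would simply put the following
# line into configure.ac (sketch):
#
#   AX_USE_CXX_14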
|
||||
AC_DEFUN([AX_USE_CXX_11], [
|
||||
AX_USE_CXX
|
||||
AX_CXX_COMPILE_STDCXX([11], [noext], [mandatory])
|
||||
])
|
||||
AC_DEFUN([AX_USE_CXX_14], [
|
||||
AX_USE_CXX
|
||||
AX_CXX_COMPILE_STDCXX([14], [noext], [mandatory])
|
||||
])
|
||||
AC_DEFUN([AX_USE_CXX_17], [
|
||||
AX_USE_CXX
|
||||
AX_CXX_COMPILE_STDCXX([17], [noext], [mandatory])
|
||||
])
|
||||
|
||||
# use this in configure.ac to support old school C
|
||||
AC_DEFUN([AX_USE_C], [
|
||||
AC_LANG(C)
|
||||
AC_PROG_CC
|
||||
AC_PROG_CPP
|
||||
|
||||
AC_CONFIG_FILES([src/makefile])
|
||||
|
||||
AM_CPPFLAGS+=' -I ${top_srcdir}/src -I ${top_builddir}/src -I ${srcdir} -I ${builddir}'
|
||||
AM_LDFLAGS+=' -L ${top_srcdir}/src -L ${top_builddir}/src'
|
||||
|
||||
# Get rid of those stupid -g -O2 options!
|
||||
CXXFLAGS="${CXXFLAGS//-g -O2/}"
|
||||
CFLAGS="${CFLAGS//-g -O2/}"
|
||||
|
||||
# pass compile flags to make distcheck
|
||||
AM_DISTCHECK_CONFIGURE_FLAGS="CFLAGS='${CFLAGS}' CPPFLAGS='${CPPFLAGS}' CFLAGS='${CFLAGS}' LDFLAGS='${LDFLAGS}'"
|
||||
AC_SUBST(AM_DISTCHECK_CONFIGURE_FLAGS)
|
||||
|
||||
AC_SUBST(AM_CFLAGS)
|
||||
AC_SUBST(AM_CPPFLAGS)
|
||||
AC_SUBST(AM_LDFLAGS)
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([maintainer-clean-am], [maintainer-clean-c-targets], [src/makefile.in])
|
||||
test -f src/makefile.in && cat >> src/makefile.in <<EOF
|
||||
#### Begin: Appended by $0
|
||||
%.app: %
|
||||
-rm -r [\$][@]
|
||||
\$(MAKE) DESTDIR=[\$][\$](pwd)/[\$][@]/tmp install
|
||||
\${top_builddir}/mac-create-app-bundle.sh \
|
||||
[\$][@] [\$][<] [\$][\$](pwd)/[\$][@]/tmp\${prefix}
|
||||
|
||||
maintainer-clean-c-targets:
|
||||
-rm makefile.in
|
||||
#### End: $0
|
||||
EOF
|
||||
])
|
||||
|
||||
# use this in configure.ac to support tests without CppUnit
|
||||
AC_DEFUN([AX_BUILD_TEST], [
|
||||
AC_CONFIG_FILES([test/makefile])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([maintainer-clean-am], [maintainer-clean-test-targets], [test/makefile.in])
|
||||
test -f test/makefile.in && cat >> test/makefile.in <<EOF
|
||||
#### Begin: Appended by $0
|
||||
maintainer-clean-test-targets:
|
||||
-rm makefile.in
|
||||
#### End: $0
|
||||
EOF
|
||||
])
|
||||
|
||||
# use this in configure.ac to support CppUnit for C++ unit tests
|
||||
AC_DEFUN([AX_USE_CPPUNIT], [
|
||||
PKG_CHECK_MODULES(CPPUNIT, cppunit, [have_cppunit="yes"], [have_cppunit="no"])
|
||||
# infos and warnings
|
||||
if test "$have_cppunit" = "no"; then
|
||||
AC_MSG_WARN([Missing cppunit development library!
|
||||
- you cannot check the project using "make check"
|
||||
- everything else works perfectly]); fi
|
||||
AX_BUILD_TEST
|
||||
])
|
||||
|
||||
# use this in configure.ac to support C++ examples
|
||||
AC_DEFUN([AX_BUILD_EXAMPLES], [
|
||||
AC_CONFIG_FILES([examples/makefile])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([maintainer-clean-am], [maintainer-clean-example-targets], [examples/makefile.in])
|
||||
test -f examples/makefile.in && cat >> examples/makefile.in <<EOF
|
||||
#### Begin: Appended by $0
|
||||
maintainer-clean-example-targets:
|
||||
-rm makefile.in
|
||||
#### End: $0
|
||||
EOF
|
||||
])
|
||||
|
||||
# use this in configure.ac to support NodeJS
|
||||
AC_DEFUN([AX_USE_NODEJS], [
|
||||
AC_PATH_PROG(ANDROID, [android], [0],
|
||||
[${PATH}${PATH_SEPARATOR}${ANDROID_HOME}/tools])
|
||||
AC_CONFIG_FILES([nodejs/package.json])
|
||||
AC_CONFIG_FILES([nodejs/makefile])
|
||||
if test -z "${DEB_SECTION}"; then
|
||||
AX_DEB_SECTION([web])
|
||||
fi
|
||||
if test -z "${RPM_GROUP}"; then
|
||||
AX_RPM_GROUP([Applications/Internet])
|
||||
fi
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([maintainer-clean-am], [maintainer-clean-nodejs-targets], [nodejs/makefile.in])
|
||||
test -f nodejs/makefile.in && cat >> nodejs/makefile.in <<EOF
|
||||
#### Begin: Appended by $0
|
||||
maintainer-clean-nodejs-targets:
|
||||
-rm makefile.in
|
||||
#### End: $0
|
||||
EOF
|
||||
])
|
||||
|
||||
# use this in configure.ac to support Cordova
|
||||
AC_DEFUN([AX_USE_CORDOVA], [
|
||||
AC_PATH_PROG(ANDROID, [android], [0],
|
||||
[${PATH}${PATH_SEPARATOR}${ANDROID_HOME}/tools])
|
||||
AC_PATH_PROG(CORDOVA, [cordova], [0],
|
||||
[${PATH}${PATH_SEPARATOR}$(pwd)/node_modules/cordova/bin])
|
||||
if test ${CORDOVA} = 0; then
|
||||
AC_MSG_WARN([cordova is missing, on ubuntu install cordova-cli from repository ppa:cordova-ubuntu/ppa])
|
||||
fi
|
||||
if test ${ANDROID} = 0; then
|
||||
AC_MSG_WARN([android sdk is missing, set variable ANDROID_HOME after installation])
|
||||
fi
|
||||
AM_CONDITIONAL(HAVE_CORDOVA, [test ${CORDOVA} != 0 -a ${ANDROID} != 0])
|
||||
AX_SUBST(CORDOVA)
|
||||
AC_CONFIG_FILES([cordova/makefile])
|
||||
AC_CONFIG_FILES([cordova/config.xml])
|
||||
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([maintainer-clean-am], [maintainer-clean-cordova-targets], [cordova/makefile.in])
|
||||
test -f cordova/makefile.in && cat >> cordova/makefile.in <<EOF
|
||||
#### Begin: Appended by $0
|
||||
maintainer-clean-cordova-targets:
|
||||
-rm makefile.in
|
||||
#### End: $0
|
||||
EOF
|
||||
])
|
||||
|
||||
# use this in configure.ac to support HTML data for webservers
|
||||
AC_DEFUN([AX_BUILD_HTML], [
|
||||
AC_CONFIG_FILES([html/makefile])
|
||||
if test -z "${DEB_SECTION}"; then
|
||||
AX_DEB_SECTION([web])
|
||||
fi
|
||||
if test -z "${RPM_GROUP}"; then
|
||||
AX_RPM_GROUP([Applications/Internet])
|
||||
fi
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([maintainer-clean-am], [maintainer-clean-html-targets], [html/makefile.in])
|
||||
test -f html/makefile.in && cat >> html/makefile.in <<EOF
|
||||
#### Begin: Appended by $0
|
||||
maintainer-clean-html-targets:
|
||||
-rm makefile.in
|
||||
#### End: $0
|
||||
EOF
|
||||
])
|
||||
|
||||
# use this in configure.ac to support HTML data for webservers
|
||||
AC_DEFUN([AX_BUILD_HTML_NPM], [
|
||||
AC_CONFIG_FILES([html/package.json])
|
||||
AX_BUILD_HTML
|
||||
])
|
||||
|
||||
# use this in configure.ac to support C++ libraries
|
||||
AC_DEFUN([AX_USE_LIBTOOL], [
|
||||
# libtool versioning
|
||||
LIB_MAJOR=m4_eval(x_major+x_minor+x_minor_diff)
|
||||
LIB_MINOR=x_least
|
||||
LIB_LEAST=m4_eval(x_minor+x_minor_diff)
|
||||
LIB_VERSION="${LIB_MAJOR}:${LIB_MINOR}:${LIB_LEAST}"
|
||||
AM_LDFLAGS="-version-info ${LIB_VERSION}"
|
||||
AC_SUBST(AM_LDFLAGS)
|
||||
AC_SUBST(LIB_VERSION)
|
||||
AC_PROG_LIBTOOL
|
||||
AC_CONFIG_FILES([src/${PACKAGE_NAME}.pc])
|
||||
if test -z "${DEB_SECTION}"; then
|
||||
AX_DEB_SECTION([devel])
|
||||
fi
|
||||
if test -z "${RPM_GROUP}"; then
|
||||
AX_RPM_GROUP([Development/Libraries])
|
||||
fi
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([install-data-am], [install-data-libtool-pkg], [src/makefile.in])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([uninstall-am], [uninstall-data-am], [src/makefile.in])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([uninstall-data-am], [uninstall-data-libtool-pkg], [src/makefile.in])
|
||||
test -f src/makefile.in && cat >> src/makefile.in <<EOF
|
||||
#### Begin: Appended by $0
|
||||
install-data-libtool-pkg:
|
||||
test -d \$(DESTDIR)\${libdir}/pkgconfig || mkdir -p \$(DESTDIR)\${libdir}/pkgconfig
|
||||
chmod -R u+w \$(DESTDIR)\${libdir}/pkgconfig
|
||||
cp \${PACKAGE_NAME}.pc \$(DESTDIR)\${libdir}/pkgconfig/
|
||||
uninstall-data-libtool-pkg:
|
||||
-chmod -R u+w \$(DESTDIR)\${libdir}/pkgconfig
|
||||
-rm -f \$(DESTDIR)\${libdir}/pkgconfig/\${PACKAGE_NAME}.pc
|
||||
#### End: $0
|
||||
EOF
|
||||
])
|
||||
|
||||
# use this in configure.ac to support debian packages
|
||||
# - $1: optional debian package section
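#
# usage sketch (the section name below is only an illustration):
#
#   AX_USE_DEBIAN_PACKAGING([devel])
#
# afterwards "make deb" builds the Debian packages from the dist tree.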
|
||||
AC_DEFUN([AX_USE_DEBIAN_PACKAGING], [
|
||||
if test -n "$1"; then
|
||||
AX_DEB_SECTION([$1])
|
||||
fi
|
||||
if test -f README.md; then
|
||||
README_DEB=$(tail -n +3 README.md | sed -e 's/^ *$/./g' -e 's/^/ /g')
|
||||
else
|
||||
README_DEB=$(tail -n +3 README | sed -e 's/^ *$/./g' -e 's/^/ /g')
|
||||
fi
|
||||
AC_SUBST(README_DEB)
|
||||
_AM_SUBST_NOTMAKE([README_DEB])
|
||||
AC_CONFIG_FILES([debian/changelog debian/control])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([clean-am], [clean-debian-targets], [makefile.in])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([distclean-am], [distclean-debian-targets], [makefile.in])
|
||||
test -f makefile.in && cat >> makefile.in <<EOF
|
||||
#### Begin: Appended by $0
|
||||
EXTRA_DIST += debian
|
||||
clean-debian-targets:
|
||||
-rm -rf \${PACKAGE_NAME}_\${PACKAGE_VERSION}~\${DISTRO}.\${BUILD_NUMBER}.{dsc,tar.gz} \${PACKAGE_NAME}_\${PACKAGE_VERSION}~\${DISTRO}.\${BUILD_NUMBER}*.changes \$\$(sed -n 's,Package: \(.*\),\1_${PACKAGE_VERSION}~${DISTRO}.${BUILD_NUMBER}*.deb,p;' debian/control)
|
||||
deb: distdir
|
||||
cd \${PACKAGE_NAME}-\${PACKAGE_VERSION} && ( export CFLAGS="\${CFLAGS}"; export CPPFLAGS="\${CPPFLAGS}"; export CXXFLAGS="\${CXXFLAGS}"; export LDFLAGS="\${LDFLAGS}"; export DEB_CFLAGS_APPEND="\${CFLAGS}"; export DEB_CPPFLAGS_APPEND="\${CPPFLAGS}"; export DEB_CXXFLAGS_APPEND="\${CXXFLAGS}"; export DEB_LDFLAGS_APPEND="\${LDFLAGS}"; dpkg-buildpackage )
|
||||
gpg --verify \${PACKAGE_NAME}_\${PACKAGE_VERSION}~\${DISTRO}.\${BUILD_NUMBER}.dsc
|
||||
distclean-debian-targets:
|
||||
-rm debian/changelog debian/control
|
||||
#### End: $0
|
||||
EOF
|
||||
])
|
||||
|
||||
# use this in configure.ac to support RPM packages
|
||||
# - $1: optional rpm package group
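#
# usage sketch (the group name below is only an illustration):
#
#   AX_USE_RPM_PACKAGING([Development/Libraries])
#
# afterwards "make rpm" builds the RPM packages from the generated spec file.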
|
||||
AC_DEFUN([AX_USE_RPM_PACKAGING], [
|
||||
if test -n "$1"; then
|
||||
AX_RPM_GROUP([$1])
|
||||
fi
|
||||
AC_CONFIG_FILES([${PACKAGE_NAME}.spec])
|
||||
#AX_ADD_MAKEFILE_TARGET_DEP([clean-am], [clean-rpm-targets], [makefile.in])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([clean-am], [clean-rpm-targets], [makefile.in])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([distclean-am], [distclean-rpm-targets], [makefile.in])
|
||||
test -f makefile.in && cat >> makefile.in <<EOF
|
||||
#### Begin: Appended by $0
|
||||
EXTRA_DIST += \${PACKAGE_NAME}.spec.in
|
||||
rpm: dist
|
||||
rpmbuild -ba --define "_topdir \$\$(pwd)" --define "_sourcedir \$\$(pwd)" \${PACKAGE_NAME}.spec
|
||||
./rpmsign.exp "\${PACKAGER}" "\${PASSWORD}" RPMS/*/*.rpm SRPMS/*.rpm
|
||||
clean-rpm-targets:
|
||||
-rm -rf BUILD BUILDROOT RPMS SPECS SRPMS
|
||||
distclean-rpm-targets:
|
||||
-rm \${PACKAGE_NAME}.spec
|
||||
#### End: $0
|
||||
EOF
|
||||
])
|
||||
|
||||
# use this in configure.ac to support scripts, e.g. bash scripts
|
||||
AC_DEFUN([AX_USE_ETC], [
|
||||
AC_CONFIG_FILES([etc/makefile])
|
||||
])
|
||||
|
||||
# use this in configure.ac to support scripts, e.g. bash scripts
|
||||
AC_DEFUN([AX_USE_SCRIPTS], [
|
||||
AC_CONFIG_FILES([scripts/makefile])
|
||||
])
|
||||
|
||||
# use this in configure.ac to support Doxygen documentation generation
|
||||
AC_DEFUN([AX_USE_DOXYGEN], [
|
||||
AC_CHECK_PROG(have_doxygen, doxygen, yes, no)
|
||||
AC_CHECK_PROG(have_dot, dot, yes, no)
|
||||
AC_CHECK_PROG(have_mscgen, mscgen, yes, no)
|
||||
PDF_DOC=${PACKAGE_NAME}-${PACKAGE_VERSION}.pdf
|
||||
AC_SUBST(PDF_DOC)
|
||||
if test "$have_doxygen" = "no"; then
|
||||
AC_MSG_WARN([Missing program doxygen!
|
||||
- you cannot rebuild the documentation
|
||||
- there are precompiled derived files in the distribution]); fi
|
||||
if test "$have_dot" = "no"; then
|
||||
AC_MSG_WARN([Missing program dot!
|
||||
- when you rebuild documentation, there are no generated images
|
||||
- there are precompiled derived files in the distribution]); fi
|
||||
if test "$have_mscgen" = "no"; then
|
||||
AC_MSG_WARN([Missing program mscgen!
|
||||
- when you rebuild documentation, there are no message state charts
|
||||
- there are precompiled derived files in the distribution]); fi
|
||||
AC_CONFIG_FILES([doc/makefile doc/doxyfile doc/header.html doc/footer.html])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([clean-am], [clean-documentation], [doc/makefile.in])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([distclean-am], [distclean-documentation], [doc/makefile.in])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([maintainer-clean-am], [maintainer-clean-documentation], [doc/makefile.in])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([install-data-am], [install-data-documentation], [doc/makefile.in])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([uninstall-am], [uninstall-documentation], [doc/makefile.in])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([all], [doc], [doc/makefile.in])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([.PHONY], [pdf gen-uml-images], [doc/makefile.in])
|
||||
test -f doc/makefile.in && cat >> doc/makefile.in <<EOF
|
||||
#### Begin: Appended by $0
|
||||
doc: doxyfile
|
||||
doxygen doxyfile
|
||||
@PEDANTIC_TRUE@ test \! -s doxygen.errors
|
||||
|
||||
clean-documentation:
|
||||
-rm doxygen.errors @PDF_DOC@
|
||||
distclean-documentation:
|
||||
-rm -r html
|
||||
-rm @PACKAGE_NAME@.doxytag
|
||||
maintainer-clean-documentation:
|
||||
-rm makefile.in
|
||||
install-data-documentation:
|
||||
test -d \$(DESTDIR)\${docdir} || mkdir -p \$(DESTDIR)\${docdir}
|
||||
chmod -R u+w \$(DESTDIR)\${docdir}
|
||||
cp -r html \$(DESTDIR)\${docdir}/
|
||||
uninstall-documentation:
|
||||
-chmod -R u+w \$(DESTDIR)\${docdir}
|
||||
-rm -rf \$(DESTDIR)\${docdir}/html
|
||||
#### End: $0
|
||||
EOF
|
||||
])
|
||||
|
||||
# use this in configure.ac to support Doxygen documentation generation
|
||||
AC_DEFUN([AX_USE_PERLDOC], [
|
||||
PERL_SOURCES="m4_default([$1], [perl])"
|
||||
AX_SUBST(PERL_SOURCES)
|
||||
if test -z "$PERL_SOURCES"; then
|
||||
AC_MSG_ERROR([You must specify the path to perl files
|
||||
- use [AX]_[USE]_PERLDOC([[paths to perldoc]])]); fi
|
||||
AC_CHECK_PROG(have_perldoc, pods2html, yes, no)
|
||||
if test "$have_doxygen" = "no"; then
|
||||
AC_MSG_WARN([Missing program pods2html!
|
||||
- you cannot rebuild the documentation
|
||||
- there are precompiled derived files in the distribution
|
||||
- if you need to generate documentation, install libpod-tree-perl]); fi
|
||||
AC_CONFIG_FILES([doc/makefile])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([distclean-am], [distclean-perldoc], [doc/makefile.in])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([maintainer-clean-am], [maintainer-clean-perldoc], [doc/makefile.in])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([install-data-am], [install-data-perldoc], [doc/makefile.in])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([uninstall-am], [uninstall-perldoc], [doc/makefile.in])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([all], [doc], [doc/makefile.in])
|
||||
AX_ADD_MAKEFILE_TARGET_DEP([.PHONY], [doc], [doc/makefile.in])
|
||||
test -f doc/makefile.in && cat >> doc/makefile.in <<EOF
|
||||
#### Begin: Appended by $0
|
||||
doc: perldoc/index.html
|
||||
|
||||
perldoc/index.html: \${PERL_SOURCES:%=perldoc/%}
|
||||
echo "<html><head><title>Perl Documentation</title></head><body><h1>Perl Documentation</h1><ul>" > perldoc/index.html
|
||||
for p in \${PERL_SOURCES:%=perldoc/%}; do \
|
||||
echo '<li><a href="'"\$\${p#perldoc/}"'/index.html">'"\$\${p#perldoc/}"'</a></li>' >> perldoc/index.html; \
|
||||
done
|
||||
echo "</ul></body></html>" >> perldoc/index.html
|
||||
|
||||
perldoc/%:
|
||||
pods2html --notoc --empty --index index @top_srcdir@/\${@:perldoc/%=%} \$[@]
|
||||
|
||||
distclean-perldoc:
|
||||
-rm -r perldoc
|
||||
maintainer-clean-perldoc:
|
||||
-rm makefile.in
|
||||
install-data-perldoc:
|
||||
test -d \$(DESTDIR)\${docdir} || mkdir -p \$(DESTDIR)\${docdir}
|
||||
chmod -R u+w \$(DESTDIR)\${docdir}
|
||||
cp -r perldoc \$(DESTDIR)\${docdir}/
|
||||
uninstall-perldoc:
|
||||
-chmod -R u+w \$(DESTDIR)\${docdir}
|
||||
-rm -rf \$(DESTDIR)\${docdir}/perldoc
|
||||
#### End: $0
|
||||
EOF
|
||||
])
|
||||
|
||||
# require a specific package, with fallback: test for a header
|
||||
# - parameter:
|
||||
# $1 = unique id (no special characters)
|
||||
# $2 = module name (optional, if different from id)
|
||||
# $3 = a header file to find (optional)
|
||||
# $4 = alternative module names (space separated, optional)
|
||||
# $5 = optional flags:
|
||||
# manualflags if CXXFLAGS, CPPFLAGS, LIBS should remain unchanged
|
||||
# $6 = optional parameters, allowed are (evaluated in this order):
|
||||
# - DEV_RPM_DIST_PKG=<name>
|
||||
# special name for the RPM development package
|
||||
# - DEV_DEB_DIST_PKG=<name>
|
||||
# special name for the debian development package
|
||||
# - DEV_DIST_PKG=<name>
|
||||
# if the name of the development package is different
|
||||
#
|
||||
# uses PKG_CHECK_MODULES to test for a module
|
||||
# then, if given, looks for the header file
|
||||
# if header file is not found, searches in alternative modules
|
||||
# sets all flags, so that the module can be used everywhere
|
||||
# fails if not found
|
||||
AC_DEFUN([AX_PKG_REQUIRE], [
|
||||
PKG_PROG_PKG_CONFIG
|
||||
optional_flags="$5"
|
||||
$1_found=no
|
||||
secondpar="m4_default([$2], [$1])"
|
||||
PKG_CHECK_MODULES([$1], [m4_default([$2], [$1])], [
|
||||
$1_found=$secondpar
|
||||
PKG_REQUIREMENTS+=" $secondpar"
|
||||
], [
|
||||
if test -n "$4"; then
|
||||
AC_MSG_WARN([Recommended package $secondpar for feature $1 not installed, trying $4])
|
||||
for pkg in $4; do
|
||||
PKG_CHECK_MODULES([$1], [$pkg], [
|
||||
PKG_REQUIREMENTS+=" $pkg"
|
||||
$1_found=$pkg
|
||||
break;
|
||||
], [
|
||||
AC_MSG_WARN([Recommended package $pkg for feature $1 not installed])
|
||||
])
|
||||
done
|
||||
fi
|
||||
])
|
||||
AC_SUBST(CPPFLAGS)
|
||||
AC_SUBST(CXXFLAGS)
|
||||
AC_SUBST(PKG_REQUIREMENTS)
|
||||
if test -n "$3"; then
|
||||
if test "${$1_found}" = "no"; then
|
||||
tmp_package="yes"
|
||||
else
|
||||
tmp_package=${$1_found}
|
||||
fi
|
||||
$1_found=no
|
||||
old_CPPFLAGS=${CPPFLAGS}
|
||||
CPPFLAGS=" ${$1_CFLAGS} ${CPPFLAGS}"
|
||||
AC_CHECK_HEADER([$3], [
|
||||
$1_found=${tmp_package}
|
||||
], [
|
||||
for x in ${$1_CFLAGS}; do
|
||||
AC_MSG_NOTICE([search for $3 in ${x[#]-I}])
|
||||
for f in $(find ${x[#]-I} -name "$3" 2> /dev/null); do
|
||||
if test -f "$f"; then
|
||||
$1_found=${tmp_package}
|
||||
$1_CFLAGS+=" -I${f%/*}"
|
||||
AC_MSG_NOTICE([added path ${f%/*}])
|
||||
break;
|
||||
fi
|
||||
done
|
||||
if test "${$1_found}" != "no"; then
|
||||
break;
|
||||
fi
|
||||
done
|
||||
if test "${$1_found}" = "no"; then
|
||||
tmp_includedir=$(${PKG_CONFIG} --variable=includedir $tmp_package)
|
||||
for x in ${tmp_includedir}; do
|
||||
AC_MSG_NOTICE([search for $3 in $x])
|
||||
for f in $(find ${x} -name "$3" 2> /dev/null); do
|
||||
if test -f "$f"; then
|
||||
$1_found=${tmp_package}
|
||||
$1_CFLAGS+=" -I${f%/*}"
|
||||
AC_MSG_NOTICE([added path ${f%/*}])
|
||||
break;
|
||||
fi
|
||||
done
|
||||
if test "${$1_found}" != "no"; then
|
||||
break;
|
||||
fi
|
||||
done
|
||||
fi
|
||||
])
|
||||
CPPFLAGS=${old_CPPFLAGS}
|
||||
fi
|
||||
if test "${$1_found}" = "no"; then
|
||||
if test -n "$3"; then
|
||||
if test -n "$4"; then
|
||||
AC_MSG_ERROR([Feature $1 not found, need header $3 in modules $secondpar or $4])
|
||||
else
|
||||
AC_MSG_ERROR([Feature $1 not found, need header $3 in module $secondpar])
|
||||
fi
|
||||
else
|
||||
AC_MSG_ERROR([Feature $1 not found please install module $secondpar])
|
||||
fi
|
||||
fi
|
||||
[$1]_CPPFLAGS="${$1_CFLAGS}"
|
||||
[$1]_CXXFLAGS="${$1_CFLAGS}"
|
||||
AC_SUBST([$1]_CPPFLAGS)
|
||||
AC_SUBST([$1]_CXXFLAGS)
|
||||
if test "${optional_flags/manualflags/}" = "${optional_flags}"; then
|
||||
CPPFLAGS+=" ${$1_CPPFLAGS}"
|
||||
CXXFLAGS+=" ${$1_CXXFLAGS}"
|
||||
LIBS+=" ${$1_LIBS}"
|
||||
AC_MSG_NOTICE([Adding flags for $1])
|
||||
else
|
||||
AC_MSG_NOTICE([To enable $1, add $1_CPPFLAGS, $1_CXXFLAGS and $1_LIBS])
|
||||
fi
|
||||
|
||||
DEV_DEB_DIST_PKG=
|
||||
DEV_RPM_DIST_PKG=
|
||||
DEV_DIST_PKG=
|
||||
pkg=m4_default([$2], [$1])
|
||||
$6
|
||||
deb_pkg=${DEV_DEB_DIST_PKG:-${DEV_DIST_PKG:-${pkg}}-dev}
|
||||
rpm_pkg=${DEV_RPM_DIST_PKG:-${DEV_DIST_PKG:-${pkg}}-devel}
|
||||
if test -n "$4"; then
|
||||
for f in $pkg $4; do
|
||||
if test -n "$(apt-cache policy -q ${f}-dev 2> /dev/null)"; then
|
||||
deb_pkg=${f}-dev
|
||||
break
|
||||
fi
|
||||
done
|
||||
for f in $pkg $4; do
|
||||
if (test -x /usr/bin/zypper && zypper search -x "${f}-devel" 1>&2 > /dev/null) || \
|
||||
(test -x /usr/bin/dnf && dnf list -q "${f}-devel" 1>&2 > /dev/null) || \
|
||||
(test -x /usr/bin/yum && yum list -q "${f}-devel" 1>&2 > /dev/null) || \
|
||||
(test -x /usr/sbin/urpmq && urpmq "${f}-devel" 1>&2 > /dev/null); then
|
||||
rpm_pkg=${f}-devel
|
||||
break
|
||||
fi
|
||||
done
|
||||
fi
|
||||
AX_DEB_BUILD_DEPEND([$deb_pkg])
|
||||
AX_RPM_BUILD_DEPEND([$rpm_pkg])
|
||||
])
|
||||
|
||||
# check if a specific package exists
|
||||
# - parameter:
|
||||
# $1 = unique id (no special characters)
|
||||
# $2 = module name (optional, if different from id)
|
||||
# $3 = optional flags:
|
||||
# manualflags if CXXFLAGS, CPPFLAGS, LIBS should remain unchanged
|
||||
# $4 = optional parameters, allowed are (evaluated in this order):
|
||||
# - DEV_RPM_DIST_PKG=<name>
|
||||
# special name for the RPM development package
|
||||
# - DEV_DEB_DIST_PKG=<name>
|
||||
# special name for the debian development package
|
||||
# - DEV_DIST_PKG=<name>
|
||||
# if the name of the development package is different
|
||||
#
|
||||
# uses PKG_CHECK_MODULES to test for a module
|
||||
# sets automake conditional HAVE_$1 to 0 (not found) or 1 (found)
|
||||
# sets all flags, so that the module can be used everywhere
|
||||
AC_DEFUN([AX_PKG_CHECK], [
|
||||
optional_flags="$3"
|
||||
PKG_PROG_PKG_CONFIG
|
||||
PKG_CHECK_MODULES([$1], [m4_default([$2], [$1])], [
|
||||
HAVE_$1=1
|
||||
[$1]_CPPFLAGS="${$1_CFLAGS}"
|
||||
[$1]_CXXFLAGS="${$1_CFLAGS}"
|
||||
AC_SUBST([$1]_CPPFLAGS)
|
||||
AC_SUBST([$1]_CXXFLAGS)
|
||||
if test "${optional_flags/manualflags/}" = "${optional_flags}"; then
|
||||
CPPFLAGS+=" ${$1_CPPFLAGS}"
|
||||
CXXFLAGS+=" ${$1_CXXFLAGS}"
|
||||
LIBS+=" ${$1_LIBS}"
|
||||
AC_MSG_NOTICE([Adding flags for $1])
|
||||
else
|
||||
AC_MSG_NOTICE([To enable $1, add $1_CPPFLAGS, $1_CXXFLAGS and $1_LIBS])
|
||||
fi
|
||||
if test -z "$PKG_REQUIREMENTS"; then
|
||||
PKG_REQUIREMENTS="m4_default([$2], [$1])"
|
||||
else
|
||||
PKG_REQUIREMENTS="${PKG_REQUIREMENTS}, m4_default([$2], [$1])"
|
||||
fi
|
||||
], [
|
||||
HAVE_$1=0
|
||||
])
|
||||
AM_CONDITIONAL(HAVE_$1, test $HAVE_[$1] -eq 1)
|
||||
AC_SUBST(HAVE_$1)
|
||||
AC_SUBST(CPPFLAGS)
|
||||
AC_SUBST(CXXFLAGS)
|
||||
AC_SUBST(PKG_REQUIREMENTS)
|
||||
|
||||
DEV_DEB_DIST_PKG=
|
||||
DEV_RPM_DIST_PKG=
|
||||
DEV_DIST_PKG=
|
||||
pkg=m4_default([$2], [$1])
|
||||
$4
|
||||
deb_pkg=${DEV_DEB_DIST_PKG:-${DEV_DIST_PKG:-${pkg}}-dev}
|
||||
rpm_pkg=${DEV_RPM_DIST_PKG:-${DEV_DIST_PKG:-${pkg}}-devel}
|
||||
if test -n "$(apt-cache policy -q ${deb_pkg} 2> /dev/null)"; then
|
||||
AX_DEB_BUILD_DEPEND([$deb_pkg])
|
||||
fi
|
||||
if (test -x /usr/bin/zypper && zypper search -x "$rpm_pkg" 1>&2 > /dev/null) || \
|
||||
(test -x /usr/bin/dnf && dnf list -q "$rpm_pkg" 1>&2 > /dev/null) || \
|
||||
(test -x /usr/bin/yum && yum list -q "$rpm_pkg" 1>&2 > /dev/null) || \
|
||||
(test -x /usr/sbin/urpmq && urpmq "$rpm_pkg" 1>&2 > /dev/null); then
|
||||
AX_RPM_BUILD_DEPEND([$rpm_pkg])
|
||||
fi
|
||||
])
|
||||
|
||||
# make sure, a specific header exists
|
||||
# - parameter:
|
||||
# $1 = header name
|
||||
# $2 = paths to search for
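#
# usage sketch (header name and search path are illustrative only):
#
#   AX_REQUIRE_HEADER([zlib.h], [/usr/local/include])
#
# configure aborts if the header is found neither in the default include
# path nor in one of the listed directories.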
|
||||
AC_DEFUN([AX_REQUIRE_HEADER], [
|
||||
AC_CHECK_HEADER($1, [], [
|
||||
found=0
|
||||
if test -n "$2"; then
|
||||
for d in $2; do
|
||||
if test -f "${d}/$1"; then
|
||||
AC_MSG_NOTICE([found file ${d}/$1])
|
||||
CPPFLAGS+=" -I${d}"
|
||||
found=1
|
||||
break;
|
||||
else
|
||||
AC_MSG_NOTICE([not found file ${d}/$1])
|
||||
fi
|
||||
done
|
||||
fi
|
||||
if test $found -eq 0; then
|
||||
AC_MSG_ERROR([Header $1 not found])
|
||||
fi
|
||||
], [])
|
||||
|
||||
])
|
||||
|
||||
# Check within a list of CPP-Flags for the first that is usable and
|
||||
# configure it
|
||||
# - parameter:
|
||||
# $1 = white-space separated list of alternative flags
|
||||
# $2 = optional message for AC_MSG_CHECKING (defaults to listing the flags)
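#
# usage sketch (the flags below are just an illustration): take the first
# fortify level the compiler accepts and append it to CPPFLAGS:
#
#   AX_CHECK_VALID_CPP_FLAG([-D_FORTIFY_SOURCE=2 -D_FORTIFY_SOURCE=1], [for a supported fortify level])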
|
||||
AC_DEFUN([AX_CHECK_VALID_CPP_FLAG], [
|
||||
AC_MSG_CHECKING([m4_default([$2], [for valid flag in "$1"])])
|
||||
save_cppflags="$CPPFLAGS"
|
||||
newflag="no"
|
||||
for test_flag in $1; do
|
||||
CPPFLAGS+=" ${test_flag}"
|
||||
AC_COMPILE_IFELSE([AC_LANG_PROGRAM()], [
|
||||
newflag="$test_flag"
|
||||
CPPFLAGS="$save_cppflags"
|
||||
CPPFLAGS+=" ${test_flag}"
|
||||
break;
|
||||
])
|
||||
CPPFLAGS="$save_cppflags"
|
||||
done
|
||||
AC_SUBST(CPPFLAGS)
|
||||
AC_MSG_RESULT([$newflag in $CPPFLAGS])
|
||||
])
|
||||
|
||||
# Check within a list of CXX-Flags for the first that is usable and
|
||||
# configure it
|
||||
# - parameter:
|
||||
# $1 = white-space separated list of alternative flags
|
||||
# $2 = optional message for AC_MSG_CHECKING (defaults to listing the flags)
|
||||
AC_DEFUN([AX_CHECK_VALID_CXX_FLAG], [
|
||||
AC_MSG_CHECKING([m4_default([$2], [for valid flag in "$1"])])
|
||||
save_cxxflags="$CXXFLAGS"
|
||||
newflag="no"
|
||||
for test_flag in $1; do
|
||||
CXXFLAGS+=" ${test_flag}"
|
||||
AC_COMPILE_IFELSE([AC_LANG_PROGRAM()], [
|
||||
newflag="$test_flag"
|
||||
CXXFLAGS="$save_cxxflags"
|
||||
CXXFLAGS+=" ${test_flag}"
|
||||
break;
|
||||
])
|
||||
CXXFLAGS="$save_cxxflags"
|
||||
done
|
||||
AC_SUBST(CXXFLAGS)
|
||||
AC_MSG_RESULT([$newflag in $CXXFLAGS])
|
||||
])
|
||||
|
||||
# Check within a list of C-Flags for the first that is usable and
|
||||
# configure it
|
||||
# - parameter:
|
||||
# $1 = white-space separated list of alternative flags
|
||||
# $2 = optional message for AC_MSG_CHECKING (defaults to listing the flags)
|
||||
AC_DEFUN([AX_CHECK_VALID_C_FLAG], [
|
||||
AC_MSG_CHECKING([m4_default([$2], [for valid flag in "$1"])])
|
||||
save_cflags="$CFLAGS"
|
||||
newflag="no"
|
||||
for test_flag in $1; do
|
||||
CFLAGS+=" ${test_flag}"
|
||||
AC_COMPILE_IFELSE([AC_LANG_PROGRAM()], [
|
||||
newflag="$test_flag"
|
||||
CFLAGS="$save_cflags"
|
||||
CFLAGS+=" ${test_flag}"
|
||||
break;
|
||||
])
|
||||
CFLAGS="$save_cflags"
|
||||
done
|
||||
AC_SUBST(CFLAGS)
|
||||
AC_MSG_RESULT([$newflag in $CFLAGS])
|
||||
])
|
||||
|
||||
# Check within a list of LD-Flags for the first that is usable and
|
||||
# configure it
|
||||
# - parameter:
|
||||
# $1 = white-space separated list of alternative flags
|
||||
# $2 = optional message for AC_MSG_CHECKING (defaults to listing the flags)
|
||||
AC_DEFUN([AX_CHECK_VALID_LD_FLAG], [
|
||||
AC_MSG_CHECKING([m4_default([$2], [for valid flag in "$1"])])
|
||||
save_ldflags="$LDFLAGS"
|
||||
newflag="no"
|
||||
for test_flag in $1; do
|
||||
LDFLAGS+=" ${test_flag}"
|
||||
AC_COMPILE_IFELSE([AC_LANG_PROGRAM()], [
|
||||
newflag="$test_flag"
|
||||
LDFLAGS="$save_ldflags"
|
||||
LDFLAGS+=" ${test_flag}"
|
||||
break;
|
||||
])
|
||||
LDFLAGS="$save_ldflags"
|
||||
done
|
||||
AC_SUBST(LDFLAGS)
|
||||
AC_MSG_RESULT([$newflag in $LDFLAGS])
|
||||
])
|
||||
|
||||
# Check if a package exists in the current distribution, if yes, require it
|
||||
# in debian/control.in append @DEB_DEPEND_IFEXISTS@ to Build-Depends
|
||||
# if you pass a list, it will require the first matching, if any matches
|
||||
# - parameter:
|
||||
# $1 = space separated list of package names
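#
# usage sketch (package names are illustrative): depend on the Qt5 SQLite
# driver if the distribution ships it, otherwise on the Qt4 one, or on
# neither if both are unknown:
#
#   AX_DEB_DEPEND_IFEXISTS([libqt5sql5-sqlite libqt4-sql-sqlite])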
|
||||
AC_DEFUN([AX_DEB_DEPEND_IFEXISTS], [
|
||||
for pkg in $1; do
|
||||
if test -n "$(apt-cache policy -q ${pkg} 2> /dev/null)"; then
|
||||
DEB_DEPEND_IFEXISTS="${DEB_DEPEND_IFEXISTS}, ${pkg}"
|
||||
break
|
||||
fi
|
||||
done
|
||||
])
|
||||
|
||||
# require package in debian/control.in append @DEB_BUILD_DEPEND@ to Build-Depends
|
||||
# - parameter:
|
||||
# $1 = package name
|
||||
AC_DEFUN([AX_DEB_BUILD_DEPEND], [
|
||||
DEB_BUILD_DEPEND="${DEB_BUILD_DEPEND}, $1"
|
||||
])
|
||||
|
||||
# require package in debian/control.in append @DEB_DEPEND@ to Depends
|
||||
# - parameter:
|
||||
# $1 = package name
|
||||
AC_DEFUN([AX_DEB_DEPEND], [
|
||||
DEB_DEPEND="${DEB_DEPEND}, $1"
|
||||
])
|
||||
|
||||
# set the debian package section (@DEB_SECTION@ in debian/control.in)
|
||||
# - parameter:
|
||||
# $1 = section name
|
||||
AC_DEFUN([AX_DEB_SECTION], [
|
||||
DEB_SECTION="$1"
|
||||
])
|
||||
|
||||
# call after setting debian dependencies
|
||||
AC_DEFUN([AX_DEB_RESOLVE], [
|
||||
AC_SUBST(DEB_BUILD_DEPEND)
|
||||
AC_SUBST(DEB_DEPEND)
|
||||
AC_SUBST(DEB_SECTION)
|
||||
AC_SUBST(DEB_DEPEND_IFEXISTS)
|
||||
])
|
||||
|
||||
# Check if a package exists in the current distribution, if yes, require it
|
||||
# in .spec.in append @RPM_DEPEND_IFEXISTS@ to Build-Depends
|
||||
# if you pass a list, it will require the first matching, if any matches
|
||||
# - parameter:
|
||||
# $1 = space separated package names
|
||||
AC_DEFUN([AX_RPM_DEPEND_IFEXISTS], [
|
||||
for pkg in $1; do
|
||||
if (test -x /usr/bin/zypper && zypper search -x "$pkg" 1>&2 > /dev/null) || \
|
||||
(test -x /usr/bin/dnf && dnf list -q "$pkg" 1>&2 > /dev/null) || \
|
||||
(test -x /usr/bin/yum && yum list -q "$pkg" 1>&2 > /dev/null) || \
|
||||
(test -x /usr/sbin/urpmq && urpmq "$pkg" 1>&2 > /dev/null); then
|
||||
RPM_DEPEND_IFEXISTS="${RPM_DEPEND_IFEXISTS}, ${pkg}"
|
||||
break
|
||||
fi
|
||||
done
|
||||
])
|
||||
|
||||
# require package in .spec.in append @RPM_BUILD_DEPEND@ to Build-Depends
|
||||
# - parameter:
|
||||
# $1 = package name
|
||||
AC_DEFUN([AX_RPM_BUILD_DEPEND], [
|
||||
RPM_BUILD_DEPEND="${RPM_BUILD_DEPEND}, $1"
|
||||
])
|
||||
|
||||
# require package in .spec.in append @RPM_DEPEND@ to Depends
|
||||
# - parameter:
|
||||
# $1 = package name
|
||||
AC_DEFUN([AX_RPM_DEPEND], [
|
||||
if test -z "${RPM_DEPEND}"; then
|
||||
RPM_DEPEND="$1"
|
||||
else
|
||||
RPM_DEPEND="${RPM_DEPEND}, $1"
|
||||
fi
|
||||
])
|
||||
|
||||
# set the RPM package group (@RPM_GROUP@ in the spec file template)
|
||||
# - parameter:
|
||||
# $1 = group name
|
||||
AC_DEFUN([AX_RPM_GROUP], [
|
||||
RPM_GROUP="$1"
|
||||
])
|
||||
|
||||
# call after setting RPM dependencies
|
||||
AC_DEFUN([AX_RPM_RESOLVE], [
|
||||
AC_SUBST(RPM_BUILD_DEPEND)
|
||||
AC_SUBST(RPM_DEPEND)
|
||||
AC_SUBST(RPM_GROUP)
|
||||
AC_SUBST(RPM_DEPEND_IFEXISTS)
|
||||
])
|
||||
|
||||
# Check if a package exists in the current distribution, if yes, require it
|
||||
# in .spec.in append @ALL_DEPEND_IFEXISTS@ to Build-Depends
|
||||
# if you pass a list, it will require the first matching, if any matches
|
||||
# - parameter:
|
||||
# $1 = space separated list of package names
|
||||
AC_DEFUN([AX_ALL_DEPEND_IFEXISTS], [
|
||||
AX_DEB_DEPEND_IFEXISTS([$1])
|
||||
AX_RPM_DEPEND_IFEXISTS([$1])
|
||||
])
|
||||
|
||||
# Check if a package exists in the current distribution, if yes, require it
|
||||
# in .spec.in append @ALL_DEPEND_IFEXISTS@ to Build-Depends
|
||||
# if you pass a list, it will require the first matching, if any matches
|
||||
# - parameter:
|
||||
# $1 = package name
|
||||
AC_DEFUN([AX_ALL_DEPEND_IFEXISTS_DEV], [
|
||||
pkgs="$1"
|
||||
AX_DEB_DEPEND_IFEXISTS([${pkgs// /-dev }-dev])
|
||||
AX_RPM_DEPEND_IFEXISTS([${pkgs// /-devel }-devel])
|
||||
])
|
||||
|
||||
# require package in .spec.in append @ALL_BUILD_DEPEND@ to Build-Depends
|
||||
# - parameter:
|
||||
# $1 = package name
|
||||
AC_DEFUN([AX_ALL_BUILD_DEPEND], [
|
||||
pkg="$1"
|
||||
DEB_BUILD_DEPEND="${DEB_BUILD_DEPEND}, ${pkg}"
|
||||
RPM_BUILD_DEPEND="${RPM_BUILD_DEPEND}, ${pkg}"
|
||||
])
|
||||
|
||||
# require package in .spec.in and control.in, append to runtime depends
|
||||
# - parameter:
|
||||
# $1 = package name
|
||||
AC_DEFUN([AX_ALL_DEPEND], [
|
||||
pkg="$1"
|
||||
DEB_DEPEND="${DEB_DEPEND}, ${pkg}"
|
||||
RPM_DEPEND="${RPM_DEPEND}, ${pkg}"
|
||||
])
|
||||
|
||||
# require package in .spec.in append @ALL_BUILD_DEPEND@ to Build-Depends
|
||||
# - parameter:
|
||||
# $1 = package name
|
||||
AC_DEFUN([AX_ALL_BUILD_DEPEND_DEV], [
|
||||
pkg="$1"
|
||||
DEB_BUILD_DEPEND="${DEB_BUILD_DEPEND}, ${pkg// /-dev}-dev"
|
||||
RPM_BUILD_DEPEND="${RPM_BUILD_DEPEND}, ${pkg// /-devel}-devel"
|
||||
])
|
||||
|
||||
# require package in .spec.in append @ALL_DEPEND@ to Depends
|
||||
# - parameter:
|
||||
# $1 = package name
|
||||
AC_DEFUN([AX_ALL_DEPEND], [
|
||||
pkg="$1"
|
||||
DEB_DEPEND="${DEB_DEPEND}, ${pkg}"
|
||||
if test -z "${RPM_DEPEND}"; then
|
||||
RPM_DEPEND="${pkg}"
|
||||
else
|
||||
RPM_DEPEND="${RPM_DEPEND}, ${pkg}"
|
||||
fi
|
||||
])
|
||||
|
||||
# finish configuration - to be called instead of AC_OUTPUT
|
||||
AC_DEFUN([AX_OUTPUT], [
|
||||
AX_INIT_QT
|
||||
AX_DEB_RESOLVE
|
||||
AX_RPM_RESOLVE
|
||||
AC_OUTPUT
|
||||
AC_MSG_NOTICE([configured for ${PACKAGE_NAME}-${VERSION}])
|
||||
])
|
1950
bootstrap.sh
@@ -1,8 +1,7 @@
|
||||
#! /bin/sh
|
||||
|
||||
#! /bin/bash
|
||||
## @file
|
||||
##
|
||||
## $Id$
|
||||
## $Id: bootstrap.sh 52 2015-11-03 15:38:21Z marc $
|
||||
##
|
||||
## $Date: 2004/08/31 15:57:19 $
|
||||
## $Author: marc $
|
||||
@@ -15,5 +14,1946 @@
|
||||
## added file header
|
||||
##
|
||||
|
||||
test -f makefile && make distclean
|
||||
aclocal && libtoolize --force && automake -a && autoconf
|
||||
MY_NAME=${0##*/}
|
||||
PROJECT_PATH=$(pwd)
|
||||
DEFAULT_PROJECT_NAME=${PROJECT_PATH##*/}
|
||||
configure=0
|
||||
build=0
|
||||
docker=0
|
||||
buildtarget=""
|
||||
overwrite=0
|
||||
rebuild=0
|
||||
novcs=0
|
||||
excludevcs=()
|
||||
rebuildfiles=()
|
||||
while test $# -gt 0; do
|
||||
case "$1" in
|
||||
(--configure|-c) configure=1;;
|
||||
(--docker|-d) docker=1;;
|
||||
(--build|-b) configure=1; build=1; buildtarget+=" distcheck";;
|
||||
(--all|-a) configure=1; build=1; buildtarget+=" all";;
|
||||
(--install|-i) configure=1; build=1; buildtarget+=" all install";;
|
||||
(--clean) configure=1; build=1; buildtarget+=" maintainer-clean";;
|
||||
(--target|-t) shift; configure=1; build=1; buildtarget+=" $1";;
|
||||
(--overwrite|-o) overwrite=1;;
|
||||
(--rebuild|-r) rebuild=1;;
|
||||
(--rebuild-file|-f) shift; rebuildfiles+=("$1");;
|
||||
(--no-vcs|-n) novcs=1;;
|
||||
(--exclude-vcs|-x) shift; excludevcs+=("$1");;
|
||||
(--version|-v)
|
||||
echo "$Id: bootstrap.sh 52 2015-11-03 15:38:21Z marc $";
|
||||
exit;;
|
||||
(--help|-h) less <<EOF
|
||||
SYNOPSIS
|
||||
|
||||
${MY_NAME} [--help|-h] [OPTIONS]
|
||||
|
||||
OPTIONS
|
||||
|
||||
--configure, -c call ./configure after initialization
|
||||
--docker, -d build and run tests in a docker instance
|
||||
--build, -b build, also call ./configure && make distcheck
|
||||
--all, -a same as -b, but make target all
|
||||
--install, -i same as -a, but add make install
|
||||
--clean same as -b, but make target maintainer-clean
|
||||
--target, -t <target> same as -b, but specify target instead of distcheck
|
||||
--overwrite, -o overwrite all basic files (bootstrap.sh, m4-macros)
|
||||
--rebuild, -r force rebuild of generated files, even if modified
|
||||
--rebuild-file, -f <file> rebuild specific file (can be added multiple times)
|
||||
--no-vcs, -n do not automatically add files to version control
|
||||
--exclude-vcs, -x <file> exclude specific file from version control
|
||||
--help, -h show this help
|
||||
--version, -v show version and date of this file
|
||||
|
||||
DESCRIPTION
|
||||
|
||||
Initializes your build environment, as far as necessary. Reads your
|
||||
used features from configure.ac, if that file exists, or creates a
|
||||
configure.ac. Automatically copies or creates all required template
|
||||
files.
|
||||
|
||||
From your new and empty project's subversion or git path, call $0 to
|
||||
initialize your build environment.
|
||||
|
||||
Before you call ${MY_NAME} the very first time, edit ${0#/*}/AUTHORS
|
||||
and replace it with your name (or the authors of your project, one
|
||||
name each line, main developer and copyright holder on the first
|
||||
line).
|
||||
|
||||
The first call to ${MY_NAME} should be something like
|
||||
../bootstrap-build-environment/${MY_NAME} and not
|
||||
./${MY_NAME}. Actually, you called $0.
|
||||
|
||||
In the way you called ${MY_NAME}, it has detected
|
||||
${DEFAULT_PROJECT_NAME} as the project name for your project in
|
||||
${PROJECT_PATH}. In the first run, you should call ${MY_NAME} from a
|
||||
checked-out copy of the bootstrap-build-environment from
|
||||
https://mrw.sh/, and the path from where you call
|
||||
${MY_NAME} (which is actually ${PROJECT_PATH}) should be the path to
|
||||
your newly created project. Please note that your project must be a
|
||||
checked out subversion or git repository, since this build
|
||||
environment relies on subversion or git.
|
||||
|
||||
Example for an initial run, where your new project is stored in
|
||||
subversion on https://path/to/your/new-project:
|
||||
|
||||
cd ~/svn
|
||||
svn co https://svn.mrw.sh/bootstrap-build-environment/trunk \\
|
||||
bootstrap-build-environment
|
||||
svn co https://path/to/your/new-project/trunk new-project
|
||||
cd new-project
|
||||
../bootstrap-build-environment/bootstrap.sh
|
||||
|
||||
Example for an initial run, where your new project is stored in
|
||||
git on https://path/to/your/new-project:
|
||||
|
||||
cd ~/svn
|
||||
svn co https://svn.mrw.sh/bootstrap-build-environment/trunk \\
|
||||
bootstrap-build-environment
|
||||
cd ~/git
|
||||
git clone https://path/to/your/new-project
|
||||
cd new-project
|
||||
../bootstrap-build-environment/bootstrap.sh
|
||||
|
||||
RUNNING
|
||||
|
||||
If you run ${MY_NAME}, it first generates the necessary files (see
|
||||
below), then runs make distclean if a makefile exists. After
|
||||
this it calls aclocal, libtoolize, automake, autoconf and
|
||||
optionally ./configure. If necessary, files are added to version
|
||||
control.
|
||||
|
||||
GENERATED FILES
|
||||
|
||||
This script copies the following files into your project environment:
|
||||
* ${MY_NAME}
|
||||
* autogen.sh - just the basics to initialize auto tools and create configure
|
||||
* ax_init_standard_project.m4 - auxiliary macro definition file
|
||||
* ax_cxx_compile_stdcxx.m4 - auxiliary macro definition file
|
||||
* ax_check_qt.m4 - auxiliary macro definition file
|
||||
* resolve-debbuilddeps.sh - script to install debian package dependencies
|
||||
* resolve-rpmbuilddeps.sh - script to install RPM package dependencies
|
||||
* build-in-docker.sh - script to build the project encapsulated in a docker container
|
||||
* build-in-docker.conf - additional configuration for build-in-docker.sh
|
||||
* rpmsign.exp - script for signing rpms unattended
|
||||
* build-resource-file.sh - build resource.qrc file from a resource directory
|
||||
* sql-to-dot.sed - script to convert SQL schema files to graphviz dot in doxygen
|
||||
* mac-create-app-bundle.sh - script to create apple mac os-x app-bundle
|
||||
* dependency-graph.sh - script to draw project dependencies
|
||||
* template.sh - generic template for bash scripts
|
||||
* test/runtests.sh - template file to run test scripts, i.e. docker based
|
||||
* AUTHORS - replace your name in AUTHORS before first run
|
||||
* NEWS - empty file add your project's news
|
||||
* README (or README.md) - add project description (first line: header, followed by empty line)
|
||||
* configure.ac - global configuration file template
|
||||
* makefile.am - global makefile template
|
||||
* ${DEFAULT_PROJECT_NAME}.desktop.in - linux desktop file
|
||||
* src/makefile.am - if you enabled AX_USE_CXX
|
||||
* src/version.hxx - if you enabled AX_USE_CXX
|
||||
* src/version.cxx - if you enabled AX_USE_CXX
|
||||
* etc/makefile.am - if you enable AX_USE_ETC
|
||||
* html/makefile.am - if you enabled AX_BUILD_HTML or AX_BUILD_HTML_NPM
|
||||
* html/package.json.in - if you enabled AX_BUILD_HTML_NPM
|
||||
* scripts/makefile.am - if you enabled AX_USE_SCRIPTS
|
||||
* nodejs/makefile.am - if you add AX_USE_NODEJS
|
||||
* nodejs/${DEFAULT_PROJECT_NAME}.js - if you add AX_USE_NODEJS
|
||||
* nodejs/package.json.in - if you add AX_USE_NODEJS
|
||||
* nodejs/etc/${DEFAULT_PROJECT_NAME}.json - if you add AX_USE_NODEJS
|
||||
* nodejs/etc/default/${DEFAULT_PROJECT_NAME} - if you add AX_USE_NODEJS
|
||||
* nodejs/etc/init/${DEFAULT_PROJECT_NAME}.conf - if you add AX_USE_NODEJS
|
||||
* nodejs/etc/systemd/system/${DEFAULT_PROJECT_NAME}.service - if you add AX_USE_NODEJS
|
||||
* nodejs/public - if you add AX_USE_NODEJS
|
||||
* nodejs/public/images - if you add AX_USE_NODEJS
|
||||
* nodejs/public/javascripts/${DEFAULT_PROJECT_NAME}.js - if you add AX_USE_NODEJS
|
||||
* nodejs/public/stylesheets/style.styl - if you add AX_USE_NODEJS
|
||||
* nodejs/routes/index.js - if you add AX_USE_NODEJS
|
||||
* nodejs/sockets/index.js - if you add AX_USE_NODEJS
|
||||
* nodejs/views/index.ejs - if you add AX_USE_NODEJS
|
||||
* nodejs/views/layout.ejs - if you add AX_USE_NODEJS
|
||||
* nodejs/node_modules - if you add AX_USE_NODEJS
|
||||
* doc/makefile.am - if you enabled AX_USE_DOXYGEN or AX_USE_PERLDOC
|
||||
* doc/doxyfile.in - if you enabled AX_USE_DOXYGEN
|
||||
* doc/header.html.in - if you enabled AX_USE_DOXYGEN
|
||||
* doc/footer.html.in - if you enabled AX_USE_DOXYGEN
|
||||
* doc/style.css - if you enabled AX_USE_DOXYGEN
|
||||
* doc/plantuml.jar - if you enable AX_USE_DOXYGEN
|
||||
* test/makefile.am - if you enabled AX_USE_CPPUNIT and AX_USE_CXX
|
||||
* test/${DEFAULT_PROJECT_NAME#lib}.cxx - if you enabled AX_BUILD_TEST or AX_USE_CPPUNIT
|
||||
* examples/makefile.am - if you enabled AX_BUILD_EXAMPLES
|
||||
* debian/changelog.in - if you enabled AX_USE_DEBIAN_PACKAGING
|
||||
* debian/control.in - if you enabled AX_USE_DEBIAN_PACKAGING
|
||||
* debian/docs - if you enabled AX_USE_DEBIAN_PACKAGING
|
||||
* debian/${DEFAULT_PROJECT_NAME}.install - if you enabled AX_USE_DEBIAN_PACKAGING
|
||||
* debian/${DEFAULT_PROJECT_NAME}.dirs - if you enabled AX_USE_DEBIAN_PACKAGING
|
||||
* debian/${DEFAULT_PROJECT_NAME}-dev.install - if you enabled AX_USE_DEBIAN_PACKAGING
|
||||
* debian/${DEFAULT_PROJECT_NAME}-dev.dirs - if you enabled AX_USE_DEBIAN_PACKAGING
|
||||
* debian/rules - if you enabled AX_USE_DEBIAN_PACKAGING
|
||||
* debian/compat - if you enabled AX_USE_DEBIAN_PACKAGING
|
||||
* ${DEFAULT_PROJECT_NAME}.spec.in - if you enable AX_USE_RPM_PACKAGING
|
||||
* src/${DEFAULT_PROJECT_NAME}.pc.in - if you enabled AX_USE_LIBTOOL
|
||||
|
||||
REBUILDING FILES
|
||||
|
||||
To rebuild all these files, just run "${MY_NAME} -r".
|
||||
|
||||
To copy only the files provided by this package, that means those
|
||||
files you must never change, that means to update the build system
|
||||
to the latest release, run "${MY_NAME} -o"
|
||||
|
||||
You can also rebuild any list of specific files
|
||||
by adding option "${MY_NAME} -f <file>" to rebuild file
|
||||
"<file>". You can add option "-f" more than once.
|
||||
|
||||
FILES TO EDIT
|
||||
|
||||
After creation of the files, you can edit them according to your
|
||||
needs. Please don't forget to redo your edits after rebuilding a
|
||||
file. Most files don't even need to be edited, they work out of the
|
||||
box.
|
||||
|
||||
The following files normally require editing:
|
||||
* AUTHORS
|
||||
* NEWS
|
||||
* README
|
||||
* configure.ac
|
||||
* src/makefile.am
|
||||
* html/makefile.am
|
||||
* test/makefile.am
|
||||
* test/${DEFAULT_PROJECT_NAME}.cxx
|
||||
* examples/makefile.am
|
||||
|
||||
FILE DEPENDENCIES
|
||||
|
||||
You should rebuild (see above) the files, whenever you change the
|
||||
configuration they depend on, i.e.:
|
||||
|
||||
* test/makefile.am depends on AX_USE_LIBTOOL
|
||||
* html/makefile.am depends on AX_BUILD_HTML or AX_BUILD_HTML_NPM
|
||||
* doc/doxyfile.in depends on AX_BUILD_EXAMPLES
|
||||
* debian/control.in depends on AX_USE_DOXYGEN, AX_USE_PERLDOC,
|
||||
AX_USE_CPPUNIT AX_CXX_QT, AX_CHECK_QT, AX_REQUIRE_QT, AX_USE_LIBTOOL
|
||||
* debian/${DEFAULT_PROJECT_NAME}.install depends on AX_USE_LIBTOOL
|
||||
* debian/${DEFAULT_PROJECT_NAME}.dirs depends on AX_USE_LIBTOOL
|
||||
* debian/${DEFAULT_PROJECT_NAME}-dev.install depends on AX_USE_LIBTOOL
|
||||
* debian/${DEFAULT_PROJECT_NAME}-dev.dirs depends on AX_USE_LIBTOOL
|
||||
* ${DEFAULT_PROJECT_NAME}.spec.in depends on AX_USE_RPM_PACKAGING,
|
||||
AX_USE_LIBTOOL, AX_CHECK_QT, AX_REQUIRE_QT, AX_CXX_QT, AX_USE_CPPUNIT
|
||||
|
||||
FILES
|
||||
|
||||
* AUTHORS: First line is the main author and used in Debian and RPM
|
||||
packaging, so there must be a GPG key that matches
|
||||
to this line.
|
||||
* NEWS: File to add project news.
|
||||
* README: First line is a short description of your project, then an
|
||||
empty line must follow. All remaining lines are a
|
||||
long description of your project. This information
|
||||
is copied, e.g. in Debian or RPM packages. In C++
|
||||
you can access the readme by calling
|
||||
${DEFAULT_PROJECT_NAME}::description().
|
||||
* ChangeLog: Your changelog is automatically maintained from
|
||||
subversion history, using svn2cl. You don't need to
|
||||
care about it. It uses git2cl on git repositories.
|
||||
* configure.ac: This file becomes very short and simple. You provide
|
||||
the project name, the major and minor version. The
|
||||
least version number is automatically taken from
|
||||
subversion's revision, so every checkin
|
||||
automatically increments the least version
|
||||
number. In git, git rev-list --all --count is used.
|
||||
The following macros are supported in configure.ac (a minimal example follows the list):
|
||||
* Enable C++: AX_USE_CXX
|
||||
* Enable system config files in /etc: AX_USE_ETC
|
||||
* Enable LibTool library creation: AX_USE_LIBTOOL
|
||||
* Enable Scripts: AX_USE_SCRIPTS
|
||||
* Enable NodeJS project: AX_USE_NODEJS
|
||||
* Enable Doxygen documentation generation: AX_USE_DOXYGEN
|
||||
* Enable Perldoc documentation generation: AX_USE_PERLDOC
|
||||
* Enable Debian packaging by calling "make deb": AX_USE_DEBIAN_PACKAGING
|
||||
* Enable RPM packaging by calling "make rpm": AX_USE_RPM_PACKAGING
|
||||
* Enable C++ testing using CppUnit: AX_USE_CPPUNIT
|
||||
* Enable other tests: AX_BUILD_TEST
|
||||
* Enable C++ examples, i.e. for libraries: AX_BUILD_EXAMPLES
|
||||
* Require C++17 support: AX_CXX_COMPILE_STDCXX([17], [noext], [mandatory]) (see ax_cxx_compile_stdcxx.m4)
|
||||
* Require a QT module: AX_REQUIRE_QT (see ax_check_qt.m4)
|
||||
* Optionally use a QT module: AX_CHECK_QT (see ax_check_qt.m4)
|
||||
* Require a module: AX_PKG_REQUIRE (see ax_init_standard_project.m4)
|
||||
* Check for an optional module: AX_PKG_CHECK (see ax_init_standard_project.m4)
|
||||
|
||||
EXAMPLES: src/makefile.am in a QT project

In this example, you wrote the following files:
* ${DEFAULT_PROJECT_NAME}.hxx - Qt header file, passed to moc
* ${DEFAULT_PROJECT_NAME}.cxx - C++ file containing main()
* ${DEFAULT_PROJECT_NAME}.ui - UI file

All rules are implicitly added; all you need to do is add the
following definitions, most of which are generic:

bin_PROGRAMS = ${DEFAULT_PROJECT_NAME}
${DEFAULT_PROJECT_NAME}_MOCFILES = moc_${DEFAULT_PROJECT_NAME}.cxx
${DEFAULT_PROJECT_NAME}_UIFILES = ui_${DEFAULT_PROJECT_NAME}.hxx
${DEFAULT_PROJECT_NAME}_SOURCES = version.cxx version.hxx ${DEFAULT_PROJECT_NAME}.cxx \${${DEFAULT_PROJECT_NAME}_MOCFILES} \${${DEFAULT_PROJECT_NAME}_UIFILES}
BUILT_SOURCES = \${${DEFAULT_PROJECT_NAME}_MOCFILES} \${${DEFAULT_PROJECT_NAME}_UIFILES}
EXTRA_DIST = \${${DEFAULT_PROJECT_NAME}_MOCFILES:moc_%.cxx=%.hxx} \${${DEFAULT_PROJECT_NAME}_UIFILES:ui_%.hxx=%.ui}
MAINTAINERCLEANFILES = makefile.in
EOF
|
||||
exit;;
|
||||
(*) break;;
|
||||
esac
|
||||
shift;
|
||||
done
|
||||
|
||||
# check if stdout is a terminal...
|
||||
if test -t 1; then
|
||||
|
||||
# see if it supports colors...
|
||||
ncolors=$(tput colors)
|
||||
|
||||
if test -n "$ncolors" && test $ncolors -ge 8; then
|
||||
bold="$(tput bold)"
|
||||
underline="$(tput smul)"
|
||||
standout="$(tput smso)"
|
||||
normal="$(tput sgr0)"
|
||||
black="$(tput setaf 0)"
|
||||
red="$(tput setaf 1)"
|
||||
green="$(tput setaf 2)"
|
||||
yellow="$(tput setaf 3)"
|
||||
blue="$(tput setaf 4)"
|
||||
magenta="$(tput setaf 5)"
|
||||
cyan="$(tput setaf 6)"
|
||||
white="$(tput setaf 7)"
|
||||
fi
|
||||
fi
|
||||
|
||||
notice() {
|
||||
echo "${yellow}→ notice: ${bold}$*${normal}"
|
||||
}
|
||||
|
||||
running() {
|
||||
echo -n "${bold}${blue}→ running: ${bold}${white}$*${normal} … "
|
||||
}
|
||||
|
||||
checking() {
|
||||
echo -n "${bold}${blue}→ checking: ${bold}${white}$*${normal} … "
|
||||
}
|
||||
|
||||
generating() {
|
||||
echo -n "${bold}${blue}→ generating: ${bold}${white}$*${normal} … "
|
||||
}
|
||||
|
||||
configuring() {
|
||||
echo -n "${bold}${blue}→ configuring ${bold}${white}$1${normal}:"
|
||||
shift
|
||||
echo -n "${white}$*${normal} … "
|
||||
}
|
||||
|
||||
ignored() {
|
||||
echo "${bold}${yellow}ignored $*${normal}"
|
||||
}
|
||||
|
||||
success() {
|
||||
echo "${bold}${green}success $*${normal}"
|
||||
}
|
||||
|
||||
error() {
|
||||
echo "${bold}${red}→ error: $1${normal}"
|
||||
shift
|
||||
if test -n "$*"; then
|
||||
echo "${bold}$*${normal}"
|
||||
fi
|
||||
exit 1
|
||||
}
|
||||
|
||||
run() {
|
||||
check=1
|
||||
while test $# -gt 0; do
|
||||
case "$1" in
|
||||
(--no-check) check=0;;
|
||||
(*) break;;
|
||||
esac
|
||||
shift;
|
||||
done
|
||||
running $*
|
||||
result=$($* 2>&1)
|
||||
res=$?
|
||||
if test $res -ne 0; then
|
||||
if test $check -eq 1; then
|
||||
error "Failed with return code: $res" "$result"
|
||||
else
|
||||
ignored
|
||||
fi
|
||||
else
|
||||
success
|
||||
fi
|
||||
}
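
# Usage sketch (illustrative commands, not part of the original flow):
#   run automake -a             # prints the command, aborts the bootstrap on failure
#   run --no-check git add FILE # a failure is only reported as "ignored"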
|
||||
|
||||
checking for version control system
|
||||
VCS=""
|
||||
VCSDEPENDS=""
|
||||
for path in . .. ../.. ../../..; do
|
||||
if test -d ${path}/.svn; then
|
||||
VCS="svn"
|
||||
VCSDEPENDS_DEB="svn2cl, subversion, subversion-tools,"
|
||||
VCSDEPENDS_RPM="subversion, "
|
||||
success detected ${VCS}
|
||||
break
|
||||
elif test -d ${path}/.git; then
|
||||
VCS="git"
|
||||
VCSDEPENDS_DEB="git2cl, git,"
|
||||
VCSDEPENDS_RPM="git, "
|
||||
success detected ${VCS}
|
||||
break
|
||||
fi
|
||||
done
|
||||
if test -z "$VCS"; then
|
||||
ignored
|
||||
fi
|
||||
|
||||
HEADER='## @id '"\$Id\$"'
|
||||
##
|
||||
## This file has been added:
|
||||
## - by '${MY_NAME}'
|
||||
## - on '$(LANG= date +"%a, %d %B %Y %H:%M:%S %z")'
|
||||
## Feel free to change it or even remove and rebuild it, up to your needs
|
||||
##
|
||||
## 1 2 3 4 5 6 7 8
|
||||
## 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
'
|
||||
|
||||
CHEADER='/** @id '"\$Id\$"'
|
||||
|
||||
This file has been added:
|
||||
- by '${MY_NAME}'
|
||||
- on '$(LANG= date +"%a, %d %B %Y %H:%M:%S %z")'
|
||||
|
||||
*/
|
||||
// 1 2 3 4 5 6 7 8
|
||||
// 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
|
||||
'
|
||||
|
||||
testtag() {
|
||||
local IFS="|"
|
||||
egrep -q '^ *'"($*)"' *(\(.*)? *$' configure.ac
|
||||
}
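
# Usage sketch (illustrative): testtag succeeds when at least one of the
# "|"-separated macros appears uncommented in configure.ac, e.g.:
#   if testtag 'AX_BUILD_HTML|AX_BUILD_HTML_NPM'; then echo "html enabled"; fi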
|
||||
|
||||
contains() {
|
||||
local e
|
||||
for e in "${@:2}"; do [[ "$e" == "$1" ]] && return 0; done
|
||||
return 1
|
||||
}
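
# Usage sketch (illustrative): succeeds when the first argument equals one of
# the remaining arguments, e.g.:
#   if contains "doc/doxyfile.in" "${rebuildfiles[@]}"; then echo "rebuild it"; fi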
|
||||
|
||||
checkdir() {
|
||||
if ! test -d "$1"; then # create path
|
||||
run mkdir -p "$1"
|
||||
if test -n "${VCS}" -a $novcs -eq 0 && ! contains "$1" "${excludevcs[@]}"; then
|
||||
run --no-check ${VCS} add "$1"
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
checkfile() {
|
||||
exists=0
|
||||
if test -f "$1" -o -f "$1".in; then
|
||||
exists=1
|
||||
fi
|
||||
test $exists -eq 1
|
||||
}
|
||||
|
||||
to() {
|
||||
mode="u=rw,g=rw,o=r"
|
||||
while test $# -gt 0; do
|
||||
case "$1" in
|
||||
(--condition) shift # test for a tag, abort if not set
|
||||
if ! testtag "$1"; then
|
||||
return 0
|
||||
fi;;
|
||||
(--unless) shift # test for a tag, abort if set
|
||||
if testtag "$1"; then
|
||||
return 0
|
||||
fi;;
|
||||
(--mode) shift # test for a tag, abort if not set
|
||||
mode="$1";;
|
||||
(*) break;;
|
||||
esac
|
||||
shift;
|
||||
done
|
||||
if checkfile "$1" && test $rebuild -eq 0 -o "$1" = "configure.ac" \
|
||||
&& ! contains "$1" "${rebuildfiles[@]}"; then
|
||||
# file already exists and must not be rebuilt
|
||||
return 1
|
||||
fi
|
||||
checkdir "$(dirname ${1})"
|
||||
generating $1
|
||||
result=$(cat > "$1" 2>&1)
|
||||
res=$?
|
||||
if test $res -ne 0; then
|
||||
error "Failed with return code: $res" "$result"
|
||||
else
|
||||
success
|
||||
fi
|
||||
run chmod $mode $1
|
||||
if test $exists -eq 0; then
|
||||
if test -n "${VCS}" -a $novcs -eq 0 && ! contains "$1" "${excludevcs[@]}"; then
|
||||
run --no-check ${VCS} add "$1"
|
||||
if test "${VCS}" = "svn"; then
|
||||
run svn propset svn:keywords "Id" "$1"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
return 0
|
||||
}
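
# Usage sketch (illustrative; the file name is only an example): write a file
# from stdin, but only if the named macro is enabled in configure.ac and the
# file does not already exist (unless a rebuild was requested):
#   to --condition AX_USE_SCRIPTS scripts/example.sh <<'EOS'
#   #!/bin/sh
#   echo "generated example"
#   EOS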
|
||||
|
||||
copy() {
|
||||
if checkfile "$1" && test $overwrite -eq 0 \
|
||||
&& ! contains "$1" "${rebuildfiles[@]}"; then
|
||||
# file already exists and must not be rebuilt
|
||||
return
|
||||
fi
|
||||
local source="${0%/*}/$1"
|
||||
if ! test -r "${source}"; then
|
||||
source="../${source}"
|
||||
if ! test -r "${source}"; then
|
||||
source="${0%/*}/$1"
|
||||
fi
|
||||
fi
|
||||
if test "${1%/*}" != "$1"; then
|
||||
test -d "${1%/*}" || svn mkdir "${1%/*}"
|
||||
fi
|
||||
if ! test -e "${source}"; then
|
||||
ignored "$1" not found
|
||||
return
|
||||
fi
|
||||
run cp "${source}" "$1"
|
||||
if test $exists -eq 0; then
|
||||
if test -n "${VCS}" -a $novcs -eq 0 && ! contains "$1" "${excludevcs[@]}"; then
|
||||
run --no-check ${VCS} add "$1"
|
||||
if test "${VCS}" = "svn"; then
|
||||
run svn propset svn:keywords "Id" "$1"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
doxyreplace() {
|
||||
configuring doxyfile $1
|
||||
if sed -i 's|\(^'"$1"' *=\) *.*|\1'" $2"'|g' doc/doxyfile.in; then
|
||||
success
|
||||
else
|
||||
error $0 $*
|
||||
fi
|
||||
}
|
||||
|
||||
doxyadd() {
|
||||
configuring doxyfile $1
|
||||
if sed -i '/^'"$1"' *=/a'"$1"' += '"$2" doc/doxyfile.in; then
|
||||
success
|
||||
else
|
||||
error $0 $*
|
||||
fi
|
||||
}
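
# Sketch of the effect (illustrative values): for a default doc/doxyfile.in,
#   doxyreplace INPUT "@top_srcdir@/src"  rewrites the line to: INPUT = @top_srcdir@/src
#   doxyadd INPUT "@top_srcdir@/html"     appends a new line:   INPUT += @top_srcdir@/html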
|
||||
|
||||
vcs2cl() {
|
||||
exists=0
|
||||
if test -f "ChangeLog"; then
|
||||
exists=1
|
||||
else
|
||||
touch "ChangeLog"
|
||||
fi
|
||||
if test -x $(which timeout); then
|
||||
local TIMEOUT="timeout 10"
|
||||
else
|
||||
local TIMEOUT=
|
||||
fi
|
||||
if test -x $(which ${VCS}2cl); then
|
||||
if test "${VCS}" = "git"; then
|
||||
$TIMEOUT ${VCS}2cl > ChangeLog || true
|
||||
elif test "${VCS}" = "svn"; then
|
||||
$TIMEOUT ${VCS}2cl --break-before-msg -a -i || true
|
||||
elif test -n "${VCS}"; then
|
||||
$TIMEOUT ${VCS}2cl || true
|
||||
fi
|
||||
fi
|
||||
if test $exists -eq 0; then
|
||||
if test -n "${VCS}" -a $novcs -eq 0 && ! contains "ChangeLog" "${excludevcs[@]}"; then
|
||||
run --no-check ${VCS} add ChangeLog
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
# Check if we are in subversion root, if so, create trunk, branches, tags:
|
||||
if test "${VCS}" = "svn" -a $novcs -eq 0; then
|
||||
if test "$(LANG= svn info | sed -n 's,Relative URL: *,,p')" = "^/"; then
|
||||
svn mkdir trunk branches tags
|
||||
cd trunk
|
||||
fi
|
||||
fi
|
||||
|
||||
# Initialize the environment:
|
||||
copy ${MY_NAME}
|
||||
copy ax_init_standard_project.m4
|
||||
copy ax_cxx_compile_stdcxx.m4
|
||||
copy ax_check_qt.m4
|
||||
copy resolve-debbuilddeps.sh
|
||||
copy resolve-rpmbuilddeps.sh
|
||||
copy build-in-docker.sh
|
||||
copy rpmsign.exp
|
||||
copy build-resource-file.sh
|
||||
copy sql-to-dot.sed
|
||||
copy mac-create-app-bundle.sh
|
||||
copy dependency-graph.sh
|
||||
copy template.sh
|
||||
AUTHOR=$(gpg -K 2>/dev/null | sed -n 's,uid *\(\[ultimate\] *\)\?,,p' | head -1)
|
||||
if test -z "${AUTHOR}"; then
|
||||
AUTHOR="FIRSTNAME LASTNAME (URL) <EMAIL>"
|
||||
fi
|
||||
to AUTHORS <<EOF && notice "please edit AUTHORS"
|
||||
$AUTHOR
|
||||
EOF
|
||||
to NEWS <<EOF && notice "please edit NEWS"
|
||||
$(date) created ${DEFAULT_PROJECT_NAME}
|
||||
EOF
|
||||
if test -e README.md; then
|
||||
README=README.md
|
||||
else
|
||||
README=README
|
||||
to README <<EOF && notice "please edit README"
|
||||
|
||||
${DEFAULT_PROJECT_NAME}
|
||||
|
||||
add description for ${DEFAULT_PROJECT_NAME}
|
||||
EOF
|
||||
fi
|
||||
DESCRIPTION=$(head -1 $README | sed 's,^#\+ *,,;s, *#\+$,,')
|
||||
to configure.ac <<EOF && notice "please edit configure.ac, then rerun $0" && exit 0
|
||||
${HEADER}# default is generated from AUTHORS and project name
|
||||
PROJECT_URL=
|
||||
SOURCE_DOWNLOAD=
|
||||
|
||||
m4_define(x_package_name, ${DEFAULT_PROJECT_NAME}) # project's name
|
||||
m4_define(x_major, 0) # project's major version
|
||||
m4_define(x_minor, 0) # project's minor version
|
||||
m4_include(ax_init_standard_project.m4)
|
||||
AC_INIT(x_package_name, x_version, x_bugreport, x_package_name)
|
||||
AM_INIT_AUTOMAKE([1.9 tar-pax])
|
||||
AX_INIT_STANDARD_PROJECT
|
||||
|
||||
# requirements, uncomment, what you need:
|
||||
#AX_USE_CXX
|
||||
#AX_USE_ETC
|
||||
#AX_USE_LIBTOOL
|
||||
#AX_USE_SCRIPTS
|
||||
#AX_USE_NODEJS
|
||||
#AX_USE_DOXYGEN
|
||||
#AX_USE_PERLDOC
|
||||
#AX_USE_DEBIAN_PACKAGING
|
||||
#AX_USE_RPM_PACKAGING
|
||||
#AX_USE_CPPUNIT
|
||||
#AX_BUILD_TEST
|
||||
#AX_BUILD_EXAMPLES
|
||||
#AX_BUILD_HTML
|
||||
#AX_BUILD_HTML_NPM
|
||||
|
||||
# qt features, uncomment, what you need:
|
||||
#AX_CHECK_QT([QT], [QtCore QtGui QtNetwork], [QtWidgets])
|
||||
#AX_REQUIRE_QT([QT], [QtCore QtGui QtNetwork], [QtWidgets])
|
||||
#AX_QT_NO_KEYWORDS
|
||||
|
||||
# create output
|
||||
AX_OUTPUT
|
||||
EOF
|
||||
|
||||
PACKAGE_NAME=$(sed -n 's/.*m4_define *( *x_package_name *, *\([^ ]*\) *).*/\1/p' configure.ac)
|
||||
SAVEIFS="$IFS"
|
||||
IFS="-" PackageName=( $PACKAGE_NAME )
|
||||
IFS="$SAVEIFS"
|
||||
PackageName=${PackageName[*]^}
|
||||
PackageName=${PackageName// /}
|
||||
|
||||
if ! testtag AX_CHECK_QT && \
|
||||
! testtag AX_REQUIRE_QT; then
|
||||
echo "${HEADER}MAINTAINERCLEANFILES = makefile.in" | \
|
||||
to --condition AX_USE_CXX src/makefile.am
|
||||
if ! ls src/*.[ch]xx; then
|
||||
to --condition AX_USE_CXX src/${PACKAGE_NAME#lib}.hxx <<EOF
|
||||
${CHEADER}#ifndef ${PackageName^^}_HXX
|
||||
#define ${PackageName^^}_HXX
|
||||
|
||||
/** @mainpage @description
|
||||
|
||||
@readme
|
||||
|
||||
*/
|
||||
|
||||
#endif
|
||||
EOF
|
||||
fi
|
||||
elif ! test -e src/makefile.am; then
|
||||
to --condition AX_USE_CXX src/makefile.am <<EOF
|
||||
${HEADER}bin_PROGRAMS = ${PACKAGE_NAME}
|
||||
|
||||
## required to enable the translation feature
|
||||
LANGUAGE_FILE_BASE = ${PACKAGE_NAME}
|
||||
|
||||
## list here the Qt plugins your project depends on
|
||||
## required to build Mac OS-X app-bundle
|
||||
QT_PLUGINS = iconengines imageformats platforms
|
||||
|
||||
#### enable if you deliver a KDE/Gnome desktop file
|
||||
#applicationsdir = \${datarootdir}/applications
|
||||
#dist_applications_DATA = ${PACKAGE_NAME}.desktop
|
||||
|
||||
#### enable (ev. instead of bin_PROGRAMS) if you build a library
|
||||
#lib_LTLIBRARIES = ${PACKAGE_NAME}.la
|
||||
#${PACKAGE_NAME}_la_SOURCES = libmain.cxx version.cxx
|
||||
## noop to prevent:
|
||||
## «src/makefile.am: error: object 'version.\$(OBJEXT)' created both with
|
||||
## libtool and without»
|
||||
#${PACKAGE_NAME}_la_CXXFLAGS = \$(AM_CXXFLAGS)
|
||||
|
||||
## list headers that are required for build, but that are not installed
|
||||
noinst_HEADERS = version.hxx
|
||||
|
||||
## list all %.hxx files with Q_OBJECT as moc_%.cxx
|
||||
${PACKAGE_NAME//-/_}_MOCFILES = moc_${PACKAGE_NAME}.cxx
|
||||
|
||||
## list all %.ui files as ui_%.hxx
|
||||
${PACKAGE_NAME//-/_}_UIFILES = ui_${PACKAGE_NAME}.hxx
|
||||
|
||||
## list all %.qrc resource files as qrc_%.cxx
|
||||
## note: if there exists a directory %, the file %.qrc is generated from that
|
||||
${PACKAGE_NAME//-/_}_RESOURCES = qrc_languages.cxx # qrc_resources.cxx
|
||||
|
||||
## list all final translation files, list all supported languages here
|
||||
${PACKAGE_NAME//-/_}_TRANSLATIONS = \${LANGUAGE_FILE_BASE}_en.qm \\
|
||||
\${LANGUAGE_FILE_BASE}_de.qm \\
|
||||
\${LANGUAGE_FILE_BASE}_fr.qm \\
|
||||
\${LANGUAGE_FILE_BASE}_it.qm
|
||||
|
||||
## list all C++ files that need translation
|
||||
${PACKAGE_NAME//-/_}_TR_FILES = main.cxx version.cxx
|
||||
|
||||
## automatic assembly, no need to change
|
||||
${PACKAGE_NAME//-/_}_SOURCES = \${${PACKAGE_NAME//-/_}_TR_FILES} \${BUILT_SOURCES}
|
||||
|
||||
## automatic assembly, no need to change
|
||||
BUILT_SOURCES = \${${PACKAGE_NAME//-/_}_MOCFILES} \\
|
||||
\${${PACKAGE_NAME//-/_}_UIFILES} \\
|
||||
\${${PACKAGE_NAME//-/_}_TRANSLATIONS} \\
|
||||
\${${PACKAGE_NAME//-/_}_RESOURCES}
|
||||
|
||||
## automatic assembly, no need to change
|
||||
EXTRA_DIST_TR = \${${PACKAGE_NAME//-/_}_MOCFILES:moc_%.cxx=%.hxx} \\
|
||||
\${${PACKAGE_NAME//-/_}_UIFILES:ui_%.hxx=%.ui}
|
||||
|
||||
## automatic assembly, no need to change
|
||||
## except: adapt the pre-delivered qt_%.qm list (language files you copy from Qt)
|
||||
EXTRA_DIST = \${EXTRA_DIST_TR} \\
|
||||
\${${PACKAGE_NAME//-/_}_RESOURCES:qrc_%.cxx=%.qrc} \\
|
||||
\${${PACKAGE_NAME//-/_}_TRANSLATIONS:%.qm=%.ts} \\
|
||||
qt_de.qm qt_fr.qm
|
||||
|
||||
## automatic assembly, no need to change
|
||||
LANGUAGE_FILES = \${EXTRA_DIST_TR} \${${PACKAGE_NAME//-/_}_TR_FILES}
|
||||
|
||||
CLEANFILES = \${${PACKAGE_NAME//-/_}_RESOURCES}
|
||||
MAINTAINERCLEANFILES = makefile.in
|
||||
EOF
|
||||
to --condition AX_USE_CXX src/main.cxx <<EOF
|
||||
${CHEADER}#include <${PACKAGE_NAME}.hxx>
|
||||
#include <QApplication>
|
||||
#include <QCommandLineParser>
|
||||
#include <iostream>
|
||||
#include <version.hxx>
|
||||
|
||||
int main(int argc, char *argv[]) try {
|
||||
QApplication a(argc, argv);
|
||||
a.setApplicationDisplayName(a.tr("${PACKAGE_NAME}"));
|
||||
a.setApplicationName(${PACKAGE_NAME}::package_name().c_str());
|
||||
a.setApplicationVersion(${PACKAGE_NAME}::version().c_str());
|
||||
QCommandLineParser parser;
|
||||
parser.addHelpOption();
|
||||
parser.process(a);
|
||||
QStringList scripts(parser.positionalArguments());
|
||||
${PackageName} w;
|
||||
w.show();
|
||||
return a.exec();
|
||||
} catch (std::exception &x) {
|
||||
std::cerr<<"**** error: "<<x.what()<<std::endl;
|
||||
return 1;
|
||||
}
|
||||
EOF
|
||||
if ! ls src/*.[ch]xx; then
|
||||
to --condition AX_USE_CXX src/${PACKAGE_NAME#lib}.hxx <<EOF
|
||||
${CHEADER}#ifndef ${PackageName^^}_HXX
|
||||
#define ${PackageName^^}_HXX
|
||||
|
||||
/** @mainpage @description
|
||||
|
||||
@readme
|
||||
|
||||
*/
|
||||
|
||||
#include <QMainWindow>
|
||||
#include <ui_${PACKAGE_NAME}.hxx>
|
||||
|
||||
/// Main Window
|
||||
/** Main window for ${PACKAGE_NAME} */
|
||||
class ${PackageName}: public QMainWindow, protected Ui::${PackageName} {
|
||||
Q_OBJECT;
|
||||
public:
|
||||
explicit ${PackageName}(QWidget *parent = 0): QMainWindow(parent) {
|
||||
setTitle(tr("${PACKAGE_NAME}[*]"));
|
||||
setupUi(this);
|
||||
}
|
||||
virtual ~${PackageName}() {}
|
||||
};
|
||||
|
||||
#endif
|
||||
EOF
|
||||
fi
|
||||
to --condition AX_USE_CXX src/${PACKAGE_NAME#lib}.ui <<EOF
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<ui version="4.0">
|
||||
<class>${PackageName}</class>
|
||||
<widget class="QMainWindow" name="${PackageName}">
|
||||
<property name="geometry">
|
||||
<rect>
|
||||
<x>0</x>
|
||||
<y>0</y>
|
||||
<width>800</width>
|
||||
<height>600</height>
|
||||
</rect>
|
||||
</property>
|
||||
<property name="windowTitle">
|
||||
<string>${PackageName}</string>
|
||||
</property>
|
||||
<widget class="QWidget" name="centralwidget"/>
|
||||
<widget class="QMenuBar" name="menubar">
|
||||
<property name="geometry">
|
||||
<rect>
|
||||
<x>0</x>
|
||||
<y>0</y>
|
||||
<width>800</width>
|
||||
<height>22</height>
|
||||
</rect>
|
||||
</property>
|
||||
</widget>
|
||||
<widget class="QStatusBar" name="statusbar"/>
|
||||
</widget>
|
||||
<resources/>
|
||||
<connections/>
|
||||
</ui>
|
||||
EOF
|
||||
to --condition AX_USE_CXX src/languages.qrc <<EOF
|
||||
<RCC>
|
||||
<qresource prefix="/language">
|
||||
<file>${PACKAGE_NAME}_de.qm</file>
|
||||
<file>${PACKAGE_NAME}_fr.qm</file>
|
||||
<file>${PACKAGE_NAME}_it.qm</file>
|
||||
<file>${PACKAGE_NAME}_en.qm</file>
|
||||
</qresource>
|
||||
</RCC>
|
||||
EOF
|
||||
fi
|
||||
to --condition AX_USE_CXX src/version.hxx <<EOF
|
||||
/*! @file
|
||||
|
||||
@id \$Id\$
|
||||
*/
|
||||
// 1 2 3 4 5 6 7 8
|
||||
// 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
#include <string>
|
||||
|
||||
namespace NAMESPACE {
|
||||
/// get package string which consists of package name and package version
|
||||
std::string package_string();
|
||||
/// get package name
|
||||
std::string package_name();
|
||||
/// get package version
|
||||
std::string version();
|
||||
/// get code build date
|
||||
std::string build_date();
|
||||
/// get author, i.e. copyright holder
|
||||
std::string author();
|
||||
/// get short package description (1st line of README)
|
||||
std::string description();
|
||||
/// get long package description (starting at 3rd line in README)
|
||||
std::string readme();
|
||||
/// get package logo file name
|
||||
std::string logo();
|
||||
/// get package icon file name
|
||||
std::string icon();
|
||||
/// used for <code>what filename</code>
|
||||
extern const std::string WHAT;
|
||||
/// used for <code>ident filename</code>
|
||||
extern const std::string IDENT;
|
||||
}
|
||||
EOF
|
||||
to --condition AX_USE_CXX src/version.cxx <<EOF
|
||||
/*! @file
|
||||
|
||||
@id \$Id\$
|
||||
*/
|
||||
// 1 2 3 4 5 6 7 8
|
||||
// 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
#include <string>
|
||||
|
||||
namespace NAMESPACE {
|
||||
std::string package_string() {
|
||||
return PACKAGE_STRING;
|
||||
}
|
||||
std::string package_name() {
|
||||
return PACKAGE_NAME;
|
||||
}
|
||||
std::string version() {
|
||||
return PACKAGE_VERSION;
|
||||
}
|
||||
std::string build_date() {
|
||||
return BUILD_DATE;
|
||||
}
|
||||
std::string author() {
|
||||
return AUTHOR;
|
||||
}
|
||||
std::string description() {
|
||||
return DESCRIPTION;
|
||||
}
|
||||
std::string readme() {
|
||||
return README;
|
||||
}
|
||||
std::string logo() {
|
||||
return PACKAGE_LOGO;
|
||||
}
|
||||
std::string icon() {
|
||||
return PACKAGE_ICON;
|
||||
}
|
||||
const std::string WHAT("@(#) " PACKAGE_STRING);
|
||||
const std::string IDENT("\$Id: " PACKAGE_STRING);
|
||||
}
|
||||
EOF
|
||||
to --condition AX_USE_ETC etc/makefile.am <<EOF
|
||||
${HEADER}pkgsysconfdir = \${sysconfdir}/@PACKAGE_NAME@
|
||||
|
||||
dist_pkgsysconf_DATA =
|
||||
|
||||
MAINTAINERCLEANFILES = makefile.in
|
||||
EOF
|
||||
to --condition AX_USE_SCRIPTS scripts/makefile.am <<EOF
|
||||
${HEADER}dist_bin_SCRIPTS =
|
||||
|
||||
MAINTAINERCLEANFILES = makefile.in
|
||||
EOF
|
||||
if testtag AX_USE_NODEJS; then
|
||||
checkdir nodejs
|
||||
checkdir nodejs/public
|
||||
checkdir nodejs/public/images
|
||||
checkdir nodejs/etc
|
||||
checkdir nodejs/etc/systemd
|
||||
fi
|
||||
to --condition AX_USE_NODEJS nodejs/makefile.am <<EOF
|
||||
${HEADER}EXTRA_DIST = @PACKAGE_NAME@.js package.json.in public routes sockets views
|
||||
|
||||
nodejsdir = \${pkgdatadir}/nodejs
|
||||
|
||||
sysconfdefaultdir = \${sysconfdir}/default
|
||||
sysconfinitdir = \${sysconfdir}/init
|
||||
dist_sysconf_DATA = \${sysconfdir}/@PACKAGE_NAME@.json
|
||||
dist_sysconfdefault_DATA = \${sysconfdir}/default/@PACKAGE_NAME@
|
||||
dist_sysconfinit_DATA = \${sysconfdir}/init/@PACKAGE_NAME@.conf
|
||||
|
||||
all: node_modules
|
||||
|
||||
node_modules: package.json.in
|
||||
HOME=. npm install
|
||||
|
||||
clean-local:
|
||||
-rm -r node_modules .npm
|
||||
|
||||
install-data-hook:
|
||||
test -d \$(DESTDIR)\${nodejsdir} || mkdir -p \$(DESTDIR)\${nodejsdir}
|
||||
chmod -R u+w \$(DESTDIR)\${nodejsdir}
|
||||
cp -r . \$(DESTDIR)\${nodejsdir}
|
||||
|
||||
uninstall-local:
|
||||
-chmod -R u+w \$(DESTDIR)\${nodejsdir}
|
||||
-rm -rf \$(DESTDIR)\${nodejsdir}
|
||||
|
||||
MAINTAINERCLEANFILES = makefile.in
|
||||
EOF
|
||||
to --condition AX_USE_NODEJS nodejs/${PACKAGE_NAME}.js <<EOF
|
||||
${CHEADER}try {
|
||||
|
||||
process.on('uncaughtException', function(e) {
|
||||
console.log("**** UNCAUGHT EXCEPTION ****");
|
||||
console.log(e);
|
||||
console.log(e.stack);
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
/**
|
||||
* Module dependencies.
|
||||
*/
|
||||
|
||||
var express = require('express')
|
||||
, routes = require(__dirname+'/routes');
|
||||
|
||||
var app = module.exports = express.createServer();
|
||||
var io = require('socket.io').listen(app);
|
||||
var package = require(__dirname+'/package.json');
|
||||
var config = require(package.path.config);
|
||||
var authentication = require(__dirname+'/authentication')(config.restrict);
|
||||
var sockets = require(__dirname+'/sockets')(io, authentication);
|
||||
|
||||
// Configuration
|
||||
process.argv.forEach(function(val, index) {
|
||||
if (index<2) {return}
|
||||
if (index!=2 || isNaN(val)) {
|
||||
console.log("**** ERROR: Unexpected Argument - allowed is only a port number");
|
||||
process.exit(1);
|
||||
}
|
||||
config.port = parseInt(val);
|
||||
});
|
||||
if (typeof config.port != 'number') {
|
||||
console.log("**** WARNING: no valid port given, defaults to 8888");
|
||||
config.port = 8888;
|
||||
}
|
||||
|
||||
app.configure(function(){
|
||||
app.set('views', __dirname + '/views');
|
||||
app.set('view engine', 'ejs');
|
||||
app.use(express.bodyParser());
|
||||
app.use(express.methodOverride());
|
||||
app.use(require('stylus').middleware({ src: __dirname + '/public' }));
|
||||
app.use(app.router);
|
||||
app.use(express.static(__dirname + '/public'));
|
||||
});
|
||||
|
||||
app.configure('development', function(){
|
||||
app.use(express.errorHandler({ dumpExceptions: true, showStack: true }));
|
||||
});
|
||||
|
||||
app.configure('production', function(){
|
||||
app.use(express.errorHandler());
|
||||
});
|
||||
|
||||
// Routes
|
||||
app.get('/', routes.index);
|
||||
|
||||
app.listen(config.port, function() {
|
||||
console.log("Express server listening on port %d in %s mode",
|
||||
app.address().port, app.settings.env);
|
||||
});
|
||||
} catch (e) {
|
||||
console.log("**** EXCEPTION ****");
|
||||
console.log(e);
|
||||
console.log(e.stack);
|
||||
process.exit(1);
|
||||
}
|
||||
EOF
|
||||
to --condition AX_USE_NODEJS nodejs/package.json.in <<EOF
|
||||
{
|
||||
"name": "@PACKAGE_NAME@",
|
||||
"version": "@PACKAGE_VERSION@",
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"express": "~2.5.8",
|
||||
"stylus": "~0.53.0",
|
||||
"ejs": ">= 0.0.1",
|
||||
"socket.io": "~1.4.4",
|
||||
"socketio-auth": "0.0.5",
|
||||
"ldapauth": "git+https://github.com/DimensionSoftware/node-ldapauth.git"
|
||||
},
|
||||
"description": "@DESCRIPTION@",
|
||||
"main": "@PACKAGE_NAME@.js",
|
||||
"devDependencies": {},
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"author": "@AUTHOR@",
|
||||
"license": "@LICENSE@",
|
||||
"path": {
|
||||
"prefix": "@PREFIX@",
|
||||
"sysconf": "@SYSCONFDIR@",
|
||||
"pkgdata": "@PKGDATADIR@",
|
||||
"localstate": "@LOCALSTATEDIR@",
|
||||
"log": "@LOCALSTATEDIR@/log/@PACKAGE_NAME@.log",
|
||||
"config": "@SYSCONFDIR@/@PACKAGE_NAME@.json",
|
||||
"nodejs": "@PKGDATADIR@/nodejs"
|
||||
}
|
||||
}
|
||||
EOF
|
||||
to --condition AX_USE_NODEJS nodejs/etc/${PACKAGE_NAME}.json <<EOF
|
||||
{
|
||||
"port": 8888,
|
||||
"restrict": {
|
||||
"passwords": {
|
||||
"foo": ["sha256", "fcde2b2edxx56bf408601fb721fe9b5c338d10ee429ea04fae5511b68fbf8fb9"]
|
||||
},
|
||||
"ldap": {
|
||||
"url": "ldap://your.ldap.host",
|
||||
"adminDn": "cn=tmp,ou=system,ou=people,dc=your,dc=ldap,dc=host",
|
||||
"adminPassword": "secret",
|
||||
"searchBase": "ou=person,ou=people,dc=your,dc=ldap,dc=host",
|
||||
"searchFilter": "(uid={{username}})"
|
||||
}
|
||||
}
|
||||
}
|
||||
EOF
|
||||
PACKAGE_NAME_UPPER=$(echo ${PACKAGE_NAME} | tr '+[:lower:]' 'X[:upper:]' | tr -cd '[[:alnum:]]._-')
|
||||
to --condition AX_USE_NODEJS nodejs/etc/default/${PACKAGE_NAME} <<EOF
|
||||
#EXEC_${PACKAGE_NAME_UPPER}="/usr/bin/nodejs /usr/share/${PACKAGE_NAME}/nodejs/${PACKAGE_NAME}"
|
||||
#${PACKAGE_NAME_UPPER}_LOG="/var/log/${PACKAGE_NAME}.log"
|
||||
#${PACKAGE_NAME_UPPER}="${PACKAGE_NAME}"
|
||||
#${PACKAGE_NAME_UPPER}_PORT="8888"
|
||||
EOF
|
||||
to --condition AX_USE_NODEJS nodejs/etc/init/${PACKAGE_NAME}.conf <<EOF
|
||||
#!upstart
|
||||
description "$DESCRIPTION"
|
||||
author "$(head -1 AUTHORS)"
|
||||
|
||||
start on (local-filesystems and net-device-up)
|
||||
stop on runlevel [!2345]
|
||||
|
||||
respawn
|
||||
|
||||
script
|
||||
echo \$\$ > /var/run/${PACKAGE_NAME}.pid
|
||||
# there are some useful defaults
|
||||
# do not edit this file, overwrite values in /etc/default/${PACKAGE_NAME}
|
||||
EXEC_${PACKAGE_NAME_UPPER}="/usr/bin/nodejs /usr/share/${PACKAGE_NAME}/nodejs/${PACKAGE_NAME}"
|
||||
${PACKAGE_NAME_UPPER}_LOG="/var/log/${PACKAGE_NAME}.log"
|
||||
${PACKAGE_NAME_UPPER}_USER="${PACKAGE_NAME}"
|
||||
${PACKAGE_NAME_UPPER}_PORT=""
|
||||
[ -r /etc/default/${PACKAGE_NAME} ] && . /etc/default/${PACKAGE_NAME}
|
||||
if test -n "\${${PACKAGE_NAME_UPPER}_USER}"; then
|
||||
exec sudo -u "\${${PACKAGE_NAME_UPPER}_USER}" \${EXEC_${PACKAGE_NAME_UPPER}} \${${PACKAGE_NAME_UPPER}_PORT} >> \${${PACKAGE_NAME_UPPER}_LOG} 2>&1
|
||||
else
|
||||
exec \${EXEC_${PACKAGE_NAME_UPPER}} \${${PACKAGE_NAME_UPPER}_PORT} >> \${${PACKAGE_NAME_UPPER}_LOG} 2>&1
|
||||
fi
|
||||
end script
|
||||
|
||||
pre-start script
|
||||
${PACKAGE_NAME_UPPER}_LOG="/var/log/${PACKAGE_NAME}.log"
|
||||
[ -r /etc/default/${PACKAGE_NAME} ] && . /etc/default/${PACKAGE_NAME}
|
||||
# Date format same as (new Date()).toISOString() for consistency
|
||||
echo "[`date -u +%Y-%m-%dT%T.%3NZ`] (sys) Starting" >> \${${PACKAGE_NAME_UPPER}_LOG}
|
||||
end script
|
||||
|
||||
pre-stop script
|
||||
${PACKAGE_NAME_UPPER}_LOG="/var/log/${PACKAGE_NAME}.log"
|
||||
[ -r /etc/default/${PACKAGE_NAME} ] && . /etc/default/${PACKAGE_NAME}
|
||||
rm /var/run/${PACKAGE_NAME}.pid
|
||||
echo "[`date -u +%Y-%m-%dT%T.%3NZ`] (sys) Stopping" >> \${${PACKAGE_NAME_UPPER}_LOG}
|
||||
end script
|
||||
EOF
|
||||
to --condition AX_USE_NODEJS nodejs/etc/systemd/system/${PACKAGE_NAME}.service <<EOF
|
||||
[Unit]
|
||||
Description=$DESCRIPTION
|
||||
|
||||
[Service]
|
||||
ExecStart=/usr/bin/nodejs /usr/share/${PACKAGE_NAME}/nodejs/${PACKAGE_NAME}
|
||||
StandardOutput=journal
|
||||
StandardError=journal
|
||||
Restart=on-abort
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
EOF
|
||||
to --condition AX_USE_NODEJS nodejs/public/javascripts/${PACKAGE_NAME}.js <<EOF
|
||||
${CHEADER}var socket = null;
|
||||
function init() {
|
||||
socket = io.connect();
|
||||
/*
|
||||
socket
|
||||
.io
|
||||
.on("connect", connect)
|
||||
.on("reconnect", connect)
|
||||
.on("disconnect", disconnected)
|
||||
.on("error", disconnected);
|
||||
socket
|
||||
.on("authenticated", authenticated)
|
||||
.on("unauthorized", unauthorized)
|
||||
.on("fail", error);
|
||||
*/
|
||||
}
|
||||
|
||||
/// On Load, Call @ref start
|
||||
/*
|
||||
\$(window.onbeforeunload = function() {
|
||||
return "Are you sure you want to navigate away?";
|
||||
});
|
||||
*/
|
||||
\$(init);
|
||||
EOF
|
||||
to --condition AX_USE_NODEJS nodejs/public/stylesheets/style.styl <<EOF
|
||||
body
|
||||
padding: 50px
|
||||
font: 14px "Lucida Grande", Helvetica, Arial, sans-serif
|
||||
a
|
||||
color: #00B7FF
|
||||
EOF
|
||||
to --condition AX_USE_NODEJS nodejs/routes/index.js <<EOF
|
||||
${CHEADER}var package = require(__dirname+"/../package.json");
|
||||
|
||||
exports.index = function(req, res) {
|
||||
res.render('index', {
|
||||
packagename: package.name,
|
||||
packageversion: package.version
|
||||
});
|
||||
};
|
||||
EOF
|
||||
to --condition AX_USE_NODEJS nodejs/sockets/index.js <<EOF
|
||||
module.exports = function(io, authentication) {
|
||||
|
||||
var module={};
|
||||
|
||||
function broadcast(signal, data) {
|
||||
console.log("<= signal: "+signal);
|
||||
io.sockets.emit(signal, data);
|
||||
}
|
||||
|
||||
function fail(txt, data) {
|
||||
console.log("** "+txt, data);
|
||||
}
|
||||
|
||||
function connection(socket, userdata) {
|
||||
|
||||
console.log("=> new connection from "+userdata.username);
|
||||
|
||||
function emit(signal, data, info) {
|
||||
if (typeof data == 'string' && !data.match("\n")) {
|
||||
console.log("<- signal: "+signal+"("+data+")");
|
||||
} else {
|
||||
console.log("<- signal: "+signal);
|
||||
}
|
||||
if (info) console.log(info);
|
||||
socket.emit(signal, data);
|
||||
}
|
||||
|
||||
function fail(txt, data) {
|
||||
console.log("** "+txt, data);
|
||||
emit("fail", txt);
|
||||
}
|
||||
|
||||
/*
|
||||
socket
|
||||
.on("xxx", xxx)
|
||||
.on("yyy", yyy;
|
||||
*/
|
||||
|
||||
}
|
||||
|
||||
// Handle Connection
|
||||
require('socketio-auth')(io, {
|
||||
authenticate: function (socket, data, callback) {
|
||||
console.log("=> authenticate: ", data.username);
|
||||
//get credentials sent by the client
|
||||
var username = data.username;
|
||||
var password = data.password;
|
||||
authentication(data.username, data.password,
|
||||
function() {
|
||||
console.log("####LOGIN-SUCESS####");
|
||||
callback(null, true)
|
||||
},
|
||||
function() {
|
||||
console.log("####LOGIN-FAIL####");
|
||||
callback(new Error("wrong credentials"))
|
||||
});
|
||||
},
|
||||
postAuthenticate: connection,
|
||||
timeout: "none"
|
||||
});
|
||||
|
||||
return module;
|
||||
}
|
||||
EOF
|
||||
to --condition AX_USE_NODEJS nodejs/views/index.ejs <<EOF
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
|
||||
<meta name="viewport" content="width=device-width initial-scale=1" />
|
||||
<link href="stylesheets/style.css" rel="stylesheet" type="text/css" />
|
||||
<script type="text/javascript" src="/socket.io/socket.io.js"></script>
|
||||
<script type="text/javascript" src="javascripts/${PACKAGE_NAME}.js"></script>
|
||||
<title>$DESCRIPTION</title>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<h1>$DESCRIPTION</h1>
|
||||
<p>generated by bootstrap, please edit</p>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
EOF
|
||||
to --condition AX_USE_NODEJS nodejs/views/layout.ejs <<EOF
|
||||
<%- body %>
|
||||
EOF
|
||||
echo "${HEADER}MAINTAINERCLEANFILES = makefile.in" | to --condition 'AX_USE_DOXYGEN|AX_USE_PERLDOC' doc/makefile.am
|
||||
if testtag AX_BUILD_TEST; then
|
||||
to test/runtests.sh < ${0%/*}/test/runtests.sh
|
||||
fi
|
||||
to --condition 'AX_BUILD_TEST|AX_USE_CPPUNIT' test/makefile.am <<EOF
|
||||
${HEADER}$(if testtag AX_USE_CXX; then
|
||||
cat <<EOF2
|
||||
AM_CPPFLAGS = -I\${top_srcdir}/src -I\${top_builddir}/src
|
||||
AM_LDFLAGS = -L\${abs_top_builddir}/src/.libs
|
||||
$(if testtag AX_USE_LIBTOOL; then
|
||||
cat <<EOF3
|
||||
LDADD = -lcppunit -l${PACKAGE_NAME#lib}
|
||||
EOF3
|
||||
fi)
|
||||
EOF2
|
||||
fi)
|
||||
|
||||
check_PROGRAMS = ${PACKAGE_NAME#lib}
|
||||
TESTS = \${check_PROGRAMS}
|
||||
|
||||
${PACKAGE_NAME#lib}_SOURCES = ${PACKAGE_NAME#lib}.cxx
|
||||
|
||||
MAINTAINERCLEANFILES = makefile.in
|
||||
EOF
|
||||
to --condition AX_USE_CPPUNIT --condition AX_USE_CXX test/${PACKAGE_NAME#lib}.cxx <<EOF
|
||||
${CHEADER}
|
||||
#include <cppunit/TestFixture.h>
|
||||
#include <cppunit/ui/text/TestRunner.h>
|
||||
#include <cppunit/extensions/HelperMacros.h>
|
||||
#include <cppunit/extensions/TestFactoryRegistry.h>
|
||||
#include <cppunit/XmlOutputter.h>
|
||||
#include <fstream>
|
||||
|
||||
/// @todo Rename DummyTest and DummyTest::dummy()
|
||||
/// @todo Write test cases
|
||||
class DummyTest: public CppUnit::TestFixture {
|
||||
public:
|
||||
void dummy() {
|
||||
}
|
||||
CPPUNIT_TEST_SUITE(DummyTest);
|
||||
CPPUNIT_TEST(dummy);
|
||||
CPPUNIT_TEST_SUITE_END();
|
||||
};
|
||||
CPPUNIT_TEST_SUITE_REGISTRATION(DummyTest);
|
||||
|
||||
int main(int argc, char** argv) try {
|
||||
std::ofstream ofs((*argv+std::string(".xml")).c_str());
|
||||
CppUnit::TextUi::TestRunner runner;
|
||||
runner.setOutputter(new CppUnit::XmlOutputter(&runner.result(), ofs));
|
||||
runner.addTest(CppUnit::TestFactoryRegistry::getRegistry().makeTest());
|
||||
return runner.run() ? 0 : 1;
|
||||
} catch (std::exception& e) {
|
||||
std::cerr<<"***Exception: "<<e.what()<<std::endl;
|
||||
return 1;
|
||||
}
|
||||
EOF
|
||||
to --condition AX_BUILD_EXAMPLES examples/makefile.am <<EOF
|
||||
${HEADER}AM_CPPFLAGS = -I\${top_srcdir}/src -I\${top_builddir}/src
|
||||
AM_LDFLAGS = -L\${abs_top_builddir}/src/.libs
|
||||
LDADD = -l${PACKAGE_NAME#lib}
|
||||
|
||||
exampledir = \${docdir}/examples
|
||||
example_DATA =
|
||||
|
||||
MAINTAINERCLEANFILES = makefile.in
|
||||
EOF
|
||||
to --condition AX_BUILD_HTML_NPM html/package.json.in <<EOF
|
||||
{
|
||||
"name": "@PACKAGE_NAME@",
|
||||
"version": "@PACKAGE_VERSION@",
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
},
|
||||
"description": "@DESCRIPTION@",
|
||||
"devDependencies": {},
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"author": "@AUTHOR@",
|
||||
"license": "@LICENSE@",
|
||||
"path": {
|
||||
"prefix": "@PREFIX@",
|
||||
"sysconf": "@SYSCONFDIR@",
|
||||
"pkgdata": "@PKGDATADIR@",
|
||||
"localstate": "@LOCALSTATEDIR@",
|
||||
"log": "@LOCALSTATEDIR@/log/@PACKAGE_NAME@.log",
|
||||
"config": "@SYSCONFDIR@/@PACKAGE_NAME@.json",
|
||||
"nodejs": "@PKGDATADIR@/nodejs"
|
||||
}
|
||||
}
|
||||
EOF
|
||||
to --condition 'AX_BUILD_HTML|AX_BUILD_HTML_NPM' html/makefile.am <<EOF
|
||||
${HEADER}EXTRA_DIST = $(testtag AX_BUILD_HTML_NPM && echo "package.json.in")
|
||||
|
||||
wwwdir = \${pkgdatadir}/html
|
||||
www_DATA = $(testtag AX_BUILD_HTML_NPM && echo "package.json")
|
||||
dist_www_DATA =
|
||||
|
||||
$(if testtag AX_BUILD_HTML_NPM; then
|
||||
cat<<EOF2
|
||||
|
||||
all: node_modules
|
||||
|
||||
node_modules: package.json.in
|
||||
HOME=. npm install
|
||||
|
||||
clean-local:
|
||||
-rm -r node_modules .npm
|
||||
|
||||
install-data-hook:
|
||||
test -d \$(DESTDIR)\${wwwdir} || mkdir -p \$(DESTDIR)\${wwwdir}
|
||||
chmod -R u+w \$(DESTDIR)\${wwwdir}
|
||||
cp -r . \$(DESTDIR)\${wwwdir}
|
||||
|
||||
uninstall-local:
|
||||
-chmod -R u+w \$(DESTDIR)\${wwwdir}
|
||||
-rm -rf \$(DESTDIR)\${wwwdir}
|
||||
EOF2
|
||||
fi)
|
||||
|
||||
MAINTAINERCLEANFILES = makefile.in
|
||||
EOF
|
||||
to --condition AX_USE_DOXYGEN doc/header.html.in <<EOF
|
||||
<!-- HTML header for doxygen 1.8.6-->
|
||||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
|
||||
<html xmlns="http://www.w3.org/1999/xhtml">
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
|
||||
<meta name="generator" content="Doxygen \$doxygenversion"/>
|
||||
<!--BEGIN PROJECT_NAME--><title>\$projectname: \$title</title><!--END PROJECT_NAME-->
|
||||
<!--BEGIN !PROJECT_NAME--><title>\$title</title><!--END !PROJECT_NAME-->
|
||||
<link href="\$relpath^tabs.css" rel="stylesheet" type="text/css"/>
|
||||
<script type="text/javascript" src="\$relpath^jquery.js"></script>
|
||||
<script type="text/javascript" src="\$relpath^dynsections.js"></script>
|
||||
\$treeview
|
||||
\$search
|
||||
\$mathjax
|
||||
<link href="\$relpath^\$stylesheet" rel="stylesheet" type="text/css" />
|
||||
\$extrastylesheet
|
||||
</head>
|
||||
<body>
|
||||
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
|
||||
|
||||
<div id="titlearea">
|
||||
<div id="projecthead">
|
||||
<div id="projectlogo"><img alt="" src="\$relpath^\$projectlogo"/></div>
|
||||
<div id="projectname">\$projectname</span> <span id="projectnumber">\$projectnumber</div>
|
||||
<div id="projectbrief">\$projectbrief</div>
|
||||
</div>
|
||||
<nav>
|
||||
<a href="@PROJECT_URL@" target="_blank">Project Management</a>
|
||||
<a href="@SOURCE_DOWNLOAD@" target="_blank">Download</a>
|
||||
<div>\$searchbox</div>
|
||||
</nav>
|
||||
</div>
|
||||
EOF
|
||||
to --condition AX_USE_DOXYGEN doc/footer.html.in <<EOF
|
||||
<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
|
||||
<ul>
|
||||
\$navpath
|
||||
<li class="footer"><a href="@AUTHOR_URL@" target="_blank">@AUTHOR_NAME@</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
EOF
|
||||
to --condition AX_USE_DOXYGEN doc/style.css <<EOF
|
||||
#titlearea {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: flex-start;
|
||||
}
|
||||
#titlearea nav {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
#titlearea nav a {
|
||||
background-color: lightgray;
|
||||
border: 1px solid gray;
|
||||
color: black;
|
||||
padding: 1ex;
|
||||
margin: 0;
|
||||
}
|
||||
img, object {
|
||||
max-width: 100% !important;
|
||||
}
|
||||
@media (max-width: 50em) {
|
||||
#navrow1, #navrow2 {
|
||||
display: block
|
||||
}
|
||||
#side-nav, #splitbar, .ui-resizable-handle ui-resizable-e, .ui-resizable-handle ui-resizable-s {
|
||||
display: none;
|
||||
}
|
||||
#doc-content {
|
||||
margin-left: 0 !important;
|
||||
}
|
||||
}
|
||||
@media (min-width: 50em) {
|
||||
#navrow1, #navrow2 {
|
||||
display: none;
|
||||
}
|
||||
#side-nav, #splitbar, .ui-resizable-handle ui-resizable-e, .ui-resizable-handle ui-resizable-s {
|
||||
display: block
|
||||
}
|
||||
}
|
||||
EOF
|
||||
if testtag AX_USE_DOXYGEN; then
|
||||
copy doc/plantuml.jar
|
||||
fi
|
||||
if testtag AX_USE_DOXYGEN; then
|
||||
if ! checkfile doc/doxyfile.in || \
|
||||
contains doc/doxyfile.in "${rebuildfiles[@]}"; then
|
||||
run doxygen -g doc/doxyfile.in
|
||||
sed -i ':a;/\\$/{s///;N;s/ *\n */ /g;ba}' doc/doxyfile.in
|
||||
if test $exists -eq 0; then
|
||||
if test -n "${VCS}" -a $novcs -eq 0 && ! contains "doc/doxyfile" "${excludevcs[@]}"; then
|
||||
run --no-check ${VCS} add doc/doxyfile.in
|
||||
if test "${VCS}" = "svn"; then
|
||||
run svn propset svn:keywords "Id" doc/doxyfile.in
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
doxyreplace PROJECT_NAME "@PACKAGE_NAME@"
|
||||
doxyreplace PROJECT_NUMBER "@PACKAGE_VERSION@"
|
||||
doxyreplace PROJECT_BRIEF "@DESCRIPTION@"
|
||||
doxyreplace PROJECT_LOGO "@top_srcdir@/@PACKAGE_LOGO@"
|
||||
doxyreplace INLINE_INHERITED_MEMB YES
|
||||
doxyreplace MULTILINE_CPP_IS_BRIEF YES
|
||||
doxyreplace TAB_SIZE 2
|
||||
doxyreplace ALIASES '"id=\\par File-ID\\n"'
|
||||
doxyadd ALIASES '"copy=\\par Copyright by <a href="@AUTHOR_URL@" target="_blank">@AUTHOR_NAME@</a>\\n"'
|
||||
doxyadd ALIASES '"license=\\par License\\n"'
|
||||
doxyadd ALIASES '"classmutex=\\par Reentrant:\\nAccess is locked with class static mutex @c "'
|
||||
doxyadd ALIASES '"instancemutex=\\par Reentrant:\\nAccess is locked with per instance mutex @c "'
|
||||
doxyadd ALIASES '"mutex=\\par Reentrant:\\nAccess is locked with mutex @c "'
|
||||
doxyadd ALIASES '"api=\\xrefitem api \\"API Call\\" \\"\\""'
|
||||
doxyadd ALIASES '"description=@DESCRIPTION@"'
|
||||
doxyadd ALIASES '"readme=@README_HTML@"'
|
||||
doxyadd ALIASES '"author=<a href="@AUTHOR_URL@" target="_blank">@AUTHOR_NAME@</a>"'
|
||||
doxyreplace PLANTUML_JAR_PATH '"@top_srcdir@/doc/plantuml.jar"'
|
||||
doxyreplace ENABLE_PREPROCESSING YES
|
||||
doxyreplace MACRO_EXPANSION YES
|
||||
doxyadd PREDEFINED '"NAMESPACE=@PACKAGE_NAME@"'
|
||||
doxyreplace BUILTIN_STL_SUPPORT YES
|
||||
doxyreplace DISTRIBUTE_GROUP_DOC YES
|
||||
doxyreplace EXTRACT_ALL YES
|
||||
doxyreplace EXTRACT_PACKAGE YES
|
||||
doxyreplace EXTRACT_PRIVATE YES
|
||||
doxyreplace EXTRACT_STATIC YES
|
||||
doxyreplace EXTRACT_LOCAL_CLASSES YES
|
||||
doxyreplace EXTRACT_LOCAL_METHODS YES
|
||||
doxyreplace EXTRACT_ANON_NSPACES YES
|
||||
doxyreplace SHOW_GROUPED_MEMB_INC YES
|
||||
doxyreplace SORT_MEMBERS_CTORS_1ST YES
|
||||
doxyreplace WARN_IF_UNDOCUMENTED NO
|
||||
doxyreplace WARN_LOGFILE doxygen.errors
|
||||
doxyreplace INPUT "@top_srcdir@/src"
|
||||
doxyadd INPUT "@top_srcdir@/@README_FILE@"
|
||||
if testtag AX_USE_SCRIPTS; then
|
||||
doxyadd INPUT "@top_srcdir@/scripts"
|
||||
fi
|
||||
if testtag AX_BUILD_HTML; then
|
||||
doxyadd INPUT "@top_srcdir@/html"
|
||||
fi
|
||||
if testtag AX_BUILD_TEST AX_USE_CPPUNIT; then
|
||||
doxyadd INPUT "@top_srcdir@/test"
|
||||
fi
|
||||
if testtag AX_USE_NODEJS; then
|
||||
doxyadd INPUT "@top_srcdir@/nodejs"
|
||||
doxyadd EXCLUDE "@top_srcdir@/nodejs/node_modules"
|
||||
doxyadd EXCLUDE "@top_srcdir@/nodejs/public/javascripts/ext"
|
||||
fi
|
||||
doxyreplace USE_MDFILE_AS_MAINPAGE "@top_srcdir@/@README_FILE@"
|
||||
doxyreplace FILE_PATTERNS '*.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.idl *.ddl *.odl *.h *.hh *.hxx *.hpp *.h++ *.cs *.d *.php *.php4 *.php5 *.phtml *.inc *.m *.markdown *.md *.mm *.dox *.py *.f90 *.f *.for *.tcl *.vhd *.vhdl *.ucf *.qsf *.as *.js *.wt *.sql'
|
||||
doxyreplace RECURSIVE YES
|
||||
doxyreplace EXCLUDE_PATTERNS "moc_* uic_* qrc_* version.[ch]xx"
|
||||
doxyreplace HTML_HEADER header.html
|
||||
doxyreplace HTML_FOOTER footer.html
|
||||
doxyreplace HTML_EXTRA_STYLESHEET style.css
|
||||
doxyreplace HTML_DYNAMIC_SECTIONS YES
|
||||
doxyreplace DISABLE_INDEX NO
|
||||
doxyreplace GENERATE_TREEVIEW YES
|
||||
doxyreplace EXAMPLE_PATH @top_srcdir@
|
||||
doxyreplace EXAMPLE_RECURSIVE YES
|
||||
doxyreplace FILTER_PATTERNS '*.wt=doxygen-webtester.sed *.sql=@top_srcdir@/sql-to-dot.sed'
|
||||
doxyreplace SOURCE_BROWSER YES
|
||||
doxyreplace INLINE_SOURCES YES
|
||||
doxyreplace GENERATE_TESTLIST YES
|
||||
doxyreplace SEARCHENGINE NO
|
||||
doxyreplace GENERATE_HTML YES
|
||||
doxyreplace GENERATE_LATEX NO
|
||||
doxyreplace LATEX_BATCHMODE YES
|
||||
doxyreplace LATEX_HIDE_INDICES YES
|
||||
doxyreplace COMPACT_RTF YES
|
||||
doxyreplace RTF_HYPERLINKS YES
|
||||
doxyreplace GENERATE_TAGFILE "@PACKAGE_NAME@.doxytag"
|
||||
doxyreplace HIDE_UNDOC_RELATIONS NO
|
||||
doxyreplace HAVE_DOT YES
|
||||
doxyreplace CLASS_GRAPH YES
|
||||
doxyreplace TEMPLATE_RELATIONS YES
|
||||
doxyreplace DOT_IMAGE_FORMAT svg
|
||||
doxyreplace INTERACTIVE_SVG NO
|
||||
doxyreplace DOT_TRANSPARENT YES
|
||||
fi
|
||||
fi
|
||||
if testtag AX_USE_DEBIAN_PACKAGING; then
|
||||
checkdir debian
|
||||
to debian/changelog.in <<EOF
|
||||
@PACKAGE@ (@PACKAGE_VERSION@~@DISTRO@.@BUILD_NUMBER@) @DISTRO@; urgency=low
|
||||
|
||||
@DEB_CHANGELOG@
|
||||
|
||||
-- @PACKAGER@ @BUILD_DATE@
|
||||
EOF
|
||||
RUN_DEPENDS="$(if testtag AX_USE_NODEJS; then echo -n ", nodejs, npm,"; fi)"
|
||||
BUILD_DEPENDS="gnupg, debhelper, fakeroot, ${VCSDEPENDS_DEB} pkg-config, automake, libtool, libltdl-dev, autotools-dev, pandoc, lsb-release$(if testtag AX_USE_DOXYGEN; then echo -n ", doxygen, graphviz, mscgen, default-jre-headless|default-jre"; fi; if testtag AX_USE_PERLDOC; then echo -n ", libpod-tree-perl"; fi; if testtag AX_USE_CPPUNIT; then echo -n ", libcppunit-dev"; fi; if testtag AX_CXX_QT || testtag AX_CHECK_QT AX_REQUIRE_QT; then echo -n ", qt5-default | libqt4-core | libqtcore4, qt5-qmake | qt4-qmake, qtbase5-dev | libqt4-dev, qtbase5-dev-tools | qt4-dev-tools, qttools5-dev-tools | qt4-dev-tools, qttools5-dev | qt4-dev,"; fi)"
|
||||
to debian/control.in <<EOF
|
||||
Source: @PACKAGE_NAME@
|
||||
Priority: extra
|
||||
Maintainer: @PACKAGER@
|
||||
Build-Depends: ${BUILD_DEPENDS}${RUN_DEPENDS} @DEB_BUILD_DEPEND@ @DEB_DEPEND_IFEXISTS@
|
||||
|
||||
Package: @PACKAGE_NAME@
|
||||
Section: $(if testtag AX_USE_LIBTOOL; then echo "libs"; else echo "@DEB_SECTION@"; fi)
|
||||
Architecture: any
|
||||
Depends: \${shlibs:Depends}, \${misc:Depends}${RUN_DEPENDS} @DEB_DEPEND@
|
||||
Description: @DESCRIPTION@
|
||||
@README_DEB@
|
||||
$( if testtag AX_USE_LIBTOOL; then
|
||||
cat <<EOF2
|
||||
|
||||
Package: @PACKAGE_NAME@-dev
|
||||
Section: libdevel
|
||||
Architecture: any
|
||||
Depends: @PACKAGE_NAME@ (= \${binary:Version}), ${BUILD_DEPENDS}${RUN_DEPENDS} @DEB_BUILD_DEPEND@ @DEB_DEPEND_IFEXISTS@
|
||||
Description: @DESCRIPTION@ - Development Package
|
||||
@README_DEB@
|
||||
EOF2
|
||||
fi)
|
||||
EOF
|
||||
to debian/docs <<EOF
|
||||
NEWS
|
||||
$README
|
||||
EOF
|
||||
to --condition AX_USE_LIBTOOL debian/${PACKAGE_NAME}.install <<EOF
|
||||
usr/lib/lib*.so.*
|
||||
usr/share/${PACKAGE_NAME}
|
||||
EOF
|
||||
to --condition AX_USE_LIBTOOL debian/${PACKAGE_NAME}-dev.install <<EOF
|
||||
usr/include/*
|
||||
usr/lib/lib*.a
|
||||
usr/lib/lib*.so
|
||||
usr/lib/pkgconfig/*
|
||||
usr/lib/*.la
|
||||
usr/share/doc/${PACKAGE_NAME}/html
|
||||
$(if testtag AX_BUILD_EXAMPLES; then
|
||||
echo usr/share/doc/${PACKAGE_NAME}/examples
|
||||
fi)
|
||||
EOF
|
||||
to --mode "u=rwx,g=rwx,o=rx" debian/rules <<EOF
|
||||
${HEADER}%:
|
||||
dh \$@
|
||||
EOF
|
||||
echo 7 | to debian/compat
|
||||
fi
|
||||
to ${PACKAGE_NAME}.desktop.in <<EOF
|
||||
[Desktop Entry]
|
||||
Type=Application
|
||||
Name=${PACKAGE_NAME}
|
||||
GenericName=${PACKAGE_NAME}
|
||||
Comment=@DESCRIPTION@
|
||||
Icon=@prefix@/share/@PACKAGE_NAME@/@PACKAGE_ICON@
|
||||
Exec=${PACKAGE_NAME} %u
|
||||
Terminal=false
|
||||
Categories=Qt;Utility;
|
||||
EOF
|
||||
to --condition AX_USE_RPM_PACKAGING ${PACKAGE_NAME}.spec.in <<EOF
|
||||
Summary: @DESCRIPTION@
|
||||
Name: @PACKAGE_NAME@
|
||||
Version: @VERSION@
|
||||
Release: @BUILD_NUMBER@.@DISTRO@
|
||||
License: LGPL
|
||||
Group: $(if testtag AX_USE_LIBTOOL; then
|
||||
echo Development/Libraries/C++;
|
||||
else
|
||||
echo @RPM_GROUP@;
|
||||
fi)
|
||||
$(if testtag AX_RPM_DEPEND; then echo "Requires: @RPM_DEPEND@"; fi)
|
||||
Source0: %{name}-%{version}.tar.gz
|
||||
BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root
|
||||
BuildRequires: which, pkgconfig, gnupg, expect, ${VCSDEPENDS_RPM}make, automake, autoconf, rpm-build$(
|
||||
if testtag AX_USE_CXX; then
|
||||
echo -n ", binutils-devel, gcc-c++"
|
||||
fi
|
||||
if testtag AX_USE_CPPUNIT; then
|
||||
echo -n ", cppunit-devel"
|
||||
fi
|
||||
if testtag AX_USE_DOXYGEN; then
|
||||
echo -n ", doxygen, graphviz, java-openjdk";
|
||||
fi
|
||||
if testtag AX_USE_PERLDOC; then
|
||||
echo -n ", libpod-tree-perl";
|
||||
fi
|
||||
) @RPM_BUILD_DEPEND@ @RPM_DEPEND_IFEXISTS@
|
||||
|
||||
#### os dependent definitions ####
|
||||
%if 0%{?suse_version} || 0%{?sles_version}
|
||||
BuildRequires: lsb-release$(
|
||||
if testtag AX_REQUIRE_QT || testtag AX_CHECK_QT; then
|
||||
echo -n ", libqt5-qtbase-devel, libqt5-qttools, libqt5-linguist-devel, libQt5WebKit5-devel libqt5-qtwebengine-devel libQt5WebKitWidgets-devel";
|
||||
fi)
|
||||
%else
|
||||
%if 0%{?mageia}
|
||||
BuildRequires: rpm-sign, lsb-release
|
||||
$(
|
||||
if testtag AX_REQUIRE_QT || testtag AX_CHECK_QT; then
|
||||
echo -n "BuildRequires: qtbase5-common-devel, qttools5, lib64qt5webkit-devel, lib64qt5webkitwidgets-devel";
|
||||
fi)
|
||||
%else
|
||||
$(
|
||||
if testtag AX_REQUIRE_QT || testtag AX_CHECK_QT; then
|
||||
echo -n "BuildRequires: qt5-qtbase-devel, qt5-qttools-devel, qt5-qtwebkit-devel";
|
||||
fi)
|
||||
BuildRequires: rpm-sign, redhat-lsb
|
||||
%global debug_package %{nil}
|
||||
%endif
|
||||
%endif
|
||||
$(
|
||||
if testtag AX_USE_DOXYGEN; then cat <<EOS
|
||||
%if ! 0%{?centos}
|
||||
BuildRequires: mscgen
|
||||
%if ! 0%{?mageia}
|
||||
BuildRequires: pandoc
|
||||
%endif
|
||||
%endif
|
||||
EOS
|
||||
fi
|
||||
if testtag AX_USE_LIBTOOL; then cat <<EOS
|
||||
%if 0%{?mageia}
|
||||
BuildRequires: libtool, libltdl-devel
|
||||
%else
|
||||
BuildRequires: libtool, libtool-ltdl-devel
|
||||
%endif
|
||||
EOS
|
||||
fi)
|
||||
|
||||
%description
|
||||
@README@
|
||||
$(if testtag AX_USE_LIBTOOL; then
|
||||
echo
|
||||
echo This package contains only the shared libraries required at runtime.
|
||||
fi)
|
||||
|
||||
$(if ! testtag 'AX_USE_LIBTOOL|AX_USE_CXX'; then
|
||||
echo '%global debug_package %{nil}'
|
||||
fi)
|
||||
%prep
|
||||
%setup -q
|
||||
./configure --prefix=/usr \\
|
||||
--sysconfdir=/etc \\
|
||||
--docdir=/usr/share/doc/packages/@PACKAGE_NAME@ \\
|
||||
--libdir=/usr/%_lib
|
||||
|
||||
%build
|
||||
make
|
||||
|
||||
%install
|
||||
DESTDIR=\$RPM_BUILD_ROOT make install
|
||||
|
||||
%clean
|
||||
rm -rf \$RPM_BUILD_ROOT
|
||||
|
||||
%files
|
||||
%defattr(-,root,root,-)
|
||||
$(if testtag AX_USE_LIBTOOL; then
|
||||
echo '/usr/%_lib/*.so.*'
|
||||
else
|
||||
echo '/usr/bin'
|
||||
echo '/usr/share/applications'
|
||||
fi)
|
||||
/usr/share/@PACKAGE_NAME@
|
||||
$(if testtag AX_USE_ETC; then
|
||||
|
||||
cat <<EOF2
|
||||
%config
|
||||
/etc
|
||||
|
||||
EOF2
|
||||
fi)
|
||||
%doc
|
||||
/usr/share/doc
|
||||
|
||||
$(if testtag AX_USE_LIBTOOL; then
|
||||
cat <<EOF2
|
||||
%package devel
|
||||
Summary: @DESCRIPTION@
|
||||
Group: Development/Libraries/C++
|
||||
Requires: @PACKAGE_NAME@ = @VERSION@ @RPM_BUILD_DEPEND@
|
||||
|
||||
%description devel
|
||||
@README@
|
||||
|
||||
This package contains all files required for development.
|
||||
|
||||
%files devel
|
||||
%defattr(-,root,root,-)
|
||||
/usr/%_lib/*.so
|
||||
/usr/%_lib/*.a
|
||||
/usr/%_lib/*.la
|
||||
/usr/%_lib/pkgconfig
|
||||
/usr/include/*
|
||||
%doc
|
||||
$(if testtag AX_USE_DOXYGEN AX_USE_PERLDOC; then
|
||||
echo '/usr/share/doc/packages/@PACKAGE_NAME@/html'
|
||||
fi)
|
||||
$(if testtag AX_BUILD_EXAMPLES; then
|
||||
echo '/usr/share/doc/packages/@PACKAGE_NAME@/examples'
|
||||
fi)
|
||||
EOF2
|
||||
fi)
|
||||
|
||||
%changelog
|
||||
|
||||
EOF
|
||||
SUBDIRS=""
|
||||
if testtag AX_USE_CXX; then
|
||||
SUBDIRS="${SUBDIRS} src"
|
||||
fi
|
||||
if testtag AX_USE_ETC; then
|
||||
SUBDIRS="${SUBDIRS} etc"
|
||||
fi
|
||||
if testtag AX_BUILD_TEST AX_USE_CPPUNIT; then
|
||||
SUBDIRS="${SUBDIRS} test"
|
||||
fi
|
||||
if testtag AX_USE_SCRIPTS; then
|
||||
SUBDIRS="${SUBDIRS} scripts"
|
||||
fi
|
||||
if testtag 'AX_USE_DOXYGEN|AX_USE_PERLDOC'; then
|
||||
SUBDIRS="${SUBDIRS} doc"
|
||||
fi
|
||||
if testtag AX_BUILD_EXAMPLES; then
|
||||
SUBDIRS="${SUBDIRS} examples"
|
||||
fi
|
||||
if testtag AX_BUILD_HTML; then
|
||||
SUBDIRS="${SUBDIRS} html"
|
||||
fi
|
||||
for d in src test scripts doc examples html; do
|
||||
if test -d "$d" -a "${SUBDIRS//$d/}" = "${SUBDIRS}"; then
|
||||
SUBDIRS="${SUBDIRS} $d"
|
||||
fi
|
||||
done
|
||||
to --mode "u=rwx,g=rwx,o=rx" autogen.sh <<EOF
|
||||
#!/bin/bash -e
|
||||
if test -n "$VCS" -a -d ".$VCS" -a -e -x "\$(which ${VCS}2cl)"; then
|
||||
$(case "$VCS" in
|
||||
(svn) echo " ${VCS}2cl";;
|
||||
(git) echo " ${VCS}2cl > ChangeLog";;
|
||||
esac)
|
||||
fi
|
||||
aclocal
|
||||
$(if testtag AX_USE_LIBTOOL; then
|
||||
cat <<EOF1
|
||||
if which libtoolize > /dev/null; then
|
||||
run libtoolize --force;
|
||||
elif which glibtoolize > /dev/null; then
|
||||
run glibtoolize --force;
|
||||
else
|
||||
echo "error: libtoolize not found" 1>&2
|
||||
exit 1
|
||||
fi
|
||||
EOF1
|
||||
fi)
|
||||
automake -a
|
||||
autoconf
|
||||
EOF
|
||||
to makefile.am<<EOF
|
||||
${HEADER}SUBDIRS =${SUBDIRS}
|
||||
|
||||
desktopdir = \${datadir}/applications
|
||||
desktop_DATA = @PACKAGE_DESKTOP@
|
||||
dist_pkgdata_DATA = @PACKAGE_ICON@
|
||||
dist_noinst_DATA = ax_check_qt.m4 bootstrap.sh \\
|
||||
resolve-rpmbuilddeps.sh autogen.sh \\
|
||||
ax_cxx_compile_stdcxx.m4 build-in-docker.sh \\
|
||||
build-resource-file.sh \\
|
||||
ax_init_standard_project.m4 \\
|
||||
mac-create-app-bundle.sh resolve-debbuilddeps.sh \\
|
||||
dependency-graph.sh template.sh \\
|
||||
sql-to-dot.sed
|
||||
dist_doc_DATA = AUTHORS NEWS $README COPYING INSTALL ChangeLog
|
||||
$(if test -e README.md -a ! -e README; then
|
||||
cat <<EOF2
|
||||
|
||||
README: README.md
|
||||
cp README.md README
|
||||
|
||||
CLEANFILES = README
|
||||
EOF2
|
||||
fi)
|
||||
MAINTAINERCLEANFILES = makefile.in
|
||||
EOF
|
||||
to --condition AX_USE_LIBTOOL src/${PACKAGE_NAME}.pc.in <<EOF
|
||||
${HEADER}prefix=@prefix@
|
||||
exec_prefix=@exec_prefix@
|
||||
libdir=\${exec_prefix}/lib
|
||||
includedir=\${prefix}/include
|
||||
translationsdir=@datadir@/@PACKAGE_NAME@/translations
|
||||
|
||||
Name: @PACKAGE_NAME@
|
||||
Description: @DESCRIPTION@
|
||||
Version: @VERSION@
|
||||
Libs: -L\${libdir} -l${PACKAGE_NAME#lib} @LDFLAGS@
|
||||
Cflags: -I\${includedir} @CPPFLAGS@
|
||||
Requires: @PKG_REQUIREMENTS@
|
||||
EOF
|
||||
to build-in-docker.conf <<EOF
|
||||
${HEADER}# Use Ubuntu Universe Repository
|
||||
repos+=("ubuntu:::universe")
|
||||
|
||||
# Use Marc Wäckerlin's Repository, see https://repository.mrw.sh
|
||||
repos+=("debian|ubuntu:::https://repository.mrw.sh:::https://repository.mrw.sh/@DISTRIBUTOR@/marc-waeckerlin.repo")
|
||||
keys+=("https://repository.mrw.sh/PublicKey")
|
||||
|
||||
# centos requires epel-release for some packages, such as Qt WebKit
|
||||
packages+=("centos:::epel-release")
|
||||
EOF
|
||||
|
||||
#### Cleanup If Makefile Exists ####
|
||||
if test -f makefile; then
|
||||
run --no-check make maintainer-clean
|
||||
fi
|
||||
|
||||
#### Build In Docker If User Requires ####
|
||||
if test "$docker" -eq 1; then
|
||||
./build-in-docker.sh $buildtarget $* || exit 1
|
||||
else
|
||||
|
||||
#### Bootstrap Before Configure ####
|
||||
run --no-check vcs2cl
|
||||
run aclocal
|
||||
if testtag AX_USE_LIBTOOL; then
|
||||
if which libtoolize > /dev/null; then
|
||||
run libtoolize --force;
|
||||
elif which glibtoolize > /dev/null; then
|
||||
run glibtoolize --force;
|
||||
else
|
||||
error libtoolize not found
|
||||
fi
|
||||
fi
|
||||
run automake -a
|
||||
run autoconf
|
||||
|
||||
#### Run Configure If User Requires ####
|
||||
if test "$configure" -eq 1; then
|
||||
./configure $* || exit 1
|
||||
fi
|
||||
|
||||
#### Run Make If User Requires ####
|
||||
if test "$build" -eq 1; then
|
||||
make $buildtarget || exit 1
|
||||
fi
|
||||
|
||||
fi
|
||||
|
19
build-in-docker.conf
Normal file
@@ -0,0 +1,19 @@
|
||||
## @id $Id$
|
||||
##
|
||||
## This file has been added:
|
||||
## - by bootstrap.sh
|
||||
## - on Thu, 19 July 2018 13:16:09 +0200
|
||||
## Feel free to change it or even remove and rebuild it, up to your needs
|
||||
##
|
||||
## 1 2 3 4 5 6 7 8
|
||||
## 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
# Use Ubuntu Universe Repository
|
||||
repos+=("ubuntu:::universe")
|
||||
|
||||
# Use Marc Wäckerlin's Repository, see https://repository.mrw.sh
|
||||
repos+=("debian|ubuntu:::https://repository.mrw.sh:::https://repository.mrw.sh/@DISTRIBUTOR@/@CODENAME@ marc-waeckerlin")
|
||||
keys+=("https://repository.mrw.sh/PublicKey")
|
||||
|
||||
# centos requires epel-release for some packages, such as Qt WebKit
|
||||
packages+=("centos:::epel-release")
|
373
build-in-docker.sh
Executable file
@@ -0,0 +1,373 @@
|
||||
#! /bin/bash -e
|
||||
set -o errtrace
|
||||
|
||||
# build and test everything in a fresh docker installation
|
||||
myarch=$(dpkg --print-architecture)
|
||||
if test "${arch}" = "amd64"; then
|
||||
myarch="amd64|i386"
|
||||
fi
|
||||
mode=
|
||||
img=
|
||||
repos=()
|
||||
keys=()
|
||||
dns=()
|
||||
envs=("-e LANG=${LANG}" "-e HOME=${HOME}" "-e TERM=xterm" "-e DEBIAN_FRONTEND=noninteractive" "-e DEBCONF_NONINTERACTIVE_SEEN=true")
|
||||
dirs=("-v $(pwd):/workdir" "-v ${HOME}/.gnupg:${HOME}/.gnupg")
|
||||
packages=()
|
||||
targets="all check distcheck"
|
||||
commands=()
|
||||
arch=$((which dpkg > /dev/null 2> /dev/null && dpkg --print-architecture) || echo amd64)
|
||||
host=
|
||||
flags=()
|
||||
wait=0
|
||||
if test -e ./build-in-docker.conf; then
|
||||
# you can preconfigure the variables in file build-in-docker.conf
|
||||
# if you do so, add the file to EXTRA_DIST in makefile.am
|
||||
source ./build-in-docker.conf
|
||||
fi
|
||||
|
||||
while test $# -gt 0; do
|
||||
case "$1" in
|
||||
(-h|--help)
|
||||
echo "$0 [OPTIONS]"
|
||||
echo
|
||||
echo "OPTIONS:"
|
||||
echo
|
||||
echo " -h, --help show this help"
|
||||
echo " -m, --mode <type> mode: deb, rpm, win, default: ${mode}"
|
||||
echo " -i, --image <image> use given docker image instead of ${img}"
|
||||
echo " -a, --arch <arch> build for given hardware architecture"
|
||||
echo " -t, --targets targets specify build targets, default: ${targets}"
|
||||
echo " --host <target-arch> host for cross compiling, e.g. i686-w64-mingw32"
|
||||
echo " -f, --flag <flag> add flag to ./bootstrap.sh or ./configure"
|
||||
echo " -r, --repo <url> add given apt repository"
|
||||
echo " -k, --key <url> add public key from url"
|
||||
echo " -n, --dns <ip> add ip as dns server"
|
||||
echo " -e, --env <var>=<val> set environment variable in docker"
|
||||
echo " -d, --dir <dir> access given directory read only"
|
||||
echo " -p, --package <pkg> install extra debian packages"
|
||||
echo " -c, --cmd <command> execute commands as root in docker"
|
||||
echo " -w, --wait on error keep docker container and wait for enter"
|
||||
echo
|
||||
echo " The option -i must be after -m, because mode sets a new default image"
|
||||
echo " The option -m must be after -t, because mode may be auto detected from targets"
|
||||
echo " The option -m must be after -h, because mode may set a host"
|
||||
echo " If target is either deb or rpm, mode is set to the same value"
|
||||
echo " If target is win, host is set to i686-w64-mingw32"
|
||||
echo
|
||||
echo " The options -r -k -e -d -p -c can be repeated several times."
|
||||
echo
|
||||
echo " The options -r -p -c allow an if-then-else contruct"
|
||||
echo " depending on the operating system:"
|
||||
echo " <os>:::<A>:::<B>"
|
||||
echo " <os>:::<A>"
|
||||
echo " Read as: On linux type <os> use <A> else use <B>"
|
||||
echo " That means: If the distributer ID or codename in lsb_release"
|
||||
echo " matches regular expression <os>, then <A> is replaced, else <B> is replaced."
|
||||
echo " The three colons are for splitting <os> from <A> and <B> part."
|
||||
echo " E.g.: Install package curl on wheezy and npm on olter systems:"
|
||||
echo " $0 -p Debian|precise:::curl:::npm"
|
||||
echo
|
||||
echo "EXAMPLE:"
|
||||
echo
|
||||
echo "$0 -i mwaeckerlin/ubuntu:trusty-i386 \\"
|
||||
echo " -t deb \\"
|
||||
echo " -e ANDROID_HOME=/opt/local/android \\"
|
||||
echo " -d /opt/local/android \\"
|
||||
echo " -r universe \\"
|
||||
echo " -r https://repository.mrw.sh \\"
|
||||
echo " -k https://repository.mrw.sh/PublicKey \\"
|
||||
echo " -p mrw-c++"
|
||||
echo
|
||||
exit 0
|
||||
;;
|
||||
(-m|--mode)
|
||||
shift;
|
||||
mode="$1"
|
||||
if test -z "$img"; then
|
||||
case "$mode" in
|
||||
(deb|apt) img="mwaeckerlin/debbuildenv";;
|
||||
(rpm|zypper) img="opensuse:latest";;
|
||||
(yum) img="centos:latest";;
|
||||
(dnf) img="fedora:latest";;
|
||||
(win)
|
||||
img="mwaeckerlin/debbuildenv"; host="${host:---host=i686-w64-mingw32}"
|
||||
targets="all install"
|
||||
flags+=("--prefix=/workdir/usr")
|
||||
packages+=("mingw-w64")
|
||||
;;
|
||||
(*)
|
||||
echo "**** ERROR: unknown mode '$1', try --help" 1>&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
;;
|
||||
(-i|--image) shift;
|
||||
img="$1"
|
||||
;;
|
||||
(-a|--arch) shift;
|
||||
arch="$1"
|
||||
;;
|
||||
(-t|--targets) shift;
|
||||
targets="$1"
|
||||
if test "$1" = "deb" -o "$1" = "rpm"; then
|
||||
# set mode to same value
|
||||
set -- "-m" "$@"
|
||||
continue
|
||||
fi
|
||||
;;
|
||||
(--host) shift;
|
||||
host="--host=$1"
|
||||
;;
|
||||
(-f|--flag) shift;
|
||||
flags+=("$1")
|
||||
;;
|
||||
(-r|--repo) shift;
|
||||
echo "OPTION: $1"
|
||||
repos+=("$1")
|
||||
;;
|
||||
(-k|--key) shift;
|
||||
keys+=("$1")
|
||||
;;
|
||||
(-e|--env) shift;
|
||||
envs+=("-e $1")
|
||||
;;
|
||||
(-n|--dns) shift;
|
||||
dns+=("--dns $1")
|
||||
;;
|
||||
(-d|--dir|--dirs) shift;
|
||||
dirs+=("-v $1:$1:ro")
|
||||
;;
|
||||
(-p|--package) shift;
|
||||
packages+=("$1")
|
||||
;;
|
||||
(-c|--cmd) shift;
|
||||
commands+=("$1")
|
||||
;;
|
||||
(-w|--wait)
|
||||
wait=1
|
||||
;;
|
||||
(*)
|
||||
echo "**** ERROR: unknown option '$1', try --help" 1>&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
if test $# -eq 0; then
|
||||
echo "**** ERROR: missing value, try --help" 2>61
|
||||
exit 1
|
||||
fi
|
||||
shift
|
||||
done
|
||||
|
||||
function traperror() {
|
||||
set +x
|
||||
local DOCKER_ID="$1"
|
||||
local err=($2) # error status
|
||||
local line="$3" # LINENO
|
||||
local linecallfunc="$4"
|
||||
local command="$5"
|
||||
local funcstack="$6"
|
||||
for e in ${err[@]}; do
|
||||
if test -n "$e" -a "$e" != "0"; then
|
||||
echo "<---"
|
||||
echo "ERROR: line $line - command '$command' exited with status: $e (${err[@]})"
|
||||
if [ "${funcstack}" != "main" -o "$linecallfunc" != "0" ]; then
|
||||
echo -n " ... Error at ${funcstack} "
|
||||
if [ "$linecallfunc" != "" ]; then
|
||||
echo -n "called at line $linecallfunc"
|
||||
fi
|
||||
echo
|
||||
fi
|
||||
if [ "$wait" -eq 1 ]; then
|
||||
echo " ... now you can access the docker container as root or user:"
|
||||
echo " docker exec -it ${DOCKER_ID} bash"
|
||||
echo " docker exec -u $(id -u) -it ${DOCKER_ID} bash"
|
||||
echo -n " ... press enter to cleanup: "
|
||||
read
|
||||
fi
|
||||
echo -n " ... cleanup docker: "
|
||||
docker stop "${DOCKER_ID}" || true
|
||||
docker rm "${DOCKER_ID}"
|
||||
echo "returning status: $e"
|
||||
echo "--->"
|
||||
exit $e
|
||||
fi
|
||||
done
|
||||
echo -n "SUCCESS ... cleanup docker: "
|
||||
docker rm -f "${DOCKER_ID}"
|
||||
exit 0
|
||||
}
|
||||
|
||||
function ifthenelse() {
|
||||
arg="$1"
|
||||
shift
|
||||
cmd="$*"
|
||||
DISTRIBUTOR=$(docker exec ${DOCKER_ID} lsb_release -si | sed 's, .*,,;s,.*,\L&,g')
|
||||
CODENAME=$(docker exec ${DOCKER_ID} lsb_release -cs)
|
||||
ARCH=$((docker exec ${DOCKER_ID} which dpkg > /dev/null 2> /dev/null && docker exec ${DOCKER_ID} dpkg --print-architecture) || echo amd64)
|
||||
case "$DISTRIBUTOR" in
|
||||
(opensuse) # code name may not be available, then set leap or tumbleweed
|
||||
if test "$CODENAME" = "n/a"; then
|
||||
CODENAME=$(docker exec ${DOCKER_ID} lsb_release -ds | sed "s,\($(docker exec ${DOCKER_ID} lsb_release -si | sed 's, ,\\|,g')\) *,,"';s, .*,,g;s,",,g;s,.*,\L&,g')
|
||||
fi
|
||||
;;
|
||||
(fedora|mageia) # numeric code name
|
||||
CODENAME=$(docker exec ${DOCKER_ID} lsb_release -rs)
|
||||
;;
|
||||
(centos) # only look at major number in centos
|
||||
CODENAME=$(docker exec ${DOCKER_ID} lsb_release -rs | sed 's,\..*,,')
|
||||
;;
|
||||
esac
|
||||
if test "${arg/:::/}" = "${arg}"; then
|
||||
cmd_tmp="${cmd//ARG/${arg//@DISTRIBUTOR@/${DISTRIBUTOR}}}"
|
||||
docker exec ${DOCKER_ID} bash -c "${cmd_tmp//@CODENAME@/${CODENAME}}"
|
||||
else
|
||||
os="${arg%%:::*}"
|
||||
thenpart="${arg#*:::}"
|
||||
elsepart=
|
||||
if test "${thenpart/:::/}" != "${thenpart}"; then
|
||||
elsepart="${thenpart##*:::}"
|
||||
thenpart="${thenpart%%:::*}"
|
||||
fi
|
||||
if [[ "${DISTRIBUTOR}-${CODENAME}-${ARCH}" =~ ${os} ]]; then
|
||||
if test -n "${thenpart}"; then
|
||||
cmd_tmp="${cmd//ARG/${thenpart//@DISTRIBUTOR@/${DISTRIBUTOR}}}"
|
||||
docker exec ${DOCKER_ID} bash -c "${cmd_tmp//@CODENAME@/${CODENAME}}"
|
||||
fi
|
||||
else
|
||||
if test -n "${elsepart}"; then
|
||||
cmd_tmp="${cmd//ARG/${elsepart//@DISTRIBUTOR@/${DISTRIBUTOR}}}"
|
||||
docker exec ${DOCKER_ID} bash -c "${cmd_tmp//@CODENAME@/${CODENAME}}"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
}
|
||||
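# Illustration (comment only): with the conditional syntax from the help text,
# an entry such as "ubuntu:::universe" makes ifthenelse run the command with
# ARG replaced by "universe" only when the container's
# "<distributor>-<codename>-<arch>" string matches "ubuntu";
# "debian|ubuntu:::A:::B" substitutes A on Debian or Ubuntu and B elsewhere.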
|
||||
set -x
|
||||
|
||||
if test -z "$img"; then
|
||||
img="mwaeckerlin/debbuildenv"
|
||||
fi
|
||||
docker pull $img
|
||||
DOCKER_ID=$(docker create ${dns[@]} ${dirs[@]} ${envs[@]} -w /workdir $img sleep infinity)
|
||||
trap 'traperror '"${DOCKER_ID}"' "$? ${PIPESTATUS[@]}" $LINENO $BASH_LINENO "$BASH_COMMAND" "${FUNCNAME[@]}" "${FUNCTION}"' SIGINT INT TERM EXIT
|
||||
if ! [[ $arch =~ $myarch ]]; then
|
||||
docker cp "/usr/bin/qemu-${arch}-static" "${DOCKER_ID}:/usr/bin/qemu-${arch}-static"
|
||||
fi
|
||||
docker start "${DOCKER_ID}"
|
||||
if ! docker exec ${DOCKER_ID} getent group $(id -g) > /dev/null 2>&1; then
|
||||
docker exec ${DOCKER_ID} groupadd -g $(id -g) $(id -gn)
|
||||
fi
|
||||
if ! docker exec ${DOCKER_ID} getent passwd $(id -u) > /dev/null 2>&1; then
|
||||
docker exec ${DOCKER_ID} useradd -m -u $(id -u) -g $(id -g) -d"${HOME}" $(id -un)
|
||||
fi
|
||||
docker exec ${DOCKER_ID} chown $(id -u):$(id -g) "${HOME}"
|
||||
if test -z "$mode"; then
|
||||
case "$targets" in
|
||||
(*deb*) mode=deb;;
|
||||
(*rpm*) mode=rpm;;
|
||||
(*) case "$img" in
|
||||
(*deb*|*ubuntu*|*debian*|*mint*) mode=deb;;
|
||||
(*rpm*|*fedora*|*centos*|*mageia*) mode=rpm;;
|
||||
(*mingw*|*win*) mode=win;;
|
||||
(*) mode=deb;;
|
||||
esac;;
|
||||
esac
|
||||
fi
|
||||
case "$mode" in
|
||||
(deb|apt|win)
|
||||
OPTIONS='-o Dpkg::Options::=--force-confdef -o Dpkg::Options::=--force-confnew -y --force-yes --no-install-suggests --no-install-recommends'
|
||||
docker exec ${DOCKER_ID} apt-get update ${OPTIONS}
|
||||
#docker exec ${DOCKER_ID} apt-get upgrade ${OPTIONS}
|
||||
docker exec ${DOCKER_ID} apt-get install ${OPTIONS} python-software-properties software-properties-common apt-transport-https dpkg-dev lsb-release wget || \
|
||||
docker exec ${DOCKER_ID} apt-get install ${OPTIONS} software-properties-common apt-transport-https dpkg-dev lsb-release wget || \
|
||||
docker exec ${DOCKER_ID} apt-get install ${OPTIONS} python-software-properties apt-transport-https dpkg-dev lsb-release wget;
|
||||
if [[ "${img}" =~ "ubuntu" ]]; then
|
||||
docker exec ${DOCKER_ID} apt-get install ${OPTIONS} locales
|
||||
docker exec ${DOCKER_ID} locale-gen ${LANG}
|
||||
docker exec ${DOCKER_ID} update-locale LANG=${LANG}
|
||||
fi
|
||||
if test -n "${keys[*]}"; then # fix dependency bug in cosmic and stretch
|
||||
docker exec ${DOCKER_ID} apt-get install ${OPTIONS} gnupg
|
||||
for key in "${keys[@]}"; do
|
||||
wget -O- "$key" \
|
||||
| docker exec -i ${DOCKER_ID} apt-key add -
|
||||
done
|
||||
fi
|
||||
for repo in "${repos[@]}"; do
|
||||
ifthenelse "${repo}" "apt-add-repository 'ARG'"
|
||||
done
|
||||
|
||||
docker exec ${DOCKER_ID} apt-get update ${OPTIONS}
|
||||
for package in "${packages[@]}"; do
|
||||
ifthenelse "${package}" "apt-get install ${OPTIONS} ARG"
|
||||
done
|
||||
for command in "${commands[@]}"; do
|
||||
ifthenelse "${command}" "ARG"
|
||||
done
|
||||
docker exec ${DOCKER_ID} ./resolve-debbuilddeps.sh
|
||||
;;
|
||||
(rpm|yum|dnf|zypper|urpmi)
|
||||
if [[ "$img" =~ "centos" ]]; then
|
||||
docker exec ${DOCKER_ID} yum install -y redhat-lsb epel-release
|
||||
docker exec -i ${DOCKER_ID} bash -c 'cat > /etc/yum.repos.d/wandisco-svn.repo' <<EOF
|
||||
[WandiscoSVN]
|
||||
name=Wandisco SVN Repo
|
||||
EOF
|
||||
docker exec -i ${DOCKER_ID} bash -c 'echo "baseurl=http://opensource.wandisco.com/centos/$(lsb_release -sr | sed '"'"'s,[^0-9].*,,'"'"')/svn-'$(svn --version | head -1 | sed 's,[^0-9]*\([0-9]\+\.[0-9]\+\).*,\1,')'/RPMS/$(uname -i)/" >> /etc/yum.repos.d/wandisco-svn.repo'
|
||||
docker exec -i ${DOCKER_ID} bash -c 'cat >> /etc/yum.repos.d/wandisco-svn.repo' <<EOF
|
||||
enabled=1
|
||||
gpgcheck=0
|
||||
EOF
|
||||
fi
|
||||
INSTALL_TOOL=$((docker exec ${DOCKER_ID} test -x /usr/bin/zypper && echo zypper install -y) || (docker exec ${DOCKER_ID} test -x /usr/bin/dnf && echo dnf install -y) || (docker exec ${DOCKER_ID} test -x /usr/bin/yum && echo yum install -y) || (docker exec ${DOCKER_ID} test -x /usr/sbin/urpmi && echo urpmi --auto))
|
||||
if test "$INSTALL_TOOL" = "urpmi --auto" -o "$INSTALL_TOOL" = "zypper install -y"; then
|
||||
LSB_RELEASE=lsb-release
|
||||
else
|
||||
LSB_RELEASE=/usr/bin/lsb_release
|
||||
fi
|
||||
docker exec ${DOCKER_ID} ${INSTALL_TOOL} rpm-build automake libtool subversion gcc-c++ pkgconfig wget $LSB_RELEASE
|
||||
if docker exec ${DOCKER_ID} test -x /usr/bin/dnf; then
|
||||
docker exec ${DOCKER_ID} dnf install -y 'dnf-command(config-manager)'
|
||||
fi
|
||||
i=0
|
||||
for key in "${keys[@]}"; do
|
||||
docker exec -i ${DOCKER_ID} wget -Orpm-key "$key"
|
||||
docker exec -i ${DOCKER_ID} rpm --import rpm-key
|
||||
docker exec -i ${DOCKER_ID} rm rpm-key
|
||||
done
|
||||
for repo in "${repos[@]}"; do
|
||||
INSTALL_REPO=$((docker exec ${DOCKER_ID} test -x /usr/bin/zypper && echo zypper ar) || (docker exec ${DOCKER_ID} test -x /usr/bin/dnf && echo dnf config-manager --add-repo) || (docker exec ${DOCKER_ID} test -x /usr/bin/yum && echo yum-config-manager --add-repo) || (docker exec ${DOCKER_ID} test -x /usr/sbin/urpmi && echo false))
|
||||
ifthenelse "${repo}" "${INSTALL_REPO} ARG"
|
||||
((++i))
|
||||
done
|
||||
for package in "${packages[@]}"; do
|
||||
ifthenelse "${package}" "${INSTALL_TOOL} ARG"
|
||||
done
|
||||
for command in "${commands[@]}"; do
|
||||
ifthenelse "${command}" "ARG"
|
||||
done
|
||||
docker exec ${DOCKER_ID} ./resolve-rpmbuilddeps.sh
|
||||
;;
|
||||
esac
|
||||
FLAGS=()
|
||||
for f in "${flags[@]}"; do
|
||||
FLAGS+=($(ifthenelse "$f" "echo 'ARG'"))
|
||||
done
|
||||
|
||||
docker exec -u $(id -u):$(id -g) ${DOCKER_ID} ./bootstrap.sh -t "${targets}" ${host} "${FLAGS[@]}"
|
||||
|
||||
# last check: try to install built deb or rpm files (if not already cleaned up)
|
||||
# not supported in trusty and jessie
|
||||
if test "$mode" = deb -a "${img//trusty/}" = "${img}" -a "${img//jessie/}" = "${img}"; then
|
||||
if test "${targets//deb/}" != "${targets}" && ls *.deb > /dev/null 2> /dev/null; then
|
||||
docker exec ${DOCKER_ID} bash -c "apt-get install ${OPTIONS} /workdir/*.deb"
|
||||
fi
|
||||
fi
|
||||
if test "$mode" = rpm -a "${targets//rpm/}" != "${targets}"; then
|
||||
if ls *.rpm > /dev/null 2> /dev/null; then
|
||||
docker exec ${DOCKER_ID} bash -c "${INSTALL_TOOL} /workdir/*.rpm"
|
||||
fi
|
||||
fi
|
||||
echo "done."
|
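Putting the options together, an illustrative invocation of the script above (image, repository and package names follow the built-in help; the conditional arguments are quoted so the shell does not interpret the pipe character):

    ./build-in-docker.sh -t "all check distcheck" -m deb \
        -r 'debian|ubuntu:::https://repository.mrw.sh' \
        -k https://repository.mrw.sh/PublicKey \
        -p 'Debian|precise:::curl:::npm'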
31
build-resource-file.sh
Executable file
@@ -0,0 +1,31 @@
|
||||
#! /bin/bash -ex
|
||||
|
||||
## @id $Id$
|
||||
|
||||
## build resource.qrc file from a resource directory
|
||||
##
|
||||
## Argument: $1: resource path (default: resources)
|
||||
## Result: file named <resource-path>.qrc (default: resources.qrc)
|
||||
##
|
||||
## Call:
|
||||
##
|
||||
## cd src
|
||||
## ../build-resource-file.sh
|
||||
|
||||
## 1 2 3 4 5 6 7 8
|
||||
## 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
RESOURCES=${1:-resources}
|
||||
TARGET=${RESOURCES}.qrc
|
||||
|
||||
test -d ${RESOURCES}
|
||||
|
||||
echo "<RCC>" > ${TARGET}
|
||||
for d in $(find ${RESOURCES} -mindepth 1 -type d); do
|
||||
echo " <qresource prefix=\"${d#${RESOURCES}/}\">" >> ${TARGET}
|
||||
for f in $(find $d -mindepth 1 -maxdepth 1 -type f); do
|
||||
echo " <file alias=\"${f##*/}\">$f</file>" >> ${TARGET}
|
||||
done
|
||||
echo " </qresource>" >> ${TARGET}
|
||||
done
|
||||
echo "</RCC>" >> ${TARGET}
|
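For a resource tree such as resources/icons/ok.png, the script above produces roughly the following resources.qrc (illustrative output):

    <RCC>
      <qresource prefix="icons">
        <file alias="ok.png">resources/icons/ok.png</file>
      </qresource>
    </RCC>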
45
configure.ac
Normal file
@@ -0,0 +1,45 @@
|
||||
## @id $Id$
|
||||
#
|
||||
# This file has been added by bootstrap.sh on Sun, 15 Mar 2015 09:14:42 +0100
|
||||
# Feel free to change it or even remove and rebuild it, up to your needs
|
||||
#
|
||||
## 1 2 3 4 5 6 7 8
|
||||
## 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
m4_define(x_package_name, proxyface) # project's name
|
||||
m4_define(x_major, 2) # project's major version
|
||||
m4_define(x_minor, 0) # project's minor version
|
||||
m4_define(x_least_diff, 223) # reset project's least version
|
||||
m4_include(ax_init_standard_project.m4)
|
||||
|
||||
AC_INIT(x_package_name, x_major.x_minor.x_least, x_bugreport, x_package_name)
|
||||
AM_INIT_AUTOMAKE([1.9 tar-pax])
|
||||
AX_INIT_STANDARD_PROJECT
|
||||
|
||||
# requirements, uncomment, what you need:
|
||||
AX_USE_CXX
|
||||
AX_USE_LIBTOOL
|
||||
AX_USE_DOXYGEN
|
||||
AX_USE_DEBIAN_PACKAGING
|
||||
AX_USE_RPM_PACKAGING
|
||||
#AX_USE_CPPUNIT
|
||||
AX_BUILD_EXAMPLES
|
||||
|
||||
AX_CXX_COMPILE_STDCXX(11, noext, mandatory)
|
||||
|
||||
AX_CHECK_QT([QT], [QtNetwork QtGui], [QtWidgets])
|
||||
AC_CONFIG_FILES([src/languages.qrc])
|
||||
if test -z "${MINGW}"; then
|
||||
AX_PKG_REQUIRE([PROXY], [libproxy-1.0], [], [libproxy libproxy1 lib64proxy], [], [DEV_DIST_PKG=libproxy])
|
||||
else
|
||||
LDFLAGS+=" -lwinhttp"
|
||||
HAVE_PROXY=0
|
||||
fi
|
||||
AM_CONDITIONAL(HAVE_PROXY, test ${HAVE_PROXY} -eq 1)
|
||||
|
||||
if test "$HAVE_QT" -ne 1 -a "$HAVE_PROXY" -ne 1; then
|
||||
AC_MSG_ERROR([Either Qt or Google Proxy is required.])
|
||||
fi
|
||||
|
||||
# create output
|
||||
AX_OUTPUT
|
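Once the library is built and installed, dependent projects pick up the compile and link flags through the pkg-config file that the bootstrap generates from src/${PACKAGE_NAME}.pc.in (see the heredoc near the top of this diff). Assuming an installation under the default prefix:

    pkg-config --cflags --libs proxyface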
217
configure.in
@@ -1,217 +0,0 @@
|
||||
# $Id$
|
||||
AC_ALIAS([AC_DEFINE_DIR], [AX_DEFINE_DIR])
|
||||
AC_DEFUN([AX_DEFINE_DIR], [
|
||||
prefix_NONE=
|
||||
exec_prefix_NONE=
|
||||
test "x$prefix" = xNONE && prefix_NONE=yes && prefix=$ac_default_prefix
|
||||
test "x$exec_prefix" = xNONE && exec_prefix_NONE=yes && exec_prefix=$prefix
|
||||
dnl In Autoconf 2.60, ${datadir} refers to ${datarootdir}, which in turn
|
||||
dnl refers to ${prefix}. Thus we have to use `eval' twice.
|
||||
eval ax_define_dir="\"[$]$2\""
|
||||
eval ax_define_dir="\"$ax_define_dir\""
|
||||
AC_SUBST($1, "$ax_define_dir")
|
||||
AC_DEFINE_UNQUOTED($1, "$ax_define_dir", [$3])
|
||||
test "$prefix_NONE" && prefix=NONE
|
||||
test "$exec_prefix_NONE" && exec_prefix=NONE
|
||||
])
|
||||
|
||||
AC_INIT([README])
|
||||
SRC_DIR=src
|
||||
TST_DIR=
|
||||
DOC_DIR=doc
|
||||
|
||||
m4_define(x_packagename, proxyface)
|
||||
m4_define(x_major, 1)
|
||||
m4_define(x_minor, 0)
|
||||
PACKAGENAME=x_packagename
|
||||
MAJOR=x_major
|
||||
MINOR=x_minor
|
||||
BUILDDATE=$(date "+%d.%m.%Y/%H.%M")
|
||||
|
||||
if svn info . 2>&1 > /dev/null; then
|
||||
LEAST=$(LANG= svn info $path | sed -n 's/Revision: //p')
|
||||
break;
|
||||
else
|
||||
LEAST=[$(pwd | sed -n 's,^.*/'${PACKAGENAME}'-'${MAJOR}'\.'${MINOR}'\.\([0-9]*\).*$,\1,p')]
|
||||
if test -z "${LEAST}"; then
|
||||
LEAST="ERROR_CANNOT_DETERMINE_REVISION_NUMBER from $(pwd)"
|
||||
fi
|
||||
fi
|
||||
|
||||
AM_INIT_AUTOMAKE($PACKAGENAME, $MAJOR.$MINOR.$LEAST, [marc@waeckerlin.org])
|
||||
|
||||
# files to create
|
||||
AC_CONFIG_FILES([makefile ${PACKAGENAME}.spec src/version.cxx
|
||||
src/makefile examples/makefile
|
||||
doc/doxyfile doc/makefile
|
||||
src/${PACKAGENAME}.pc debian/changelog])
|
||||
|
||||
# copy M4 to shell
|
||||
AC_SUBST(MAJOR)
|
||||
AC_SUBST(MINOR)
|
||||
AC_SUBST(LEAST)
|
||||
AC_SUBST(BUILDDATE)
|
||||
|
||||
# libtool versioning
|
||||
LIB_MAJOR=m4_eval(x_major+x_minor)
|
||||
LIB_MINOR=${LEAST}
|
||||
LIB_LEAST=x_minor
|
||||
LIB_VERSION="${LIB_MAJOR}:${LIB_MINOR}:${LIB_LEAST}"
|
||||
AC_SUBST(LIB_VERSION)
|
||||
|
||||
# home
|
||||
AC_SUBST(HOME)
|
||||
|
||||
# datadir for languages
|
||||
AX_DEFINE_DIR([DATADIR], [datadir])
|
||||
#AC_SUBST(DATADIR)
|
||||
|
||||
# macros
|
||||
README=README
|
||||
AC_SUBST_FILE(README)
|
||||
CHANGE_LOG=ChangeLog
|
||||
AC_SUBST_FILE(CHANGE_LOG)
|
||||
|
||||
AM_CPPFLAGS="-DPACKAGEVERSION='\"${VERSION}\"' -DPACKAGENAME='\"${PACKAGENAME}\"'"
|
||||
|
||||
# Get rid of those stupid -O2 -g options!
|
||||
CXXFLAGS="${CXXFLAGS:-}"
|
||||
|
||||
# languages
|
||||
AC_LANG(C++)
|
||||
|
||||
# programs
|
||||
AC_PROG_CXX
|
||||
AC_PROG_CPP
|
||||
AC_PROG_INSTALL
|
||||
AC_PROG_LN_S
|
||||
AC_PROG_MAKE_SET
|
||||
AC_PROG_LIBTOOL
|
||||
AC_CHECK_PROG(have_doxygen, doxygen, yes, no)
|
||||
AC_CHECK_PROG(have_dot, dot, yes, no)
|
||||
PKG_PROG_PKG_CONFIG
|
||||
|
||||
AC_ARG_ENABLE(pedantic,
|
||||
[AS_HELP_STRING([--enable-pedantic],
|
||||
[enable all warnings and checks, abort on warnings])],
|
||||
[have_pedantic="$enableval"; test "$enableval" = "yes" && \
|
||||
AM_CXXFLAGS="${AM_CXXFLAGS:-} -pedantic-errors -Wall -W -Wfloat-equal -Wundef -Wendif-labels -Wpointer-arith -Wcast-align -Wwrite-strings -Wconversion -Wsign-compare -Wmissing-format-attribute -Wno-multichar -Wpacked -Wredundant-decls -Werror -Wshadow -Wcast-qual -Wno-ctor-dtor-privacy"])
|
||||
dnl problem in libs: -Wshadow -Wcast-qual
|
||||
dnl auto.hpp: -Wno-ctor-dtor-privacy (removed)
|
||||
AM_CONDITIONAL(PEDANTIC, test "$enableval" = "yes")
|
||||
AC_ARG_ENABLE(dot,
|
||||
[AS_HELP_STRING([--disable-dot],
|
||||
[disable dot graphic tools for documentation])],
|
||||
[have_dot="$enableval"])
|
||||
test "$enableval" = "yes" && HAVE_DOT="YES" || HAVE_DOT="NO";
|
||||
AM_PATH_CPPUNIT([1.0.0], [have_cppunit="yes"], [have_cppunit="no"])
|
||||
|
||||
MINGW32=no
|
||||
MAC=no
|
||||
case $host_os in
|
||||
*mingw32*) MINGW32=yes;;
|
||||
*darwin* | *rhapsody* | *macosx*) MAC=yes;;
|
||||
esac
|
||||
AM_CONDITIONAL(MINGW32, test "$MINGW32" = "yes")
|
||||
AM_CONDITIONAL(MAC, test "$MAC" = "yes")
|
||||
|
||||
# export macros
|
||||
SRCDIR=${srcdir}
|
||||
AC_SUBST(SRCDIR)
|
||||
AC_SUBST(SRC_DIR)
|
||||
AC_SUBST(TST_DIR)
|
||||
AC_SUBST(DOC_DIR)
|
||||
AC_SUBST(HAVE_DOT)
|
||||
AC_SUBST(THREADS)
|
||||
AC_SUBST(PACKAGENAME)
|
||||
AC_SUBST(AM_CXXFLAGS)
|
||||
AC_SUBST(AM_CPPFLAGS)
|
||||
AC_SUBST(LIBS)
|
||||
# Qt Environment
|
||||
AC_MSG_CHECKING(QT4 directory)
|
||||
QTDIR="no"
|
||||
AC_ARG_WITH([qt-dir],
|
||||
AC_HELP_STRING([--with-qt-dir=/path/to/Qt4],
|
||||
[to specify the path to the Qt4 directory.]),
|
||||
[QTPATHS="$withval"],
|
||||
[QTPATHS="/usr/include/qt4 /usr/local/include/qt4 /opt/include/qt4 /opt/local/include/qt4 /opt/local/libexec/qt4-mac/include /usr/include /usr/local/include /opt/include /opt/local/include"])
|
||||
for x in $QTPATHS; do
|
||||
if test -d $x/QtCore ; then
|
||||
QTINCDIR=$x
|
||||
QTDIR=${x%/include*}
|
||||
if test -d $QTDIR/lib; then
|
||||
QTLIBDIR=$QTDIR/lib
|
||||
break
|
||||
fi
|
||||
fi
|
||||
done
|
||||
AC_MSG_RESULT($QTDIR)
|
||||
AM_CONDITIONAL(USE_QT, test -n "$QTLIBDIR")
|
||||
if test -z "$QTLIBDIR"; then
|
||||
AC_MSG_ERROR(Could not locate QT 4)
|
||||
case $host in
|
||||
*darwin*)
|
||||
LDFLAGS+=" -lproxy"
|
||||
;;
|
||||
*mingw*|*win*)
|
||||
LDFLAGS+=" -lwinhttp"
|
||||
;;
|
||||
*)
|
||||
LDFLAGS+=" -lproxy"
|
||||
;;
|
||||
esac
|
||||
else
|
||||
# by now, Linux/Unix always uses libproxy
|
||||
case $host in
|
||||
*darwin*)
|
||||
LDFLAGS+=" -L$QTLIBDIR -lQtCore -lQtNetwork -lQtGui"
|
||||
;;
|
||||
*mingw*|*win*)
|
||||
LDFLAGS+=" -L$QTLIBDIR -lQtCore4 -lQtNetwork4 -lQtGui4"
|
||||
;;
|
||||
*)
|
||||
LDFLAGS+=" -L$QTLIBDIR -lQtCore -lQtNetwork -lQtGui"
|
||||
LDFLAGS+=" -lproxy"
|
||||
;;
|
||||
esac
|
||||
CPPFLAGS+=" -DUNICODE -DQT_NO_DEBUG"
|
||||
CPPFLAGS+=" -DQT_GUI_LIB -DQT_NETWORK_LIB -DQT_WEBKIT_LIB -DQT_CORE_LIB"
|
||||
CPPFLAGS+=" -I$QTINCDIR"
|
||||
AC_CHECK_PROGS([MOC], [moc-qt4 moc-mac moc])
|
||||
test -n "$MOC" || AC_MSG_ERROR([moc for Qt 4 not found!])
|
||||
AC_SUBST(MOC)
|
||||
AC_CHECK_PROGS([RCC], [rcc-qt4 rcc-mac rcc])
|
||||
test -n "$RCC" || AC_MSG_ERROR([rcc for Qt 4 not found!])
|
||||
AC_SUBST(RCC)
|
||||
AC_CHECK_PROGS([UIC], [uic-qt4 uic-mac uic])
|
||||
test -n "$UIC" || AC_MSG_ERROR([uic for Qt 4 not found!])
|
||||
AC_SUBST(UIC)
|
||||
AC_CHECK_PROGS([LRELEASE], [lrelease-qt4 lrelease-mac lrelease])
|
||||
test -n "$LRELEASE" || AC_MSG_ERROR([lrelease for Qt 4 not found!])
|
||||
AC_SUBST(LRELEASE)
|
||||
AC_CHECK_PROGS([LUPDATE], [lupdate-qt4 lupdate-mac lupdate])
|
||||
test -n "$LUPDATE" || AC_MSG_ERROR([lupdate for Qt 4 not found!])
|
||||
AC_SUBST(LUPDATE)
|
||||
AC_ARG_VAR(LUPDATE_ARGS, [arguments for qt lupdate command, e.g. -no-obsolete])
|
||||
fi
|
||||
|
||||
# create output
|
||||
AC_OUTPUT
|
||||
# infos and warnings
|
||||
if test "$have_doxygen" = "no"; then
|
||||
AC_MSG_WARN([Missing program doxygen!
|
||||
- you cannot rebuild the documentation with make doc
|
||||
- there are precompiled derived files in the distribution]); fi
|
||||
if test "$have_dot" = "no"; then
|
||||
AC_MSG_WARN([Missing program dot!
|
||||
- when you rebuild documentation with make doc, there are no generated images
|
||||
- there are precompiled derived files in the distribution]); fi
|
||||
if test "$have_cppunit" = "no"; then
|
||||
AC_MSG_WARN([Missing cppunit development library!
|
||||
- you cannot check the library using "make check"
|
||||
- everything else works perfectly]); fi
|
||||
if test "$have_pedantic" == "yes"; then
|
||||
AC_MSG_NOTICE([Pedantic compile mode enabled!
|
||||
- all warnings for GNU g++ are enabled
|
||||
- all warnings result in an error
|
||||
- doxygen warnings are treated as error too]); fi
|
6
debian/changelog.in
vendored
@@ -1,5 +1,5 @@
|
||||
@PACKAGE@ (@VERSION@-1) unstable; urgency=low
|
||||
@PACKAGE@ (@PACKAGE_VERSION@~@DISTRO@.@BUILD_NUMBER@) @DISTRO@; urgency=low
|
||||
|
||||
* see https://dev.swisssign.com/projects/proxyface for changes
|
||||
@DEB_CHANGELOG@
|
||||
|
||||
-- Marc Wäckerlin (SwissSign AG) <marc.waeckerlin@tech.swisssign.com> Wed, 07 Apr 2010 10:23:02 +0200
|
||||
-- @PACKAGER@ @BUILD_DATE@
|
||||
|
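After substitution by the build system, an entry generated from this template looks roughly as follows (all values are hypothetical placeholders):

    proxyface (2.0.3~xenial.1) xenial; urgency=low

      * see https://dev.swisssign.com/projects/proxyface for changes

     -- Packager Name <packager@example.com>  Thu, 19 Jul 2018 13:16:09 +0200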
21
debian/control
vendored
@@ -1,21 +0,0 @@
|
||||
Source: proxyface
|
||||
Priority: extra
|
||||
Maintainer: Marc Wäckerlin (SwissSign AG) <marc.waeckerlin@tech.swisssign.com>
|
||||
Build-Depends: debhelper (>= 7), autotools-dev
|
||||
Standards-Version: 3.8.1
|
||||
Section: libs
|
||||
Homepage: https://dev.swisssign.com/projects/proxyface
|
||||
|
||||
Package: proxyface-dev
|
||||
Section: libdevel
|
||||
Architecture: any
|
||||
Depends: proxyface (= ${binary:Version})
|
||||
Description: proxyface
|
||||
.
|
||||
|
||||
Package: proxyface
|
||||
Section: libs
|
||||
Architecture: any
|
||||
Depends: ${shlibs:Depends}, ${misc:Depends}
|
||||
Description: proxyface
|
||||
.
|
18
debian/control.in
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
Source: @PACKAGE_NAME@
|
||||
Priority: extra
|
||||
Maintainer: @PACKAGER@
|
||||
Build-Depends: gnupg, debhelper, fakeroot, svn2cl, git, pkg-config, automake, libtool, autotools-dev, pandoc, lsb-release, doxygen, graphviz, mscgen, default-jre-headless|default-jre, qt5-default | libqt4-core | libqtcore4, qt5-qmake | qt4-qmake, qtbase5-dev | libqt4-dev, qtbase5-dev-tools | qt4-dev-tools, qttools5-dev-tools | qt4-dev-tools, qttools5-dev-tools | qt4-dev-tools @DEB_BUILD_DEPEND@ @DEB_DEPEND_IFEXISTS@
|
||||
|
||||
Package: @PACKAGE_NAME@
|
||||
Section: libs
|
||||
Architecture: any
|
||||
Depends: ${shlibs:Depends}, ${misc:Depends} @DEB_DEPEND@
|
||||
Description: @DESCRIPTION@
|
||||
@README_DEB@
|
||||
|
||||
Package: @PACKAGE_NAME@-dev
|
||||
Section: libdevel
|
||||
Architecture: any
|
||||
Depends: @PACKAGE_NAME@ (= ${binary:Version}), debhelper, fakeroot, svn2cl, git, pkg-config, automake, libtool, autotools-dev, pandoc, lsb-release, doxygen, graphviz, mscgen, default-jre-headless|default-jre, qt5-default | libqt4-core | libqtcore4, qt5-qmake | qt4-qmake, qtbase5-dev | libqt4-dev, qtbase5-dev-tools | qt4-dev-tools, qttools5-dev-tools | qt4-dev-tools, qttools5-dev-tools | qt4-dev-tools @DEB_DEPEND@ @DEB_BUILD_DEPEND@ @DEB_DEPEND_IFEXISTS@
|
||||
Description: @DESCRIPTION@ - Development Package
|
||||
@README_DEB@
|
26
debian/copyright
vendored
@@ -1,26 +0,0 @@
|
||||
This package was debianized by Marc Wäckerlin (SwissSign AG) <marc.waeckerlin@tech.swisssign.com> on
|
||||
Wed, 07 Apr 2010 10:23:02 +0200.
|
||||
|
||||
It was downloaded from https://dev.swisssign.com/projects/proxyface
|
||||
|
||||
Upstream Author(s):
|
||||
|
||||
Marc Wäckerlin (SwissSign AG) <marc.waeckerlin@tech.swisssign.com>
|
||||
|
||||
Copyright:
|
||||
|
||||
Marc Wäckerlin (SwissSign AG) <marc.waeckerlin@tech.swisssign.com>
|
||||
|
||||
License:
|
||||
|
||||
LGPL version 3
|
||||
|
||||
The Debian packaging is:
|
||||
|
||||
Copyright (C) 2010 Marc Wäckerlin (SwissSign AG) <marc.waeckerlin@tech.swisssign.com>
|
||||
|
||||
and is licensed under the GPL version 3,
|
||||
see `/usr/share/common-licenses/GPL-3'.
|
||||
|
||||
# Please also look if there are files or directories which have a
|
||||
# different copyright/license attached and list them here.
|
0
debian/dirs
vendored
3
debian/proxyface-dev.dirs
vendored
@@ -1,3 +0,0 @@
|
||||
usr/lib
|
||||
usr/share/proxyface
|
||||
usr/include
|
4
debian/proxyface-dev.install
vendored
@@ -3,5 +3,5 @@ usr/lib/lib*.a
|
||||
usr/lib/lib*.so
|
||||
usr/lib/pkgconfig/*
|
||||
usr/lib/*.la
|
||||
usr/share/pkgconfig/*
|
||||
usr/share/proxyface/*
|
||||
usr/share/doc/proxyface/html
|
||||
usr/share/doc/proxyface/examples
|
||||
|
1
debian/proxyface.dirs
vendored
@@ -1 +0,0 @@
|
||||
usr/lib
|
20
debian/proxyface.doc-base.EX
vendored
@@ -1,20 +0,0 @@
|
||||
Document: proxyface
|
||||
Title: Debian proxyface Manual
|
||||
Author: <insert document author here>
|
||||
Abstract: This manual describes what proxyface is
|
||||
and how it can be used to
|
||||
manage online manuals on Debian systems.
|
||||
Section: unknown
|
||||
|
||||
Format: debiandoc-sgml
|
||||
Files: /usr/share/doc/proxyface/proxyface.sgml.gz
|
||||
|
||||
Format: postscript
|
||||
Files: /usr/share/doc/proxyface/proxyface.ps.gz
|
||||
|
||||
Format: text
|
||||
Files: /usr/share/doc/proxyface/proxyface.text.gz
|
||||
|
||||
Format: HTML
|
||||
Index: /usr/share/doc/proxyface/html/index.html
|
||||
Files: /usr/share/doc/proxyface/html/*.html
|
1
debian/proxyface.install
vendored
@@ -1 +1,2 @@
|
||||
usr/lib/lib*.so.*
|
||||
usr/share/proxyface
|
||||
|
105
debian/rules
vendored
@@ -1,95 +1,12 @@
|
||||
#!/usr/bin/make -f
|
||||
# -*- makefile -*-
|
||||
# Sample debian/rules that uses debhelper.
|
||||
# This file was originally written by Joey Hess and Craig Small.
|
||||
# As a special exception, when this file is copied by dh-make into a
|
||||
# dh-make output file, you may use that output file without restriction.
|
||||
# This special exception was added by Craig Small in version 0.37 of dh-make.
|
||||
## @id $Id$
|
||||
##
|
||||
## This file has been added:
|
||||
## - by bootstrap.sh
|
||||
## - on Fri, 17 March 2017 16:12:28 +0100
|
||||
## Feel free to change it or even remove and rebuild it, up to your needs
|
||||
##
|
||||
## 1 2 3 4 5 6 7 8
|
||||
## 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
# Uncomment this to turn on verbose mode.
|
||||
#export DH_VERBOSE=1
|
||||
|
||||
|
||||
# These are used for cross-compiling and for saving the configure script
|
||||
# from having to guess our platform (since we know it already)
|
||||
DEB_HOST_GNU_TYPE ?= $(shell dpkg-architecture -qDEB_HOST_GNU_TYPE)
|
||||
DEB_BUILD_GNU_TYPE ?= $(shell dpkg-architecture -qDEB_BUILD_GNU_TYPE)
|
||||
ifneq ($(DEB_HOST_GNU_TYPE),$(DEB_BUILD_GNU_TYPE))
|
||||
CROSS= --build $(DEB_BUILD_GNU_TYPE) --host $(DEB_HOST_GNU_TYPE)
|
||||
else
|
||||
CROSS= --build $(DEB_BUILD_GNU_TYPE)
|
||||
endif
|
||||
|
||||
|
||||
|
||||
config.status: configure
|
||||
dh_testdir
|
||||
ifneq "$(wildcard /usr/share/misc/config.sub)" ""
|
||||
cp -f /usr/share/misc/config.sub config.sub
|
||||
endif
|
||||
ifneq "$(wildcard /usr/share/misc/config.guess)" ""
|
||||
cp -f /usr/share/misc/config.guess config.guess
|
||||
endif
|
||||
./configure $(CROSS) --prefix=/usr --mandir=\$${prefix}/share/man --infodir=\$${prefix}/share/info CFLAGS="$(CFLAGS)"
|
||||
|
||||
|
||||
build: build-stamp
|
||||
|
||||
build-stamp: config.status
|
||||
dh_testdir
|
||||
$(MAKE)
|
||||
touch $@
|
||||
|
||||
clean:
|
||||
dh_testdir
|
||||
dh_testroot
|
||||
rm -f build-stamp
|
||||
[ ! -f [Mm]akefile ] || $(MAKE) distclean
|
||||
rm -f config.sub config.guess
|
||||
dh_clean
|
||||
|
||||
install: build
|
||||
dh_testdir
|
||||
dh_testroot
|
||||
dh_prep
|
||||
dh_installdirs
|
||||
$(MAKE) prefix=$(CURDIR)/debian/tmp/usr install
|
||||
|
||||
|
||||
# Build architecture-independent files here.
|
||||
binary-indep: install
|
||||
# We have nothing to do by default.
|
||||
|
||||
# Build architecture-dependent files here.
|
||||
binary-arch: install
|
||||
dh_testdir
|
||||
dh_testroot
|
||||
dh_installchangelogs ChangeLog
|
||||
dh_installdocs
|
||||
dh_installexamples
|
||||
dh_install
|
||||
# dh_installmenu
|
||||
# dh_installdebconf
|
||||
# dh_installlogrotate
|
||||
# dh_installemacsen
|
||||
# dh_installpam
|
||||
# dh_installmime
|
||||
# dh_python
|
||||
# dh_installinit
|
||||
# dh_installcron
|
||||
# dh_installinfo
|
||||
dh_installman
|
||||
dh_link
|
||||
dh_strip
|
||||
dh_compress
|
||||
dh_fixperms
|
||||
# dh_perl
|
||||
dh_makeshlibs
|
||||
dh_installdeb
|
||||
dh_shlibdeps
|
||||
dh_gencontrol
|
||||
dh_md5sums
|
||||
dh_builddeb
|
||||
|
||||
binary: binary-indep binary-arch
|
||||
.PHONY: build clean binary-indep binary-arch binary install
|
||||
%:
|
||||
dh $@
|
||||
|
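With the catch-all dh rule above, the package build is driven entirely by debhelper; an illustrative build command, assuming the build dependencies from debian/control.in are installed:

    dpkg-buildpackage -us -uc -b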
1
debian/shlibs.local.ex
vendored
@@ -1 +0,0 @@
|
||||
libproxyface 1.0.34 proxyface (>> 1.0.34-0), proxyface (<< 1.0.34-99)
|
23
debian/watch.ex
vendored
@@ -1,23 +0,0 @@
|
||||
# Example watch control file for uscan
|
||||
# Rename this file to "watch" and then you can run the "uscan" command
|
||||
# to check for upstream updates and more.
|
||||
# See uscan(1) for format
|
||||
|
||||
# Compulsory line, this is a version 3 file
|
||||
version=3
|
||||
|
||||
# Uncomment to examine a Webpage
|
||||
# <Webpage URL> <string match>
|
||||
#http://www.example.com/downloads.php proxyface-(.*)\.tar\.gz
|
||||
|
||||
# Uncomment to examine a Webserver directory
|
||||
#http://www.example.com/pub/proxyface-(.*)\.tar\.gz
|
||||
|
||||
# Uncomment to examine an FTP server
|
||||
#ftp://ftp.example.com/pub/proxyface-(.*)\.tar\.gz debian uupdate
|
||||
|
||||
# Uncomment to find new files on sourceforge, for devscripts >= 2.9
|
||||
# http://sf.net/proxyface/proxyface-(.*)\.tar\.gz
|
||||
|
||||
# Uncomment to find new files on GooglePages
|
||||
# http://example.googlepages.com/foo.html proxyface-(.*)\.tar\.gz
|
171
dependency-graph.sh
Executable file
@@ -0,0 +1,171 @@
|
||||
#!/bin/bash -e
|
||||
|
||||
# template for bash scripts
|
||||
|
||||
# internal use only
|
||||
append_msg() {
|
||||
if test $# -ne 0; then
|
||||
echo -en ":\e[0m \e[1m$*"
|
||||
fi
|
||||
echo -e "\e[0m"
|
||||
}
|
||||
|
||||
# write a notice
|
||||
notice() {
|
||||
if test $# -eq 0; then
|
||||
return
|
||||
fi
|
||||
echo -e "\e[1m$*\e[0m" 1>&3
|
||||
}
|
||||
|
||||
# write error message
|
||||
error() {
|
||||
echo -en "\e[1;31merror" 1>&2
|
||||
append_msg $* 1>&2
|
||||
}
|
||||
|
||||
# write a warning message
|
||||
warning() {
|
||||
echo -en "\e[1;33mwarning" 1>&2
|
||||
append_msg $* 1>&2
|
||||
}
|
||||
|
||||
# write a success message
|
||||
success() {
|
||||
echo -en "\e[1;32msuccess" 1>&2
|
||||
append_msg $* 1>&2
|
||||
}
|
||||
|
||||
# commandline parameter evaluation
|
||||
files=${0%/*}/configure.ac
|
||||
short=0
|
||||
while test $# -gt 0; do
|
||||
case "$1" in
|
||||
(--short|-s) short=1;;
|
||||
(--help|-h) less <<EOF
|
||||
SYNOPSIS
|
||||
|
||||
$0 [OPTIONS] <files>
|
||||
|
||||
OPTIONS
|
||||
|
||||
--help, -h show this help
|
||||
--short, -s short graph with no external dependencies
|
||||
|
||||
<files> list of zero or more configure.ac files
|
||||
(default: ${files})
|
||||
|
||||
DESCRIPTION
|
||||
|
||||
Evaluates dependencies of all the given configure.ac files. By
|
||||
default it takes the local configure.ac. Outputs a graphviz dot file
|
||||
with the dependencies. Solid lines are required dependencies, dotted
|
||||
lines are optional dependencies.
|
||||
|
||||
EXAMPLE
|
||||
|
||||
Evaluate all dependencies between all local subversion and git
|
||||
projects, if they are in the path ~/svn and ~/git:
|
||||
|
||||
$0 ~/svn/*/configure.ac ~/git/*/configure.ac
|
||||
|
||||
EOF
|
||||
exit;;
|
||||
(*) files=$*; break;;
|
||||
esac
|
||||
if test $# -eq 0; then
|
||||
error "missing parameter, try $0 --help"; exit 1
|
||||
fi
|
||||
shift;
|
||||
done
|
||||
|
||||
# run a command, print the result and abort in case of error
|
||||
# option: --no-check: ignore the result, continue in case of error
|
||||
run() {
|
||||
check=1
|
||||
while test $# -gt 0; do
|
||||
case "$1" in
|
||||
(--no-check) check=0;;
|
||||
(*) break;;
|
||||
esac
|
||||
shift;
|
||||
done
|
||||
echo -en "\e[1m-> running:\e[0m $* ..."
|
||||
result=$($* 2>&1)
|
||||
res=$?
|
||||
if test $res -ne 0; then
|
||||
if test $check -eq 1; then
|
||||
error "failed with return code: $res"
|
||||
if test -n "$result"; then
|
||||
echo "$result"
|
||||
fi
|
||||
exit 1
|
||||
else
|
||||
warning "ignored return code: $res"
|
||||
fi
|
||||
else
|
||||
success
|
||||
fi
|
||||
}
|
||||
|
||||
# error handler
|
||||
function traperror() {
|
||||
set +x
|
||||
local err=($1) # error status
|
||||
local line="$2" # LINENO
|
||||
local linecallfunc="$3"
|
||||
local command="$4"
|
||||
local funcstack="$5"
|
||||
for e in ${err[@]}; do
|
||||
if test -n "$e" -a "$e" != "0"; then
|
||||
error "line $line - command '$command' exited with status: $e (${err[@]})"
|
||||
if [ "${funcstack}" != "main" -o "$linecallfunc" != "0" ]; then
|
||||
echo -n " ... error at ${funcstack} "
|
||||
if [ "$linecallfunc" != "" ]; then
|
||||
echo -n "called at line $linecallfunc"
|
||||
fi
|
||||
echo
|
||||
fi
|
||||
exit $e
|
||||
fi
|
||||
done
|
||||
success
|
||||
exit 0
|
||||
}
|
||||
|
||||
# catch errors
|
||||
trap 'traperror "$? ${PIPESTATUS[@]}" $LINENO $BASH_LINENO "$BASH_COMMAND" "${FUNCNAME[@]}" "${FUNCTION}"' ERR SIGINT INT TERM EXIT
|
||||
|
||||
##########################################################################################
|
||||
|
||||
filter() {
|
||||
if test $short -eq 1; then
|
||||
all=$(cat)
|
||||
allowed=$(sed -n '/"\(.*\)" \[style=solid\];/{s//\1/;H};${x;s/\n//;s/\n/\\|/gp}' <<<"${all}")
|
||||
sed -n '/"\('"${allowed}"'\)" -> "\('"${allowed}"'\)"/p' <<<"${all}"
|
||||
else
|
||||
cat
|
||||
fi
|
||||
}
|
||||
|
||||
echo "digraph G {"
|
||||
if test $short -eq 0; then
|
||||
echo "node [style=dashed];"
|
||||
fi
|
||||
(
|
||||
for file in $files; do
|
||||
if ! test -e $file; then
|
||||
error "file $file not found"; exit 1
|
||||
fi
|
||||
sed -n '
|
||||
/^ *m4_define(x_package_name, */ {s//"/;s/ *).*/"/;h;s/.*/& [style=solid];/p}
|
||||
/^ *AX_REQUIRE_QT/ {s/.*/"qt" -> /;G;s/\n//;s/.*/&;/p}
|
||||
/^ *AX_PKG_REQUIRE(\[\?\([^],)]\+\)\]\?, \[\?\([^],)]\+\)\]\?.*/ {s//"\2" -> /;G;s/\n//;s/.*/&;/p}
|
||||
/^ *AX_PKG_REQUIRE(\[\?\([^],)]\+\)\]\?.*/ {s//"\1" -> /;G;s/\n//;s/.*/&;/p}
|
||||
/^ *AX_CHECK_QT/ {s/.*/"qt" -> /;G;s/\n//;s/.*/& [style=dashed];/p}
|
||||
/^ *AX_PKG_CHECK(\[\?\([^],)]\+\)\]\?, \[\?\([^],)]\+\)\]\?.*/ {s//"\2" -> /;G;s/\n//;s/.*/& [style=dotted];/p}
|
||||
/^ *AX_PKG_CHECK(\[\?\([^],)]\+\)\]\?.*/ {s//"\1" -> /;G;s/\n//;s/.*/& [style=dotted];/p}
|
||||
' $file
|
||||
done
|
||||
) | filter
|
||||
echo "}"
|
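The dot output can be rendered directly with graphviz, for example (assuming graphviz is installed and the projects live under ~/svn):

    ./dependency-graph.sh ~/svn/*/configure.ac | dot -Tsvg -o dependencies.svg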
2611
doc/doxyfile.in
@@ -1,107 +1,129 @@
|
||||
# Doxyfile 1.7.3
|
||||
# Doxyfile 1.8.13
|
||||
|
||||
# This file describes the settings to be used by the documentation system
|
||||
# doxygen (www.doxygen.org) for a project.
|
||||
#
|
||||
# All text after a hash (#) is considered a comment and will be ignored.
|
||||
# All text after a double hash (##) is considered a comment and is placed in
|
||||
# front of the TAG it is preceding.
|
||||
#
|
||||
# All text after a single hash (#) is considered a comment and will be ignored.
|
||||
# The format is:
|
||||
# TAG = value [value, ...]
|
||||
# For lists items can also be appended using:
|
||||
# TAG += value [value, ...]
|
||||
# Values that contain spaces should be placed between quotes (" ").
|
||||
# TAG = value [value, ...]
|
||||
# For lists, items can also be appended using:
|
||||
# TAG += value [value, ...]
|
||||
# Values that contain spaces should be placed between quotes (\" \").
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Project related configuration options
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# This tag specifies the encoding used for all characters in the config file
|
||||
# that follow. The default is UTF-8 which is also the encoding used for all
|
||||
# text before the first occurrence of this tag. Doxygen uses libiconv (or the
|
||||
# iconv built into libc) for the transcoding. See
|
||||
# http://www.gnu.org/software/libiconv for the list of possible encodings.
|
||||
# that follow. The default is UTF-8 which is also the encoding used for all text
|
||||
# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv
|
||||
# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv
|
||||
# for the list of possible encodings.
|
||||
# The default value is: UTF-8.
|
||||
|
||||
DOXYFILE_ENCODING = UTF-8
|
||||
|
||||
# The PROJECT_NAME tag is a single word (or a sequence of words surrounded
|
||||
# by quotes) that should identify the project.
|
||||
# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
|
||||
# double-quotes, unless you are using Doxywizard) that should identify the
|
||||
# project for which the documentation is generated. This name is used in the
|
||||
# title of most generated pages and in a few other places.
|
||||
# The default value is: My Project.
|
||||
|
||||
PROJECT_NAME = "Projektdokumentation @PACKAGENAME@"
|
||||
PROJECT_NAME = @PACKAGE_NAME@
|
||||
|
||||
# The PROJECT_NUMBER tag can be used to enter a project or revision number.
|
||||
# This could be handy for archiving the generated documentation or
|
||||
# if some version control system is used.
|
||||
# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
|
||||
# could be handy for archiving the generated documentation or if some version
|
||||
# control system is used.
|
||||
|
||||
PROJECT_NUMBER = "Version @MAJOR@.@MINOR@.@LEAST@"
|
||||
PROJECT_NUMBER = @PACKAGE_VERSION@
|
||||
|
||||
# Using the PROJECT_BRIEF tag one can provide an optional one line description for a project that appears at the top of each page and should give viewer a quick idea about the purpose of the project. Keep the description short.
|
||||
# Using the PROJECT_BRIEF tag one can provide an optional one line description
|
||||
# for a project that appears at the top of each page and should give viewer a
|
||||
# quick idea about the purpose of the project. Keep the description short.
|
||||
|
||||
PROJECT_BRIEF =
|
||||
PROJECT_BRIEF = @DESCRIPTION@
|
||||
|
||||
# With the PROJECT_LOGO tag one can specify an logo or icon that is
|
||||
# included in the documentation. The maximum height of the logo should not
|
||||
# exceed 55 pixels and the maximum width should not exceed 200 pixels.
|
||||
# Doxygen will copy the logo to the output directory.
|
||||
# With the PROJECT_LOGO tag one can specify a logo or an icon that is included
|
||||
# in the documentation. The maximum height of the logo should not exceed 55
|
||||
# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy
|
||||
# the logo to the output directory.
|
||||
|
||||
PROJECT_LOGO =
|
||||
PROJECT_LOGO = @top_srcdir@/@PACKAGE_LOGO@
|
||||
|
||||
# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute)
|
||||
# base path where the generated documentation will be put.
|
||||
# If a relative path is entered, it will be relative to the location
|
||||
# where doxygen was started. If left blank the current directory will be used.
|
||||
# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
|
||||
# into which the generated documentation will be written. If a relative path is
|
||||
# entered, it will be relative to the location where doxygen was started. If
|
||||
# left blank the current directory will be used.
|
||||
|
||||
OUTPUT_DIRECTORY =
|
||||
|
||||
# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create
|
||||
# 4096 sub-directories (in 2 levels) under the output directory of each output
|
||||
# format and will distribute the generated files over these directories.
|
||||
# Enabling this option can be useful when feeding doxygen a huge amount of
|
||||
# source files, where putting all generated files in the same directory would
|
||||
# otherwise cause performance problems for the file system.
|
||||
# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub-
|
||||
# directories (in 2 levels) under the output directory of each output format and
|
||||
# will distribute the generated files over these directories. Enabling this
|
||||
# option can be useful when feeding doxygen a huge amount of source files, where
|
||||
# putting all generated files in the same directory would otherwise causes
|
||||
# performance problems for the file system.
|
||||
# The default value is: NO.
|
||||
|
||||
CREATE_SUBDIRS = NO
|
||||
|
||||
# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII
|
||||
# characters to appear in the names of generated files. If set to NO, non-ASCII
|
||||
# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode
|
||||
# U+3044.
|
||||
# The default value is: NO.
|
||||
|
||||
ALLOW_UNICODE_NAMES = NO
|
||||
|
||||
# The OUTPUT_LANGUAGE tag is used to specify the language in which all
|
||||
# documentation generated by doxygen is written. Doxygen will use this
|
||||
# information to generate all constant output in the proper language.
|
||||
# The default language is English, other supported languages are:
|
||||
# Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional,
|
||||
# Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German,
|
||||
# Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English
|
||||
# messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian,
|
||||
# Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrillic, Slovak,
|
||||
# Slovene, Spanish, Swedish, Ukrainian, and Vietnamese.
|
||||
# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese,
|
||||
# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States),
|
||||
# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian,
|
||||
# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages),
|
||||
# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian,
|
||||
# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian,
|
||||
# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish,
|
||||
# Ukrainian and Vietnamese.
|
||||
# The default value is: English.
|
||||
|
||||
OUTPUT_LANGUAGE = German
|
||||
OUTPUT_LANGUAGE = English
|
||||
|
||||
# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will
|
||||
# include brief member descriptions after the members that are listed in
|
||||
# the file and class documentation (similar to JavaDoc).
|
||||
# Set to NO to disable this.
|
||||
# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member
|
||||
# descriptions after the members that are listed in the file and class
|
||||
# documentation (similar to Javadoc). Set to NO to disable this.
|
||||
# The default value is: YES.
|
||||
|
||||
BRIEF_MEMBER_DESC = YES
|
||||
|
||||
# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend
|
||||
# the brief description of a member or function before the detailed description.
|
||||
# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
|
||||
# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief
|
||||
# description of a member or function before the detailed description
|
||||
#
|
||||
# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
|
||||
# brief descriptions will be completely suppressed.
|
||||
# The default value is: YES.
|
||||
|
||||
REPEAT_BRIEF = YES
|
||||
|
||||
# This tag implements a quasi-intelligent brief description abbreviator
|
||||
# that is used to form the text in various listings. Each string
|
||||
# in this list, if found as the leading text of the brief description, will be
|
||||
# stripped from the text and the result after processing the whole list, is
|
||||
# used as the annotated text. Otherwise, the brief description is used as-is.
|
||||
# If left blank, the following values are used ("$name" is automatically
|
||||
# replaced with the name of the entity): "The $name class" "The $name widget"
|
||||
# "The $name file" "is" "provides" "specifies" "contains"
|
||||
# "represents" "a" "an" "the"
|
||||
# This tag implements a quasi-intelligent brief description abbreviator that is
|
||||
# used to form the text in various listings. Each string in this list, if found
|
||||
# as the leading text of the brief description, will be stripped from the text
|
||||
# and the result, after processing the whole list, is used as the annotated
|
||||
# text. Otherwise, the brief description is used as-is. If left blank, the
|
||||
# following values are used ($name is automatically replaced with the name of
|
||||
# the entity):The $name class, The $name widget, The $name file, is, provides,
|
||||
# specifies, contains, represents, a, an and the.
|
||||
|
||||
ABBREVIATE_BRIEF =
|
||||
ABBREVIATE_BRIEF = "The $name class" "The $name widget" "The $name file" is provides specifies contains represents a an the
|
||||
|
||||
# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
|
||||
# Doxygen will generate a detailed section even if there is only a brief
|
||||
# doxygen will generate a detailed section even if there is only a brief
|
||||
# description.
|
||||
# The default value is: NO.
|
||||
|
||||
ALWAYS_DETAILED_SEC = NO
|
||||
|
||||
@@ -109,1264 +131,1871 @@ ALWAYS_DETAILED_SEC = NO
|
||||
# inherited members of a class in the documentation of that class as if those
|
||||
# members were ordinary class members. Constructors, destructors and assignment
|
||||
# operators of the base classes will not be shown.
|
||||
# The default value is: NO.
|
||||
|
||||
INLINE_INHERITED_MEMB = NO
|
||||
INLINE_INHERITED_MEMB = YES
|
||||
|
||||
# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full
|
||||
# path before files name in the file list and in the header files. If set
|
||||
# to NO the shortest path that makes the file name unique will be used.
|
||||
# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path
|
||||
# before files name in the file list and in the header files. If set to NO the
|
||||
# shortest path that makes the file name unique will be used
|
||||
# The default value is: YES.
|
||||
|
||||
FULL_PATH_NAMES = YES
|
||||
|
||||
# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag
|
||||
# can be used to strip a user-defined part of the path. Stripping is
|
||||
# only done if one of the specified strings matches the left-hand part of
|
||||
# the path. The tag can be used to show relative paths in the file list.
|
||||
# If left blank the directory from which doxygen is run is used as the
|
||||
# path to strip.
|
||||
# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
|
||||
# Stripping is only done if one of the specified strings matches the left-hand
|
||||
# part of the path. The tag can be used to show relative paths in the file list.
|
||||
# If left blank the directory from which doxygen is run is used as the path to
|
||||
# strip.
|
||||
#
|
||||
# Note that you can specify absolute paths here, but also relative paths, which
|
||||
# will be relative from the directory where doxygen is started.
|
||||
# This tag requires that the tag FULL_PATH_NAMES is set to YES.
|
||||
|
||||
STRIP_FROM_PATH =
|
||||
|
||||
# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of
|
||||
# the path mentioned in the documentation of a class, which tells
|
||||
# the reader which header file to include in order to use a class.
|
||||
# If left blank only the name of the header file containing the class
|
||||
# definition is used. Otherwise one should specify the include paths that
|
||||
# are normally passed to the compiler using the -I flag.
|
||||
# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
|
||||
# path mentioned in the documentation of a class, which tells the reader which
|
||||
# header file to include in order to use a class. If left blank only the name of
|
||||
# the header file containing the class definition is used. Otherwise one should
|
||||
# specify the list of include paths that are normally passed to the compiler
|
||||
# using the -I flag.
|
||||
|
||||
STRIP_FROM_INC_PATH =
|
||||
|
||||
# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter
|
||||
# (but less readable) file names. This can be useful if your file system
|
||||
# doesn't support long names like on DOS, Mac, or CD-ROM.
|
||||
# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
|
||||
# less readable) file names. This can be useful is your file systems doesn't
|
||||
# support long names like on DOS, Mac, or CD-ROM.
|
||||
# The default value is: NO.
|
||||
|
||||
SHORT_NAMES = NO
|
||||
|
||||
# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen
|
||||
# will interpret the first line (until the first dot) of a JavaDoc-style
|
||||
# comment as the brief description. If set to NO, the JavaDoc
|
||||
# comments will behave just like regular Qt-style comments
|
||||
# (thus requiring an explicit @brief command for a brief description.)
|
||||
# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
|
||||
# first line (until the first dot) of a Javadoc-style comment as the brief
|
||||
# description. If set to NO, the Javadoc-style will behave just like regular Qt-
|
||||
# style comments (thus requiring an explicit @brief command for a brief
|
||||
# description.)
|
||||
# The default value is: NO.
|
||||
|
||||
JAVADOC_AUTOBRIEF = NO
|
||||
|
||||
# If the QT_AUTOBRIEF tag is set to YES then Doxygen will
|
||||
# interpret the first line (until the first dot) of a Qt-style
|
||||
# comment as the brief description. If set to NO, the comments
|
||||
# will behave just like regular Qt-style comments (thus requiring
|
||||
# an explicit \brief command for a brief description.)
|
||||
# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
|
||||
# line (until the first dot) of a Qt-style comment as the brief description. If
|
||||
# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
|
||||
# requiring an explicit \brief command for a brief description.)
|
||||
# The default value is: NO.
|
||||
|
||||
QT_AUTOBRIEF = NO
|
||||
|
||||
# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen
|
||||
# treat a multi-line C++ special comment block (i.e. a block of //! or ///
|
||||
# comments) as a brief description. This used to be the default behaviour.
|
||||
# The new default is to treat a multi-line C++ comment block as a detailed
|
||||
# description. Set this tag to YES if you prefer the old behaviour instead.
|
||||
# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
|
||||
# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
|
||||
# a brief description. This used to be the default behavior. The new default is
|
||||
# to treat a multi-line C++ comment block as a detailed description. Set this
|
||||
# tag to YES if you prefer the old behavior instead.
|
||||
#
|
||||
# Note that setting this tag to YES also means that rational rose comments are
|
||||
# not recognized any more.
|
||||
# The default value is: NO.
|
||||
|
||||
MULTILINE_CPP_IS_BRIEF = YES

# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented
# member inherits the documentation from any documented member that it
# re-implements.
# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
# documentation from any documented member that it re-implements.
# The default value is: YES.

INHERIT_DOCS = YES

# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce
# a new page for each member. If set to NO, the documentation of a member will
# be part of the file/class/namespace that contains it.
# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new
# page for each member. If set to NO, the documentation of a member will be part
# of the file/class/namespace that contains it.
# The default value is: NO.

SEPARATE_MEMBER_PAGES = NO

# The TAB_SIZE tag can be used to set the number of spaces in a tab.
# Doxygen uses this value to replace tabs by spaces in code fragments.
# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen
# uses this value to replace tabs by spaces in code fragments.
# Minimum value: 1, maximum value: 16, default value: 4.

TAB_SIZE = 2

# This tag can be used to specify a number of aliases that acts
# as commands in the documentation. An alias has the form "name=value".
# For example adding "sideeffect=\par Side Effects:\n" will allow you to
# put the command \sideeffect (or @sideeffect) in the documentation, which
# will result in a user-defined paragraph with heading "Side Effects:".
# You can put \n's in the value part of an alias to insert newlines.
# This tag can be used to specify a number of aliases that act as commands in
# the documentation. An alias has the form:
# name=value
# For example adding
# "sideeffect=@par Side Effects:\n"
# will allow you to put the command \sideeffect (or @sideeffect) in the
# documentation, which will result in a user-defined paragraph with heading
# "Side Effects:". You can put \n's in the value part of an alias to insert
# newlines.

ALIASES = "id=\par File-ID\n" \
          "copy=\par Copyright\n" \
          "license=\par License\n" \
          "classmutex=\par Reentrant:\nAccess is locked with class static mutex @c " \
          "instancemutex=\par Reentrant:\nAccess is locked with per instance mutex @c " \
          "mutex=\par Reentrant:\nAccess is locked with mutex @c "
ALIASES = "id=\par File-ID\n"
ALIASES += "author=<a href="@AUTHOR_URL@" target="_blank">@AUTHOR_NAME@</a>"
ALIASES += "readme=@README_HTML@"
ALIASES += "description=@DESCRIPTION@"
ALIASES += "api=\xrefitem api \"API Call\" \"\""
ALIASES += "mutex=\par Reentrant:\nAccess is locked with mutex @c "
ALIASES += "instancemutex=\par Reentrant:\nAccess is locked with per instance mutex @c "
ALIASES += "classmutex=\par Reentrant:\nAccess is locked with class static mutex @c "
ALIASES += "license=\par License\n"
ALIASES += "copy=\par Copyright by <a href="@AUTHOR_URL@" target="_blank">@AUTHOR_NAME@</a>\n"
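
# For illustration (the class and mutex names are hypothetical): with the
# aliases defined above, a comment such as
#
#   //! Thread-safe counter.
#   //! @mutex counterMutex
#   class Counter;
#
# expands the @mutex command into a "Reentrant:" paragraph stating that access
# is locked with mutex @c counterMutex.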

# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C
# sources only. Doxygen will then generate output that is more tailored for C.
# For instance, some of the names that are used will be different. The list
# of all members will be omitted, etc.
# This tag can be used to specify a number of word-keyword mappings (TCL only).
# A mapping has the form "name=value". For example adding "class=itcl::class"
# will allow you to use the command class in the itcl::class meaning.

TCL_SUBST =

# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
# only. Doxygen will then generate output that is more tailored for C. For
# instance, some of the names that are used will be different. The list of all
# members will be omitted, etc.
# The default value is: NO.

OPTIMIZE_OUTPUT_FOR_C = NO

# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java
# sources only. Doxygen will then generate output that is more tailored for
# Java. For instance, namespaces will be presented as packages, qualified
# scopes will look different, etc.
# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or
# Python sources only. Doxygen will then generate output that is more tailored
# for that language. For instance, namespaces will be presented as packages,
# qualified scopes will look different, etc.
# The default value is: NO.

OPTIMIZE_OUTPUT_JAVA = NO

# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
# sources only. Doxygen will then generate output that is more tailored for
# Fortran.
# sources. Doxygen will then generate output that is tailored for Fortran.
# The default value is: NO.

OPTIMIZE_FOR_FORTRAN = NO

# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
# sources. Doxygen will then generate output that is tailored for
# VHDL.
# sources. Doxygen will then generate output that is tailored for VHDL.
# The default value is: NO.

OPTIMIZE_OUTPUT_VHDL = NO

# Doxygen selects the parser to use depending on the extension of the files it
# parses. With this tag you can assign which parser to use for a given extension.
# Doxygen has a built-in mapping, but you can override or extend it using this
# tag. The format is ext=language, where ext is a file extension, and language
# is one of the parsers supported by doxygen: IDL, Java, Javascript, CSharp, C,
# C++, D, PHP, Objective-C, Python, Fortran, VHDL, C, C++. For instance to make
# doxygen treat .inc files as Fortran files (default is PHP), and .f files as C
# (default is Fortran), use: inc=Fortran f=C. Note that for custom extensions
# you also need to set FILE_PATTERNS otherwise the files are not read by doxygen.
# parses. With this tag you can assign which parser to use for a given
# extension. Doxygen has a built-in mapping, but you can override or extend it
# using this tag. The format is ext=language, where ext is a file extension, and
# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran:
# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran:
# Fortran. In the latter case the parser tries to guess whether the code is fixed
# or free formatted code, this is the default for Fortran type files), VHDL. For
# instance to make doxygen treat .inc files as Fortran files (default is PHP),
# and .f files as C (default is Fortran), use: inc=Fortran f=C.
#
# Note: For files without extension you can use no_extension as a placeholder.
#
# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
# the files are not read by doxygen.

EXTENSION_MAPPING =
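
# For illustration (example taken from the description above): a mapping such as
#
#   EXTENSION_MAPPING = inc=Fortran f=C
#
# would make doxygen parse .inc files with the Fortran parser and .f files with
# the C parser; the tag is left empty above, so only the built-in mapping is used.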

# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
# to include (a tag file for) the STL sources as input, then you should
# set this tag to YES in order to let doxygen match functions declarations and
# definitions whose arguments contain STL classes (e.g. func(std::string); v.s.
# func(std::string) {}). This also makes the inheritance and collaboration
# diagrams that involve STL classes more complete and accurate.
# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
# according to the Markdown format, which allows for more readable
# documentation. See http://daringfireball.net/projects/markdown/ for details.
# The output of markdown processing is further processed by doxygen, so you can
# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
# case of backward compatibilities issues.
# The default value is: YES.

BUILTIN_STL_SUPPORT = NO
MARKDOWN_SUPPORT = YES

# When the TOC_INCLUDE_HEADINGS tag is set to a non-zero value, all headings up
# to that level are automatically included in the table of contents, even if
# they do not have an id attribute.
# Note: This feature currently applies only to Markdown headings.
# Minimum value: 0, maximum value: 99, default value: 0.
# This tag requires that the tag MARKDOWN_SUPPORT is set to YES.

TOC_INCLUDE_HEADINGS = 0

# When enabled doxygen tries to link words that correspond to documented
# classes, or namespaces to their corresponding documentation. Such a link can
# be prevented in individual cases by putting a % sign in front of the word or
# globally by setting AUTOLINK_SUPPORT to NO.
# The default value is: YES.

AUTOLINK_SUPPORT = YES

# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
# to include (a tag file for) the STL sources as input, then you should set this
# tag to YES in order to let doxygen match functions declarations and
# definitions whose arguments contain STL classes (e.g. func(std::string);
# versus func(std::string) {}). This also makes the inheritance and collaboration
# diagrams that involve STL classes more complete and accurate.
# The default value is: NO.

BUILTIN_STL_SUPPORT = YES
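
# For illustration (hypothetical function name): enabling BUILTIN_STL_SUPPORT
# lets doxygen match a declaration and a definition whose arguments involve STL
# types, e.g.
#
#   void log(std::string msg);                    // declaration in the header
#   void log(std::string msg) { /* ... */ }       // definition in the source file
#
# without having to feed a tag file for the STL headers themselves as input.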

# If you use Microsoft's C++/CLI language, you should set this option to YES to
# enable parsing support.
# The default value is: NO.

CPP_CLI_SUPPORT = NO

# Set the SIP_SUPPORT tag to YES if your project consists of sip sources only.
# Doxygen will parse them like normal C++ but will assume all classes use public
# instead of private inheritance when no explicit protection keyword is present.
# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen
# will parse them like normal C++ but will assume all classes use public instead
# of private inheritance when no explicit protection keyword is present.
# The default value is: NO.

SIP_SUPPORT = NO

# For Microsoft's IDL there are propget and propput attributes to indicate getter
# and setter methods for a property. Setting this option to YES (the default)
# will make doxygen replace the get and set methods by a property in the
# documentation. This will only work if the methods are indeed getting or
# setting a simple type. If this is not the case, or you want to show the
# methods anyway, you should set this option to NO.
# For Microsoft's IDL there are propget and propput attributes to indicate
# getter and setter methods for a property. Setting this option to YES will make
# doxygen replace the get and set methods by a property in the documentation.
# This will only work if the methods are indeed getting or setting a simple
# type. If this is not the case, or you want to show the methods anyway, you
# should set this option to NO.
# The default value is: YES.

IDL_PROPERTY_SUPPORT = YES

# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
# tag is set to YES, then doxygen will reuse the documentation of the first
# tag is set to YES then doxygen will reuse the documentation of the first
# member in the group (if any) for the other members of the group. By default
# all members of a group must be documented explicitly.
# The default value is: NO.

DISTRIBUTE_GROUP_DOC = YES

# Set the SUBGROUPING tag to YES (the default) to allow class member groups of
# the same type (for instance a group of public functions) to be put as a
# subgroup of that type (e.g. under the Public Functions section). Set it to
# NO to prevent subgrouping. Alternatively, this can be done per class using
# the \nosubgrouping command.
# If one adds a struct or class to a group and this option is enabled, then also
# any nested class or struct is added to the same group. By default this option
# is disabled and one has to add nested compounds explicitly via \ingroup.
# The default value is: NO.

GROUP_NESTED_COMPOUNDS = NO

# Set the SUBGROUPING tag to YES to allow class member groups of the same type
# (for instance a group of public functions) to be put as a subgroup of that
# type (e.g. under the Public Functions section). Set it to NO to prevent
# subgrouping. Alternatively, this can be done per class using the
# \nosubgrouping command.
# The default value is: YES.

SUBGROUPING = YES

# When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum
# is documented as struct, union, or enum with the name of the typedef. So
# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
# are shown inside the group in which they are included (e.g. using \ingroup)
# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
# and RTF).
#
# Note that this feature does not work in combination with
# SEPARATE_MEMBER_PAGES.
# The default value is: NO.

INLINE_GROUPED_CLASSES = NO

# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
# with only public data fields or simple typedef fields will be shown inline in
# the documentation of the scope in which they are defined (i.e. file,
# namespace, or group documentation), provided this scope is documented. If set
# to NO, structs, classes, and unions are shown on a separate page (for HTML and
# Man pages) or section (for LaTeX and RTF).
# The default value is: NO.

INLINE_SIMPLE_STRUCTS = NO

# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
# enum is documented as struct, union, or enum with the name of the typedef. So
# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
# with name TypeT. When disabled the typedef will appear as a member of a file,
# namespace, or class. And the struct will be named TypeS. This can typically
# be useful for C code in case the coding convention dictates that all compound
# namespace, or class. And the struct will be named TypeS. This can typically be
# useful for C code in case the coding convention dictates that all compound
# types are typedef'ed and only the typedef is referenced, never the tag name.
# The default value is: NO.

TYPEDEF_HIDES_STRUCT = NO
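
# For illustration (example taken from the description above): with
# TYPEDEF_HIDES_STRUCT enabled, a C declaration such as
#
#   typedef struct TypeS {} TypeT;
#
# is documented as a struct named TypeT; with the NO setting used here the
# struct keeps the name TypeS and TypeT appears as a separate typedef member.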

# The SYMBOL_CACHE_SIZE determines the size of the internal cache use to
# determine which symbols to keep in memory and which to flush to disk.
# When the cache is full, less often used symbols will be written to disk.
# For small to medium size projects (<1000 input files) the default value is
# probably good enough. For larger projects a too small cache size can cause
# doxygen to be busy swapping symbols to and from disk most of the time
# causing a significant performance penalty.
# If the system has enough physical memory increasing the cache will improve the
# performance by keeping more symbols in memory. Note that the value works on
# a logarithmic scale so increasing the size by one will roughly double the
# memory usage. The cache size is given by this formula:
# 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0,
# corresponding to a cache size of 2^16 = 65536 symbols
# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
# cache is used to resolve symbols given their name and scope. Since this can be
# an expensive process and often the same symbol appears multiple times in the
# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
# doxygen will become slower. If the cache is too large, memory is wasted. The
# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
# symbols. At the end of a run doxygen will report the cache usage and suggest
# the optimal cache size from a speed point of view.
# Minimum value: 0, maximum value: 9, default value: 0.

SYMBOL_CACHE_SIZE = 0
LOOKUP_CACHE_SIZE = 0
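
# Worked example of the formula above: the cache holds 2^(16+LOOKUP_CACHE_SIZE)
# symbols, so the value 0 used here keeps 2^16 = 65536 symbols, while e.g. a
# value of 2 would keep 2^18 = 262144 symbols at roughly four times the memory.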

#---------------------------------------------------------------------------
# Build related configuration options
#---------------------------------------------------------------------------

# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
# documentation are documented, even if no documentation was available.
# Private class members and static file members will be hidden unless
# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES
# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in
# documentation are documented, even if no documentation was available. Private
# class members and static file members will be hidden unless the
# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
# Note: This will also disable the warnings about undocumented members that are
# normally produced when WARNINGS is set to YES.
# The default value is: NO.

EXTRACT_ALL = YES

# If the EXTRACT_PRIVATE tag is set to YES all private members of a class
# will be included in the documentation.
# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will
# be included in the documentation.
# The default value is: NO.

EXTRACT_PRIVATE = YES

# If the EXTRACT_STATIC tag is set to YES all static members of a file
# will be included in the documentation.
# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal
# scope will be included in the documentation.
# The default value is: NO.

EXTRACT_PACKAGE = YES

# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be
# included in the documentation.
# The default value is: NO.

EXTRACT_STATIC = YES

# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs)
# defined locally in source files will be included in the documentation.
# If set to NO only classes defined in header files are included.
# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined
# locally in source files will be included in the documentation. If set to NO,
# only classes defined in header files are included. Does not have any effect
# for Java sources.
# The default value is: YES.

EXTRACT_LOCAL_CLASSES = YES

# This flag is only useful for Objective-C code. When set to YES local
# methods, which are defined in the implementation section but not in
# the interface are included in the documentation.
# If set to NO (the default) only methods in the interface are included.
# This flag is only useful for Objective-C code. If set to YES, local methods,
# which are defined in the implementation section but not in the interface are
# included in the documentation. If set to NO, only methods in the interface are
# included.
# The default value is: NO.

EXTRACT_LOCAL_METHODS = NO
EXTRACT_LOCAL_METHODS = YES

# If this flag is set to YES, the members of anonymous namespaces will be
# extracted and appear in the documentation as a namespace called
# 'anonymous_namespace{file}', where file will be replaced with the base
# name of the file that contains the anonymous namespace. By default
# anonymous namespaces are hidden.
# 'anonymous_namespace{file}', where file will be replaced with the base name of
# the file that contains the anonymous namespace. By default anonymous namespace
# are hidden.
# The default value is: NO.

EXTRACT_ANON_NSPACES = NO
EXTRACT_ANON_NSPACES = YES

# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all
# undocumented members of documented classes, files or namespaces.
# If set to NO (the default) these members will be included in the
# various overviews, but no documentation section is generated.
# This option has no effect if EXTRACT_ALL is enabled.
# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
# undocumented members inside documented classes or files. If set to NO these
# members will be included in the various overviews, but no documentation
# section is generated. This option has no effect if EXTRACT_ALL is enabled.
# The default value is: NO.

HIDE_UNDOC_MEMBERS = NO

# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all
# undocumented classes that are normally visible in the class hierarchy.
# If set to NO (the default) these classes will be included in the various
# overviews. This option has no effect if EXTRACT_ALL is enabled.
# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
# undocumented classes that are normally visible in the class hierarchy. If set
# to NO, these classes will be included in the various overviews. This option
# has no effect if EXTRACT_ALL is enabled.
# The default value is: NO.

HIDE_UNDOC_CLASSES = NO

# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all
# friend (class|struct|union) declarations.
# If set to NO (the default) these declarations will be included in the
# documentation.
# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
# (class|struct|union) declarations. If set to NO, these declarations will be
# included in the documentation.
# The default value is: NO.

HIDE_FRIEND_COMPOUNDS = YES
HIDE_FRIEND_COMPOUNDS = NO

# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any
# documentation blocks found inside the body of a function.
# If set to NO (the default) these blocks will be appended to the
# function's detailed documentation block.
# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
# documentation blocks found inside the body of a function. If set to NO, these
# blocks will be appended to the function's detailed documentation block.
# The default value is: NO.

HIDE_IN_BODY_DOCS = NO

# The INTERNAL_DOCS tag determines if documentation
# that is typed after a \internal command is included. If the tag is set
# to NO (the default) then the documentation will be excluded.
# Set it to YES to include the internal documentation.
# The INTERNAL_DOCS tag determines if documentation that is typed after a
# \internal command is included. If the tag is set to NO then the documentation
# will be excluded. Set it to YES to include the internal documentation.
# The default value is: NO.

INTERNAL_DOCS = NO

# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate
# file names in lower-case letters. If set to YES upper-case letters are also
# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
# names in lower-case letters. If set to YES, upper-case letters are also
# allowed. This is useful if you have classes or files whose names only differ
# in case and if your file system supports case sensitive file names. Windows
# and Mac users are advised to set this option to NO.
# The default value is: system dependent.

CASE_SENSE_NAMES = YES

# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen
# will show members with their full class and namespace scopes in the
# documentation. If set to YES the scope will be hidden.
# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
# their full class and namespace scopes in the documentation. If set to YES, the
# scope will be hidden.
# The default value is: NO.

HIDE_SCOPE_NAMES = NO

# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen
# will put a list of the files that are included by a file in the documentation
# of that file.
# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will
# append additional text to a page's title, such as Class Reference. If set to
# YES the compound reference will be hidden.
# The default value is: NO.

SHOW_INCLUDE_FILES = NO
HIDE_COMPOUND_REFERENCE= NO

# If the FORCE_LOCAL_INCLUDES tag is set to YES then Doxygen
# will list include files with double quotes in the documentation
# rather than with sharp brackets.
# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
# the files that are included by a file in the documentation of that file.
# The default value is: YES.

SHOW_INCLUDE_FILES = YES

# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
# grouped member an include statement to the documentation, telling the reader
# which file to include in order to use the member.
# The default value is: NO.

SHOW_GROUPED_MEMB_INC = YES

# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
# files with double quotes in the documentation rather than with sharp brackets.
# The default value is: NO.

FORCE_LOCAL_INCLUDES = NO

# If the INLINE_INFO tag is set to YES (the default) then a tag [inline]
# is inserted in the documentation for inline members.
# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
# documentation for inline members.
# The default value is: YES.

INLINE_INFO = YES

# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen
# will sort the (detailed) documentation of file and class members
# alphabetically by member name. If set to NO the members will appear in
# declaration order.
# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
# (detailed) documentation of file and class members alphabetically by member
# name. If set to NO, the members will appear in declaration order.
# The default value is: YES.

SORT_MEMBER_DOCS = YES

# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the
# brief documentation of file, namespace and class members alphabetically
# by member name. If set to NO (the default) the members will appear in
# declaration order.
# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
# descriptions of file, namespace and class members alphabetically by member
# name. If set to NO, the members will appear in declaration order. Note that
# this will also influence the order of the classes in the class list.
# The default value is: NO.

SORT_BRIEF_DOCS = YES
SORT_BRIEF_DOCS = NO

# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen
# will sort the (brief and detailed) documentation of class members so that
# constructors and destructors are listed first. If set to NO (the default)
# the constructors will appear in the respective orders defined by
# SORT_MEMBER_DOCS and SORT_BRIEF_DOCS.
# This tag will be ignored for brief docs if SORT_BRIEF_DOCS is set to NO
# and ignored for detailed docs if SORT_MEMBER_DOCS is set to NO.
# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
# (brief and detailed) documentation of class members so that constructors and
# destructors are listed first. If set to NO the constructors will appear in the
# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
# member documentation.
# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
# detailed member documentation.
# The default value is: NO.

SORT_MEMBERS_CTORS_1ST = YES

# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the
# hierarchy of group names into alphabetical order. If set to NO (the default)
# the group names will appear in their defined order.
# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
# of group names into alphabetical order. If set to NO the group names will
# appear in their defined order.
# The default value is: NO.

SORT_GROUP_NAMES = YES
SORT_GROUP_NAMES = NO

# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be
# sorted by fully-qualified names, including namespaces. If set to
# NO (the default), the class list will be sorted only by class name,
# not including the namespace part.
# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
# fully-qualified names, including namespaces. If set to NO, the class list will
# be sorted only by class name, not including the namespace part.
# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
# Note: This option applies only to the class list, not to the
# alphabetical list.
# Note: This option applies only to the class list, not to the alphabetical
# list.
# The default value is: NO.

SORT_BY_SCOPE_NAME = NO

# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper type resolution of all parameters of a function it will reject a
# match between the prototype and the implementation of a member function even if there is only one candidate or it is obvious which candidate to choose by doing a simple string match. By disabling STRICT_PROTO_MATCHING doxygen
# will still accept a match between prototype and implementation in such cases.
# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
# type resolution of all parameters of a function it will reject a match between
# the prototype and the implementation of a member function even if there is
# only one candidate or it is obvious which candidate to choose by doing a
# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
# accept a match between prototype and implementation in such cases.
# The default value is: NO.

STRICT_PROTO_MATCHING = NO

# The GENERATE_TODOLIST tag can be used to enable (YES) or
# disable (NO) the todo list. This list is created by putting \todo
# commands in the documentation.
# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo
# list. This list is created by putting \todo commands in the documentation.
# The default value is: YES.

GENERATE_TODOLIST = YES

# The GENERATE_TESTLIST tag can be used to enable (YES) or
# disable (NO) the test list. This list is created by putting \test
# commands in the documentation.
# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test
# list. This list is created by putting \test commands in the documentation.
# The default value is: YES.

GENERATE_TESTLIST = YES

# The GENERATE_BUGLIST tag can be used to enable (YES) or
# disable (NO) the bug list. This list is created by putting \bug
# commands in the documentation.
# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug
# list. This list is created by putting \bug commands in the documentation.
# The default value is: YES.

GENERATE_BUGLIST = YES

# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or
# disable (NO) the deprecated list. This list is created by putting
# \deprecated commands in the documentation.
# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO)
# the deprecated list. This list is created by putting \deprecated commands in
# the documentation.
# The default value is: YES.

GENERATE_DEPRECATEDLIST= YES

# The ENABLED_SECTIONS tag can be used to enable conditional
# documentation sections, marked by \if sectionname ... \endif.
# The ENABLED_SECTIONS tag can be used to enable conditional documentation
# sections, marked by \if <section_label> ... \endif and \cond <section_label>
# ... \endcond blocks.

ENABLED_SECTIONS =
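
# For illustration (the section label is hypothetical): a conditional block in
# a comment such as
#
#   /// \if internal_api
#   /// This overload is only meant for the test harness.
#   /// \endif
#
# is only included in the output when ENABLED_SECTIONS lists internal_api; the
# tag is left empty above, so such blocks are omitted.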

# The MAX_INITIALIZER_LINES tag determines the maximum number of lines
# the initial value of a variable or macro consists of for it to appear in
# the documentation. If the initializer consists of more lines than specified
# here it will be hidden. Use a value of 0 to hide initializers completely.
# The appearance of the initializer of individual variables and macros in the
# documentation can be controlled using \showinitializer or \hideinitializer
# command in the documentation regardless of this setting.
# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
# initial value of a variable or macro / define can have for it to appear in the
# documentation. If the initializer consists of more lines than specified here
# it will be hidden. Use a value of 0 to hide initializers completely. The
# appearance of the value of individual variables and macros / defines can be
# controlled using \showinitializer or \hideinitializer command in the
# documentation regardless of this setting.
# Minimum value: 0, maximum value: 10000, default value: 30.

MAX_INITIALIZER_LINES = 30

# Set the SHOW_USED_FILES tag to NO to disable the list of files generated
# at the bottom of the documentation of classes and structs. If set to YES the
# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
# the bottom of the documentation of classes and structs. If set to YES, the
# list will mention the files that were used to generate the documentation.
# The default value is: YES.

SHOW_USED_FILES = YES

# If the sources in your project are distributed over multiple directories
# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy
# in the documentation. The default is NO.

SHOW_DIRECTORIES = NO

# Set the SHOW_FILES tag to NO to disable the generation of the Files page.
# This will remove the Files entry from the Quick Index and from the
# Folder Tree View (if specified). The default is YES.
# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
# will remove the Files entry from the Quick Index and from the Folder Tree View
# (if specified).
# The default value is: YES.

SHOW_FILES = YES

# Set the SHOW_NAMESPACES tag to NO to disable the generation of the
# Namespaces page.
# This will remove the Namespaces entry from the Quick Index
# and from the Folder Tree View (if specified). The default is YES.
# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
# page. This will remove the Namespaces entry from the Quick Index and from the
# Folder Tree View (if specified).
# The default value is: YES.

SHOW_NAMESPACES = YES

# The FILE_VERSION_FILTER tag can be used to specify a program or script that
# doxygen should invoke to get the current version for each file (typically from
# the version control system). Doxygen will invoke the program by executing (via
# popen()) the command <command> <input-file>, where <command> is the value of
# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
# provided by doxygen. Whatever the program writes to standard output
# is used as the file version. See the manual for examples.
# popen()) the command command input-file, where command is the value of the
# FILE_VERSION_FILTER tag, and input-file is the name of an input file provided
# by doxygen. Whatever the program writes to standard output is used as the file
# version. For an example see the documentation.

FILE_VERSION_FILTER =
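
# A minimal sketch (assuming the sources live in a git work tree; the tag is
# deliberately left empty above): doxygen appends the input file name to the
# command, so a value such as
#
#   FILE_VERSION_FILTER = "git log -1 --pretty=format:%h --"
#
# would record the abbreviated hash of the last commit touching each file as
# its version string.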

# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
# by doxygen. The layout file controls the global structure of the generated
# output files in an output format independent way. The create the layout file
# that represents doxygen's defaults, run doxygen with the -l option.
# You can optionally specify a file name after the option, if omitted
# DoxygenLayout.xml will be used as the name of the layout file.
# output files in an output format independent way. To create the layout file
# that represents doxygen's defaults, run doxygen with the -l option. You can
# optionally specify a file name after the option, if omitted DoxygenLayout.xml
# will be used as the name of the layout file.
#
# Note that if you run doxygen from a directory containing a file called
# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
# tag is left empty.

LAYOUT_FILE = doxygenlayout.xml
LAYOUT_FILE =

# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
# the reference definitions. This must be a list of .bib files. The .bib
# extension is automatically appended if omitted. This requires the bibtex tool
# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info.
# For LaTeX the style of the bibliography can be controlled using
# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
# search path. See also \cite for info how to create references.

CITE_BIB_FILES =

#---------------------------------------------------------------------------
# configuration options related to warning and progress messages
# Configuration options related to warning and progress messages
#---------------------------------------------------------------------------

# The QUIET tag can be used to turn on/off the messages that are generated
# by doxygen. Possible values are YES and NO. If left blank NO is used.
# The QUIET tag can be used to turn on/off the messages that are generated to
# standard output by doxygen. If QUIET is set to YES this implies that the
# messages are off.
# The default value is: NO.

QUIET = NO

# The WARNINGS tag can be used to turn on/off the warning messages that are
# generated by doxygen. Possible values are YES and NO. If left blank
# NO is used.
# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
# this implies that the warnings are on.
#
# Tip: Turn warnings on while writing the documentation.
# The default value is: YES.

WARNINGS = YES

# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings
# for undocumented members. If EXTRACT_ALL is set to YES then this flag will
# automatically be disabled.
# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate
# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
# will automatically be disabled.
# The default value is: YES.

WARN_IF_UNDOCUMENTED = NO

# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for
# potential errors in the documentation, such as not documenting some
# parameters in a documented function, or documenting parameters that
# don't exist or using markup commands wrongly.
# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
# potential errors in the documentation, such as not documenting some parameters
# in a documented function, or documenting parameters that don't exist or using
# markup commands wrongly.
# The default value is: YES.

WARN_IF_DOC_ERROR = YES

# The WARN_NO_PARAMDOC option can be enabled to get warnings for
# functions that are documented, but have no documentation for their parameters
# or return value. If set to NO (the default) doxygen will only warn about
# wrong or incomplete parameter documentation, but not about the absence of
# documentation.
# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
# are documented, but have no documentation for their parameters or return
# value. If set to NO, doxygen will only warn about wrong or incomplete
# parameter documentation, but not about the absence of documentation.
# The default value is: NO.

WARN_NO_PARAMDOC = NO

# The WARN_FORMAT tag determines the format of the warning messages that
# doxygen can produce. The string should contain the $file, $line, and $text
# tags, which will be replaced by the file and line number from which the
# warning originated and the warning text. Optionally the format may contain
# $version, which will be replaced by the version of the file (if it could
# be obtained via FILE_VERSION_FILTER)
# If the WARN_AS_ERROR tag is set to YES then doxygen will immediately stop when
# a warning is encountered.
# The default value is: NO.

WARN_AS_ERROR = NO

# The WARN_FORMAT tag determines the format of the warning messages that doxygen
# can produce. The string should contain the $file, $line, and $text tags, which
# will be replaced by the file and line number from which the warning originated
# and the warning text. Optionally the format may contain $version, which will
# be replaced by the version of the file (if it could be obtained via
# FILE_VERSION_FILTER)
# The default value is: $file:$line: $text.

WARN_FORMAT = "$file:$line: $text"
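
# For illustration (file and parameter names are hypothetical): with the format
# above a warning is printed as
#
#   src/webtester.cxx:42: warning: parameter 'timeout' is not documented
#
# i.e. $file, $line and $text are substituted in place and the result goes to
# the WARN_LOGFILE configured below.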

# The WARN_LOGFILE tag can be used to specify a file to which warning
# and error messages should be written. If left blank the output is written
# to stderr.
# The WARN_LOGFILE tag can be used to specify a file to which warning and error
# messages should be written. If left blank the output is written to standard
# error (stderr).

WARN_LOGFILE = doxygen.errors

#---------------------------------------------------------------------------
# configuration options related to the input files
# Configuration options related to the input files
#---------------------------------------------------------------------------

# The INPUT tag can be used to specify the files and/or directories that contain
# documented source files. You may enter file names like "myfile.cpp" or
# directories like "/usr/src/myproject". Separate the files or directories
# with spaces.
# The INPUT tag is used to specify the files and/or directories that contain
# documented source files. You may enter file names like myfile.cpp or
# directories like /usr/src/myproject. Separate the files or directories with
# spaces. See also FILE_PATTERNS and EXTENSION_MAPPING
# Note: If this tag is empty the current directory is searched.

INPUT = @SRCDIR@/../src
INPUT = @top_srcdir@/src
INPUT += @top_srcdir@/@README_FILE@

# This tag can be used to specify the character encoding of the source files
# that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is
# also the default input encoding. Doxygen uses libiconv (or the iconv built
# into libc) for the transcoding. See http://www.gnu.org/software/libiconv for
# the list of possible encodings.
# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
# documentation (see: http://www.gnu.org/software/libiconv) for the list of
# possible encodings.
# The default value is: UTF-8.

INPUT_ENCODING = UTF-8

# If the value of the INPUT tag contains directories, you can use the
# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
# and *.h) to filter out the source-files in the directories. If left
# blank the following patterns are tested:
# *.c *.cc *.cxx *.cpp *.c++ *.d *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh
# *.hxx *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.dox *.py
# *.f90 *.f *.for *.vhd *.vhdl
# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
# *.h) to filter out the source-files in the directories.
#
# Note that for custom extensions or not directly supported extensions you also
# need to set EXTENSION_MAPPING for the extension otherwise the files are not
# read by doxygen.
#
# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp,
# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,
# *.hh, *.hxx, *.hpp, *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc,
# *.m, *.markdown, *.md, *.mm, *.dox, *.py, *.pyw, *.f90, *.f95, *.f03, *.f08,
# *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf and *.qsf.

FILE_PATTERNS = *.[ch]xx \
                *.doc
FILE_PATTERNS = *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.idl *.ddl *.odl *.h *.hh *.hxx *.hpp *.h++ *.cs *.d *.php *.php4 *.php5 *.phtml *.inc *.m *.markdown *.md *.mm *.dox *.py *.f90 *.f *.for *.tcl *.vhd *.vhdl *.ucf *.qsf *.as *.js *.wt *.sql

# The RECURSIVE tag can be used to turn specify whether or not subdirectories
# should be searched for input files as well. Possible values are YES and NO.
# If left blank NO is used.
# The RECURSIVE tag can be used to specify whether or not subdirectories should
# be searched for input files as well.
# The default value is: NO.

RECURSIVE = NO
RECURSIVE = YES

# The EXCLUDE tag can be used to specify files and/or directories that should
# The EXCLUDE tag can be used to specify files and/or directories that should be
# excluded from the INPUT source files. This way you can easily exclude a
# subdirectory from a directory tree whose root is specified with the INPUT tag.
#
# Note that relative paths are relative to the directory from which doxygen is
# run.

EXCLUDE =

# The EXCLUDE_SYMLINKS tag can be used select whether or not files or
# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
# directories that are symbolic links (a Unix file system feature) are excluded
# from the input.
# The default value is: NO.

EXCLUDE_SYMLINKS = NO

# If the value of the INPUT tag contains directories, you can use the
# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
# certain files from those directories. Note that the wildcards are matched
# against the file with absolute path, so to exclude all test directories
# for example use the pattern */test/*
# certain files from those directories.
#
# Note that the wildcards are matched against the file with absolute path, so to
# exclude all test directories for example use the pattern */test/*

EXCLUDE_PATTERNS = moc_* \
                   uic_* \
                   qrc_*
EXCLUDE_PATTERNS = moc_* uic_* qrc_* version.[ch]xx

# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
# (namespaces, classes, functions, etc.) that should be excluded from the
# output. The symbol name can be a fully qualified name, a word, or if the
# wildcard * is used, a substring. Examples: ANamespace, AClass,
# AClass::ANamespace, ANamespace::*Test
#
# Note that the wildcards are matched against the file with absolute path, so to
# exclude all test directories use the pattern */test/*

EXCLUDE_SYMBOLS =

# The EXAMPLE_PATH tag can be used to specify one or more files or
# directories that contain example code fragments that are included (see
# the \include command).
# The EXAMPLE_PATH tag can be used to specify one or more files or directories
# that contain example code fragments that are included (see the \include
# command).

EXAMPLE_PATH = .
EXAMPLE_PATH = @top_srcdir@

# If the value of the EXAMPLE_PATH tag contains directories, you can use the
# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
# and *.h) to filter out the source-files in the directories. If left
# blank all files are included.
# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
# *.h) to filter out the source-files in the directories. If left blank all
# files are included.

EXAMPLE_PATTERNS =
EXAMPLE_PATTERNS = *

# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
# searched for input files to be used with the \include or \dontinclude
# commands irrespective of the value of the RECURSIVE tag.
# Possible values are YES and NO. If left blank NO is used.
# searched for input files to be used with the \include or \dontinclude commands
# irrespective of the value of the RECURSIVE tag.
# The default value is: NO.

EXAMPLE_RECURSIVE = NO
EXAMPLE_RECURSIVE = YES

# The IMAGE_PATH tag can be used to specify one or more files or
# directories that contain image that are included in the documentation (see
# the \image command).
# The IMAGE_PATH tag can be used to specify one or more files or directories
# that contain images that are to be included in the documentation (see the
# \image command).

IMAGE_PATH =

# The INPUT_FILTER tag can be used to specify a program that doxygen should
# invoke to filter for each input file. Doxygen will invoke the filter program
# by executing (via popen()) the command <filter> <input-file>, where <filter>
# is the value of the INPUT_FILTER tag, and <input-file> is the name of an
# input file. Doxygen will then use the output that the filter program writes
# to standard output.
# If FILTER_PATTERNS is specified, this tag will be
# ignored.
# by executing (via popen()) the command:
#
# <filter> <input-file>
#
# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
# name of an input file. Doxygen will then use the output that the filter
# program writes to standard output. If FILTER_PATTERNS is specified, this tag
# will be ignored.
#
# Note that the filter must not add or remove lines; it is applied before the
# code is scanned, but not when the output code is generated. If lines are added
# or removed, the anchors will not be placed correctly.
#
# Note that for custom extensions or not directly supported extensions you also
# need to set EXTENSION_MAPPING for the extension otherwise the files are not
# properly processed by doxygen.

INPUT_FILTER =

# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
# basis.
# Doxygen will compare the file name with each pattern and apply the
# filter if there is a match.
# The filters are a list of the form:
# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further
# info on how filters are used. If FILTER_PATTERNS is empty or if
# non of the patterns match the file name, INPUT_FILTER is applied.
# basis. Doxygen will compare the file name with each pattern and apply the
# filter if there is a match. The filters are a list of the form: pattern=filter
# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
# patterns match the file name, INPUT_FILTER is applied.
#
# Note that for custom extensions or not directly supported extensions you also
# need to set EXTENSION_MAPPING for the extension otherwise the files are not
# properly processed by doxygen.

FILTER_PATTERNS =
FILTER_PATTERNS = *.wt=doxygen-webtester.sed *.sql=@top_srcdir@/sql-to-dot.sed
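
# For illustration (the input file name is hypothetical): with the patterns
# above, a file such as pages/login.wt is run through the first filter as
#
#   doxygen-webtester.sed pages/login.wt
#
# and doxygen reads that command's standard output instead of the file itself;
# *.sql files go through the sql-to-dot.sed filter, and everything else falls
# back to INPUT_FILTER, which is empty above.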
|
||||
|
||||
# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
|
||||
# INPUT_FILTER) will be used to filter the input files when producing source
|
||||
# files to browse (i.e. when SOURCE_BROWSER is set to YES).
|
||||
# INPUT_FILTER) will also be used to filter the input files that are used for
|
||||
# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
|
||||
# The default value is: NO.
|
||||
|
||||
FILTER_SOURCE_FILES = NO
|
||||
|
||||
# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
|
||||
# pattern. A pattern will override the setting for FILTER_PATTERN (if any)
|
||||
# and it is also possible to disable source filtering for a specific pattern
|
||||
# using *.ext= (so without naming a filter). This option only has effect when
|
||||
# FILTER_SOURCE_FILES is enabled.
|
||||
# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
|
||||
# it is also possible to disable source filtering for a specific pattern using
|
||||
# *.ext= (so without naming a filter).
|
||||
# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
|
||||
|
||||
FILTER_SOURCE_PATTERNS =
|
||||
|
||||
# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
|
||||
# is part of the input, its contents will be placed on the main page
|
||||
# (index.html). This can be useful if you have a project on for instance GitHub
|
||||
# and want to reuse the introduction page also for the doxygen output.
|
||||
|
||||
USE_MDFILE_AS_MAINPAGE = @top_srcdir@/@README_FILE@
|
||||
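# For illustration only (paths are hypothetical): a Markdown file that is
# already part of INPUT can be promoted to the main page like this:
#   INPUT                  = src README.md
#   USE_MDFILE_AS_MAINPAGE = README.md
# Here the value is supplied via the @top_srcdir@/@README_FILE@ placeholders
# above, presumably substituted by the build system.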
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to source browsing
|
||||
# Configuration options related to source browsing
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the SOURCE_BROWSER tag is set to YES then a list of source files will
|
||||
# be generated. Documented entities will be cross-referenced with these sources.
|
||||
# Note: To get rid of all source code in the generated output, make sure also
|
||||
# VERBATIM_HEADERS is set to NO.
|
||||
# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
|
||||
# generated. Documented entities will be cross-referenced with these sources.
|
||||
#
|
||||
# Note: To get rid of all source code in the generated output, make sure that
|
||||
# also VERBATIM_HEADERS is set to NO.
|
||||
# The default value is: NO.
|
||||
|
||||
SOURCE_BROWSER = YES
|
||||
|
||||
# Setting the INLINE_SOURCES tag to YES will include the body
|
||||
# of functions and classes directly in the documentation.
|
||||
# Setting the INLINE_SOURCES tag to YES will include the body of functions,
|
||||
# classes and enums directly into the documentation.
|
||||
# The default value is: NO.
|
||||
|
||||
INLINE_SOURCES = YES
|
||||
|
||||
# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct
|
||||
# doxygen to hide any special comment blocks from generated source code
|
||||
# fragments. Normal C and C++ comments will always remain visible.
|
||||
# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
|
||||
# special comment blocks from generated source code fragments. Normal C, C++ and
|
||||
# Fortran comments will always remain visible.
|
||||
# The default value is: YES.
|
||||
|
||||
STRIP_CODE_COMMENTS = YES
|
||||
|
||||
# If the REFERENCED_BY_RELATION tag is set to YES
|
||||
# then for each documented function all documented
|
||||
# functions referencing it will be listed.
|
||||
# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
|
||||
# function all documented functions referencing it will be listed.
|
||||
# The default value is: NO.
|
||||
|
||||
REFERENCED_BY_RELATION = YES
|
||||
REFERENCED_BY_RELATION = NO
|
||||
|
||||
# If the REFERENCES_RELATION tag is set to YES
|
||||
# then for each documented function all documented entities
|
||||
# called/used by that function will be listed.
|
||||
# If the REFERENCES_RELATION tag is set to YES then for each documented function
|
||||
# all documented entities called/used by that function will be listed.
|
||||
# The default value is: NO.
|
||||
|
||||
REFERENCES_RELATION = YES
|
||||
REFERENCES_RELATION = NO
|
||||
|
||||
# If the REFERENCES_LINK_SOURCE tag is set to YES (the default)
|
||||
# and SOURCE_BROWSER tag is set to YES, then the hyperlinks from
|
||||
# functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will
|
||||
# link to the source code.
|
||||
# Otherwise they will link to the documentation.
|
||||
# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
|
||||
# to YES then the hyperlinks from functions in REFERENCES_RELATION and
|
||||
# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
|
||||
# link to the documentation.
|
||||
# The default value is: YES.
|
||||
|
||||
REFERENCES_LINK_SOURCE = YES
|
||||
|
||||
# If the USE_HTAGS tag is set to YES then the references to source code
|
||||
# will point to the HTML generated by the htags(1) tool instead of doxygen
|
||||
# built-in source browser. The htags tool is part of GNU's global source
|
||||
# tagging system (see http://www.gnu.org/software/global/global.html). You
|
||||
# will need version 4.8.6 or higher.
|
||||
# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
|
||||
# source code will show a tooltip with additional information such as prototype,
|
||||
# brief description and links to the definition and documentation. Since this
|
||||
# will make the HTML file larger and loading of large files a bit slower, you
|
||||
# can opt to disable this feature.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag SOURCE_BROWSER is set to YES.
|
||||
|
||||
SOURCE_TOOLTIPS = YES
|
||||
|
||||
# If the USE_HTAGS tag is set to YES then the references to source code will
|
||||
# point to the HTML generated by the htags(1) tool instead of doxygen built-in
|
||||
# source browser. The htags tool is part of GNU's global source tagging system
|
||||
# (see http://www.gnu.org/software/global/global.html). You will need version
|
||||
# 4.8.6 or higher.
|
||||
#
|
||||
# To use it do the following:
|
||||
# - Install the latest version of global
|
||||
# - Enable SOURCE_BROWSER and USE_HTAGS in the config file
|
||||
# - Make sure the INPUT points to the root of the source tree
|
||||
# - Run doxygen as normal
|
||||
#
|
||||
# Doxygen will invoke htags (and that will in turn invoke gtags), so these
|
||||
# tools must be available from the command line (i.e. in the search path).
|
||||
#
|
||||
# The result: instead of the source browser generated by doxygen, the links to
|
||||
# source code will now point to the output of htags.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag SOURCE_BROWSER is set to YES.
|
||||
|
||||
USE_HTAGS = NO
|
||||
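# A rough sketch of the htags workflow listed above, assuming GNU global
# (htags/gtags) is installed and on the search path; values are illustrative:
#   SOURCE_BROWSER = YES
#   USE_HTAGS      = YES
#   INPUT          = <root of the source tree>
# Doxygen is then run as normal and invokes htags (and gtags) itself. This
# configuration keeps USE_HTAGS at NO and uses the built-in source browser.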
|
||||
# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen
|
||||
# will generate a verbatim copy of the header file for each class for
|
||||
# which an include is specified. Set to NO to disable this.
|
||||
# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a
|
||||
# verbatim copy of the header file for each class for which an include is
|
||||
# specified. Set to NO to disable this.
|
||||
# See also: Section \class.
|
||||
# The default value is: YES.
|
||||
|
||||
VERBATIM_HEADERS = YES
|
||||
|
||||
# If the CLANG_ASSISTED_PARSING tag is set to YES then doxygen will use the
|
||||
# clang parser (see: http://clang.llvm.org/) for more accurate parsing at the
|
||||
# cost of reduced performance. This can be particularly helpful with template
|
||||
# rich C++ code for which doxygen's built-in parser lacks the necessary type
|
||||
# information.
|
||||
# Note: The availability of this option depends on whether or not doxygen was
|
||||
# generated with the -Duse-libclang=ON option for CMake.
|
||||
# The default value is: NO.
|
||||
|
||||
CLANG_ASSISTED_PARSING = NO
|
||||
|
||||
# If clang assisted parsing is enabled you can provide the compiler with command
|
||||
# line options that you would normally use when invoking the compiler. Note that
|
||||
# the include paths will already be set by doxygen for the files and directories
|
||||
# specified with INPUT and INCLUDE_PATH.
|
||||
# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES.
|
||||
|
||||
CLANG_OPTIONS =
|
||||
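# Hypothetical example (clang-assisted parsing is disabled above): if
# CLANG_ASSISTED_PARSING were YES, the usual compiler flags could be passed,
# e.g.
#   CLANG_OPTIONS = -std=c++11 -DSOME_PROJECT_MACRO
# Include paths from INPUT and INCLUDE_PATH need not be repeated, as noted in
# the comment above.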
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the alphabetical class index
|
||||
# Configuration options related to the alphabetical class index
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index
|
||||
# of all compounds will be generated. Enable this if the project
|
||||
# contains a lot of classes, structs, unions or interfaces.
|
||||
# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
|
||||
# compounds will be generated. Enable this if the project contains a lot of
|
||||
# classes, structs, unions or interfaces.
|
||||
# The default value is: YES.
|
||||
|
||||
ALPHABETICAL_INDEX = YES
|
||||
|
||||
# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then
|
||||
# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns
|
||||
# in which this list will be split (can be a number in the range [1..20])
|
||||
# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
|
||||
# which the alphabetical index list will be split.
|
||||
# Minimum value: 1, maximum value: 20, default value: 5.
|
||||
# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
|
||||
|
||||
COLS_IN_ALPHA_INDEX = 5
|
||||
|
||||
# In case all classes in a project start with a common prefix, all
|
||||
# classes will be put under the same header in the alphabetical index.
|
||||
# The IGNORE_PREFIX tag can be used to specify one or more prefixes that
|
||||
# should be ignored while generating the index headers.
|
||||
# In case all classes in a project start with a common prefix, all classes will
|
||||
# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
|
||||
# can be used to specify a prefix (or a list of prefixes) that should be ignored
|
||||
# while generating the index headers.
|
||||
# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
|
||||
|
||||
IGNORE_PREFIX =
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the HTML output
|
||||
# Configuration options related to the HTML output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_HTML tag is set to YES (the default) Doxygen will
|
||||
# generate HTML output.
|
||||
# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output
|
||||
# The default value is: YES.
|
||||
|
||||
GENERATE_HTML = YES
|
||||
|
||||
# The HTML_OUTPUT tag is used to specify where the HTML docs will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `html' will be used as the default path.
|
||||
# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
|
||||
# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
|
||||
# it.
|
||||
# The default directory is: html.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
HTML_OUTPUT = html
|
||||
|
||||
# The HTML_FILE_EXTENSION tag can be used to specify the file extension for
|
||||
# each generated HTML page (for example: .htm,.php,.asp). If it is left blank
|
||||
# doxygen will generate files with .html extension.
|
||||
# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
|
||||
# generated HTML page (for example: .htm, .php, .asp).
|
||||
# The default value is: .html.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
HTML_FILE_EXTENSION = .html
|
||||
|
||||
# The HTML_HEADER tag can be used to specify a personal HTML header for
|
||||
# each generated HTML page. If it is left blank doxygen will generate a
|
||||
# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
|
||||
# each generated HTML page. If the tag is left blank doxygen will generate a
|
||||
# standard header.
|
||||
#
|
||||
# To get valid HTML, the header file must include any scripts and style sheets
|
||||
# that doxygen needs, which depend on the configuration options used (e.g.
|
||||
# the setting GENERATE_TREEVIEW). It is highly recommended to start with a
|
||||
# default header using
|
||||
# doxygen -w html new_header.html new_footer.html new_stylesheet.css
|
||||
# YourConfigFile
|
||||
# and then modify the file new_header.html. See also section "Doxygen usage"
|
||||
# for information on how to generate the default header that doxygen normally
|
||||
# uses.
|
||||
# Note: The header is subject to change so you typically have to regenerate the
|
||||
# default header when upgrading to a newer version of doxygen. For a description
|
||||
# of the possible markers and block names see the documentation.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
HTML_HEADER =
|
||||
HTML_HEADER = header.html
|
||||
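# Assumption, not taken from this project's build scripts: the header.html
# and footer.html referenced here could have been generated once from the
# doxygen defaults and then customised, e.g.
#   doxygen -w html header.html footer.html new_stylesheet.css YourConfigFile
# As noted above, the header usually has to be regenerated after upgrading
# doxygen.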
|
||||
# The HTML_FOOTER tag can be used to specify a personal HTML footer for
|
||||
# each generated HTML page. If it is left blank doxygen will generate a
|
||||
# standard footer.
|
||||
# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
|
||||
# generated HTML page. If the tag is left blank doxygen will generate a standard
|
||||
# footer. See HTML_HEADER for more information on how to generate a default
|
||||
# footer and what special commands can be used inside the footer. See also
|
||||
# section "Doxygen usage" for information on how to generate the default footer
|
||||
# that doxygen normally uses.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
HTML_FOOTER =
|
||||
HTML_FOOTER = footer.html
|
||||
|
||||
# The HTML_STYLESHEET tag can be used to specify a user-defined cascading
|
||||
# style sheet that is used by each HTML page. It can be used to
|
||||
# fine-tune the look of the HTML output. If the tag is left blank doxygen
|
||||
# will generate a default style sheet. Note that doxygen will try to copy
|
||||
# the style sheet file to the HTML output directory, so don't put your own
|
||||
# stylesheet in the HTML output directory as well, or it will be erased!
|
||||
# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
|
||||
# sheet that is used by each HTML page. It can be used to fine-tune the look of
|
||||
# the HTML output. If left blank doxygen will generate a default style sheet.
|
||||
# See also section "Doxygen usage" for information on how to generate the style
|
||||
# sheet that doxygen normally uses.
|
||||
# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
|
||||
# it is more robust and this tag (HTML_STYLESHEET) will in the future become
|
||||
# obsolete.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
HTML_STYLESHEET =
|
||||
|
||||
# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output.
|
||||
# Doxygen will adjust the colors in the stylesheet and background images
|
||||
# according to this color. Hue is specified as an angle on a colorwheel,
|
||||
# see http://en.wikipedia.org/wiki/Hue for more information.
|
||||
# For instance the value 0 represents red, 60 is yellow, 120 is green,
|
||||
# 180 is cyan, 240 is blue, 300 purple, and 360 is red again.
|
||||
# The allowed range is 0 to 359.
|
||||
# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined
|
||||
# cascading style sheets that are included after the standard style sheets
|
||||
# created by doxygen. Using this option one can overrule certain style aspects.
|
||||
# This is preferred over using HTML_STYLESHEET since it does not replace the
|
||||
# standard style sheet and is therefore more robust against future updates.
|
||||
# Doxygen will copy the style sheet files to the output directory.
|
||||
# Note: The order of the extra style sheet files is of importance (e.g. the last
|
||||
# style sheet in the list overrules the setting of the previous ones in the
|
||||
# list). For an example see the documentation.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
HTML_EXTRA_STYLESHEET = style.css
|
||||
|
||||
# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
|
||||
# other source files which should be copied to the HTML output directory. Note
|
||||
# that these files will be copied to the base HTML output directory. Use the
|
||||
# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
|
||||
# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
|
||||
# files will be copied as-is; there are no commands or markers available.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
HTML_EXTRA_FILES =
|
||||
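# Illustrative only (no extra files are configured here): an image referenced
# from the custom header could be shipped next to the style.css extra
# stylesheet like this, using a hypothetical file name:
#   HTML_EXTRA_FILES = logo.png
# Inside HTML_HEADER or HTML_FOOTER such a file is addressed via the
# $relpath^ marker, as described above.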
|
||||
# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
|
||||
# will adjust the colors in the style sheet and background images according to
|
||||
# this color. Hue is specified as an angle on a colorwheel, see
|
||||
# http://en.wikipedia.org/wiki/Hue for more information. For instance the value
|
||||
# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
|
||||
# purple, and 360 is red again.
|
||||
# Minimum value: 0, maximum value: 359, default value: 220.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
HTML_COLORSTYLE_HUE = 220
|
||||
|
||||
# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of
|
||||
# the colors in the HTML output. For a value of 0 the output will use
|
||||
# grayscales only. A value of 255 will produce the most vivid colors.
|
||||
# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
|
||||
# in the HTML output. For a value of 0 the output will use grayscales only. A
|
||||
# value of 255 will produce the most vivid colors.
|
||||
# Minimum value: 0, maximum value: 255, default value: 100.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
HTML_COLORSTYLE_SAT = 100
|
||||
|
||||
# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to
|
||||
# the luminance component of the colors in the HTML output. Values below
|
||||
# 100 gradually make the output lighter, whereas values above 100 make
|
||||
# the output darker. The value divided by 100 is the actual gamma applied,
|
||||
# so 80 represents a gamma of 0.8. The value 220 represents a gamma of 2.2,
|
||||
# and 100 does not change the gamma.
|
||||
# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
|
||||
# luminance component of the colors in the HTML output. Values below 100
|
||||
# gradually make the output lighter, whereas values above 100 make the output
|
||||
# darker. The value divided by 100 is the actual gamma applied, so 80 represents
|
||||
# a gamma of 0.8. The value 220 represents a gamma of 2.2, and 100 does not
|
||||
# change the gamma.
|
||||
# Minimum value: 40, maximum value: 240, default value: 80.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
HTML_COLORSTYLE_GAMMA = 80
|
||||
|
||||
# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
|
||||
# page will contain the date and time when the page was generated. Setting
|
||||
# this to NO can help when comparing the output of multiple runs.
|
||||
# page will contain the date and time when the page was generated. Setting this
|
||||
# to YES can help to show when doxygen was last run and thus if the
|
||||
# documentation is up to date.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
HTML_TIMESTAMP = YES
|
||||
|
||||
# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes,
|
||||
# files or namespaces will be aligned in HTML using tables. If set to
|
||||
# NO a bullet list will be used.
|
||||
|
||||
HTML_ALIGN_MEMBERS = YES
|
||||
HTML_TIMESTAMP = NO
|
||||
|
||||
# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
|
||||
# documentation will contain sections that can be hidden and shown after the
|
||||
# page has loaded. For this to work a browser that supports
|
||||
# JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox
|
||||
# Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari).
|
||||
# page has loaded.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
HTML_DYNAMIC_SECTIONS = YES
|
||||
|
||||
# If the GENERATE_DOCSET tag is set to YES, additional index files
|
||||
# will be generated that can be used as input for Apple's Xcode 3
|
||||
# integrated development environment, introduced with OSX 10.5 (Leopard).
|
||||
# To create a documentation set, doxygen will generate a Makefile in the
|
||||
# HTML output directory. Running make will produce the docset in that
|
||||
# directory and running "make install" will install the docset in
|
||||
# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find
|
||||
# it at startup.
|
||||
# See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
|
||||
# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
|
||||
# shown in the various tree structured indices initially; the user can expand
|
||||
# and collapse entries dynamically later on. Doxygen will expand the tree to
|
||||
# such a level that at most the specified number of entries are visible (unless
|
||||
# a fully collapsed tree already exceeds this amount). So setting the number of
|
||||
# entries to 1 will produce a fully collapsed tree by default. 0 is a special value
|
||||
# representing an infinite number of entries and will result in a fully expanded
|
||||
# tree by default.
|
||||
# Minimum value: 0, maximum value: 9999, default value: 100.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
HTML_INDEX_NUM_ENTRIES = 100
|
||||
|
||||
# If the GENERATE_DOCSET tag is set to YES, additional index files will be
|
||||
# generated that can be used as input for Apple's Xcode 3 integrated development
|
||||
# environment (see: http://developer.apple.com/tools/xcode/), introduced with
|
||||
# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a
|
||||
# Makefile in the HTML output directory. Running make will produce the docset in
|
||||
# that directory and running make install will install the docset in
|
||||
# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
|
||||
# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
|
||||
# for more information.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
GENERATE_DOCSET = NO
|
||||
|
||||
# When GENERATE_DOCSET tag is set to YES, this tag determines the name of the
|
||||
# feed. A documentation feed provides an umbrella under which multiple
|
||||
# documentation sets from a single provider (such as a company or product suite)
|
||||
# can be grouped.
|
||||
# This tag determines the name of the docset feed. A documentation feed provides
|
||||
# an umbrella under which multiple documentation sets from a single provider
|
||||
# (such as a company or product suite) can be grouped.
|
||||
# The default value is: Doxygen generated docs.
|
||||
# This tag requires that the tag GENERATE_DOCSET is set to YES.
|
||||
|
||||
DOCSET_FEEDNAME = "Doxygen generated docs"
|
||||
|
||||
# When GENERATE_DOCSET tag is set to YES, this tag specifies a string that
|
||||
# should uniquely identify the documentation set bundle. This should be a
|
||||
# reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen
|
||||
# will append .docset to the name.
|
||||
# This tag specifies a string that should uniquely identify the documentation
|
||||
# set bundle. This should be a reverse domain-name style string, e.g.
|
||||
# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
|
||||
# The default value is: org.doxygen.Project.
|
||||
# This tag requires that the tag GENERATE_DOCSET is set to YES.
|
||||
|
||||
DOCSET_BUNDLE_ID = com.swisssign.dev
|
||||
DOCSET_BUNDLE_ID = org.doxygen.Project
|
||||
|
||||
# When GENERATE_PUBLISHER_ID tag specifies a string that should uniquely identify
|
||||
# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
|
||||
# the documentation publisher. This should be a reverse domain-name style
|
||||
# string, e.g. com.mycompany.MyDocSet.documentation.
|
||||
# The default value is: org.doxygen.Publisher.
|
||||
# This tag requires that the tag GENERATE_DOCSET is set to YES.
|
||||
|
||||
DOCSET_PUBLISHER_ID = com.swisssign.dev
|
||||
DOCSET_PUBLISHER_ID = org.doxygen.Publisher
|
||||
|
||||
# The GENERATE_PUBLISHER_NAME tag identifies the documentation publisher.
|
||||
# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
|
||||
# The default value is: Publisher.
|
||||
# This tag requires that the tag GENERATE_DOCSET is set to YES.
|
||||
|
||||
DOCSET_PUBLISHER_NAME = "Marc Wäckerlin (SwissSign AG)"
|
||||
DOCSET_PUBLISHER_NAME = Publisher
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, additional index files
|
||||
# will be generated that can be used as input for tools like the
|
||||
# Microsoft HTML help workshop to generate a compiled HTML help file (.chm)
|
||||
# of the generated HTML documentation.
|
||||
# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
|
||||
# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
|
||||
# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
|
||||
# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on
|
||||
# Windows.
|
||||
#
|
||||
# The HTML Help Workshop contains a compiler that can convert all HTML output
|
||||
# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
|
||||
# files are now used as the Windows 98 help format, and will replace the old
|
||||
# Windows help format (.hlp) on all Windows platforms in the future. Compressed
|
||||
# HTML files also contain an index, a table of contents, and you can search for
|
||||
# words in the documentation. The HTML workshop also contains a viewer for
|
||||
# compressed HTML files.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
GENERATE_HTMLHELP = NO
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can
|
||||
# be used to specify the file name of the resulting .chm file. You
|
||||
# can add a path in front of the file if the result should not be
|
||||
# The CHM_FILE tag can be used to specify the file name of the resulting .chm
|
||||
# file. You can add a path in front of the file if the result should not be
|
||||
# written to the html output directory.
|
||||
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
|
||||
|
||||
CHM_FILE =
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can
|
||||
# be used to specify the location (absolute path including file name) of
|
||||
# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run
|
||||
# the HTML help compiler on the generated index.hhp.
|
||||
# The HHC_LOCATION tag can be used to specify the location (absolute path
|
||||
# including file name) of the HTML help compiler (hhc.exe). If non-empty,
|
||||
# doxygen will try to run the HTML help compiler on the generated index.hhp.
|
||||
# The file has to be specified with full path.
|
||||
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
|
||||
|
||||
HHC_LOCATION =
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag
|
||||
# controls if a separate .chi index file is generated (YES) or that
|
||||
# it should be included in the master .chm file (NO).
|
||||
# The GENERATE_CHI flag controls if a separate .chi index file is generated
|
||||
# (YES) or that it should be included in the master .chm file (NO).
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
|
||||
|
||||
GENERATE_CHI = NO
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING
|
||||
# is used to encode HtmlHelp index (hhk), content (hhc) and project file
|
||||
# content.
|
||||
# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc)
|
||||
# and project file content.
|
||||
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
|
||||
|
||||
CHM_INDEX_ENCODING =
|
||||
|
||||
# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag
|
||||
# controls whether a binary table of contents is generated (YES) or a
|
||||
# normal table of contents (NO) in the .chm file.
|
||||
# The BINARY_TOC flag controls whether a binary table of contents is generated
|
||||
# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it
|
||||
# enables the Previous and Next buttons.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
|
||||
|
||||
BINARY_TOC = NO
|
||||
|
||||
# The TOC_EXPAND flag can be set to YES to add extra items for group members
|
||||
# to the contents of the HTML help documentation and to the tree view.
|
||||
# The TOC_EXPAND flag can be set to YES to add extra items for group members to
|
||||
# the table of contents of the HTML help documentation and to the tree view.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
|
||||
|
||||
TOC_EXPAND = NO
|
||||
|
||||
# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
|
||||
# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated
|
||||
# that can be used as input for Qt's qhelpgenerator to generate a
|
||||
# Qt Compressed Help (.qch) of the generated HTML documentation.
|
||||
# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
|
||||
# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
|
||||
# (.qch) of the generated HTML documentation.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
GENERATE_QHP = YES
|
||||
GENERATE_QHP = NO
|
||||
|
||||
# If the QHG_LOCATION tag is specified, the QCH_FILE tag can
|
||||
# be used to specify the file name of the resulting .qch file.
|
||||
# The path specified is relative to the HTML output folder.
|
||||
# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
|
||||
# the file name of the resulting .qch file. The path specified is relative to
|
||||
# the HTML output folder.
|
||||
# This tag requires that the tag GENERATE_QHP is set to YES.
|
||||
|
||||
QCH_FILE = @PACKAGENAME@.qch
|
||||
QCH_FILE =
|
||||
|
||||
# The QHP_NAMESPACE tag specifies the namespace to use when generating
|
||||
# Qt Help Project output. For more information please see
|
||||
# http://doc.trolltech.com/qthelpproject.html#namespace
|
||||
# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
|
||||
# Project output. For more information please see Qt Help Project / Namespace
|
||||
# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
|
||||
# The default value is: org.doxygen.Project.
|
||||
# This tag requires that the tag GENERATE_QHP is set to YES.
|
||||
|
||||
QHP_NAMESPACE = org.waweckerlin.marc.dev
|
||||
QHP_NAMESPACE = org.doxygen.Project
|
||||
|
||||
# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating
|
||||
# Qt Help Project output. For more information please see
|
||||
# http://doc.trolltech.com/qthelpproject.html#virtual-folders
|
||||
# The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt
|
||||
# Help Project output. For more information please see Qt Help Project / Virtual
|
||||
# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
|
||||
# folders).
|
||||
# The default value is: doc.
|
||||
# This tag requires that the tag GENERATE_QHP is set to YES.
|
||||
|
||||
QHP_VIRTUAL_FOLDER = doc
|
||||
|
||||
# If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to
|
||||
# add. For more information please see
|
||||
# http://doc.trolltech.com/qthelpproject.html#custom-filters
|
||||
# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
|
||||
# filter to add. For more information please see Qt Help Project / Custom
|
||||
# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
|
||||
# filters).
|
||||
# This tag requires that the tag GENERATE_QHP is set to YES.
|
||||
|
||||
QHP_CUST_FILTER_NAME =
|
||||
|
||||
# The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the
|
||||
# custom filter to add. For more information please see
|
||||
# <a href="http://doc.trolltech.com/qthelpproject.html#custom-filters">
|
||||
# Qt Help Project / Custom Filters</a>.
|
||||
# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
|
||||
# custom filter to add. For more information please see Qt Help Project / Custom
|
||||
# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
|
||||
# filters).
|
||||
# This tag requires that the tag GENERATE_QHP is set to YES.
|
||||
|
||||
QHP_CUST_FILTER_ATTRS =
|
||||
|
||||
# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
|
||||
# project's
|
||||
# filter section matches.
|
||||
# <a href="http://doc.trolltech.com/qthelpproject.html#filter-attributes">
|
||||
# Qt Help Project / Filter Attributes</a>.
|
||||
# project's filter section matches. Qt Help Project / Filter Attributes (see:
|
||||
# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
|
||||
# This tag requires that the tag GENERATE_QHP is set to YES.
|
||||
|
||||
QHP_SECT_FILTER_ATTRS =
|
||||
|
||||
# If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can
|
||||
# be used to specify the location of Qt's qhelpgenerator.
|
||||
# If non-empty doxygen will try to run qhelpgenerator on the generated
|
||||
# .qhp file.
|
||||
# The QHG_LOCATION tag can be used to specify the location of Qt's
|
||||
# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
|
||||
# generated .qhp file.
|
||||
# This tag requires that the tag GENERATE_QHP is set to YES.
|
||||
|
||||
QHG_LOCATION =
|
||||
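# Hypothetical Qt help setup (disabled in this configuration, GENERATE_QHP is
# NO); the qhelpgenerator location is the machine-specific part:
#   GENERATE_QHP  = YES
#   QCH_FILE      = project.qch
#   QHP_NAMESPACE = org.example.project
#   QHG_LOCATION  = /usr/bin/qhelpgenerator
# With these set, doxygen runs qhelpgenerator on the generated .qhp file to
# produce the .qch archive.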
|
||||
# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files
|
||||
# will be generated, which together with the HTML files, form an Eclipse help
|
||||
# plugin. To install this plugin and make it available under the help contents
|
||||
# menu in Eclipse, the contents of the directory containing the HTML and XML
|
||||
# files needs to be copied into the plugins directory of eclipse. The name of
|
||||
# the directory within the plugins directory should be the same as
|
||||
# the ECLIPSE_DOC_ID value. After copying Eclipse needs to be restarted before
|
||||
# the help appears.
|
||||
# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
|
||||
# generated; together with the HTML files, they form an Eclipse help plugin. To
|
||||
# install this plugin and make it available under the help contents menu in
|
||||
# Eclipse, the contents of the directory containing the HTML and XML files needs
|
||||
# to be copied into the plugins directory of eclipse. The name of the directory
|
||||
# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
|
||||
# After copying Eclipse needs to be restarted before the help appears.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
GENERATE_ECLIPSEHELP = NO
|
||||
|
||||
# A unique identifier for the eclipse help plugin. When installing the plugin
|
||||
# the directory name containing the HTML and XML files should also have
|
||||
# this name.
|
||||
# A unique identifier for the Eclipse help plugin. When installing the plugin
|
||||
# the directory name containing the HTML and XML files should also have this
|
||||
# name. Each documentation set should have its own identifier.
|
||||
# The default value is: org.doxygen.Project.
|
||||
# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
|
||||
|
||||
ECLIPSE_DOC_ID = com.swisssign.dev
|
||||
ECLIPSE_DOC_ID = org.doxygen.Project
|
||||
|
||||
# The DISABLE_INDEX tag can be used to turn on/off the condensed index at
|
||||
# top of each HTML page. The value NO (the default) enables the index and
|
||||
# the value YES disables it.
|
||||
# If you want full control over the layout of the generated HTML pages it might
|
||||
# be necessary to disable the index and replace it with your own. The
|
||||
# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
|
||||
# of each HTML page. A value of NO enables the index and the value YES disables
|
||||
# it. Since the tabs in the index contain the same information as the navigation
|
||||
# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
DISABLE_INDEX = NO
|
||||
|
||||
# This tag can be used to set the number of enum values (range [0,1..20])
|
||||
# that doxygen will group on one line in the generated HTML documentation.
|
||||
# Note that a value of 0 will completely suppress the enum values from
# appearing in the overview section.
|
||||
|
||||
ENUM_VALUES_PER_LINE = 4
|
||||
|
||||
# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
|
||||
# structure should be generated to display hierarchical information.
|
||||
# If the tag value is set to YES, a side panel will be generated
|
||||
# containing a tree-like index structure (just like the one that
|
||||
# is generated for HTML Help). For this to work a browser that supports
|
||||
# JavaScript, DHTML, CSS and frames is required (i.e. any modern browser).
|
||||
# Windows users are probably better off using the HTML help feature.
|
||||
# structure should be generated to display hierarchical information. If the tag
|
||||
# value is set to YES, a side panel will be generated containing a tree-like
|
||||
# index structure (just like the one that is generated for HTML Help). For this
|
||||
# to work a browser that supports JavaScript, DHTML, CSS and frames is required
|
||||
# (i.e. any modern browser). Windows users are probably better off using the
|
||||
# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can
|
||||
# further fine-tune the look of the index. As an example, the default style
|
||||
# sheet generated by doxygen has an example that shows how to put an image at
|
||||
# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
|
||||
# the same information as the tab index, you could consider setting
|
||||
# DISABLE_INDEX to YES when enabling this option.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
GENERATE_TREEVIEW = YES
|
||||
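# One possible (illustrative) combination following the note above: since the
# tree view carries the same information as the tab index, the tabs can be
# dropped when the tree is enabled:
#   GENERATE_TREEVIEW = YES
#   DISABLE_INDEX     = YES
# This configuration keeps both, with DISABLE_INDEX left at NO.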
|
||||
# By enabling USE_INLINE_TREES, doxygen will generate the Groups, Directories,
|
||||
# and Class Hierarchy pages using a tree view instead of an ordered list.
|
||||
# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
|
||||
# doxygen will group on one line in the generated HTML documentation.
|
||||
#
|
||||
# Note that a value of 0 will completely suppress the enum values from appearing
|
||||
# in the overview section.
|
||||
# Minimum value: 0, maximum value: 20, default value: 4.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
USE_INLINE_TREES = YES
|
||||
ENUM_VALUES_PER_LINE = 4
|
||||
|
||||
# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be
|
||||
# used to set the initial width (in pixels) of the frame in which the tree
|
||||
# is shown.
|
||||
# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
|
||||
# to set the initial width (in pixels) of the frame in which the tree is shown.
|
||||
# Minimum value: 0, maximum value: 1500, default value: 250.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
TREEVIEW_WIDTH = 250
|
||||
|
||||
# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open
|
||||
# links to external symbols imported via tag files in a separate window.
|
||||
# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to
|
||||
# external symbols imported via tag files in a separate window.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
EXT_LINKS_IN_WINDOW = NO
|
||||
|
||||
# Use this tag to change the font size of Latex formulas included
|
||||
# as images in the HTML documentation. The default is 10. Note that
|
||||
# when you change the font size after a successful doxygen run you need
|
||||
# to manually remove any form_*.png images from the HTML output directory
|
||||
# to force them to be regenerated.
|
||||
# Use this tag to change the font size of LaTeX formulas included as images in
|
||||
# the HTML documentation. When you change the font size after a successful
|
||||
# doxygen run you need to manually remove any form_*.png images from the HTML
|
||||
# output directory to force them to be regenerated.
|
||||
# Minimum value: 8, maximum value: 50, default value: 10.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
FORMULA_FONTSIZE = 10
|
||||
|
||||
# Use the FORMULA_TRANPARENT tag to determine whether or not the images
|
||||
# generated for formulas are transparent PNGs. Transparent PNGs are
|
||||
# not supported properly for IE 6.0, but are supported on all modern browsers.
|
||||
# Note that when changing this option you need to delete any form_*.png files
|
||||
# in the HTML output before the changes have effect.
|
||||
# generated for formulas are transparent PNGs. Transparent PNGs are not
|
||||
# supported properly for IE 6.0, but are supported on all modern browsers.
|
||||
#
|
||||
# Note that when changing this option you need to delete any form_*.png files in
|
||||
# the HTML output directory before the changes have effect.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
FORMULA_TRANSPARENT = YES
|
||||
|
||||
# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax
|
||||
# (see http://www.mathjax.org) which uses client side Javascript for the
|
||||
# rendering instead of using prerendered bitmaps. Use this if you do not
|
||||
# have LaTeX installed or if you want the formulas to look prettier in the HTML
|
||||
# output. When enabled you also need to install MathJax separately and
|
||||
# configure the path to it using the MATHJAX_RELPATH option.
|
||||
# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
|
||||
# http://www.mathjax.org) which uses client side Javascript for the rendering
|
||||
# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
|
||||
# installed or if you want the formulas to look prettier in the HTML output. When
|
||||
# enabled you may also need to install MathJax separately and configure the path
|
||||
# to it using the MATHJAX_RELPATH option.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
USE_MATHJAX = NO
|
||||
|
||||
# When MathJax is enabled you need to specify the location relative to the
|
||||
# HTML output directory using the MATHJAX_RELPATH option. The destination
|
||||
# directory should contain the MathJax.js script. For instance, if the mathjax
|
||||
# directory is located at the same level as the HTML output directory, then
|
||||
# MATHJAX_RELPATH should be ../mathjax. The default value points to the
# mathjax.org site, so you can quickly see the result without installing
|
||||
# MathJax, but it is strongly recommended to install a local copy of MathJax
|
||||
# before deployment.
|
||||
# When MathJax is enabled you can set the default output format to be used for
|
||||
# the MathJax output. See the MathJax site (see:
|
||||
# http://docs.mathjax.org/en/latest/output.html) for more details.
|
||||
# Possible values are: HTML-CSS (which is slower, but has the best
|
||||
# compatibility), NativeMML (i.e. MathML) and SVG.
|
||||
# The default value is: HTML-CSS.
|
||||
# This tag requires that the tag USE_MATHJAX is set to YES.
|
||||
|
||||
MATHJAX_RELPATH = http://www.mathjax.org/mathjax
|
||||
MATHJAX_FORMAT = HTML-CSS
|
||||
|
||||
# When the SEARCHENGINE tag is enabled doxygen will generate a search box
|
||||
# for the HTML output. The underlying search engine uses javascript
|
||||
# and DHTML and should work on any modern browser. Note that when using
|
||||
# HTML help (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets
|
||||
# (GENERATE_DOCSET) there is already a search function so this one should
|
||||
# typically be disabled. For large projects the javascript based search engine
|
||||
# can be slow, then enabling SERVER_BASED_SEARCH may provide a better solution.
|
||||
# When MathJax is enabled you need to specify the location relative to the HTML
|
||||
# output directory using the MATHJAX_RELPATH option. The destination directory
|
||||
# should contain the MathJax.js script. For instance, if the mathjax directory
|
||||
# is located at the same level as the HTML output directory, then
|
||||
# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
|
||||
# Content Delivery Network so you can quickly see the result without installing
|
||||
# MathJax. However, it is strongly recommended to install a local copy of
|
||||
# MathJax from http://www.mathjax.org before deployment.
|
||||
# The default value is: http://cdn.mathjax.org/mathjax/latest.
|
||||
# This tag requires that the tag USE_MATHJAX is set to YES.
|
||||
|
||||
MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest
|
||||
|
||||
# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
|
||||
# extension names that should be enabled during MathJax rendering. For example
|
||||
# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
|
||||
# This tag requires that the tag USE_MATHJAX is set to YES.
|
||||
|
||||
MATHJAX_EXTENSIONS =
|
||||
|
||||
# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
|
||||
# of code that will be used on startup of the MathJax code. See the MathJax site
|
||||
# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
|
||||
# example see the documentation.
|
||||
# This tag requires that the tag USE_MATHJAX is set to YES.
|
||||
|
||||
MATHJAX_CODEFILE =
|
||||
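# Hypothetical local-copy setup (MathJax is not used here, USE_MATHJAX is NO),
# with values taken from the examples in the comments above:
#   USE_MATHJAX        = YES
#   MATHJAX_FORMAT     = HTML-CSS
#   MATHJAX_RELPATH    = ../mathjax
#   MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
# ../mathjax assumes a MathJax copy installed next to the HTML output
# directory, as recommended before deployment.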
|
||||
# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
|
||||
# the HTML output. The underlying search engine uses javascript and DHTML and
|
||||
# should work on any modern browser. Note that when using HTML help
|
||||
# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
|
||||
# there is already a search function so this one should typically be disabled.
|
||||
# For large projects the javascript based search engine can be slow, then
|
||||
# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to
|
||||
# search using the keyboard; to jump to the search box use <access key> + S
|
||||
# (what the <access key> is depends on the OS and browser, but it is typically
|
||||
# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down
|
||||
# key> to jump into the search results window, the results can be navigated
|
||||
# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel
|
||||
# the search. The filter options can be selected when the cursor is inside the
|
||||
# search box by pressing <Shift>+<cursor down>. Also here use the <cursor keys>
|
||||
# to select a filter and <Enter> or <escape> to activate or cancel the filter
|
||||
# option.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
SEARCHENGINE = NO
|
||||
|
||||
# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
|
||||
# implemented using a PHP enabled web server instead of at the web client
|
||||
# using Javascript. Doxygen will generate the search PHP script and index
|
||||
# file to put on the web server. The advantage of the server
|
||||
# based approach is that it scales better to large projects and allows
|
||||
# full text search. The disadvantages are that it is more difficult to setup
|
||||
# and does not have live searching capabilities.
|
||||
# implemented using a web server instead of a web client using Javascript. There
|
||||
# are two flavors of web server based searching depending on the EXTERNAL_SEARCH
|
||||
# setting. When disabled, doxygen will generate a PHP script for searching and
|
||||
# an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing
|
||||
# and searching needs to be provided by external tools. See the section
|
||||
# "External Indexing and Searching" for details.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag SEARCHENGINE is set to YES.
|
||||
|
||||
SERVER_BASED_SEARCH = NO
|
||||
|
||||
# When EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP
|
||||
# script for searching. Instead the search results are written to an XML file
|
||||
# which needs to be processed by an external indexer. Doxygen will invoke an
|
||||
# external search engine pointed to by the SEARCHENGINE_URL option to obtain the
|
||||
# search results.
|
||||
#
|
||||
# Doxygen ships with an example indexer (doxyindexer) and search engine
|
||||
# (doxysearch.cgi) which are based on the open source search engine library
|
||||
# Xapian (see: http://xapian.org/).
|
||||
#
|
||||
# See the section "External Indexing and Searching" for details.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag SEARCHENGINE is set to YES.
|
||||
|
||||
EXTERNAL_SEARCH = NO
|
||||
|
||||
# The SEARCHENGINE_URL should point to a search engine hosted by a web server
|
||||
# which will return the search results when EXTERNAL_SEARCH is enabled.
|
||||
#
|
||||
# Doxygen ships with an example indexer (doxyindexer) and search engine
|
||||
# (doxysearch.cgi) which are based on the open source search engine library
|
||||
# Xapian (see: http://xapian.org/). See the section "External Indexing and
|
||||
# Searching" for details.
|
||||
# This tag requires that the tag SEARCHENGINE is set to YES.
|
||||
|
||||
SEARCHENGINE_URL =
|
||||
|
||||
# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed
|
||||
# search data is written to a file for indexing by an external tool. With the
|
||||
# SEARCHDATA_FILE tag the name of this file can be specified.
|
||||
# The default file is: searchdata.xml.
|
||||
# This tag requires that the tag SEARCHENGINE is set to YES.
|
||||
|
||||
SEARCHDATA_FILE = searchdata.xml
|
||||
|
||||
# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the
|
||||
# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is
|
||||
# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple
|
||||
# projects and redirect the results back to the right project.
|
||||
# This tag requires that the tag SEARCHENGINE is set to YES.
|
||||
|
||||
EXTERNAL_SEARCH_ID =
|
||||
|
||||
# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen
|
||||
# projects other than the one defined by this configuration file, but that are
|
||||
# all added to the same external search index. Each project needs to have a
|
||||
# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id
|
||||
# to a relative location where the documentation can be found. The format is:
|
||||
# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ...
|
||||
# This tag requires that the tag SEARCHENGINE is set to YES.
|
||||
|
||||
EXTRA_SEARCH_MAPPINGS =
|
||||
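# Sketch of the external search chain described above; every value is an
# assumption, since searching is disabled here (SEARCHENGINE is NO):
#   SEARCHENGINE          = YES
#   SERVER_BASED_SEARCH   = YES
#   EXTERNAL_SEARCH       = YES
#   SEARCHENGINE_URL      = http://example.org/cgi-bin/doxysearch.cgi
#   EXTERNAL_SEARCH_ID    = myproject
#   EXTRA_SEARCH_MAPPINGS = otherproject=../other/html
# doxyindexer would then index the generated searchdata.xml, and
# doxysearch.cgi serves the results that doxygen links to.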
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the LaTeX output
|
||||
# Configuration options related to the LaTeX output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will
|
||||
# generate Latex output.
|
||||
# If the GENERATE_LATEX tag is set to YES, doxygen will generate LaTeX output.
|
||||
# The default value is: YES.
|
||||
|
||||
GENERATE_LATEX = YES
|
||||
GENERATE_LATEX = NO
|
||||
|
||||
# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `latex' will be used as the default path.
|
||||
# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a
|
||||
# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
|
||||
# it.
|
||||
# The default directory is: latex.
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
LATEX_OUTPUT = latex
|
||||
|
||||
# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
|
||||
# invoked. If left blank `latex' will be used as the default command name.
|
||||
# Note that when enabling USE_PDFLATEX this option is only used for
|
||||
# generating bitmaps for formulas in the HTML output, but not in the
|
||||
# Makefile that is written to the output directory.
|
||||
# invoked.
|
||||
#
|
||||
# Note that when enabling USE_PDFLATEX this option is only used for generating
|
||||
# bitmaps for formulas in the HTML output, but not in the Makefile that is
|
||||
# written to the output directory.
|
||||
# The default file is: latex.
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
LATEX_CMD_NAME = latex
|
||||
|
||||
# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to
|
||||
# generate index for LaTeX. If left blank `makeindex' will be used as the
|
||||
# default command name.
|
||||
# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate
|
||||
# index for LaTeX.
|
||||
# The default file is: makeindex.
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
MAKEINDEX_CMD_NAME = makeindex
|
||||
|
||||
# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact
|
||||
# LaTeX documents. This may be useful for small projects and may help to
|
||||
# save some trees in general.
|
||||
# If the COMPACT_LATEX tag is set to YES, doxygen generates more compact LaTeX
|
||||
# documents. This may be useful for small projects and may help to save some
|
||||
# trees in general.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
COMPACT_LATEX = YES
|
||||
COMPACT_LATEX = NO
|
||||
|
||||
# The PAPER_TYPE tag can be used to set the paper type that is used
|
||||
# by the printer. Possible values are: a4, letter, legal and
|
||||
# executive. If left blank a4wide will be used.
|
||||
# The PAPER_TYPE tag can be used to set the paper type that is used by the
|
||||
# printer.
|
||||
# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x
|
||||
# 14 inches) and executive (7.25 x 10.5 inches).
|
||||
# The default value is: a4.
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
PAPER_TYPE = a4
|
||||
|
||||
# The EXTRA_PACKAGES tag can be used to specify one or more names of LaTeX
|
||||
# packages that should be included in the LaTeX output.
|
||||
# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names
|
||||
# that should be included in the LaTeX output. The package can be specified just
|
||||
# by its name or with the correct syntax as to be used with the LaTeX
|
||||
# \usepackage command. To get the times font for instance you can specify :
|
||||
# EXTRA_PACKAGES=times or EXTRA_PACKAGES={times}
|
||||
# To use the option intlimits with the amsmath package you can specify:
|
||||
# EXTRA_PACKAGES=[intlimits]{amsmath}
|
||||
# If left blank no extra packages will be included.
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
EXTRA_PACKAGES =
|
||||
|
||||
# The LATEX_HEADER tag can be used to specify a personal LaTeX header for
|
||||
# the generated latex document. The header should contain everything until
|
||||
# the first chapter. If it is left blank doxygen will generate a
|
||||
# standard header. Notice: only use this tag if you know what you are doing!
|
||||
# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the
|
||||
# generated LaTeX document. The header should contain everything until the first
|
||||
# chapter. If it is left blank doxygen will generate a standard header. See
|
||||
# section "Doxygen usage" for information on how to let doxygen write the
|
||||
# default header to a separate file.
|
||||
#
|
||||
# Note: Only use a user-defined header if you know what you are doing! The
|
||||
# following commands have a special meaning inside the header: $title,
|
||||
# $datetime, $date, $doxygenversion, $projectname, $projectnumber,
|
||||
# $projectbrief, $projectlogo. Doxygen will replace $title with the empty
|
||||
# string, for the replacement values of the other commands the user is referred
|
||||
# to HTML_HEADER.
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
LATEX_HEADER =
|
||||
|
||||
# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated
|
||||
# is prepared for conversion to pdf (using ps2pdf). The pdf file will
|
||||
# contain links (just like the HTML output) instead of page references
|
||||
# This makes the output suitable for online browsing using a pdf viewer.
|
||||
# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the
|
||||
# generated LaTeX document. The footer should contain everything after the last
|
||||
# chapter. If it is left blank doxygen will generate a standard footer. See
|
||||
# LATEX_HEADER for more information on how to generate a default footer and what
|
||||
# special commands can be used inside the footer.
|
||||
#
|
||||
# Note: Only use a user-defined footer if you know what you are doing!
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
LATEX_FOOTER =
|
||||
|
||||
# The LATEX_EXTRA_STYLESHEET tag can be used to specify additional user-defined
|
||||
# LaTeX style sheets that are included after the standard style sheets created
|
||||
# by doxygen. Using this option one can overrule certain style aspects. Doxygen
|
||||
# will copy the style sheet files to the output directory.
|
||||
# Note: The order of the extra style sheet files is of importance (e.g. the last
|
||||
# style sheet in the list overrules the setting of the previous ones in the
|
||||
# list).
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
LATEX_EXTRA_STYLESHEET =
|
||||
|
||||
# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or
|
||||
# other source files which should be copied to the LATEX_OUTPUT output
|
||||
# directory. Note that the files will be copied as-is; there are no commands or
|
||||
# markers available.
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
LATEX_EXTRA_FILES =
|
||||
|
||||
# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is
|
||||
# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will
|
||||
# contain links (just like the HTML output) instead of page references. This
|
||||
# makes the output suitable for online browsing using a PDF viewer.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
PDF_HYPERLINKS = YES
|
||||
|
||||
# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of
|
||||
# plain latex in the generated Makefile. Set this option to YES to get a
|
||||
# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
|
||||
# the PDF file directly from the LaTeX files. Set this option to YES, to get a
|
||||
# higher quality PDF documentation.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
USE_PDFLATEX = YES
|
||||
|
||||
# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode.
|
||||
# command to the generated LaTeX files. This will instruct LaTeX to keep
|
||||
# running if errors occur, instead of asking the user for help.
|
||||
# This option is also used when generating formulas in HTML.
|
||||
# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode
|
||||
# command to the generated LaTeX files. This will instruct LaTeX to keep running
|
||||
# if errors occur, instead of asking the user for help. This option is also used
|
||||
# when generating formulas in HTML.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
LATEX_BATCHMODE = YES
|
||||
|
||||
# If LATEX_HIDE_INDICES is set to YES then doxygen will not
|
||||
# include the index chapters (such as File Index, Compound Index, etc.)
|
||||
# in the output.
|
||||
# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the
|
||||
# index chapters (such as File Index, Compound Index, etc.) in the output.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
LATEX_HIDE_INDICES = YES
|
||||
|
||||
# If LATEX_SOURCE_CODE is set to YES then doxygen will include
|
||||
# source code with syntax highlighting in the LaTeX output.
|
||||
# Note that which sources are shown also depends on other settings
|
||||
# such as SOURCE_BROWSER.
|
||||
# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source
|
||||
# code with syntax highlighting in the LaTeX output.
|
||||
#
|
||||
# Note that which sources are shown also depends on other settings such as
|
||||
# SOURCE_BROWSER.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
LATEX_SOURCE_CODE = NO
|
||||
|
||||
# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
|
||||
# bibliography, e.g. plainnat, or ieeetr. See
|
||||
# http://en.wikipedia.org/wiki/BibTeX and \cite for more info.
|
||||
# The default value is: plain.
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
LATEX_BIB_STYLE = plain
|
||||
|
||||
# If the LATEX_TIMESTAMP tag is set to YES then the footer of each generated
|
||||
# page will contain the date and time when the page was generated. Setting this
|
||||
# to NO can help when comparing the output of multiple runs.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
LATEX_TIMESTAMP = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the RTF output
|
||||
# Configuration options related to the RTF output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output
|
||||
# The RTF output is optimized for Word 97 and may not look very pretty with
|
||||
# other RTF readers or editors.
|
||||
# If the GENERATE_RTF tag is set to YES, doxygen will generate RTF output. The
|
||||
# RTF output is optimized for Word 97 and may not look too pretty with other RTF
|
||||
# readers/editors.
|
||||
# The default value is: NO.
|
||||
|
||||
GENERATE_RTF = NO
|
||||
|
||||
# The RTF_OUTPUT tag is used to specify where the RTF docs will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `rtf' will be used as the default path.
|
||||
# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a
|
||||
# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
|
||||
# it.
|
||||
# The default directory is: rtf.
|
||||
# This tag requires that the tag GENERATE_RTF is set to YES.
|
||||
|
||||
RTF_OUTPUT = rtf
|
||||
|
||||
# If the COMPACT_RTF tag is set to YES Doxygen generates more compact
|
||||
# RTF documents. This may be useful for small projects and may help to
|
||||
# save some trees in general.
|
||||
# If the COMPACT_RTF tag is set to YES, doxygen generates more compact RTF
|
||||
# documents. This may be useful for small projects and may help to save some
|
||||
# trees in general.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_RTF is set to YES.
|
||||
|
||||
COMPACT_RTF = YES
|
||||
|
||||
# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated
|
||||
# will contain hyperlink fields. The RTF file will
|
||||
# contain links (just like the HTML output) instead of page references.
|
||||
# This makes the output suitable for online browsing using WORD or other
|
||||
# programs which support those fields.
|
||||
# Note: wordpad (write) and others do not support links.
|
||||
# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will
|
||||
# contain hyperlink fields. The RTF file will contain links (just like the HTML
|
||||
# output) instead of page references. This makes the output suitable for online
|
||||
# browsing using Word or some other Word compatible readers that support those
|
||||
# fields.
|
||||
#
|
||||
# Note: WordPad (write) and others do not support links.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_RTF is set to YES.
|
||||
|
||||
RTF_HYPERLINKS = YES
|
||||
|
||||
# Load stylesheet definitions from file. Syntax is similar to doxygen's
|
||||
# config file, i.e. a series of assignments. You only have to provide
|
||||
# replacements, missing definitions are set to their default value.
|
||||
# Load stylesheet definitions from file. Syntax is similar to doxygen's config
|
||||
# file, i.e. a series of assignments. You only have to provide replacements,
|
||||
# missing definitions are set to their default value.
|
||||
#
|
||||
# See also section "Doxygen usage" for information on how to generate the
|
||||
# default style sheet that doxygen normally uses.
|
||||
# This tag requires that the tag GENERATE_RTF is set to YES.
|
||||
|
||||
RTF_STYLESHEET_FILE =
|
||||
|
||||
# Set optional variables used in the generation of an rtf document.
|
||||
# Syntax is similar to doxygen's config file.
|
||||
# Set optional variables used in the generation of an RTF document. Syntax is
|
||||
# similar to doxygen's config file. A template extensions file can be generated
|
||||
# using doxygen -e rtf extensionFile.
|
||||
# This tag requires that the tag GENERATE_RTF is set to YES.
|
||||
|
||||
RTF_EXTENSIONS_FILE =
|
||||
|
||||
# If the RTF_SOURCE_CODE tag is set to YES then doxygen will include source code
|
||||
# with syntax highlighting in the RTF output.
|
||||
#
|
||||
# Note that which sources are shown also depends on other settings such as
|
||||
# SOURCE_BROWSER.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_RTF is set to YES.
|
||||
|
||||
RTF_SOURCE_CODE = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the man page output
|
||||
# Configuration options related to the man page output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_MAN tag is set to YES (the default) Doxygen will
|
||||
# generate man pages
|
||||
# If the GENERATE_MAN tag is set to YES, doxygen will generate man pages for
|
||||
# classes and files.
|
||||
# The default value is: NO.
|
||||
|
||||
GENERATE_MAN = NO
|
||||
|
||||
# The MAN_OUTPUT tag is used to specify where the man pages will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `man' will be used as the default path.
|
||||
# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a
|
||||
# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
|
||||
# it. A directory man3 will be created inside the directory specified by
|
||||
# MAN_OUTPUT.
|
||||
# The default directory is: man.
|
||||
# This tag requires that the tag GENERATE_MAN is set to YES.
|
||||
|
||||
MAN_OUTPUT = man
|
||||
|
||||
# The MAN_EXTENSION tag determines the extension that is added to
|
||||
# the generated man pages (default is the subroutine's section .3)
|
||||
# The MAN_EXTENSION tag determines the extension that is added to the generated
|
||||
# man pages. In case the manual section does not start with a number, the number
|
||||
# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is
|
||||
# optional.
|
||||
# The default value is: .3.
|
||||
# This tag requires that the tag GENERATE_MAN is set to YES.
|
||||
|
||||
MAN_EXTENSION = .3
|
||||
|
||||
# If the MAN_LINKS tag is set to YES and Doxygen generates man output,
# then it will generate one additional man file for each entity
# documented in the real man page(s). These additional files
# only source the real man page, but without them the man command
# would be unable to find the correct page. The default is NO.
# The MAN_SUBDIR tag determines the name of the directory created within
# MAN_OUTPUT in which the man pages are placed. It defaults to man followed by
# MAN_EXTENSION with the initial . removed.
# This tag requires that the tag GENERATE_MAN is set to YES.

MAN_LINKS = YES
MAN_SUBDIR =

# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it
# will generate one additional man file for each entity documented in the real
# man page(s). These additional files only source the real man page, but without
# them the man command would be unable to find the correct page.
# The default value is: NO.
# This tag requires that the tag GENERATE_MAN is set to YES.

MAN_LINKS = NO

#---------------------------------------------------------------------------
|
||||
# configuration options related to the XML output
|
||||
# Configuration options related to the XML output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_XML tag is set to YES Doxygen will
|
||||
# generate an XML file that captures the structure of
|
||||
# the code including all documentation.
|
||||
# If the GENERATE_XML tag is set to YES, doxygen will generate an XML file that
|
||||
# captures the structure of the code including all documentation.
|
||||
# The default value is: NO.
|
||||
|
||||
GENERATE_XML = NO
|
||||
|
||||
# The XML_OUTPUT tag is used to specify where the XML pages will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be
|
||||
# put in front of it. If left blank `xml' will be used as the default path.
|
||||
# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a
|
||||
# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
|
||||
# it.
|
||||
# The default directory is: xml.
|
||||
# This tag requires that the tag GENERATE_XML is set to YES.
|
||||
|
||||
XML_OUTPUT = xml
|
||||
|
||||
# The XML_SCHEMA tag can be used to specify an XML schema,
|
||||
# which can be used by a validating XML parser to check the
|
||||
# syntax of the XML files.
|
||||
|
||||
XML_SCHEMA =
|
||||
|
||||
# The XML_DTD tag can be used to specify an XML DTD,
|
||||
# which can be used by a validating XML parser to check the
|
||||
# syntax of the XML files.
|
||||
|
||||
XML_DTD =
|
||||
|
||||
# If the XML_PROGRAMLISTING tag is set to YES Doxygen will
|
||||
# dump the program listings (including syntax highlighting
|
||||
# and cross-referencing information) to the XML output. Note that
|
||||
# enabling this will significantly increase the size of the XML output.
|
||||
# If the XML_PROGRAMLISTING tag is set to YES, doxygen will dump the program
|
||||
# listings (including syntax highlighting and cross-referencing information) to
|
||||
# the XML output. Note that enabling this will significantly increase the size
|
||||
# of the XML output.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag GENERATE_XML is set to YES.
|
||||
|
||||
XML_PROGRAMLISTING = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options for the AutoGen Definitions output
|
||||
# Configuration options related to the DOCBOOK output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will
|
||||
# generate an AutoGen Definitions (see autogen.sf.net) file
|
||||
# that captures the structure of the code including all
|
||||
# documentation. Note that this feature is still experimental
|
||||
# and incomplete at the moment.
|
||||
# If the GENERATE_DOCBOOK tag is set to YES, doxygen will generate Docbook files
|
||||
# that can be used to generate PDF.
|
||||
# The default value is: NO.
|
||||
|
||||
GENERATE_DOCBOOK = NO
|
||||
|
||||
# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put.
|
||||
# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in
|
||||
# front of it.
|
||||
# The default directory is: docbook.
|
||||
# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
|
||||
|
||||
DOCBOOK_OUTPUT = docbook
|
||||
|
||||
# If the DOCBOOK_PROGRAMLISTING tag is set to YES, doxygen will include the
|
||||
# program listings (including syntax highlighting and cross-referencing
|
||||
# information) to the DOCBOOK output. Note that enabling this will significantly
|
||||
# increase the size of the DOCBOOK output.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
|
||||
|
||||
DOCBOOK_PROGRAMLISTING = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options for the AutoGen Definitions output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_AUTOGEN_DEF tag is set to YES, doxygen will generate an
|
||||
# AutoGen Definitions (see http://autogen.sf.net) file that captures the
|
||||
# structure of the code including all documentation. Note that this feature is
|
||||
# still experimental and incomplete at the moment.
|
||||
# The default value is: NO.
|
||||
|
||||
GENERATE_AUTOGEN_DEF = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# configuration options related to the Perl module output
|
||||
# Configuration options related to the Perl module output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_PERLMOD tag is set to YES Doxygen will
|
||||
# generate a Perl module file that captures the structure of
|
||||
# the code including all documentation. Note that this
|
||||
# feature is still experimental and incomplete at the
|
||||
# moment.
|
||||
# If the GENERATE_PERLMOD tag is set to YES, doxygen will generate a Perl module
|
||||
# file that captures the structure of the code including all documentation.
|
||||
#
|
||||
# Note that this feature is still experimental and incomplete at the moment.
|
||||
# The default value is: NO.
|
||||
|
||||
GENERATE_PERLMOD = NO
|
||||
|
||||
# If the PERLMOD_LATEX tag is set to YES Doxygen will generate
|
||||
# the necessary Makefile rules, Perl scripts and LaTeX code to be able
|
||||
# to generate PDF and DVI output from the Perl module output.
|
||||
# If the PERLMOD_LATEX tag is set to YES, doxygen will generate the necessary
|
||||
# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI
|
||||
# output from the Perl module output.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_PERLMOD is set to YES.
|
||||
|
||||
PERLMOD_LATEX = NO
|
||||
|
||||
# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be
|
||||
# nicely formatted so it can be parsed by a human reader.
|
||||
# This is useful
|
||||
# if you want to understand what is going on.
|
||||
# On the other hand, if this
|
||||
# tag is set to NO the size of the Perl module output will be much smaller
|
||||
# and Perl will parse it just the same.
|
||||
# If the PERLMOD_PRETTY tag is set to YES, the Perl module output will be nicely
|
||||
# formatted so it can be parsed by a human reader. This is useful if you want to
|
||||
# understand what is going on. On the other hand, if this tag is set to NO, the
|
||||
# size of the Perl module output will be much smaller and Perl will parse it
|
||||
# just the same.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag GENERATE_PERLMOD is set to YES.
|
||||
|
||||
PERLMOD_PRETTY = YES
|
||||
|
||||
# The names of the make variables in the generated doxyrules.make file
|
||||
# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX.
|
||||
# This is useful so different doxyrules.make files included by the same
|
||||
# Makefile don't overwrite each other's variables.
|
||||
# The names of the make variables in the generated doxyrules.make file are
|
||||
# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful
|
||||
# so different doxyrules.make files included by the same Makefile don't
|
||||
# overwrite each other's variables.
|
||||
# This tag requires that the tag GENERATE_PERLMOD is set to YES.
|
||||
|
||||
PERLMOD_MAKEVAR_PREFIX =
|
||||
|
||||
@@ -1374,109 +2003,130 @@ PERLMOD_MAKEVAR_PREFIX =
|
||||
# Configuration options related to the preprocessor
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will
|
||||
# evaluate all C-preprocessor directives found in the sources and include
|
||||
# files.
|
||||
# If the ENABLE_PREPROCESSING tag is set to YES, doxygen will evaluate all
|
||||
# C-preprocessor directives found in the sources and include files.
|
||||
# The default value is: YES.
|
||||
|
||||
ENABLE_PREPROCESSING = YES
|
||||
|
||||
# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro
|
||||
# names in the source code. If set to NO (the default) only conditional
|
||||
# compilation will be performed. Macro expansion can be done in a controlled
|
||||
# way by setting EXPAND_ONLY_PREDEF to YES.
|
||||
# If the MACRO_EXPANSION tag is set to YES, doxygen will expand all macro names
|
||||
# in the source code. If set to NO, only conditional compilation will be
|
||||
# performed. Macro expansion can be done in a controlled way by setting
|
||||
# EXPAND_ONLY_PREDEF to YES.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
|
||||
|
||||
MACRO_EXPANSION = NO
|
||||
MACRO_EXPANSION = YES
|
||||
|
||||
# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES
|
||||
# then the macro expansion is limited to the macros specified with the
|
||||
# PREDEFINED and EXPAND_AS_DEFINED tags.
|
||||
# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then
|
||||
# the macro expansion is limited to the macros specified with the PREDEFINED and
|
||||
# EXPAND_AS_DEFINED tags.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
|
||||
|
||||
EXPAND_ONLY_PREDEF = NO
|
||||
|
||||
# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files
|
||||
# in the INCLUDE_PATH (see below) will be search if a #include is found.
|
||||
# If the SEARCH_INCLUDES tag is set to YES, the include files in the
|
||||
# INCLUDE_PATH will be searched if a #include is found.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
|
||||
|
||||
SEARCH_INCLUDES = YES
|
||||
|
||||
# The INCLUDE_PATH tag can be used to specify one or more directories that
|
||||
# contain include files that are not input files but should be processed by
|
||||
# the preprocessor.
|
||||
# contain include files that are not input files but should be processed by the
|
||||
# preprocessor.
|
||||
# This tag requires that the tag SEARCH_INCLUDES is set to YES.
|
||||
|
||||
INCLUDE_PATH =
|
||||
|
||||
# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
|
||||
# patterns (like *.h and *.hpp) to filter out the header-files in the
|
||||
# directories. If left blank, the patterns specified with FILE_PATTERNS will
|
||||
# be used.
|
||||
# directories. If left blank, the patterns specified with FILE_PATTERNS will be
|
||||
# used.
|
||||
# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
|
||||
|
||||
INCLUDE_FILE_PATTERNS =
|
||||
|
||||
# The PREDEFINED tag can be used to specify one or more macro names that
|
||||
# are defined before the preprocessor is started (similar to the -D option of
|
||||
# gcc). The argument of the tag is a list of macros of the form: name
|
||||
# or name=definition (no spaces). If the definition and the = are
|
||||
# omitted =1 is assumed. To prevent a macro definition from being
|
||||
# undefined via #undef or recursively expanded use the := operator
|
||||
# instead of the = operator.
|
||||
# The PREDEFINED tag can be used to specify one or more macro names that are
|
||||
# defined before the preprocessor is started (similar to the -D option of e.g.
|
||||
# gcc). The argument of the tag is a list of macros of the form: name or
|
||||
# name=definition (no spaces). If the definition and the "=" are omitted, "=1"
|
||||
# is assumed. To prevent a macro definition from being undefined via #undef or
|
||||
# recursively expanded use the := operator instead of the = operator.
|
||||
# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
|
||||
|
||||
PREDEFINED = HAVE_STACKTRACE
|
||||
PREDEFINED =
|
||||
PREDEFINED += "NAMESPACE=@PACKAGE_NAME@"
|
||||
|
||||
# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then
|
||||
# this tag can be used to specify a list of macro names that should be expanded.
|
||||
# The macro definition that is found in the sources will be used.
|
||||
# Use the PREDEFINED tag if you want to use a different macro definition that overrules the definition found in the source code.
|
||||
# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
|
||||
# tag can be used to specify a list of macro names that should be expanded. The
|
||||
# macro definition that is found in the sources will be used. Use the PREDEFINED
|
||||
# tag if you want to use a different macro definition that overrules the
|
||||
# definition found in the source code.
|
||||
# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
|
||||
|
||||
EXPAND_AS_DEFINED =
|
||||
|
||||
# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then
|
||||
# doxygen's preprocessor will remove all references to function-like macros
|
||||
# that are alone on a line, have an all uppercase name, and do not end with a
|
||||
# semicolon, because these will confuse the parser if not removed.
|
||||
# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will
|
||||
# remove all references to function-like macros that are alone on a line, have
|
||||
# an all uppercase name, and do not end with a semicolon. Such function macros
|
||||
# are typically used for boiler-plate code, and will confuse the parser if not
|
||||
# removed.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
|
||||
|
||||
SKIP_FUNCTION_MACROS = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration::additions related to external references
|
||||
# Configuration options related to external references
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# The TAGFILES option can be used to specify one or more tagfiles.
|
||||
# Optionally an initial location of the external documentation
|
||||
# can be added for each tagfile. The format of a tag file without
|
||||
# this location is as follows:
|
||||
#
|
||||
# The TAGFILES tag can be used to specify one or more tag files. For each tag
|
||||
# file the location of the external documentation should be added. The format of
|
||||
# a tag file without this location is as follows:
|
||||
# TAGFILES = file1 file2 ...
|
||||
# Adding location for the tag files is done as follows:
|
||||
#
|
||||
# TAGFILES = file1=loc1 "file2 = loc2" ...
|
||||
# where "loc1" and "loc2" can be relative or absolute paths or
|
||||
# URLs. If a location is present for each tag, the installdox tool
|
||||
# does not have to be run to correct the links.
|
||||
# Note that each tag file must have a unique name
|
||||
# (where the name does NOT include the path)
|
||||
# If a tag file is not located in the directory in which doxygen
|
||||
# is run, you must also specify the path to the tagfile here.
|
||||
# where loc1 and loc2 can be relative or absolute paths or URLs. See the
|
||||
# section "Linking to external documentation" for more information about the use
|
||||
# of tag files.
|
||||
# Note: Each tag file must have a unique name (where the name does NOT include
|
||||
# the path). If a tag file is not located in the directory in which doxygen is
|
||||
# run, you must also specify the path to the tagfile here.
|
||||
|
||||
TAGFILES =
|
||||
|
||||
# When a file name is specified after GENERATE_TAGFILE, doxygen will create
|
||||
# a tag file that is based on the input files it reads.
|
||||
# When a file name is specified after GENERATE_TAGFILE, doxygen will create a
|
||||
# tag file that is based on the input files it reads. See section "Linking to
|
||||
# external documentation" for more information about the usage of tag files.
|
||||
|
||||
GENERATE_TAGFILE = @PACKAGENAME@.doxytag
|
||||
GENERATE_TAGFILE = @PACKAGE_NAME@.doxytag
|
||||
|
||||
# If the ALLEXTERNALS tag is set to YES all external classes will be listed
|
||||
# in the class index. If set to NO only the inherited external classes
|
||||
# will be listed.
|
||||
# If the ALLEXTERNALS tag is set to YES, all external class will be listed in
|
||||
# the class index. If set to NO, only the inherited external classes will be
|
||||
# listed.
|
||||
# The default value is: NO.
|
||||
|
||||
ALLEXTERNALS = NO
|
||||
|
||||
# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed
|
||||
# in the modules index. If set to NO, only the current project's groups will
|
||||
# be listed.
|
||||
# If the EXTERNAL_GROUPS tag is set to YES, all external groups will be listed
|
||||
# in the modules index. If set to NO, only the current project's groups will be
|
||||
# listed.
|
||||
# The default value is: YES.
|
||||
|
||||
EXTERNAL_GROUPS = YES
|
||||
|
||||
# If the EXTERNAL_PAGES tag is set to YES, all external pages will be listed in
|
||||
# the related pages index. If set to NO, only the current project's pages will
|
||||
# be listed.
|
||||
# The default value is: YES.
|
||||
|
||||
EXTERNAL_PAGES = YES
|
||||
|
||||
# The PERL_PATH should be the absolute path and name of the perl script
|
||||
# interpreter (i.e. the result of `which perl').
|
||||
# interpreter (i.e. the result of 'which perl').
|
||||
# The default file (with absolute path) is: /usr/bin/perl.
|
||||
|
||||
PERL_PATH = /usr/bin/perl
|
||||
|
||||
@@ -1484,204 +2134,319 @@ PERL_PATH = /usr/bin/perl
|
||||
# Configuration options related to the dot tool
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will
|
||||
# generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base
|
||||
# or super classes. Setting the tag to NO turns the diagrams off. Note that
|
||||
# this option also works with HAVE_DOT disabled, but it is recommended to
|
||||
# install and use dot, since it yields more powerful graphs.
|
||||
# If the CLASS_DIAGRAMS tag is set to YES, doxygen will generate a class diagram
|
||||
# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to
|
||||
# NO turns the diagrams off. Note that this option also works with HAVE_DOT
|
||||
# disabled, but it is recommended to install and use dot, since it yields more
|
||||
# powerful graphs.
|
||||
# The default value is: YES.
|
||||
|
||||
CLASS_DIAGRAMS = YES
|
||||
|
||||
# You can define message sequence charts within doxygen comments using the \msc
|
||||
# command. Doxygen will then run the mscgen tool (see
|
||||
# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the
|
||||
# command. Doxygen will then run the mscgen tool (see:
|
||||
# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the
|
||||
# documentation. The MSCGEN_PATH tag allows you to specify the directory where
|
||||
# the mscgen tool resides. If left empty the tool is assumed to be found in the
|
||||
# default search path.
|
||||
|
||||
MSCGEN_PATH =
|
||||
|
||||
# If set to YES, the inheritance and collaboration graphs will hide
|
||||
# inheritance and usage relations if the target is undocumented
|
||||
# or is not a class.
|
||||
# You can include diagrams made with dia in doxygen documentation. Doxygen will
|
||||
# then run dia to produce the diagram and insert it in the documentation. The
|
||||
# DIA_PATH tag allows you to specify the directory where the dia binary resides.
|
||||
# If left empty dia is assumed to be found in the default search path.
|
||||
|
||||
DIA_PATH =
|
||||
|
||||
# If set to YES the inheritance and collaboration graphs will hide inheritance
|
||||
# and usage relations if the target is undocumented or is not a class.
|
||||
# The default value is: YES.
|
||||
|
||||
HIDE_UNDOC_RELATIONS = NO
|
||||
|
||||
# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
|
||||
# available from the path. This tool is part of Graphviz, a graph visualization
|
||||
# toolkit from AT&T and Lucent Bell Labs. The other options in this section
|
||||
# have no effect if this option is set to NO (the default)
|
||||
# available from the path. This tool is part of Graphviz (see:
|
||||
# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
|
||||
# Bell Labs. The other options in this section have no effect if this option is
|
||||
# set to NO
|
||||
# The default value is: YES.
|
||||
|
||||
HAVE_DOT = @HAVE_DOT@
|
||||
HAVE_DOT = YES
|
||||
|
||||
# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is
|
||||
# allowed to run in parallel. When set to 0 (the default) doxygen will
|
||||
# base this on the number of processors available in the system. You can set it
|
||||
# explicitly to a value larger than 0 to get control over the balance
|
||||
# between CPU load and processing speed.
|
||||
# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed
|
||||
# to run in parallel. When set to 0 doxygen will base this on the number of
|
||||
# processors available in the system. You can set it explicitly to a value
|
||||
# larger than 0 to get control over the balance between CPU load and processing
|
||||
# speed.
|
||||
# Minimum value: 0, maximum value: 32, default value: 0.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
DOT_NUM_THREADS = 0
|
||||
|
||||
# By default doxygen will write a font called Helvetica to the output
|
||||
# directory and reference it in all dot files that doxygen generates.
|
||||
# When you want a differently looking font you can specify the font name
|
||||
# using DOT_FONTNAME. You need to make sure dot is able to find the font,
|
||||
# which can be done by putting it in a standard location or by setting the
|
||||
# DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory
|
||||
# containing the font.
|
||||
# When you want a differently looking font in the dot files that doxygen
|
||||
# generates you can specify the font name using DOT_FONTNAME. You need to make
|
||||
# sure dot is able to find the font, which can be done by putting it in a
|
||||
# standard location or by setting the DOTFONTPATH environment variable or by
|
||||
# setting DOT_FONTPATH to the directory containing the font.
|
||||
# The default value is: Helvetica.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
DOT_FONTNAME = Helvetica
|
||||
|
||||
# The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs.
|
||||
# The default size is 10pt.
|
||||
# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of
|
||||
# dot graphs.
|
||||
# Minimum value: 4, maximum value: 24, default value: 10.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
DOT_FONTSIZE = 10
|
||||
|
||||
# By default doxygen will tell dot to use the output directory to look for the
|
||||
# FreeSans.ttf font (which doxygen will put there itself). If you specify a
|
||||
# different font using DOT_FONTNAME you can set the path where dot
|
||||
# can find it using this tag.
|
||||
# By default doxygen will tell dot to use the default font as specified with
|
||||
# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set
|
||||
# the path where dot can find it using this tag.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
DOT_FONTPATH =
|
||||
|
||||
# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen
|
||||
# will generate a graph for each documented class showing the direct and
|
||||
# indirect inheritance relations. Setting this tag to YES will force the
|
||||
# the CLASS_DIAGRAMS tag to NO.
|
||||
# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for
|
||||
# each documented class showing the direct and indirect inheritance relations.
|
||||
# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
CLASS_GRAPH = YES
|
||||
|
||||
# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen
|
||||
# will generate a graph for each documented class showing the direct and
|
||||
# indirect implementation dependencies (inheritance, containment, and
|
||||
# class references variables) of the class with other documented classes.
|
||||
# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a
|
||||
# graph for each documented class showing the direct and indirect implementation
|
||||
# dependencies (inheritance, containment, and class references variables) of the
|
||||
# class with other documented classes.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
COLLABORATION_GRAPH = NO
|
||||
COLLABORATION_GRAPH = YES
|
||||
|
||||
# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen
|
||||
# will generate a graph for groups, showing the direct groups dependencies
|
||||
# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for
|
||||
# groups, showing the direct groups dependencies.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
GROUP_GRAPHS = YES
|
||||
|
||||
# If the UML_LOOK tag is set to YES doxygen will generate inheritance and
|
||||
# If the UML_LOOK tag is set to YES, doxygen will generate inheritance and
|
||||
# collaboration diagrams in a style similar to the OMG's Unified Modeling
|
||||
# Language.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
UML_LOOK = NO
|
||||
|
||||
# If set to YES, the inheritance and collaboration graphs will show the
|
||||
# relations between templates and their instances.
|
||||
# If the UML_LOOK tag is enabled, the fields and methods are shown inside the
|
||||
# class node. If there are many fields or methods and many nodes the graph may
|
||||
# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the
|
||||
# number of items for each type to make the size more manageable. Set this to 0
|
||||
# for no limit. Note that the threshold may be exceeded by 50% before the limit
|
||||
# is enforced. So when you set the threshold to 10, up to 15 fields may appear,
|
||||
# but if the number exceeds 15, the total amount of fields shown is limited to
|
||||
# 10.
|
||||
# Minimum value: 0, maximum value: 100, default value: 10.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
UML_LIMIT_NUM_FIELDS = 10
|
||||
|
||||
# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
|
||||
# collaboration graphs will show the relations between templates and their
|
||||
# instances.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
TEMPLATE_RELATIONS = YES
|
||||
|
||||
# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT
|
||||
# tags are set to YES then doxygen will generate a graph for each documented
|
||||
# file showing the direct and indirect include dependencies of the file with
|
||||
# other documented files.
|
||||
# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to
|
||||
# YES then doxygen will generate a graph for each documented file showing the
|
||||
# direct and indirect include dependencies of the file with other documented
|
||||
# files.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
INCLUDE_GRAPH = NO
|
||||
INCLUDE_GRAPH = YES
|
||||
|
||||
# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and
|
||||
# HAVE_DOT tags are set to YES then doxygen will generate a graph for each
|
||||
# documented header file showing the documented files that directly or
|
||||
# indirectly include this file.
|
||||
# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are
|
||||
# set to YES then doxygen will generate a graph for each documented file showing
|
||||
# the direct and indirect include dependencies of the file with other documented
|
||||
# files.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
INCLUDED_BY_GRAPH = NO
|
||||
INCLUDED_BY_GRAPH = YES
|
||||
|
||||
# If the CALL_GRAPH and HAVE_DOT options are set to YES then
|
||||
# doxygen will generate a call dependency graph for every global function
|
||||
# or class method. Note that enabling this option will significantly increase
|
||||
# the time of a run. So in most cases it will be better to enable call graphs
|
||||
# for selected functions only using the \callgraph command.
|
||||
# If the CALL_GRAPH tag is set to YES then doxygen will generate a call
|
||||
# dependency graph for every global function or class method.
|
||||
#
|
||||
# Note that enabling this option will significantly increase the time of a run.
|
||||
# So in most cases it will be better to enable call graphs for selected
|
||||
# functions only using the \callgraph command. Disabling a call graph can be
|
||||
# accomplished by means of the command \hidecallgraph.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
CALL_GRAPH = NO
|
||||
|
||||
# If the CALLER_GRAPH and HAVE_DOT tags are set to YES then
|
||||
# doxygen will generate a caller dependency graph for every global function
|
||||
# or class method. Note that enabling this option will significantly increase
|
||||
# the time of a run. So in most cases it will be better to enable caller
|
||||
# graphs for selected functions only using the \callergraph command.
|
||||
# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller
|
||||
# dependency graph for every global function or class method.
|
||||
#
|
||||
# Note that enabling this option will significantly increase the time of a run.
|
||||
# So in most cases it will be better to enable caller graphs for selected
|
||||
# functions only using the \callergraph command. Disabling a caller graph can be
|
||||
# accomplished by means of the command \hidecallergraph.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
CALLER_GRAPH = NO
|
||||
|
||||
# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen
|
||||
# will generate a graphical hierarchy of all classes instead of a textual one.
|
||||
# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will show a graphical
|
||||
# hierarchy of all classes instead of a textual one.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
GRAPHICAL_HIERARCHY = YES
|
||||
|
||||
# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES
|
||||
# then doxygen will show the dependencies a directory has on other directories
|
||||
# in a graphical way. The dependency relations are determined by the #include
|
||||
# relations between the files in the directories.
|
||||
# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the
|
||||
# dependencies a directory has on other directories in a graphical way. The
|
||||
# dependency relations are determined by the #include relations between the
|
||||
# files in the directories.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
DIRECTORY_GRAPH = YES
|
||||
|
||||
# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
|
||||
# generated by dot. Possible values are png, svg, gif or jpg.
|
||||
# If left blank png will be used.
|
||||
# generated by dot. For an explanation of the image formats see the section
|
||||
# output formats in the documentation of the dot tool (Graphviz (see:
|
||||
# http://www.graphviz.org/)).
|
||||
# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order
|
||||
# to make the SVG files visible in IE 9+ (other browsers do not have this
|
||||
# requirement).
|
||||
# Possible values are: png, png:cairo, png:cairo:cairo, png:cairo:gd, png:gd,
|
||||
# png:gd:gd, jpg, jpg:cairo, jpg:cairo:gd, jpg:gd, jpg:gd:gd, gif, gif:cairo,
|
||||
# gif:cairo:gd, gif:gd, gif:gd:gd, svg, png:gd, png:gd:gd, png:cairo,
|
||||
# png:cairo:gd, png:cairo:cairo, png:cairo:gdiplus, png:gdiplus and
|
||||
# png:gdiplus:gdiplus.
|
||||
# The default value is: png.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
DOT_IMAGE_FORMAT = png
|
||||
DOT_IMAGE_FORMAT = svg
|
||||
|
||||
# The tag DOT_PATH can be used to specify the path where the dot tool can be
|
||||
# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to
|
||||
# enable generation of interactive SVG images that allow zooming and panning.
|
||||
#
|
||||
# Note that this requires a modern browser other than Internet Explorer. Tested
|
||||
# and working are Firefox, Chrome, Safari, and Opera.
|
||||
# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make
|
||||
# the SVG files visible. Older versions of IE do not have SVG support.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
INTERACTIVE_SVG = NO
|
||||
|
||||
# The DOT_PATH tag can be used to specify the path where the dot tool can be
|
||||
# found. If left blank, it is assumed the dot tool can be found in the path.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
DOT_PATH =
|
||||
|
||||
# The DOTFILE_DIRS tag can be used to specify one or more directories that
|
||||
# contain dot files that are included in the documentation (see the
|
||||
# \dotfile command).
|
||||
# contain dot files that are included in the documentation (see the \dotfile
|
||||
# command).
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
DOTFILE_DIRS =
|
||||
|
||||
# The MSCFILE_DIRS tag can be used to specify one or more directories that
|
||||
# contain msc files that are included in the documentation (see the
|
||||
# \mscfile command).
|
||||
# contain msc files that are included in the documentation (see the \mscfile
|
||||
# command).
|
||||
|
||||
MSCFILE_DIRS =
|
||||
|
||||
# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of
|
||||
# nodes that will be shown in the graph. If the number of nodes in a graph
|
||||
# becomes larger than this value, doxygen will truncate the graph, which is
|
||||
# visualized by representing a node as a red box. Note that if the
|
||||
# number of direct children of the root node in a graph is already larger than
|
||||
# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note
|
||||
# that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
|
||||
# The DIAFILE_DIRS tag can be used to specify one or more directories that
|
||||
# contain dia files that are included in the documentation (see the \diafile
|
||||
# command).
|
||||
|
||||
DIAFILE_DIRS =
|
||||
|
||||
# When using plantuml, the PLANTUML_JAR_PATH tag should be used to specify the
|
||||
# path where java can find the plantuml.jar file. If left blank, it is assumed
|
||||
# PlantUML is not used or called during a preprocessing step. Doxygen will
|
||||
# generate a warning when it encounters a \startuml command in this case and
|
||||
# will not generate output for the diagram.
|
||||
|
||||
PLANTUML_JAR_PATH = "@top_srcdir@/doc/plantuml.jar"
|
||||
|
||||
# When using plantuml, the PLANTUML_CFG_FILE tag can be used to specify a
|
||||
# configuration file for plantuml.
|
||||
|
||||
PLANTUML_CFG_FILE =
|
||||
|
||||
# When using plantuml, the specified paths are searched for files specified by
|
||||
# the !include statement in a plantuml block.
|
||||
|
||||
PLANTUML_INCLUDE_PATH =
|
||||
|
||||
# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes
|
||||
# that will be shown in the graph. If the number of nodes in a graph becomes
|
||||
# larger than this value, doxygen will truncate the graph, which is visualized
|
||||
# by representing a node as a red box. Note that if the number of direct
|
||||
# children of the root node in a graph is already larger than
|
||||
# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note that
|
||||
# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
|
||||
# Minimum value: 0, maximum value: 10000, default value: 50.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
DOT_GRAPH_MAX_NODES = 50
|
||||
|
||||
# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the
|
||||
# graphs generated by dot. A depth value of 3 means that only nodes reachable
|
||||
# from the root by following a path via at most 3 edges will be shown. Nodes
|
||||
# that lay further from the root node will be omitted. Note that setting this
|
||||
# option to 1 or 2 may greatly reduce the computation time needed for large
|
||||
# code bases. Also note that the size of a graph can be further restricted by
|
||||
# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs
|
||||
# generated by dot. A depth value of 3 means that only nodes reachable from the
|
||||
# root by following a path via at most 3 edges will be shown. Nodes that lay
|
||||
# further from the root node will be omitted. Note that setting this option to 1
|
||||
# or 2 may greatly reduce the computation time needed for large code bases. Also
|
||||
# note that the size of a graph can be further restricted by
|
||||
# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
|
||||
# Minimum value: 0, maximum value: 1000, default value: 0.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
MAX_DOT_GRAPH_DEPTH = 0
|
||||
|
||||
# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
|
||||
# background. This is disabled by default, because dot on Windows does not
|
||||
# seem to support this out of the box. Warning: Depending on the platform used,
|
||||
# enabling this option may lead to badly anti-aliased labels on the edges of
|
||||
# a graph (i.e. they become hard to read).
|
||||
# background. This is disabled by default, because dot on Windows does not seem
|
||||
# to support this out of the box.
|
||||
#
|
||||
# Warning: Depending on the platform used, enabling this option may lead to
|
||||
# badly anti-aliased labels on the edges of a graph (i.e. they become hard to
|
||||
# read).
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
DOT_TRANSPARENT = YES
|
||||
|
||||
# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
|
||||
# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
|
||||
# files in one run (i.e. multiple -o and -T options on the command line). This
|
||||
# makes dot run faster, but since only newer versions of dot (>1.8.10)
|
||||
# support this, this feature is disabled by default.
|
||||
# makes dot run faster, but since only newer versions of dot (>1.8.10) support
|
||||
# this, this feature is disabled by default.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
DOT_MULTI_TARGETS = NO
|
||||
|
||||
# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will
|
||||
# generate a legend page explaining the meaning of the various boxes and
|
||||
# arrows in the dot generated graphs.
|
||||
# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page
|
||||
# explaining the meaning of the various boxes and arrows in the dot generated
|
||||
# graphs.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
GENERATE_LEGEND = YES
|
||||
|
||||
# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will
|
||||
# remove the intermediate dot files that are used to generate
|
||||
# the various graphs.
|
||||
# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate dot
|
||||
# files that are used to generate the various graphs.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
DOT_CLEANUP = YES
|
||||
|
@@ -1,184 +0,0 @@
|
||||
<doxygenlayout version="1.0">
|
||||
<!-- Navigation index tabs for HTML output -->
|
||||
<navindex>
|
||||
<tab type="mainpage" visible="yes" title=""/>
|
||||
<tab type="pages" visible="yes" title="" intro=""/>
|
||||
<tab type="modules" visible="yes" title="" intro=""/>
|
||||
<tab type="namespaces" visible="yes" title="">
|
||||
<tab type="namespaces" visible="yes" title="" intro=""/>
|
||||
<tab type="namespacemembers" visible="yes" title="" intro=""/>
|
||||
</tab>
|
||||
<tab type="classes" visible="yes" title="">
|
||||
<tab type="classes" visible="yes" title="" intro=""/>
|
||||
<tab type="classindex" visible="$ALPHABETICAL_INDEX" title=""/>
|
||||
<tab type="hierarchy" visible="yes" title="" intro=""/>
|
||||
<tab type="classmembers" visible="yes" title="" intro=""/>
|
||||
</tab>
|
||||
<tab type="files" visible="yes" title="">
|
||||
<tab type="files" visible="yes" title="" intro=""/>
|
||||
<tab type="globals" visible="yes" title="" intro=""/>
|
||||
</tab>
|
||||
<tab type="dirs" visible="yes" title="" intro=""/>
|
||||
<tab type="examples" visible="yes" title="" intro=""/>
|
||||
</navindex>
|
||||
|
||||
<!-- Layout definition for a class page -->
|
||||
<class>
|
||||
<briefdescription visible="yes"/>
|
||||
<includes visible="$SHOW_INCLUDE_FILES"/>
|
||||
<inheritancegraph visible="$CLASS_GRAPH"/>
|
||||
<collaborationgraph visible="$COLLABORATION_GRAPH"/>
|
||||
<allmemberslink visible="yes"/>
|
||||
<memberdecl>
|
||||
<nestedclasses visible="yes" title=""/>
|
||||
<publictypes title=""/>
|
||||
<publicslots title=""/>
|
||||
<signals title=""/>
|
||||
<publicmethods title=""/>
|
||||
<publicstaticmethods title=""/>
|
||||
<publicattributes title=""/>
|
||||
<publicstaticattributes title=""/>
|
||||
<protectedtypes title=""/>
|
||||
<protectedslots title=""/>
|
||||
<protectedmethods title=""/>
|
||||
<protectedstaticmethods title=""/>
|
||||
<protectedattributes title=""/>
|
||||
<protectedstaticattributes title=""/>
|
||||
<packagetypes title=""/>
|
||||
<packagemethods title=""/>
|
||||
<packagestaticmethods title=""/>
|
||||
<packageattributes title=""/>
|
||||
<packagestaticattributes title=""/>
|
||||
<properties title=""/>
|
||||
<events title=""/>
|
||||
<privatetypes title=""/>
|
||||
<privateslots title=""/>
|
||||
<privatemethods title=""/>
|
||||
<privatestaticmethods title=""/>
|
||||
<privateattributes title=""/>
|
||||
<privatestaticattributes title=""/>
|
||||
<friends title=""/>
|
||||
<related title="" subtitle=""/>
|
||||
<membergroups visible="yes"/>
|
||||
</memberdecl>
|
||||
<detaileddescription title=""/>
|
||||
<memberdef>
|
||||
<typedefs title=""/>
|
||||
<enums title=""/>
|
||||
<constructors title=""/>
|
||||
<functions title=""/>
|
||||
<related title=""/>
|
||||
<variables title=""/>
|
||||
<properties title=""/>
|
||||
<events title=""/>
|
||||
</memberdef>
|
||||
<usedfiles visible="$SHOW_USED_FILES"/>
|
||||
<authorsection visible="yes"/>
|
||||
</class>
|
||||
|
||||
<!-- Layout definition for a namespace page -->
|
||||
<namespace>
|
||||
<briefdescription visible="yes"/>
|
||||
<memberdecl>
|
||||
<nestednamespaces visible="yes" title=""/>
|
||||
<classes visible="yes" title=""/>
|
||||
<typedefs title=""/>
|
||||
<enums title=""/>
|
||||
<functions title=""/>
|
||||
<variables title=""/>
|
||||
<membergroups visible="yes"/>
|
||||
</memberdecl>
|
||||
<detaileddescription title=""/>
|
||||
<memberdef>
|
||||
<typedefs title=""/>
|
||||
<enums title=""/>
|
||||
<functions title=""/>
|
||||
<variables title=""/>
|
||||
</memberdef>
|
||||
<authorsection visible="yes"/>
|
||||
</namespace>
|
||||
|
||||
<!-- Layout definition for a file page -->
|
||||
<file>
|
||||
<briefdescription visible="yes"/>
|
||||
<includes visible="$SHOW_INCLUDE_FILES"/>
|
||||
<includegraph visible="$INCLUDE_GRAPH"/>
|
||||
<includedbygraph visible="$INCLUDED_BY_GRAPH"/>
|
||||
<sourcelink visible="yes"/>
|
||||
<memberdecl>
|
||||
<classes visible="yes" title=""/>
|
||||
<namespaces visible="yes" title=""/>
|
||||
<defines title=""/>
|
||||
<typedefs title=""/>
|
||||
<enums title=""/>
|
||||
<functions title=""/>
|
||||
<variables title=""/>
|
||||
<membergroups visible="yes"/>
|
||||
</memberdecl>
|
||||
<detaileddescription title=""/>
|
||||
<memberdef>
|
||||
<defines title=""/>
|
||||
<typedefs title=""/>
|
||||
<enums title=""/>
|
||||
<functions title=""/>
|
||||
<variables title=""/>
|
||||
</memberdef>
|
||||
<authorsection/>
|
||||
</file>
|
||||
|
||||
<!-- Layout definition for a group page -->
|
||||
<group>
|
||||
<briefdescription visible="yes"/>
|
||||
<groupgraph visible="$GROUP_GRAPHS"/>
|
||||
<memberdecl>
|
||||
<classes visible="yes" title=""/>
|
||||
<namespaces visible="yes" title=""/>
|
||||
<dirs visible="yes" title=""/>
|
||||
<nestedgroups visible="yes" title=""/>
|
||||
<files visible="yes" title=""/>
|
||||
<defines title=""/>
|
||||
<typedefs title=""/>
|
||||
<enums title=""/>
|
||||
<enumvalues title=""/>
|
||||
<functions title=""/>
|
||||
<variables title=""/>
|
||||
<signals title=""/>
|
||||
<publicslots title=""/>
|
||||
<protectedslots title=""/>
|
||||
<privateslots title=""/>
|
||||
<events title=""/>
|
||||
<properties title=""/>
|
||||
<friends title=""/>
|
||||
<membergroups visible="yes"/>
|
||||
</memberdecl>
|
||||
<detaileddescription title=""/>
|
||||
<memberdef>
|
||||
<pagedocs/>
|
||||
<defines title=""/>
|
||||
<typedefs title=""/>
|
||||
<enums title=""/>
|
||||
<enumvalues title=""/>
|
||||
<functions title=""/>
|
||||
<variables title=""/>
|
||||
<signals title=""/>
|
||||
<publicslots title=""/>
|
||||
<protectedslots title=""/>
|
||||
<privateslots title=""/>
|
||||
<events title=""/>
|
||||
<properties title=""/>
|
||||
<friends title=""/>
|
||||
</memberdef>
|
||||
<authorsection visible="yes"/>
|
||||
</group>
|
||||
|
||||
<!-- Layout definition for a directory page -->
|
||||
<directory>
|
||||
<briefdescription visible="yes"/>
|
||||
<directorygraph visible="yes"/>
|
||||
<memberdecl>
|
||||
<dirs visible="yes"/>
|
||||
<files visible="yes"/>
|
||||
</memberdecl>
|
||||
<detaileddescription title=""/>
|
||||
</directory>
|
||||
</doxygenlayout>
|
8
doc/footer.html.in
Normal file
@@ -0,0 +1,8 @@
|
||||
<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
|
||||
<ul>
|
||||
$navpath
|
||||
<li class="footer"><a href="@AUTHOR_URL@" target="_blank">@AUTHOR_NAME@</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
33
doc/header.html.in
Normal file
@@ -0,0 +1,33 @@
|
||||
<!-- HTML header for doxygen 1.8.6-->
|
||||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
|
||||
<html xmlns="http://www.w3.org/1999/xhtml">
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
|
||||
<meta name="generator" content="Doxygen $doxygenversion"/>
|
||||
<!--BEGIN PROJECT_NAME--><title>$projectname: $title</title><!--END PROJECT_NAME-->
|
||||
<!--BEGIN !PROJECT_NAME--><title>$title</title><!--END !PROJECT_NAME-->
|
||||
<link href="$relpath^tabs.css" rel="stylesheet" type="text/css"/>
|
||||
<script type="text/javascript" src="$relpath^jquery.js"></script>
|
||||
<script type="text/javascript" src="$relpath^dynsections.js"></script>
|
||||
$treeview
|
||||
$search
|
||||
$mathjax
|
||||
<link href="$relpath^$stylesheet" rel="stylesheet" type="text/css" />
|
||||
$extrastylesheet
|
||||
</head>
|
||||
<body>
|
||||
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
|
||||
|
||||
<div id="titlearea">
|
||||
<div id="projecthead">
|
||||
<div id="projectlogo"><img alt="" src="$relpath^$projectlogo"/></div>
|
||||
<div id="projectname">$projectname</span> <span id="projectnumber">$projectnumber</div>
|
||||
<div id="projectbrief">$projectbrief</div>
|
||||
</div>
|
||||
<nav>
|
||||
<a href="@PROJECT_URL@" target="_blank">Project Management</a>
|
||||
<a href="@SOURCE_DOWNLOAD@" target="_blank">Download</a>
|
||||
<div>$searchbox</div>
|
||||
</nav>
|
||||
</div>
|
@@ -1,47 +1,11 @@
|
||||
## @file
|
||||
## @id $Id$
|
||||
##
|
||||
## $Id: makefile.am 10 2009-06-17 12:15:51Z marwae $
|
||||
## This file has been added:
|
||||
## - by bootstrap.sh
|
||||
## - on Fri, 23 November 2018 15:32:44 +0100
|
||||
## Feel free to change it or even remove and rebuild it, up to your needs
|
||||
##
|
||||
## 1 2 3 4 5 6 7 8
|
||||
## 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
ALL_SRC = ${top_srcdir}/src/*.[ch]xx
|
||||
# ${top_srcdir}/src/*.doc
|
||||
|
||||
DIRS = html
|
||||
#latex
|
||||
|
||||
all: ${DIRS}
|
||||
|
||||
.PHONY: doc clean-local distclean-local dist-hook install-data-hook \
|
||||
uninstall-hook
|
||||
|
||||
deps = ${top_srcdir}/COPYING ${top_srcdir}/README \
|
||||
${top_srcdir}/INSTALL ${top_srcdir}/NEWS ${top_srcdir}/ChangeLog
|
||||
|
||||
html: ${ALL_SRC} doxyfile ${deps}
|
||||
doxygen doxyfile
|
||||
if PEDANTIC
|
||||
test \! -s doxygen.errors
|
||||
endif
|
||||
# cd latex && make
|
||||
# mv latex/refman.pdf @PACKAGENAME@-@MAJOR@.@MINOR@.@LEAST@.pdf
|
||||
|
||||
CLEANFILES = doxygen.errors @PACKAGENAME@-@MAJOR@.@MINOR@.@LEAST@.pdf
|
||||
DISTCLEANFILES = @PACKAGENAME@.doxytag
|
||||
MAINTAINERCLEANFILES = makefile.in
|
||||
|
||||
distclean-local:
|
||||
- rm -r html latex
|
||||
|
||||
dist-hook: html
|
||||
# cp -r html latex ${distdir}/
|
||||
|
||||
install-data-hook:
|
||||
test -d $(DESTDIR)${docdir} || mkdir -p $(DESTDIR)${docdir}
|
||||
chmod -R u+w $(DESTDIR)${docdir}
|
||||
cp -r html $(DESTDIR)${docdir}/
|
||||
|
||||
uninstall-hook:
|
||||
-chmod -R u+w $(DESTDIR)${docdir}
|
||||
-rm -rf $(DESTDIR)${docdir}/*
|
||||
|
BIN
doc/plantuml.jar
Normal file
Binary file not shown.
38
doc/style.css
Normal file
@@ -0,0 +1,38 @@
|
||||
#titlearea {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: flex-start;
|
||||
}
|
||||
#titlearea nav {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
#titlearea nav a {
|
||||
background-color: lightgray;
|
||||
border: 1px solid gray;
|
||||
color: black;
|
||||
padding: 1ex;
|
||||
margin: 0;
|
||||
}
|
||||
img, object {
|
||||
max-width: 100% !important;
|
||||
}
|
||||
@media (max-width: 50em) {
|
||||
#navrow1, #navrow2 {
|
||||
display: block
|
||||
}
|
||||
#side-nav, #splitbar, .ui-resizable-handle ui-resizable-e, .ui-resizable-handle ui-resizable-s {
|
||||
display: none;
|
||||
}
|
||||
#doc-content {
|
||||
margin-left: 0 !important;
|
||||
}
|
||||
}
|
||||
@media (min-width: 50em) {
|
||||
#navrow1, #navrow2 {
|
||||
display: none;
|
||||
}
|
||||
#side-nav, #splitbar, .ui-resizable-handle ui-resizable-e, .ui-resizable-handle ui-resizable-s {
|
||||
display: block
|
||||
}
|
||||
}
|
@@ -1,18 +1,22 @@
|
||||
## @id $Id$
|
||||
|
||||
#
|
||||
# This file has been added by bootstrap.sh on Sun, 15 Mar 2015 09:18:56 +0100
|
||||
# Feel free to change it or even remove and rebuild it, up to your needs
|
||||
#
|
||||
## 1 2 3 4 5 6 7 8
|
||||
## 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
AM_CPPFLAGS = -I${top_srcdir}/src
|
||||
AM_LDFLAGS = -L${top_builddir}/src/.libs
|
||||
LDADD = -lproxyface
|
||||
LDADD = -l${PACKAGE_NAME}
|
||||
|
||||
noinst_PROGRAMS = getproxylist
|
||||
if USE_QT
|
||||
noinst_PROGRAMS += simplegui
|
||||
exampledir = ${docdir}/examples
|
||||
example_PROGRAMS = getproxylist
|
||||
getproxylist_SOURCES = getproxylist.cxx
|
||||
|
||||
if HAVE_QT
|
||||
example_PROGRAMS += simplegui
|
||||
simplegui_SOURCES = simplegui.cxx
|
||||
endif
|
||||
|
||||
getproxylist_SOURCES = getproxylist.cxx
|
||||
|
||||
MAINTAINERCLEANFILES = makefile.in
|
||||
example_DATA = ${example_PROGRAMS:*=*.cxx}
|
||||
|
@@ -5,7 +5,12 @@
|
||||
// 1 2 3 4 5 6 7 8
|
||||
// 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
#include <QtCore/QtGlobal>
|
||||
#if QT_VERSION >= 0x050000
|
||||
#include <QtWidgets/QApplication>
|
||||
#else
|
||||
#include <QtGui/QApplication>
|
||||
#endif
|
||||
#include <proxyface/proxy.hxx>
|
||||
|
||||
int main(int argc, char** argv) {
|
||||
|
@@ -1,13 +0,0 @@
|
||||
./bootstrap.sh && \
|
||||
LDFLAGS="-L/opt/local/i586-mingw32msvc/lib" \
|
||||
CPPFLAGS="-I/opt/local/i586-mingw32msvc/include -DQ_OS_WIN32" \
|
||||
./configure \
|
||||
--prefix=/opt/local/i586-mingw32msvc \
|
||||
--build=x86_64 \
|
||||
--host=i586-mingw32msvc && \
|
||||
make && \
|
||||
sudo make install && \
|
||||
make clean && \
|
||||
./configure && \
|
||||
make check && \
|
||||
sudo make install
|
@@ -1,20 +0,0 @@
|
||||
./bootstrap.sh && \
|
||||
LDFLAGS="-L/usr/lib32 -m32" CXXFLAGS="-m32" ./configure \
|
||||
--libdir=/usr/local/lib32 \
|
||||
--build=x86_64 \
|
||||
--host=i386 && \
|
||||
make && \
|
||||
sudo make install && \
|
||||
make clean && \
|
||||
LDFLAGS="-L/opt/local/i586-mingw32msvc/lib" \
|
||||
CPPFLAGS="-I/opt/local/i586-mingw32msvc/include -DQ_OS_WIN32" \
|
||||
./configure \
|
||||
--prefix=/opt/local/i586-mingw32msvc \
|
||||
--build=x86_64 \
|
||||
--host=i586-mingw32msvc && \
|
||||
make && \
|
||||
sudo make install && \
|
||||
make clean && \
|
||||
./configure && \
|
||||
make check && \
|
||||
sudo make install
|
105
mac-create-app-bundle.sh
Executable file
@@ -0,0 +1,105 @@
|
||||
#!/bin/bash -ex
|
||||
|
||||
## @id $Id$
|
||||
##
|
||||
## Create Mac OS-X App Bundle from built file
|
||||
##
|
||||
## Parameters:
|
||||
## $1: name of the app-target
|
||||
## $2: name of the project
|
||||
## $3: package installation target
|
||||
##
|
||||
## 1 2 3 4 5 6 7 8
|
||||
## 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
if test "$(uname -s)" != "Darwin"; then
|
||||
echo "**** ERROR: run on Mac OS-X: $0"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
project=${2:-$(sed -n 's/ *m4_define *( *x_package_name, *\(.*\) *).*/\1/p' $(pwd)/configure.ac)}
|
||||
apptarget=${1:-${project}.app}
|
||||
sources=${3:-$(pwd)/tmp}
|
||||
! test -e "$apptarget" || rm -rf "$apptarget"
|
||||
test -n "$project"
|
||||
test -d "$sources"
|
||||
target="$(pwd)/${apptarget}/Contents/MacOS"
|
||||
|
||||
echo "Creating $apptarget for $project from $sources"
|
||||
|
||||
# Step 1: create and fill app directory structure
|
||||
mkdir -p ${apptarget}/Contents/{Resources,MacOS}
|
||||
! test -d ${sources}/bin || \
|
||||
find ${sources}/bin -mindepth 1 -maxdepth 1 -exec cp -a {} ${apptarget}/Contents/MacOS/ \;
|
||||
! test -d ${sources}/scripts || \
|
||||
find ${sources}/scripts -mindepth 1 -maxdepth 1 -exec cp -a {} ${apptarget}/Contents/MacOS/ \;
|
||||
executablefile=${apptarget}/Contents/MacOS/${project}
|
||||
test -x $executablefile || executablefile=$(ls -1 ${apptarget}/Contents/MacOS/ | head -1)
|
||||
! test -d ${sources}/lib || \
|
||||
find ${sources}/lib -mindepth 1 -maxdepth 1 -exec cp -a {} ${apptarget}/Contents/MacOS/ \;
|
||||
! test -d ${sources}/share/${project} || \
|
||||
find ${sources}/share/${project} -mindepth 1 -maxdepth 1 -exec cp -a {} ${apptarget}/Contents/Resources/ \;
|
||||
! test -d ${sources}/share || \
|
||||
find ${sources}/share -mindepth 1 -maxdepth 1 -name ${project} -prune -o -exec cp -a {} ${apptarget}/Contents/Resources/ \;
|
||||
! test -d ${sources} || \
|
||||
find ${sources} -mindepth 1 -maxdepth 1 -name share -o -name bin -o -name lib -o -name scripts -prune -o -exec cp -a {} ${apptarget}/Contents/Resources/ \;
|
||||
|
||||
# Step 2: copy qt plugins, if necessary
|
||||
for f in ${QT_PLUGINS}; do
|
||||
test -d ${target}/${f} \
|
||||
|| cp -r ${QT_PLUGIN_PATH}/${f} ${target}/${f} \
|
||||
|| exit 1
|
||||
done
|
||||
|
||||
# Step 3: resolve all library dependencies
|
||||
found=1
|
||||
oldpath="$(pwd)"
|
||||
while [ $found -ne 0 ]; do
|
||||
found=0
|
||||
cd "${target}"
|
||||
for file in $(find . -type f); do
|
||||
for lib in $(otool -L ${file} | tail -n +2 \
|
||||
| egrep '/usr/local/|/opt/local/|/opt/X11/|'"${HOME}" \
|
||||
| grep -v $file | awk '{print $1}'); do
|
||||
found=1
|
||||
test -f ${lib##*/} \
|
||||
|| ( \
|
||||
cp ${lib} . \
|
||||
&& chmod u+w ${lib##*/} \
|
||||
) \
|
||||
|| exit 1
|
||||
install_name_tool -change ${lib} \
|
||||
@executable_path/${lib##*/} ${file} \
|
||||
|| exit 1
|
||||
done
|
||||
done
|
||||
done
|
||||
cd ${oldpath}
|
||||
|
||||
# Step 4: if necessary, install qt_menu.nib
|
||||
if test -n "${QTDIR}"; then
|
||||
MENU_NIB=$(find ${QTDIR} -name .svn -o -name .git -prune -o -name qt_menu.nib -print \
|
||||
| head -1)
|
||||
if test -e "${MENU_NIB}"; then
|
||||
rsync -r "${MENU_NIB}" ${apptarget}/Contents/Resources/
|
||||
test -d ${apptarget}/Contents/Resources/qt_menu.nib
|
||||
fi
|
||||
fi
|
||||
|
||||
# Step 5: copy local or create new info.plist
|
||||
if test -f Info.plist; then
|
||||
cp -a Info.plist ${apptarget}/Contents/Info.plist
|
||||
else
|
||||
cat > ${apptarget}/Contents/Info.plist <<EOF
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>CFBundleIdentifier</key>
|
||||
<string>${project}</string>
|
||||
<key>CFBundleExecutable</key>
|
||||
<string>${executablefile##*/}</string>
|
||||
</dict>
|
||||
</plist>
|
||||
EOF
|
||||
fi
|
78
makefile.am
@@ -1,66 +1,28 @@
|
||||
## @file
|
||||
## @id $Id$
|
||||
##
|
||||
## $Id$
|
||||
##
|
||||
## $Date: 2008-08-26 13:09:21 $
|
||||
## $Author: marc $
|
||||
##
|
||||
## @copy © Marc Wäckerlin
|
||||
## @license LGPL, see file <a href="license.html">COPYING</a>
|
||||
## This file has been added:
|
||||
## - by bootstrap.sh
|
||||
## - on Fri, 23 November 2018 15:32:44 +0100
|
||||
## Feel free to change it or even remove and rebuild it, up to your needs
|
||||
##
|
||||
## 1 2 3 4 5 6 7 8
|
||||
## 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
SUBDIRS = @SRC_DIR@ @TST_DIR@ @DOC_DIR@ examples
|
||||
SUBDIRS = src doc examples
|
||||
|
||||
nobase_include_HEADERS =
|
||||
desktopdir = ${datadir}/applications
|
||||
dist_noinst_DATA = ax_check_qt.m4 bootstrap.sh \
|
||||
resolve-rpmbuilddeps.sh autogen.sh \
|
||||
ax_cxx_compile_stdcxx.m4 build-in-docker.sh \
|
||||
build-resource-file.sh \
|
||||
ax_init_standard_project.m4 \
|
||||
mac-create-app-bundle.sh resolve-debbuilddeps.sh \
|
||||
dependency-graph.sh template.sh \
|
||||
sql-to-dot.sed
|
||||
dist_doc_DATA = AUTHORS NEWS README.md COPYING INSTALL ChangeLog
|
||||
|
||||
EXTRA_DIST = bootstrap.sh debian
|
||||
README: README.md
|
||||
cp README.md README
|
||||
|
||||
DISTCLEANFILES = debian/changelog
|
||||
|
||||
deb: dist
|
||||
tar xzvf @PACKAGE@-@VERSION@.tar.gz
|
||||
cd @PACKAGE@-@VERSION@ && dpkg-buildpackage
|
||||
rm -rf @PACKAGE@-@VERSION@
|
||||
|
||||
doc_DATA = AUTHORS NEWS README COPYING INSTALL ChangeLog @PACKAGENAME@.spec
|
||||
|
||||
RPMS = /usr/src/packages/RPMS/i586/@PACKAGENAME@-@MAJOR@.@MINOR@.@LEAST@-1.i586.rpm \
|
||||
/usr/src/packages/RPMS/i586/@PACKAGENAME@-devel-@MAJOR@.@MINOR@.@LEAST@-1.i586.rpm \
|
||||
/usr/src/packages/SRPMS/@PACKAGENAME@-@MAJOR@.@MINOR@.@LEAST@-1.src.rpm
|
||||
|
||||
.PHONY: release tag rpm webserver \
|
||||
doc clean-local distclean-local dist-hook install-data-hook \
|
||||
uninstall-hook
|
||||
|
||||
release: tag webserver
|
||||
|
||||
tag: distcheck
|
||||
cvs ci -R .
|
||||
cvs tag -FR REL_@PACKAGENAME@-@MAJOR@-@MINOR@-@LEAST@ .
|
||||
|
||||
rpm: dist
|
||||
cp @PACKAGENAME@-@MAJOR@.@MINOR@.@LEAST@.tar.gz \
|
||||
/usr/src/packages/SOURCES/
|
||||
rpmbuild -ba --clean @PACKAGENAME@.spec
|
||||
|
||||
deps = ${top_srcdir}/COPYING ${top_srcdir}/README ${top_srcdir}/INSTALL ${top_srcdir}/NEWS ${top_srcdir}/ChangeLog
|
||||
|
||||
clean-local:
|
||||
- rm doxygen.err lib@PACKAGENAME@.doxytag
|
||||
- rm @PACKAGENAME@-dev_@MAJOR@.@MINOR@.@LEAST@-*.deb \
|
||||
@PACKAGENAME@_@MAJOR@.@MINOR@.@LEAST@-*.changes \
|
||||
@PACKAGENAME@_@MAJOR@.@MINOR@.@LEAST@-1.tar.gz \
|
||||
@PACKAGENAME@_@MAJOR@.@MINOR@.@LEAST@-1.dsc \
|
||||
@PACKAGENAME@-@MAJOR@.@MINOR@.@LEAST@.tar.gz \
|
||||
@PACKAGENAME@_@MAJOR@.@MINOR@.@LEAST@-*.deb
|
||||
|
||||
distclean-local:
|
||||
- rm -r ${top_builddir}/@DOC_DIR@/html/* ${top_builddir}/@DOC_DIR@/latex/*
|
||||
- rm makefile makefile.in doxygen.err libmrw.doxytag
|
||||
- find . -name '*~' | xargs rm
|
||||
- rm -r autom4te.cache
|
||||
- rm aclocal.m4 config.guess config.sub configure \
|
||||
depcomp install-sh ltmain.sh makefile makefile.in \
|
||||
missing mkinstalldirs
|
||||
CLEANFILES = README
|
||||
MAINTAINERCLEANFILES = makefile.in
|
||||
|
9
makefile_test.inc.am
Normal file
@@ -0,0 +1,9 @@
|
||||
## @id $Id$
|
||||
|
||||
## 1 2 3 4 5 6 7 8
|
||||
## 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
%.gcda: %
|
||||
gcov $<
|
||||
|
||||
CLEANFILES += ${CLEANFILES} ${TEST:%=%.gcno} ${TEST:%=%.gcda} *.gcov
|
9
proxyface.desktop.in
Normal file
@@ -0,0 +1,9 @@
|
||||
[Desktop Entry]
|
||||
Type=Application
|
||||
Name=proxyface
|
||||
GenericName=proxyface
|
||||
Comment=@DESCRIPTION@
|
||||
Icon=@prefix@/share/@PACKAGE_NAME@/@PACKAGE_ICON@
|
||||
Exec=proxyface %u
|
||||
Terminal=false
|
||||
Categories=Qt;Utility;
|
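The values above (@DESCRIPTION@, @prefix@, @PACKAGE_NAME@, @PACKAGE_ICON@) are substituted by configure. As an optional sanity check, the generated desktop entry can be validated with the standard freedesktop tool, e.g.:

    desktop-file-validate proxyface.desktop

This check is only a suggestion and is not part of the build.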
@@ -1,24 +1,50 @@
|
||||
Summary: @PACKAGENAME@ Proxy interface for Unix/Mac/Windows
|
||||
Name: @PACKAGENAME@
|
||||
Summary: @DESCRIPTION@
|
||||
Name: @PACKAGE_NAME@
|
||||
Version: @VERSION@
|
||||
Release: 1
|
||||
Release: @BUILD_NUMBER@.@DISTRO@
|
||||
License: LGPL
|
||||
Group: Development/Libraries/C++
|
||||
URL: https://dev.swisssign.com/projects/@PACKAGENAME@
|
||||
Source0: %{name}-%{version}.tar.gz
|
||||
BuildRequires: subversion libcppunit-devel gcc-c++ doxygen graphviz texlive automake autoconf libtool make libqt4-devel libproxy-devel
|
||||
BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root
|
||||
|
||||
Source0: %{name}-%{version}.tar.gz
|
||||
BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root
|
||||
BuildRequires: which, pkgconfig, gnupg, expect, git, make, automake, autoconf, rpm-build, binutils-devel, gcc-c++, doxygen, graphviz, java-openjdk @RPM_BUILD_DEPEND@ @RPM_DEPEND_IFEXISTS@
|
||||
|
||||
#### os dependent definitions ####
|
||||
%if 0%{?suse_version} || 0%{?sles_version}
|
||||
BuildRequires: lsb-release, libqt5-qtbase-devel, libqt5-qttools, libqt5-linguist-devel, libQt5WebKit5-devel libqt5-qtwebengine-devel libQt5WebKitWidgets-devel
|
||||
%else
|
||||
%if 0%{?mageia}
|
||||
BuildRequires: rpm-sign, lsb-release
|
||||
BuildRequires: qtbase5-common-devel, qttools5, lib64qt5webkit-devel, lib64qt5webkitwidgets-devel
|
||||
%else
|
||||
BuildRequires: qt5-qtbase-devel, qt5-qttools-devel, qt5-qtwebkit-devel
|
||||
BuildRequires: rpm-sign, redhat-lsb
|
||||
%global debug_package %{nil}
|
||||
%endif
|
||||
%endif
|
||||
%if ! 0%{?centos}
|
||||
BuildRequires: mscgen
|
||||
%if ! 0%{?mageia}
|
||||
BuildRequires: pandoc
|
||||
%endif
|
||||
%endif
|
||||
%if 0%{?mageia}
|
||||
BuildRequires: libtool, libltdl-devel
|
||||
%else
|
||||
BuildRequires: libtool, libtool-ltdl-devel
|
||||
%endif
|
||||
|
||||
%description
|
||||
@README@
|
||||
|
||||
This package contains only the shared libraries required at runtime.
|
||||
|
||||
|
||||
%prep
|
||||
%setup -q
|
||||
./configure --prefix=/usr \
|
||||
--docdir=/usr/share/doc/packages/@PACKAGENAME@ \
|
||||
--sysconfdir=/etc \
|
||||
--docdir=/usr/share/doc/packages/@PACKAGE_NAME@ \
|
||||
--libdir=/usr/%_lib
|
||||
|
||||
%build
|
||||
@@ -32,20 +58,16 @@ rm -rf $RPM_BUILD_ROOT
|
||||
|
||||
%files
|
||||
%defattr(-,root,root,-)
|
||||
/usr/%_lib/lib@PACKAGENAME@.so.*
|
||||
/usr/share/@PACKAGENAME@
|
||||
/usr/%_lib/*.so.*
|
||||
/usr/share/@PACKAGE_NAME@
|
||||
|
||||
%doc
|
||||
/usr/share/doc/packages/@PACKAGENAME@/AUTHORS
|
||||
/usr/share/doc/packages/@PACKAGENAME@/COPYING
|
||||
/usr/share/doc/packages/@PACKAGENAME@/ChangeLog
|
||||
/usr/share/doc/packages/@PACKAGENAME@/INSTALL
|
||||
/usr/share/doc/packages/@PACKAGENAME@/NEWS
|
||||
/usr/share/doc/packages/@PACKAGENAME@/README
|
||||
/usr/share/doc
|
||||
|
||||
%package devel
|
||||
Summary: @PACKAGENAME@ Proxy interface for Unix/Mac/Windows (development files)
|
||||
Summary: @DESCRIPTION@
|
||||
Group: Development/Libraries/C++
|
||||
Requires: @PACKAGENAME@ = @VERSION@
|
||||
Requires: @PACKAGE_NAME@ = @VERSION@ @RPM_BUILD_DEPEND@
|
||||
|
||||
%description devel
|
||||
@README@
|
||||
@@ -54,14 +76,14 @@ This package contains all files required for development.
|
||||
|
||||
%files devel
|
||||
%defattr(-,root,root,-)
|
||||
/usr/%_lib/lib@PACKAGENAME@.so
|
||||
/usr/%_lib/lib@PACKAGENAME@.a
|
||||
/usr/%_lib/*.so
|
||||
/usr/%_lib/*.a
|
||||
/usr/%_lib/*.la
|
||||
/usr/%_lib/pkgconfig
|
||||
/usr/%_lib/lib@PACKAGENAME@.la
|
||||
/usr/include/
|
||||
/usr/share/pkgconfig
|
||||
/usr/include/*
|
||||
%doc
|
||||
/usr/share/doc/packages/@PACKAGENAME@/html
|
||||
/usr/share/doc/packages/@PACKAGENAME@/@PACKAGENAME@.spec
|
||||
/usr/share/doc/packages/@PACKAGE_NAME@/html
|
||||
/usr/share/doc/packages/@PACKAGE_NAME@/examples
|
||||
|
||||
%changelog
|
||||
|
||||
|
133
resolve-debbuilddeps.sh
Executable file
@@ -0,0 +1,133 @@
|
||||
#! /bin/bash -ex
|
||||
|
||||
## @id $Id$
|
||||
|
||||
## Resolve Debian Build Dependencies
|
||||
## Installs all the required packages
|
||||
## Call: ./resolve-debbuilddeps 'name of build schroot'
|
||||
## e.g. call: ./resolve-debbuilddeps trusty_amd64
|
||||
|
||||
## 1 2 3 4 5 6 7 8
|
||||
## 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
SCHROOTNAME="$1"
|
||||
if test -n "${SCHROOTNAME}"; then
|
||||
DO="schroot -c "${SCHROOTNAME}" --"
|
||||
SUDO="schroot -c "${SCHROOTNAME}" -u root -d / --"
|
||||
else
|
||||
DO=""
|
||||
if grep -q '/docker' /proc/1/cgroup; then
|
||||
SUDO=""
|
||||
else
|
||||
SUDO="sudo"
|
||||
fi
|
||||
fi
|
||||
|
||||
function install() {
|
||||
if ${SUDO} apt-get install -y --force-yes --no-install-suggests --no-install-recommends $*; then
|
||||
return 0
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
TO_INSTALL=
|
||||
DEPS=
|
||||
|
||||
if test -e debian/control.in -a ! -e debian/control; then
|
||||
function pkg_exists() {
|
||||
test -n "$(${DO} apt-cache policy -q ${1})"
|
||||
}
|
||||
function AX_PKG_CHECK() {
|
||||
local DEV_DEB_DIST_PKG=
|
||||
local DEV_DIST_PKG=
|
||||
local pkg=
|
||||
eval $4
|
||||
if test -z "$2"; then
|
||||
pkg=$1
|
||||
else
|
||||
pkg=$2
|
||||
fi
|
||||
pkg=${DEV_DEB_DIST_PKG:-${DEV_DIST_PKG:-${pkg}}-dev}
|
||||
if pkg_exists "${pkg}"; then
|
||||
echo $pkg
|
||||
fi
|
||||
}
|
||||
function AX_PKG_REQUIRE() {
|
||||
local DEV_DEB_DIST_PKG=
|
||||
local DEV_DIST_PKG=
|
||||
local pkg=
|
||||
eval $6
|
||||
if test -z "$2"; then
|
||||
pkg=$1
|
||||
else
|
||||
pkg=$2
|
||||
fi
|
||||
if test -n "$4"; then
|
||||
for f in $pkg $4; do
|
||||
if pkg_exists "${f}-dev"; then
|
||||
pkg=$f
|
||||
break
|
||||
fi
|
||||
done
|
||||
fi
|
||||
echo ${DEV_DEB_DIST_PKG:-${DEV_DIST_PKG:-${pkg}}-dev}
|
||||
}
|
||||
DEPS+=" $(eval $(sed -n '/^ *AX_PKG_REQUIRE/{s,^ *\(AX_PKG_REQUIRE\) *(\(.*\)).*,\1 \2,;s.\[\([^]]*\)\],\?."\1".g;s,$,;,g;p}' configure.ac))"
|
||||
DEPS+=" $(eval $(sed -n '/^ *AX_PKG_CHECK/{s,^ *\(AX_PKG_CHECK\) *(\(.*\)).*,\1 \2,;s.\[\([^]]*\)\],\?."\1".g;s,$,;,g;p}' configure.ac))"
|
||||
for f in $(sed -n 's, *AX_\(DEB\|ALL\)_DEPEND_IFEXISTS(\([^)]*\)).*,\2,p' configure.ac); do
|
||||
if pkg_exists "${f}"; then
|
||||
DEPS+=" ${f}"
|
||||
fi
|
||||
done
|
||||
for f in $(sed -n 's, *AX_\(DEB\|ALL\)_DEPEND_IFEXISTS_DEV(\([^)]*\)).*,\2,p' configure.ac); do
|
||||
if pkg_exists "${f}-dev"; then
|
||||
DEPS+=" ${f}-dev"
|
||||
fi
|
||||
done
|
||||
for f in $(sed -n 's, *AX_\(DEB\|ALL\)\(_BUILD\)\?_DEPEND(\([^)]*\)).*,\3,p' configure.ac); do
|
||||
DEPS+=" ${f}"
|
||||
done
|
||||
for f in $(sed -n 's, *AX_\(DEB\|ALL\)\(_BUILD\)\?_DEPEND_DEV(\([^)]*\)).*,\3,p' configure.ac); do
|
||||
DEPS+=" ${f}-dev"
|
||||
done
|
||||
trap "rm debian/control" INT TERM EXIT
|
||||
sed 's,@\(\(ALL\|DEB\)_DEPEND_IFEXISTS\|\(ALL\|DEB\)_BUILD_DEPEND\|\(ALL\|DEB\)_DEPEND\)@,,g' debian/control.in | \
|
||||
sed 's,@[^@]*@, dummytext,g' > debian/control
|
||||
fi
|
||||
|
||||
install dpkg-dev
|
||||
|
||||
DEPS+=" $(LANG= ${DO} dpkg-checkbuilddeps 2>&1 | sed -n '/Unmet build dependencies/ { s,.*Unmet build dependencies: ,,g; s, ([^)]*),,g; s, *| *,|,g; p}')"
|
||||
|
||||
for pa in ${DEPS}; do
|
||||
if test "${pa//|/}" = "${pa}"; then
|
||||
TO_INSTALL+=" ${pa}"
|
||||
continue;
|
||||
fi
|
||||
success=0
|
||||
for p in ${pa//|/ }; do
|
||||
if install ${TO_INSTALL} ${p}; then
|
||||
TO_INSTALL+=" ${p}"
|
||||
success=1
|
||||
break
|
||||
fi
|
||||
done
|
||||
if test ${success} -eq 0; then
|
||||
echo "**** Error: Installation Failed: ${pa}"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
if test -n "${TO_INSTALL}" && ! install ${TO_INSTALL}; then
|
||||
echo "**** Error: Installation Failed: ${TO_INSTALL}"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
FILES="$(LANG= ${DO} dpkg-checkbuilddeps 2>&1 | sed -n '/Unmet build dependencies/ { s,.*Unmet build dependencies: ,,g; s, ([^)]*),,g; s, *| *,|,g; p}')"
|
||||
if test -n "${FILES}"; then
|
||||
echo "**** ERROR: Cannot install: " $FILES
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "**** Success: All Dependencies Resolved"
|
120
resolve-rpmbuilddeps.sh
Executable file
@@ -0,0 +1,120 @@
|
||||
#! /bin/bash -ex
|
||||
|
||||
## @id $Id$
|
||||
|
||||
## Resolve RPM Build Dependencies
|
||||
## Installs all the required packages
|
||||
## Call: ./resolve-rpmbuilddeps 'name of build schroot'
|
||||
## e.g. call: ./resolve-rpmbuilddeps opensuse-13.2_x86_64
|
||||
|
||||
## 1 2 3 4 5 6 7 8
|
||||
## 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
INSTALL_TOOL=${INSTALL_TOOL:-$((test -x /usr/bin/zypper && echo zypper install -y) || (test -x /usr/bin/dnf && echo dnf install -y) || (test -x /usr/bin/yum && echo yum install -y) || (test -x /usr/sbin/urpmi && echo urpmi --auto))}
|
||||
SCHROOTNAME="$1"
|
||||
PACKAGE_NAME=$(sed -n 's/^ *m4_define(x_package_name, \(.*\)).*/\1/p' configure.ac)
|
||||
|
||||
TRAP_CMD="sleep ${SLEEP:-0};"
|
||||
DEPS=
|
||||
for f in BUILD BUILDROOT RPMS SPECS SRPMS; do
|
||||
if ! test -d $f; then
|
||||
TRAP_CMD+="rm -rf $f;"
|
||||
mkdir $f
|
||||
fi
|
||||
done
|
||||
if test -e ${PACKAGE_NAME}.spec.in -a ! -e ${PACKAGE_NAME}.spec; then
|
||||
function pkg_exists() {
|
||||
(test -x /usr/bin/zypper && zypper search -x "$1" 1>&2 > /dev/null) || \
|
||||
(test -x /usr/bin/dnf && dnf list -q "$1" 1>&2 > /dev/null) || \
|
||||
(test -x /usr/bin/yum && yum list -q "$1" 1>&2 > /dev/null) || \
|
||||
(test -x /usr/sbin/urpmq && urpmq "$1" 1>&2 > /dev/null)
|
||||
}
|
||||
function AX_PKG_CHECK() {
|
||||
local DEV_RPM_DIST_PKG=
|
||||
local DEV_DIST_PKG=
|
||||
local pkg=
|
||||
eval $4
|
||||
if test -z "$2"; then
|
||||
pkg=$1
|
||||
else
|
||||
pkg=$2
|
||||
fi
|
||||
pkg=${DEV_RPM_DIST_PKG:-${DEV_DIST_PKG:-${pkg}}-devel}
|
||||
if pkg_exists "${pkg}"; then
|
||||
echo ${pkg}
|
||||
fi
|
||||
}
|
||||
function AX_PKG_REQUIRE() {
|
||||
local DEV_RPM_DIST_PKG=
|
||||
local DEV_DIST_PKG=
|
||||
local pkg=
|
||||
eval $6
|
||||
if test -z "$2"; then
|
||||
pkg=$1
|
||||
else
|
||||
pkg=$2
|
||||
fi
|
||||
if test -n "$4"; then
|
||||
for f in $pkg $4; do
|
||||
if pkg_exists "${f}-devel"; then
|
||||
pkg=$f
|
||||
break
|
||||
fi
|
||||
done
|
||||
fi
|
||||
echo ${DEV_RPM_DIST_PKG:-${DEV_DIST_PKG:-${pkg}}-devel}
|
||||
}
|
||||
DEPS+=" $(eval $(sed -n '/^ *AX_PKG_REQUIRE/{s,^ *\(AX_PKG_REQUIRE\) *(\(.*\)).*,\1 \2,;s.\[\([^]]*\)\],\?."\1".g;s,$,;,g;p}' configure.ac))"
|
||||
DEPS+=" $(eval $(sed -n '/^ *AX_PKG_CHECK/{s,^ *\(AX_PKG_CHECK\) *(\(.*\)).*,\1 \2,;s.\[\([^]]*\)\],\?."\1".g;s,$,;,g;p}' configure.ac))"
|
||||
for f in $(sed -n 's, *AX_\(RPM\|ALL\)_DEPEND_IFEXISTS(\([^)]*\)).*,\2,p' configure.ac); do
|
||||
if pkg_exists "${f}"; then
|
||||
DEPS+=" ${f}"
|
||||
fi
|
||||
done
|
||||
for f in $(sed -n 's, *AX_\(RPM\|ALL\)_DEPEND_IFEXISTS_DEV(\([^)]*\)).*,\2,p' configure.ac); do
|
||||
if pkg_exists "${f}-devel"; then
|
||||
DEPS+=" ${f}-devel"
|
||||
fi
|
||||
done
|
||||
for f in $(sed -n 's, *AX_\(RPM\|ALL\)\(_BUILD\)\?_DEPEND(\([^)]*\)).*,\3,p' configure.ac); do
|
||||
DEPS+=" ${f}"
|
||||
done
|
||||
for f in $(sed -n 's, *AX_\(RPM\|ALL\)\(_BUILD\)\?_DEPEND_DEV(\([^)]*\)).*,\3,p' configure.ac); do
|
||||
DEPS+=" ${f}-devel"
|
||||
done
|
||||
TRAP_CMD+="rm ${PACKAGE_NAME}.spec;"
|
||||
trap "${TRAP_CMD}" INT TERM EXIT
|
||||
sed 's,@\(\(ALL\|RPM\)_DEPEND_IFEXISTS\|\(ALL\|RPM\)_BUILD_DEPEND\|\(ALL\|RPM\)_DEPEND\)@,,g' ${PACKAGE_NAME}.spec.in | \
|
||||
sed 's,@[^@]*@,dummytext,g' > ${PACKAGE_NAME}.spec
|
||||
fi
|
||||
|
||||
TGZFILE=$(sed -n '/^Name: */{s///;h};/^Version: */{s///;H;x;s/\n/-/;s/$/.tar.gz/;p}' ${PACKAGE_NAME}.spec)
|
||||
if ! test -e $TGZFILE; then
|
||||
TRAP_CMD+="rm ${TGZFILE};"
|
||||
trap "${TRAP_CMD}" INT TERM EXIT
|
||||
touch $TGZFILE
|
||||
fi
|
||||
|
||||
if test -n "${SCHROOTNAME}"; then
|
||||
FILES=$(LANG= schroot -c ${SCHROOTNAME} -- rpmbuild -bb --clean --nobuild --define "_topdir ." --define "_sourcedir ." ${PACKAGE_NAME}.spec 2>&1 | sed -n 's, is needed by.*,,p')
|
||||
if test -n "${FILES// /}${DEPS// /}"; then
|
||||
schroot -c ${SCHROOTNAME} -u root -- ${INSTALL_TOOL} ${FILES} ${DEPS}
|
||||
fi
|
||||
else
|
||||
FILES=$(LANG= rpmbuild -bb --clean --nobuild --define "_topdir ." --define "_sourcedir ." ${PACKAGE_NAME}.spec 2>&1 | sed -n 's, is needed by.*,,p')
|
||||
if test -n "${FILES// /}${DEPS// /}"; then
|
||||
${INSTALL_TOOL} ${FILES} ${DEPS}
|
||||
fi
|
||||
fi
|
||||
|
||||
if test -n "${SCHROOTNAME}"; then
|
||||
FILES=$(LANG= schroot -c ${SCHROOTNAME} -- rpmbuild -bb --clean --nobuild --define "_topdir ." --define "_sourcedir ." ${PACKAGE_NAME}.spec 2>&1 | sed -n 's, is needed by.*,,p')
|
||||
else
|
||||
FILES=$(LANG= rpmbuild -bb --clean --nobuild --define "_topdir ." --define "_sourcedir ." ${PACKAGE_NAME}.spec 2>&1 | sed -n 's, is needed by.*,,p')
|
||||
fi
|
||||
if test -n "${FILES// /}"; then
|
||||
echo "**** ERROR: Cannot install: " $FILES
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "**** Success: All Dependencies Resolved"
|
14
rpmsign.exp
Executable file
@@ -0,0 +1,14 @@
|
||||
#!/usr/bin/expect -f
|
||||
|
||||
set key [lindex $argv 0]
|
||||
set password [lindex $argv 1]
|
||||
set files [lrange $argv 2 end]
|
||||
|
||||
### rpm-sign.exp -- Sign RPMs by sending the passphrase.
|
||||
spawn rpmsign --define "_gpg_name $key" --addsign {*}$files
|
||||
expect {
|
||||
"Enter pass phrase: " {
|
||||
send -- "$password\r"
|
||||
exp_continue
|
||||
} eof
|
||||
}
|
BIN
screenshot.png
Normal file
Binary file not shown.
87
sql-to-dot.sed
Executable file
@@ -0,0 +1,87 @@
|
||||
#! /bin/sed -nf
|
||||
1i\
|
||||
/** @page database Database Schema\
|
||||
\
|
||||
@dot\
|
||||
digraph schema {
|
||||
|
||||
# get everything on one single line
|
||||
H;$!d;$x
|
||||
|
||||
# remove all single-line comment lines
|
||||
s/\n--[^\n]*//g
|
||||
|
||||
# encode html entities
|
||||
s/&/\&##SEMICOLON##/g
|
||||
s/</\<##SEMICOLON##/g
|
||||
s/>/\>##SEMICOLON##/g
|
||||
|
||||
# reduce spaces
|
||||
s,\t\| \+, ,g
|
||||
|
||||
# remove multiline comments
|
||||
:f;s,\(.*\)/\*.*\*/[ \n]*;*,\1,g;tf
|
||||
|
||||
# remove empty lines
|
||||
s,\n\+,\n,g
|
||||
|
||||
# remove unknown commands
|
||||
s,\(;\|\n\) *\(INSERT\|DELIMITER\|USE\|DROP\|CREATE[ \n]\+DATABASE\)[ \n]\+[^;]*;\+,,ig
|
||||
|
||||
# convert special characters within quotes
|
||||
:a;s/^\(\([^"]*"[^",]*"\)*[^"]*"[^"]*\),\([^"]*".*\)/\1\##COMMA##\3/g;ta
|
||||
:c;s/^\(\([^']*'[^',]*'\)*[^']*'[^']*\),\([^']*'.*\)/\1\##COMMA##\3/g;tc
|
||||
|
||||
# backup everything to the buffer
|
||||
# then analyze only on one create table
|
||||
:i
|
||||
h
|
||||
s,.*\(create[ \n]\+table[^;]*;\).*,\1,ig
|
||||
|
||||
# start html table node
|
||||
s|CREATE[ \n]\+TABLE[ \n]\+\(IF[ \n]\+NOT[ \n]\+EXISTS[ \n]\+\)\?`\?\(\w\+\)`\?| \2\n [shape=none, margin=0, label=<\n <table bgcolor="#dddddd">\n <tr><td bgcolor="#ddddff" colspan="4"><b>\2</b></td></tr>|ig
|
||||
|
||||
# remove key definitions
|
||||
s/[),][\n ]*\(\(UNIQUE\|PRIMARY\)[ \n]\+\)\?KEY[ \n]\+[^(]*([^)]*)//gi
|
||||
|
||||
# move foreign keys as relation to the end
|
||||
:b;s/\(\w\+\)\([^;]*\)FOREIGN[\n ]\+KEY[ \n]*([ \n]*`\?\([a-z]\+\)`\?[ \n]*)[ \n]*REFERENCES[ \n]*`\?\([a-z]\+\)`\?[ \n]*([ \n]*`\?\([a-z]\+\)`\?[ \n]*)[ \n]*\([^,)]*\)\([,)].*\)/\1\2\7\n \1:\3 -> \4:\5 [label="\6"]##SEMICOLON##/ig;tb
|
||||
|
||||
# create table rows
|
||||
s|[(,][ \n]*`\?\(\w\+\)`\?[ \n]\+\(\w\+\(([^)]\+)\)\?\)[ \n]*\([^,)]*\)|\n <tr><td align="left" port="\1"><b>\1</b></td><td align="left">\2</td><td align="left">\4</td><td></td></tr>|g
|
||||
# extract comment
|
||||
s|\(<td\( *\w*="\w*"\)* *>[^<]*\)COMMENT[ \n]\+["']\([^"']*\)["']\([^<]*</td>\)<td></td>|\1\4<td align="left">\3</td>|g
|
||||
|
||||
# add line breaks for long lines
|
||||
s|\(<td[^>]*>[^<]\{30,40\}\)[ \n]\+\([^<]\{20,\}</td>\)|\1<br/>\2|g
|
||||
#:d;s|\(<br/>[^<]\{30,40\}\)[ \n]\+\([^<]\{20,\}</td>\)|\1<br/>\2|g;td
|
||||
|
||||
# add table comment below
|
||||
:k;tk
|
||||
s|[ \n]*)[^)]*COMMENT[ \n]*=[ \n]*["']\?\([^"']*\)["']\?[^;]*|\n <tr><td bgcolor="#ddddff" colspan="4">\1</td></tr>|ig;th
|
||||
s|)[^);]*;|\n;|ig
|
||||
:h
|
||||
|
||||
# cleanup comment below, add line breaks for long lines
|
||||
s|\(<td[^>]*>[^<]\{60,80\}\)[ \n]\+\([^<]\{30,\}</td>\)|\1<br/>\2|g
|
||||
#:e;s|\(<br/>[^<]\{60,80\}\)[ \n]\+\([^<]\{30,\}</td>\)|\1<br/>\2|g;te
|
||||
|
||||
# close table
|
||||
s|;|\n </table>\n >];|ig
|
||||
|
||||
# convert ##COMMA## to ,
|
||||
s|##COMMA##|,|g
|
||||
# convert ##SEMICOLON## to ;
|
||||
s,##SEMICOLON##,;,g
|
||||
|
||||
# print one table
|
||||
p
|
||||
# get buffer back and remove the table that has just been analyzed
|
||||
x
|
||||
s,\(.*\)create[ \n]\+table[^;]*;\(.*\),\1\2,ig
|
||||
ti
|
||||
|
||||
$a\
|
||||
}\
|
||||
@enddot\
|
||||
*/
|
9
src/languages.qrc.in
Normal file
@@ -0,0 +1,9 @@
|
||||
<!DOCTYPE RCC>
|
||||
<RCC version="1.0">
|
||||
<qresource prefix="/language">
|
||||
<file alias="proxy_de.qm">@top_srcdir@/src/proxy_de.qm</file>
|
||||
<file alias="proxy_fr.qm">@top_srcdir@/src/proxy_fr.qm</file>
|
||||
<file alias="proxy_it.qm">@top_srcdir@/src/proxy_it.qm</file>
|
||||
<file alias="proxy_en.qm">@top_srcdir@/src/proxy_en.qm</file>
|
||||
</qresource>
|
||||
</RCC>
|
@@ -1,57 +1,42 @@
|
||||
## @file
|
||||
##
|
||||
## $Id$
|
||||
##
|
||||
## @id $Id$
|
||||
#
|
||||
# This file has been added by bootstrap.sh on Sun, 15 Mar 2015 09:18:56 +0100
|
||||
# Feel free to change it or even remove and rebuild it, up to your needs
|
||||
#
|
||||
## 1 2 3 4 5 6 7 8
|
||||
## 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
if USE_QT
|
||||
UISOURCES = proxyface/proxy.ui proxyface/proxyauth.ui
|
||||
RESOURCES = resources.cxx
|
||||
MOCHEADER = proxyface/proxy.hxx proxyface/autoproxy.hxx proxyface/proxyauth.hxx
|
||||
LANGUAGES = proxy_de.qm proxy_en.qm proxy_fr.qm proxy_it.qm
|
||||
LANG_TS = proxy_de.ts proxy_en.ts proxy_fr.ts proxy_it.ts
|
||||
UIHEADER = proxyface/ui_proxy.hxx proxyface/ui_proxyauth.hxx
|
||||
MOCSOURCES = proxyface/moc_proxy.cxx proxyface/moc_autoproxy.cxx proxyface/moc_proxyauth.cxx
|
||||
endif
|
||||
|
||||
pkgconfigdir = $(libdir)/pkgconfig
|
||||
pkgconfig_DATA = @PACKAGENAME@.pc
|
||||
pkgconfig2dir = $(datarootdir)/pkgconfig
|
||||
pkgconfig2_DATA = $(pkgconfig_DATA)
|
||||
|
||||
pkgdata_DATA = ${LANGUAGES}
|
||||
|
||||
EXTRA_DIST = ${UISOURCES} $(pkgconfig_DATA).in version.cxx.in
|
||||
|
||||
BUILT_SOURCES = ${UIHEADER} ${MOCSOURCES} ${LANGUAGES} ${RESOURCES} version.cxx
|
||||
lib_LTLIBRARIES = libproxyface.la
|
||||
LANGUAGE_FILE_BASE = proxy
|
||||
translationsdir = ${pkgdatadir}/translations
|
||||
translations_DATA = ${LANGUAGE_FILE_BASE}_en.qm \
|
||||
${LANGUAGE_FILE_BASE}_de.qm \
|
||||
${LANGUAGE_FILE_BASE}_fr.qm \
|
||||
${LANGUAGE_FILE_BASE}_it.qm
|
||||
|
||||
proxyfacedir = ${includedir}/proxyface
|
||||
proxyface_HEADERS = proxyface/unix.hxx proxyface/windoze.hxx proxyface/qtproxy.hxx \
|
||||
${MOCHEADER} ${UIHEADER}
|
||||
proxyface_HEADERS = proxyface/autoproxy.hxx proxyface/proxy.hxx \
|
||||
proxyface/unix.hxx proxyface/proxyauth.hxx \
|
||||
proxyface/qtproxy.hxx proxyface/windoze.hxx \
|
||||
${libproxyface_la_UIFILES}
|
||||
|
||||
lib_LTLIBRARIES = libproxyface.la
|
||||
if HAVE_QT
|
||||
libproxyface_la_UIFILES = proxyface/ui_proxy.hxx \
|
||||
proxyface/ui_proxyauth.hxx
|
||||
libproxyface_la_MOCFILES = proxyface/moc_proxy.cxx \
|
||||
proxyface/moc_autoproxy.cxx \
|
||||
proxyface/moc_proxyauth.cxx
|
||||
libproxyface_la_RCCFILES = proxyface/qrc_resources.cxx
|
||||
BUILT_SOURCES = ${libproxyface_la_UIFILES} \
|
||||
${libproxyface_la_MOCFILES} \
|
||||
${libproxyface_la_RCCFILES} ${translations_DATA}
|
||||
EXTRA_DIST_TR = ${libproxyface_la_MOCFILES:moc_%.cxx=%.hxx} \
|
||||
${libproxyface_la_UIFILES:ui_%.hxx=%.ui}
|
||||
EXTRA_DIST = ${EXTRA_DIST_TR} languages.qrc.in \
|
||||
${translations_DATA:%.qm=%.ts}
|
||||
|
||||
libproxyface_la_SOURCES = ${MOCSOURCES} ${LANGUAGES} ${RESOURCES} version.cxx
|
||||
libproxyface_la_LDFLAGS = -version-info ${LIB_VERSION}
|
||||
endif
|
||||
libproxyface_la_SOURCES = version.cxx ${libproxyface_la_RCCFILES} \
|
||||
${libproxyface_la_MOCFILES}
|
||||
|
||||
AM_CPPFLAGS = -I${top_srcdir} -I${top_builddir}
|
||||
|
||||
%.cxx: %.qrc
|
||||
${RCC} -o $@ $<
|
||||
|
||||
moc_%.cxx: %.hxx
|
||||
${MOC} -DMOC ${CPPFLAGS} ${AM_CPPFLAGS} -o $@ $<
|
||||
|
||||
ui_%.hxx: %.ui
|
||||
${UIC} -o $@ $<
|
||||
|
||||
%.ts: ${UISOURCES} ${MOCHEADER} ${proxyface_HEADERS}
|
||||
${LUPDATE} ${LUPDATE_ARGS} $^ -ts $@
|
||||
|
||||
%.qm: %.ts
|
||||
${LRELEASE} $< -qm $@
|
||||
|
||||
CLEANFILES = ${UIHEADER} ${MOCSOURCES}
|
||||
DISTCLEANFILES = $(pkgconfig_DATA)
|
||||
MAINTAINERCLEANFILES = makefile.in
|
||||
DISTCLEANFILES = ${translations_DATA}
|
@@ -1,11 +1,20 @@
|
||||
prefix=@PACKAGENAME@
|
||||
exec_prefix=${prefix}
|
||||
## @id $Id$
|
||||
#
|
||||
# This file has been added by bootstrap.sh on Sat, 09 May 2015 10:46:31 +0200
|
||||
# Feel free to change it or even remove and rebuild it, up to your needs
|
||||
#
|
||||
## 1 2 3 4 5 6 7 8
|
||||
## 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
prefix=@prefix@
|
||||
exec_prefix=@exec_prefix@
|
||||
libdir=${exec_prefix}/lib
|
||||
includedir=${prefix}/include
|
||||
translationsdir=@datadir@/@PACKAGE_NAME@/translations
|
||||
|
||||
Name: @PACKAGENAME@
|
||||
Description: C++/Qt/libproxy/WinHTTP GUI/WPAD Proxy Interface
|
||||
Name: @PACKAGE_NAME@
|
||||
Description: @DESCRIPTION@
|
||||
Version: @VERSION@
|
||||
Libs: -L${libdir} -lproxyface
|
||||
Libs.private:
|
||||
Cflags: -I${includedir}
|
||||
Libs: -L${libdir} -l@PACKAGE_NAME@ @LDFLAGS@
|
||||
Cflags: -I${includedir} @CPPFLAGS@
|
||||
Requires: @PKG_REQUIREMENTS@
|
||||
|
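Once the generated .pc file is installed, dependent projects can obtain compile and link flags through pkg-config in the usual way, for example:

    g++ $(pkg-config --cflags proxyface) -o example example.cxx $(pkg-config --libs proxyface)

This example is not part of the repository; it only illustrates how the Cflags/Libs entries above are consumed.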
@@ -8,7 +8,9 @@
|
||||
#ifndef PROXYFACE_HXX
|
||||
#define PROXYFACE_HXX
|
||||
|
||||
#ifdef QT_NETWORK_LIB
|
||||
#if HAVE_QT == 1
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wconversion"
|
||||
#include <QtNetwork/QNetworkProxy>
|
||||
#include <QtNetwork/QNetworkAccessManager>
|
||||
#include <QtNetwork/QNetworkReply>
|
||||
@@ -17,9 +19,8 @@
|
||||
#include <QtNetwork/QHostInfo>
|
||||
#include <QtCore/QTimer>
|
||||
#include <QtCore/QDebug>
|
||||
#ifndef Q_OS_WIN32
|
||||
#include <QtCore/QThread>
|
||||
#endif
|
||||
#pragma GCC diagnostic pop
|
||||
#include <map>
|
||||
#ifndef PROXYFACE_LOG
|
||||
#define PROXYFACE_LOG qDebug()<<__PRETTY_FUNCTION__
|
||||
@@ -192,15 +193,11 @@ namespace proxy {
|
||||
|
||||
@example getproxylist.cxx */
|
||||
class Interface
|
||||
#ifdef QT_NETWORK_LIB
|
||||
#ifdef Q_OS_WIN32
|
||||
: public QObject
|
||||
#else
|
||||
#if HAVE_QT == 1
|
||||
: public QThread
|
||||
#endif
|
||||
#endif
|
||||
{
|
||||
#ifdef QT_NETWORK_LIB
|
||||
#if HAVE_QT == 1
|
||||
Q_OBJECT
|
||||
#endif
|
||||
|
||||
@@ -208,11 +205,11 @@ namespace proxy {
|
||||
|
||||
//! Keep your instance as long as possible, because of caching.
|
||||
Interface()
|
||||
#ifdef QT_NETWORK_LIB
|
||||
#if HAVE_QT == 1
|
||||
: _timeout1Paused(false), _timeout2Paused(false)
|
||||
#endif
|
||||
{
|
||||
#ifdef QT_NETWORK_LIB
|
||||
#if HAVE_QT == 1
|
||||
PROXYFACE_LOG;
|
||||
if (!connect(&_timeout1, SIGNAL(timeout()), SLOT(timeout())))
|
||||
qFatal("connect failed");
|
||||
@@ -229,7 +226,7 @@ namespace proxy {
|
||||
|
||||
//! Get list of proxies for a given URL.
|
||||
virtual List proxies(const std::string& url) = 0;
|
||||
#ifdef QT_NETWORK_LIB
|
||||
#if HAVE_QT == 1
|
||||
//! Reset, stop all outstanding checks
|
||||
void reset() {
|
||||
_timeout1.stop();
|
||||
@@ -313,7 +310,7 @@ namespace proxy {
|
||||
if (getenv("HTTP_PROXY")) {
|
||||
QUrl proxy(QString(getenv("HTTP_PROXY")));
|
||||
QNetworkProxy envProxy(QNetworkProxy::HttpProxy,
|
||||
proxy.host(), proxy.port(),
|
||||
proxy.host(), (quint16)proxy.port(),
|
||||
proxy.userName(), proxy.password());
|
||||
setupProxyCheck(envProxy, url);
|
||||
}
|
||||
@@ -523,7 +520,7 @@ namespace proxy {
|
||||
:(it->type==HTTP?QNetworkProxy::HttpProxy
|
||||
:(it->type==SOCKS?QNetworkProxy::Socks5Proxy
|
||||
:QNetworkProxy::NoProxy))),
|
||||
QString::fromStdString(it->host), it->port);
|
||||
QString::fromStdString(it->host), (quint16)it->port);
|
||||
setupProxyCheck(prxy, _url);
|
||||
}
|
||||
QNetworkProxy directProxy(QNetworkProxy::NoProxy);
|
||||
@@ -561,7 +558,7 @@ namespace proxy {
|
||||
}
|
||||
|
||||
# ifdef WIN32
|
||||
# ifdef QT_NETWORK_LIB
|
||||
# ifdef QT_NETWORK_LIB___HAS_A_BUG /// @bug Crashes on Windows Qt 5.2.1
|
||||
// use Qt if available
|
||||
# include <proxyface/qtproxy.hxx>
|
||||
namespace proxy {
|
||||
@@ -575,7 +572,7 @@ namespace proxy {
|
||||
}
|
||||
# endif
|
||||
# else
|
||||
# if defined(QT_NETWORK_LIB) && defined(Q_OS_MAC)
|
||||
# if HAVE_QT == 1
|
||||
// use Qt if available (not yet linux)
|
||||
# include <proxyface/qtproxy.hxx>
|
||||
namespace proxy {
|
||||
|
@@ -9,18 +9,27 @@
|
||||
#define PROXY_HXX
|
||||
|
||||
#include <proxyface/autoproxy.hxx>
|
||||
#include <proxyface/ui_proxy.hxx>
|
||||
#include <proxyface/proxyauth.hxx>
|
||||
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wconversion"
|
||||
#include <proxyface/ui_proxy.hxx>
|
||||
#include <QtGui/QMovie>
|
||||
#if QT_VERSION >= 0x050000
|
||||
#include <QtWidgets/QDialog>
|
||||
#include <QtWidgets/QPushButton>
|
||||
#else
|
||||
#include <QtGui/QDialog>
|
||||
#include <QtGui/QPushButton>
|
||||
#endif
|
||||
#include <QtNetwork/QNetworkProxy>
|
||||
#include <QtNetwork/QAuthenticator>
|
||||
#include <QtNetwork/QNetworkReply>
|
||||
#include <QtCore/QUrl>
|
||||
#include <QtCore/QSettings>
|
||||
|
||||
#include <QtCore/QDebug>
|
||||
#pragma GCC diagnostic pop
|
||||
|
||||
#include <cassert>
|
||||
|
||||
namespace gui {
|
||||
@@ -86,6 +95,11 @@ namespace gui {
|
||||
acceptValues();
|
||||
}
|
||||
|
||||
void proxy(const QString& url,
|
||||
int timeout1=5000, int timeout2=30000) {
|
||||
_auto.proxy(url.toStdString(), timeout1, timeout2);
|
||||
}
|
||||
|
||||
void ping() {
|
||||
_auto.ping(_testUrl->currentText());
|
||||
}
|
||||
@@ -136,7 +150,7 @@ namespace gui {
|
||||
|
||||
public Q_SLOTS:
|
||||
|
||||
void proxyAuthenticationRequired(const QNetworkProxy& p,
|
||||
void proxyAuthenticationRequired(const QNetworkProxy&,
|
||||
QAuthenticator* auth) {
|
||||
qDebug()<<"proxyAuthenticationRequired";
|
||||
_proxyAuth._realm->setText(auth->realm());
|
||||
@@ -149,7 +163,7 @@ namespace gui {
|
||||
_auto.restart();
|
||||
}
|
||||
|
||||
void authenticationRequired(QNetworkReply*, QAuthenticator* auth) {
|
||||
void authenticationRequired(QNetworkReply*, QAuthenticator*) {
|
||||
}
|
||||
|
||||
void proxyFoundSlot(const QUrl&, const QNetworkProxy& p) {
|
||||
@@ -188,13 +202,13 @@ namespace gui {
|
||||
case 1: {
|
||||
QNetworkProxy::setApplicationProxy
|
||||
(QNetworkProxy
|
||||
(QNetworkProxy::HttpProxy, _url->text(), _port->value()));
|
||||
(QNetworkProxy::HttpProxy, _url->text(), (quint16)_port->value()));
|
||||
_auto.ping(_testUrl->currentText());
|
||||
} break;
|
||||
case 2: {
|
||||
QNetworkProxy::setApplicationProxy
|
||||
(QNetworkProxy
|
||||
(QNetworkProxy::Socks5Proxy, _url->text(), _port->value(),
|
||||
(QNetworkProxy::Socks5Proxy, _url->text(), (quint16)_port->value(),
|
||||
"marwae", "marwae"));
|
||||
_auto.ping(_testUrl->currentText());
|
||||
} break;
|
||||
|
@@ -115,7 +115,7 @@
|
||||
<item row="2" column="1">
|
||||
<widget class="QSpinBox" name="_port">
|
||||
<property name="maximum">
|
||||
<number>10000</number>
|
||||
<number>65535</number>
|
||||
</property>
|
||||
<property name="value">
|
||||
<number>80</number>
|
||||
|
@@ -8,8 +8,15 @@
|
||||
#ifndef __PROXYAUTH_HXX__
|
||||
#define __PROXYAUTH_HXX__
|
||||
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wconversion"
|
||||
#include <proxyface/ui_proxyauth.hxx>
|
||||
#if QT_VERSION >= 0x050000
|
||||
#include <QtWidgets/QDialog>
|
||||
#else
|
||||
#include <QtGui/QDialog>
|
||||
#endif
|
||||
#pragma GCC diagnostic pop
|
||||
|
||||
class ProxyAuth: public QDialog, public Ui::ProxyAuth {
|
||||
Q_OBJECT
|
||||
|
@@ -28,13 +28,17 @@ namespace proxy {
|
||||
virtual List proxies(const std::string& url) {
|
||||
|
||||
qDebug()<<"************ QTPROXY ********************";
|
||||
qDebug()<<"************ QTPROXY ********************";
|
||||
|
||||
List res;
|
||||
|
||||
qDebug()<<"Query System Proxies for "<<QString::fromStdString(url)
|
||||
<<" ...";
|
||||
qDebug()<<" ... 1. setup query";
|
||||
QNetworkProxyQuery proxyQuery(QUrl(QString::fromStdString(url)));
|
||||
qDebug()<<" ... 2. query factory";
|
||||
QList<QNetworkProxy> proxies
|
||||
(QNetworkProxyFactory::systemProxyForQuery
|
||||
(QUrl(QString::fromStdString(url))));
|
||||
(QNetworkProxyFactory::systemProxyForQuery(proxyQuery));
|
||||
qDebug()<<"Found "<<proxies.size()<<" System Proxies.";
|
||||
|
||||
for (QList<QNetworkProxy>::const_iterator proxy(proxies.begin());
|
||||
proxy!=proxies.end(); ++proxy) {
|
||||
|
@@ -8,6 +8,7 @@
|
||||
#ifndef PROXY_LINUX
|
||||
#define PROXY_LINUX
|
||||
|
||||
#ifdef HAVE_PROXY
|
||||
extern "C" {
|
||||
#include <proxy.h>
|
||||
}
|
||||
@@ -60,3 +61,4 @@ namespace proxy {
|
||||
}
|
||||
|
||||
#endif
|
||||
#endif
|
||||
|
@@ -8,6 +8,9 @@
|
||||
#ifndef PROXY_WINDOZE_HXX
|
||||
#define PROXY_WINDOZE_HXX
|
||||
|
||||
#ifdef DATADIR
|
||||
#undef DATADIR
|
||||
#endif
|
||||
#include <windows.h>
|
||||
#include <winhttp.h>
|
||||
|
||||
|
@@ -10,8 +10,8 @@
|
||||
|
||||
namespace proxy {
|
||||
std::string version() {
|
||||
return "@PACKAGENAME@-@VERSION@";
|
||||
return PACKAGE_NAME "-" PACKAGE_VERSION;
|
||||
}
|
||||
const std::string WHAT("#(@) @PACKAGENAME@-@VERSION@");
|
||||
const std::string IDENT("$Id: @PACKAGENAME@-@VERSION@ $");
|
||||
const std::string WHAT("#(@) " PACKAGE_NAME "-" PACKAGE_VERSION);
|
||||
const std::string IDENT("$Id: " PACKAGE_NAME "-" PACKAGE_VERSION " $");
|
||||
}
|
33
src/version.hxx
Normal file
@@ -0,0 +1,33 @@
|
||||
/*! @file
|
||||
|
||||
@id $Id$
|
||||
*/
|
||||
// 1 2 3 4 5 6 7 8
|
||||
// 45678901234567890123456789012345678901234567890123456789012345678901234567890
|
||||
|
||||
#include <string>
|
||||
|
||||
namespace NAMESPACE {
|
||||
/// get package string which consists of package name and package version
|
||||
std::string package_string();
|
||||
/// get package name
|
||||
std::string package_name();
|
||||
/// get package version
|
||||
std::string version();
|
||||
/// get code build date
|
||||
std::string build_date();
|
||||
/// get author, i.e. copyright holder
|
||||
std::string author();
|
||||
/// get short package description (1st line of README)
|
||||
std::string description();
|
||||
/// get long package description (starting at 3rd line in README)
|
||||
std::string readme();
|
||||
/// get package logo file name
|
||||
std::string logo();
|
||||
/// get package icon file name
|
||||
std::string icon();
|
||||
/// used for <code>what filename</code>
|
||||
extern const std::string WHAT;
|
||||
/// used for <code>ident filename</code>
|
||||
extern const std::string IDENT;
|
||||
}
|
161
template.sh
Executable file
@@ -0,0 +1,161 @@
|
||||
#!/bin/bash -e
|
||||
|
||||
# documentation: run with option --help
|
||||
|
||||
##########################################################################################
|
||||
#### template for bash scripts #### START BELOW ##########################################
|
||||
##########################################################################################
|
||||
|
||||
############################################################################ begin logging
|
||||
# check if stdout is a terminal...
|
||||
if test -t 1; then
|
||||
|
||||
# see if it supports colors...
|
||||
ncolors=$(tput colors)
|
||||
|
||||
if test -n "$ncolors" && test $ncolors -ge 8; then
|
||||
bold="$(tput bold)"
|
||||
underline="$(tput smul)"
|
||||
standout="$(tput smso)"
|
||||
normal="$(tput sgr0)"
|
||||
black="$(tput setaf 0)"
|
||||
red="$(tput setaf 1)"
|
||||
green="$(tput setaf 2)"
|
||||
yellow="$(tput setaf 3)"
|
||||
blue="$(tput setaf 4)"
|
||||
magenta="$(tput setaf 5)"
|
||||
cyan="$(tput setaf 6)"
|
||||
white="$(tput setaf 7)"
|
||||
fi
|
||||
fi
|
||||
|
||||
append_msg() {
|
||||
if test $# -ne 0; then
|
||||
echo -n ": ${bold}$*"
|
||||
fi
|
||||
echo "${normal}"
|
||||
}
|
||||
|
||||
# write a message
|
||||
message() {
|
||||
if test $# -eq 0; then
|
||||
return
|
||||
fi
|
||||
echo "${bold}${white}$*${normal}" 1>&2
|
||||
}
|
||||
|
||||
# write a success message
|
||||
success() {
|
||||
echo -n "${bold}${green}success" 1>&2
|
||||
append_msg "$*" 1>&2
|
||||
}
|
||||
|
||||
# write a notice
|
||||
notice() {
|
||||
echo -n "${bold}${yellow}notice" 1>&2
|
||||
append_msg "$*" 1>&2
|
||||
}
|
||||
|
||||
# write a warning message
|
||||
warning() {
|
||||
echo -en "${bold}${red}warning" 1>&2
|
||||
append_msg "$*" 1>&2
|
||||
}
|
||||
|
||||
# write error message
|
||||
error() {
|
||||
echo -en "${bold}${red}error" 1>&2
|
||||
append_msg "$*" 1>&2
|
||||
}
|
||||
|
||||
# run a command, print the result and abort in case of error
|
||||
# option: --ignore: ignore the result, continue in case of error
|
||||
run() {
|
||||
ignore=1
|
||||
while test $# -gt 0; do
|
||||
case "$1" in
|
||||
(--ignore) ignore=0;;
|
||||
(*) break;;
|
||||
esac
|
||||
shift;
|
||||
done
|
||||
echo -n "${bold}${yellow}running:${white} $*${normal} … "
|
||||
set +e
|
||||
result=$($* 2>&1)
|
||||
res=$?
|
||||
set -e
|
||||
if test $res -ne 0; then
|
||||
if test $ignore -eq 1; then
|
||||
error "failed with return code: $res"
|
||||
if test -n "$result"; then
|
||||
echo "$result"
|
||||
fi
|
||||
exit 1
|
||||
else
|
||||
warning "ignored return code: $res"
|
||||
return 0
|
||||
fi
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
############################################################################ error handler
|
||||
function traperror() {
|
||||
set +x
|
||||
local err=($1) # error status
|
||||
local line="$2" # LINENO
|
||||
local linecallfunc="$3"
|
||||
local command="$4"
|
||||
local funcstack="$5"
|
||||
IFS=" "
|
||||
for e in ${err[@]}; do
|
||||
if test -n "$e" -a "$e" != "0"; then
|
||||
error "line $line - command '$command' exited with status: $e (${err[@]})"
|
||||
if [ "${funcstack}" != "main" -o "$linecallfunc" != "0" ]; then
|
||||
echo -n " ... error at ${funcstack} " 1>&2
|
||||
if [ "$linecallfunc" != "" ]; then
|
||||
echo -n "called at line $linecallfunc" 1>&2
|
||||
fi
|
||||
echo
|
||||
fi
|
||||
exit $e
|
||||
fi
|
||||
done
|
||||
exit 0
|
||||
}
|
||||
|
||||
# catch errors
|
||||
trap 'traperror "$? ${PIPESTATUS[@]}" $LINENO $BASH_LINENO "$BASH_COMMAND" "${FUNCNAME[@]}" "${FUNCTION}"' ERR SIGINT INT TERM EXIT
|
||||
|
||||
|
||||
|
||||
##########################################################################################
|
||||
#### START HERE ##########################################################################
|
||||
##########################################################################################
|
||||
|
||||
######################################################### commandline parameter evaluation
|
||||
while test $# -gt 0; do
|
||||
case "$1" in
|
||||
(--help|-h) cat <<EOF
|
||||
SYNOPSIS
|
||||
|
||||
$0 [OPTIONS]
|
||||
|
||||
OPTIONS
|
||||
|
||||
--help, -h show this help
|
||||
|
||||
DESCRIPTION
|
||||
|
||||
EOF
|
||||
exit;;
|
||||
(*) error "unknown option $1, try $0 --help"; exit 1;;
|
||||
esac
|
||||
if test $# -eq 0; then
|
||||
error "missing parameter, try $0 --help"; exit 1
|
||||
fi
|
||||
shift;
|
||||
done
|
||||
|
||||
##################################################################################### Main
|
@@ -1,18 +0,0 @@
|
||||
./bootstrap.sh && \
|
||||
LDFLAGS="-L/usr/lib32 -m32" CXXFLAGS="-m32" ./configure \
|
||||
--libdir=/usr/local/lib32 \
|
||||
--build=x86_64 \
|
||||
--host=i386 && \
|
||||
sudo make uninstall && \
|
||||
make clean && \
|
||||
LDFLAGS="-L/opt/local/i586-mingw32msvc/lib" \
|
||||
CPPFLAGS="-I/opt/local/i586-mingw32msvc/include" \
|
||||
./configure \
|
||||
--prefix=/opt/local/i586-mingw32msvc \
|
||||
--build=x86_64 \
|
||||
--host=i586-mingw32msvc && \
|
||||
sudo make uninstall && \
|
||||
make clean && \
|
||||
./configure && \
|
||||
sudo make uninstall
|
||||
|