From: Jeff Mahoney <jeffm@suse.com>
Subject: apparmor: Use autoconf

The apparmor build system is currently a mishmash of hand-coded makefiles
that don't do anything particularly original. This patch unifies the
build system to use a single configure script.

- Pulls in the relevant m4 macro files
- Adds relevant "needed" autoconf files to package root directory
- Removes the old autoconf files from subdirectories
- Converts hand-written makefiles to Makefile.am
- Adds missing includes/defines as needed

- I generally don't touch the files that are already autogenerated. It's
expected that autoreconf will be run before the next tarball is
produced. There's no sense in maintaining the autogenerated ones in
the patch.
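
A rough sketch of the intended workflow once this is applied, assuming a
source checkout with GNU autoconf, automake, and libtool installed:

    autoreconf -i
    ./configure
    make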

Signed-off-by: Jeff Mahoney <jeffm@suse.com>
---
 AUTHORS | 1
 ChangeLog | 1
 INSTALL | 365 +
 Makefile.am | 2
 NEWS | 1
 changehat/Makefile.am | 1
 changehat/mod_apparmor/Makefile.am | 23
 changehat/mod_apparmor/apache2-mod_apparmor.spec.in | 215
 changehat/pam_apparmor/COPYING | 39
 changehat/pam_apparmor/Makefile.am | 9
 changehat/pam_apparmor/pam_apparmor.changes | 49
 changehat/pam_apparmor/pam_apparmor.spec.in | 83
 changehat/tomcat_apparmor/Makefile.am | 1
 changehat/tomcat_apparmor/tomcat_5_0/Makefile.am | 2
 changehat/tomcat_apparmor/tomcat_5_5/Makefile.am | 13
 changehat/tomcat_apparmor/tomcat_5_5/build.xml | 11
 changehat/tomcat_apparmor/tomcat_5_5/src/Makefile.am | 1
 changehat/tomcat_apparmor/tomcat_5_5/src/jni_src/Makefile.am | 17
 configure.in | 203
 deprecated/Makefile.am | 2
 deprecated/management/Makefile.am | 1
 deprecated/management/apparmor-dbus/Makefile.am | 2
 deprecated/management/apparmor-dbus/src/Makefile.am | 3
 deprecated/management/profile-editor/Makefile.am | 2
 deprecated/management/profile-editor/src/Makefile.am | 6
 deprecated/management/profile-editor/src/wxStyledTextCtrl/Makefile.am | 4
 libraries/Makefile.am | 1
 libraries/libapparmor/AUTHORS | 2
 libraries/libapparmor/ChangeLog | 1
 libraries/libapparmor/INSTALL | 236
 libraries/libapparmor/NEWS | 1
 libraries/libapparmor/README | 1
 libraries/libapparmor/autogen.sh | 42
 libraries/libapparmor/compile | 143
 libraries/libapparmor/config.guess | 1502 ----
 libraries/libapparmor/config.sub | 1714 -----
 libraries/libapparmor/doc/Makefile.am | 14
 libraries/libapparmor/install-sh | 520 -
 libraries/libapparmor/libapparmor1.spec | 178
 libraries/libapparmor/m4/ac_pod2man.m4 | 16
 libraries/libapparmor/m4/ac_python_devel.m4 | 193
 libraries/libapparmor/src/Makefile.am | 4
 libraries/libapparmor/swig/perl/Makefile.PL.in | 17
 libraries/libapparmor/swig/perl/Makefile.am | 37
 libraries/libapparmor/swig/python/Makefile.am | 2
 libraries/libapparmor/swig/ruby/Makefile.am | 29
 libraries/libapparmor/swig/ruby/extconf.rb | 37
 libraries/libapparmor/testsuite/Makefile.am | 2
 m4/ac_pod2man.m4 | 16
 m4/ac_python_devel.m4 | 209
 m4/am_path_apxs.m4 | 12
 m4/am_path_perl.m4 | 25
 m4/am_path_ruby.m4 | 115
 m4/wxwidgets.m4 | 37
 parser/Makefile.am | 81
 parser/libapparmor_re/Makefile.am | 4
 parser/libapparmor_re/regexp.y | 3082 ----------
 parser/libapparmor_re/regexp.yy | 3082 ++++++++++
 parser/parser_alias.c | 1
 parser/parser_main.c | 3
 parser/parser_policy.c | 1
 parser/parser_regex.c | 2
 parser/parser_symtab.c | 1
 parser/po/Makefile | 8
 po/Makefile.am | 2
 profiles/Makefile | 2
 tests/Makefile.am | 1
 tests/regression/Makefile.am | 1
 tests/regression/subdomain/Makefile.am | 109
 utils/Immunix/Makefile.am | 3
 utils/Makefile.PL | 15
 utils/Makefile.am | 36
 utils/po/Makefile | 8
 73 files changed, 4463 insertions(+), 8142 deletions(-)

--- /dev/null
+++ b/AUTHORS
@@ -0,0 +1 @@
+
--- /dev/null
+++ b/ChangeLog
@@ -0,0 +1 @@
+
--- /dev/null
+++ b/INSTALL
@@ -0,0 +1,365 @@
+Installation Instructions
+*************************
+
+Copyright (C) 1994, 1995, 1996, 1999, 2000, 2001, 2002, 2004, 2005,
+2006, 2007, 2008, 2009 Free Software Foundation, Inc.
+
+ Copying and distribution of this file, with or without modification,
+are permitted in any medium without royalty provided the copyright
+notice and this notice are preserved. This file is offered as-is,
+without warranty of any kind.
+
+Basic Installation
+==================
+
+ Briefly, the shell commands `./configure; make; make install' should
+configure, build, and install this package. The following
+more-detailed instructions are generic; see the `README' file for
+instructions specific to this package. Some packages provide this
+`INSTALL' file but do not implement all of the features documented
+below. The lack of an optional feature in a given package is not
+necessarily a bug. More recommendations for GNU packages can be found
+in *note Makefile Conventions: (standards)Makefile Conventions.
+
+ The `configure' shell script attempts to guess correct values for
+various system-dependent variables used during compilation. It uses
+those values to create a `Makefile' in each directory of the package.
+It may also create one or more `.h' files containing system-dependent
+definitions. Finally, it creates a shell script `config.status' that
+you can run in the future to recreate the current configuration, and a
+file `config.log' containing compiler output (useful mainly for
+debugging `configure').
+
+ It can also use an optional file (typically called `config.cache'
+and enabled with `--cache-file=config.cache' or simply `-C') that saves
+the results of its tests to speed up reconfiguring. Caching is
+disabled by default to prevent problems with accidental use of stale
+cache files.
+
+ If you need to do unusual things to compile the package, please try
+to figure out how `configure' could check whether to do them, and mail
+diffs or instructions to the address given in the `README' so they can
+be considered for the next release. If you are using the cache, and at
+some point `config.cache' contains results you don't want to keep, you
+may remove or edit it.
+
+ The file `configure.ac' (or `configure.in') is used to create
+`configure' by a program called `autoconf'. You need `configure.ac' if
+you want to change it or regenerate `configure' using a newer version
+of `autoconf'.
+
+ The simplest way to compile this package is:
+
+ 1. `cd' to the directory containing the package's source code and type
+ `./configure' to configure the package for your system.
+
+ Running `configure' might take a while. While running, it prints
+ some messages telling which features it is checking for.
+
+ 2. Type `make' to compile the package.
+
+ 3. Optionally, type `make check' to run any self-tests that come with
+ the package, generally using the just-built uninstalled binaries.
+
+ 4. Type `make install' to install the programs and any data files and
+ documentation. When installing into a prefix owned by root, it is
+ recommended that the package be configured and built as a regular
+ user, and only the `make install' phase executed with root
+ privileges.
+
+ 5. Optionally, type `make installcheck' to repeat any self-tests, but
+ this time using the binaries in their final installed location.
+ This target does not install anything. Running this target as a
+ regular user, particularly if the prior `make install' required
+ root privileges, verifies that the installation completed
+ correctly.
+
+ 6. You can remove the program binaries and object files from the
+ source code directory by typing `make clean'. To also remove the
+ files that `configure' created (so you can compile the package for
+ a different kind of computer), type `make distclean'. There is
+ also a `make maintainer-clean' target, but that is intended mainly
+ for the package's developers. If you use it, you may have to get
+ all sorts of other programs in order to regenerate files that came
+ with the distribution.
+
+ 7. Often, you can also type `make uninstall' to remove the installed
+ files again. In practice, not all packages have tested that
+ uninstallation works correctly, even though it is required by the
+ GNU Coding Standards.
+
+ 8. Some packages, particularly those that use Automake, provide `make
+ distcheck', which can by used by developers to test that all other
+ targets like `make install' and `make uninstall' work correctly.
+ This target is generally not run by end users.
+
+Compilers and Options
+=====================
+
+ Some systems require unusual options for compilation or linking that
+the `configure' script does not know about. Run `./configure --help'
+for details on some of the pertinent environment variables.
+
+ You can give `configure' initial values for configuration parameters
+by setting variables in the command line or in the environment. Here
+is an example:
+
+ ./configure CC=c99 CFLAGS=-g LIBS=-lposix
+
+ *Note Defining Variables::, for more details.
+
+Compiling For Multiple Architectures
+====================================
+
+ You can compile the package for more than one kind of computer at the
+same time, by placing the object files for each architecture in their
+own directory. To do this, you can use GNU `make'. `cd' to the
+directory where you want the object files and executables to go and run
+the `configure' script. `configure' automatically checks for the
+source code in the directory that `configure' is in and in `..'. This
+is known as a "VPATH" build.
+
+ With a non-GNU `make', it is safer to compile the package for one
+architecture at a time in the source code directory. After you have
+installed the package for one architecture, use `make distclean' before
+reconfiguring for another architecture.
+
+ On MacOS X 10.5 and later systems, you can create libraries and
+executables that work on multiple system types--known as "fat" or
+"universal" binaries--by specifying multiple `-arch' options to the
+compiler but only a single `-arch' option to the preprocessor. Like
+this:
+
+ ./configure CC="gcc -arch i386 -arch x86_64 -arch ppc -arch ppc64" \
+ CXX="g++ -arch i386 -arch x86_64 -arch ppc -arch ppc64" \
+ CPP="gcc -E" CXXCPP="g++ -E"
+
+ This is not guaranteed to produce working output in all cases, you
+may have to build one architecture at a time and combine the results
+using the `lipo' tool if you have problems.
+
+Installation Names
+==================
+
+ By default, `make install' installs the package's commands under
+`/usr/local/bin', include files under `/usr/local/include', etc. You
+can specify an installation prefix other than `/usr/local' by giving
+`configure' the option `--prefix=PREFIX', where PREFIX must be an
+absolute file name.
+
+ You can specify separate installation prefixes for
+architecture-specific files and architecture-independent files. If you
+pass the option `--exec-prefix=PREFIX' to `configure', the package uses
+PREFIX as the prefix for installing programs and libraries.
+Documentation and other data files still use the regular prefix.
+
+ In addition, if you use an unusual directory layout you can give
+options like `--bindir=DIR' to specify different values for particular
+kinds of files. Run `configure --help' for a list of the directories
+you can set and what kinds of files go in them. In general, the
+default for these options is expressed in terms of `${prefix}', so that
+specifying just `--prefix' will affect all of the other directory
+specifications that were not explicitly provided.
+
+ The most portable way to affect installation locations is to pass the
+correct locations to `configure'; however, many packages provide one or
+both of the following shortcuts of passing variable assignments to the
+`make install' command line to change installation locations without
+having to reconfigure or recompile.
+
+ The first method involves providing an override variable for each
+affected directory. For example, `make install
+prefix=/alternate/directory' will choose an alternate location for all
+directory configuration variables that were expressed in terms of
+`${prefix}'. Any directories that were specified during `configure',
+but not in terms of `${prefix}', must each be overridden at install
+time for the entire installation to be relocated. The approach of
+makefile variable overrides for each directory variable is required by
+the GNU Coding Standards, and ideally causes no recompilation.
+However, some platforms have known limitations with the semantics of
+shared libraries that end up requiring recompilation when using this
+method, particularly noticeable in packages that use GNU Libtool.
+
+ The second method involves providing the `DESTDIR' variable. For
+example, `make install DESTDIR=/alternate/directory' will prepend
+`/alternate/directory' before all installation names. The approach of
+`DESTDIR' overrides is not required by the GNU Coding Standards, and
+does not work on platforms that have drive letters. On the other hand,
+it does better at avoiding recompilation issues, and works well even
+when some directory options were not specified in terms of `${prefix}'
+at `configure' time.
+
+Optional Features
+=================
+
+ If the package supports it, you can cause programs to be installed
+with an extra prefix or suffix on their names by giving `configure' the
+option `--program-prefix=PREFIX' or `--program-suffix=SUFFIX'.
+
+ Some packages pay attention to `--enable-FEATURE' options to
+`configure', where FEATURE indicates an optional part of the package.
+They may also pay attention to `--with-PACKAGE' options, where PACKAGE
+is something like `gnu-as' or `x' (for the X Window System). The
+`README' should mention any `--enable-' and `--with-' options that the
+package recognizes.
+
+ For packages that use the X Window System, `configure' can usually
+find the X include and library files automatically, but if it doesn't,
+you can use the `configure' options `--x-includes=DIR' and
+`--x-libraries=DIR' to specify their locations.
+
+ Some packages offer the ability to configure how verbose the
+execution of `make' will be. For these packages, running `./configure
+--enable-silent-rules' sets the default to minimal output, which can be
+overridden with `make V=1'; while running `./configure
+--disable-silent-rules' sets the default to verbose, which can be
+overridden with `make V=0'.
+
+Particular systems
+==================
+
+ On HP-UX, the default C compiler is not ANSI C compatible. If GNU
+CC is not installed, it is recommended to use the following options in
+order to use an ANSI C compiler:
+
+ ./configure CC="cc -Ae -D_XOPEN_SOURCE=500"
+
+and if that doesn't work, install pre-built binaries of GCC for HP-UX.
+
+ On OSF/1 a.k.a. Tru64, some versions of the default C compiler cannot
+parse its `<wchar.h>' header file. The option `-nodtk' can be used as
+a workaround. If GNU CC is not installed, it is therefore recommended
+to try
+
+ ./configure CC="cc"
+
+and if that doesn't work, try
+
+ ./configure CC="cc -nodtk"
+
+ On Solaris, don't put `/usr/ucb' early in your `PATH'. This
+directory contains several dysfunctional programs; working variants of
+these programs are available in `/usr/bin'. So, if you need `/usr/ucb'
+in your `PATH', put it _after_ `/usr/bin'.
+
+ On Haiku, software installed for all users goes in `/boot/common',
+not `/usr/local'. It is recommended to use the following options:
+
+ ./configure --prefix=/boot/common
+
+Specifying the System Type
+==========================
+
+ There may be some features `configure' cannot figure out
+automatically, but needs to determine by the type of machine the package
+will run on. Usually, assuming the package is built to be run on the
+_same_ architectures, `configure' can figure that out, but if it prints
+a message saying it cannot guess the machine type, give it the
+`--build=TYPE' option. TYPE can either be a short name for the system
+type, such as `sun4', or a canonical name which has the form:
+
+ CPU-COMPANY-SYSTEM
+
+where SYSTEM can have one of these forms:
+
+ OS
+ KERNEL-OS
+
+ See the file `config.sub' for the possible values of each field. If
+`config.sub' isn't included in this package, then this package doesn't
+need to know the machine type.
+
+ If you are _building_ compiler tools for cross-compiling, you should
+use the option `--target=TYPE' to select the type of system they will
+produce code for.
+
+ If you want to _use_ a cross compiler, that generates code for a
+platform different from the build platform, you should specify the
+"host" platform (i.e., that on which the generated programs will
+eventually be run) with `--host=TYPE'.
+
+Sharing Defaults
+================
+
+ If you want to set default values for `configure' scripts to share,
+you can create a site shell script called `config.site' that gives
+default values for variables like `CC', `cache_file', and `prefix'.
+`configure' looks for `PREFIX/share/config.site' if it exists, then
+`PREFIX/etc/config.site' if it exists. Or, you can set the
+`CONFIG_SITE' environment variable to the location of the site script.
+A warning: not all `configure' scripts look for a site script.
+
+Defining Variables
+==================
+
+ Variables not defined in a site shell script can be set in the
+environment passed to `configure'. However, some packages may run
+configure again during the build, and the customized values of these
+variables may be lost. In order to avoid this problem, you should set
+them in the `configure' command line, using `VAR=value'. For example:
+
+ ./configure CC=/usr/local2/bin/gcc
+
+causes the specified `gcc' to be used as the C compiler (unless it is
+overridden in the site shell script).
+
+Unfortunately, this technique does not work for `CONFIG_SHELL' due to
+an Autoconf bug. Until the bug is fixed you can use this workaround:
+
+ CONFIG_SHELL=/bin/bash /bin/bash ./configure CONFIG_SHELL=/bin/bash
+
+`configure' Invocation
+======================
+
+ `configure' recognizes the following options to control how it
+operates.
+
+`--help'
+`-h'
+ Print a summary of all of the options to `configure', and exit.
+
+`--help=short'
+`--help=recursive'
+ Print a summary of the options unique to this package's
+ `configure', and exit. The `short' variant lists options used
+ only in the top level, while the `recursive' variant lists options
+ also present in any nested packages.
+
+`--version'
+`-V'
+ Print the version of Autoconf used to generate the `configure'
+ script, and exit.
+
+`--cache-file=FILE'
+ Enable the cache: use and save the results of the tests in FILE,
+ traditionally `config.cache'. FILE defaults to `/dev/null' to
+ disable caching.
+
+`--config-cache'
+`-C'
+ Alias for `--cache-file=config.cache'.
+
+`--quiet'
+`--silent'
+`-q'
+ Do not print messages saying which checks are being made. To
+ suppress all normal output, redirect it to `/dev/null' (any error
+ messages will still be shown).
+
+`--srcdir=DIR'
+ Look for the package's source code in directory DIR. Usually
+ `configure' can determine that directory automatically.
+
+`--prefix=DIR'
+ Use DIR as the installation prefix. *note Installation Names::
+ for more details, including other options available for fine-tuning
+ the installation locations.
+
+`--no-create'
+`-n'
+ Run the configure checks, but stop before creating any output
+ files.
+
+`configure' also accepts some other, not widely useful, options. Run
+`configure --help' for more details.
+
--- /dev/null
+++ b/Makefile.am
@@ -0,0 +1,2 @@
+ACLOCAL_AMFLAGS = -Im4
+SUBDIRS = libraries parser changehat deprecated profiles tests utils po
--- /dev/null
+++ b/NEWS
@@ -0,0 +1 @@
+
--- /dev/null
+++ b/changehat/Makefile.am
@@ -0,0 +1 @@
+SUBDIRS = mod_apparmor pam_apparmor tomcat_apparmor
--- /dev/null
+++ b/changehat/mod_apparmor/Makefile.am
@@ -0,0 +1,23 @@
+if HAVE_APACHE
+INCLUDES = "-I../../libraries/libapparmor/src"
+LIBAPPARMOR="../../libraries/libapparmor/src/libapparmor.la"
+
+all-local: module
+
+module: mod_apparmor.c
+ if test "$(srcdir)" != "."; then $(CP) $(srcdir)/mod_apparmor.c . ; fi
+ $(APXS) -c $(INCLUDES) $(LIBAPPARMOR) $<
+
+install-exec-local: module
+ $(MKDIR_P) $(DESTDIR)$(apache_moduledir)
+ $(APXS) -S LIBEXECDIR=$(DESTDIR)$(apache_moduledir) -i mod_apparmor.la
+
+man_MANS = mod_apparmor.8
+
+PODARGS = --center=AppArmor --release=NOVELL/SUSE
+
+pod2man = pod2man $(PODARGS) --section $(subst .,,$(suffix $<)) $< > $@
+
+.pod.8:
+ $(pod2man)
+endif
--- a/changehat/mod_apparmor/apache2-mod_apparmor.spec.in
+++ /dev/null
@@ -1,215 +0,0 @@
-# ----------------------------------------------------------------------
-# Copyright (c) 2004, 2005 NOVELL (All rights reserved)
-#
-# This program is free software; you can redistribute it and/or
-# modify it under the terms of version 2 of the GNU General Public
-# License published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, contact Novell, Inc.
-# ----------------------------------------------------------------------
-# norootforbuild
-
-# Check first to see if distro is already defined.
-# I hate rpm macros
-%if ! %{?distro:1}0
-%if %{?suse_version:1}0
- %define distro suse
-%endif
-%if %{?fedora_version:1}0
- %define distro redhat
-%endif
-%endif
-%if ! %{?distro:1}0
- %define distro suse
-%endif
-
-# this is required to be underscore
-%define module_name mod_apparmor
-
-Summary: AppArmor module for apache2.
-Name: apache2-mod_apparmor
-Version: @@immunix_version@@
-Release: @@repo_version@@
-Group: Applications/System
-Source0: %{name}-%{version}-@@repo_version@@.tar.gz
-License: LGPL
-BuildRoot: %{?_tmppath:}%{!?_tmppath:/var/tmp}/%{name}-%{version}-build
-Url: http://forge.novell.com/modules/xfmod/project/?apparmor
-Obsoletes: mod_change_hat mod-change-hat mod-apparmor apache2-mod-apparmor
-Provides: mod_change_hat mod-change-hat mod-apparmor apache2-mod-apparmor
-
-%if %{distro} == "suse"
-%if 0%{?suse_version} < 1010
-BuildRequires: libimmunix
-%else
-%if 0%{?suse_version} < 1030
-BuildRequires: libapparmor
-%else
-BuildRequires: libapparmor-devel
-%endif
-%endif
-%else
-BuildRequires: libapparmor-devel
-%endif
-
-%if %{distro} == "suse"
-%define apxs /usr/sbin/apxs2
-%define apache_mmn %(MMN=$(%{apxs} -q LIBEXECDIR)_MMN; test -x $MMN && $MMN)
-Prereq: apache2-prefork
-Prereq: apparmor-parser
-BuildRequires: apache2-devel
-Requires: apache2 %{apache_mmn}
-%else
-%if %{distro} == "redhat" || %{distro} == "rhel4"
-%define apxs /usr/sbin/apxs
-Prereq: httpd
-BuildRequires: httpd-devel
-%endif
-%endif
-%define module_path %(%{apxs} -q LIBEXECDIR)
-%define apache_sysconfdir %(%{apxs} -q SYSCONFDIR)
-
-%description
-apache2-mod_apparmor adds support to apache2 to provide AppArmor confinement
-to individual cgi scripts handled by apache modules like mod_php and
-mod_perl.
-This package is part of a suite of tools that used to be named SubDomain.
-
-%prep
-
-%setup -q
-
-%build
-make APXS=%{apxs}
-
-%install
-make install DESTDIR=${RPM_BUILD_ROOT} DISTRO=%{distro} MANDIR=%{_mandir}
-
-%if %{distro} == "suse"
- mkdir -p ${RPM_BUILD_ROOT}%{_libdir}/apache2-prefork/
- ln -s %{module_path}/%{module_name}.so ${RPM_BUILD_ROOT}%{_libdir}/apache2-prefork/%{module_name}.so
-%else
- %if %{distro} == "redhat" || %{distro} == "rhel4"
- mkdir -p ${RPM_BUILD_ROOT}/%{apache_sysconfdir}.d/
- install -m 644 %{module_name}.conf ${RPM_BUILD_ROOT}/%{apache_sysconfdir}.d/
- %endif
-%endif
-
-%clean
-[ "$RPM_BUILD_ROOT" != "/" ] && rm -rf $RPM_BUILD_ROOT
-
-%files
-%defattr(-,root,root)
-%{module_path}
-%if %{distro} == "suse"
- %{_libdir}/apache2-prefork/%{module_name}.so
-%else
- %if %{distro} == "redhat" || %{distro} == "rhel4"
- %{apache_sysconfdir}.d/%{module_name}.conf
- %endif
-%endif
-%doc COPYING.LGPL
-%{_mandir}/man*/*
-%doc *.[0-9].html
-%doc common/apparmor.css
-
-%post
-%if %{distro} == "suse"
- /usr/sbin/a2enmod apparmor
-%endif
-
-%preun
-%if %{distro} == "suse"
- if [ $1 = 0 ] ; then
- /usr/sbin/a2dismod apparmor
- fi
-%endif
-
-%triggerpostun -- mod_change_hat mod-change-hat
-%if %{distro} == "suse"
- /usr/sbin/a2enmod apparmor
-%endif
-
-%changelog
-* Sun Jul 29 2007 - sbeattie@suse.de
-- Convert builddep on libapparmor to libapparmor-devel
-* Tue Apr 3 2007 - sbeattie@suse.de
-- Add mod_apparmor manpage to package
-* Wed Sep 06 2006 - poeml@suse.de
-- rename to apache2-mod_apparmor
-- use a2enmod instead of frob_sysconfig
-- remove SuSEconfig calls
-* Fri May 26 2006 - schwab@suse.de
-- Don't strip binaries.
-* Wed Apr 12 2006 - Steve Beattie <sbeattie@suse.de>
-- Move to novell forge svn repo; fix build issue with new layout
-* Thu Mar 30 2006 - Seth Arnold <seth.arnold@suse.de> 2.0-7.2
-- Relicense to LGPL
-* Mon Jan 30 2006 - Steve Beattie <sbeattie@suse.de> 2.0-7.1
-- Renamed apache config options:
- ImmhatName -> AAHatName
- ImmDefaultHatName -> AADefaultHatName
-* Mon Jan 30 2006 - poeml@suse.de
-- removed libapr-util1-devel from BuildRequires (apache2-devel does
- require it)
-* Fri Jan 27 2006 Steve Beattie <sbeattie@suse.de> 2.0-6.1
-- No more neededforbuild in STABLE
-* Wed Jan 25 2006 Steve Beattie <sbeattie@suse.de> 2.0-6
-- Fix linking against libapparmor.so
-* Sun Jan 8 2006 Steve Beattie <sbeattie@suse.de> 2.0-5
-- More SUSE autobuild fixups.
-* Wed Jan 4 2006 Steve Beattie <sbeattie@suse.de> 2.0-4
-- Fixup SUSE autobuild require on apache-devel-packages
-- Add svn revision to the source tarball
-* Sun Dec 18 2005 Steve Beattie <sbeattie@novell.com> 2.0-3
-- Include symlink in %{_libdir}/apache2-prefork/
-* Thu Dec 8 2005 Steve Beattie <sbeattie@novell.com> 2.0-2
-- Rename to apache2-mod-apparmor for consistency w/SUSE packages
-- Rename module to mod_apparmor.so
-* Wed Dec 7 2005 Steve Beattie <sbeattie@novell.com> 2.0-1
-- Reset version for inclusion in SUSE autobuild
-* Mon Dec 5 2005 Steve Beattie <sbeattie@novell.com> 1.99-9
-- Rename package to mod-apparmor
-* Wed Nov 30 2005 Steve Beattie <sbeattie@novell.com> 1.99-8
-- Minor packaging cleanups
-* Wed Nov 30 2005 Steve Beattie <sbeattie@novell.com> 1.99-7_imnx
-- Convert license to GPL
-* Thu Jun 23 2005 Steve Beattie <sbeattie@novell.com> 1.99-6_imnx
-- Add trigger for mod_change_hat => mod-change-hat upgrades
-- Don't run SuSEconfig on SuSE 9.3 or newer
-* Mon May 23 2005 Steve Beattie <sbeattie@novell.com> 1.99-5_imnx
-- Fix package uninstall on RHEL4.
-* Fri Mar 11 2005 Steve Beattie <steve@immunix.com> 1.99-4_imnx
-- Rename to be consistent with other packages
-* Fri Feb 18 2005 Steve Beattie <steve@immunix.com> 1.99-3_imnx
-- Cleanup some non-64bit clean code, sigh.
-- Fix install locations on 64-bit platform.
-* Fri Feb 4 2005 Seth Arnold <sarnold@immunix.coM> 1.99-1_imnx
-- Reversion to 1.99
-* Fri Nov 12 2004 Steve Beattie <steve@immunix.com> 1.2-2_imnx
-- Add configuration file for redhat build
-* Tue Oct 12 2004 Steve Beattie <steve@immunix.com> 1.2-1_imnx
-- Bump version after shass-1.1 branched off
-* Mon Sep 20 2004 Dominic Reynolds <dominic@immunix.com> 1.0-7_imnx_(redhat|suse)
-- Modified to build separate versions for suse/redhat (EL3).
-- Note:RH version does not currently setup the module configuraiton
-- in apache.
-* Tue Aug 31 2004 Steve Beattie <steve@immunix.com> 1.0-6_imnx
-- Got location and per server config directives working somewhat
- correctly :-)
-- copyright fixups.
-* Fri Aug 20 2004 Steve Beattie <steve@immunix.com> 1.0-5_imnx
-- added support for <Location> hatname </Location>
-* Wed Jul 21 2004 Steve Beattie <steve@immunix.com> 1.0-4_imnx
-- reduced loglevel of some debug messages
-- add change_hat to list of apache modules
-* Tue Jul 20 2004 Steve Beattie <steve@immunix.com> 1.0-2_imnx
-- got module actually working, at least in simple cases.
-* Thu Jul 15 2004 Steve Beattie <steve@immunix.com> 1.0-1_imnx
-- Initial package creation.
--- a/changehat/pam_apparmor/COPYING
+++ /dev/null
@@ -1,39 +0,0 @@
-The pam_apparmor package is licensed under the same license as Linux-PAM
-<http://www.kernel.org/pub/linux/libs/pam/>, quoted below:
-
--------------------------------------------------------------------------
-Redistribution and use in source and binary forms of Linux-PAM, with
-or without modification, are permitted provided that the following
-conditions are met:
-
-1. Redistributions of source code must retain any existing copyright
- notice, and this entire permission notice in its entirety,
- including the disclaimer of warranties.
-
-2. Redistributions in binary form must reproduce all prior and current
- copyright notices, this list of conditions, and the following
- disclaimer in the documentation and/or other materials provided
- with the distribution.
-
-3. The name of any author may not be used to endorse or promote
- products derived from this software without their specific prior
- written permission.
-
-ALTERNATIVELY, this product may be distributed under the terms of the
-GNU General Public License, in which case the provisions of the GNU
-GPL are required INSTEAD OF the above restrictions. (This clause is
-necessary due to a potential conflict between the GNU GPL and the
-restrictions contained in a BSD-style copyright.)
-
-THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED
-WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
-IN NO EVENT SHALL THE AUTHOR(S) BE LIABLE FOR ANY DIRECT, INDIRECT,
-INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
-OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
-TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
-USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
-DAMAGE.
--------------------------------------------------------------------------
--- /dev/null
+++ b/changehat/pam_apparmor/Makefile.am
@@ -0,0 +1,9 @@
+if HAVE_PAM
+securitydir = $(libdir)/security
+security_LTLIBRARIES = pam_apparmor.la
+pam_apparmor_la_SOURCES = pam_apparmor.c get_options.c
+pam_apparmor_la_LDFLAGS = -module -avoid-version
+pam_apparmor_la_LIBADD = ../../libraries/libapparmor/src/libapparmor.la -lpam
+
+INCLUDES = "-I../../libraries/libapparmor/src"
+endif
--- a/changehat/pam_apparmor/pam_apparmor.changes
+++ /dev/null
@@ -1,49 +0,0 @@
--------------------------------------------------------------------
-Mon Jul 30 08:16:39 CEST 2007 - sbeattie@suse.de
-
-- Convert libapparmor builddep to libapparmor-devel
-
--------------------------------------------------------------------
-Tue Mar 13 10:27:34 PDT 2007 - jmichael@suse.de
-
-- Use pam_modutil_* wrapper functions when possible
-
--------------------------------------------------------------------
-Tue Oct 31 12:00:00 UTC 2006 - jmichael@suse.de
-
-- Add debug option
-
--------------------------------------------------------------------
-Tue Oct 31 12:00:00 UTC 2006 - sbeattie@suse.de
-
-- Add configuration options to order attempted hat changes
-
--------------------------------------------------------------------
-Wed Oct 25 12:00:00 UTC 2006 - sbeattie@suse.de
-
-- remove auto-editing of pam's common-session
-- honor RPM's CFLAGS when building
-- add license (same as Linux PAM package).
-
--------------------------------------------------------------------
-Thu Sep 14 12:00:00 UTC 2006 - jmichael@suse.de
-
-- header comment was incorrect
-- use pam_get_user() instead of pam_get_item()
-- fix read from urandom if 0
-
--------------------------------------------------------------------
-Fri Jan 13 12:00:00 UTC 2006 - sbeattie@suse.de
-
-- Add svn repo number to tarball
-
--------------------------------------------------------------------
-Fri Jan 13 12:00:00 UTC 2006 - jmichael@suse.de
-
-- Make magic tokens harder to guess by pulling them from /dev/urandom
-
--------------------------------------------------------------------
-Wed Dec 21 10:31:40 PST 2005 - jmichael@suse.de
-
-- initial
-
--- a/changehat/pam_apparmor/pam_apparmor.spec.in
+++ /dev/null
@@ -1,83 +0,0 @@
-#
-# spec file for package pam_apparmor (Version 2)
-#
-# Copyright (c) 2005 SUSE LINUX Products GmbH, Nuernberg, Germany.
-# This file and all modifications and additions to the pristine
-# package are under the same license as the package itself.
-#
-# Please submit bugfixes or comments via http://www.suse.de/feedback/
-#
-
-# norootforbuild
-
-Name: pam_apparmor
-License: GPL
-Group: Productivity/Security
-Autoreqprov: on
-Version: @@immunix_version@@
-Release: @@repo_version@@
-Summary: Pam module to add AppArmor change_hat functionality
-URL: http://forge.novell.com/modules/xfmod/project/?apparmor
-Source: pam_apparmor-%{version}-@@repo_version@@.tar.gz
-BuildRoot: %{_tmppath}/%{name}-%{version}-build
-BuildRequires: pam-devel
-Requires: pam
-Prereq: pam
-
-%if %{?suse_version:1}0
-%if 0%{?suse_version} < 1030
-BuildRequires: libapparmor
-%else
-BuildRequires: libapparmor-devel
-%endif
-%else
-BuildRequires: libapparmor-devel
-%endif
-
-%description
-The pam_apparmor module provides the means for any pam applications that
-call pam_open_session() to automatically perform an AppArmor change_hat
-operation in order to switch to a user-specific security policy.
-
-
-Authors:
---------
- Jesse Michael jmichael@suse.de
-
-%prep
-%setup -q
-
-%build
-make CFLAGS="${RPM_OPT_FLAGS}"
-
-%install
-[ "$RPM_BUILD_ROOT" != "/" ] && rm -rf $RPM_BUILD_ROOT
-make install DESTDIR=${RPM_BUILD_ROOT} SECDIR=${RPM_BUILD_ROOT}/%{_lib}/security
-
-%clean
-[ "${RPM_BUILD_ROOT}" != "/" ] && rm -rf ${RPM_BUILD_ROOT}
-
-%files
-%defattr(444,root,root,755)
-%doc README COPYING
-%attr(555,root,root) /%{_lib}/security/pam_apparmor.so
-
-%changelog -n pam_apparmor
-* Tue Oct 31 2006 Jesse Michael <jmichael@suse.de>
-- Add debug option
-* Tue Oct 31 2006 Steve Beattie <sbeattie@suse.de>
-- Add configuration options to order attempted hat changes
-* Wed Oct 25 2006 Steve Beattie <sbeattie@suse.de>
-- remove auto-editing of pam's common-session
-- honor RPM's CFLAGS when building
-- add license (same as Linux PAM package).
-* Thu Sep 14 2006 Jesse Michael <jmichael@suse.de>
-- header comment was incorrect
-- use pam_get_user() instead of pam_get_item()
-- fix read from urandom if 0
-* Fri Jan 13 2006 Steve Beattie <sbeattie@suse.de>
-- Add svn repo number to tarball
-* Fri Jan 13 2006 Jesse Michael <jmichael@suse.de>
-- Make magic tokens harder to guess by pulling them from /dev/urandom
-* Wed Dec 21 2005 - jmichael@suse.de
-- initial
--- /dev/null
+++ b/changehat/tomcat_apparmor/Makefile.am
@@ -0,0 +1 @@
+SUBDIRS = tomcat_5_0 tomcat_5_5
--- /dev/null
+++ b/changehat/tomcat_apparmor/tomcat_5_0/Makefile.am
@@ -0,0 +1,2 @@
+
+
--- /dev/null
+++ b/changehat/tomcat_apparmor/tomcat_5_5/Makefile.am
@@ -0,0 +1,13 @@
+
+changeHatValve.jar: $(srcdir)/src/com/novell/apparmor/JNIChangeHat.java \
+ $(srcdir)/src/com/novell/apparmor/catalina/valves/ChangeHatValve.java
+ ant -Dinstall_lib=$(libdir) -Ddist=$(abs_srcdir) \
+ -Dant.build.javac.source=1.5 jar
+
+catalinadir = $(datadir)/tomcat6/lib
+
+catalina_DATA = changeHatValve.jar
+
+CLEANFILES = $(catalina_DATA)
+
+SUBDIRS = . src
--- a/changehat/tomcat_apparmor/tomcat_5_5/build.xml
+++ b/changehat/tomcat_apparmor/tomcat_5_5/build.xml
@@ -38,9 +38,16 @@
 <fileset refid="tomcat.jars"/>
 </path>
 
+ <!--
+ <target name="compile" description="Compile code">
+ <javac srcdir="${src}" destdir="${build}" includeAntRuntime="no"
+ classpathref="lib.path" debug="${compile.debug}"
+ source="${target}" target="${target}">
+ </javac>
+ </target>
+ -->
 <target name="compile" description="Compile code">
 <mkdir dir="${build}"/>
- <mkdir dir="${lib}"/>
 <javac srcdir="${src}" destdir="${build}" includeAntRuntime="no"
 classpathref="lib.path" debug="${compile.debug}"
 source="${target}" target="${target}">
@@ -81,6 +88,8 @@
 </target>
 
 <target name="install_jar" depends="jni_so" description="Install jar file">
+ <mkdir dir="${build}"/>
+ <mkdir dir="${lib}"/>
 <mkdir dir="${install_root}/${catalina_home}/lib/"/>
 <copy file="${jarfile}" tofile="${install_root}/${catalina_home}/lib/${ant.project.name}.jar"/>
 <chmod perm="644" file="${install_root}/${catalina_home}/lib/${ant.project.name}.jar"/>
--- /dev/null
+++ b/changehat/tomcat_apparmor/tomcat_5_5/src/Makefile.am
@@ -0,0 +1 @@
+SUBDIRS = jni_src
--- /dev/null
+++ b/changehat/tomcat_apparmor/tomcat_5_5/src/jni_src/Makefile.am
@@ -0,0 +1,17 @@
+lib_LTLIBRARIES = libJNIChangeHat.la
+
+INCLUDES = -I$(top_srcdir)/libraries/libapparmor/src -I$(JAVA_HOME)/include \
+ -I$(JAVA_HOME)/include/linux
+
+CLASSPATH = $(builddir)/../../build/
+CLASSFILE = $(CLASSPATH)/com/novell/apparmor/JNIChangeHat.class
+
+com_novell_apparmor_JNIChangeHat.h:
+ javah -jni -classpath $(CLASSPATH) com.novell.apparmor.JNIChangeHat
+
+JNIChangeHat.c : com_novell_apparmor_JNIChangeHat.h
+
+libJNIChangeHat_la_SOURCES = JNIChangeHat.c
+libJNIChangeHat_la_LIBADD = $(top_builddir)/libraries/libapparmor/src/.libs/libapparmor.la
+
+CLEANFILES = com_novell_apparmor_JNIChangeHat.h
--- /dev/null
+++ b/configure.in
@@ -0,0 +1,203 @@
+AC_INIT(apparmor, 2.5.1)
+AC_CONFIG_MACRO_DIR([m4])
+
+AM_INIT_AUTOMAKE
+
+AM_PROG_LEX
+AC_PROG_YACC
+AC_PROG_SED
+AC_HEADER_STDC
+AC_CHECK_HEADERS(unistd.h stdint.h)
+
+AC_CHECK_FUNCS(asprintf)
+
+AM_PROG_CC_C_O
+AC_PROG_CXX
+AC_C_CONST
+AM_PROG_LIBTOOL
+AC_PATH_PROG([SWIG], [swig])
+
+PROG_POD2MAN
+
+AC_ARG_WITH(perl,
+[AS_HELP_STRING([--with-perl],
+ [enable perl support for libapparmor [default=detect]])],
+[with_perl=$withval], [with_perl=auto])
+if test "$with_perl" != "no"; then
+ AM_PATH_PERL(,,[no])
+ if test "$PERL" = "no"; then
+ if test "$with_perl" = "yes"; then
+ AC_MSG_ERROR([--with-perl was given but the perl interpreter could not be found])
+ fi
+ else
+ with_perl=yes
+ fi
+fi
+
+AC_ARG_WITH(python,
+[AS_HELP_STRING([--with-python],
+ [enable python support for libapparmor [default=detect]])],
+[with_python=$withval], [with_python=auto])
+if test "$with_python" != "no"; then
+ AM_PATH_PYTHON(,,[no])
+ if test "$PYTHON" = "no"; then
+ if test "$with_python" = "yes"; then
+ AC_MSG_ERROR([--with-python was given but the python interpreter could not be found])
+ fi
+ else
+ with_python=yes
+ fi
+fi
+
+AC_ARG_WITH(ruby,
+[AS_HELP_STRING([--with-ruby],
+ [enable ruby support for libapparmor [default=detect]])],
+[with_ruby=$withval], [with_ruby=auto])
+if test "$with_ruby" != "no"; then
+ AM_PATH_RUBY(,,[no])
+
+ CPPFLAGS="$CPPFLAGS $RUBY_INCLUDES"
+ AC_CHECK_HEADER(ruby.h,,RUBY=no,[-])
+
+ if test "$RUBY" = "no"; then
+ if test "$with_ruby" = "yes"; then
+ AC_MSG_ERROR([--with-ruby was given but the ruby development environment could not be found])
+ fi
+ else
+ with_ruby=yes
+ fi
+fi
+
+AC_ARG_WITH(apache,
+[AS_HELP_STRING([--with-apache],
+ [enable the apache2 changehat module [default=detect]])],
+[with_apache=$withval], [with_apache=auto])
+if test "$with_apache" != "no"; then
+ AM_PATH_APXS(,,[no])
+ if test "$APXS" = "no"; then
+ if test "$with_apache" = "yes";then
+ AC_MSG_ERROR([--with-apache was given but the apache build environment could not be found])
+ fi
+ else
+ with_apache=yes
+ apache_moduledir=`$APXS -q LIBEXECDIR`
+ AC_SUBST(apache_moduledir)
+ fi
+fi
+
+AC_ARG_WITH(tomcat,
+[AS_HELP_STRING([--with-tomcat],
+ [enable the tomcat changehat module [default=no]])],
+[with_tomcat=$withval], [with_tomcat=no])
+
+AC_ARG_WITH(pam,
+[AS_HELP_STRING([--with-pam],
+ [enable the PAM changehat module [default=auto]])],
+[with_pam=$withval], [with_pam=auto])
+if test "$with_pam" != "no"; then
+ AC_CHECK_HEADERS([security/pam_modules.h])
+ if test "$ac_cv_header_security_pam_modules_h" != "yes"; then
+ if test "$with_pam" = "yes";then
+ AC_MSG_ERROR([--with-pam was giving but the pam build environment could not be found])
+ fi
+ else
+ with_pam=yes
+ fi
+fi
+
+AC_ARG_WITH(dbus,
+[AS_HELP_STRING([--with-dbus],
+ [enable dbus support (deprecated) [default=no]])],
+[with_dbus=$withval], [with_dbus=no])
+
+if test "$with_dbus" = "yes"; then
+ PKG_CHECK_MODULES(dbus, dbus-1, DBUS=yes, DBUS=no)
+ if test "$DBUS" = "no"; then
+ AC_MSG_ERROR([dbus could not be found])
+ fi
+fi
+
+AC_ARG_WITH(profileeditor,
+[AS_HELP_STRING([--with-profileeditor],
+ [enable profileeditor (deprecated) [default=no]])],
+[with_profileeditor=$withval], [with_profileeditor=no])
+
+if test "$with_profileeditor" = "yes"; then
+ WXTEST
+fi
+
+real_sbindir="/sbin"
+AC_SUBST(real_sbindir)
+
+etc_apparmordir="/etc/apparmor"
+AC_SUBST(etc_apparmordir)
+
+lib_apparmordir="/lib/apparmor"
+AC_SUBST(lib_apparmordir)
+
+AM_CONDITIONAL(HAVE_PYTHON, test "$with_python" = "yes")
+AM_CONDITIONAL(HAVE_PERL, test "$with_perl" = "yes")
+AM_CONDITIONAL(HAVE_RUBY, test "$with_ruby" = "yes")
+AM_CONDITIONAL(HAVE_PAM, test "$with_pam" = "yes")
+AM_CONDITIONAL(HAVE_APACHE, test "$with_apache" = "yes")
+AM_CONDITIONAL(HAVE_TOMCAT, test "$with_tomcat" = "yes")
+AM_CONDITIONAL(HAVE_DBUS, test "$with_dbus" = "yes")
+AM_CONDITIONAL(HAVE_PROFILEEDITOR, test "$with_profileeditor" = "yes")
+
+AC_OUTPUT(
+Makefile
+libraries/Makefile
+libraries/libapparmor/Makefile
+libraries/libapparmor/doc/Makefile
+libraries/libapparmor/src/Makefile
+libraries/libapparmor/swig/Makefile
+libraries/libapparmor/swig/perl/Makefile
+libraries/libapparmor/swig/python/Makefile
+libraries/libapparmor/swig/python/setup.py
+libraries/libapparmor/swig/ruby/Makefile
+libraries/libapparmor/testsuite/Makefile
+libraries/libapparmor/testsuite/config/Makefile
+libraries/libapparmor/testsuite/libaalogparse.test/Makefile
+libraries/libapparmor/testsuite/lib/Makefile
+parser/Makefile
+parser/libapparmor_re/Makefile
+changehat/Makefile
+changehat/mod_apparmor/Makefile
+changehat/pam_apparmor/Makefile
+changehat/tomcat_apparmor/Makefile
+changehat/tomcat_apparmor/tomcat_5_0/Makefile
+changehat/tomcat_apparmor/tomcat_5_5/Makefile
+changehat/tomcat_apparmor/tomcat_5_5/src/Makefile
+changehat/tomcat_apparmor/tomcat_5_5/src/jni_src/Makefile
+deprecated/Makefile
+deprecated/management/Makefile
+deprecated/management/apparmor-dbus/Makefile
+deprecated/management/apparmor-dbus/src/Makefile
+deprecated/management/profile-editor/Makefile
+deprecated/management/profile-editor/doc/Makefile
+deprecated/management/profile-editor/doc/en/Makefile
+deprecated/management/profile-editor/src/Makefile
+deprecated/management/profile-editor/src/wxStyledTextCtrl/Makefile
+tests/Makefile
+tests/regression/Makefile
+tests/regression/subdomain/Makefile
+utils/Makefile
+utils/Immunix/Makefile
+po/Makefile
+)
+
+AC_MSG_RESULT([Summary:])
+AC_MSG_RESULT([libapprmor bindings:])
+AC_MSG_RESULT([ Perl: $with_perl])
+AC_MSG_RESULT([ Python: $with_python])
+AC_MSG_RESULT([ Ruby: $with_ruby])
+AC_MSG_RESULT([changehat support:])
+AC_MSG_RESULT([ PAM: $with_pam])
+AC_MSG_RESULT([ Apache: $with_apache])
+AC_MSG_RESULT([ Tomcat: $with_tomcat])
+AC_MSG_RESULT([Deprecated management interfaces:])
+AC_MSG_RESULT([ DBUS: $with_dbus])
+AC_MSG_RESULT([ Profile Editor: $with_profileeditor])
+
+
+
--- /dev/null
+++ b/deprecated/Makefile.am
@@ -0,0 +1,2 @@
+
+SUBDIRS = management
--- /dev/null
+++ b/deprecated/management/Makefile.am
@@ -0,0 +1 @@
+SUBDIRS = apparmor-dbus profile-editor
--- a/deprecated/management/apparmor-dbus/Makefile.am
+++ b/deprecated/management/apparmor-dbus/Makefile.am
@@ -2,4 +2,6 @@
 # have all needed files, that a GNU package needs
 AUTOMAKE_OPTIONS = foreign 1.4
 
+if HAVE_DBUS
 SUBDIRS = src
+endif
--- a/deprecated/management/apparmor-dbus/src/Makefile.am
+++ b/deprecated/management/apparmor-dbus/src/Makefile.am
@@ -1,2 +1,5 @@
 bin_PROGRAMS = apparmor-dbus
+
+apparmor_dbus_CPPFLAGS = $(dbus_CFLAGS)
+apparmor_dbus_LDADD = $(dbus_LIBS) $(top_builddir)/libraries/libapparmor/src/libapparmor.la
 apparmor_dbus_SOURCES = aadbus.c
--- a/deprecated/management/profile-editor/Makefile.am
+++ b/deprecated/management/profile-editor/Makefile.am
@@ -1,2 +1,4 @@
+if HAVE_PROFILEEDITOR
 SUBDIRS = src doc
+endif
 
--- a/deprecated/management/profile-editor/src/Makefile.am
+++ b/deprecated/management/profile-editor/src/Makefile.am
@@ -12,13 +12,15 @@ bin_PROGRAMS = profileeditor
 # the application source, library search path, and link libraries
 profileeditor_SOURCES = ProfileTextCtrl.cpp Preferences.cpp AboutDialog.cpp \
 SearchAllProfiles.cpp Configuration.cpp profileeditor.cpp
-profileeditor_LDFLAGS = $(WX_LIBS)
+profileeditor_LDFLAGS = $(WX_LIBS)
 
 
 
 SUBDIRS = wxStyledTextCtrl
+profileeditor_CPPFLAGS = $(WX_CPPFLAGS)
+profileeditor_CXXFLAGS = $(WX_CXXFLAGS)
 profileeditor_LDADD =\
- $(top_builddir)/src/wxStyledTextCtrl/libAppArmorStyledTextCtrl.a
+ $(builddir)/wxStyledTextCtrl/libAppArmorStyledTextCtrl.a
 
 AM_CXXFLAGS = \
 -DHELP_FILE_LOCATION=\"$(datadir)/doc/@PACKAGE@/AppArmorProfileEditor.htb\"
--- a/deprecated/management/profile-editor/src/wxStyledTextCtrl/Makefile.am
+++ b/deprecated/management/profile-editor/src/wxStyledTextCtrl/Makefile.am
@@ -15,5 +15,5 @@ noinst_HEADERS = Accessor.h AutoComplete
 UniConversion.h ViewStyle.h WindowAccessor.h XPM.h
 
 AM_CFLAGS = -DSCI_LEXER -DLINK_LEXERS -fPIC -DPIC -DWX_PRECOMP -DNO_GCC_PRAGMA \
- -D__WXGTK__ -D__WX__
-AM_CXXFLAGS = -DSCI_LEXER -DLINK_LEXERS -fPIC -DPIC -DWX_PRECOMP -DNO_GCC_PRAGMA
+ -D__WXGTK__ -D__WX__ $(WX_CPPFLAGS)
+AM_CXXFLAGS = -DSCI_LEXER -DLINK_LEXERS -fPIC -DPIC -DWX_PRECOMP -DNO_GCC_PRAGMA $(WX_CXXFLAGS)
--- /dev/null
+++ b/libraries/Makefile.am
@@ -0,0 +1 @@
+SUBDIRS = libapparmor
--- a/libraries/libapparmor/AUTHORS
+++ /dev/null
@@ -1,2 +0,0 @@
-Steve Beattie <sbeattie@ubuntu.com>
-Matt Barringer <mbarringer@suse.de>
--- a/libraries/libapparmor/ChangeLog
+++ /dev/null
@@ -1 +0,0 @@
-
--- a/libraries/libapparmor/INSTALL
+++ /dev/null
@@ -1,236 +0,0 @@
-Installation Instructions
|
|
|
|
-*************************
|
|
|
|
-
|
|
|
|
-Copyright (C) 1994, 1995, 1996, 1999, 2000, 2001, 2002, 2004, 2005 Free
|
|
|
|
-Software Foundation, Inc.
|
|
|
|
-
|
|
|
|
-This file is free documentation; the Free Software Foundation gives
|
|
|
|
-unlimited permission to copy, distribute and modify it.
|
|
|
|
-
|
|
|
|
-Basic Installation
|
|
|
|
-==================
|
|
|
|
-
|
|
|
|
-These are generic installation instructions.
|
|
|
|
-
|
|
|
|
- The `configure' shell script attempts to guess correct values for
|
|
|
|
-various system-dependent variables used during compilation. It uses
|
|
|
|
-those values to create a `Makefile' in each directory of the package.
|
|
|
|
-It may also create one or more `.h' files containing system-dependent
|
|
|
|
-definitions. Finally, it creates a shell script `config.status' that
|
|
|
|
-you can run in the future to recreate the current configuration, and a
|
|
|
|
-file `config.log' containing compiler output (useful mainly for
|
|
|
|
-debugging `configure').
|
|
|
|
-
|
|
|
|
- It can also use an optional file (typically called `config.cache'
|
|
|
|
-and enabled with `--cache-file=config.cache' or simply `-C') that saves
|
|
|
|
-the results of its tests to speed up reconfiguring. (Caching is
|
|
|
|
-disabled by default to prevent problems with accidental use of stale
|
|
|
|
-cache files.)
|
|
|
|
-
|
|
|
|
- If you need to do unusual things to compile the package, please try
|
|
|
|
-to figure out how `configure' could check whether to do them, and mail
|
|
|
|
-diffs or instructions to the address given in the `README' so they can
|
|
|
|
-be considered for the next release. If you are using the cache, and at
|
|
|
|
-some point `config.cache' contains results you don't want to keep, you
|
|
|
|
-may remove or edit it.
|
|
|
|
-
|
|
|
|
- The file `configure.ac' (or `configure.in') is used to create
|
|
|
|
-`configure' by a program called `autoconf'. You only need
|
|
|
|
-`configure.ac' if you want to change it or regenerate `configure' using
|
|
|
|
-a newer version of `autoconf'.
|
|
|
|
-
|
|
|
|
-The simplest way to compile this package is:
|
|
|
|
-
|
|
|
|
- 1. `cd' to the directory containing the package's source code and type
|
|
|
|
- `./configure' to configure the package for your system. If you're
|
|
|
|
- using `csh' on an old version of System V, you might need to type
|
|
|
|
- `sh ./configure' instead to prevent `csh' from trying to execute
|
|
|
|
- `configure' itself.
|
|
|
|
-
|
|
|
|
- Running `configure' takes awhile. While running, it prints some
|
|
|
|
- messages telling which features it is checking for.
|
|
|
|
-
|
|
|
|
- 2. Type `make' to compile the package.
|
|
|
|
-
|
|
|
|
- 3. Optionally, type `make check' to run any self-tests that come with
|
|
|
|
- the package.
|
|
|
|
-
|
|
|
|
- 4. Type `make install' to install the programs and any data files and
|
|
|
|
- documentation.
|
|
|
|
-
|
|
|
|
- 5. You can remove the program binaries and object files from the
|
|
|
|
- source code directory by typing `make clean'. To also remove the
|
|
|
|
- files that `configure' created (so you can compile the package for
|
|
|
|
- a different kind of computer), type `make distclean'. There is
|
|
|
|
- also a `make maintainer-clean' target, but that is intended mainly
|
|
|
|
- for the package's developers. If you use it, you may have to get
|
|
|
|
- all sorts of other programs in order to regenerate files that came
|
|
|
|
- with the distribution.
|
|
|
|
-
|
|
|
|
-Compilers and Options
|
|
|
|
-=====================
|
|
|
|
-
|
|
|
|
-Some systems require unusual options for compilation or linking that the
|
|
|
|
-`configure' script does not know about. Run `./configure --help' for
|
|
|
|
-details on some of the pertinent environment variables.
|
|
|
|
-
|
|
|
|
- You can give `configure' initial values for configuration parameters
|
|
|
|
-by setting variables in the command line or in the environment. Here
|
|
|
|
-is an example:
|
|
|
|
-
|
|
|
|
- ./configure CC=c89 CFLAGS=-O2 LIBS=-lposix
|
|
|
|
-
|
|
|
|
- *Note Defining Variables::, for more details.
|
|
|
|
-
|
|
|
|
-Compiling For Multiple Architectures
|
|
|
|
-====================================
|
|
|
|
-
|
|
|
|
-You can compile the package for more than one kind of computer at the
|
|
|
|
-same time, by placing the object files for each architecture in their
|
|
|
|
-own directory. To do this, you must use a version of `make' that
|
|
|
|
-supports the `VPATH' variable, such as GNU `make'. `cd' to the
|
|
|
|
-directory where you want the object files and executables to go and run
|
|
|
|
-the `configure' script. `configure' automatically checks for the
|
|
|
|
-source code in the directory that `configure' is in and in `..'.
|
|
|
|
-
|
|
|
|
- If you have to use a `make' that does not support the `VPATH'
|
|
|
|
-variable, you have to compile the package for one architecture at a
|
|
|
|
-time in the source code directory. After you have installed the
|
|
|
|
-package for one architecture, use `make distclean' before reconfiguring
|
|
|
|
-for another architecture.
|
|
|
|
-
|
|
|
|
-Installation Names
|
|
|
|
-==================
|
|
|
|
-
|
|
|
|
-By default, `make install' installs the package's commands under
|
|
|
|
-`/usr/local/bin', include files under `/usr/local/include', etc. You
|
|
|
|
-can specify an installation prefix other than `/usr/local' by giving
|
|
|
|
-`configure' the option `--prefix=PREFIX'.
|
|
|
|
-
|
|
|
|
- You can specify separate installation prefixes for
|
|
|
|
-architecture-specific files and architecture-independent files. If you
|
|
|
|
-pass the option `--exec-prefix=PREFIX' to `configure', the package uses
|
|
|
|
-PREFIX as the prefix for installing programs and libraries.
|
|
|
|
-Documentation and other data files still use the regular prefix.
|
|
|
|
-
|
|
|
|
- In addition, if you use an unusual directory layout you can give
|
|
|
|
-options like `--bindir=DIR' to specify different values for particular
|
|
|
|
-kinds of files. Run `configure --help' for a list of the directories
|
|
|
|
-you can set and what kinds of files go in them.
|
|
|
|
-
|
|
|
|
- If the package supports it, you can cause programs to be installed
|
|
|
|
-with an extra prefix or suffix on their names by giving `configure' the
|
|
|
|
-option `--program-prefix=PREFIX' or `--program-suffix=SUFFIX'.
|
|
|
|
-
|
|
|
|
-Optional Features
|
|
|
|
-=================
|
|
|
|
-
|
|
|
|
-Some packages pay attention to `--enable-FEATURE' options to
|
|
|
|
-`configure', where FEATURE indicates an optional part of the package.
|
|
|
|
-They may also pay attention to `--with-PACKAGE' options, where PACKAGE
|
|
|
|
-is something like `gnu-as' or `x' (for the X Window System). The
|
|
|
|
-`README' should mention any `--enable-' and `--with-' options that the
|
|
|
|
-package recognizes.
|
|
|
|
-
|
|
|
|
- For packages that use the X Window System, `configure' can usually
|
|
|
|
-find the X include and library files automatically, but if it doesn't,
|
|
|
|
-you can use the `configure' options `--x-includes=DIR' and
|
|
|
|
-`--x-libraries=DIR' to specify their locations.
|
|
|
|
-
|
|
|
|
-Specifying the System Type
|
|
|
|
-==========================
|
|
|
|
-
|
|
|
|
-There may be some features `configure' cannot figure out automatically,
|
|
|
|
-but needs to determine by the type of machine the package will run on.
|
|
|
|
-Usually, assuming the package is built to be run on the _same_
|
|
|
|
-architectures, `configure' can figure that out, but if it prints a
|
|
|
|
-message saying it cannot guess the machine type, give it the
|
|
|
|
-`--build=TYPE' option. TYPE can either be a short name for the system
|
|
|
|
-type, such as `sun4', or a canonical name which has the form:
|
|
|
|
-
|
|
|
|
- CPU-COMPANY-SYSTEM
|
|
|
|
-
|
|
|
|
-where SYSTEM can have one of these forms:
|
|
|
|
-
|
|
|
|
- OS KERNEL-OS
|
|
|
|
-
|
|
|
|
- See the file `config.sub' for the possible values of each field. If
|
|
|
|
-`config.sub' isn't included in this package, then this package doesn't
|
|
|
|
-need to know the machine type.
|
|
|
|
-
|
|
|
|
- If you are _building_ compiler tools for cross-compiling, you should
|
|
|
|
-use the option `--target=TYPE' to select the type of system they will
|
|
|
|
-produce code for.
|
|
|
|
-
|
|
|
|
- If you want to _use_ a cross compiler, that generates code for a
|
|
|
|
-platform different from the build platform, you should specify the
|
|
|
|
-"host" platform (i.e., that on which the generated programs will
|
|
|
|
-eventually be run) with `--host=TYPE'.
|
|
|
|
-
|
|
|
|
-Sharing Defaults
|
|
|
|
-================
|
|
|
|
-
|
|
|
|
-If you want to set default values for `configure' scripts to share, you
|
|
|
|
-can create a site shell script called `config.site' that gives default
|
|
|
|
-values for variables like `CC', `cache_file', and `prefix'.
|
|
|
|
-`configure' looks for `PREFIX/share/config.site' if it exists, then
|
|
|
|
-`PREFIX/etc/config.site' if it exists. Or, you can set the
|
|
|
|
-`CONFIG_SITE' environment variable to the location of the site script.
|
|
|
|
-A warning: not all `configure' scripts look for a site script.
|
|
|
|
-
|
|
|
|
-Defining Variables
|
|
|
|
-==================
|
|
|
|
-
|
|
|
|
-Variables not defined in a site shell script can be set in the
|
|
|
|
-environment passed to `configure'. However, some packages may run
|
|
|
|
-configure again during the build, and the customized values of these
|
|
|
|
-variables may be lost. In order to avoid this problem, you should set
|
|
|
|
-them in the `configure' command line, using `VAR=value'. For example:
|
|
|
|
-
|
|
|
|
- ./configure CC=/usr/local2/bin/gcc
|
|
|
|
-
|
|
|
|
-causes the specified `gcc' to be used as the C compiler (unless it is
|
|
|
|
-overridden in the site shell script). Here is a another example:
|
|
|
|
-
|
|
|
|
- /bin/bash ./configure CONFIG_SHELL=/bin/bash
|
|
|
|
-
|
|
|
|
-Here the `CONFIG_SHELL=/bin/bash' operand causes subsequent
|
|
|
|
-configuration-related scripts to be executed by `/bin/bash'.
|
|
|
|
-
|
|
|
|
-`configure' Invocation
|
|
|
|
-======================
|
|
|
|
-
|
|
|
|
-`configure' recognizes the following options to control how it operates.
|
|
|
|
-
|
|
|
|
-`--help'
|
|
|
|
-`-h'
|
|
|
|
- Print a summary of the options to `configure', and exit.
|
|
|
|
-
|
|
|
|
-`--version'
|
|
|
|
-`-V'
|
|
|
|
- Print the version of Autoconf used to generate the `configure'
|
|
|
|
- script, and exit.
|
|
|
|
-
|
|
|
|
-`--cache-file=FILE'
|
|
|
|
- Enable the cache: use and save the results of the tests in FILE,
|
|
|
|
- traditionally `config.cache'. FILE defaults to `/dev/null' to
|
|
|
|
- disable caching.
|
|
|
|
-
|
|
|
|
-`--config-cache'
|
|
|
|
-`-C'
|
|
|
|
- Alias for `--cache-file=config.cache'.
|
|
|
|
-
|
|
|
|
-`--quiet'
|
|
|
|
-`--silent'
|
|
|
|
-`-q'
|
|
|
|
- Do not print messages saying which checks are being made. To
|
|
|
|
- suppress all normal output, redirect it to `/dev/null' (any error
|
|
|
|
- messages will still be shown).
|
|
|
|
-
|
|
|
|
-`--srcdir=DIR'
|
|
|
|
- Look for the package's source code in directory DIR. Usually
|
|
|
|
- `configure' can determine that directory automatically.
|
|
|
|
-
|
|
|
|
-`configure' also accepts some other, not widely useful, options. Run
|
|
|
|
-`configure --help' for more details.
|
|
|
|
-
|
|
|
|
--- a/libraries/libapparmor/NEWS
+++ /dev/null
@@ -1 +0,0 @@
-- 2007-06-24 - Initial release, version 0.6
--- a/libraries/libapparmor/README
+++ /dev/null
@@ -1 +0,0 @@
-What little documentation exists is in src/aalogparse.h. Please file bugs using http://bugzilla.novell.com under the AppArmor product.
--- a/libraries/libapparmor/autogen.sh
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/bin/sh
-
-DIE=0
-
-(autoconf --version) < /dev/null > /dev/null 2>&1 || {
- echo
- echo "You must have autoconf installed to compile $package."
- echo "Download the appropriate package for your distribution,"
- echo "or get the source tarball at ftp://ftp.gnu.org/pub/gnu/"
- DIE=1
-}
-
-(automake --version) < /dev/null > /dev/null 2>&1 || {
- echo
- echo "You must have automake installed to compile $package."
- echo "Download the appropriate package for your system,"
- echo "or get the source from one of the GNU ftp sites"
- echo "listed in http://www.gnu.org/order/ftp.html"
- DIE=1
-}
-
-(libtool --version) < /dev/null > /dev/null 2>&1 || {
- echo
- echo "You must have libtool installed to compile $package."
- echo "Download the appropriate package for your system,"
- echo "or get the source from one of the GNU ftp sites"
- echo "listed in http://www.gnu.org/order/ftp.html"
- DIE=1
-}
-
-if test "$DIE" -eq 1; then
- exit 1
-fi
-
-echo "Running aclocal"
-aclocal
-echo "Running autoconf"
-autoconf --force
-echo "Running libtoolize"
-libtoolize --automake
-echo "Running automake -ac"
-automake -ac
--- a/libraries/libapparmor/compile
|
|
|
|
+++ /dev/null
|
|
|
|
@@ -1,143 +0,0 @@
|
|
|
|
-#! /bin/sh
|
|
|
|
-# Wrapper for compilers which do not understand `-c -o'.
|
|
|
|
-
|
|
|
|
-scriptversion=2009-10-06.20; # UTC
|
|
|
|
-
|
|
|
|
-# Copyright (C) 1999, 2000, 2003, 2004, 2005, 2009 Free Software
|
|
|
|
-# Foundation, Inc.
|
|
|
|
-# Written by Tom Tromey <tromey@cygnus.com>.
|
|
|
|
-#
|
|
|
|
-# This program is free software; you can redistribute it and/or modify
|
|
|
|
-# it under the terms of the GNU General Public License as published by
|
|
|
|
-# the Free Software Foundation; either version 2, or (at your option)
|
|
|
|
-# any later version.
|
|
|
|
-#
|
|
|
|
-# This program is distributed in the hope that it will be useful,
|
|
|
|
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
-# GNU General Public License for more details.
|
|
|
|
-#
|
|
|
|
-# You should have received a copy of the GNU General Public License
|
|
|
|
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
-
|
|
|
|
-# As a special exception to the GNU General Public License, if you
|
|
|
|
-# distribute this file as part of a program that contains a
|
|
|
|
-# configuration script generated by Autoconf, you may include it under
|
|
|
|
-# the same distribution terms that you use for the rest of that program.
|
|
|
|
-
|
|
|
|
-# This file is maintained in Automake, please report
|
|
|
|
-# bugs to <bug-automake@gnu.org> or send patches to
|
|
|
|
-# <automake-patches@gnu.org>.
|
|
|
|
-
|
|
|
|
-case $1 in
|
|
|
|
- '')
|
|
|
|
- echo "$0: No command. Try \`$0 --help' for more information." 1>&2
|
|
|
|
- exit 1;
|
|
|
|
- ;;
|
|
|
|
- -h | --h*)
|
|
|
|
- cat <<\EOF
|
|
|
|
-Usage: compile [--help] [--version] PROGRAM [ARGS]
|
|
|
|
-
|
|
|
|
-Wrapper for compilers which do not understand `-c -o'.
|
|
|
|
-Remove `-o dest.o' from ARGS, run PROGRAM with the remaining
|
|
|
|
-arguments, and rename the output as expected.
|
|
|
|
-
|
|
|
|
-If you are trying to build a whole package this is not the
|
|
|
|
-right script to run: please start by reading the file `INSTALL'.
|
|
|
|
-
|
|
|
|
-Report bugs to <bug-automake@gnu.org>.
|
|
|
|
-EOF
|
|
|
|
- exit $?
|
|
|
|
- ;;
|
|
|
|
- -v | --v*)
|
|
|
|
- echo "compile $scriptversion"
|
|
|
|
- exit $?
|
|
|
|
- ;;
|
|
|
|
-esac
|
|
|
|
-
|
|
|
|
-ofile=
|
|
|
|
-cfile=
|
|
|
|
-eat=
|
|
|
|
-
|
|
|
|
-for arg
|
|
|
|
-do
|
|
|
|
- if test -n "$eat"; then
|
|
|
|
- eat=
|
|
|
|
- else
|
|
|
|
- case $1 in
|
|
|
|
- -o)
|
|
|
|
- # configure might choose to run compile as `compile cc -o foo foo.c'.
|
|
|
|
- # So we strip `-o arg' only if arg is an object.
|
|
|
|
- eat=1
|
|
|
|
- case $2 in
|
|
|
|
- *.o | *.obj)
|
|
|
|
- ofile=$2
|
|
|
|
- ;;
|
|
|
|
- *)
|
|
|
|
- set x "$@" -o "$2"
|
|
|
|
- shift
|
|
|
|
- ;;
|
|
|
|
- esac
|
|
|
|
- ;;
|
|
|
|
- *.c)
|
|
|
|
- cfile=$1
|
|
|
|
- set x "$@" "$1"
|
|
|
|
- shift
|
|
|
|
- ;;
|
|
|
|
- *)
|
|
|
|
- set x "$@" "$1"
|
|
|
|
- shift
|
|
|
|
- ;;
|
|
|
|
- esac
|
|
|
|
- fi
|
|
|
|
- shift
|
|
|
|
-done
|
|
|
|
-
|
|
|
|
-if test -z "$ofile" || test -z "$cfile"; then
|
|
|
|
- # If no `-o' option was seen then we might have been invoked from a
|
|
|
|
- # pattern rule where we don't need one. That is ok -- this is a
|
|
|
|
- # normal compilation that the losing compiler can handle. If no
|
|
|
|
- # `.c' file was seen then we are probably linking. That is also
|
|
|
|
- # ok.
|
|
|
|
- exec "$@"
|
|
|
|
-fi
|
|
|
|
-
|
|
|
|
-# Name of file we expect compiler to create.
|
|
|
|
-cofile=`echo "$cfile" | sed 's|^.*[\\/]||; s|^[a-zA-Z]:||; s/\.c$/.o/'`
|
|
|
|
-
|
|
|
|
-# Create the lock directory.
|
|
|
|
-# Note: use `[/\\:.-]' here to ensure that we don't use the same name
|
|
|
|
-# that we are using for the .o file. Also, base the name on the expected
|
|
|
|
-# object file name, since that is what matters with a parallel build.
|
|
|
|
-lockdir=`echo "$cofile" | sed -e 's|[/\\:.-]|_|g'`.d
|
|
|
|
-while true; do
|
|
|
|
- if mkdir "$lockdir" >/dev/null 2>&1; then
|
|
|
|
- break
|
|
|
|
- fi
|
|
|
|
- sleep 1
|
|
|
|
-done
|
|
|
|
-# FIXME: race condition here if user kills between mkdir and trap.
|
|
|
|
-trap "rmdir '$lockdir'; exit 1" 1 2 15
|
|
|
|
-
|
|
|
|
-# Run the compile.
|
|
|
|
-"$@"
|
|
|
|
-ret=$?
|
|
|
|
-
|
|
|
|
-if test -f "$cofile"; then
|
|
|
|
- test "$cofile" = "$ofile" || mv "$cofile" "$ofile"
|
|
|
|
-elif test -f "${cofile}bj"; then
|
|
|
|
- test "${cofile}bj" = "$ofile" || mv "${cofile}bj" "$ofile"
|
|
|
|
-fi
|
|
|
|
-
|
|
|
|
-rmdir "$lockdir"
|
|
|
|
-exit $ret
|
|
|
|
-
|
|
|
|
-# Local Variables:
|
|
|
|
-# mode: shell-script
|
|
|
|
-# sh-indentation: 2
|
|
|
|
-# eval: (add-hook 'write-file-hooks 'time-stamp)
|
|
|
|
-# time-stamp-start: "scriptversion="
|
|
|
|
-# time-stamp-format: "%:y-%02m-%02d.%02H"
|
|
|
|
-# time-stamp-time-zone: "UTC"
|
|
|
|
-# time-stamp-end: "; # UTC"
|
|
|
|
-# End:
|
|
|
|
--- a/libraries/libapparmor/config.guess
|
|
|
|
+++ /dev/null
|
|
|
|
@@ -1,1502 +0,0 @@
|
|
|
|
-#! /bin/sh
|
|
|
|
-# Attempt to guess a canonical system name.
|
|
|
|
-# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
|
|
|
|
-# 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
|
|
|
|
-# Free Software Foundation, Inc.
|
|
|
|
-
|
|
|
|
-timestamp='2009-12-30'
|
|
|
|
-
|
|
|
|
-# This file is free software; you can redistribute it and/or modify it
|
|
|
|
-# under the terms of the GNU General Public License as published by
|
|
|
|
-# the Free Software Foundation; either version 2 of the License, or
|
|
|
|
-# (at your option) any later version.
|
|
|
|
-#
|
|
|
|
-# This program is distributed in the hope that it will be useful, but
|
|
|
|
-# WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
|
|
-# General Public License for more details.
|
|
|
|
-#
|
|
|
|
-# You should have received a copy of the GNU General Public License
|
|
|
|
-# along with this program; if not, write to the Free Software
|
|
|
|
-# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA
|
|
|
|
-# 02110-1301, USA.
|
|
|
|
-#
|
|
|
|
-# As a special exception to the GNU General Public License, if you
|
|
|
|
-# distribute this file as part of a program that contains a
|
|
|
|
-# configuration script generated by Autoconf, you may include it under
|
|
|
|
-# the same distribution terms that you use for the rest of that program.
|
|
|
|
-
|
|
|
|
-
|
|
|
|
-# Originally written by Per Bothner. Please send patches (context
|
|
|
|
-# diff format) to <config-patches@gnu.org> and include a ChangeLog
|
|
|
|
-# entry.
|
|
|
|
-#
|
|
|
|
-# This script attempts to guess a canonical system name similar to
|
|
|
|
-# config.sub. If it succeeds, it prints the system name on stdout, and
|
|
|
|
-# exits with 0. Otherwise, it exits with 1.
|
|
|
|
-#
|
|
|
|
-# You can get the latest version of this script from:
|
|
|
|
-# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD
|
|
|
|
-
|
|
|
|
-me=`echo "$0" | sed -e 's,.*/,,'`
|
|
|
|
-
|
|
|
|
-usage="\
|
|
|
|
-Usage: $0 [OPTION]
|
|
|
|
-
|
|
|
|
-Output the configuration name of the system \`$me' is run on.
|
|
|
|
-
|
|
|
|
-Operation modes:
|
|
|
|
- -h, --help print this help, then exit
|
|
|
|
- -t, --time-stamp print date of last modification, then exit
|
|
|
|
- -v, --version print version number, then exit
|
|
|
|
-
|
|
|
|
-Report bugs and patches to <config-patches@gnu.org>."
|
|
|
|
-
|
|
|
|
-version="\
|
|
|
|
-GNU config.guess ($timestamp)
|
|
|
|
-
|
|
|
|
-Originally written by Per Bothner.
|
|
|
|
-Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
|
|
|
|
-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free
|
|
|
|
-Software Foundation, Inc.
|
|
|
|
-
|
|
|
|
-This is free software; see the source for copying conditions. There is NO
|
|
|
|
-warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
|
|
|
|
-
|
|
|
|
-help="
|
|
|
|
-Try \`$me --help' for more information."
|
|
|
|
-
|
|
|
|
-# Parse command line
|
|
|
|
-while test $# -gt 0 ; do
|
|
|
|
- case $1 in
|
|
|
|
- --time-stamp | --time* | -t )
|
|
|
|
- echo "$timestamp" ; exit ;;
|
|
|
|
- --version | -v )
|
|
|
|
- echo "$version" ; exit ;;
|
|
|
|
- --help | --h* | -h )
|
|
|
|
- echo "$usage"; exit ;;
|
|
|
|
- -- ) # Stop option processing
|
|
|
|
- shift; break ;;
|
|
|
|
- - ) # Use stdin as input.
|
|
|
|
- break ;;
|
|
|
|
- -* )
|
|
|
|
- echo "$me: invalid option $1$help" >&2
|
|
|
|
- exit 1 ;;
|
|
|
|
- * )
|
|
|
|
- break ;;
|
|
|
|
- esac
|
|
|
|
-done
|
|
|
|
-
|
|
|
|
-if test $# != 0; then
|
|
|
|
- echo "$me: too many arguments$help" >&2
|
|
|
|
- exit 1
|
|
|
|
-fi
|
|
|
|
-
|
|
|
|
-trap 'exit 1' 1 2 15
|
|
|
|
-
|
|
|
|
-# CC_FOR_BUILD -- compiler used by this script. Note that the use of a
|
|
|
|
-# compiler to aid in system detection is discouraged as it requires
|
|
|
|
-# temporary files to be created and, as you can see below, it is a
|
|
|
|
-# headache to deal with in a portable fashion.
|
|
|
|
-
|
|
|
|
-# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still
|
|
|
|
-# use `HOST_CC' if defined, but it is deprecated.
|
|
|
|
-
|
|
|
|
-# Portable tmp directory creation inspired by the Autoconf team.
|
|
|
|
-
|
|
|
|
-set_cc_for_build='
|
|
|
|
-trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ;
|
|
|
|
-trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ;
|
|
|
|
-: ${TMPDIR=/tmp} ;
|
|
|
|
- { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } ||
|
|
|
|
- { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } ||
|
|
|
|
- { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } ||
|
|
|
|
- { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ;
|
|
|
|
-dummy=$tmp/dummy ;
|
|
|
|
-tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ;
|
|
|
|
-case $CC_FOR_BUILD,$HOST_CC,$CC in
|
|
|
|
- ,,) echo "int x;" > $dummy.c ;
|
|
|
|
- for c in cc gcc c89 c99 ; do
|
|
|
|
- if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then
|
|
|
|
- CC_FOR_BUILD="$c"; break ;
|
|
|
|
- fi ;
|
|
|
|
- done ;
|
|
|
|
- if test x"$CC_FOR_BUILD" = x ; then
|
|
|
|
- CC_FOR_BUILD=no_compiler_found ;
|
|
|
|
- fi
|
|
|
|
- ;;
|
|
|
|
- ,,*) CC_FOR_BUILD=$CC ;;
|
|
|
|
- ,*,*) CC_FOR_BUILD=$HOST_CC ;;
|
|
|
|
-esac ; set_cc_for_build= ;'
|
|
|
|
-
|
|
|
|
-# This is needed to find uname on a Pyramid OSx when run in the BSD universe.
|
|
|
|
-# (ghazi@noc.rutgers.edu 1994-08-24)
|
|
|
|
-if (test -f /.attbin/uname) >/dev/null 2>&1 ; then
|
|
|
|
- PATH=$PATH:/.attbin ; export PATH
|
|
|
|
-fi
|
|
|
|
-
|
|
|
|
-UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown
|
|
|
|
-UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown
|
|
|
|
-UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown
|
|
|
|
-UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown
|
|
|
|
-
|
|
|
|
-# Note: order is significant - the case branches are not exclusive.
|
|
|
|
-
|
|
|
|
-case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in
|
|
|
|
- *:NetBSD:*:*)
|
|
|
|
- # NetBSD (nbsd) targets should (where applicable) match one or
|
|
|
|
- # more of the tupples: *-*-netbsdelf*, *-*-netbsdaout*,
|
|
|
|
- # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently
|
|
|
|
- # switched to ELF, *-*-netbsd* would select the old
|
|
|
|
- # object file format. This provides both forward
|
|
|
|
- # compatibility and a consistent mechanism for selecting the
|
|
|
|
- # object file format.
|
|
|
|
- #
|
|
|
|
- # Note: NetBSD doesn't particularly care about the vendor
|
|
|
|
- # portion of the name. We always set it to "unknown".
|
|
|
|
- sysctl="sysctl -n hw.machine_arch"
|
|
|
|
- UNAME_MACHINE_ARCH=`(/sbin/$sysctl 2>/dev/null || \
|
|
|
|
- /usr/sbin/$sysctl 2>/dev/null || echo unknown)`
|
|
|
|
- case "${UNAME_MACHINE_ARCH}" in
|
|
|
|
- armeb) machine=armeb-unknown ;;
|
|
|
|
- arm*) machine=arm-unknown ;;
|
|
|
|
- sh3el) machine=shl-unknown ;;
|
|
|
|
- sh3eb) machine=sh-unknown ;;
|
|
|
|
- sh5el) machine=sh5le-unknown ;;
|
|
|
|
- *) machine=${UNAME_MACHINE_ARCH}-unknown ;;
|
|
|
|
- esac
|
|
|
|
- # The Operating System including object format, if it has switched
|
|
|
|
- # to ELF recently, or will in the future.
|
|
|
|
- case "${UNAME_MACHINE_ARCH}" in
|
|
|
|
- arm*|i386|m68k|ns32k|sh3*|sparc|vax)
|
|
|
|
- eval $set_cc_for_build
|
|
|
|
- if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \
|
|
|
|
- | grep -q __ELF__
|
|
|
|
- then
|
|
|
|
- # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout).
|
|
|
|
- # Return netbsd for either. FIX?
|
|
|
|
- os=netbsd
|
|
|
|
- else
|
|
|
|
- os=netbsdelf
|
|
|
|
- fi
|
|
|
|
- ;;
|
|
|
|
- *)
|
|
|
|
- os=netbsd
|
|
|
|
- ;;
|
|
|
|
- esac
|
|
|
|
- # The OS release
|
|
|
|
- # Debian GNU/NetBSD machines have a different userland, and
|
|
|
|
- # thus, need a distinct triplet. However, they do not need
|
|
|
|
- # kernel version information, so it can be replaced with a
|
|
|
|
- # suitable tag, in the style of linux-gnu.
|
|
|
|
- case "${UNAME_VERSION}" in
|
|
|
|
- Debian*)
|
|
|
|
- release='-gnu'
|
|
|
|
- ;;
|
|
|
|
- *)
|
|
|
|
- release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'`
|
|
|
|
- ;;
|
|
|
|
- esac
|
|
|
|
- # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM:
|
|
|
|
- # contains redundant information, the shorter form:
|
|
|
|
- # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used.
|
|
|
|
- echo "${machine}-${os}${release}"
|
|
|
|
- exit ;;
|
|
|
|
- *:OpenBSD:*:*)
|
|
|
|
- UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'`
|
|
|
|
- echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- *:ekkoBSD:*:*)
|
|
|
|
- echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- *:SolidBSD:*:*)
|
|
|
|
- echo ${UNAME_MACHINE}-unknown-solidbsd${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- macppc:MirBSD:*:*)
|
|
|
|
- echo powerpc-unknown-mirbsd${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- *:MirBSD:*:*)
|
|
|
|
- echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- alpha:OSF1:*:*)
|
|
|
|
- case $UNAME_RELEASE in
|
|
|
|
- *4.0)
|
|
|
|
- UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'`
|
|
|
|
- ;;
|
|
|
|
- *5.*)
|
|
|
|
- UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'`
|
|
|
|
- ;;
|
|
|
|
- esac
|
|
|
|
- # According to Compaq, /usr/sbin/psrinfo has been available on
|
|
|
|
- # OSF/1 and Tru64 systems produced since 1995. I hope that
|
|
|
|
- # covers most systems running today. This code pipes the CPU
|
|
|
|
- # types through head -n 1, so we only detect the type of CPU 0.
|
|
|
|
- ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1`
|
|
|
|
- case "$ALPHA_CPU_TYPE" in
|
|
|
|
- "EV4 (21064)")
|
|
|
|
- UNAME_MACHINE="alpha" ;;
|
|
|
|
- "EV4.5 (21064)")
|
|
|
|
- UNAME_MACHINE="alpha" ;;
|
|
|
|
- "LCA4 (21066/21068)")
|
|
|
|
- UNAME_MACHINE="alpha" ;;
|
|
|
|
- "EV5 (21164)")
|
|
|
|
- UNAME_MACHINE="alphaev5" ;;
|
|
|
|
- "EV5.6 (21164A)")
|
|
|
|
- UNAME_MACHINE="alphaev56" ;;
|
|
|
|
- "EV5.6 (21164PC)")
|
|
|
|
- UNAME_MACHINE="alphapca56" ;;
|
|
|
|
- "EV5.7 (21164PC)")
|
|
|
|
- UNAME_MACHINE="alphapca57" ;;
|
|
|
|
- "EV6 (21264)")
|
|
|
|
- UNAME_MACHINE="alphaev6" ;;
|
|
|
|
- "EV6.7 (21264A)")
|
|
|
|
- UNAME_MACHINE="alphaev67" ;;
|
|
|
|
- "EV6.8CB (21264C)")
|
|
|
|
- UNAME_MACHINE="alphaev68" ;;
|
|
|
|
- "EV6.8AL (21264B)")
|
|
|
|
- UNAME_MACHINE="alphaev68" ;;
|
|
|
|
- "EV6.8CX (21264D)")
|
|
|
|
- UNAME_MACHINE="alphaev68" ;;
|
|
|
|
- "EV6.9A (21264/EV69A)")
|
|
|
|
- UNAME_MACHINE="alphaev69" ;;
|
|
|
|
- "EV7 (21364)")
|
|
|
|
- UNAME_MACHINE="alphaev7" ;;
|
|
|
|
- "EV7.9 (21364A)")
|
|
|
|
- UNAME_MACHINE="alphaev79" ;;
|
|
|
|
- esac
|
|
|
|
- # A Pn.n version is a patched version.
|
|
|
|
- # A Vn.n version is a released version.
|
|
|
|
- # A Tn.n version is a released field test version.
|
|
|
|
- # A Xn.n version is an unreleased experimental baselevel.
|
|
|
|
- # 1.2 uses "1.2" for uname -r.
|
|
|
|
- echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
|
|
|
|
- exit ;;
|
|
|
|
- Alpha\ *:Windows_NT*:*)
|
|
|
|
- # How do we know it's Interix rather than the generic POSIX subsystem?
|
|
|
|
- # Should we change UNAME_MACHINE based on the output of uname instead
|
|
|
|
- # of the specific Alpha model?
|
|
|
|
- echo alpha-pc-interix
|
|
|
|
- exit ;;
|
|
|
|
- 21064:Windows_NT:50:3)
|
|
|
|
- echo alpha-dec-winnt3.5
|
|
|
|
- exit ;;
|
|
|
|
- Amiga*:UNIX_System_V:4.0:*)
|
|
|
|
- echo m68k-unknown-sysv4
|
|
|
|
- exit ;;
|
|
|
|
- *:[Aa]miga[Oo][Ss]:*:*)
|
|
|
|
- echo ${UNAME_MACHINE}-unknown-amigaos
|
|
|
|
- exit ;;
|
|
|
|
- *:[Mm]orph[Oo][Ss]:*:*)
|
|
|
|
- echo ${UNAME_MACHINE}-unknown-morphos
|
|
|
|
- exit ;;
|
|
|
|
- *:OS/390:*:*)
|
|
|
|
- echo i370-ibm-openedition
|
|
|
|
- exit ;;
|
|
|
|
- *:z/VM:*:*)
|
|
|
|
- echo s390-ibm-zvmoe
|
|
|
|
- exit ;;
|
|
|
|
- *:OS400:*:*)
|
|
|
|
- echo powerpc-ibm-os400
|
|
|
|
- exit ;;
|
|
|
|
- arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*)
|
|
|
|
- echo arm-acorn-riscix${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- arm:riscos:*:*|arm:RISCOS:*:*)
|
|
|
|
- echo arm-unknown-riscos
|
|
|
|
- exit ;;
|
|
|
|
- SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*)
|
|
|
|
- echo hppa1.1-hitachi-hiuxmpp
|
|
|
|
- exit ;;
|
|
|
|
- Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*)
|
|
|
|
- # akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE.
|
|
|
|
- if test "`(/bin/universe) 2>/dev/null`" = att ; then
|
|
|
|
- echo pyramid-pyramid-sysv3
|
|
|
|
- else
|
|
|
|
- echo pyramid-pyramid-bsd
|
|
|
|
- fi
|
|
|
|
- exit ;;
|
|
|
|
- NILE*:*:*:dcosx)
|
|
|
|
- echo pyramid-pyramid-svr4
|
|
|
|
- exit ;;
|
|
|
|
- DRS?6000:unix:4.0:6*)
|
|
|
|
- echo sparc-icl-nx6
|
|
|
|
- exit ;;
|
|
|
|
- DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*)
|
|
|
|
- case `/usr/bin/uname -p` in
|
|
|
|
- sparc) echo sparc-icl-nx7; exit ;;
|
|
|
|
- esac ;;
|
|
|
|
- s390x:SunOS:*:*)
|
|
|
|
- echo ${UNAME_MACHINE}-ibm-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
|
|
|
|
- exit ;;
|
|
|
|
- sun4H:SunOS:5.*:*)
|
|
|
|
- echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
|
|
|
|
- exit ;;
|
|
|
|
- sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*)
|
|
|
|
- echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
|
|
|
|
- exit ;;
|
|
|
|
- i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*)
|
|
|
|
- echo i386-pc-auroraux${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*)
|
|
|
|
- eval $set_cc_for_build
|
|
|
|
- SUN_ARCH="i386"
|
|
|
|
- # If there is a compiler, see if it is configured for 64-bit objects.
|
|
|
|
- # Note that the Sun cc does not turn __LP64__ into 1 like gcc does.
|
|
|
|
- # This test works for both compilers.
|
|
|
|
- if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then
|
|
|
|
- if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \
|
|
|
|
- (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \
|
|
|
|
- grep IS_64BIT_ARCH >/dev/null
|
|
|
|
- then
|
|
|
|
- SUN_ARCH="x86_64"
|
|
|
|
- fi
|
|
|
|
- fi
|
|
|
|
- echo ${SUN_ARCH}-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
|
|
|
|
- exit ;;
|
|
|
|
- sun4*:SunOS:6*:*)
|
|
|
|
- # According to config.sub, this is the proper way to canonicalize
|
|
|
|
- # SunOS6. Hard to guess exactly what SunOS6 will be like, but
|
|
|
|
- # it's likely to be more like Solaris than SunOS4.
|
|
|
|
- echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
|
|
|
|
- exit ;;
|
|
|
|
- sun4*:SunOS:*:*)
|
|
|
|
- case "`/usr/bin/arch -k`" in
|
|
|
|
- Series*|S4*)
|
|
|
|
- UNAME_RELEASE=`uname -v`
|
|
|
|
- ;;
|
|
|
|
- esac
|
|
|
|
- # Japanese Language versions have a version number like `4.1.3-JL'.
|
|
|
|
- echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'`
|
|
|
|
- exit ;;
|
|
|
|
- sun3*:SunOS:*:*)
|
|
|
|
- echo m68k-sun-sunos${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- sun*:*:4.2BSD:*)
|
|
|
|
- UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null`
|
|
|
|
- test "x${UNAME_RELEASE}" = "x" && UNAME_RELEASE=3
|
|
|
|
- case "`/bin/arch`" in
|
|
|
|
- sun3)
|
|
|
|
- echo m68k-sun-sunos${UNAME_RELEASE}
|
|
|
|
- ;;
|
|
|
|
- sun4)
|
|
|
|
- echo sparc-sun-sunos${UNAME_RELEASE}
|
|
|
|
- ;;
|
|
|
|
- esac
|
|
|
|
- exit ;;
|
|
|
|
- aushp:SunOS:*:*)
|
|
|
|
- echo sparc-auspex-sunos${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- # The situation for MiNT is a little confusing. The machine name
|
|
|
|
- # can be virtually everything (everything which is not
|
|
|
|
- # "atarist" or "atariste" at least should have a processor
|
|
|
|
- # > m68000). The system name ranges from "MiNT" over "FreeMiNT"
|
|
|
|
- # to the lowercase version "mint" (or "freemint"). Finally
|
|
|
|
- # the system name "TOS" denotes a system which is actually not
|
|
|
|
- # MiNT. But MiNT is downward compatible to TOS, so this should
|
|
|
|
- # be no problem.
|
|
|
|
- atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*)
|
|
|
|
- echo m68k-atari-mint${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*)
|
|
|
|
- echo m68k-atari-mint${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*)
|
|
|
|
- echo m68k-atari-mint${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*)
|
|
|
|
- echo m68k-milan-mint${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*)
|
|
|
|
- echo m68k-hades-mint${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*)
|
|
|
|
- echo m68k-unknown-mint${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- m68k:machten:*:*)
|
|
|
|
- echo m68k-apple-machten${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- powerpc:machten:*:*)
|
|
|
|
- echo powerpc-apple-machten${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- RISC*:Mach:*:*)
|
|
|
|
- echo mips-dec-mach_bsd4.3
|
|
|
|
- exit ;;
|
|
|
|
- RISC*:ULTRIX:*:*)
|
|
|
|
- echo mips-dec-ultrix${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- VAX*:ULTRIX*:*:*)
|
|
|
|
- echo vax-dec-ultrix${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- 2020:CLIX:*:* | 2430:CLIX:*:*)
|
|
|
|
- echo clipper-intergraph-clix${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- mips:*:*:UMIPS | mips:*:*:RISCos)
|
|
|
|
- eval $set_cc_for_build
|
|
|
|
- sed 's/^ //' << EOF >$dummy.c
|
|
|
|
-#ifdef __cplusplus
|
|
|
|
-#include <stdio.h> /* for printf() prototype */
|
|
|
|
- int main (int argc, char *argv[]) {
|
|
|
|
-#else
|
|
|
|
- int main (argc, argv) int argc; char *argv[]; {
|
|
|
|
-#endif
|
|
|
|
- #if defined (host_mips) && defined (MIPSEB)
|
|
|
|
- #if defined (SYSTYPE_SYSV)
|
|
|
|
- printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0);
|
|
|
|
- #endif
|
|
|
|
- #if defined (SYSTYPE_SVR4)
|
|
|
|
- printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0);
|
|
|
|
- #endif
|
|
|
|
- #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD)
|
|
|
|
- printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0);
|
|
|
|
- #endif
|
|
|
|
- #endif
|
|
|
|
- exit (-1);
|
|
|
|
- }
|
|
|
|
-EOF
|
|
|
|
- $CC_FOR_BUILD -o $dummy $dummy.c &&
|
|
|
|
- dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` &&
|
|
|
|
- SYSTEM_NAME=`$dummy $dummyarg` &&
|
|
|
|
- { echo "$SYSTEM_NAME"; exit; }
|
|
|
|
- echo mips-mips-riscos${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- Motorola:PowerMAX_OS:*:*)
|
|
|
|
- echo powerpc-motorola-powermax
|
|
|
|
- exit ;;
|
|
|
|
- Motorola:*:4.3:PL8-*)
|
|
|
|
- echo powerpc-harris-powermax
|
|
|
|
- exit ;;
|
|
|
|
- Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*)
|
|
|
|
- echo powerpc-harris-powermax
|
|
|
|
- exit ;;
|
|
|
|
- Night_Hawk:Power_UNIX:*:*)
|
|
|
|
- echo powerpc-harris-powerunix
|
|
|
|
- exit ;;
|
|
|
|
- m88k:CX/UX:7*:*)
|
|
|
|
- echo m88k-harris-cxux7
|
|
|
|
- exit ;;
|
|
|
|
- m88k:*:4*:R4*)
|
|
|
|
- echo m88k-motorola-sysv4
|
|
|
|
- exit ;;
|
|
|
|
- m88k:*:3*:R3*)
|
|
|
|
- echo m88k-motorola-sysv3
|
|
|
|
- exit ;;
|
|
|
|
- AViiON:dgux:*:*)
|
|
|
|
- # DG/UX returns AViiON for all architectures
|
|
|
|
- UNAME_PROCESSOR=`/usr/bin/uname -p`
|
|
|
|
- if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ]
|
|
|
|
- then
|
|
|
|
- if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \
|
|
|
|
- [ ${TARGET_BINARY_INTERFACE}x = x ]
|
|
|
|
- then
|
|
|
|
- echo m88k-dg-dgux${UNAME_RELEASE}
|
|
|
|
- else
|
|
|
|
- echo m88k-dg-dguxbcs${UNAME_RELEASE}
|
|
|
|
- fi
|
|
|
|
- else
|
|
|
|
- echo i586-dg-dgux${UNAME_RELEASE}
|
|
|
|
- fi
|
|
|
|
- exit ;;
|
|
|
|
- M88*:DolphinOS:*:*) # DolphinOS (SVR3)
|
|
|
|
- echo m88k-dolphin-sysv3
|
|
|
|
- exit ;;
|
|
|
|
- M88*:*:R3*:*)
|
|
|
|
- # Delta 88k system running SVR3
|
|
|
|
- echo m88k-motorola-sysv3
|
|
|
|
- exit ;;
|
|
|
|
- XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3)
|
|
|
|
- echo m88k-tektronix-sysv3
|
|
|
|
- exit ;;
|
|
|
|
- Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD)
|
|
|
|
- echo m68k-tektronix-bsd
|
|
|
|
- exit ;;
|
|
|
|
- *:IRIX*:*:*)
|
|
|
|
- echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'`
|
|
|
|
- exit ;;
|
|
|
|
- ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX.
|
|
|
|
- echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id
|
|
|
|
- exit ;; # Note that: echo "'`uname -s`'" gives 'AIX '
|
|
|
|
- i*86:AIX:*:*)
|
|
|
|
- echo i386-ibm-aix
|
|
|
|
- exit ;;
|
|
|
|
- ia64:AIX:*:*)
|
|
|
|
- if [ -x /usr/bin/oslevel ] ; then
|
|
|
|
- IBM_REV=`/usr/bin/oslevel`
|
|
|
|
- else
|
|
|
|
- IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
|
|
|
|
- fi
|
|
|
|
- echo ${UNAME_MACHINE}-ibm-aix${IBM_REV}
|
|
|
|
- exit ;;
|
|
|
|
- *:AIX:2:3)
|
|
|
|
- if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then
|
|
|
|
- eval $set_cc_for_build
|
|
|
|
- sed 's/^ //' << EOF >$dummy.c
|
|
|
|
- #include <sys/systemcfg.h>
|
|
|
|
-
|
|
|
|
- main()
|
|
|
|
- {
|
|
|
|
- if (!__power_pc())
|
|
|
|
- exit(1);
|
|
|
|
- puts("powerpc-ibm-aix3.2.5");
|
|
|
|
- exit(0);
|
|
|
|
- }
|
|
|
|
-EOF
|
|
|
|
- if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy`
|
|
|
|
- then
|
|
|
|
- echo "$SYSTEM_NAME"
|
|
|
|
- else
|
|
|
|
- echo rs6000-ibm-aix3.2.5
|
|
|
|
- fi
|
|
|
|
- elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then
|
|
|
|
- echo rs6000-ibm-aix3.2.4
|
|
|
|
- else
|
|
|
|
- echo rs6000-ibm-aix3.2
|
|
|
|
- fi
|
|
|
|
- exit ;;
|
|
|
|
- *:AIX:*:[456])
|
|
|
|
- IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'`
|
|
|
|
- if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then
|
|
|
|
- IBM_ARCH=rs6000
|
|
|
|
- else
|
|
|
|
- IBM_ARCH=powerpc
|
|
|
|
- fi
|
|
|
|
- if [ -x /usr/bin/oslevel ] ; then
|
|
|
|
- IBM_REV=`/usr/bin/oslevel`
|
|
|
|
- else
|
|
|
|
- IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
|
|
|
|
- fi
|
|
|
|
- echo ${IBM_ARCH}-ibm-aix${IBM_REV}
|
|
|
|
- exit ;;
|
|
|
|
- *:AIX:*:*)
|
|
|
|
- echo rs6000-ibm-aix
|
|
|
|
- exit ;;
|
|
|
|
- ibmrt:4.4BSD:*|romp-ibm:BSD:*)
|
|
|
|
- echo romp-ibm-bsd4.4
|
|
|
|
- exit ;;
|
|
|
|
- ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and
|
|
|
|
- echo romp-ibm-bsd${UNAME_RELEASE} # 4.3 with uname added to
|
|
|
|
- exit ;; # report: romp-ibm BSD 4.3
|
|
|
|
- *:BOSX:*:*)
|
|
|
|
- echo rs6000-bull-bosx
|
|
|
|
- exit ;;
|
|
|
|
- DPX/2?00:B.O.S.:*:*)
|
|
|
|
- echo m68k-bull-sysv3
|
|
|
|
- exit ;;
|
|
|
|
- 9000/[34]??:4.3bsd:1.*:*)
|
|
|
|
- echo m68k-hp-bsd
|
|
|
|
- exit ;;
|
|
|
|
- hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*)
|
|
|
|
- echo m68k-hp-bsd4.4
|
|
|
|
- exit ;;
|
|
|
|
- 9000/[34678]??:HP-UX:*:*)
|
|
|
|
- HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
|
|
|
|
- case "${UNAME_MACHINE}" in
|
|
|
|
- 9000/31? ) HP_ARCH=m68000 ;;
|
|
|
|
- 9000/[34]?? ) HP_ARCH=m68k ;;
|
|
|
|
- 9000/[678][0-9][0-9])
|
|
|
|
- if [ -x /usr/bin/getconf ]; then
|
|
|
|
- sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null`
|
|
|
|
- sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null`
|
|
|
|
- case "${sc_cpu_version}" in
|
|
|
|
- 523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0
|
|
|
|
- 528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1
|
|
|
|
- 532) # CPU_PA_RISC2_0
|
|
|
|
- case "${sc_kernel_bits}" in
|
|
|
|
- 32) HP_ARCH="hppa2.0n" ;;
|
|
|
|
- 64) HP_ARCH="hppa2.0w" ;;
|
|
|
|
- '') HP_ARCH="hppa2.0" ;; # HP-UX 10.20
|
|
|
|
- esac ;;
|
|
|
|
- esac
|
|
|
|
- fi
|
|
|
|
- if [ "${HP_ARCH}" = "" ]; then
|
|
|
|
- eval $set_cc_for_build
|
|
|
|
- sed 's/^ //' << EOF >$dummy.c
|
|
|
|
-
|
|
|
|
- #define _HPUX_SOURCE
|
|
|
|
- #include <stdlib.h>
|
|
|
|
- #include <unistd.h>
|
|
|
|
-
|
|
|
|
- int main ()
|
|
|
|
- {
|
|
|
|
- #if defined(_SC_KERNEL_BITS)
|
|
|
|
- long bits = sysconf(_SC_KERNEL_BITS);
|
|
|
|
- #endif
|
|
|
|
- long cpu = sysconf (_SC_CPU_VERSION);
|
|
|
|
-
|
|
|
|
- switch (cpu)
|
|
|
|
- {
|
|
|
|
- case CPU_PA_RISC1_0: puts ("hppa1.0"); break;
|
|
|
|
- case CPU_PA_RISC1_1: puts ("hppa1.1"); break;
|
|
|
|
- case CPU_PA_RISC2_0:
|
|
|
|
- #if defined(_SC_KERNEL_BITS)
|
|
|
|
- switch (bits)
|
|
|
|
- {
|
|
|
|
- case 64: puts ("hppa2.0w"); break;
|
|
|
|
- case 32: puts ("hppa2.0n"); break;
|
|
|
|
- default: puts ("hppa2.0"); break;
|
|
|
|
- } break;
|
|
|
|
- #else /* !defined(_SC_KERNEL_BITS) */
|
|
|
|
- puts ("hppa2.0"); break;
|
|
|
|
- #endif
|
|
|
|
- default: puts ("hppa1.0"); break;
|
|
|
|
- }
|
|
|
|
- exit (0);
|
|
|
|
- }
|
|
|
|
-EOF
|
|
|
|
- (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy`
|
|
|
|
- test -z "$HP_ARCH" && HP_ARCH=hppa
|
|
|
|
- fi ;;
|
|
|
|
- esac
|
|
|
|
- if [ ${HP_ARCH} = "hppa2.0w" ]
|
|
|
|
- then
|
|
|
|
- eval $set_cc_for_build
|
|
|
|
-
|
|
|
|
- # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating
|
|
|
|
- # 32-bit code. hppa64-hp-hpux* has the same kernel and a compiler
|
|
|
|
- # generating 64-bit code. GNU and HP use different nomenclature:
|
|
|
|
- #
|
|
|
|
- # $ CC_FOR_BUILD=cc ./config.guess
|
|
|
|
- # => hppa2.0w-hp-hpux11.23
|
|
|
|
- # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess
|
|
|
|
- # => hppa64-hp-hpux11.23
|
|
|
|
-
|
|
|
|
- if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) |
|
|
|
|
- grep -q __LP64__
|
|
|
|
- then
|
|
|
|
- HP_ARCH="hppa2.0w"
|
|
|
|
- else
|
|
|
|
- HP_ARCH="hppa64"
|
|
|
|
- fi
|
|
|
|
- fi
|
|
|
|
- echo ${HP_ARCH}-hp-hpux${HPUX_REV}
|
|
|
|
- exit ;;
|
|
|
|
- ia64:HP-UX:*:*)
|
|
|
|
- HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'`
|
|
|
|
- echo ia64-hp-hpux${HPUX_REV}
|
|
|
|
- exit ;;
|
|
|
|
- 3050*:HI-UX:*:*)
|
|
|
|
- eval $set_cc_for_build
|
|
|
|
- sed 's/^ //' << EOF >$dummy.c
|
|
|
|
- #include <unistd.h>
|
|
|
|
- int
|
|
|
|
- main ()
|
|
|
|
- {
|
|
|
|
- long cpu = sysconf (_SC_CPU_VERSION);
|
|
|
|
- /* The order matters, because CPU_IS_HP_MC68K erroneously returns
|
|
|
|
- true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct
|
|
|
|
- results, however. */
|
|
|
|
- if (CPU_IS_PA_RISC (cpu))
|
|
|
|
- {
|
|
|
|
- switch (cpu)
|
|
|
|
- {
|
|
|
|
- case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break;
|
|
|
|
- case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break;
|
|
|
|
- case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break;
|
|
|
|
- default: puts ("hppa-hitachi-hiuxwe2"); break;
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- else if (CPU_IS_HP_MC68K (cpu))
|
|
|
|
- puts ("m68k-hitachi-hiuxwe2");
|
|
|
|
- else puts ("unknown-hitachi-hiuxwe2");
|
|
|
|
- exit (0);
|
|
|
|
- }
|
|
|
|
-EOF
|
|
|
|
- $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` &&
|
|
|
|
- { echo "$SYSTEM_NAME"; exit; }
|
|
|
|
- echo unknown-hitachi-hiuxwe2
|
|
|
|
- exit ;;
|
|
|
|
- 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* )
|
|
|
|
- echo hppa1.1-hp-bsd
|
|
|
|
- exit ;;
|
|
|
|
- 9000/8??:4.3bsd:*:*)
|
|
|
|
- echo hppa1.0-hp-bsd
|
|
|
|
- exit ;;
|
|
|
|
- *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*)
|
|
|
|
- echo hppa1.0-hp-mpeix
|
|
|
|
- exit ;;
|
|
|
|
- hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* )
|
|
|
|
- echo hppa1.1-hp-osf
|
|
|
|
- exit ;;
|
|
|
|
- hp8??:OSF1:*:*)
|
|
|
|
- echo hppa1.0-hp-osf
|
|
|
|
- exit ;;
|
|
|
|
- i*86:OSF1:*:*)
|
|
|
|
- if [ -x /usr/sbin/sysversion ] ; then
|
|
|
|
- echo ${UNAME_MACHINE}-unknown-osf1mk
|
|
|
|
- else
|
|
|
|
- echo ${UNAME_MACHINE}-unknown-osf1
|
|
|
|
- fi
|
|
|
|
- exit ;;
|
|
|
|
- parisc*:Lites*:*:*)
|
|
|
|
- echo hppa1.1-hp-lites
|
|
|
|
- exit ;;
|
|
|
|
- C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*)
|
|
|
|
- echo c1-convex-bsd
|
|
|
|
- exit ;;
|
|
|
|
- C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*)
|
|
|
|
- if getsysinfo -f scalar_acc
|
|
|
|
- then echo c32-convex-bsd
|
|
|
|
- else echo c2-convex-bsd
|
|
|
|
- fi
|
|
|
|
- exit ;;
|
|
|
|
- C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*)
|
|
|
|
- echo c34-convex-bsd
|
|
|
|
- exit ;;
|
|
|
|
- C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*)
|
|
|
|
- echo c38-convex-bsd
|
|
|
|
- exit ;;
|
|
|
|
- C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*)
|
|
|
|
- echo c4-convex-bsd
|
|
|
|
- exit ;;
|
|
|
|
- CRAY*Y-MP:*:*:*)
|
|
|
|
- echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
|
|
|
|
- exit ;;
|
|
|
|
- CRAY*[A-Z]90:*:*:*)
|
|
|
|
- echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \
|
|
|
|
- | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \
|
|
|
|
- -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \
|
|
|
|
- -e 's/\.[^.]*$/.X/'
|
|
|
|
- exit ;;
|
|
|
|
- CRAY*TS:*:*:*)
|
|
|
|
- echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
|
|
|
|
- exit ;;
|
|
|
|
- CRAY*T3E:*:*:*)
|
|
|
|
- echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
|
|
|
|
- exit ;;
|
|
|
|
- CRAY*SV1:*:*:*)
|
|
|
|
- echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
|
|
|
|
- exit ;;
|
|
|
|
- *:UNICOS/mp:*:*)
|
|
|
|
- echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
|
|
|
|
- exit ;;
|
|
|
|
- F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*)
|
|
|
|
- FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
|
|
|
|
- FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
|
|
|
|
- FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'`
|
|
|
|
- echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
|
|
|
|
- exit ;;
|
|
|
|
- 5000:UNIX_System_V:4.*:*)
|
|
|
|
- FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
|
|
|
|
- FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'`
|
|
|
|
- echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
|
|
|
|
- exit ;;
|
|
|
|
- i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*)
|
|
|
|
- echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- sparc*:BSD/OS:*:*)
|
|
|
|
- echo sparc-unknown-bsdi${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- *:BSD/OS:*:*)
|
|
|
|
- echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- *:FreeBSD:*:*)
|
|
|
|
- case ${UNAME_MACHINE} in
|
|
|
|
- pc98)
|
|
|
|
- echo i386-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
|
|
|
|
- amd64)
|
|
|
|
- echo x86_64-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
|
|
|
|
- *)
|
|
|
|
- echo ${UNAME_MACHINE}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
|
|
|
|
- esac
|
|
|
|
- exit ;;
|
|
|
|
- i*:CYGWIN*:*)
|
|
|
|
- echo ${UNAME_MACHINE}-pc-cygwin
|
|
|
|
- exit ;;
|
|
|
|
- *:MINGW*:*)
|
|
|
|
- echo ${UNAME_MACHINE}-pc-mingw32
|
|
|
|
- exit ;;
|
|
|
|
- i*:windows32*:*)
|
|
|
|
- # uname -m includes "-pc" on this system.
|
|
|
|
- echo ${UNAME_MACHINE}-mingw32
|
|
|
|
- exit ;;
|
|
|
|
- i*:PW*:*)
|
|
|
|
- echo ${UNAME_MACHINE}-pc-pw32
|
|
|
|
- exit ;;
|
|
|
|
- *:Interix*:*)
|
|
|
|
- case ${UNAME_MACHINE} in
|
|
|
|
- x86)
|
|
|
|
- echo i586-pc-interix${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- authenticamd | genuineintel | EM64T)
|
|
|
|
- echo x86_64-unknown-interix${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- IA64)
|
|
|
|
- echo ia64-unknown-interix${UNAME_RELEASE}
|
|
|
|
- exit ;;
|
|
|
|
- esac ;;
|
|
|
|
- [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*)
|
|
|
|
- echo i${UNAME_MACHINE}-pc-mks
|
|
|
|
- exit ;;
|
|
|
|
- 8664:Windows_NT:*)
|
|
|
|
- echo x86_64-pc-mks
|
|
|
|
- exit ;;
|
|
|
|
- i*:Windows_NT*:* | Pentium*:Windows_NT*:*)
|
|
|
|
- # How do we know it's Interix rather than the generic POSIX subsystem?
|
|
|
|
- # It also conflicts with pre-2.0 versions of AT&T UWIN. Should we
|
|
|
|
- # UNAME_MACHINE based on the output of uname instead of i386?
|
|
|
|
- echo i586-pc-interix
|
|
|
|
- exit ;;
|
|
|
|
- i*:UWIN*:*)
|
|
|
|
- echo ${UNAME_MACHINE}-pc-uwin
|
|
|
|
- exit ;;
|
|
|
|
- amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*)
|
|
|
|
- echo x86_64-unknown-cygwin
|
|
|
|
- exit ;;
|
|
|
|
- p*:CYGWIN*:*)
|
|
|
|
- echo powerpcle-unknown-cygwin
|
|
|
|
- exit ;;
|
|
|
|
- prep*:SunOS:5.*:*)
|
|
|
|
- echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
|
|
|
|
- exit ;;
|
|
|
|
- *:GNU:*:*)
|
|
|
|
- # the GNU system
|
|
|
|
- echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'`
|
|
|
|
- exit ;;
|
|
|
|
- *:GNU/*:*:*)
|
|
|
|
- # other systems with GNU libc and userland
|
|
|
|
- echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-gnu
|
|
|
|
- exit ;;
|
|
|
|
- i*86:Minix:*:*)
|
|
|
|
- echo ${UNAME_MACHINE}-pc-minix
|
|
|
|
- exit ;;
|
|
|
|
- alpha:Linux:*:*)
|
|
|
|
- case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in
|
|
|
|
- EV5) UNAME_MACHINE=alphaev5 ;;
|
|
|
|
- EV56) UNAME_MACHINE=alphaev56 ;;
|
|
|
|
- PCA56) UNAME_MACHINE=alphapca56 ;;
|
|
|
|
- PCA57) UNAME_MACHINE=alphapca56 ;;
|
|
|
|
- EV6) UNAME_MACHINE=alphaev6 ;;
|
|
|
|
- EV67) UNAME_MACHINE=alphaev67 ;;
|
|
|
|
- EV68*) UNAME_MACHINE=alphaev68 ;;
|
|
|
|
- esac
|
|
|
|
- objdump --private-headers /bin/sh | grep -q ld.so.1
|
|
|
|
- if test "$?" = 0 ; then LIBC="libc1" ; else LIBC="" ; fi
|
|
|
|
- echo ${UNAME_MACHINE}-unknown-linux-gnu${LIBC}
|
|
|
|
- exit ;;
|
|
|
|
- arm*:Linux:*:*)
|
|
|
|
- eval $set_cc_for_build
|
|
|
|
- if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \
|
|
|
|
- | grep -q __ARM_EABI__
|
|
|
|
- then
|
|
|
|
- echo ${UNAME_MACHINE}-unknown-linux-gnu
|
|
|
|
- else
|
|
|
|
- echo ${UNAME_MACHINE}-unknown-linux-gnueabi
|
|
|
|
- fi
|
|
|
|
- exit ;;
|
|
|
|
- avr32*:Linux:*:*)
|
|
|
|
- echo ${UNAME_MACHINE}-unknown-linux-gnu
|
|
|
|
- exit ;;
|
|
|
|
- cris:Linux:*:*)
|
|
|
|
- echo cris-axis-linux-gnu
|
|
|
|
- exit ;;
|
|
|
|
- crisv32:Linux:*:*)
|
|
|
|
- echo crisv32-axis-linux-gnu
|
|
|
|
- exit ;;
|
|
|
|
- frv:Linux:*:*)
|
|
|
|
- echo frv-unknown-linux-gnu
|
|
|
|
- exit ;;
|
|
|
|
- i*86:Linux:*:*)
|
|
|
|
- LIBC=gnu
|
|
|
|
- eval $set_cc_for_build
|
|
|
|
- sed 's/^ //' << EOF >$dummy.c
|
|
|
|
- #ifdef __dietlibc__
|
|
|
|
- LIBC=dietlibc
|
|
|
|
- #endif
|
|
|
|
-EOF
|
|
|
|
- eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC'`
|
|
|
|
- echo "${UNAME_MACHINE}-pc-linux-${LIBC}"
|
|
|
|
- exit ;;
|
|
|
|
- ia64:Linux:*:*)
|
|
|
|
- echo ${UNAME_MACHINE}-unknown-linux-gnu
|
|
|
|
- exit ;;
|
|
|
|
- m32r*:Linux:*:*)
|
|
|
|
- echo ${UNAME_MACHINE}-unknown-linux-gnu
|
|
|
|
- exit ;;
|
|
|
|
- m68*:Linux:*:*)
|
|
|
|
- echo ${UNAME_MACHINE}-unknown-linux-gnu
|
|
|
|
- exit ;;
|
|
|
|
- mips:Linux:*:* | mips64:Linux:*:*)
|
|
|
|
- eval $set_cc_for_build
|
|
|
|
- sed 's/^ //' << EOF >$dummy.c
|
|
|
|
- #undef CPU
|
|
|
|
- #undef ${UNAME_MACHINE}
|
|
|
|
- #undef ${UNAME_MACHINE}el
|
|
|
|
- #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL)
|
|
|
|
- CPU=${UNAME_MACHINE}el
|
|
|
|
- #else
|
|
|
|
- #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB)
|
|
|
|
- CPU=${UNAME_MACHINE}
|
|
|
|
- #else
|
|
|
|
- CPU=
|
|
|
|
- #endif
|
|
|
|
- #endif
|
|
|
|
-EOF
|
|
|
|
- eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^CPU'`
|
|
|
|
- test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; }
|
|
|
|
- ;;
|
|
|
|
- or32:Linux:*:*)
|
|
|
|
- echo or32-unknown-linux-gnu
|
|
|
|
- exit ;;
|
|
|
|
- padre:Linux:*:*)
|
|
|
|
- echo sparc-unknown-linux-gnu
|
|
|
|
- exit ;;
|
|
|
|
- parisc64:Linux:*:* | hppa64:Linux:*:*)
|
|
|
|
- echo hppa64-unknown-linux-gnu
|
|
|
|
- exit ;;
|
|
|
|
- parisc:Linux:*:* | hppa:Linux:*:*)
|
|
|
|
- # Look for CPU level
|
|
|
|
- case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in
|
|
|
|
- PA7*) echo hppa1.1-unknown-linux-gnu ;;
|
|
|
|
- PA8*) echo hppa2.0-unknown-linux-gnu ;;
|
|
|
|
- *) echo hppa-unknown-linux-gnu ;;
|
|
|
|
- esac
|
|
|
|
- exit ;;
|
|
|
|
- ppc64:Linux:*:*)
|
|
|
|
- echo powerpc64-unknown-linux-gnu
|
|
|
|
- exit ;;
|
|
|
|
- ppc:Linux:*:*)
|
|
|
|
- echo powerpc-unknown-linux-gnu
|
|
|
|
-        exit ;;
-    s390:Linux:*:* | s390x:Linux:*:*)
-        echo ${UNAME_MACHINE}-ibm-linux
-        exit ;;
-    sh64*:Linux:*:*)
-        echo ${UNAME_MACHINE}-unknown-linux-gnu
-        exit ;;
-    sh*:Linux:*:*)
-        echo ${UNAME_MACHINE}-unknown-linux-gnu
-        exit ;;
-    sparc:Linux:*:* | sparc64:Linux:*:*)
-        echo ${UNAME_MACHINE}-unknown-linux-gnu
-        exit ;;
-    vax:Linux:*:*)
-        echo ${UNAME_MACHINE}-dec-linux-gnu
-        exit ;;
-    x86_64:Linux:*:*)
-        echo x86_64-unknown-linux-gnu
-        exit ;;
-    xtensa*:Linux:*:*)
-        echo ${UNAME_MACHINE}-unknown-linux-gnu
-        exit ;;
-    i*86:DYNIX/ptx:4*:*)
-        # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there.
-        # earlier versions are messed up and put the nodename in both
-        # sysname and nodename.
-        echo i386-sequent-sysv4
-        exit ;;
-    i*86:UNIX_SV:4.2MP:2.*)
-        # Unixware is an offshoot of SVR4, but it has its own version
-        # number series starting with 2...
-        # I am not positive that other SVR4 systems won't match this,
-        # I just have to hope.  -- rms.
-        # Use sysv4.2uw... so that sysv4* matches it.
-        echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION}
-        exit ;;
-    i*86:OS/2:*:*)
-        # If we were able to find `uname', then EMX Unix compatibility
-        # is probably installed.
-        echo ${UNAME_MACHINE}-pc-os2-emx
-        exit ;;
-    i*86:XTS-300:*:STOP)
-        echo ${UNAME_MACHINE}-unknown-stop
-        exit ;;
-    i*86:atheos:*:*)
-        echo ${UNAME_MACHINE}-unknown-atheos
-        exit ;;
-    i*86:syllable:*:*)
-        echo ${UNAME_MACHINE}-pc-syllable
-        exit ;;
-    i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*)
-        echo i386-unknown-lynxos${UNAME_RELEASE}
-        exit ;;
-    i*86:*DOS:*:*)
-        echo ${UNAME_MACHINE}-pc-msdosdjgpp
-        exit ;;
-    i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*)
-        UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'`
-        if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then
-            echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL}
-        else
-            echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL}
-        fi
-        exit ;;
-    i*86:*:5:[678]*)
-        # UnixWare 7.x, OpenUNIX and OpenServer 6.
-        case `/bin/uname -X | grep "^Machine"` in
-            *486*) UNAME_MACHINE=i486 ;;
-            *Pentium) UNAME_MACHINE=i586 ;;
-            *Pent*|*Celeron) UNAME_MACHINE=i686 ;;
-        esac
-        echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION}
-        exit ;;
-    i*86:*:3.2:*)
-        if test -f /usr/options/cb.name; then
-            UNAME_REL=`sed -n 's/.*Version //p' </usr/options/cb.name`
-            echo ${UNAME_MACHINE}-pc-isc$UNAME_REL
-        elif /bin/uname -X 2>/dev/null >/dev/null ; then
-            UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')`
-            (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486
-            (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \
-                && UNAME_MACHINE=i586
-            (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \
-                && UNAME_MACHINE=i686
-            (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \
-                && UNAME_MACHINE=i686
-            echo ${UNAME_MACHINE}-pc-sco$UNAME_REL
-        else
-            echo ${UNAME_MACHINE}-pc-sysv32
-        fi
-        exit ;;
-    pc:*:*:*)
-        # Left here for compatibility:
-        # uname -m prints for DJGPP always 'pc', but it prints nothing about
-        # the processor, so we play safe by assuming i586.
-        # Note: whatever this is, it MUST be the same as what config.sub
-        # prints for the "djgpp" host, or else GDB configury will decide that
-        # this is a cross-build.
-        echo i586-pc-msdosdjgpp
-        exit ;;
-    Intel:Mach:3*:*)
-        echo i386-pc-mach3
-        exit ;;
-    paragon:*:*:*)
-        echo i860-intel-osf1
-        exit ;;
-    i860:*:4.*:*) # i860-SVR4
-        if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then
-            echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4
-        else # Add other i860-SVR4 vendors below as they are discovered.
-            echo i860-unknown-sysv${UNAME_RELEASE}  # Unknown i860-SVR4
-        fi
-        exit ;;
-    mini*:CTIX:SYS*5:*)
-        # "miniframe"
-        echo m68010-convergent-sysv
-        exit ;;
-    mc68k:UNIX:SYSTEM5:3.51m)
-        echo m68k-convergent-sysv
-        exit ;;
-    M680?0:D-NIX:5.3:*)
-        echo m68k-diab-dnix
-        exit ;;
-    M68*:*:R3V[5678]*:*)
-        test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;;
-    3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0)
-        OS_REL=''
-        test -r /etc/.relid \
-            && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
-        /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
-            && { echo i486-ncr-sysv4.3${OS_REL}; exit; }
-        /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
-            && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;;
-    3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*)
-        /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
-            && { echo i486-ncr-sysv4; exit; } ;;
-    NCR*:*:4.2:* | MPRAS*:*:4.2:*)
-        OS_REL='.3'
-        test -r /etc/.relid \
-            && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid`
-        /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
-            && { echo i486-ncr-sysv4.3${OS_REL}; exit; }
-        /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
-            && { echo i586-ncr-sysv4.3${OS_REL}; exit; }
-        /bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \
-            && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;;
-    m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*)
-        echo m68k-unknown-lynxos${UNAME_RELEASE}
-        exit ;;
-    mc68030:UNIX_System_V:4.*:*)
-        echo m68k-atari-sysv4
-        exit ;;
-    TSUNAMI:LynxOS:2.*:*)
-        echo sparc-unknown-lynxos${UNAME_RELEASE}
-        exit ;;
-    rs6000:LynxOS:2.*:*)
-        echo rs6000-unknown-lynxos${UNAME_RELEASE}
-        exit ;;
-    PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*)
-        echo powerpc-unknown-lynxos${UNAME_RELEASE}
-        exit ;;
-    SM[BE]S:UNIX_SV:*:*)
-        echo mips-dde-sysv${UNAME_RELEASE}
-        exit ;;
-    RM*:ReliantUNIX-*:*:*)
-        echo mips-sni-sysv4
-        exit ;;
-    RM*:SINIX-*:*:*)
-        echo mips-sni-sysv4
-        exit ;;
-    *:SINIX-*:*:*)
-        if uname -p 2>/dev/null >/dev/null ; then
-            UNAME_MACHINE=`(uname -p) 2>/dev/null`
-            echo ${UNAME_MACHINE}-sni-sysv4
-        else
-            echo ns32k-sni-sysv
-        fi
-        exit ;;
-    PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort
-                      # says <Richard.M.Bartel@ccMail.Census.GOV>
-        echo i586-unisys-sysv4
-        exit ;;
-    *:UNIX_System_V:4*:FTX*)
-        # From Gerald Hewes <hewes@openmarket.com>.
-        # How about differentiating between stratus architectures? -djm
-        echo hppa1.1-stratus-sysv4
-        exit ;;
-    *:*:*:FTX*)
-        # From seanf@swdc.stratus.com.
-        echo i860-stratus-sysv4
-        exit ;;
-    i*86:VOS:*:*)
-        # From Paul.Green@stratus.com.
-        echo ${UNAME_MACHINE}-stratus-vos
-        exit ;;
-    *:VOS:*:*)
-        # From Paul.Green@stratus.com.
-        echo hppa1.1-stratus-vos
-        exit ;;
-    mc68*:A/UX:*:*)
-        echo m68k-apple-aux${UNAME_RELEASE}
-        exit ;;
-    news*:NEWS-OS:6*:*)
-        echo mips-sony-newsos6
-        exit ;;
-    R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*)
-        if [ -d /usr/nec ]; then
-            echo mips-nec-sysv${UNAME_RELEASE}
-        else
-            echo mips-unknown-sysv${UNAME_RELEASE}
-        fi
-        exit ;;
-    BeBox:BeOS:*:*)     # BeOS running on hardware made by Be, PPC only.
-        echo powerpc-be-beos
-        exit ;;
-    BeMac:BeOS:*:*)     # BeOS running on Mac or Mac clone, PPC only.
-        echo powerpc-apple-beos
-        exit ;;
-    BePC:BeOS:*:*)      # BeOS running on Intel PC compatible.
-        echo i586-pc-beos
-        exit ;;
-    BePC:Haiku:*:*)     # Haiku running on Intel PC compatible.
-        echo i586-pc-haiku
-        exit ;;
-    SX-4:SUPER-UX:*:*)
-        echo sx4-nec-superux${UNAME_RELEASE}
-        exit ;;
-    SX-5:SUPER-UX:*:*)
-        echo sx5-nec-superux${UNAME_RELEASE}
-        exit ;;
-    SX-6:SUPER-UX:*:*)
-        echo sx6-nec-superux${UNAME_RELEASE}
-        exit ;;
-    SX-7:SUPER-UX:*:*)
-        echo sx7-nec-superux${UNAME_RELEASE}
-        exit ;;
-    SX-8:SUPER-UX:*:*)
-        echo sx8-nec-superux${UNAME_RELEASE}
-        exit ;;
-    SX-8R:SUPER-UX:*:*)
-        echo sx8r-nec-superux${UNAME_RELEASE}
-        exit ;;
-    Power*:Rhapsody:*:*)
-        echo powerpc-apple-rhapsody${UNAME_RELEASE}
-        exit ;;
-    *:Rhapsody:*:*)
-        echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE}
-        exit ;;
-    *:Darwin:*:*)
-        UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown
-        case $UNAME_PROCESSOR in
-            i386)
-                eval $set_cc_for_build
-                if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then
-                    if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \
-                        (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \
-                        grep IS_64BIT_ARCH >/dev/null
-                    then
-                        UNAME_PROCESSOR="x86_64"
-                    fi
-                fi ;;
-            unknown) UNAME_PROCESSOR=powerpc ;;
-        esac
-        echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE}
-        exit ;;
-    *:procnto*:*:* | *:QNX:[0123456789]*:*)
-        UNAME_PROCESSOR=`uname -p`
-        if test "$UNAME_PROCESSOR" = "x86"; then
-            UNAME_PROCESSOR=i386
-            UNAME_MACHINE=pc
-        fi
-        echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE}
-        exit ;;
-    *:QNX:*:4*)
-        echo i386-pc-qnx
-        exit ;;
-    NSE-?:NONSTOP_KERNEL:*:*)
-        echo nse-tandem-nsk${UNAME_RELEASE}
-        exit ;;
-    NSR-?:NONSTOP_KERNEL:*:*)
-        echo nsr-tandem-nsk${UNAME_RELEASE}
-        exit ;;
-    *:NonStop-UX:*:*)
-        echo mips-compaq-nonstopux
-        exit ;;
-    BS2000:POSIX*:*:*)
-        echo bs2000-siemens-sysv
-        exit ;;
-    DS/*:UNIX_System_V:*:*)
-        echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE}
-        exit ;;
-    *:Plan9:*:*)
-        # "uname -m" is not consistent, so use $cputype instead. 386
-        # is converted to i386 for consistency with other x86
-        # operating systems.
-        if test "$cputype" = "386"; then
-            UNAME_MACHINE=i386
-        else
-            UNAME_MACHINE="$cputype"
-        fi
-        echo ${UNAME_MACHINE}-unknown-plan9
-        exit ;;
-    *:TOPS-10:*:*)
-        echo pdp10-unknown-tops10
-        exit ;;
-    *:TENEX:*:*)
-        echo pdp10-unknown-tenex
-        exit ;;
-    KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*)
-        echo pdp10-dec-tops20
-        exit ;;
-    XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*)
-        echo pdp10-xkl-tops20
-        exit ;;
-    *:TOPS-20:*:*)
-        echo pdp10-unknown-tops20
-        exit ;;
-    *:ITS:*:*)
-        echo pdp10-unknown-its
-        exit ;;
-    SEI:*:*:SEIUX)
-        echo mips-sei-seiux${UNAME_RELEASE}
-        exit ;;
-    *:DragonFly:*:*)
-        echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`
-        exit ;;
-    *:*VMS:*:*)
-        UNAME_MACHINE=`(uname -p) 2>/dev/null`
-        case "${UNAME_MACHINE}" in
-            A*) echo alpha-dec-vms ; exit ;;
-            I*) echo ia64-dec-vms ; exit ;;
-            V*) echo vax-dec-vms ; exit ;;
-        esac ;;
-    *:XENIX:*:SysV)
-        echo i386-pc-xenix
-        exit ;;
-    i*86:skyos:*:*)
-        echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE}` | sed -e 's/ .*$//'
-        exit ;;
-    i*86:rdos:*:*)
-        echo ${UNAME_MACHINE}-pc-rdos
-        exit ;;
-    i*86:AROS:*:*)
-        echo ${UNAME_MACHINE}-pc-aros
-        exit ;;
-esac
-
-#echo '(No uname command or uname output not recognized.)' 1>&2
-#echo "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" 1>&2
-
-eval $set_cc_for_build
-cat >$dummy.c <<EOF
-#ifdef _SEQUENT_
-# include <sys/types.h>
-# include <sys/utsname.h>
-#endif
-main ()
-{
-#if defined (sony)
-#if defined (MIPSEB)
-  /* BFD wants "bsd" instead of "newsos".  Perhaps BFD should be changed,
-     I don't know....  */
-  printf ("mips-sony-bsd\n"); exit (0);
-#else
-#include <sys/param.h>
-  printf ("m68k-sony-newsos%s\n",
-#ifdef NEWSOS4
-          "4"
-#else
-          ""
-#endif
-         ); exit (0);
-#endif
-#endif
-
-#if defined (__arm) && defined (__acorn) && defined (__unix)
-  printf ("arm-acorn-riscix\n"); exit (0);
-#endif
-
-#if defined (hp300) && !defined (hpux)
-  printf ("m68k-hp-bsd\n"); exit (0);
-#endif
-
-#if defined (NeXT)
-#if !defined (__ARCHITECTURE__)
-#define __ARCHITECTURE__ "m68k"
-#endif
-  int version;
-  version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`;
-  if (version < 4)
-    printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version);
-  else
-    printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version);
-  exit (0);
-#endif
-
-#if defined (MULTIMAX) || defined (n16)
-#if defined (UMAXV)
-  printf ("ns32k-encore-sysv\n"); exit (0);
-#else
-#if defined (CMU)
-  printf ("ns32k-encore-mach\n"); exit (0);
-#else
-  printf ("ns32k-encore-bsd\n"); exit (0);
-#endif
-#endif
-#endif
-
-#if defined (__386BSD__)
-  printf ("i386-pc-bsd\n"); exit (0);
-#endif
-
-#if defined (sequent)
-#if defined (i386)
-  printf ("i386-sequent-dynix\n"); exit (0);
-#endif
-#if defined (ns32000)
-  printf ("ns32k-sequent-dynix\n"); exit (0);
-#endif
-#endif
-
-#if defined (_SEQUENT_)
-  struct utsname un;
-
-  uname(&un);
-
-  if (strncmp(un.version, "V2", 2) == 0) {
-    printf ("i386-sequent-ptx2\n"); exit (0);
-  }
-  if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */
-    printf ("i386-sequent-ptx1\n"); exit (0);
-  }
-  printf ("i386-sequent-ptx\n"); exit (0);
-
-#endif
-
-#if defined (vax)
-# if !defined (ultrix)
-#  include <sys/param.h>
-#  if defined (BSD)
-#   if BSD == 43
-      printf ("vax-dec-bsd4.3\n"); exit (0);
-#   else
-#    if BSD == 199006
-      printf ("vax-dec-bsd4.3reno\n"); exit (0);
-#    else
-      printf ("vax-dec-bsd\n"); exit (0);
-#    endif
-#   endif
-#  else
-    printf ("vax-dec-bsd\n"); exit (0);
-#  endif
-# else
-    printf ("vax-dec-ultrix\n"); exit (0);
-# endif
-#endif
-
-#if defined (alliant) && defined (i860)
-  printf ("i860-alliant-bsd\n"); exit (0);
-#endif
-
-  exit (1);
-}
-EOF
-
-$CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null && SYSTEM_NAME=`$dummy` &&
-        { echo "$SYSTEM_NAME"; exit; }
-
-# Apollos put the system type in the environment.
-
-test -d /usr/apollo && { echo ${ISP}-apollo-${SYSTYPE}; exit; }
-
-# Convex versions that predate uname can use getsysinfo(1)
-
-if [ -x /usr/convex/getsysinfo ]
-then
-    case `getsysinfo -f cpu_type` in
-    c1*)
-        echo c1-convex-bsd
-        exit ;;
-    c2*)
-        if getsysinfo -f scalar_acc
-        then echo c32-convex-bsd
-        else echo c2-convex-bsd
-        fi
-        exit ;;
-    c34*)
-        echo c34-convex-bsd
-        exit ;;
-    c38*)
-        echo c38-convex-bsd
-        exit ;;
-    c4*)
-        echo c4-convex-bsd
-        exit ;;
-    esac
-fi
-
-cat >&2 <<EOF
-$0: unable to guess system type
-
-This script, last modified $timestamp, has failed to recognize
-the operating system you are using. It is advised that you
-download the most up to date version of the config scripts from
-
-  http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD
-and
-  http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD
-
-If the version you run ($0) is already up to date, please
-send the following data and any information you think might be
-pertinent to <config-patches@gnu.org> in order to provide the needed
-information to handle your system.
-
-config.guess timestamp = $timestamp
-
-uname -m = `(uname -m) 2>/dev/null || echo unknown`
-uname -r = `(uname -r) 2>/dev/null || echo unknown`
-uname -s = `(uname -s) 2>/dev/null || echo unknown`
-uname -v = `(uname -v) 2>/dev/null || echo unknown`
-
-/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null`
-/bin/uname -X     = `(/bin/uname -X) 2>/dev/null`
-
-hostinfo               = `(hostinfo) 2>/dev/null`
-/bin/universe          = `(/bin/universe) 2>/dev/null`
-/usr/bin/arch -k       = `(/usr/bin/arch -k) 2>/dev/null`
-/bin/arch              = `(/bin/arch) 2>/dev/null`
-/usr/bin/oslevel       = `(/usr/bin/oslevel) 2>/dev/null`
-/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null`
-
-UNAME_MACHINE = ${UNAME_MACHINE}
-UNAME_RELEASE = ${UNAME_RELEASE}
-UNAME_SYSTEM  = ${UNAME_SYSTEM}
-UNAME_VERSION = ${UNAME_VERSION}
-EOF
-
-exit 1
-
-# Local variables:
-# eval: (add-hook 'write-file-hooks 'time-stamp)
-# time-stamp-start: "timestamp='"
-# time-stamp-format: "%:y-%02m-%02d"
-# time-stamp-end: "'"
-# End:
--- a/libraries/libapparmor/config.sub
+++ /dev/null
@@ -1,1714 +0,0 @@
-#! /bin/sh
|
|
|
|
-# Configuration validation subroutine script.
|
|
|
|
-# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
|
|
|
|
-# 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
|
|
|
|
-# Free Software Foundation, Inc.
|
|
|
|
-
|
|
|
|
-timestamp='2010-01-22'
|
|
|
|
-
|
|
|
|
-# This file is (in principle) common to ALL GNU software.
|
|
|
|
-# The presence of a machine in this file suggests that SOME GNU software
|
|
|
|
-# can handle that machine. It does not imply ALL GNU software can.
|
|
|
|
-#
|
|
|
|
-# This file is free software; you can redistribute it and/or modify
|
|
|
|
-# it under the terms of the GNU General Public License as published by
|
|
|
|
-# the Free Software Foundation; either version 2 of the License, or
|
|
|
|
-# (at your option) any later version.
|
|
|
|
-#
|
|
|
|
-# This program is distributed in the hope that it will be useful,
|
|
|
|
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
-# GNU General Public License for more details.
|
|
|
|
-#
|
|
|
|
-# You should have received a copy of the GNU General Public License
|
|
|
|
-# along with this program; if not, write to the Free Software
|
|
|
|
-# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA
|
|
|
|
-# 02110-1301, USA.
|
|
|
|
-#
|
|
|
|
-# As a special exception to the GNU General Public License, if you
|
|
|
|
-# distribute this file as part of a program that contains a
|
|
|
|
-# configuration script generated by Autoconf, you may include it under
|
|
|
|
-# the same distribution terms that you use for the rest of that program.
|
|
|
|
-
|
|
|
|
-
|
|
|
|
-# Please send patches to <config-patches@gnu.org>. Submit a context
|
|
|
|
-# diff and a properly formatted GNU ChangeLog entry.
|
|
|
|
-#
|
|
|
|
-# Configuration subroutine to validate and canonicalize a configuration type.
|
|
|
|
-# Supply the specified configuration type as an argument.
|
|
|
|
-# If it is invalid, we print an error message on stderr and exit with code 1.
|
|
|
|
-# Otherwise, we print the canonical config type on stdout and succeed.
|
|
|
|
-
|
|
|
|
-# You can get the latest version of this script from:
|
|
|
|
-# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD
|
|
|
|
-
|
|
|
|
-# This file is supposed to be the same for all GNU packages
|
|
|
|
-# and recognize all the CPU types, system types and aliases
|
|
|
|
-# that are meaningful with *any* GNU software.
|
|
|
|
-# Each package is responsible for reporting which valid configurations
|
|
|
|
-# it does not support. The user should be able to distinguish
|
|
|
|
-# a failure to support a valid configuration from a meaningless
|
|
|
|
-# configuration.
|
|
|
|
-
|
|
|
|
-# The goal of this file is to map all the various variations of a given
|
|
|
|
-# machine specification into a single specification in the form:
|
|
|
|
-# CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM
|
|
|
|
-# or in some cases, the newer four-part form:
|
|
|
|
-# CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM
|
|
|
|
-# It is wrong to echo any other type of specification.
|
|
|
|
-
|
|
|
|
-me=`echo "$0" | sed -e 's,.*/,,'`
|
|
|
|
-
|
|
|
|
-usage="\
|
|
|
|
-Usage: $0 [OPTION] CPU-MFR-OPSYS
|
|
|
|
- $0 [OPTION] ALIAS
|
|
|
|
-
|
|
|
|
-Canonicalize a configuration name.
|
|
|
|
-
|
|
|
|
-Operation modes:
|
|
|
|
- -h, --help print this help, then exit
|
|
|
|
- -t, --time-stamp print date of last modification, then exit
|
|
|
|
- -v, --version print version number, then exit
|
|
|
|
-
|
|
|
|
-Report bugs and patches to <config-patches@gnu.org>."
|
|
|
|
-
|
|
|
|
-version="\
|
|
|
|
-GNU config.sub ($timestamp)
|
|
|
|
-
|
|
|
|
-Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
|
|
|
|
-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free
|
|
|
|
-Software Foundation, Inc.
|
|
|
|
-
|
|
|
|
-This is free software; see the source for copying conditions. There is NO
|
|
|
|
-warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
|
|
|
|
-
|
|
|
|
-help="
|
|
|
|
-Try \`$me --help' for more information."
|
|
|
|
-
|
|
|
|
-# Parse command line
|
|
|
|
-while test $# -gt 0 ; do
|
|
|
|
- case $1 in
|
|
|
|
- --time-stamp | --time* | -t )
|
|
|
|
- echo "$timestamp" ; exit ;;
|
|
|
|
- --version | -v )
|
|
|
|
- echo "$version" ; exit ;;
|
|
|
|
- --help | --h* | -h )
|
|
|
|
- echo "$usage"; exit ;;
|
|
|
|
- -- ) # Stop option processing
|
|
|
|
- shift; break ;;
|
|
|
|
- - ) # Use stdin as input.
|
|
|
|
- break ;;
|
|
|
|
- -* )
|
|
|
|
- echo "$me: invalid option $1$help"
|
|
|
|
- exit 1 ;;
|
|
|
|
-
|
|
|
|
- *local*)
|
|
|
|
- # First pass through any local machine types.
|
|
|
|
- echo $1
|
|
|
|
- exit ;;
|
|
|
|
-
|
|
|
|
- * )
|
|
|
|
- break ;;
|
|
|
|
- esac
|
|
|
|
-done
|
|
|
|
-
|
|
|
|
-case $# in
|
|
|
|
- 0) echo "$me: missing argument$help" >&2
|
|
|
|
- exit 1;;
|
|
|
|
- 1) ;;
|
|
|
|
- *) echo "$me: too many arguments$help" >&2
|
|
|
|
- exit 1;;
|
|
|
|
-esac
|
|
|
|
-
|
|
|
|
-# Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any).
|
|
|
|
-# Here we must recognize all the valid KERNEL-OS combinations.
|
|
|
|
-maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'`
|
|
|
|
-case $maybe_os in
|
|
|
|
- nto-qnx* | linux-gnu* | linux-dietlibc | linux-newlib* | linux-uclibc* | \
|
|
|
|
- uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | knetbsd*-gnu* | netbsd*-gnu* | \
|
|
|
|
- kopensolaris*-gnu* | \
|
|
|
|
- storm-chaos* | os2-emx* | rtmk-nova*)
|
|
|
|
- os=-$maybe_os
|
|
|
|
- basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`
|
|
|
|
- ;;
|
|
|
|
- *)
|
|
|
|
- basic_machine=`echo $1 | sed 's/-[^-]*$//'`
|
|
|
|
- if [ $basic_machine != $1 ]
|
|
|
|
- then os=`echo $1 | sed 's/.*-/-/'`
|
|
|
|
- else os=; fi
|
|
|
|
- ;;
|
|
|
|
-esac
|
|
|
|
-
|
|
|
|
-### Let's recognize common machines as not being operating systems so
|
|
|
|
-### that things like config.sub decstation-3100 work. We also
|
|
|
|
-### recognize some manufacturers as not being operating systems, so we
|
|
|
|
-### can provide default operating systems below.
|
|
|
|
-case $os in
|
|
|
|
- -sun*os*)
|
|
|
|
- # Prevent following clause from handling this invalid input.
|
|
|
|
- ;;
|
|
|
|
- -dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \
|
|
|
|
- -att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \
|
|
|
|
- -unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \
|
|
|
|
- -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\
|
|
|
|
- -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \
|
|
|
|
- -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \
|
|
|
|
- -apple | -axis | -knuth | -cray | -microblaze)
|
|
|
|
- os=
|
|
|
|
- basic_machine=$1
|
|
|
|
- ;;
|
|
|
|
- -bluegene*)
|
|
|
|
- os=-cnk
|
|
|
|
- ;;
|
|
|
|
- -sim | -cisco | -oki | -wec | -winbond)
|
|
|
|
- os=
|
|
|
|
- basic_machine=$1
|
|
|
|
- ;;
|
|
|
|
- -scout)
|
|
|
|
- ;;
|
|
|
|
- -wrs)
|
|
|
|
- os=-vxworks
|
|
|
|
- basic_machine=$1
|
|
|
|
- ;;
|
|
|
|
- -chorusos*)
|
|
|
|
- os=-chorusos
|
|
|
|
- basic_machine=$1
|
|
|
|
- ;;
|
|
|
|
- -chorusrdb)
|
|
|
|
- os=-chorusrdb
|
|
|
|
- basic_machine=$1
|
|
|
|
- ;;
|
|
|
|
- -hiux*)
|
|
|
|
- os=-hiuxwe2
|
|
|
|
- ;;
|
|
|
|
- -sco6)
|
|
|
|
- os=-sco5v6
|
|
|
|
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
|
|
|
|
- ;;
|
|
|
|
- -sco5)
|
|
|
|
- os=-sco3.2v5
|
|
|
|
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
|
|
|
|
- ;;
|
|
|
|
- -sco4)
|
|
|
|
- os=-sco3.2v4
|
|
|
|
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
|
|
|
|
- ;;
|
|
|
|
- -sco3.2.[4-9]*)
|
|
|
|
- os=`echo $os | sed -e 's/sco3.2./sco3.2v/'`
|
|
|
|
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
|
|
|
|
- ;;
|
|
|
|
- -sco3.2v[4-9]*)
|
|
|
|
- # Don't forget version if it is 3.2v4 or newer.
|
|
|
|
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
|
|
|
|
- ;;
|
|
|
|
- -sco5v6*)
|
|
|
|
- # Don't forget version if it is 3.2v4 or newer.
|
|
|
|
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
|
|
|
|
- ;;
|
|
|
|
- -sco*)
|
|
|
|
- os=-sco3.2v2
|
|
|
|
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
|
|
|
|
- ;;
|
|
|
|
- -udk*)
|
|
|
|
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
|
|
|
|
- ;;
|
|
|
|
- -isc)
|
|
|
|
- os=-isc2.2
|
|
|
|
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
|
|
|
|
- ;;
|
|
|
|
- -clix*)
|
|
|
|
- basic_machine=clipper-intergraph
|
|
|
|
- ;;
|
|
|
|
- -isc*)
|
|
|
|
- basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'`
|
|
|
|
- ;;
|
|
|
|
- -lynx*)
|
|
|
|
- os=-lynxos
|
|
|
|
- ;;
|
|
|
|
- -ptx*)
|
|
|
|
- basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'`
|
|
|
|
- ;;
|
|
|
|
- -windowsnt*)
|
|
|
|
- os=`echo $os | sed -e 's/windowsnt/winnt/'`
|
|
|
|
- ;;
|
|
|
|
- -psos*)
|
|
|
|
- os=-psos
|
|
|
|
- ;;
|
|
|
|
- -mint | -mint[0-9]*)
|
|
|
|
- basic_machine=m68k-atari
|
|
|
|
- os=-mint
|
|
|
|
- ;;
|
|
|
|
-esac
|
|
|
|
-
|
|
|
|
-# Decode aliases for certain CPU-COMPANY combinations.
|
|
|
|
-case $basic_machine in
|
|
|
|
- # Recognize the basic CPU types without company name.
|
|
|
|
- # Some are omitted here because they have special meanings below.
|
|
|
|
- 1750a | 580 \
|
|
|
|
- | a29k \
|
|
|
|
- | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \
|
|
|
|
- | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \
|
|
|
|
- | am33_2.0 \
|
|
|
|
- | arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr | avr32 \
|
|
|
|
- | bfin \
|
|
|
|
- | c4x | clipper \
|
|
|
|
- | d10v | d30v | dlx | dsp16xx \
|
|
|
|
- | fido | fr30 | frv \
|
|
|
|
- | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \
|
|
|
|
- | i370 | i860 | i960 | ia64 \
|
|
|
|
- | ip2k | iq2000 \
|
|
|
|
- | lm32 \
|
|
|
|
- | m32c | m32r | m32rle | m68000 | m68k | m88k \
|
|
|
|
- | maxq | mb | microblaze | mcore | mep | metag \
|
|
|
|
- | mips | mipsbe | mipseb | mipsel | mipsle \
|
|
|
|
- | mips16 \
|
|
|
|
- | mips64 | mips64el \
|
|
|
|
- | mips64octeon | mips64octeonel \
|
|
|
|
- | mips64orion | mips64orionel \
|
|
|
|
- | mips64r5900 | mips64r5900el \
|
|
|
|
- | mips64vr | mips64vrel \
|
|
|
|
- | mips64vr4100 | mips64vr4100el \
|
|
|
|
- | mips64vr4300 | mips64vr4300el \
|
|
|
|
- | mips64vr5000 | mips64vr5000el \
|
|
|
|
- | mips64vr5900 | mips64vr5900el \
|
|
|
|
- | mipsisa32 | mipsisa32el \
|
|
|
|
- | mipsisa32r2 | mipsisa32r2el \
|
|
|
|
- | mipsisa64 | mipsisa64el \
|
|
|
|
- | mipsisa64r2 | mipsisa64r2el \
|
|
|
|
- | mipsisa64sb1 | mipsisa64sb1el \
|
|
|
|
- | mipsisa64sr71k | mipsisa64sr71kel \
|
|
|
|
- | mipstx39 | mipstx39el \
|
|
|
|
- | mn10200 | mn10300 \
|
|
|
|
- | moxie \
|
|
|
|
- | mt \
|
|
|
|
- | msp430 \
|
|
|
|
- | nios | nios2 \
|
|
|
|
- | ns16k | ns32k \
|
|
|
|
- | or32 \
|
|
|
|
- | pdp10 | pdp11 | pj | pjl \
|
|
|
|
- | powerpc | powerpc64 | powerpc64le | powerpcle | ppcbe \
|
|
|
|
- | pyramid \
|
|
|
|
- | rx \
|
|
|
|
- | score \
|
|
|
|
- | sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[34]eb | sheb | shbe | shle | sh[1234]le | sh3ele \
|
|
|
|
- | sh64 | sh64le \
|
|
|
|
- | sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \
|
|
|
|
- | sparcv8 | sparcv9 | sparcv9b | sparcv9v \
|
|
|
|
- | spu | strongarm \
|
|
|
|
- | tahoe | thumb | tic4x | tic80 | tron \
|
|
|
|
- | ubicom32 \
|
|
|
|
- | v850 | v850e \
|
|
|
|
- | we32k \
|
|
|
|
- | x86 | xc16x | xscale | xscalee[bl] | xstormy16 | xtensa \
|
|
|
|
- | z8k | z80)
|
|
|
|
- basic_machine=$basic_machine-unknown
|
|
|
|
- ;;
|
|
|
|
- m6811 | m68hc11 | m6812 | m68hc12 | picochip)
|
|
|
|
- # Motorola 68HC11/12.
|
|
|
|
- basic_machine=$basic_machine-unknown
|
|
|
|
- os=-none
|
|
|
|
- ;;
|
|
|
|
- m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k)
|
|
|
|
- ;;
|
|
|
|
- ms1)
|
|
|
|
- basic_machine=mt-unknown
|
|
|
|
- ;;
|
|
|
|
-
|
|
|
|
- # We use `pc' rather than `unknown'
|
|
|
|
- # because (1) that's what they normally are, and
|
|
|
|
- # (2) the word "unknown" tends to confuse beginning users.
|
|
|
|
- i*86 | x86_64)
|
|
|
|
- basic_machine=$basic_machine-pc
|
|
|
|
- ;;
|
|
|
|
- # Object if more than one company name word.
|
|
|
|
- *-*-*)
|
|
|
|
- echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2
|
|
|
|
- exit 1
|
|
|
|
- ;;
|
|
|
|
- # Recognize the basic CPU types with company name.
|
|
|
|
- 580-* \
|
|
|
|
- | a29k-* \
|
|
|
|
- | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \
|
|
|
|
- | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \
|
|
|
|
- | alphapca5[67]-* | alpha64pca5[67]-* | arc-* \
|
|
|
|
- | arm-* | armbe-* | armle-* | armeb-* | armv*-* \
|
|
|
|
- | avr-* | avr32-* \
|
|
|
|
- | bfin-* | bs2000-* \
|
|
|
|
- | c[123]* | c30-* | [cjt]90-* | c4x-* | c54x-* | c55x-* | c6x-* \
|
|
|
|
- | clipper-* | craynv-* | cydra-* \
|
|
|
|
- | d10v-* | d30v-* | dlx-* \
|
|
|
|
- | elxsi-* \
|
|
|
|
- | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \
|
|
|
|
- | h8300-* | h8500-* \
|
|
|
|
- | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \
|
|
|
|
- | i*86-* | i860-* | i960-* | ia64-* \
|
|
|
|
- | ip2k-* | iq2000-* \
|
|
|
|
- | lm32-* \
|
|
|
|
- | m32c-* | m32r-* | m32rle-* \
|
|
|
|
- | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \
|
|
|
|
- | m88110-* | m88k-* | maxq-* | mcore-* | metag-* | microblaze-* \
|
|
|
|
- | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \
|
|
|
|
- | mips16-* \
|
|
|
|
- | mips64-* | mips64el-* \
|
|
|
|
- | mips64octeon-* | mips64octeonel-* \
|
|
|
|
- | mips64orion-* | mips64orionel-* \
|
|
|
|
- | mips64r5900-* | mips64r5900el-* \
|
|
|
|
- | mips64vr-* | mips64vrel-* \
|
|
|
|
- | mips64vr4100-* | mips64vr4100el-* \
|
|
|
|
- | mips64vr4300-* | mips64vr4300el-* \
|
|
|
|
- | mips64vr5000-* | mips64vr5000el-* \
|
|
|
|
- | mips64vr5900-* | mips64vr5900el-* \
|
|
|
|
- | mipsisa32-* | mipsisa32el-* \
|
|
|
|
- | mipsisa32r2-* | mipsisa32r2el-* \
|
|
|
|
- | mipsisa64-* | mipsisa64el-* \
|
|
|
|
- | mipsisa64r2-* | mipsisa64r2el-* \
|
|
|
|
- | mipsisa64sb1-* | mipsisa64sb1el-* \
|
|
|
|
- | mipsisa64sr71k-* | mipsisa64sr71kel-* \
|
|
|
|
- | mipstx39-* | mipstx39el-* \
|
|
|
|
- | mmix-* \
|
|
|
|
- | mt-* \
|
|
|
|
- | msp430-* \
|
|
|
|
- | nios-* | nios2-* \
|
|
|
|
- | none-* | np1-* | ns16k-* | ns32k-* \
|
|
|
|
- | orion-* \
|
|
|
|
- | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \
|
|
|
|
- | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* | ppcbe-* \
|
|
|
|
- | pyramid-* \
|
|
|
|
- | romp-* | rs6000-* | rx-* \
|
|
|
|
- | sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \
|
|
|
|
- | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \
|
|
|
|
- | sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \
|
|
|
|
- | sparclite-* \
|
|
|
|
- | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | strongarm-* | sv1-* | sx?-* \
|
|
|
|
- | tahoe-* | thumb-* \
|
|
|
|
- | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \
|
|
|
|
- | tile-* | tilegx-* \
|
|
|
|
- | tron-* \
|
|
|
|
- | ubicom32-* \
|
|
|
|
- | v850-* | v850e-* | vax-* \
|
|
|
|
- | we32k-* \
|
|
|
|
- | x86-* | x86_64-* | xc16x-* | xps100-* | xscale-* | xscalee[bl]-* \
|
|
|
|
- | xstormy16-* | xtensa*-* \
|
|
|
|
- | ymp-* \
|
|
|
|
- | z8k-* | z80-*)
|
|
|
|
- ;;
|
|
|
|
- # Recognize the basic CPU types without company name, with glob match.
|
|
|
|
- xtensa*)
|
|
|
|
- basic_machine=$basic_machine-unknown
|
|
|
|
- ;;
|
|
|
|
- # Recognize the various machine names and aliases which stand
|
|
|
|
- # for a CPU type and a company and sometimes even an OS.
|
|
|
|
- 386bsd)
|
|
|
|
- basic_machine=i386-unknown
|
|
|
|
- os=-bsd
|
|
|
|
- ;;
|
|
|
|
- 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc)
|
|
|
|
- basic_machine=m68000-att
|
|
|
|
- ;;
|
|
|
|
- 3b*)
|
|
|
|
- basic_machine=we32k-att
|
|
|
|
- ;;
|
|
|
|
- a29khif)
|
|
|
|
- basic_machine=a29k-amd
|
|
|
|
- os=-udi
|
|
|
|
- ;;
|
|
|
|
- abacus)
|
|
|
|
- basic_machine=abacus-unknown
|
|
|
|
- ;;
|
|
|
|
- adobe68k)
|
|
|
|
- basic_machine=m68010-adobe
|
|
|
|
- os=-scout
|
|
|
|
- ;;
|
|
|
|
- alliant | fx80)
|
|
|
|
- basic_machine=fx80-alliant
|
|
|
|
- ;;
|
|
|
|
- altos | altos3068)
|
|
|
|
- basic_machine=m68k-altos
|
|
|
|
- ;;
|
|
|
|
- am29k)
|
|
|
|
- basic_machine=a29k-none
|
|
|
|
- os=-bsd
|
|
|
|
- ;;
|
|
|
|
- amd64)
|
|
|
|
- basic_machine=x86_64-pc
|
|
|
|
- ;;
|
|
|
|
- amd64-*)
|
|
|
|
- basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'`
|
|
|
|
- ;;
|
|
|
|
- amdahl)
|
|
|
|
- basic_machine=580-amdahl
|
|
|
|
- os=-sysv
|
|
|
|
- ;;
|
|
|
|
- amiga | amiga-*)
|
|
|
|
- basic_machine=m68k-unknown
|
|
|
|
- ;;
|
|
|
|
- amigaos | amigados)
|
|
|
|
- basic_machine=m68k-unknown
|
|
|
|
- os=-amigaos
|
|
|
|
- ;;
|
|
|
|
- amigaunix | amix)
|
|
|
|
- basic_machine=m68k-unknown
|
|
|
|
- os=-sysv4
|
|
|
|
- ;;
|
|
|
|
- apollo68)
|
|
|
|
- basic_machine=m68k-apollo
|
|
|
|
- os=-sysv
|
|
|
|
- ;;
|
|
|
|
- apollo68bsd)
|
|
|
|
- basic_machine=m68k-apollo
|
|
|
|
- os=-bsd
|
|
|
|
- ;;
|
|
|
|
- aros)
|
|
|
|
- basic_machine=i386-pc
|
|
|
|
- os=-aros
|
|
|
|
- ;;
|
|
|
|
- aux)
|
|
|
|
- basic_machine=m68k-apple
|
|
|
|
- os=-aux
|
|
|
|
- ;;
|
|
|
|
- balance)
|
|
|
|
- basic_machine=ns32k-sequent
|
|
|
|
- os=-dynix
|
|
|
|
- ;;
|
|
|
|
- blackfin)
|
|
|
|
- basic_machine=bfin-unknown
|
|
|
|
- os=-linux
|
|
|
|
- ;;
|
|
|
|
- blackfin-*)
|
|
|
|
- basic_machine=bfin-`echo $basic_machine | sed 's/^[^-]*-//'`
|
|
|
|
- os=-linux
|
|
|
|
- ;;
|
|
|
|
- bluegene*)
|
|
|
|
- basic_machine=powerpc-ibm
|
|
|
|
- os=-cnk
|
|
|
|
- ;;
|
|
|
|
- c90)
|
|
|
|
- basic_machine=c90-cray
|
|
|
|
- os=-unicos
|
|
|
|
- ;;
|
|
|
|
- cegcc)
|
|
|
|
- basic_machine=arm-unknown
|
|
|
|
- os=-cegcc
|
|
|
|
- ;;
|
|
|
|
- convex-c1)
|
|
|
|
- basic_machine=c1-convex
|
|
|
|
- os=-bsd
|
|
|
|
- ;;
|
|
|
|
- convex-c2)
|
|
|
|
- basic_machine=c2-convex
|
|
|
|
- os=-bsd
|
|
|
|
- ;;
|
|
|
|
- convex-c32)
|
|
|
|
- basic_machine=c32-convex
|
|
|
|
- os=-bsd
|
|
|
|
- ;;
|
|
|
|
- convex-c34)
|
|
|
|
- basic_machine=c34-convex
|
|
|
|
- os=-bsd
|
|
|
|
- ;;
|
|
|
|
- convex-c38)
|
|
|
|
- basic_machine=c38-convex
|
|
|
|
- os=-bsd
|
|
|
|
- ;;
|
|
|
|
- cray | j90)
|
|
|
|
- basic_machine=j90-cray
|
|
|
|
- os=-unicos
|
|
|
|
- ;;
|
|
|
|
- craynv)
|
|
|
|
- basic_machine=craynv-cray
|
|
|
|
- os=-unicosmp
|
|
|
|
- ;;
|
|
|
|
- cr16)
|
|
|
|
- basic_machine=cr16-unknown
|
|
|
|
- os=-elf
|
|
|
|
- ;;
|
|
|
|
- crds | unos)
|
|
|
|
- basic_machine=m68k-crds
|
|
|
|
- ;;
|
|
|
|
- crisv32 | crisv32-* | etraxfs*)
|
|
|
|
- basic_machine=crisv32-axis
|
|
|
|
- ;;
|
|
|
|
- cris | cris-* | etrax*)
|
|
|
|
- basic_machine=cris-axis
|
|
|
|
- ;;
|
|
|
|
- crx)
|
|
|
|
- basic_machine=crx-unknown
|
|
|
|
- os=-elf
|
|
|
|
- ;;
|
|
|
|
- da30 | da30-*)
|
|
|
|
- basic_machine=m68k-da30
|
|
|
|
- ;;
|
|
|
|
- decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn)
|
|
|
|
- basic_machine=mips-dec
|
|
|
|
- ;;
|
|
|
|
- decsystem10* | dec10*)
|
|
|
|
- basic_machine=pdp10-dec
|
|
|
|
- os=-tops10
|
|
|
|
- ;;
|
|
|
|
- decsystem20* | dec20*)
|
|
|
|
- basic_machine=pdp10-dec
|
|
|
|
- os=-tops20
|
|
|
|
- ;;
|
|
|
|
- delta | 3300 | motorola-3300 | motorola-delta \
|
|
|
|
- | 3300-motorola | delta-motorola)
|
|
|
|
- basic_machine=m68k-motorola
|
|
|
|
- ;;
|
|
|
|
- delta88)
|
|
|
|
- basic_machine=m88k-motorola
|
|
|
|
- os=-sysv3
|
|
|
|
- ;;
|
|
|
|
- dicos)
|
|
|
|
- basic_machine=i686-pc
|
|
|
|
- os=-dicos
|
|
|
|
- ;;
|
|
|
|
- djgpp)
|
|
|
|
- basic_machine=i586-pc
|
|
|
|
- os=-msdosdjgpp
|
|
|
|
- ;;
|
|
|
|
- dpx20 | dpx20-*)
|
|
|
|
- basic_machine=rs6000-bull
|
|
|
|
- os=-bosx
|
|
|
|
- ;;
|
|
|
|
- dpx2* | dpx2*-bull)
|
|
|
|
- basic_machine=m68k-bull
|
|
|
|
- os=-sysv3
|
|
|
|
- ;;
|
|
|
|
- ebmon29k)
|
|
|
|
- basic_machine=a29k-amd
|
|
|
|
- os=-ebmon
|
|
|
|
- ;;
|
|
|
|
- elxsi)
|
|
|
|
- basic_machine=elxsi-elxsi
|
|
|
|
- os=-bsd
|
|
|
|
- ;;
|
|
|
|
- encore | umax | mmax)
|
|
|
|
- basic_machine=ns32k-encore
|
|
|
|
- ;;
|
|
|
|
- es1800 | OSE68k | ose68k | ose | OSE)
|
|
|
|
- basic_machine=m68k-ericsson
|
|
|
|
- os=-ose
|
|
|
|
- ;;
|
|
|
|
- fx2800)
|
|
|
|
- basic_machine=i860-alliant
|
|
|
|
- ;;
|
|
|
|
- genix)
|
|
|
|
- basic_machine=ns32k-ns
|
|
|
|
- ;;
|
|
|
|
- gmicro)
|
|
|
|
- basic_machine=tron-gmicro
|
|
|
|
- os=-sysv
|
|
|
|
- ;;
|
|
|
|
- go32)
|
|
|
|
- basic_machine=i386-pc
|
|
|
|
- os=-go32
|
|
|
|
- ;;
|
|
|
|
- h3050r* | hiux*)
|
|
|
|
- basic_machine=hppa1.1-hitachi
|
|
|
|
- os=-hiuxwe2
|
|
|
|
- ;;
|
|
|
|
- h8300hms)
|
|
|
|
- basic_machine=h8300-hitachi
|
|
|
|
- os=-hms
|
|
|
|
- ;;
|
|
|
|
- h8300xray)
|
|
|
|
- basic_machine=h8300-hitachi
|
|
|
|
- os=-xray
|
|
|
|
- ;;
|
|
|
|
- h8500hms)
|
|
|
|
- basic_machine=h8500-hitachi
|
|
|
|
- os=-hms
|
|
|
|
- ;;
|
|
|
|
- harris)
|
|
|
|
- basic_machine=m88k-harris
|
|
|
|
- os=-sysv3
|
|
|
|
- ;;
|
|
|
|
- hp300-*)
|
|
|
|
- basic_machine=m68k-hp
|
|
|
|
- ;;
|
|
|
|
- hp300bsd)
|
|
|
|
- basic_machine=m68k-hp
|
|
|
|
- os=-bsd
|
|
|
|
- ;;
|
|
|
|
- hp300hpux)
|
|
|
|
- basic_machine=m68k-hp
|
|
|
|
- os=-hpux
|
|
|
|
- ;;
|
|
|
|
- hp3k9[0-9][0-9] | hp9[0-9][0-9])
|
|
|
|
- basic_machine=hppa1.0-hp
|
|
|
|
- ;;
|
|
|
|
- hp9k2[0-9][0-9] | hp9k31[0-9])
|
|
|
|
- basic_machine=m68000-hp
|
|
|
|
- ;;
|
|
|
|
- hp9k3[2-9][0-9])
|
|
|
|
- basic_machine=m68k-hp
|
|
|
|
- ;;
|
|
|
|
- hp9k6[0-9][0-9] | hp6[0-9][0-9])
|
|
|
|
- basic_machine=hppa1.0-hp
|
|
|
|
- ;;
|
|
|
|
- hp9k7[0-79][0-9] | hp7[0-79][0-9])
|
|
|
|
- basic_machine=hppa1.1-hp
|
|
|
|
- ;;
|
|
|
|
- hp9k78[0-9] | hp78[0-9])
|
|
|
|
- # FIXME: really hppa2.0-hp
|
|
|
|
- basic_machine=hppa1.1-hp
|
|
|
|
- ;;
|
|
|
|
- hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893)
|
|
|
|
- # FIXME: really hppa2.0-hp
|
|
|
|
- basic_machine=hppa1.1-hp
|
|
|
|
- ;;
|
|
|
|
- hp9k8[0-9][13679] | hp8[0-9][13679])
|
|
|
|
- basic_machine=hppa1.1-hp
|
|
|
|
- ;;
|
|
|
|
- hp9k8[0-9][0-9] | hp8[0-9][0-9])
|
|
|
|
- basic_machine=hppa1.0-hp
|
|
|
|
- ;;
|
|
|
|
- hppa-next)
|
|
|
|
- os=-nextstep3
|
|
|
|
- ;;
|
|
|
|
- hppaosf)
|
|
|
|
- basic_machine=hppa1.1-hp
|
|
|
|
- os=-osf
|
|
|
|
- ;;
|
|
|
|
- hppro)
|
|
|
|
- basic_machine=hppa1.1-hp
|
|
|
|
- os=-proelf
|
|
|
|
- ;;
|
|
|
|
- i370-ibm* | ibm*)
|
|
|
|
- basic_machine=i370-ibm
|
|
|
|
- ;;
|
|
|
|
-# I'm not sure what "Sysv32" means. Should this be sysv3.2?
|
|
|
|
- i*86v32)
|
|
|
|
- basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
|
|
|
|
- os=-sysv32
|
|
|
|
- ;;
|
|
|
|
- i*86v4*)
|
|
|
|
- basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
|
|
|
|
- os=-sysv4
|
|
|
|
- ;;
|
|
|
|
- i*86v)
|
|
|
|
- basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
|
|
|
|
- os=-sysv
|
|
|
|
- ;;
|
|
|
|
- i*86sol2)
|
|
|
|
- basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'`
|
|
|
|
- os=-solaris2
|
|
|
|
- ;;
|
|
|
|
- i386mach)
|
|
|
|
- basic_machine=i386-mach
|
|
|
|
- os=-mach
|
|
|
|
- ;;
|
|
|
|
- i386-vsta | vsta)
|
|
|
|
- basic_machine=i386-unknown
|
|
|
|
- os=-vsta
|
|
|
|
- ;;
|
|
|
|
- iris | iris4d)
|
|
|
|
- basic_machine=mips-sgi
|
|
|
|
- case $os in
|
|
|
|
- -irix*)
|
|
|
|
- ;;
|
|
|
|
- *)
|
|
|
|
- os=-irix4
|
|
|
|
- ;;
|
|
|
|
- esac
|
|
|
|
- ;;
|
|
|
|
- isi68 | isi)
|
|
|
|
- basic_machine=m68k-isi
|
|
|
|
- os=-sysv
|
|
|
|
- ;;
|
|
|
|
- m68knommu)
|
|
|
|
- basic_machine=m68k-unknown
|
|
|
|
- os=-linux
|
|
|
|
- ;;
|
|
|
|
- m68knommu-*)
|
|
|
|
- basic_machine=m68k-`echo $basic_machine | sed 's/^[^-]*-//'`
|
|
|
|
- os=-linux
|
|
|
|
- ;;
|
|
|
|
- m88k-omron*)
|
|
|
|
- basic_machine=m88k-omron
|
|
|
|
- ;;
|
|
|
|
- magnum | m3230)
|
|
|
|
- basic_machine=mips-mips
|
|
|
|
- os=-sysv
|
|
|
|
- ;;
|
|
|
|
- merlin)
|
|
|
|
- basic_machine=ns32k-utek
|
|
|
|
- os=-sysv
|
|
|
|
- ;;
|
|
|
|
- microblaze)
|
|
|
|
- basic_machine=microblaze-xilinx
|
|
|
|
- ;;
|
|
|
|
- mingw32)
|
|
|
|
- basic_machine=i386-pc
|
|
|
|
- os=-mingw32
|
|
|
|
- ;;
|
|
|
|
- mingw32ce)
|
|
|
|
- basic_machine=arm-unknown
|
|
|
|
- os=-mingw32ce
|
|
|
|
- ;;
|
|
|
|
- miniframe)
|
|
|
|
- basic_machine=m68000-convergent
|
|
|
|
- ;;
|
|
|
|
- *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*)
|
|
|
|
- basic_machine=m68k-atari
|
|
|
|
- os=-mint
|
|
|
|
- ;;
|
|
|
|
- mips3*-*)
|
|
|
|
- basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`
|
|
|
|
- ;;
|
|
|
|
- mips3*)
|
|
|
|
- basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown
|
|
|
|
- ;;
|
|
|
|
- monitor)
|
|
|
|
- basic_machine=m68k-rom68k
|
|
|
|
- os=-coff
|
|
|
|
- ;;
|
|
|
|
- morphos)
|
|
|
|
- basic_machine=powerpc-unknown
|
|
|
|
- os=-morphos
|
|
|
|
- ;;
|
|
|
|
- msdos)
|
|
|
|
- basic_machine=i386-pc
|
|
|
|
- os=-msdos
|
|
|
|
- ;;
|
|
|
|
- ms1-*)
|
|
|
|
- basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'`
|
|
|
|
- ;;
|
|
|
|
- mvs)
|
|
|
|
- basic_machine=i370-ibm
|
|
|
|
- os=-mvs
|
|
|
|
- ;;
|
|
|
|
- ncr3000)
|
|
|
|
- basic_machine=i486-ncr
|
|
|
|
- os=-sysv4
|
|
|
|
- ;;
|
|
|
|
- netbsd386)
|
|
|
|
- basic_machine=i386-unknown
|
|
|
|
- os=-netbsd
|
|
|
|
- ;;
|
|
|
|
- netwinder)
|
|
|
|
- basic_machine=armv4l-rebel
|
|
|
|
- os=-linux
|
|
|
|
- ;;
|
|
|
|
- news | news700 | news800 | news900)
|
|
|
|
- basic_machine=m68k-sony
|
|
|
|
- os=-newsos
|
|
|
|
- ;;
|
|
|
|
- news1000)
|
|
|
|
- basic_machine=m68030-sony
|
|
|
|
- os=-newsos
|
|
|
|
- ;;
|
|
|
|
- news-3600 | risc-news)
|
|
|
|
- basic_machine=mips-sony
|
|
|
|
- os=-newsos
|
|
|
|
- ;;
|
|
|
|
- necv70)
|
|
|
|
- basic_machine=v70-nec
|
|
|
|
- os=-sysv
|
|
|
|
- ;;
|
|
|
|
- next | m*-next )
|
|
|
|
- basic_machine=m68k-next
|
|
|
|
- case $os in
|
|
|
|
- -nextstep* )
|
|
|
|
- ;;
|
|
|
|
- -ns2*)
|
|
|
|
- os=-nextstep2
|
|
|
|
- ;;
|
|
|
|
- *)
|
|
|
|
- os=-nextstep3
|
|
|
|
- ;;
|
|
|
|
- esac
|
|
|
|
- ;;
|
|
|
|
- nh3000)
|
|
|
|
- basic_machine=m68k-harris
|
|
|
|
- os=-cxux
|
|
|
|
- ;;
|
|
|
|
- nh[45]000)
|
|
|
|
- basic_machine=m88k-harris
|
|
|
|
- os=-cxux
|
|
|
|
- ;;
|
|
|
|
- nindy960)
|
|
|
|
- basic_machine=i960-intel
|
|
|
|
- os=-nindy
|
|
|
|
- ;;
|
|
|
|
- mon960)
|
|
|
|
- basic_machine=i960-intel
|
|
|
|
- os=-mon960
|
|
|
|
- ;;
|
|
|
|
- nonstopux)
|
|
|
|
- basic_machine=mips-compaq
|
|
|
|
- os=-nonstopux
|
|
|
|
- ;;
|
|
|
|
- np1)
|
|
|
|
- basic_machine=np1-gould
|
|
|
|
- ;;
|
|
|
|
- nsr-tandem)
|
|
|
|
- basic_machine=nsr-tandem
|
|
|
|
- ;;
|
|
|
|
- op50n-* | op60c-*)
|
|
|
|
- basic_machine=hppa1.1-oki
|
|
|
|
- os=-proelf
|
|
|
|
- ;;
|
|
|
|
- openrisc | openrisc-*)
|
|
|
|
- basic_machine=or32-unknown
|
|
|
|
- ;;
|
|
|
|
- os400)
|
|
|
|
- basic_machine=powerpc-ibm
|
|
|
|
- os=-os400
|
|
|
|
- ;;
|
|
|
|
- OSE68000 | ose68000)
|
|
|
|
- basic_machine=m68000-ericsson
|
|
|
|
- os=-ose
|
|
|
|
- ;;
|
|
|
|
- os68k)
|
|
|
|
- basic_machine=m68k-none
|
|
|
|
- os=-os68k
|
|
|
|
- ;;
|
|
|
|
- pa-hitachi)
|
|
|
|
- basic_machine=hppa1.1-hitachi
|
|
|
|
- os=-hiuxwe2
|
|
|
|
- ;;
|
|
|
|
- paragon)
|
|
|
|
- basic_machine=i860-intel
|
|
|
|
- os=-osf
|
|
|
|
- ;;
|
|
|
|
- parisc)
|
|
|
|
- basic_machine=hppa-unknown
|
|
|
|
- os=-linux
|
|
|
|
- ;;
|
|
|
|
- parisc-*)
|
|
|
|
- basic_machine=hppa-`echo $basic_machine | sed 's/^[^-]*-//'`
|
|
|
|
- os=-linux
|
|
|
|
- ;;
|
|
|
|
- pbd)
|
|
|
|
- basic_machine=sparc-tti
|
|
|
|
- ;;
|
|
|
|
- pbb)
|
|
|
|
- basic_machine=m68k-tti
|
|
|
|
- ;;
|
|
|
|
- pc532 | pc532-*)
|
|
|
|
- basic_machine=ns32k-pc532
|
|
|
|
- ;;
|
|
|
|
- pc98)
|
|
|
|
- basic_machine=i386-pc
|
|
|
|
- ;;
|
|
|
|
- pc98-*)
|
|
|
|
- basic_machine=i386-`echo $basic_machine | sed 's/^[^-]*-//'`
|
|
|
|
- ;;
|
|
|
|
- pentium | p5 | k5 | k6 | nexgen | viac3)
|
|
|
|
- basic_machine=i586-pc
|
|
|
|
- ;;
|
|
|
|
- pentiumpro | p6 | 6x86 | athlon | athlon_*)
|
|
|
|
- basic_machine=i686-pc
|
|
|
|
- ;;
|
|
|
|
- pentiumii | pentium2 | pentiumiii | pentium3)
|
|
|
|
- basic_machine=i686-pc
|
|
|
|
- ;;
|
|
|
|
- pentium4)
|
|
|
|
- basic_machine=i786-pc
|
|
|
|
- ;;
|
|
|
|
- pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*)
|
|
|
|
- basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'`
|
|
|
|
- ;;
|
|
|
|
- pentiumpro-* | p6-* | 6x86-* | athlon-*)
|
|
|
|
- basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
|
|
|
|
- ;;
|
|
|
|
- pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*)
|
|
|
|
- basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'`
|
|
|
|
- ;;
|
|
|
|
- pentium4-*)
|
|
|
|
- basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'`
|
|
|
|
- ;;
|
|
|
|
- pn)
|
|
|
|
- basic_machine=pn-gould
|
|
|
|
- ;;
|
|
|
|
- power) basic_machine=power-ibm
|
|
|
|
- ;;
|
|
|
|
- ppc) basic_machine=powerpc-unknown
|
|
|
|
- ;;
|
|
|
|
- ppc-*) basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'`
|
|
|
|
- ;;
|
|
|
|
- ppcle | powerpclittle | ppc-le | powerpc-little)
|
|
|
|
- basic_machine=powerpcle-unknown
|
|
|
|
- ;;
|
|
|
|
- ppcle-* | powerpclittle-*)
|
|
|
|
- basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'`
|
|
|
|
- ;;
|
|
|
|
- ppc64) basic_machine=powerpc64-unknown
|
|
|
|
- ;;
|
|
|
|
- ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'`
|
|
|
|
- ;;
|
|
|
|
- ppc64le | powerpc64little | ppc64-le | powerpc64-little)
|
|
|
|
- basic_machine=powerpc64le-unknown
|
|
|
|
- ;;
|
|
|
|
- ppc64le-* | powerpc64little-*)
|
|
|
|
- basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'`
|
|
|
|
- ;;
|
|
|
|
- ps2)
|
|
|
|
- basic_machine=i386-ibm
|
|
|
|
- ;;
|
|
|
|
- pw32)
|
|
|
|
- basic_machine=i586-unknown
|
|
|
|
- os=-pw32
|
|
|
|
- ;;
|
|
|
|
- rdos)
|
|
|
|
- basic_machine=i386-pc
|
|
|
|
- os=-rdos
|
|
|
|
- ;;
|
|
|
|
- rom68k)
|
|
|
|
- basic_machine=m68k-rom68k
|
|
|
|
- os=-coff
|
|
|
|
- ;;
|
|
|
|
- rm[46]00)
|
|
|
|
- basic_machine=mips-siemens
|
|
|
|
- ;;
|
|
|
|
- rtpc | rtpc-*)
|
|
|
|
- basic_machine=romp-ibm
|
|
|
|
- ;;
|
|
|
|
- s390 | s390-*)
|
|
|
|
- basic_machine=s390-ibm
|
|
|
|
- ;;
|
|
|
|
- s390x | s390x-*)
|
|
|
|
- basic_machine=s390x-ibm
|
|
|
|
- ;;
|
|
|
|
- sa29200)
|
|
|
|
- basic_machine=a29k-amd
|
|
|
|
- os=-udi
|
|
|
|
- ;;
|
|
|
|
- sb1)
|
|
|
|
- basic_machine=mipsisa64sb1-unknown
|
|
|
|
- ;;
|
|
|
|
- sb1el)
|
|
|
|
- basic_machine=mipsisa64sb1el-unknown
|
|
|
|
- ;;
|
|
|
|
- sde)
|
|
|
|
- basic_machine=mipsisa32-sde
|
|
|
|
- os=-elf
|
|
|
|
- ;;
|
|
|
|
- sei)
|
|
|
|
- basic_machine=mips-sei
|
|
|
|
- os=-seiux
|
|
|
|
- ;;
|
|
|
|
- sequent)
|
|
|
|
- basic_machine=i386-sequent
|
|
|
|
- ;;
|
|
|
|
- sh)
|
|
|
|
- basic_machine=sh-hitachi
|
|
|
|
- os=-hms
|
|
|
|
- ;;
|
|
|
|
- sh5el)
|
|
|
|
- basic_machine=sh5le-unknown
|
|
|
|
- ;;
|
|
|
|
- sh64)
|
|
|
|
- basic_machine=sh64-unknown
|
|
|
|
- ;;
|
|
|
|
- sparclite-wrs | simso-wrs)
|
|
|
|
- basic_machine=sparclite-wrs
|
|
|
|
- os=-vxworks
|
|
|
|
- ;;
|
|
|
|
- sps7)
|
|
|
|
- basic_machine=m68k-bull
|
|
|
|
- os=-sysv2
|
|
|
|
- ;;
|
|
|
|
- spur)
|
|
|
|
- basic_machine=spur-unknown
|
|
|
|
- ;;
|
|
|
|
- st2000)
|
|
|
|
- basic_machine=m68k-tandem
|
|
|
|
- ;;
|
|
|
|
- stratus)
|
|
|
|
- basic_machine=i860-stratus
|
|
|
|
- os=-sysv4
|
|
|
|
- ;;
|
|
|
|
- sun2)
|
|
|
|
- basic_machine=m68000-sun
|
|
|
|
- ;;
|
|
|
|
- sun2os3)
|
|
|
|
- basic_machine=m68000-sun
|
|
|
|
- os=-sunos3
|
|
|
|
- ;;
|
|
|
|
- sun2os4)
|
|
|
|
- basic_machine=m68000-sun
|
|
|
|
- os=-sunos4
|
|
|
|
- ;;
|
|
|
|
- sun3os3)
|
|
|
|
- basic_machine=m68k-sun
|
|
|
|
- os=-sunos3
|
|
|
|
- ;;
|
|
|
|
- sun3os4)
|
|
|
|
- basic_machine=m68k-sun
|
|
|
|
- os=-sunos4
|
|
|
|
- ;;
|
|
|
|
- sun4os3)
|
|
|
|
- basic_machine=sparc-sun
|
|
|
|
- os=-sunos3
|
|
|
|
- ;;
|
|
|
|
- sun4os4)
|
|
|
|
- basic_machine=sparc-sun
|
|
|
|
- os=-sunos4
|
|
|
|
- ;;
|
|
|
|
- sun4sol2)
|
|
|
|
- basic_machine=sparc-sun
|
|
|
|
- os=-solaris2
|
|
|
|
- ;;
|
|
|
|
- sun3 | sun3-*)
|
|
|
|
- basic_machine=m68k-sun
|
|
|
|
- ;;
|
|
|
|
- sun4)
|
|
|
|
- basic_machine=sparc-sun
|
|
|
|
- ;;
|
|
|
|
- sun386 | sun386i | roadrunner)
|
|
|
|
- basic_machine=i386-sun
|
|
|
|
- ;;
|
|
|
|
- sv1)
|
|
|
|
- basic_machine=sv1-cray
|
|
|
|
- os=-unicos
|
|
|
|
- ;;
|
|
|
|
- symmetry)
|
|
|
|
- basic_machine=i386-sequent
|
|
|
|
- os=-dynix
|
|
|
|
- ;;
|
|
|
|
- t3e)
|
|
|
|
- basic_machine=alphaev5-cray
|
|
|
|
- os=-unicos
|
|
|
|
- ;;
|
|
|
|
- t90)
|
|
|
|
- basic_machine=t90-cray
|
|
|
|
- os=-unicos
|
|
|
|
- ;;
|
|
|
|
- tic54x | c54x*)
|
|
|
|
- basic_machine=tic54x-unknown
|
|
|
|
- os=-coff
|
|
|
|
- ;;
|
|
|
|
- tic55x | c55x*)
|
|
|
|
- basic_machine=tic55x-unknown
|
|
|
|
- os=-coff
|
|
|
|
- ;;
|
|
|
|
- tic6x | c6x*)
|
|
|
|
- basic_machine=tic6x-unknown
|
|
|
|
- os=-coff
|
|
|
|
- ;;
|
|
|
|
- # This must be matched before tile*.
|
|
|
|
- tilegx*)
|
|
|
|
- basic_machine=tilegx-unknown
|
|
|
|
- os=-linux-gnu
|
|
|
|
- ;;
|
|
|
|
- tile*)
|
|
|
|
- basic_machine=tile-unknown
|
|
|
|
- os=-linux-gnu
|
|
|
|
- ;;
|
|
|
|
- tx39)
|
|
|
|
- basic_machine=mipstx39-unknown
|
|
|
|
- ;;
|
|
|
|
- tx39el)
|
|
|
|
- basic_machine=mipstx39el-unknown
|
|
|
|
- ;;
|
|
|
|
- toad1)
|
|
|
|
- basic_machine=pdp10-xkl
|
|
|
|
- os=-tops20
|
|
|
|
- ;;
|
|
|
|
- tower | tower-32)
|
|
|
|
- basic_machine=m68k-ncr
|
|
|
|
- ;;
|
|
|
|
- tpf)
|
|
|
|
- basic_machine=s390x-ibm
|
|
|
|
- os=-tpf
|
|
|
|
- ;;
|
|
|
|
- udi29k)
|
|
|
|
- basic_machine=a29k-amd
|
|
|
|
- os=-udi
|
|
|
|
- ;;
|
|
|
|
- ultra3)
|
|
|
|
- basic_machine=a29k-nyu
|
|
|
|
- os=-sym1
|
|
|
|
- ;;
|
|
|
|
- v810 | necv810)
|
|
|
|
- basic_machine=v810-nec
|
|
|
|
- os=-none
|
|
|
|
- ;;
|
|
|
|
- vaxv)
|
|
|
|
- basic_machine=vax-dec
|
|
|
|
- os=-sysv
|
|
|
|
- ;;
|
|
|
|
- vms)
|
|
|
|
- basic_machine=vax-dec
|
|
|
|
- os=-vms
|
|
|
|
- ;;
|
|
|
|
- vpp*|vx|vx-*)
|
|
|
|
- basic_machine=f301-fujitsu
|
|
|
|
- ;;
|
|
|
|
- vxworks960)
|
|
|
|
- basic_machine=i960-wrs
|
|
|
|
- os=-vxworks
|
|
|
|
- ;;
|
|
|
|
- vxworks68)
|
|
|
|
- basic_machine=m68k-wrs
|
|
|
|
- os=-vxworks
|
|
|
|
- ;;
|
|
|
|
- vxworks29k)
|
|
|
|
- basic_machine=a29k-wrs
|
|
|
|
- os=-vxworks
|
|
|
|
- ;;
|
|
|
|
- w65*)
|
|
|
|
- basic_machine=w65-wdc
|
|
|
|
- os=-none
|
|
|
|
- ;;
|
|
|
|
- w89k-*)
|
|
|
|
- basic_machine=hppa1.1-winbond
|
|
|
|
- os=-proelf
|
|
|
|
- ;;
|
|
|
|
- xbox)
|
|
|
|
- basic_machine=i686-pc
|
|
|
|
- os=-mingw32
|
|
|
|
- ;;
|
|
|
|
- xps | xps100)
|
|
|
|
- basic_machine=xps100-honeywell
|
|
|
|
- ;;
|
|
|
|
- ymp)
|
|
|
|
- basic_machine=ymp-cray
|
|
|
|
- os=-unicos
|
|
|
|
- ;;
|
|
|
|
- z8k-*-coff)
|
|
|
|
- basic_machine=z8k-unknown
|
|
|
|
- os=-sim
|
|
|
|
- ;;
|
|
|
|
- z80-*-coff)
|
|
|
|
- basic_machine=z80-unknown
|
|
|
|
- os=-sim
|
|
|
|
- ;;
|
|
|
|
- none)
|
|
|
|
- basic_machine=none-none
|
|
|
|
- os=-none
|
|
|
|
- ;;
|
|
|
|
-
|
|
|
|
-# Here we handle the default manufacturer of certain CPU types. It is in
|
|
|
|
-# some cases the only manufacturer, in others, it is the most popular.
|
|
|
|
- w89k)
|
|
|
|
- basic_machine=hppa1.1-winbond
|
|
|
|
- ;;
|
|
|
|
- op50n)
|
|
|
|
- basic_machine=hppa1.1-oki
|
|
|
|
- ;;
|
|
|
|
- op60c)
|
|
|
|
- basic_machine=hppa1.1-oki
|
|
|
|
- ;;
|
|
|
|
- romp)
|
|
|
|
- basic_machine=romp-ibm
|
|
|
|
- ;;
|
|
|
|
- mmix)
|
|
|
|
- basic_machine=mmix-knuth
|
|
|
|
- ;;
|
|
|
|
- rs6000)
|
|
|
|
- basic_machine=rs6000-ibm
|
|
|
|
- ;;
|
|
|
|
- vax)
|
|
|
|
- basic_machine=vax-dec
|
|
|
|
- ;;
|
|
|
|
- pdp10)
|
|
|
|
- # there are many clones, so DEC is not a safe bet
|
|
|
|
- basic_machine=pdp10-unknown
|
|
|
|
- ;;
|
|
|
|
- pdp11)
|
|
|
|
- basic_machine=pdp11-dec
|
|
|
|
- ;;
|
|
|
|
- we32k)
|
|
|
|
- basic_machine=we32k-att
|
|
|
|
- ;;
|
|
|
|
- sh[1234] | sh[24]a | sh[24]aeb | sh[34]eb | sh[1234]le | sh[23]ele)
|
|
|
|
- basic_machine=sh-unknown
|
|
|
|
- ;;
|
|
|
|
- sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v)
|
|
|
|
- basic_machine=sparc-sun
|
|
|
|
- ;;
|
|
|
|
- cydra)
|
|
|
|
- basic_machine=cydra-cydrome
|
|
|
|
- ;;
|
|
|
|
- orion)
|
|
|
|
- basic_machine=orion-highlevel
|
|
|
|
- ;;
|
|
|
|
- orion105)
|
|
|
|
- basic_machine=clipper-highlevel
|
|
|
|
- ;;
|
|
|
|
- mac | mpw | mac-mpw)
|
|
|
|
- basic_machine=m68k-apple
|
|
|
|
- ;;
|
|
|
|
- pmac | pmac-mpw)
|
|
|
|
- basic_machine=powerpc-apple
|
|
|
|
- ;;
|
|
|
|
- *-unknown)
|
|
|
|
- # Make sure to match an already-canonicalized machine name.
|
|
|
|
- ;;
|
|
|
|
- *)
|
|
|
|
- echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2
|
|
|
|
- exit 1
|
|
|
|
- ;;
|
|
|
|
-esac
|
|
|
|
-
|
|
|
|
-# Here we canonicalize certain aliases for manufacturers.
|
|
|
|
-case $basic_machine in
|
|
|
|
- *-digital*)
|
|
|
|
- basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'`
|
|
|
|
- ;;
|
|
|
|
- *-commodore*)
|
|
|
|
- basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'`
|
|
|
|
- ;;
|
|
|
|
- *)
|
|
|
|
- ;;
|
|
|
|
-esac
|
|
|
|
-
|
|
|
|
-# Decode manufacturer-specific aliases for certain operating systems.
|
|
|
|
-
|
|
|
|
-if [ x"$os" != x"" ]
|
|
|
|
-then
|
|
|
|
-case $os in
|
|
|
|
- # First match some system type aliases
|
|
|
|
- # that might get confused with valid system types.
|
|
|
|
- # -solaris* is a basic system type, with this one exception.
|
|
|
|
- -auroraux)
|
|
|
|
- os=-auroraux
|
|
|
|
- ;;
|
|
|
|
- -solaris1 | -solaris1.*)
|
|
|
|
- os=`echo $os | sed -e 's|solaris1|sunos4|'`
|
|
|
|
- ;;
|
|
|
|
- -solaris)
|
|
|
|
- os=-solaris2
|
|
|
|
- ;;
|
|
|
|
- -svr4*)
|
|
|
|
- os=-sysv4
|
|
|
|
- ;;
|
|
|
|
- -unixware*)
|
|
|
|
- os=-sysv4.2uw
|
|
|
|
- ;;
|
|
|
|
- -gnu/linux*)
|
|
|
|
- os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'`
|
|
|
|
- ;;
|
|
|
|
- # First accept the basic system types.
|
|
|
|
- # The portable systems comes first.
|
|
|
|
- # Each alternative MUST END IN A *, to match a version number.
|
|
|
|
- # -sysv* is not here because it comes later, after sysvr4.
|
|
|
|
- -gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \
|
|
|
|
- | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\
|
|
|
|
- | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \
|
|
|
|
- | -sym* | -kopensolaris* \
|
|
|
|
- | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \
|
|
|
|
- | -aos* | -aros* \
|
|
|
|
- | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \
|
|
|
|
- | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \
|
|
|
|
- | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \
|
|
|
|
- | -openbsd* | -solidbsd* \
|
|
|
|
- | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \
|
|
|
|
- | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \
|
|
|
|
- | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \
|
|
|
|
- | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \
|
|
|
|
- | -chorusos* | -chorusrdb* | -cegcc* \
|
|
|
|
- | -cygwin* | -pe* | -psos* | -moss* | -proelf* | -rtems* \
|
|
|
|
- | -mingw32* | -linux-gnu* | -linux-newlib* | -linux-uclibc* \
|
|
|
|
- | -uxpv* | -beos* | -mpeix* | -udk* \
|
|
|
|
- | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \
|
|
|
|
- | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \
|
|
|
|
- | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \
|
|
|
|
- | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \
|
|
|
|
- | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \
|
|
|
|
- | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \
|
|
|
|
- | -skyos* | -haiku* | -rdos* | -toppers* | -drops* | -es*)
|
|
|
|
- # Remember, each alternative MUST END IN *, to match a version number.
|
|
|
|
- ;;
|
|
|
|
- -qnx*)
|
|
|
|
- case $basic_machine in
|
|
|
|
- x86-* | i*86-*)
|
|
|
|
- ;;
|
|
|
|
- *)
|
|
|
|
- os=-nto$os
|
|
|
|
- ;;
|
|
|
|
- esac
|
|
|
|
- ;;
|
|
|
|
- -nto-qnx*)
|
|
|
|
- ;;
|
|
|
|
- -nto*)
|
|
|
|
- os=`echo $os | sed -e 's|nto|nto-qnx|'`
|
|
|
|
- ;;
|
|
|
|
- -sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \
|
|
|
|
- | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \
|
|
|
|
- | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*)
|
|
|
|
- ;;
|
|
|
|
- -mac*)
|
|
|
|
- os=`echo $os | sed -e 's|mac|macos|'`
|
|
|
|
- ;;
|
|
|
|
- -linux-dietlibc)
|
|
|
|
- os=-linux-dietlibc
|
|
|
|
- ;;
|
|
|
|
- -linux*)
|
|
|
|
- os=`echo $os | sed -e 's|linux|linux-gnu|'`
|
|
|
|
- ;;
|
|
|
|
- -sunos5*)
|
|
|
|
- os=`echo $os | sed -e 's|sunos5|solaris2|'`
|
|
|
|
- ;;
|
|
|
|
- -sunos6*)
|
|
|
|
- os=`echo $os | sed -e 's|sunos6|solaris3|'`
|
|
|
|
- ;;
|
|
|
|
- -opened*)
|
|
|
|
- os=-openedition
|
|
|
|
- ;;
|
|
|
|
- -os400*)
|
|
|
|
- os=-os400
|
|
|
|
- ;;
|
|
|
|
- -wince*)
|
|
|
|
- os=-wince
|
|
|
|
- ;;
|
|
|
|
- -osfrose*)
|
|
|
|
- os=-osfrose
|
|
|
|
- ;;
|
|
|
|
- -osf*)
|
|
|
|
- os=-osf
|
|
|
|
- ;;
|
|
|
|
- -utek*)
|
|
|
|
- os=-bsd
|
|
|
|
- ;;
|
|
|
|
- -dynix*)
|
|
|
|
- os=-bsd
|
|
|
|
- ;;
|
|
|
|
- -acis*)
|
|
|
|
- os=-aos
|
|
|
|
- ;;
|
|
|
|
- -atheos*)
|
|
|
|
- os=-atheos
|
|
|
|
- ;;
|
|
|
|
- -syllable*)
|
|
|
|
- os=-syllable
|
|
|
|
- ;;
|
|
|
|
- -386bsd)
|
|
|
|
- os=-bsd
|
|
|
|
- ;;
|
|
|
|
- -ctix* | -uts*)
|
|
|
|
- os=-sysv
|
|
|
|
- ;;
|
|
|
|
- -nova*)
|
|
|
|
- os=-rtmk-nova
|
|
|
|
- ;;
|
|
|
|
- -ns2 )
|
|
|
|
- os=-nextstep2
|
|
|
|
- ;;
|
|
|
|
- -nsk*)
|
|
|
|
- os=-nsk
|
|
|
|
- ;;
|
|
|
|
- # Preserve the version number of sinix5.
|
|
|
|
- -sinix5.*)
|
|
|
|
- os=`echo $os | sed -e 's|sinix|sysv|'`
|
|
|
|
- ;;
|
|
|
|
- -sinix*)
|
|
|
|
- os=-sysv4
|
|
|
|
- ;;
|
|
|
|
- -tpf*)
|
|
|
|
- os=-tpf
|
|
|
|
- ;;
|
|
|
|
- -triton*)
|
|
|
|
- os=-sysv3
|
|
|
|
- ;;
|
|
|
|
- -oss*)
|
|
|
|
- os=-sysv3
|
|
|
|
- ;;
|
|
|
|
- -svr4)
|
|
|
|
- os=-sysv4
|
|
|
|
- ;;
|
|
|
|
- -svr3)
|
|
|
|
- os=-sysv3
|
|
|
|
- ;;
|
|
|
|
- -sysvr4)
|
|
|
|
- os=-sysv4
|
|
|
|
- ;;
|
|
|
|
- # This must come after -sysvr4.
|
|
|
|
- -sysv*)
|
|
|
|
- ;;
|
|
|
|
- -ose*)
|
|
|
|
- os=-ose
|
|
|
|
- ;;
|
|
|
|
- -es1800*)
|
|
|
|
- os=-ose
|
|
|
|
- ;;
|
|
|
|
- -xenix)
|
|
|
|
- os=-xenix
|
|
|
|
- ;;
|
|
|
|
- -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
|
|
|
|
- os=-mint
|
|
|
|
- ;;
|
|
|
|
- -aros*)
|
|
|
|
- os=-aros
|
|
|
|
- ;;
|
|
|
|
- -kaos*)
|
|
|
|
- os=-kaos
|
|
|
|
- ;;
|
|
|
|
- -zvmoe)
|
|
|
|
- os=-zvmoe
|
|
|
|
- ;;
|
|
|
|
- -dicos*)
|
|
|
|
- os=-dicos
|
|
|
|
- ;;
|
|
|
|
- -nacl*)
|
|
|
|
- ;;
|
|
|
|
- -none)
|
|
|
|
- ;;
|
|
|
|
- *)
|
|
|
|
- # Get rid of the `-' at the beginning of $os.
|
|
|
|
- os=`echo $os | sed 's/[^-]*-//'`
|
|
|
|
- echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2
|
|
|
|
- exit 1
|
|
|
|
- ;;
|
|
|
|
-esac
|
|
|
|
-else
|
|
|
|
-
|
|
|
|
-# Here we handle the default operating systems that come with various machines.
|
|
|
|
-# The value should be what the vendor currently ships out the door with their
|
|
|
|
-# machine or put another way, the most popular os provided with the machine.
|
|
|
|
-
|
|
|
|
-# Note that if you're going to try to match "-MANUFACTURER" here (say,
|
|
|
|
-# "-sun"), then you have to tell the case statement up towards the top
|
|
|
|
-# that MANUFACTURER isn't an operating system. Otherwise, code above
|
|
|
|
-# will signal an error saying that MANUFACTURER isn't an operating
|
|
|
|
-# system, and we'll never get to this point.
|
|
|
|
-
|
|
|
|
-case $basic_machine in
|
|
|
|
- score-*)
|
|
|
|
- os=-elf
|
|
|
|
- ;;
|
|
|
|
- spu-*)
|
|
|
|
- os=-elf
|
|
|
|
- ;;
|
|
|
|
- *-acorn)
|
|
|
|
- os=-riscix1.2
|
|
|
|
- ;;
|
|
|
|
- arm*-rebel)
|
|
|
|
- os=-linux
|
|
|
|
- ;;
|
|
|
|
- arm*-semi)
|
|
|
|
- os=-aout
|
|
|
|
- ;;
|
|
|
|
- c4x-* | tic4x-*)
|
|
|
|
- os=-coff
|
|
|
|
- ;;
|
|
|
|
- # This must come before the *-dec entry.
|
|
|
|
- pdp10-*)
|
|
|
|
- os=-tops20
|
|
|
|
- ;;
|
|
|
|
- pdp11-*)
|
|
|
|
- os=-none
|
|
|
|
- ;;
|
|
|
|
- *-dec | vax-*)
|
|
|
|
- os=-ultrix4.2
|
|
|
|
- ;;
|
|
|
|
- m68*-apollo)
|
|
|
|
- os=-domain
|
|
|
|
- ;;
|
|
|
|
- i386-sun)
|
|
|
|
- os=-sunos4.0.2
|
|
|
|
- ;;
|
|
|
|
- m68000-sun)
|
|
|
|
- os=-sunos3
|
|
|
|
- # This also exists in the configure program, but was not the
|
|
|
|
- # default.
|
|
|
|
- # os=-sunos4
|
|
|
|
- ;;
|
|
|
|
- m68*-cisco)
|
|
|
|
- os=-aout
|
|
|
|
- ;;
|
|
|
|
- mep-*)
|
|
|
|
- os=-elf
|
|
|
|
- ;;
|
|
|
|
- mips*-cisco)
|
|
|
|
- os=-elf
|
|
|
|
- ;;
|
|
|
|
- mips*-*)
|
|
|
|
- os=-elf
|
|
|
|
- ;;
|
|
|
|
- or32-*)
|
|
|
|
- os=-coff
|
|
|
|
- ;;
|
|
|
|
- *-tti) # must be before sparc entry or we get the wrong os.
|
|
|
|
- os=-sysv3
|
|
|
|
- ;;
|
|
|
|
- sparc-* | *-sun)
|
|
|
|
- os=-sunos4.1.1
|
|
|
|
- ;;
|
|
|
|
- *-be)
|
|
|
|
- os=-beos
|
|
|
|
- ;;
|
|
|
|
- *-haiku)
|
|
|
|
- os=-haiku
|
|
|
|
- ;;
|
|
|
|
- *-ibm)
|
|
|
|
- os=-aix
|
|
|
|
- ;;
|
|
|
|
- *-knuth)
|
|
|
|
- os=-mmixware
|
|
|
|
- ;;
|
|
|
|
- *-wec)
|
|
|
|
- os=-proelf
|
|
|
|
- ;;
|
|
|
|
- *-winbond)
|
|
|
|
- os=-proelf
|
|
|
|
- ;;
|
|
|
|
- *-oki)
|
|
|
|
- os=-proelf
|
|
|
|
- ;;
|
|
|
|
- *-hp)
|
|
|
|
- os=-hpux
|
|
|
|
- ;;
|
|
|
|
- *-hitachi)
|
|
|
|
- os=-hiux
|
|
|
|
- ;;
|
|
|
|
- i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent)
|
|
|
|
- os=-sysv
|
|
|
|
- ;;
|
|
|
|
- *-cbm)
|
|
|
|
- os=-amigaos
|
|
|
|
- ;;
|
|
|
|
- *-dg)
|
|
|
|
- os=-dgux
|
|
|
|
- ;;
|
|
|
|
- *-dolphin)
|
|
|
|
- os=-sysv3
|
|
|
|
- ;;
|
|
|
|
- m68k-ccur)
|
|
|
|
- os=-rtu
|
|
|
|
- ;;
|
|
|
|
- m88k-omron*)
|
|
|
|
- os=-luna
|
|
|
|
- ;;
|
|
|
|
- *-next )
|
|
|
|
- os=-nextstep
|
|
|
|
- ;;
|
|
|
|
- *-sequent)
|
|
|
|
- os=-ptx
|
|
|
|
- ;;
|
|
|
|
- *-crds)
|
|
|
|
- os=-unos
|
|
|
|
- ;;
|
|
|
|
- *-ns)
|
|
|
|
- os=-genix
|
|
|
|
- ;;
|
|
|
|
- i370-*)
|
|
|
|
- os=-mvs
|
|
|
|
- ;;
|
|
|
|
- *-next)
|
|
|
|
- os=-nextstep3
|
|
|
|
- ;;
|
|
|
|
- *-gould)
|
|
|
|
- os=-sysv
|
|
|
|
- ;;
|
|
|
|
- *-highlevel)
|
|
|
|
- os=-bsd
|
|
|
|
- ;;
|
|
|
|
- *-encore)
|
|
|
|
- os=-bsd
|
|
|
|
- ;;
|
|
|
|
- *-sgi)
|
|
|
|
- os=-irix
|
|
|
|
- ;;
|
|
|
|
- *-siemens)
|
|
|
|
- os=-sysv4
|
|
|
|
- ;;
|
|
|
|
- *-masscomp)
|
|
|
|
- os=-rtu
|
|
|
|
- ;;
|
|
|
|
- f30[01]-fujitsu | f700-fujitsu)
|
|
|
|
- os=-uxpv
|
|
|
|
- ;;
|
|
|
|
- *-rom68k)
|
|
|
|
- os=-coff
|
|
|
|
- ;;
|
|
|
|
- *-*bug)
|
|
|
|
- os=-coff
|
|
|
|
- ;;
|
|
|
|
- *-apple)
|
|
|
|
- os=-macos
|
|
|
|
- ;;
|
|
|
|
- *-atari*)
|
|
|
|
- os=-mint
|
|
|
|
- ;;
|
|
|
|
- *)
|
|
|
|
- os=-none
|
|
|
|
- ;;
|
|
|
|
-esac
|
|
|
|
-fi
|
|
|
|
-
|
|
|
|
-# Here we handle the case where we know the os, and the CPU type, but not the
|
|
|
|
-# manufacturer. We pick the logical manufacturer.
|
|
|
|
-vendor=unknown
|
|
|
|
-case $basic_machine in
|
|
|
|
- *-unknown)
|
|
|
|
- case $os in
|
|
|
|
- -riscix*)
|
|
|
|
- vendor=acorn
|
|
|
|
- ;;
|
|
|
|
- -sunos*)
|
|
|
|
- vendor=sun
|
|
|
|
- ;;
|
|
|
|
- -cnk*|-aix*)
|
|
|
|
- vendor=ibm
|
|
|
|
- ;;
|
|
|
|
- -beos*)
|
|
|
|
- vendor=be
|
|
|
|
- ;;
|
|
|
|
- -hpux*)
|
|
|
|
- vendor=hp
|
|
|
|
- ;;
|
|
|
|
- -mpeix*)
|
|
|
|
- vendor=hp
|
|
|
|
- ;;
|
|
|
|
- -hiux*)
|
|
|
|
- vendor=hitachi
|
|
|
|
- ;;
|
|
|
|
- -unos*)
|
|
|
|
- vendor=crds
|
|
|
|
- ;;
|
|
|
|
- -dgux*)
|
|
|
|
- vendor=dg
|
|
|
|
- ;;
|
|
|
|
- -luna*)
|
|
|
|
- vendor=omron
|
|
|
|
- ;;
|
|
|
|
- -genix*)
|
|
|
|
- vendor=ns
|
|
|
|
- ;;
|
|
|
|
- -mvs* | -opened*)
|
|
|
|
- vendor=ibm
|
|
|
|
- ;;
|
|
|
|
- -os400*)
|
|
|
|
- vendor=ibm
|
|
|
|
- ;;
|
|
|
|
- -ptx*)
|
|
|
|
- vendor=sequent
|
|
|
|
- ;;
|
|
|
|
- -tpf*)
|
|
|
|
- vendor=ibm
|
|
|
|
- ;;
|
|
|
|
- -vxsim* | -vxworks* | -windiss*)
|
|
|
|
- vendor=wrs
|
|
|
|
- ;;
|
|
|
|
- -aux*)
|
|
|
|
- vendor=apple
|
|
|
|
- ;;
|
|
|
|
- -hms*)
|
|
|
|
- vendor=hitachi
|
|
|
|
- ;;
|
|
|
|
- -mpw* | -macos*)
|
|
|
|
- vendor=apple
|
|
|
|
- ;;
|
|
|
|
- -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*)
|
|
|
|
- vendor=atari
|
|
|
|
- ;;
|
|
|
|
- -vos*)
|
|
|
|
- vendor=stratus
|
|
|
|
- ;;
|
|
|
|
- esac
|
|
|
|
- basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"`
|
|
|
|
- ;;
|
|
|
|
-esac
|
|
|
|
-
|
|
|
|
-echo $basic_machine$os
|
|
|
|
-exit
|
|
|
|
-
|
|
|
|
-# Local variables:
|
|
|
|
-# eval: (add-hook 'write-file-hooks 'time-stamp)
|
|
|
|
-# time-stamp-start: "timestamp='"
|
|
|
|
-# time-stamp-format: "%:y-%02m-%02d"
|
|
|
|
-# time-stamp-end: "'"
|
|
|
|
-# End:
|
--- a/libraries/libapparmor/doc/Makefile.am
+++ b/libraries/libapparmor/doc/Makefile.am
@@ -11,11 +11,9 @@ EXTRA_DIST = $(man_MANS) $(PODS)
 ## delete man pages at maintainer-clean
 BUILT_SOURCES = $(man_MANS)
 
-%.2: %.pod
-	$(POD2MAN) \
-	--section=2 \
-	--release="NOVELL/SUSE" \
-	--center="AppArmor" \
-	--date="2007-07-27" \
-	$< > $@
-$
+PODARGS = --center=AppArmor --release=NOVELL/SUSE
+
+pod2man = pod2man $(PODARGS) --section $(subst .,,$(suffix $<)) $< > $@
+
+.pod.2:
+	$(pod2man)
--- a/libraries/libapparmor/install-sh
+++ /dev/null
@@ -1,520 +0,0 @@
-#!/bin/sh
|
|
|
|
-# install - install a program, script, or datafile
|
|
|
|
-
|
|
|
|
-scriptversion=2009-04-28.21; # UTC
|
|
|
|
-
|
|
|
|
-# This originates from X11R5 (mit/util/scripts/install.sh), which was
|
|
|
|
-# later released in X11R6 (xc/config/util/install.sh) with the
|
|
|
|
-# following copyright and license.
|
|
|
|
-#
|
|
|
|
-# Copyright (C) 1994 X Consortium
|
|
|
|
-#
|
|
|
|
-# Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
|
|
-# of this software and associated documentation files (the "Software"), to
|
|
|
|
-# deal in the Software without restriction, including without limitation the
|
|
|
|
-# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
|
|
|
|
-# sell copies of the Software, and to permit persons to whom the Software is
|
|
|
|
-# furnished to do so, subject to the following conditions:
|
|
|
|
-#
|
|
|
|
-# The above copyright notice and this permission notice shall be included in
|
|
|
|
-# all copies or substantial portions of the Software.
|
|
|
|
-#
|
|
|
|
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
|
|
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
|
|
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
|
|
-# X CONSORTIUM BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
|
|
|
|
-# AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNEC-
|
|
|
|
-# TION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
|
|
-#
|
|
|
|
-# Except as contained in this notice, the name of the X Consortium shall not
|
|
|
|
-# be used in advertising or otherwise to promote the sale, use or other deal-
|
|
|
|
-# ings in this Software without prior written authorization from the X Consor-
|
|
|
|
-# tium.
|
|
|
|
-#
|
|
|
|
-#
|
|
|
|
-# FSF changes to this file are in the public domain.
|
|
|
|
-#
|
|
|
|
-# Calling this script install-sh is preferred over install.sh, to prevent
|
|
|
|
-# `make' implicit rules from creating a file called install from it
|
|
|
|
-# when there is no Makefile.
|
|
|
|
-#
|
|
|
|
-# This script is compatible with the BSD install script, but was written
|
|
|
|
-# from scratch.
|
|
|
|
-
|
|
|
|
-nl='
|
|
|
|
-'
|
|
|
|
-IFS=" "" $nl"
|
|
|
|
-
|
|
|
|
-# set DOITPROG to echo to test this script
|
|
|
|
-
|
|
|
|
-# Don't use :- since 4.3BSD and earlier shells don't like it.
|
|
|
|
-doit=${DOITPROG-}
|
|
|
|
-if test -z "$doit"; then
|
|
|
|
- doit_exec=exec
|
|
|
|
-else
|
|
|
|
- doit_exec=$doit
|
|
|
|
-fi
|
|
|
|
-
|
|
|
|
-# Put in absolute file names if you don't have them in your path;
|
|
|
|
-# or use environment vars.
|
|
|
|
-
|
|
|
|
-chgrpprog=${CHGRPPROG-chgrp}
|
|
|
|
-chmodprog=${CHMODPROG-chmod}
|
|
|
|
-chownprog=${CHOWNPROG-chown}
|
|
|
|
-cmpprog=${CMPPROG-cmp}
|
|
|
|
-cpprog=${CPPROG-cp}
|
|
|
|
-mkdirprog=${MKDIRPROG-mkdir}
|
|
|
|
-mvprog=${MVPROG-mv}
|
|
|
|
-rmprog=${RMPROG-rm}
|
|
|
|
-stripprog=${STRIPPROG-strip}
|
|
|
|
-
|
|
|
|
-posix_glob='?'
|
|
|
|
-initialize_posix_glob='
|
|
|
|
- test "$posix_glob" != "?" || {
|
|
|
|
- if (set -f) 2>/dev/null; then
|
|
|
|
- posix_glob=
|
|
|
|
- else
|
|
|
|
- posix_glob=:
|
|
|
|
- fi
|
|
|
|
- }
|
|
|
|
-'
|
|
|
|
-
|
|
|
|
-posix_mkdir=
|
|
|
|
-
|
|
|
|
-# Desired mode of installed file.
|
|
|
|
-mode=0755
|
|
|
|
-
|
|
|
|
-chgrpcmd=
|
|
|
|
-chmodcmd=$chmodprog
|
|
|
|
-chowncmd=
|
|
|
|
-mvcmd=$mvprog
|
|
|
|
-rmcmd="$rmprog -f"
|
|
|
|
-stripcmd=
|
|
|
|
-
|
|
|
|
-src=
|
|
|
|
-dst=
|
|
|
|
-dir_arg=
|
|
|
|
-dst_arg=
|
|
|
|
-
|
|
|
|
-copy_on_change=false
|
|
|
|
-no_target_directory=
|
|
|
|
-
|
|
|
|
-usage="\
|
|
|
|
-Usage: $0 [OPTION]... [-T] SRCFILE DSTFILE
|
|
|
|
- or: $0 [OPTION]... SRCFILES... DIRECTORY
|
|
|
|
- or: $0 [OPTION]... -t DIRECTORY SRCFILES...
|
|
|
|
- or: $0 [OPTION]... -d DIRECTORIES...
|
|
|
|
-
|
|
|
|
-In the 1st form, copy SRCFILE to DSTFILE.
|
|
|
|
-In the 2nd and 3rd, copy all SRCFILES to DIRECTORY.
|
|
|
|
-In the 4th, create DIRECTORIES.
|
|
|
|
-
|
|
|
|
-Options:
|
|
|
|
- --help display this help and exit.
|
|
|
|
- --version display version info and exit.
|
|
|
|
-
|
|
|
|
- -c (ignored)
|
|
|
|
- -C install only if different (preserve the last data modification time)
|
|
|
|
- -d create directories instead of installing files.
|
|
|
|
- -g GROUP $chgrpprog installed files to GROUP.
|
|
|
|
- -m MODE $chmodprog installed files to MODE.
|
|
|
|
- -o USER $chownprog installed files to USER.
|
|
|
|
- -s $stripprog installed files.
|
|
|
|
- -t DIRECTORY install into DIRECTORY.
|
|
|
|
- -T report an error if DSTFILE is a directory.
|
|
|
|
-
|
|
|
|
-Environment variables override the default commands:
|
|
|
|
- CHGRPPROG CHMODPROG CHOWNPROG CMPPROG CPPROG MKDIRPROG MVPROG
|
|
|
|
- RMPROG STRIPPROG
|
|
|
|
-"
|
|
|
|
-
|
|
|
|
-while test $# -ne 0; do
|
|
|
|
- case $1 in
|
|
|
|
- -c) ;;
|
|
|
|
-
|
|
|
|
- -C) copy_on_change=true;;
|
|
|
|
-
|
|
|
|
- -d) dir_arg=true;;
|
|
|
|
-
|
|
|
|
- -g) chgrpcmd="$chgrpprog $2"
|
|
|
|
- shift;;
|
|
|
|
-
|
|
|
|
- --help) echo "$usage"; exit $?;;
|
|
|
|
-
|
|
|
|
- -m) mode=$2
|
|
|
|
- case $mode in
|
|
|
|
- *' '* | *' '* | *'
|
|
|
|
-'* | *'*'* | *'?'* | *'['*)
|
|
|
|
- echo "$0: invalid mode: $mode" >&2
|
|
|
|
- exit 1;;
|
|
|
|
- esac
|
|
|
|
- shift;;
|
|
|
|
-
|
|
|
|
- -o) chowncmd="$chownprog $2"
|
|
|
|
- shift;;
|
|
|
|
-
|
|
|
|
- -s) stripcmd=$stripprog;;
|
|
|
|
-
|
|
|
|
- -t) dst_arg=$2
|
|
|
|
- shift;;
|
|
|
|
-
|
|
|
|
- -T) no_target_directory=true;;
|
|
|
|
-
|
|
|
|
- --version) echo "$0 $scriptversion"; exit $?;;
|
|
|
|
-
|
|
|
|
- --) shift
|
|
|
|
- break;;
|
|
|
|
-
|
|
|
|
- -*) echo "$0: invalid option: $1" >&2
|
|
|
|
- exit 1;;
|
|
|
|
-
|
|
|
|
- *) break;;
|
|
|
|
- esac
|
|
|
|
- shift
|
|
|
|
-done
|
|
|
|
-
|
|
|
|
-if test $# -ne 0 && test -z "$dir_arg$dst_arg"; then
|
|
|
|
- # When -d is used, all remaining arguments are directories to create.
|
|
|
|
- # When -t is used, the destination is already specified.
|
|
|
|
- # Otherwise, the last argument is the destination. Remove it from $@.
|
|
|
|
- for arg
|
|
|
|
- do
|
|
|
|
- if test -n "$dst_arg"; then
|
|
|
|
- # $@ is not empty: it contains at least $arg.
|
|
|
|
- set fnord "$@" "$dst_arg"
|
|
|
|
- shift # fnord
|
|
|
|
- fi
|
|
|
|
- shift # arg
|
|
|
|
- dst_arg=$arg
|
|
|
|
- done
|
|
|
|
-fi
|
|
|
|
-
|
|
|
|
-if test $# -eq 0; then
|
|
|
|
- if test -z "$dir_arg"; then
|
|
|
|
- echo "$0: no input file specified." >&2
|
|
|
|
- exit 1
|
|
|
|
- fi
|
|
|
|
- # It's OK to call `install-sh -d' without argument.
|
|
|
|
- # This can happen when creating conditional directories.
|
|
|
|
- exit 0
|
|
|
|
-fi
|
|
|
|
-
|
|
|
|
-if test -z "$dir_arg"; then
|
|
|
|
- trap '(exit $?); exit' 1 2 13 15
|
|
|
|
-
|
|
|
|
- # Set umask so as not to create temps with too-generous modes.
|
|
|
|
- # However, 'strip' requires both read and write access to temps.
|
|
|
|
- case $mode in
|
|
|
|
- # Optimize common cases.
|
|
|
|
- *644) cp_umask=133;;
|
|
|
|
- *755) cp_umask=22;;
|
|
|
|
-
|
|
|
|
- *[0-7])
|
|
|
|
- if test -z "$stripcmd"; then
|
|
|
|
- u_plus_rw=
|
|
|
|
- else
|
|
|
|
- u_plus_rw='% 200'
|
|
|
|
- fi
|
|
|
|
- cp_umask=`expr '(' 777 - $mode % 1000 ')' $u_plus_rw`;;
|
|
|
|
- *)
|
|
|
|
- if test -z "$stripcmd"; then
|
|
|
|
- u_plus_rw=
|
|
|
|
- else
|
|
|
|
- u_plus_rw=,u+rw
|
|
|
|
- fi
|
|
|
|
- cp_umask=$mode$u_plus_rw;;
|
|
|
|
- esac
|
|
|
|
-fi
|
|
|
|
-
|
|
|
|
-for src
|
|
|
|
-do
|
|
|
|
- # Protect names starting with `-'.
|
|
|
|
- case $src in
|
|
|
|
- -*) src=./$src;;
|
|
|
|
- esac
|
|
|
|
-
|
|
|
|
- if test -n "$dir_arg"; then
|
|
|
|
- dst=$src
|
|
|
|
- dstdir=$dst
|
|
|
|
- test -d "$dstdir"
|
|
|
|
- dstdir_status=$?
|
|
|
|
- else
|
|
|
|
-
|
|
|
|
- # Waiting for this to be detected by the "$cpprog $src $dsttmp" command
|
|
|
|
- # might cause directories to be created, which would be especially bad
|
|
|
|
- # if $src (and thus $dsttmp) contains '*'.
|
|
|
|
- if test ! -f "$src" && test ! -d "$src"; then
|
|
|
|
- echo "$0: $src does not exist." >&2
|
|
|
|
- exit 1
|
|
|
|
- fi
|
|
|
|
-
|
|
|
|
- if test -z "$dst_arg"; then
|
|
|
|
- echo "$0: no destination specified." >&2
|
|
|
|
- exit 1
|
|
|
|
- fi
|
|
|
|
-
|
|
|
|
- dst=$dst_arg
|
|
|
|
- # Protect names starting with `-'.
|
|
|
|
- case $dst in
|
|
|
|
- -*) dst=./$dst;;
|
|
|
|
- esac
|
|
|
|
-
|
|
|
|
- # If destination is a directory, append the input filename; won't work
|
|
|
|
- # if double slashes aren't ignored.
|
|
|
|
- if test -d "$dst"; then
|
|
|
|
- if test -n "$no_target_directory"; then
|
|
|
|
- echo "$0: $dst_arg: Is a directory" >&2
|
|
|
|
- exit 1
|
|
|
|
- fi
|
|
|
|
- dstdir=$dst
|
|
|
|
- dst=$dstdir/`basename "$src"`
|
|
|
|
- dstdir_status=0
|
|
|
|
- else
|
|
|
|
- # Prefer dirname, but fall back on a substitute if dirname fails.
|
|
|
|
- dstdir=`
|
|
|
|
- (dirname "$dst") 2>/dev/null ||
|
|
|
|
- expr X"$dst" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
|
|
|
|
- X"$dst" : 'X\(//\)[^/]' \| \
|
|
|
|
- X"$dst" : 'X\(//\)$' \| \
|
|
|
|
- X"$dst" : 'X\(/\)' \| . 2>/dev/null ||
|
|
|
|
- echo X"$dst" |
|
|
|
|
- sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
|
|
|
|
- s//\1/
|
|
|
|
- q
|
|
|
|
- }
|
|
|
|
- /^X\(\/\/\)[^/].*/{
|
|
|
|
- s//\1/
|
|
|
|
- q
|
|
|
|
- }
|
|
|
|
- /^X\(\/\/\)$/{
|
|
|
|
- s//\1/
|
|
|
|
- q
|
|
|
|
- }
|
|
|
|
- /^X\(\/\).*/{
|
|
|
|
- s//\1/
|
|
|
|
- q
|
|
|
|
- }
|
|
|
|
- s/.*/./; q'
|
|
|
|
- `
|
|
|
|
-
|
|
|
|
- test -d "$dstdir"
|
|
|
|
- dstdir_status=$?
|
|
|
|
- fi
|
|
|
|
- fi
|
|
|
|
-
|
|
|
|
- obsolete_mkdir_used=false
|
|
|
|
-
|
|
|
|
- if test $dstdir_status != 0; then
|
|
|
|
- case $posix_mkdir in
|
|
|
|
- '')
|
|
|
|
- # Create intermediate dirs using mode 755 as modified by the umask.
|
|
|
|
- # This is like FreeBSD 'install' as of 1997-10-28.
|
|
|
|
- umask=`umask`
|
|
|
|
- case $stripcmd.$umask in
|
|
|
|
- # Optimize common cases.
|
|
|
|
- *[2367][2367]) mkdir_umask=$umask;;
|
|
|
|
- .*0[02][02] | .[02][02] | .[02]) mkdir_umask=22;;
|
|
|
|
-
|
|
|
|
- *[0-7])
|
|
|
|
- mkdir_umask=`expr $umask + 22 \
|
|
|
|
- - $umask % 100 % 40 + $umask % 20 \
|
|
|
|
- - $umask % 10 % 4 + $umask % 2
|
|
|
|
- `;;
|
|
|
|
- *) mkdir_umask=$umask,go-w;;
|
|
|
|
- esac
|
|
|
|
-
|
|
|
|
- # With -d, create the new directory with the user-specified mode.
|
|
|
|
- # Otherwise, rely on $mkdir_umask.
|
|
|
|
- if test -n "$dir_arg"; then
|
|
|
|
- mkdir_mode=-m$mode
|
|
|
|
- else
|
|
|
|
- mkdir_mode=
|
|
|
|
- fi
|
|
|
|
-
|
|
|
|
- posix_mkdir=false
|
|
|
|
- case $umask in
|
|
|
|
- *[123567][0-7][0-7])
|
|
|
|
- # POSIX mkdir -p sets u+wx bits regardless of umask, which
|
|
|
|
- # is incompatible with FreeBSD 'install' when (umask & 300) != 0.
|
|
|
|
- ;;
|
|
|
|
- *)
|
|
|
|
- tmpdir=${TMPDIR-/tmp}/ins$RANDOM-$$
|
|
|
|
- trap 'ret=$?; rmdir "$tmpdir/d" "$tmpdir" 2>/dev/null; exit $ret' 0
|
|
|
|
-
|
|
|
|
- if (umask $mkdir_umask &&
|
|
|
|
- exec $mkdirprog $mkdir_mode -p -- "$tmpdir/d") >/dev/null 2>&1
|
|
|
|
- then
|
|
|
|
- if test -z "$dir_arg" || {
|
|
|
|
- # Check for POSIX incompatibilities with -m.
|
|
|
|
- # HP-UX 11.23 and IRIX 6.5 mkdir -m -p sets group- or
|
|
|
|
- # other-writeable bit of parent directory when it shouldn't.
|
|
|
|
- # FreeBSD 6.1 mkdir -m -p sets mode of existing directory.
|
|
|
|
- ls_ld_tmpdir=`ls -ld "$tmpdir"`
|
|
|
|
- case $ls_ld_tmpdir in
|
|
|
|
- d????-?r-*) different_mode=700;;
|
|
|
|
- d????-?--*) different_mode=755;;
|
|
|
|
- *) false;;
|
|
|
|
- esac &&
|
|
|
|
- $mkdirprog -m$different_mode -p -- "$tmpdir" && {
|
|
|
|
- ls_ld_tmpdir_1=`ls -ld "$tmpdir"`
|
|
|
|
- test "$ls_ld_tmpdir" = "$ls_ld_tmpdir_1"
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- then posix_mkdir=:
|
|
|
|
- fi
|
|
|
|
- rmdir "$tmpdir/d" "$tmpdir"
|
|
|
|
- else
|
|
|
|
- # Remove any dirs left behind by ancient mkdir implementations.
|
|
|
|
- rmdir ./$mkdir_mode ./-p ./-- 2>/dev/null
|
|
|
|
- fi
|
|
|
|
- trap '' 0;;
|
|
|
|
- esac;;
|
|
|
|
- esac
|
|
|
|
-
|
|
|
|
- if
|
|
|
|
- $posix_mkdir && (
|
|
|
|
- umask $mkdir_umask &&
|
|
|
|
- $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir"
|
|
|
|
- )
|
|
|
|
- then :
|
|
|
|
- else
|
|
|
|
-
|
|
|
|
- # The umask is ridiculous, or mkdir does not conform to POSIX,
|
|
|
|
- # or it failed possibly due to a race condition. Create the
|
|
|
|
- # directory the slow way, step by step, checking for races as we go.
|
|
|
|
-
|
|
|
|
- case $dstdir in
|
|
|
|
- /*) prefix='/';;
|
|
|
|
- -*) prefix='./';;
|
|
|
|
- *) prefix='';;
|
|
|
|
- esac
|
|
|
|
-
|
|
|
|
- eval "$initialize_posix_glob"
|
|
|
|
-
|
|
|
|
- oIFS=$IFS
|
|
|
|
- IFS=/
|
|
|
|
- $posix_glob set -f
|
|
|
|
- set fnord $dstdir
|
|
|
|
- shift
|
|
|
|
- $posix_glob set +f
|
|
|
|
- IFS=$oIFS
|
|
|
|
-
|
|
|
|
- prefixes=
|
|
|
|
-
|
|
|
|
- for d
|
|
|
|
- do
|
|
|
|
- test -z "$d" && continue
|
|
|
|
-
|
|
|
|
- prefix=$prefix$d
|
|
|
|
- if test -d "$prefix"; then
|
|
|
|
- prefixes=
|
|
|
|
- else
|
|
|
|
- if $posix_mkdir; then
|
|
|
|
- (umask=$mkdir_umask &&
|
|
|
|
- $doit_exec $mkdirprog $mkdir_mode -p -- "$dstdir") && break
|
|
|
|
- # Don't fail if two instances are running concurrently.
|
|
|
|
- test -d "$prefix" || exit 1
|
|
|
|
- else
|
|
|
|
- case $prefix in
|
|
|
|
- *\'*) qprefix=`echo "$prefix" | sed "s/'/'\\\\\\\\''/g"`;;
|
|
|
|
- *) qprefix=$prefix;;
|
|
|
|
- esac
|
|
|
|
- prefixes="$prefixes '$qprefix'"
|
|
|
|
- fi
|
|
|
|
- fi
|
|
|
|
- prefix=$prefix/
|
|
|
|
- done
|
|
|
|
-
|
|
|
|
- if test -n "$prefixes"; then
|
|
|
|
- # Don't fail if two instances are running concurrently.
|
|
|
|
- (umask $mkdir_umask &&
|
|
|
|
- eval "\$doit_exec \$mkdirprog $prefixes") ||
|
|
|
|
- test -d "$dstdir" || exit 1
|
|
|
|
- obsolete_mkdir_used=true
|
|
|
|
- fi
|
|
|
|
- fi
|
|
|
|
- fi
|
|
|
|
-
|
|
|
|
- if test -n "$dir_arg"; then
|
|
|
|
- { test -z "$chowncmd" || $doit $chowncmd "$dst"; } &&
|
|
|
|
- { test -z "$chgrpcmd" || $doit $chgrpcmd "$dst"; } &&
|
|
|
|
- { test "$obsolete_mkdir_used$chowncmd$chgrpcmd" = false ||
|
|
|
|
- test -z "$chmodcmd" || $doit $chmodcmd $mode "$dst"; } || exit 1
|
|
|
|
- else
|
|
|
|
-
|
|
|
|
- # Make a couple of temp file names in the proper directory.
|
|
|
|
- dsttmp=$dstdir/_inst.$$_
|
|
|
|
- rmtmp=$dstdir/_rm.$$_
|
|
|
|
-
|
|
|
|
- # Trap to clean up those temp files at exit.
|
|
|
|
- trap 'ret=$?; rm -f "$dsttmp" "$rmtmp" && exit $ret' 0
|
|
|
|
-
|
|
|
|
- # Copy the file name to the temp name.
|
|
|
|
- (umask $cp_umask && $doit_exec $cpprog "$src" "$dsttmp") &&
|
|
|
|
-
|
|
|
|
- # and set any options; do chmod last to preserve setuid bits.
|
|
|
|
- #
|
|
|
|
- # If any of these fail, we abort the whole thing. If we want to
|
|
|
|
- # ignore errors from any of these, just make sure not to ignore
|
|
|
|
- # errors from the above "$doit $cpprog $src $dsttmp" command.
|
|
|
|
- #
|
|
|
|
- { test -z "$chowncmd" || $doit $chowncmd "$dsttmp"; } &&
|
|
|
|
- { test -z "$chgrpcmd" || $doit $chgrpcmd "$dsttmp"; } &&
|
|
|
|
- { test -z "$stripcmd" || $doit $stripcmd "$dsttmp"; } &&
|
|
|
|
- { test -z "$chmodcmd" || $doit $chmodcmd $mode "$dsttmp"; } &&
|
|
|
|
-
|
|
|
|
- # If -C, don't bother to copy if it wouldn't change the file.
|
|
|
|
- if $copy_on_change &&
|
|
|
|
- old=`LC_ALL=C ls -dlL "$dst" 2>/dev/null` &&
|
|
|
|
- new=`LC_ALL=C ls -dlL "$dsttmp" 2>/dev/null` &&
|
|
|
|
-
|
|
|
|
- eval "$initialize_posix_glob" &&
|
|
|
|
- $posix_glob set -f &&
|
|
|
|
- set X $old && old=:$2:$4:$5:$6 &&
|
|
|
|
- set X $new && new=:$2:$4:$5:$6 &&
|
|
|
|
- $posix_glob set +f &&
|
|
|
|
-
|
|
|
|
- test "$old" = "$new" &&
|
|
|
|
- $cmpprog "$dst" "$dsttmp" >/dev/null 2>&1
|
|
|
|
- then
|
|
|
|
- rm -f "$dsttmp"
|
|
|
|
- else
|
|
|
|
- # Rename the file to the real destination.
|
|
|
|
- $doit $mvcmd -f "$dsttmp" "$dst" 2>/dev/null ||
|
|
|
|
-
|
|
|
|
- # The rename failed, perhaps because mv can't rename something else
|
|
|
|
- # to itself, or perhaps because mv is so ancient that it does not
|
|
|
|
- # support -f.
|
|
|
|
- {
|
|
|
|
- # Now remove or move aside any old file at destination location.
|
|
|
|
- # We try this two ways since rm can't unlink itself on some
|
|
|
|
- # systems and the destination file might be busy for other
|
|
|
|
- # reasons. In this case, the final cleanup might fail but the new
|
|
|
|
- # file should still install successfully.
|
|
|
|
- {
|
|
|
|
- test ! -f "$dst" ||
|
|
|
|
- $doit $rmcmd -f "$dst" 2>/dev/null ||
|
|
|
|
- { $doit $mvcmd -f "$dst" "$rmtmp" 2>/dev/null &&
|
|
|
|
- { $doit $rmcmd -f "$rmtmp" 2>/dev/null; :; }
|
|
|
|
- } ||
|
|
|
|
- { echo "$0: cannot unlink or rename $dst" >&2
|
|
|
|
- (exit 1); exit 1
|
|
|
|
- }
|
|
|
|
- } &&
|
|
|
|
-
|
|
|
|
- # Now rename the file to the real destination.
|
|
|
|
- $doit $mvcmd "$dsttmp" "$dst"
|
|
|
|
- }
|
|
|
|
- fi || exit 1
|
|
|
|
-
|
|
|
|
- trap '' 0
|
|
|
|
- fi
|
|
|
|
-done
|
|
|
|
-
|
|
|
|
-# Local variables:
|
|
|
|
-# eval: (add-hook 'write-file-hooks 'time-stamp)
|
|
|
|
-# time-stamp-start: "scriptversion="
|
|
|
|
-# time-stamp-format: "%:y-%02m-%02d.%02H"
|
|
|
|
-# time-stamp-time-zone: "UTC"
|
|
|
|
-# time-stamp-end: "; # UTC"
|
|
|
|
-# End:
|
|
|
|
--- a/libraries/libapparmor/libapparmor1.spec
+++ /dev/null
@@ -1,178 +0,0 @@
-#
|
|
|
|
-# spec file for package libapparmor
|
|
|
|
-#
|
|
|
|
-# norootforbuild
|
|
|
|
-%define _unpackaged_files_terminate_build 0
|
|
|
|
-
|
|
|
|
-Name: libapparmor1
|
|
|
|
-Version: 2.5
|
|
|
|
-Release: 3.20070916
|
|
|
|
-License: LGPL
|
|
|
|
-Group: Development/Libraries/C and C++
|
|
|
|
-BuildRoot: %{_tmppath}/%{name}-%{version}-build
|
|
|
|
-Source0: %{name}-%{version}.tar.bz2
|
|
|
|
-BuildRequires: swig gcc perl
|
|
|
|
-Provides: libapparmor
|
|
|
|
-Provides: libimmunix
|
|
|
|
-Obsoletes: libapparmor
|
|
|
|
-Obsoletes: libimmunix
|
|
|
|
-Summary: A utility library for AppArmor
|
|
|
|
-
|
|
|
|
-%define aalibversion 1.0.2
|
|
|
|
-
|
|
|
|
-%description
|
|
|
|
--
|
|
|
|
-
|
|
|
|
-%package -n libapparmor-devel
|
|
|
|
-Requires: %{name} = %{version}-%{release}
|
|
|
|
-Group: Development/Libraries/C and C++
|
|
|
|
-Provides: libapparmor:/usr/include/sys/apparmor.h
|
|
|
|
-Summary: -
|
|
|
|
-
|
|
|
|
-%description -n libapparmor-devel
|
|
|
|
--
|
|
|
|
-
|
|
|
|
-%post -n libapparmor-devel
|
|
|
|
-/sbin/ldconfig
|
|
|
|
-
|
|
|
|
-%postun -n libapparmor-devel
|
|
|
|
-/sbin/ldconfig
|
|
|
|
-
|
|
|
|
-%package -n perl-libapparmor
|
|
|
|
-Requires: %{name} = %{version}
|
|
|
|
-Requires: perl = %{perl_version}
|
|
|
|
-Group: Development/Libraries/Perl
|
|
|
|
-Summary: -
|
|
|
|
-
|
|
|
|
-%description -n perl-libapparmor
|
|
|
|
--
|
|
|
|
-
|
|
|
|
-%prep
|
|
|
|
-%setup -q
|
|
|
|
-
|
|
|
|
-%build
|
|
|
|
-./configure --prefix=%{_prefix} --libdir=%{_libdir} --with-perl
|
|
|
|
-make CFLAGS="${RPM_OPT_FLAGS}"
|
|
|
|
-
|
|
|
|
-%install
|
|
|
|
-make install DESTDIR="$RPM_BUILD_ROOT"
|
|
|
|
-mkdir ${RPM_BUILD_ROOT}/%{_lib}
|
|
|
|
-# this is really hacky
|
|
|
|
-rm ${RPM_BUILD_ROOT}/%{_libdir}/libapparmor.so
|
|
|
|
-rm ${RPM_BUILD_ROOT}/%{_libdir}/libimmunix.so
|
|
|
|
-cp ${RPM_BUILD_ROOT}/%{_libdir}/libapparmor.so.%{aalibversion} ${RPM_BUILD_ROOT}/%{_lib}
|
|
|
|
-cp ${RPM_BUILD_ROOT}/%{_libdir}/libimmunix.so.%{aalibversion} ${RPM_BUILD_ROOT}/%{_lib}
|
|
|
|
-ln -s /%{_lib}/libapparmor.so.%{aalibversion} ${RPM_BUILD_ROOT}/%{_libdir}/libapparmor.so
|
|
|
|
-
|
|
|
|
-find $RPM_BUILD_ROOT -name .packlist -exec rm -f {} \;
|
|
|
|
-find $RPM_BUILD_ROOT -name perllocal.pod -exec rm -f {} \;
|
|
|
|
-
|
|
|
|
-# create symlink for old change_hat(2) manpage
|
|
|
|
-ln -s aa_change_hat.2 ${RPM_BUILD_ROOT}/%{_mandir}/man2/change_hat.2
|
|
|
|
-%clean
|
|
|
|
-rm -rf "$RPM_BUILD_ROOT"
|
|
|
|
-
|
|
|
|
-%post
|
|
|
|
-/sbin/ldconfig
|
|
|
|
-
|
|
|
|
-%postun
|
|
|
|
-/sbin/ldconfig
|
|
|
|
-
|
|
|
|
-%files
|
|
|
|
-%defattr(-,root,root)
|
|
|
|
-/%{_lib}/libapparmor.so.*
|
|
|
|
-/%{_lib}/libimmunix.so.*
|
|
|
|
-
|
|
|
|
-%files -n libapparmor-devel
|
|
|
|
-%defattr(-,root,root)
|
|
|
|
-%{_libdir}/libapparmor.so
|
|
|
|
-%{_libdir}/libapparmor.la
|
|
|
|
-%{_libdir}/libapparmor.a
|
|
|
|
-%{_libdir}/libimmunix.la
|
|
|
|
-%{_libdir}/libimmunix.a
|
|
|
|
-%doc %{_mandir}/man*/*
|
|
|
|
-%dir %{_includedir}/aalogparse
|
|
|
|
-%{_includedir}/sys/apparmor.h
|
|
|
|
-%{_includedir}/aalogparse/*
|
|
|
|
-
|
|
|
|
-%files -n perl-libapparmor
|
|
|
|
-%defattr(-,root,root)
|
|
|
|
-%dir %{perl_vendorarch}/auto/LibAppArmor
|
|
|
|
-%{perl_vendorarch}/auto/LibAppArmor/*
|
|
|
|
-%{perl_vendorarch}/LibAppArmor.pm
|
|
|
|
-
|
|
|
|
-%changelog
|
|
|
|
-* Sun Sep 16 2007 - sbeattie@suse.de
|
|
|
|
-- aalogparse: add support for type=15xx audit field
|
|
|
|
-- aalogparse: add support for audit messages thru syslog
|
|
|
|
-- aalogparse: reduce noise to stdout on syntax errors
|
|
|
|
-- aalogparse: add support for more missing message types
|
|
|
|
-- aalogparse: parse messages w/safe (hex) string encodings
|
|
|
|
-* Fri Aug 17 2007 - sbeattie@suse.de
|
|
|
|
-- Fix broken symlink for old change_hat(2) manpage
|
|
|
|
-* Wed Aug 15 2007 - sbeattie@suse.de
|
|
|
|
-- fix braindead symbol versioning issue with old version name
|
|
|
|
-- re-enable CFLAGS=RPM_OPT_FLAGS for build
|
|
|
|
-- convert change_hat(2) to aa_change_hat(2)
|
|
|
|
-- use 64bit magic token
|
|
|
|
-- add aa_change_profile(2) interface
|
|
|
|
-* Sat Jul 28 2007 - mbarringer@suse.de
|
|
|
|
-- Merged in libaalogparse to the library/package
|
|
|
|
-* Tue Apr 7 2007 - sbeattie@suse.de
|
|
|
|
-- Add change_hat manpage to package
|
|
|
|
-* Thu Jan 18 2007 - sbeattie@suse.de
|
|
|
|
-- Add a clean stage to remove buildroot to specfile
|
|
|
|
-* Fri Feb 17 2006 Seth Arnold <seth.arnold@suse.de> 2.0-4.1
|
|
|
|
-- use gettid() instead of /proc/self
|
|
|
|
-* Fri Feb 10 2006 Steve Beattie <sbeattie@suse.de> 2.0-3.2
|
|
|
|
-- Use RPM_OPT_FLAGS
|
|
|
|
-- Fix installed library version to match specfile version
|
|
|
|
-* Wed Feb 1 2006 Steve Beattie <sbeattie@suse.de> 2.0-3.1
|
|
|
|
-- Fix prototype to match change_hat(2) manpage
|
|
|
|
-* Mon Jan 23 2006 Steve Beattie <sbeattie@suse.de> 2.0-3
|
|
|
|
-- Rename to libapparmor.so and apparmor.h
|
|
|
|
-* Thu Jan 5 2006 Steve Beattie <sbeattie@suse.de> 2.0-2
|
|
|
|
-- Add svn repo number to tarball
|
|
|
|
-* Wed Dec 7 2005 Steve Beattie <sbeattie@suse.de> 2.0-1
|
|
|
|
-- Reset version for inclusion is SUSE autobuild
|
|
|
|
-* Wed Dec 7 2005 Steve Beattie <sbeattie@suse.de> 1.99-8
|
|
|
|
-- Disable 32bit builds on 64bit platforms for now
|
|
|
|
-* Mon Dec 5 2005 Steve Beattie <sbeattie@suse.de> 1.99-7
|
|
|
|
-- Rename package to libapparmor
|
|
|
|
-* Wed Aug 10 2005 Steve Beattie <sbeattie@suse.de> 1.99-6_imnx
|
|
|
|
-- Cleanup some of the deprecated exported symbols
|
|
|
|
-* Thu Aug 4 2005 John Johansen <jjohansen@novell.com> 1.99-5_imnx
|
|
|
|
-- and -m31 flag for s390
|
|
|
|
-* Mon Jul 11 2005 Steve Beattie <sbeattie@novell.com> 1.99-4_imnx
|
|
|
|
-- get rid of libimmunix_post_upgrade
|
|
|
|
-- Re-license to LGPL
|
|
|
|
-- update description
|
|
|
|
-* Fri May 27 2005 Steve Beattie <steve@immunix.com> 1.99-3_imnx
|
|
|
|
-- Clear token buffer before freeing.
|
|
|
|
-- Error handling cleanup.
|
|
|
|
-* Fri Feb 18 2005 Steve Beattie <steve@immunix.com> 1.99-2_imnx
|
|
|
|
-- Use the right command for the 32bit env on 64bit platforms
|
|
|
|
-- Support for 64bit builds on systems with combined 32/64 support
|
|
|
|
-* Fri Feb 4 2005 Seth Arnold <sarnold@immunix.com> 1.99-1_imnx
|
|
|
|
-- Reversion to 1.99
|
|
|
|
-* Mon Nov 8 2004 Steve Beattie <steve@immunix.com> 1.2-3_imnx
|
|
|
|
-- Finish conversion to slack-capable infrastructure.
|
|
|
|
-* Thu Oct 28 2004 Steve Beattie <steve@immunix.com> 1.2-2_imnx
|
|
|
|
-- Added a 'make install' target for prelim slack support
|
|
|
|
-* Tue Oct 12 2004 Steve Beattie <steve@immunix.com> 1.2-1_imnx
|
|
|
|
-- Bump version after shass-1.1 branched off
|
|
|
|
-* Thu Sep 23 2004 Steve Beattie <steve@immunix.com> 1.0-13_imnx
|
|
|
|
-- Vastly simplify the string handling in change_hat().
|
|
|
|
-* Thu Sep 9 2004 Steve Beattie <steve@immunix.com> 1.0-12_imnx
|
|
|
|
-- Conditionalize group the package shows up in.
|
|
|
|
-* Thu Sep 9 2004 Steve Beattie <steve@immunix.com> 1.0-11_imnx
|
|
|
|
-- Fix so change_hat functions correctly even when the token is zero.
|
|
|
|
-* Thu Sep 2 2004 Steve Beattie <steve@immunix.com> 1.0-10_imnx
|
|
|
|
-- Added that it provides %{_prefix}/sbin/libimmunix_post_upgrade, this
|
|
|
|
- was somehow breaking yast.
|
|
|
|
-* Mon Aug 30 2004 Steve Beattie <steve@immunix.com> 1.0-9_imnx
|
|
|
|
-- Copyright cleanups.
|
|
|
|
-* Wed Jul 21 2004 Steve Beattie <steve@immunix.com> 1.0-8_imnx
|
|
|
|
-- add basis for conditional distro support
|
|
|
|
-* Thu May 28 2004 Tony Jones <tony@immunix.com> 1.0-7_imnx
|
|
|
|
-- Add "changehat" command word to start of string written to /proc/pid/attr
|
|
|
|
--- a/libraries/libapparmor/m4/ac_pod2man.m4
+++ /dev/null
@@ -1,16 +0,0 @@
-AC_DEFUN([PROG_POD2MAN],[
-  AC_CHECK_PROG(POD2MAN,pod2man,pod2man,no)
-  if test "$POD2MAN" = "no"; then
-    AC_MSG_ERROR([
-The pod2man program was not found in the default path. pod2man is part of
-Perl, which can be retrieved from:
-
-    http://www.perl.com/
-
-The latest version at this time is 5.6.1; it is available packaged as the
-following archive:
-
-    http://www.perl.com/CPAN/src/stable.tar.gz
-])
-  fi
-])
--- a/libraries/libapparmor/m4/ac_python_devel.m4
+++ /dev/null
@@ -1,193 +0,0 @@
-AC_DEFUN([AC_PYTHON_DEVEL],[
|
|
|
|
- #
|
|
|
|
- # Allow the use of a (user set) custom python version
|
|
|
|
- #
|
|
|
|
- AC_ARG_VAR([PYTHON_VERSION],[The installed Python
|
|
|
|
- version to use, for example '2.3'. This string
|
|
|
|
- will be appended to the Python interpreter
|
|
|
|
- canonical name.])
|
|
|
|
-
|
|
|
|
- AC_PATH_PROG([PYTHON],[python[$PYTHON_VERSION]])
|
|
|
|
- if test -z "$PYTHON"; then
|
|
|
|
- AC_MSG_ERROR([Cannot find python$PYTHON_VERSION in your system path])
|
|
|
|
- PYTHON_VERSION=""
|
|
|
|
- fi
|
|
|
|
-
|
|
|
|
- #
|
|
|
|
- # Check for a version of Python >= 2.1.0
|
|
|
|
- #
|
|
|
|
- AC_MSG_CHECKING([for a version of Python >= '2.1.0'])
|
|
|
|
- ac_supports_python_ver=`$PYTHON -c "import sys, string; \
|
|
|
|
- ver = string.split(sys.version)[[0]]; \
|
|
|
|
- print ver >= '2.1.0'"`
|
|
|
|
- if test "$ac_supports_python_ver" != "True"; then
|
|
|
|
- if test -z "$PYTHON_NOVERSIONCHECK"; then
|
|
|
|
- AC_MSG_RESULT([no])
|
|
|
|
- AC_MSG_FAILURE([
|
|
|
|
-This version of the AC@&t@_PYTHON_DEVEL macro
|
|
|
|
-doesn't work properly with versions of Python before
|
|
|
|
-2.1.0. You may need to re-run configure, setting the
|
|
|
|
-variables PYTHON_CPPFLAGS, PYTHON_LDFLAGS, PYTHON_SITE_PKG,
|
|
|
|
-PYTHON_EXTRA_LIBS and PYTHON_EXTRA_LDFLAGS by hand.
|
|
|
|
-Moreover, to disable this check, set PYTHON_NOVERSIONCHECK
|
|
|
|
-to something else than an empty string.
|
|
|
|
-])
|
|
|
|
- else
|
|
|
|
- AC_MSG_RESULT([skip at user request])
|
|
|
|
- fi
|
|
|
|
- else
|
|
|
|
- AC_MSG_RESULT([yes])
|
|
|
|
- fi
|
|
|
|
-
|
|
|
|
- #
|
|
|
|
- # if the macro parameter ``version'' is set, honour it
|
|
|
|
- #
|
|
|
|
- if test -n "$1"; then
|
|
|
|
- AC_MSG_CHECKING([for a version of Python $1])
|
|
|
|
- ac_supports_python_ver=`$PYTHON -c "import sys, string; \
|
|
|
|
- ver = string.split(sys.version)[[0]]; \
|
|
|
|
- print ver $1"`
|
|
|
|
- if test "$ac_supports_python_ver" = "True"; then
|
|
|
|
- AC_MSG_RESULT([yes])
|
|
|
|
- else
|
|
|
|
- AC_MSG_RESULT([no])
|
|
|
|
- AC_MSG_ERROR([this package requires Python $1.
|
|
|
|
-If you have it installed, but it isn't the default Python
|
|
|
|
-interpreter in your system path, please pass the PYTHON_VERSION
|
|
|
|
-variable to configure. See ``configure --help'' for reference.
|
|
|
|
-])
|
|
|
|
- PYTHON_VERSION=""
|
|
|
|
- fi
|
|
|
|
- fi
|
|
|
|
-
|
|
|
|
- #
|
|
|
|
- # Check if you have distutils, else fail
|
|
|
|
- #
|
|
|
|
- AC_MSG_CHECKING([for the distutils Python package])
|
|
|
|
- ac_distutils_result=`$PYTHON -c "import distutils" 2>&1`
|
|
|
|
- if test -z "$ac_distutils_result"; then
|
|
|
|
- AC_MSG_RESULT([yes])
|
|
|
|
- else
|
|
|
|
- AC_MSG_RESULT([no])
|
|
|
|
- AC_MSG_ERROR([cannot import Python module "distutils".
|
|
|
|
-Please check your Python installation. The error was:
|
|
|
|
-$ac_distutils_result])
|
|
|
|
- PYTHON_VERSION=""
|
|
|
|
- fi
|
|
|
|
-
|
|
|
|
- #
|
|
|
|
- # Check for Python include path
|
|
|
|
- #
|
|
|
|
- AC_MSG_CHECKING([for Python include path])
|
|
|
|
- if test -z "$PYTHON_CPPFLAGS"; then
|
|
|
|
- python_path=`$PYTHON -c "import distutils.sysconfig; \
|
|
|
|
- print distutils.sysconfig.get_python_inc();"`
|
|
|
|
- if test -n "${python_path}"; then
|
|
|
|
- python_path="-I$python_path"
|
|
|
|
- fi
|
|
|
|
- PYTHON_CPPFLAGS=$python_path
|
|
|
|
- fi
|
|
|
|
- AC_MSG_RESULT([$PYTHON_CPPFLAGS])
|
|
|
|
- AC_SUBST([PYTHON_CPPFLAGS])
|
|
|
|
-
|
|
|
|
- #
|
|
|
|
- # Check for Python library path
|
|
|
|
- #
|
|
|
|
- AC_MSG_CHECKING([for Python library path])
|
|
|
|
- if test -z "$PYTHON_LDFLAGS"; then
|
|
|
|
- # (makes two attempts to ensure we've got a version number
|
|
|
|
- # from the interpreter)
|
|
|
|
- py_version=`$PYTHON -c "from distutils.sysconfig import *; \
|
|
|
|
- from string import join; \
|
|
|
|
- print join(get_config_vars('VERSION'))"`
|
|
|
|
- if test "$py_version" == "[None]"; then
|
|
|
|
- if test -n "$PYTHON_VERSION"; then
|
|
|
|
- py_version=$PYTHON_VERSION
|
|
|
|
- else
|
|
|
|
- py_version=`$PYTHON -c "import sys; \
|
|
|
|
- print sys.version[[:3]]"`
|
|
|
|
- fi
|
|
|
|
- fi
|
|
|
|
-
|
|
|
|
- PYTHON_LDFLAGS=`$PYTHON -c "from distutils.sysconfig import *; \
|
|
|
|
- from string import join; \
|
|
|
|
- print '-L' + get_python_lib(0,1), \
|
|
|
|
- '-lpython';"`$py_version
|
|
|
|
- fi
|
|
|
|
- AC_MSG_RESULT([$PYTHON_LDFLAGS])
|
|
|
|
- AC_SUBST([PYTHON_LDFLAGS])
|
|
|
|
-
|
|
|
|
- #
|
|
|
|
- # Check for site packages
|
|
|
|
- #
|
|
|
|
- AC_MSG_CHECKING([for Python site-packages path])
|
|
|
|
- if test -z "$PYTHON_SITE_PKG"; then
|
|
|
|
- PYTHON_SITE_PKG=`$PYTHON -c "import distutils.sysconfig; \
|
|
|
|
- print distutils.sysconfig.get_python_lib(0,0);"`
|
|
|
|
- fi
|
|
|
|
- AC_MSG_RESULT([$PYTHON_SITE_PKG])
|
|
|
|
- AC_SUBST([PYTHON_SITE_PKG])
|
|
|
|
-
|
|
|
|
- #
|
|
|
|
- # libraries which must be linked in when embedding
|
|
|
|
- #
|
|
|
|
- AC_MSG_CHECKING(python extra libraries)
|
|
|
|
- if test -z "$PYTHON_EXTRA_LIBS"; then
|
|
|
|
- PYTHON_EXTRA_LIBS=`$PYTHON -c "import distutils.sysconfig; \
|
|
|
|
- conf = distutils.sysconfig.get_config_var; \
|
|
|
|
- print conf('LOCALMODLIBS'), conf('LIBS')"`
|
|
|
|
- fi
|
|
|
|
- AC_MSG_RESULT([$PYTHON_EXTRA_LIBS])
|
|
|
|
- AC_SUBST(PYTHON_EXTRA_LIBS)
|
|
|
|
-
|
|
|
|
- #
|
|
|
|
- # linking flags needed when embedding
|
|
|
|
- #
|
|
|
|
- AC_MSG_CHECKING(python extra linking flags)
|
|
|
|
- if test -z "$PYTHON_EXTRA_LDFLAGS"; then
|
|
|
|
- PYTHON_EXTRA_LDFLAGS=`$PYTHON -c "import distutils.sysconfig; \
|
|
|
|
- conf = distutils.sysconfig.get_config_var; \
|
|
|
|
- print conf('LINKFORSHARED')"`
|
|
|
|
- fi
|
|
|
|
- AC_MSG_RESULT([$PYTHON_EXTRA_LDFLAGS])
|
|
|
|
- AC_SUBST(PYTHON_EXTRA_LDFLAGS)
|
|
|
|
-
|
|
|
|
- #
|
|
|
|
- # final check to see if everything compiles alright
|
|
|
|
- #
|
|
|
|
- AC_MSG_CHECKING([consistency of all components of python development environment])
|
|
|
|
- AC_LANG_PUSH([C])
|
|
|
|
- # save current global flags
|
|
|
|
- LIBS="$ac_save_LIBS $PYTHON_LDFLAGS"
|
|
|
|
- CPPFLAGS="$ac_save_CPPFLAGS $PYTHON_CPPFLAGS"
|
|
|
|
- AC_TRY_LINK([
|
|
|
|
- #include <Python.h>
|
|
|
|
- ],[
|
|
|
|
- Py_Initialize();
|
|
|
|
- ],[pythonexists=yes],[pythonexists=no])
|
|
|
|
-
|
|
|
|
- AC_MSG_RESULT([$pythonexists])
|
|
|
|
-
|
|
|
|
- if test ! "$pythonexists" = "yes"; then
|
|
|
|
- AC_MSG_ERROR([
|
|
|
|
- Could not link test program to Python. Maybe the main Python library has been
|
|
|
|
- installed in some non-standard library path. If so, pass it to configure,
|
|
|
|
- via the LDFLAGS environment variable.
|
|
|
|
- Example: ./configure LDFLAGS="-L/usr/non-standard-path/python/lib"
|
|
|
|
- ============================================================================
|
|
|
|
- ERROR!
|
|
|
|
- You probably have to install the development version of the Python package
|
|
|
|
- for your distribution. The exact name of this package varies among them.
|
|
|
|
- ============================================================================
|
|
|
|
- ])
|
|
|
|
- PYTHON_VERSION=""
|
|
|
|
- fi
|
|
|
|
- AC_LANG_POP
|
|
|
|
- # turn back to default flags
|
|
|
|
- CPPFLAGS="$ac_save_CPPFLAGS"
|
|
|
|
- LIBS="$ac_save_LIBS"
|
|
|
|
-
|
|
|
|
- #
|
|
|
|
- # all done!
|
|
|
|
- #
|
|
|
|
-])
|
|
|
|
--- a/libraries/libapparmor/src/Makefile.am
+++ b/libraries/libapparmor/src/Makefile.am
@@ -23,10 +23,10 @@ noinst_HEADERS = grammar.h parser.h scan
 
 libapparmor_la_SOURCES = grammar.y libaalogparse.c kernel_interface.c scanner.c
 libapparmor_la_LDFLAGS = -version-info 1:2:0 -XCClinker -dynamic \
-	-Wl,--version-script=$(top_srcdir)/src/libapparmor.map -Wl,-soname=libapparmor.so.1
+	-Wl,--version-script=$(srcdir)/libapparmor.map -Wl,-soname=libapparmor.so.1
 
 libimmunix_la_SOURCES = kernel_interface.c libimmunix_warning.c
-libimmunix_la_LDFLAGS = -version-info 1:2:0 -Wl,--version-script=$(top_srcdir)/src/libapparmor.map -Wl,-soname=libimmunix.so.1
+libimmunix_la_LDFLAGS = -version-info 1:2:0 -Wl,--version-script=$(srcdir)/libapparmor.map -Wl,-soname=libimmunix.so.1
 
 tst_aalogmisc_SOURCES = tst_aalogmisc.c
 tst_aalogmisc_LDADD = .libs/libapparmor.a
--- a/libraries/libapparmor/swig/perl/Makefile.PL.in
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/perl -w
-
-use ExtUtils::MakeMaker;
-
-use vars qw($CFLAGS $OBJECT $VERSION $OPTIMIZE);
-
-WriteMakefile(
-	'NAME' => 'LibAppArmor',
-	'MAKEFILE' => 'Makefile.perl',
-	'FIRST_MAKEFILE' => 'Makefile.perl',
-	'ABSTRACT' => q[Perl interface to AppArmor] ,
-	'VERSION' => q[@VERSION@],
-	'INC' => q[-I@top_srcdir@/src @CFLAGS@],
-	'LIBS' => q[-L@top_builddir@/src/.libs/ -lapparmor @LIBS@],
-	'OBJECT' => 'libapparmor_wrap.o', # $(OBJ_EXT)
-) ;
-
--- a/libraries/libapparmor/swig/perl/Makefile.am
+++ b/libraries/libapparmor/swig/perl/Makefile.am
@@ -1,32 +1,21 @@
-EXTRA_DIST =Makefile.PL libapparmor_wrap.c LibAppArmor.pm examples/*.pl
+EXTRA_DIST = libapparmor_wrap.c LibAppArmor.pm examples/*.pl
 
 if HAVE_PERL
-noinst_DATA =LibAppArmor.so
+vendorarchdir = $(VENDOR_ARCH_PERL)
+vendorarch_DATA = LibAppArmor.pm
+
+apparmordir = $(vendorarchdir)/auto/LibAppArmor
+apparmor_LTLIBRARIES = LibAppArmor.la
+
+LibAppArmor_la_LDFLAGS = -module -no-undefined -avoid-version
+LibAppArmor_la_SOURCES = libapparmor_wrap.c
+LibAppArmor_la_LIBADD = $(top_builddir)/libraries/libapparmor/src/.libs/libapparmor.la
+
+LibAppArmor_la_CFLAGS = -I$(top_srcdir)/libraries/libapparmor/src $(CFLAGS) $(PERL_CCFLAGS)
 
 libapparmor_wrap.c: $(srcdir)/../SWIG/libapparmor.i
 	$(SWIG) -perl -I$(srcdir)/../../src -module LibAppArmor -o $@ $(srcdir)/../SWIG/libapparmor.i
+LibAppArmor.pm: libapparmor_wrap.c
 
 MOSTLYCLEANFILES=libapparmor_wrap.c LibAppArmor.pm
-
-Makefile.perl: Makefile.PL
-	$(PERL) $< PREFIX=$(prefix) MAKEFILE=$@
-	sed -ie 's/^LD_RUN_PATH.*//g' Makefile.perl
-
-LibAppArmor.so: libapparmor_wrap.c Makefile.perl
-	if test ! -f libapparmor_wrap.c; then cp $(srcdir)/libapparmor_wrap.c . ; fi
-	$(MAKE) -fMakefile.perl
-	if test $(top_srcdir) != $(top_builddir) ; then rm -f libapparmor_wrap.c ; fi
-
-install-exec-local: Makefile.perl
-	$(MAKE) -fMakefile.perl install_vendor
-
-# sadly there is no make uninstall for perl
-#uninstall-local: Makefile.perl
-#$(MAKE) -fMakefile.perl uninstall
-
-clean-local:
-	if test -f Makefile.perl; then $(MAKE) -fMakefile.perl realclean; fi
-#rm -f Makefile.perl Makefile.perl.old
-	rm -f *.so # *.o
-
 endif
--- a/libraries/libapparmor/swig/python/Makefile.am
+++ b/libraries/libapparmor/swig/python/Makefile.am
@@ -9,7 +9,6 @@ libapparmor_wrap.c: $(srcdir)/../SWIG/li
 MOSTLYCLEANFILES=libapparmor_wrap.c __init__.py
 
 all-local: libapparmor_wrap.c setup.py
-	if test ! -f libapparmor_wrap.c; then cp $(srcdir)/libapparmor_wrap.c . ; fi
 	$(PYTHON) setup.py build
 
 install-exec-local:
@@ -18,6 +17,5 @@ install-exec-local:
 clean-local:
 	if test -x "$(PYTHON)"; then $(PYTHON) setup.py clean; fi
 	rm -rf build
-	if test $(top_srcdir) != $(top_builddir) ; then rm -f libapparmor_wrap.c ; fi
 
 endif
--- a/libraries/libapparmor/swig/ruby/Makefile.am
+++ b/libraries/libapparmor/swig/ruby/Makefile.am
@@ -1,28 +1,17 @@
 if HAVE_RUBY
 
-EXTRA_DIST = extconf.rb LibAppArmor_wrap.c examples/*.rb
-noinst_DATA = LibAppArmor.so
+EXTRA_DIST = LibAppArmor_wrap.c examples/*.rb
 
-LibAppArmor_wrap.c : $(srcdir)/../SWIG/libapparmor.i
-	$(SWIG) -ruby -module LibAppArmor -I$(top_srcdir)/src -o $@ $(srcdir)/../SWIG/libapparmor.i
-
-MOSTLYCLEANFILES=LibAppArmor_wrap.c
+rbexec_LTLIBRARIES = LibAppArmor.la
 
-Makefile.ruby: extconf.rb
-	PREFIX=$(prefix) $(RUBY) $< --with-LibAppArmor-include=$(top_srcdir)/src
+LibAppArmor_la_LDFLAGS = -module -no-undefined -avoid-version
+LibAppArmor_la_SOURCES = LibAppArmor_wrap.c
+LibAppArmor_la_LIBADD = $(builddir)/../../src/.libs/libapparmor.la
+LibAppArmor_la_CFLAGS = -I$(top_srcdir)/libraries/libapparmor/src $(CFLAGS)
 
-LibAppArmor.so: LibAppArmor_wrap.c Makefile.ruby
-	$(MAKE) -fMakefile.ruby
-
-install-exec-local: Makefile.ruby
-	$(MAKE) -fMakefile.ruby install
+LibAppArmor_wrap.c : $(srcdir)/../SWIG/libapparmor.i
+	$(SWIG) -ruby -module LibAppArmor -I$(srcdir)/../../src -o $@ $(srcdir)/../SWIG/libapparmor.i
 
-#uninstall
-#./lib/ruby/site_ruby/1.8/i686-linux/LibAppArmor.so
-
-clean-local:
-	if test -f Makefile.ruby; then $(MAKE) -fMakefile.ruby clean; fi
-	rm -f Makefile.ruby Makefile.new
-	rm -f *.o *.so *.log
+MOSTLYCLEANFILES=LibAppArmor_wrap.c
 
 endif
--- a/libraries/libapparmor/swig/ruby/extconf.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env ruby
-
-require 'mkmf'
-
-# hack 1: ruby black magic to write a Makefile.new instead of a Makefile
-alias open_orig open
-def open(path, mode=nil, perm=nil)
-  path = 'Makefile.new' if path == 'Makefile'
-  if block_given?
-    open_orig(path, mode, perm) { |io| yield(io) }
-  else
-    open_orig(path, mode, perm)
-  end
-end
-
-if ENV['PREFIX']
-  prefix = CONFIG['prefix']
-  %w[ prefix sitedir datadir infodir mandir oldincludedir ].each do |key|
-    CONFIG[key] = CONFIG[key].sub(/#{prefix}/, ENV['PREFIX'])
-  end
-end
-
-dir_config('LibAppArmor')
-if find_library('apparmor', 'parse_record', '../../src/.libs') and
-   have_header('aalogparse.h')
-  create_makefile('LibAppArmor')
-
-  # hack 2: strip all rpath references
-  open('Makefile.ruby', 'w') do |out|
-    IO.foreach('Makefile.new') do |line|
-      out.puts line.gsub(/-Wl,-R'[^']*'/, '')
-    end
-  end
-else
-  puts 'apparmor lib not found'
-end
-
--- a/libraries/libapparmor/testsuite/Makefile.am
+++ b/libraries/libapparmor/testsuite/Makefile.am
@@ -2,7 +2,7 @@ SUBDIRS = lib config libaalogparse.test
 PACKAGE = libaalogparse
 AUTOMAKE_OPTIONS = dejagnu
 
-INCLUDES = -I. -I$(top_srcdir)/src
+INCLUDES = -I. -I$(srcdir)/../src
 
 AM_CPPFLAGS = $(DEBUG_FLAGS) -DLOCALEDIR=\"${localedir}\"
 AM_CFLAGS = -Wall
--- /dev/null
+++ b/m4/ac_pod2man.m4
@@ -0,0 +1,16 @@
+AC_DEFUN([PROG_POD2MAN],[
+  AC_CHECK_PROG(POD2MAN,pod2man,pod2man,no)
+  if test "$POD2MAN" = "no"; then
+    AC_MSG_ERROR([
+The pod2man program was not found in the default path. pod2man is part of
+Perl, which can be retrieved from:
+
+    http://www.perl.com/
+
+The latest version at this time is 5.6.1; it is available packaged as the
+following archive:
+
+    http://www.perl.com/CPAN/src/stable.tar.gz
+])
+  fi
+])
--- /dev/null
+++ b/m4/ac_python_devel.m4
@@ -0,0 +1,209 @@
+AC_DEFUN([AC_PYTHON_DEVEL],[
|
|
|
|
+ #
|
|
|
|
+ # Allow the use of a (user set) custom python version
|
|
|
|
+ #
|
|
|
|
+ AC_ARG_VAR([PYTHON_VERSION],[The installed Python
|
|
|
|
+ version to use, for example '2.3'. This string
|
|
|
|
+ will be appended to the Python interpreter
|
|
|
|
+ canonical name.])
|
|
|
|
+
|
|
|
|
+ AC_PATH_PROG([PYTHON],[python[$PYTHON_VERSION]])
|
|
|
|
+ if test -z "$PYTHON"; then
|
|
|
|
+ AC_MSG_ERROR([Cannot find python$PYTHON_VERSION in your system path])
|
|
|
|
+ PYTHON_VERSION=""
|
|
|
|
+ fi
|
|
|
|
+
|
|
|
|
+ #
|
|
|
|
+ # Check for a version of Python >= 2.1.0
|
|
|
|
+ #
|
|
|
|
+ AC_MSG_CHECKING([for a version of Python >= '2.1.0'])
|
|
|
|
+ ac_supports_python_ver=`$PYTHON -c "import sys, string; \
|
|
|
|
+ ver = string.split(sys.version)[[0]]; \
|
|
|
|
+ print ver >= '2.1.0'"`
|
|
|
|
+ if test "$ac_supports_python_ver" != "True"; then
|
|
|
|
+ if test -z "$PYTHON_NOVERSIONCHECK"; then
|
|
|
|
+ AC_MSG_RESULT([no])
|
|
|
|
+ AC_MSG_FAILURE([
|
|
|
|
+This version of the AC@&t@_PYTHON_DEVEL macro
|
|
|
|
+doesn't work properly with versions of Python before
|
|
|
|
+2.1.0. You may need to re-run configure, setting the
|
|
|
|
+variables PYTHON_CPPFLAGS, PYTHON_LDFLAGS, PYTHON_SITE_PKG,
|
|
|
|
+PYTHON_EXTRA_LIBS and PYTHON_EXTRA_LDFLAGS by hand.
|
|
|
|
+Moreover, to disable this check, set PYTHON_NOVERSIONCHECK
|
|
|
|
+to something else than an empty string.
|
|
|
|
+])
|
|
|
|
+ else
|
|
|
|
+ AC_MSG_RESULT([skip at user request])
|
|
|
|
+ fi
|
|
|
|
+ else
|
|
|
|
+ AC_MSG_RESULT([yes])
|
|
|
|
+ fi
|
|
|
|
+
|
|
|
|
+ #
|
|
|
|
+ # if the macro parameter ``version'' is set, honour it
|
|
|
|
+ #
|
|
|
|
+ if test -n "$1"; then
|
|
|
|
+ AC_MSG_CHECKING([for a version of Python $1])
|
|
|
|
+ ac_supports_python_ver=`$PYTHON -c "import sys, string; \
|
|
|
|
+ ver = string.split(sys.version)[[0]]; \
|
|
|
|
+ print ver $1"`
|
|
|
|
+ if test "$ac_supports_python_ver" = "True"; then
|
|
|
|
+ AC_MSG_RESULT([yes])
|
|
|
|
+ else
|
|
|
|
+ AC_MSG_RESULT([no])
|
|
|
|
+ AC_MSG_ERROR([this package requires Python $1.
|
|
|
|
+If you have it installed, but it isn't the default Python
|
|
|
|
+interpreter in your system path, please pass the PYTHON_VERSION
|
|
|
|
+variable to configure. See ``configure --help'' for reference.
|
|
|
|
+])
|
|
|
|
+ PYTHON_VERSION=""
|
|
|
|
+ fi
|
|
|
|
+ fi
|
|
|
|
+
|
|
|
|
+ #
|
|
|
|
+ # Check if you have distutils, else fail
|
|
|
|
+ #
|
|
|
|
+ AC_MSG_CHECKING([for the distutils Python package])
|
|
|
|
+ ac_distutils_result=`$PYTHON -c "import distutils" 2>&1`
|
|
|
|
+ if test -z "$ac_distutils_result"; then
|
|
|
|
+ AC_MSG_RESULT([yes])
|
|
|
|
+ else
|
|
|
|
+ AC_MSG_RESULT([no])
|
|
|
|
+ AC_MSG_ERROR([cannot import Python module "distutils".
|
|
|
|
+Please check your Python installation. The error was:
|
|
|
|
+$ac_distutils_result])
|
|
|
|
+ PYTHON_VERSION=""
|
|
|
|
+ fi
|
|
|
|
+
|
|
|
|
+ #
|
|
|
|
+ # Check for Python include path
|
|
|
|
+ #
|
|
|
|
+ AC_MSG_CHECKING([for Python include path])
|
|
|
|
+ if test -z "$PYTHON_CPPFLAGS"; then
|
|
|
|
+ python_path=`$PYTHON -c "import distutils.sysconfig; \
|
|
|
|
+ print distutils.sysconfig.get_python_inc();"`
|
|
|
|
+ if test -n "${python_path}"; then
|
|
|
|
+ python_path="-I$python_path"
|
|
|
|
+ fi
|
|
|
|
+ PYTHON_CPPFLAGS=$python_path
|
|
|
|
+ fi
|
|
|
|
+ AC_MSG_RESULT([$PYTHON_CPPFLAGS])
|
|
|
|
+ AC_SUBST([PYTHON_CPPFLAGS])
|
|
|
|
+
|
|
|
|
+ #
|
|
|
|
+ # Check for Python library path
|
|
|
|
+ #
|
|
|
|
+ AC_MSG_CHECKING([for Python library path])
|
|
|
|
+ if test -z "$PYTHON_LDFLAGS"; then
|
|
|
|
+ # (makes two attempts to ensure we've got a version number
|
|
|
|
+ # from the interpreter)
|
|
|
|
+ py_version=`$PYTHON -c "from distutils.sysconfig import *; \
|
|
|
|
+ from string import join; \
|
|
|
|
+ print join(get_config_vars('VERSION'))"`
|
|
|
|
+ if test "$py_version" == "[None]"; then
|
|
|
|
+ if test -n "$PYTHON_VERSION"; then
|
|
|
|
+ py_version=$PYTHON_VERSION
|
|
|
|
+ else
|
|
|
|
+ py_version=`$PYTHON -c "import sys; \
|
|
|
|
+ print sys.version[[:3]]"`
|
|
|
|
+ fi
|
|
|
|
+ fi
|
|
|
|
+
|
|
|
|
+ PYTHON_LDFLAGS=`$PYTHON -c "from distutils.sysconfig import *; \
|
|
|
|
+ from string import join; \
|
|
|
|
+ print '-L' + get_python_lib(0,1), \
|
|
|
|
+ '-lpython';"`$py_version
|
|
|
|
+ fi
|
|
|
|
+ AC_MSG_RESULT([$PYTHON_LDFLAGS])
|
|
|
|
+ AC_SUBST([PYTHON_LDFLAGS])
|
|
|
|
+
|
|
|
|
+ #
|
|
|
|
+ # Check for site packages
|
|
|
|
+ #
|
|
|
|
+ AC_MSG_CHECKING([for Python site-packages path])
|
|
|
|
+ if test -z "$PYTHON_SITE_PKG"; then
|
|
|
|
+ PYTHON_SITE_PKG=`$PYTHON -c "import distutils.sysconfig; \
|
|
|
|
+ print distutils.sysconfig.get_python_lib(0,0);"`
|
|
|
|
+ fi
|
|
|
|
+ AC_MSG_RESULT([$PYTHON_SITE_PKG])
|
|
|
|
+ AC_SUBST([PYTHON_SITE_PKG])
|
|
|
|
+
|
|
|
|
+ #
|
|
|
|
+ # libraries which must be linked in when embedding
|
|
|
|
+ #
|
|
|
|
+ AC_MSG_CHECKING(python extra libraries)
|
|
|
|
+ if test -z "$PYTHON_EXTRA_LIBS"; then
|
|
|
|
+ PYTHON_EXTRA_LIBS=`$PYTHON -c "import distutils.sysconfig; \
|
|
|
|
+ conf = distutils.sysconfig.get_config_var; \
|
|
|
|
+ print conf('LOCALMODLIBS'), conf('LIBS')"`
|
|
|
|
+ fi
|
|
|
|
+ AC_MSG_RESULT([$PYTHON_EXTRA_LIBS])
|
|
|
|
+ AC_SUBST(PYTHON_EXTRA_LIBS)
|
|
|
|
+
|
|
|
|
+ #
|
|
|
|
+ # linking flags needed when embedding
|
|
|
|
+ #
|
|
|
|
+ AC_MSG_CHECKING(python extra linking flags)
|
|
|
|
+ if test -z "$PYTHON_EXTRA_LDFLAGS"; then
|
|
|
|
+ PYTHON_EXTRA_LDFLAGS=`$PYTHON -c "import distutils.sysconfig; \
|
|
|
|
+ conf = distutils.sysconfig.get_config_var; \
|
|
|
|
+ print conf('LINKFORSHARED')"`
|
|
|
|
+ fi
|
|
|
|
+ AC_MSG_RESULT([$PYTHON_EXTRA_LDFLAGS])
|
|
|
|
+ AC_SUBST(PYTHON_EXTRA_LDFLAGS)
|
|
|
|
+
|
|
|
|
+ #
|
|
|
|
+ # final check to see if everything compiles alright
|
|
|
|
+ #
|
|
|
|
+ AC_MSG_CHECKING([consistency of all components of python development environment])
|
|
|
|
+ AC_LANG_PUSH([C])
|
|
|
|
+ # save current global flags
|
|
|
|
+ LIBS="$ac_save_LIBS $PYTHON_LDFLAGS"
|
|
|
|
+ CPPFLAGS="$ac_save_CPPFLAGS $PYTHON_CPPFLAGS"
|
|
|
|
+ AC_TRY_LINK([
|
|
|
|
+ #include <Python.h>
|
|
|
|
+ ],[
|
|
|
|
+ Py_Initialize();
|
|
|
|
+ ],[pythonexists=yes],[pythonexists=no])
|
|
|
|
+
|
|
|
|
+ AC_MSG_RESULT([$pythonexists])
|
|
|
|
+
|
|
|
|
+ if test ! "$pythonexists" = "yes"; then
|
|
|
|
+ AC_MSG_ERROR([
|
|
|
|
+ Could not link test program to Python. Maybe the main Python library has been
|
|
|
|
+ installed in some non-standard library path.  If so, pass it to configure
|
|
|
|
+ via the LDFLAGS environment variable.
|
|
|
|
+ Example: ./configure LDFLAGS="-L/usr/non-standard-path/python/lib"
|
|
|
|
+ ============================================================================
|
|
|
|
+ ERROR!
|
|
|
|
+ You probably have to install the development version of the Python package
|
|
|
|
+ for your distribution. The exact name of this package varies among them.
|
|
|
|
+ ============================================================================
|
|
|
|
+ ])
|
|
|
|
+ PYTHON_VERSION=""
|
|
|
|
+ fi
|
|
|
|
+ AC_LANG_POP
|
|
|
|
+ # turn back to default flags
|
|
|
|
+ CPPFLAGS="$ac_save_CPPFLAGS"
|
|
|
|
+ LIBS="$ac_save_LIBS"
|
|
|
|
+
|
|
|
|
+ #
|
|
|
|
+ # all done!
|
|
|
|
+ #
|
|
|
|
+])
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+AC_DEFUN([PROG_PYTHON],[
|
|
|
|
+AC_ARG_WITH(python,
|
|
|
|
+[ --with-python enable python support for libapparmor [[default=auto]]],
|
|
|
|
+[with_python=$withval], [with_python=auto])
|
|
|
|
+
|
|
|
|
+if test "$with_python" != "no"; then
|
|
|
|
+ AM_PATH_PYTHON(,, [no])
|
|
|
|
+ if test "$with_python" = "yes" -a "$PYTHON" = "no"; then
|
|
|
|
+ AC_MSG_ERROR([--with-python was given but python could not be found])
|
|
|
|
+ elif test "$PYTHON" = "no"; then
|
|
|
|
+    AC_MSG_RESULT([ --- python support for libapparmor not available])
|
|
|
|
+ fi
|
|
|
|
+fi
|
|
|
|
+])
|
|
|
|
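A minimal, hypothetical sketch of how the Python checks above might be consumed; PROG_PYTHON is the macro defined above, while the module name "example" and the Makefile.am wiring are purely illustrative and not taken from this patch:

    # configure.in
    PROG_PYTHON

    # swig/python/Makefile.am (illustrative)
    pyexec_LTLIBRARIES  = example.la
    example_la_SOURCES  = example_wrap.c
    example_la_CPPFLAGS = $(PYTHON_CPPFLAGS)
    example_la_LDFLAGS  = -module -avoid-version
    example_la_LIBADD   = $(PYTHON_LDFLAGS) $(PYTHON_EXTRA_LIBS)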
--- /dev/null
|
|
|
|
+++ b/m4/am_path_apxs.m4
|
|
|
|
@@ -0,0 +1,12 @@
|
|
|
|
+AC_DEFUN([AM_PATH_APXS],[
|
|
|
|
+ m4_define_default([_AM_APXS_LIST], [apxs2 apxs])
|
|
|
|
+
|
|
|
|
+    AC_PATH_PROGS(APXS, _AM_APXS_LIST, :, [$PATH$PATH_SEPARATOR/usr/sbin$PATH_SEPARATOR/usr/bin])
|
|
|
|
+
|
|
|
|
+ if test "$APXS" = :; then
|
|
|
|
+ m4_default([$3], [AC_MSG_ERROR([no apache extension tool found])])
|
|
|
|
+ else
|
|
|
|
+ echo -n
|
|
|
|
+ $2
|
|
|
|
+ fi
|
|
|
|
+])
|
|
|
|
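A hypothetical configure.in fragment showing how the optional action arguments of AM_PATH_APXS might be used; the shell variable and conditional names are illustrative:

    AM_PATH_APXS([],
                 [build_mod_apparmor=yes],
                 [AC_MSG_WARN([apxs not found - mod_apparmor will not be built])
                  build_mod_apparmor=no])
    AM_CONDITIONAL([HAVE_APXS], [test "$build_mod_apparmor" = yes])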
--- /dev/null
|
|
|
|
+++ b/m4/am_path_perl.m4
|
|
|
|
@@ -0,0 +1,25 @@
|
|
|
|
+AC_DEFUN([AM_PATH_PERL],[
|
|
|
|
+ m4_define_default([_AM_PERL_LIST], [perl perl6 perl5])
|
|
|
|
+
|
|
|
|
+ AC_PATH_PROGS(PERL, _AM_PERL_LIST, :,)
|
|
|
|
+
|
|
|
|
+ if test "$PERL" = :; then
|
|
|
|
+ m4_default([$3], [AC_MSG_ERROR([no perl interpreter found])])
|
|
|
|
+ else
|
|
|
|
+ VENDOR_PERL=`$PERL -V::vendorlib:`
|
|
|
|
+ VENDOR_PERL=`eval echo $VENDOR_PERL`
|
|
|
|
+    AC_MSG_RESULT([Perl vendorlib is $VENDOR_PERL])
|
|
|
|
+ AC_SUBST(VENDOR_PERL)
|
|
|
|
+
|
|
|
|
+ VENDOR_ARCH_PERL=`$PERL -V::vendorarch:`
|
|
|
|
+ VENDOR_ARCH_PERL=`eval echo $VENDOR_ARCH_PERL`
|
|
|
|
+ AC_MSG_RESULT([Perl vendorarch is $VENDOR_ARCH_PERL])
|
|
|
|
+ AC_SUBST(VENDOR_ARCH_PERL)
|
|
|
|
+
|
|
|
|
+ PERL_CCFLAGS=`$PERL -MExtUtils::Embed -e ccopts`
|
|
|
|
+ AC_SUBST(PERL_CCFLAGS)
|
|
|
|
+
|
|
|
|
+ $2
|
|
|
|
+ fi
|
|
|
|
+])
|
|
|
|
+
|
|
|
|
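A hypothetical sketch of how the values substituted by AM_PATH_PERL could be consumed from automake; the target names are illustrative (the Perl bindings in this patch also carry a Makefile.PL.in):

    # configure.in
    AM_PATH_PERL([], [have_perl=yes], [have_perl=no])

    # Makefile.am fragment (illustrative)
    perlarchdir          = $(VENDOR_ARCH_PERL)
    perlarch_LTLIBRARIES = Example.la
    Example_la_SOURCES   = Example_wrap.c
    Example_la_CPPFLAGS  = $(PERL_CCFLAGS)
    Example_la_LDFLAGS   = -module -avoid-version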
--- /dev/null
|
|
|
|
+++ b/m4/am_path_ruby.m4
|
|
|
|
@@ -0,0 +1,115 @@
|
|
|
|
+#
|
|
|
|
+# AM_PATH_RUBY([MINIMUM-VERSION], [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND])
|
|
|
|
+# ---------------------------------------------------------------------------
|
|
|
|
+# Adds support for distributing Ruby modules and packages. To
|
|
|
|
+# install modules, copy them to $(rubydir), using the ruby_RUBY
|
|
|
|
+# automake variable. To install a package with the same name as the
|
|
|
|
+# automake package, install to $(pkgrubydir), or use the
|
|
|
|
+# pkgruby_RUBY automake variable.
|
|
|
|
+#
|
|
|
|
+# The variables $(rbexecdir) and $(pkgrbexecdir) are provided as
|
|
|
|
+# locations to install ruby extension modules (shared libraries).
|
|
|
|
+# Another macro is required to find the appropriate flags to compile
|
|
|
|
+# extension modules.
|
|
|
|
+#
|
|
|
|
+AC_DEFUN([AM_PATH_RUBY],
|
|
|
|
+ [
|
|
|
|
+ dnl Find a Ruby interpreter.
|
|
|
|
+ m4_define_default([_AM_RUBY_INTERPRETER_LIST],
|
|
|
|
+ [ruby ruby1.8 ruby1.7 ruby1.6])
|
|
|
|
+
|
|
|
|
+ m4_if([$1],[],[
|
|
|
|
+ dnl No version check is needed.
|
|
|
|
+ # Find any Ruby interpreter.
|
|
|
|
+ if test -z "$RUBY"; then
|
|
|
|
+ AC_PATH_PROGS([RUBY], _AM_RUBY_INTERPRETER_LIST, :)
|
|
|
|
+ fi
|
|
|
|
+ am_display_RUBY=ruby
|
|
|
|
+ ], [
|
|
|
|
+ dnl A version check is needed.
|
|
|
|
+ if test -n "$RUBY"; then
|
|
|
|
+ # If the user set $RUBY, use it and don't search something else.
|
|
|
|
+ #AC_MSG_CHECKING([whether $RUBY version >= $1])
|
|
|
|
+ #AM_RUBY_CHECK_VERSION([$RUBY], [$1],
|
|
|
|
+ # [AC_MSG_RESULT(yes)],
|
|
|
|
+ # [AC_MSG_ERROR(too old)])
|
|
|
|
+ am_display_RUBY=$RUBY
|
|
|
|
+ else
|
|
|
|
+ # Otherwise, try each interpreter until we find one that satisfies
|
|
|
|
+ # VERSION.
|
|
|
|
+ AC_CACHE_CHECK([for a Ruby interpreter with version >= $1],
|
|
|
|
+ [am_cv_pathless_RUBY],[
|
|
|
|
+ for am_cv_pathless_RUBY in _AM_RUBY_INTERPRETER_LIST none; do
|
|
|
|
+ test "$am_cv_pathless_RUBY" = none && break
|
|
|
|
+ #AM_RUBY_CHECK_VERSION([$am_cv_pathless_RUBY], [$1], [break])
|
|
|
|
+	  break
|
|
|
|
+ done])
|
|
|
|
+ # Set $RUBY to the absolute path of $am_cv_pathless_RUBY.
|
|
|
|
+ if test "$am_cv_pathless_RUBY" = none; then
|
|
|
|
+ RUBY=:
|
|
|
|
+ else
|
|
|
|
+ AC_PATH_PROG([RUBY], [$am_cv_pathless_RUBY])
|
|
|
|
+ fi
|
|
|
|
+ am_display_RUBY=$am_cv_pathless_RUBY
|
|
|
|
+ fi
|
|
|
|
+ ])
|
|
|
|
+
|
|
|
|
+ if test "$RUBY" = :; then
|
|
|
|
+ dnl Run any user-specified action, or abort.
|
|
|
|
+ m4_default([$3], [AC_MSG_ERROR([no suitable Ruby interpreter found])])
|
|
|
|
+ else
|
|
|
|
+
|
|
|
|
+    dnl Query Ruby for its version number.  The RUBY_VERSION constant
|
|
|
|
+    dnl already contains the full version string, so no further
|
|
|
|
+    dnl processing of the output is needed.
|
|
|
|
+
|
|
|
|
+ AC_CACHE_CHECK([for $am_display_RUBY version], [am_cv_ruby_version],
|
|
|
|
+ [am_cv_ruby_version=`$RUBY -e "print RUBY_VERSION"`])
|
|
|
|
+ AC_SUBST([RUBY_VERSION], [$am_cv_ruby_version])
|
|
|
|
+
|
|
|
|
+ dnl Use the values of $prefix and $exec_prefix for the corresponding
|
|
|
|
+ dnl values of RUBY_PREFIX and RUBY_EXEC_PREFIX. These are made
|
|
|
|
+ dnl distinct variables so they can be overridden if need be. However,
|
|
|
|
+ dnl general consensus is that you shouldn't need this ability.
|
|
|
|
+
|
|
|
|
+ AC_SUBST([RUBY_PREFIX], ['${prefix}'])
|
|
|
|
+ AC_SUBST([RUBY_EXEC_PREFIX], ['${exec_prefix}'])
|
|
|
|
+
|
|
|
|
+ dnl At times (like when building shared libraries) you may want
|
|
|
|
+ dnl to know which OS platform Ruby thinks this is.
|
|
|
|
+
|
|
|
|
+ AC_CACHE_CHECK([for $am_display_RUBY platform], [am_cv_ruby_platform],
|
|
|
|
+ [am_cv_ruby_platform=`$RUBY -e "print RUBY_PLATFORM"`])
|
|
|
|
+ AC_SUBST([RUBY_PLATFORM], [$am_cv_ruby_platform])
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ dnl Set up 4 directories:
|
|
|
|
+ dnl rubydir -- where to install ruby scripts.
|
|
|
|
+ AC_CACHE_CHECK([for $am_display_RUBY script directory],
|
|
|
|
+ [am_cv_ruby_rubydir],
|
|
|
|
+ [am_cv_ruby_rubydir=`$RUBY -rrbconfig -e "drive = File::PATH_SEPARATOR == ';' ? /\A\w:/ : /\A/; prefix = Regexp.new('\\A' + Regexp.quote(Config::CONFIG[['prefix']])); \\$prefix = Config::CONFIG[['prefix']].sub(drive, ''); \\$archdir = Config::CONFIG[['archdir']].sub(prefix, '\\$(prefix)').sub(drive, ''); print \\$archdir;"`])
|
|
|
|
+ AC_SUBST([rubydir], [$am_cv_ruby_rubydir])
|
|
|
|
+
|
|
|
|
+ dnl pkgrubydir -- $PACKAGE directory under rubydir.
|
|
|
|
+ AC_SUBST([pkgrubydir], [\${rubydir}/$PACKAGE])
|
|
|
|
+
|
|
|
|
+ dnl rbexecdir -- directory for installing ruby extension modules
|
|
|
|
+ dnl (shared libraries)
|
|
|
|
+ AC_CACHE_CHECK([for $am_display_RUBY extension module directory],
|
|
|
|
+ [am_cv_ruby_rbexecdir],
|
|
|
|
+ [am_cv_ruby_rbexecdir=`$RUBY -rrbconfig -e "drive = File::PATH_SEPARATOR == ';' ? /\A\w:/ : /\A/; prefix = Regexp.new('\\A' + Regexp.quote(Config::CONFIG[['prefix']])); \\$prefix = Config::CONFIG[['prefix']].sub(drive, ''); \\$sitearchdir = Config::CONFIG[['sitearchdir']].sub(prefix, '\\$(prefix)').sub(drive, ''); print \\$sitearchdir;" 2>/dev/null || echo "${RUBY_EXEC_PREFIX}/local/lib/site_ruby/${RUBY_VERSION}/${RUBY_PLATFORM}"`])
|
|
|
|
+ AC_SUBST([rbexecdir], [$am_cv_ruby_rbexecdir])
|
|
|
|
+
|
|
|
|
+ RUBY_INCLUDE_DIR=`$RUBY -r rbconfig -e 'puts Config::CONFIG[["archdir"]]'`
|
|
|
|
+ RUBY_INCLUDES=" -I $RUBY_INCLUDE_DIR"
|
|
|
|
+ AC_SUBST([RUBY_INCLUDES])
|
|
|
|
+
|
|
|
|
+ dnl pkgrbexecdir -- $(rbexecdir)/$(PACKAGE)
|
|
|
|
+
|
|
|
|
+ AC_SUBST([pkgrbexecdir], [\${rbexecdir}/$PACKAGE])
|
|
|
|
+
|
|
|
|
+ dnl Run any user-specified action.
|
|
|
|
+ $2
|
|
|
|
+ fi
|
|
|
|
+
|
|
|
|
+])
|
|
|
|
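A minimal, hypothetical usage sketch for AM_PATH_RUBY; the file name "Example.rb" is a placeholder and the Makefile.am wiring is illustrative, not taken from this patch:

    # configure.in
    AM_PATH_RUBY([], [have_ruby=yes], [have_ruby=no])

    # swig/ruby/Makefile.am (illustrative)
    rubydir     = @rubydir@
    ruby_DATA   = Example.rb          # pure-Ruby code, installed to $(rubydir)
    AM_CPPFLAGS = $(RUBY_INCLUDES)    # for building a C extension against ruby.h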
--- /dev/null
|
|
|
|
+++ b/m4/wxwidgets.m4
|
|
|
|
@@ -0,0 +1,37 @@
|
|
|
|
+AC_DEFUN([WXTEST],
|
|
|
|
+[
|
|
|
|
+ AC_REQUIRE([AC_PROG_AWK])
|
|
|
|
+ WXCONFIG=wx-config
|
|
|
|
+ AC_ARG_WITH(wx-config,
|
|
|
|
+ [[ --with-wx-config=FILE Use the given path to wx-config when determining
|
|
|
|
+ wxWidgets configuration; defaults to "wx-config"]],
|
|
|
|
+ [
|
|
|
|
+ if test "$withval" != "yes" -a "$withval" != ""; then
|
|
|
|
+ WXCONFIG=$withval
|
|
|
|
+ fi
|
|
|
|
+ ])
|
|
|
|
+
|
|
|
|
+ wxversion=0
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ AC_MSG_CHECKING([wxWidgets version])
|
|
|
|
+ if wxversion=`$WXCONFIG --version`; then
|
|
|
|
+ AC_MSG_RESULT([$wxversion])
|
|
|
|
+    # Verify minimum required version
|
|
|
|
+ vers=[`echo $wxversion | $AWK 'BEGIN { FS = "."; } { printf "% d", ($''1 * 1000 + $''2) * 1000 + $''3;}'`]
|
|
|
|
+ if test -n "$vers" && test "$vers" -ge 2006000; then
|
|
|
|
+ WX_CPPFLAGS="`$WXCONFIG --cppflags`"
|
|
|
|
+ WX_CXXFLAGS="`$WXCONFIG --cxxflags | sed -e 's/-fno-exceptions//'`"
|
|
|
|
+ WX_LIBS="`$WXCONFIG --libs`"
|
|
|
|
+ AC_SUBST(WX_CXXFLAGS)
|
|
|
|
+ AC_SUBST(WX_CPPFLAGS)
|
|
|
|
+ AC_SUBST(WX_LIBS)
|
|
|
|
+ else
|
|
|
|
+ AC_MSG_ERROR([wxWidgets 2.6.0 or newer is required])
|
|
|
|
+ fi
|
|
|
|
+
|
|
|
|
+ else
|
|
|
|
+ AC_MSG_RESULT([not found])
|
|
|
|
+ AC_MSG_ERROR([wxWidgets is required. Try --with-wx-config.])
|
|
|
|
+ fi])
|
|
|
|
+
|
|
|
|
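A hypothetical Makefile.am fragment showing how the flags substituted by WXTEST could be consumed; the program and source names are illustrative:

    bin_PROGRAMS       = wxexample
    wxexample_SOURCES  = main.cpp
    wxexample_CPPFLAGS = $(WX_CPPFLAGS)
    wxexample_CXXFLAGS = $(WX_CXXFLAGS)
    wxexample_LDADD    = $(WX_LIBS)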
--- /dev/null
|
|
|
|
+++ b/parser/Makefile.am
|
|
|
|
@@ -0,0 +1,81 @@
|
|
|
|
+SUBDIRS = libapparmor_re po
|
|
|
|
+
|
|
|
|
+lib_apparmor_DATA = rc.apparmor.functions
|
|
|
|
+etc_apparmor_DATA = subdomain.conf
|
|
|
|
+noinst_DATA = techdoc.pdf techdoc/techdoc.html techdoc/techdoc.css \
|
|
|
|
+ techdoc.txt apparmor.d.5.html apparmor.7.html \
|
|
|
|
+ apparmor_parser.8.html subdomain.conf.5.html \
|
|
|
|
+ apparmor.vim.5.html
|
|
|
|
+
|
|
|
|
+real_sbin_PROGRAMS = apparmor_parser
|
|
|
|
+dist_man_MANS = apparmor.d.5 apparmor.7 apparmor_parser.8 subdomain.conf.5 \
|
|
|
|
+ apparmor.vim.5
|
|
|
|
+
|
|
|
|
+BUILT_SOURCES = parser_lex.c parser_yacc.c af_names.h cap_names.h
|
|
|
|
+AM_YFLAGS = -d
|
|
|
|
+AM_CFLAGS = -DLOCALEDIR=\"$(localedir)\"
|
|
|
|
+apparmor_parser_SOURCES = parser_yacc.y parser_lex.l parser_include.c \
|
|
|
|
+ parser_interface.c parser_main.c parser_misc.c \
|
|
|
|
+ parser_merge.c parser_symtab.c parser_regex.c \
|
|
|
|
+ parser_variable.c parser_policy.c parser_alias.c
|
|
|
|
+
|
|
|
|
+libstdc++.a:
|
|
|
|
+ ln -s `g++ -print-file-name=libstdc++.a`
|
|
|
|
+
|
|
|
|
+apparmor_parser_LDFLAGS = -static-libgcc
|
|
|
|
+apparmor_parser_LDADD = $(top_srcdir)/libraries/libapparmor/src/libapparmor.la \
|
|
|
|
+ $(srcdir)/libapparmor_re/libapparmor_re.la \
|
|
|
|
+ libstdc++.a -lpcre
|
|
|
|
+
|
|
|
|
+install-data-local:
|
|
|
|
+ $(mkinstalldirs) $(DESTDIR)/var/lib/apparmor
|
|
|
|
+
|
|
|
|
+CLEANFILES = $(BUILT_SOURCES) $(doc_DATA) $(dist_man_MANS)
|
|
|
|
+
|
|
|
|
+# These are the families that it doesn't make sense for apparmor to mediate.
|
|
|
|
+# We use PF_ here since that is what is required in bits/socket.h, but we will
|
|
|
|
+# rewrite these as AF_.
|
|
|
|
+FILTER_FAMILIES=PF_RXRPC PF_MAX PF_UNSPEC PF_UNIX PF_LOCAL PF_NETLINK PF_LLC PF_IUCV PF_TIPC PF_CAN PF_ISDN PF_PHONET
|
|
|
|
+
|
|
|
|
+__FILTER=$(shell echo $(strip $(FILTER_FAMILIES)) | sed -e 's/ /\\\|/g')
|
|
|
|
+
|
|
|
|
+af_names.h: /usr/include/bits/socket.h
|
|
|
|
+ LC_ALL=C sed -n -e '/$(__FILTER)/d' -e "s/^\#define[ \\t]\\+PF_\\([A-Z0-9_]\\+\\)[ \\t]\\+\\([0-9]\\+\\)\\(.*\\)\$$/#ifndef AF_\\1\\n# define AF_\\1 \\2\\n#endif\\nAA_GEN_NET_ENT(\"\\L\\1\", \\UAF_\\1)\\n/p" $< > $@
|
|
|
|
+ LC_ALL=C sed -n -e "s/^\#define[ \\t]\\+PF_MAX[ \\t]\\+\\([0-9]\\+\\)[ \\t]\\+.*/#define AA_AF_MAX \\1\n/p" $< >> $@
|
|
|
|
+ cat $@
|
|
|
|
+
|
|
|
|
+cap_names.h: /usr/include/linux/capability.h
|
|
|
|
+ LC_ALL=C sed -n -e "/CAP_EMPTY_SET/d" -e "s/^\#define[ \\t]\\+CAP_\\([A-Z0-9_]\\+\\)[ \\t]\\+\\([0-9xa-f]\\+\\)\\(.*\\)\$$/\{\"\\L\\1\", \\UCAP_\\1\},/p" $< > $@
|
|
|
|
+
|
|
|
|
+.tex.pdf:
|
|
|
|
+ while pdflatex $< || exit 1; \
|
|
|
|
+ grep -q "Label(s) may have changed" techdoc.log; \
|
|
|
|
+ do :; done
|
|
|
|
+
|
|
|
|
+techdoc/techdoc.css :
|
|
|
|
+techdoc/%.html : %.tex
|
|
|
|
+#.tex.html:
|
|
|
|
+ latex2html -show_section_numbers -split 0 -noinfo -nonavigation -noaddress $<
|
|
|
|
+
|
|
|
|
+%.txt : techdoc/%.html
|
|
|
|
+ w3m -dump $< > $@
|
|
|
|
+
|
|
|
|
+PODARGS = --center=AppArmor --release=NOVELL/SUSE
|
|
|
|
+
|
|
|
|
+pod2man = pod2man $(PODARGS) --section $(subst .,,$(suffix $@)) $< > $@
|
|
|
|
+
|
|
|
|
+.pod.5:
|
|
|
|
+ $(pod2man)
|
|
|
|
+.pod.7:
|
|
|
|
+ $(pod2man)
|
|
|
|
+.pod.8:
|
|
|
|
+ $(pod2man)
|
|
|
|
+
|
|
|
|
+pod2html = pod2html --header --css ../common/apparmor.css --infile=$< --outfile=$@
|
|
|
|
+
|
|
|
|
+%.5.html : %.pod
|
|
|
|
+ $(pod2html)
|
|
|
|
+%.7.html : %.pod
|
|
|
|
+ $(pod2html)
|
|
|
|
+%.8.html : %.pod
|
|
|
|
+ $(pod2html)
|
|
|
|
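To illustrate the af_names.h rule above: for a hypothetical bits/socket.h line "#define PF_INET 2 /* IP protocol family. */", the first sed command emits

    #ifndef AF_INET
    # define AF_INET 2
    #endif
    AA_GEN_NET_ENT("inet", AF_INET)

and the pod2man helper, after the suffix substitution, runs roughly

    pod2man --center=AppArmor --release=NOVELL/SUSE --section 5 apparmor.d.pod > apparmor.d.5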
--- /dev/null
|
|
|
|
+++ b/parser/libapparmor_re/Makefile.am
|
|
|
|
@@ -0,0 +1,4 @@
|
|
|
|
+
|
|
|
|
+noinst_LTLIBRARIES = libapparmor_re.la
|
|
|
|
+
|
|
|
|
+libapparmor_re_la_SOURCES = regexp.yy
|
|
|
|
--- a/parser/libapparmor_re/regexp.y
|
|
|
|
+++ /dev/null
|
2011-03-25 09:04:51 +01:00
|
|
|
@@ -1,3082 +0,0 @@
|
2011-01-17 17:43:05 +01:00
|
|
|
-/*
|
|
|
|
- * regexp.y -- Regular Expression Matcher Generator
|
|
|
|
- * (C) 2006, 2007 Andreas Gruenbacher <agruen@suse.de>
|
|
|
|
- *
|
|
|
|
- * Implementation based on the Lexical Analysis chapter of:
|
|
|
|
- * Alfred V. Aho, Ravi Sethi, Jeffrey D. Ullman:
|
|
|
|
- * Compilers: Principles, Techniques, and Tools (The "Dragon Book"),
|
|
|
|
- * Addison-Wesley, 1986.
|
|
|
|
- *
|
|
|
|
- * This program is free software; you can redistribute it and/or modify
|
|
|
|
- * it under the terms of the GNU General Public License version 2 as
|
|
|
|
- * published by the Free Software Foundation.
|
|
|
|
- *
|
|
|
|
- * See http://www.gnu.org for more details.
|
|
|
|
- */
|
|
|
|
-
|
|
|
|
-%{
|
|
|
|
- /* #define DEBUG_TREE */
|
|
|
|
-
|
|
|
|
- #include <list>
|
|
|
|
- #include <vector>
|
2011-03-25 09:04:51 +01:00
|
|
|
- #include <stack>
|
2011-01-17 17:43:05 +01:00
|
|
|
- #include <set>
|
|
|
|
- #include <map>
|
|
|
|
- #include <ostream>
|
|
|
|
- #include <iostream>
|
|
|
|
- #include <fstream>
|
|
|
|
-
|
|
|
|
- using namespace std;
|
|
|
|
-
|
|
|
|
- typedef unsigned char uchar;
|
|
|
|
- typedef set<uchar> Chars;
|
|
|
|
-
|
|
|
|
- ostream& operator<<(ostream& os, uchar c);
|
|
|
|
-
|
|
|
|
- /* Compute the union of two sets. */
|
|
|
|
- template<class T>
|
|
|
|
- set<T> operator+(const set<T>& a, const set<T>& b)
|
|
|
|
- {
|
|
|
|
- set<T> c(a);
|
|
|
|
- c.insert(b.begin(), b.end());
|
|
|
|
- return c;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- /**
|
2011-03-25 09:04:51 +01:00
|
|
|
- * When creating DFAs from regex trees, a DFA state is constructed from
|
|
|
|
- * a set of important nodes in the syntax tree. This includes AcceptNodes,
|
|
|
|
- * which indicate that when a match ends in a particular state, the
|
|
|
|
- * regular expressions that the AcceptNode belongs to match.
|
2011-01-17 17:43:05 +01:00
|
|
|
- */
|
|
|
|
- class ImportantNode;
|
2011-03-25 09:04:51 +01:00
|
|
|
- typedef set <ImportantNode *> NodeSet;
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
|
|
|
- /**
|
2011-03-25 09:04:51 +01:00
|
|
|
- * Out-edges from a state to another: we store the follow-set of Nodes
|
2011-01-17 17:43:05 +01:00
|
|
|
- * for each input character that is not a default match in
|
|
|
|
- * cases (i.e., following a CharNode or CharSetNode), and default
|
|
|
|
- * matches in otherwise as well as in all matching explicit cases
|
|
|
|
- * (i.e., following an AnyCharNode or NotCharSetNode). This avoids
|
|
|
|
- * enumerating all the explicit tranitions for default matches.
|
|
|
|
- */
|
2011-03-25 09:04:51 +01:00
|
|
|
- typedef struct NodeCases {
|
|
|
|
- typedef map<uchar, NodeSet *>::iterator iterator;
|
2011-01-17 17:43:05 +01:00
|
|
|
- iterator begin() { return cases.begin(); }
|
|
|
|
- iterator end() { return cases.end(); }
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- NodeCases() : otherwise(0) { }
|
|
|
|
- map<uchar, NodeSet *> cases;
|
|
|
|
- NodeSet *otherwise;
|
|
|
|
- } NodeCases;
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
|
|
|
-
|
|
|
|
- /* An abstract node in the syntax tree. */
|
|
|
|
- class Node {
|
|
|
|
- public:
|
|
|
|
- Node() :
|
2011-03-25 09:04:51 +01:00
|
|
|
- nullable(false) { child[0] = child[1] = 0; }
|
2011-01-17 17:43:05 +01:00
|
|
|
- Node(Node *left) :
|
2011-03-25 09:04:51 +01:00
|
|
|
- nullable(false) { child[0] = left; child[1] = 0; }
|
2011-01-17 17:43:05 +01:00
|
|
|
- Node(Node *left, Node *right) :
|
2011-03-25 09:04:51 +01:00
|
|
|
- nullable(false) { child[0] = left; child[1] = right; }
|
2011-01-17 17:43:05 +01:00
|
|
|
- virtual ~Node()
|
|
|
|
- {
|
|
|
|
- if (child[0])
|
|
|
|
- child[0]->release();
|
|
|
|
- if (child[1])
|
|
|
|
- child[1]->release();
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- /**
|
|
|
|
- * See the "Dragon Book" for an explanation of nullable, firstpos,
|
|
|
|
- * lastpos, and followpos.
|
|
|
|
- */
|
|
|
|
- virtual void compute_nullable() { }
|
|
|
|
- virtual void compute_firstpos() = 0;
|
|
|
|
- virtual void compute_lastpos() = 0;
|
|
|
|
- virtual void compute_followpos() { }
|
|
|
|
- virtual int eq(Node *other) = 0;
|
|
|
|
- virtual ostream& dump(ostream& os) = 0;
|
|
|
|
-
|
|
|
|
- bool nullable;
|
2011-03-25 09:04:51 +01:00
|
|
|
- NodeSet firstpos, lastpos, followpos;
|
2011-01-17 17:43:05 +01:00
|
|
|
- /* child 0 is left, child 1 is right */
|
|
|
|
- Node *child[2];
|
|
|
|
-
|
|
|
|
- unsigned int label; /* unique number for debug etc */
|
|
|
|
- /**
|
2011-03-25 09:04:51 +01:00
|
|
|
- * We indirectly release Nodes through a virtual function because
|
|
|
|
- * accept and Eps Nodes are shared, and must be treated specially.
|
|
|
|
- * We could use full reference counting here but the indirect release
|
|
|
|
- * is sufficient and has less overhead
|
2011-01-17 17:43:05 +01:00
|
|
|
- */
|
2011-03-25 09:04:51 +01:00
|
|
|
- virtual void release(void) {
|
|
|
|
- delete this;
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- };
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- class InnerNode : public Node {
|
|
|
|
- public:
|
|
|
|
- InnerNode() : Node() { };
|
|
|
|
- InnerNode(Node *left) : Node(left) {};
|
|
|
|
- InnerNode(Node *left, Node *right) : Node(left, right) { };
|
|
|
|
- };
|
|
|
|
-
|
|
|
|
- class OneChildNode : public InnerNode {
|
|
|
|
- public:
|
|
|
|
- OneChildNode(Node *left) : InnerNode(left) { };
|
|
|
|
- };
|
|
|
|
-
|
|
|
|
- class TwoChildNode : public InnerNode {
|
|
|
|
- public:
|
|
|
|
- TwoChildNode(Node *left, Node *right) : InnerNode(left, right) { };
|
|
|
|
- };
|
|
|
|
-
|
|
|
|
- class LeafNode : public Node {
|
|
|
|
- public:
|
|
|
|
- LeafNode() : Node() { };
|
|
|
|
-
|
|
|
|
- };
|
|
|
|
-
|
2011-01-17 17:43:05 +01:00
|
|
|
- /* Match nothing (//). */
|
2011-03-25 09:04:51 +01:00
|
|
|
- class EpsNode : public LeafNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
- public:
|
2011-03-25 09:04:51 +01:00
|
|
|
- EpsNode() : LeafNode()
|
2011-01-17 17:43:05 +01:00
|
|
|
- {
|
|
|
|
- nullable = true;
|
2011-03-25 09:04:51 +01:00
|
|
|
- label = 0;
|
|
|
|
- }
|
|
|
|
- void release(void)
|
|
|
|
- {
|
|
|
|
- /* don't delete Eps nodes because there is a single static instance
|
|
|
|
- * shared by all trees. Look for epsnode in the code
|
|
|
|
- */
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
2011-03-25 09:04:51 +01:00
|
|
|
-
|
2011-01-17 17:43:05 +01:00
|
|
|
- void compute_firstpos()
|
|
|
|
- {
|
|
|
|
- }
|
|
|
|
- void compute_lastpos()
|
|
|
|
- {
|
|
|
|
- }
|
|
|
|
- int eq(Node *other) {
|
|
|
|
- if (dynamic_cast<EpsNode *>(other))
|
|
|
|
- return 1;
|
|
|
|
- return 0;
|
|
|
|
- }
|
|
|
|
- ostream& dump(ostream& os)
|
|
|
|
- {
|
|
|
|
- return os << "[]";
|
|
|
|
- }
|
|
|
|
- };
|
|
|
|
-
|
|
|
|
- /**
|
|
|
|
- * Leaf nodes in the syntax tree are important to us: they describe the
|
|
|
|
- * characters that the regular expression matches. We also consider
|
|
|
|
- * AcceptNodes import: they indicate when a regular expression matches.
|
|
|
|
- */
|
2011-03-25 09:04:51 +01:00
|
|
|
- class ImportantNode : public LeafNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
- public:
|
2011-03-25 09:04:51 +01:00
|
|
|
- ImportantNode() : LeafNode() { }
|
2011-01-17 17:43:05 +01:00
|
|
|
- void compute_firstpos()
|
|
|
|
- {
|
|
|
|
- firstpos.insert(this);
|
|
|
|
- }
|
|
|
|
- void compute_lastpos() {
|
|
|
|
- lastpos.insert(this);
|
|
|
|
- }
|
2011-03-25 09:04:51 +01:00
|
|
|
- virtual void follow(NodeCases& cases) = 0;
|
|
|
|
- };
|
|
|
|
-
|
|
|
|
- /* common base class for all the different classes that contain
|
|
|
|
- * character information.
|
|
|
|
- */
|
|
|
|
- class CNode : public ImportantNode {
|
|
|
|
- public:
|
|
|
|
- CNode() : ImportantNode() { }
|
|
|
|
-
|
2011-01-17 17:43:05 +01:00
|
|
|
- };
|
|
|
|
-
|
|
|
|
- /* Match one specific character (/c/). */
|
2011-03-25 09:04:51 +01:00
|
|
|
- class CharNode : public CNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
- public:
|
|
|
|
- CharNode(uchar c) : c(c) { }
|
2011-03-25 09:04:51 +01:00
|
|
|
- void follow(NodeCases& cases)
|
2011-01-17 17:43:05 +01:00
|
|
|
- {
|
2011-03-25 09:04:51 +01:00
|
|
|
- NodeSet **x = &cases.cases[c];
|
2011-01-17 17:43:05 +01:00
|
|
|
- if (!*x) {
|
|
|
|
- if (cases.otherwise)
|
2011-03-25 09:04:51 +01:00
|
|
|
- *x = new NodeSet(*cases.otherwise);
|
2011-01-17 17:43:05 +01:00
|
|
|
- else
|
2011-03-25 09:04:51 +01:00
|
|
|
- *x = new NodeSet;
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- (*x)->insert(followpos.begin(), followpos.end());
|
|
|
|
- }
|
|
|
|
- int eq(Node *other) {
|
|
|
|
- CharNode *o = dynamic_cast<CharNode *>(other);
|
|
|
|
- if (o) {
|
|
|
|
- return c == o->c;
|
|
|
|
- }
|
|
|
|
- return 0;
|
|
|
|
- }
|
|
|
|
- ostream& dump(ostream& os)
|
|
|
|
- {
|
|
|
|
- return os << c;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- uchar c;
|
|
|
|
- };
|
|
|
|
-
|
|
|
|
- /* Match a set of characters (/[abc]/). */
|
2011-03-25 09:04:51 +01:00
|
|
|
- class CharSetNode : public CNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
- public:
|
|
|
|
- CharSetNode(Chars& chars) : chars(chars) { }
|
2011-03-25 09:04:51 +01:00
|
|
|
- void follow(NodeCases& cases)
|
2011-01-17 17:43:05 +01:00
|
|
|
- {
|
|
|
|
- for (Chars::iterator i = chars.begin(); i != chars.end(); i++) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- NodeSet **x = &cases.cases[*i];
|
2011-01-17 17:43:05 +01:00
|
|
|
- if (!*x) {
|
|
|
|
- if (cases.otherwise)
|
2011-03-25 09:04:51 +01:00
|
|
|
- *x = new NodeSet(*cases.otherwise);
|
2011-01-17 17:43:05 +01:00
|
|
|
- else
|
2011-03-25 09:04:51 +01:00
|
|
|
- *x = new NodeSet;
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- (*x)->insert(followpos.begin(), followpos.end());
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- int eq(Node *other) {
|
|
|
|
- CharSetNode *o = dynamic_cast<CharSetNode *>(other);
|
|
|
|
- if (!o || chars.size() != o->chars.size())
|
|
|
|
- return 0;
|
|
|
|
-
|
|
|
|
- for (Chars::iterator i = chars.begin(), j = o->chars.begin();
|
|
|
|
- i != chars.end() && j != o->chars.end();
|
|
|
|
- i++, j++) {
|
|
|
|
- if (*i != *j)
|
|
|
|
- return 0;
|
|
|
|
- }
|
|
|
|
- return 1;
|
|
|
|
- }
|
|
|
|
- ostream& dump(ostream& os)
|
|
|
|
- {
|
|
|
|
- os << '[';
|
|
|
|
- for (Chars::iterator i = chars.begin(); i != chars.end(); i++)
|
|
|
|
- os << *i;
|
|
|
|
- return os << ']';
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- Chars chars;
|
|
|
|
- };
|
|
|
|
-
|
|
|
|
- /* Match all except one character (/[^abc]/). */
|
2011-03-25 09:04:51 +01:00
|
|
|
- class NotCharSetNode : public CNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
- public:
|
|
|
|
- NotCharSetNode(Chars& chars) : chars(chars) { }
|
2011-03-25 09:04:51 +01:00
|
|
|
- void follow(NodeCases& cases)
|
2011-01-17 17:43:05 +01:00
|
|
|
- {
|
|
|
|
- if (!cases.otherwise)
|
2011-03-25 09:04:51 +01:00
|
|
|
- cases.otherwise = new NodeSet;
|
2011-01-17 17:43:05 +01:00
|
|
|
- for (Chars::iterator j = chars.begin(); j != chars.end(); j++) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- NodeSet **x = &cases.cases[*j];
|
2011-01-17 17:43:05 +01:00
|
|
|
- if (!*x)
|
2011-03-25 09:04:51 +01:00
|
|
|
- *x = new NodeSet(*cases.otherwise);
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- /**
|
|
|
|
- * Note: Add to the nonmatching characters after copying away the
|
|
|
|
- * old otherwise state for the matching characters.
|
|
|
|
- */
|
|
|
|
- cases.otherwise->insert(followpos.begin(), followpos.end());
|
2011-03-25 09:04:51 +01:00
|
|
|
- for (NodeCases::iterator i = cases.begin(); i != cases.end(); i++) {
|
2011-01-17 17:43:05 +01:00
|
|
|
- if (chars.find(i->first) == chars.end())
|
|
|
|
- i->second->insert(followpos.begin(), followpos.end());
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- int eq(Node *other) {
|
|
|
|
- NotCharSetNode *o = dynamic_cast<NotCharSetNode *>(other);
|
|
|
|
- if (!o || chars.size() != o->chars.size())
|
|
|
|
- return 0;
|
|
|
|
-
|
|
|
|
- for (Chars::iterator i = chars.begin(), j = o->chars.begin();
|
|
|
|
- i != chars.end() && j != o->chars.end();
|
|
|
|
- i++, j++) {
|
|
|
|
- if (*i != *j)
|
|
|
|
- return 0;
|
|
|
|
- }
|
|
|
|
- return 1;
|
|
|
|
- }
|
|
|
|
- ostream& dump(ostream& os)
|
|
|
|
- {
|
|
|
|
- os << "[^";
|
|
|
|
- for (Chars::iterator i = chars.begin(); i != chars.end(); i++)
|
|
|
|
- os << *i;
|
|
|
|
- return os << ']';
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- Chars chars;
|
|
|
|
- };
|
|
|
|
-
|
|
|
|
- /* Match any character (/./). */
|
2011-03-25 09:04:51 +01:00
|
|
|
- class AnyCharNode : public CNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
- public:
|
|
|
|
- AnyCharNode() { }
|
2011-03-25 09:04:51 +01:00
|
|
|
- void follow(NodeCases& cases)
|
2011-01-17 17:43:05 +01:00
|
|
|
- {
|
|
|
|
- if (!cases.otherwise)
|
2011-03-25 09:04:51 +01:00
|
|
|
- cases.otherwise = new NodeSet;
|
2011-01-17 17:43:05 +01:00
|
|
|
- cases.otherwise->insert(followpos.begin(), followpos.end());
|
2011-03-25 09:04:51 +01:00
|
|
|
- for (NodeCases::iterator i = cases.begin(); i != cases.end(); i++)
|
2011-01-17 17:43:05 +01:00
|
|
|
- i->second->insert(followpos.begin(), followpos.end());
|
|
|
|
- }
|
|
|
|
- int eq(Node *other) {
|
|
|
|
- if (dynamic_cast<AnyCharNode *>(other))
|
|
|
|
- return 1;
|
|
|
|
- return 0;
|
|
|
|
- }
|
|
|
|
- ostream& dump(ostream& os) {
|
|
|
|
- return os << ".";
|
|
|
|
- }
|
|
|
|
- };
|
|
|
|
-
|
|
|
|
- /**
|
|
|
|
- * Indicate that a regular expression matches. An AcceptNode itself
|
|
|
|
- * doesn't match anything, so it will never generate any transitions.
|
|
|
|
- */
|
|
|
|
- class AcceptNode : public ImportantNode {
|
|
|
|
- public:
|
|
|
|
- AcceptNode() {}
|
2011-03-25 09:04:51 +01:00
|
|
|
- void release(void)
|
|
|
|
- {
|
|
|
|
- /* don't delete AcceptNode via release as they are shared,
|
|
|
|
- * and will be deleted when the table the are stored in is deleted
|
|
|
|
- */
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- void follow(NodeCases& cases __attribute__((unused)))
|
2011-01-17 17:43:05 +01:00
|
|
|
- {
|
|
|
|
- /* Nothing to follow. */
|
|
|
|
- }
|
|
|
|
- /* requires accept nodes to be common by pointer */
|
|
|
|
- int eq(Node *other) {
|
|
|
|
- if (dynamic_cast<AcceptNode *>(other))
|
|
|
|
- return (this == other);
|
|
|
|
- return 0;
|
|
|
|
- }
|
|
|
|
- };
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- /* Match a node zero or more times. (This is a unary operator.) */
|
|
|
|
- class StarNode : public OneChildNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
- public:
|
2011-03-25 09:04:51 +01:00
|
|
|
- StarNode(Node *left) :
|
|
|
|
- OneChildNode(left)
|
2011-01-17 17:43:05 +01:00
|
|
|
- {
|
2011-03-25 09:04:51 +01:00
|
|
|
- nullable = true;
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- void compute_firstpos()
|
|
|
|
- {
|
2011-03-25 09:04:51 +01:00
|
|
|
- firstpos = child[0]->firstpos;
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- void compute_lastpos()
|
|
|
|
- {
|
2011-03-25 09:04:51 +01:00
|
|
|
- lastpos = child[0]->lastpos;
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- void compute_followpos()
|
|
|
|
- {
|
2011-03-25 09:04:51 +01:00
|
|
|
- NodeSet from = child[0]->lastpos, to = child[0]->firstpos;
|
|
|
|
- for(NodeSet::iterator i = from.begin(); i != from.end(); i++) {
|
2011-01-17 17:43:05 +01:00
|
|
|
- (*i)->followpos.insert(to.begin(), to.end());
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- int eq(Node *other) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (dynamic_cast<StarNode *>(other))
|
|
|
|
- return child[0]->eq(other->child[0]);
|
2011-01-17 17:43:05 +01:00
|
|
|
- return 0;
|
|
|
|
- }
|
|
|
|
- ostream& dump(ostream& os)
|
|
|
|
- {
|
2011-03-25 09:04:51 +01:00
|
|
|
- os << '(';
|
2011-01-17 17:43:05 +01:00
|
|
|
- child[0]->dump(os);
|
2011-03-25 09:04:51 +01:00
|
|
|
- return os << ")*";
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- };
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- /* Match a node one or more times. (This is a unary operator.) */
|
|
|
|
- class PlusNode : public OneChildNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
- public:
|
2011-03-25 09:04:51 +01:00
|
|
|
- PlusNode(Node *left) :
|
|
|
|
- OneChildNode(left) { }
|
|
|
|
- void compute_nullable()
|
2011-01-17 17:43:05 +01:00
|
|
|
- {
|
2011-03-25 09:04:51 +01:00
|
|
|
- nullable = child[0]->nullable;
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- void compute_firstpos()
|
|
|
|
- {
|
|
|
|
- firstpos = child[0]->firstpos;
|
|
|
|
- }
|
|
|
|
- void compute_lastpos()
|
|
|
|
- {
|
|
|
|
- lastpos = child[0]->lastpos;
|
|
|
|
- }
|
|
|
|
- void compute_followpos()
|
|
|
|
- {
|
2011-03-25 09:04:51 +01:00
|
|
|
- NodeSet from = child[0]->lastpos, to = child[0]->firstpos;
|
|
|
|
- for(NodeSet::iterator i = from.begin(); i != from.end(); i++) {
|
2011-01-17 17:43:05 +01:00
|
|
|
- (*i)->followpos.insert(to.begin(), to.end());
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- int eq(Node *other) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (dynamic_cast<PlusNode *>(other))
|
2011-01-17 17:43:05 +01:00
|
|
|
- return child[0]->eq(other->child[0]);
|
|
|
|
- return 0;
|
|
|
|
- }
|
|
|
|
- ostream& dump(ostream& os)
|
|
|
|
- {
|
|
|
|
- os << '(';
|
|
|
|
- child[0]->dump(os);
|
2011-03-25 09:04:51 +01:00
|
|
|
- return os << ")+";
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- };
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- /* Match a pair of consecutive nodes. */
|
|
|
|
- class CatNode : public TwoChildNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
- public:
|
2011-03-25 09:04:51 +01:00
|
|
|
- CatNode(Node *left, Node *right) :
|
|
|
|
- TwoChildNode(left, right) { }
|
2011-01-17 17:43:05 +01:00
|
|
|
- void compute_nullable()
|
|
|
|
- {
|
2011-03-25 09:04:51 +01:00
|
|
|
- nullable = child[0]->nullable && child[1]->nullable;
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- void compute_firstpos()
|
|
|
|
- {
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (child[0]->nullable)
|
|
|
|
- firstpos = child[0]->firstpos + child[1]->firstpos;
|
|
|
|
- else
|
|
|
|
- firstpos = child[0]->firstpos;
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- void compute_lastpos()
|
|
|
|
- {
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (child[1]->nullable)
|
|
|
|
- lastpos = child[0]->lastpos + child[1]->lastpos;
|
|
|
|
- else
|
|
|
|
- lastpos = child[1]->lastpos;
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- void compute_followpos()
|
|
|
|
- {
|
2011-03-25 09:04:51 +01:00
|
|
|
- NodeSet from = child[0]->lastpos, to = child[1]->firstpos;
|
|
|
|
- for(NodeSet::iterator i = from.begin(); i != from.end(); i++) {
|
2011-01-17 17:43:05 +01:00
|
|
|
- (*i)->followpos.insert(to.begin(), to.end());
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- int eq(Node *other) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (dynamic_cast<CatNode *>(other)) {
|
|
|
|
- if (!child[0]->eq(other->child[0]))
|
|
|
|
- return 0;
|
|
|
|
- return child[1]->eq(other->child[1]);
|
|
|
|
- }
|
2011-01-17 17:43:05 +01:00
|
|
|
- return 0;
|
|
|
|
- }
|
|
|
|
- ostream& dump(ostream& os)
|
|
|
|
- {
|
|
|
|
- child[0]->dump(os);
|
2011-03-25 09:04:51 +01:00
|
|
|
- child[1]->dump(os);
|
|
|
|
- return os;
|
|
|
|
- //return os << ' ';
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- };
|
|
|
|
-
|
|
|
|
- /* Match one of two alternative nodes. */
|
2011-03-25 09:04:51 +01:00
|
|
|
- class AltNode : public TwoChildNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
- public:
|
|
|
|
- AltNode(Node *left, Node *right) :
|
2011-03-25 09:04:51 +01:00
|
|
|
- TwoChildNode(left, right) { }
|
2011-01-17 17:43:05 +01:00
|
|
|
- void compute_nullable()
|
|
|
|
- {
|
|
|
|
- nullable = child[0]->nullable || child[1]->nullable;
|
|
|
|
- }
|
|
|
|
- void compute_lastpos()
|
|
|
|
- {
|
|
|
|
- lastpos = child[0]->lastpos + child[1]->lastpos;
|
|
|
|
- }
|
|
|
|
- void compute_firstpos()
|
|
|
|
- {
|
|
|
|
- firstpos = child[0]->firstpos + child[1]->firstpos;
|
|
|
|
- }
|
|
|
|
- int eq(Node *other) {
|
|
|
|
- if (dynamic_cast<AltNode *>(other)) {
|
|
|
|
- if (!child[0]->eq(other->child[0]))
|
|
|
|
- return 0;
|
|
|
|
- return child[1]->eq(other->child[1]);
|
|
|
|
- }
|
|
|
|
- return 0;
|
|
|
|
- }
|
|
|
|
- ostream& dump(ostream& os)
|
|
|
|
- {
|
|
|
|
- os << '(';
|
|
|
|
- child[0]->dump(os);
|
|
|
|
- os << '|';
|
|
|
|
- child[1]->dump(os);
|
|
|
|
- os << ')';
|
|
|
|
- return os;
|
|
|
|
- }
|
|
|
|
- };
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
-/* Use a single static EpsNode as it carries no node specific information */
|
|
|
|
-static EpsNode epsnode;
|
|
|
|
-
|
2011-01-17 17:43:05 +01:00
|
|
|
-/*
|
2011-03-25 09:04:51 +01:00
|
|
|
- * Normalize the regex parse tree for factoring and cancelations. Normalization
|
|
|
|
- * reorganizes internal (alt and cat) nodes into a fixed "normalized" form that
|
|
|
|
- * simplifies factoring code, in that it produces a canonicalized form for
|
|
|
|
- * the direction being normalized so that the factoring code does not have
|
|
|
|
- * to consider as many cases.
|
|
|
|
- *
|
2011-01-17 17:43:05 +01:00
|
|
|
- * left normalization (dir == 0) uses these rules
|
|
|
|
- * (E | a) -> (a | E)
|
|
|
|
- * (a | b) | c -> a | (b | c)
|
|
|
|
- * (ab)c -> a(bc)
|
|
|
|
- *
|
|
|
|
- * right normalization (dir == 1) uses the same rules but reversed
|
|
|
|
- * (a | E) -> (E | a)
|
|
|
|
- * a | (b | c) -> (a | b) | c
|
|
|
|
- * a(bc) -> (ab)c
|
2011-03-25 09:04:51 +01:00
|
|
|
- *
|
|
|
|
- * Note: This is written iteratively for a given node (the top node stays
|
|
|
|
- * fixed and the children are rotated) instead of recursively.
|
|
|
|
- * For a given node under examination rotate over nodes from
|
|
|
|
- * dir to !dir. Until no dir direction node meets the criterial.
|
|
|
|
- * Then recurse to the children (which will have a different node type)
|
|
|
|
- * to make sure they are normalized.
|
|
|
|
- * Normalization of a child node is guarenteed to not affect the
|
|
|
|
- * normalization of the parent.
|
|
|
|
- *
|
|
|
|
- * For cat nodes the depth first traverse order is guarenteed to be
|
|
|
|
- * maintained. This is not necessary for altnodes.
|
|
|
|
- *
|
|
|
|
- * Eg. For left normalization
|
|
|
|
- *
|
|
|
|
- * |1 |1
|
|
|
|
- * / \ / \
|
|
|
|
- * |2 T -> a |2
|
|
|
|
- * / \ / \
|
|
|
|
- * |3 c b |3
|
|
|
|
- * / \ / \
|
|
|
|
- * a b c T
|
|
|
|
- *
|
2011-01-17 17:43:05 +01:00
|
|
|
- */
|
2011-03-25 09:04:51 +01:00
|
|
|
-static void rotate_node(Node *t, int dir) {
|
|
|
|
- // (a | b) | c -> a | (b | c)
|
|
|
|
- // (ab)c -> a(bc)
|
|
|
|
- Node *left = t->child[dir];
|
|
|
|
- t->child[dir] = left->child[dir];
|
|
|
|
- left->child[dir] = left->child[!dir];
|
|
|
|
- left->child[!dir] = t->child[!dir];
|
|
|
|
- t->child[!dir] = left;
|
|
|
|
-}
|
|
|
|
-
|
2011-01-17 17:43:05 +01:00
|
|
|
-void normalize_tree(Node *t, int dir)
|
|
|
|
-{
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (dynamic_cast<LeafNode *>(t))
|
2011-01-17 17:43:05 +01:00
|
|
|
- return;
|
|
|
|
-
|
|
|
|
- for (;;) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- if ((&epsnode == t->child[dir]) &&
|
|
|
|
- (&epsnode != t->child[!dir]) &&
|
|
|
|
- dynamic_cast<TwoChildNode *>(t)) {
|
2011-01-17 17:43:05 +01:00
|
|
|
- // (E | a) -> (a | E)
|
|
|
|
- // Ea -> aE
|
|
|
|
- Node *c = t->child[dir];
|
|
|
|
- t->child[dir] = t->child[!dir];
|
|
|
|
- t->child[!dir] = c;
|
2011-03-25 09:04:51 +01:00
|
|
|
- // Don't break here as 'a' may be a tree that
|
|
|
|
- // can be pulled up.
|
2011-01-17 17:43:05 +01:00
|
|
|
- } else if ((dynamic_cast<AltNode *>(t) &&
|
|
|
|
- dynamic_cast<AltNode *>(t->child[dir])) ||
|
|
|
|
- (dynamic_cast<CatNode *>(t) &&
|
|
|
|
- dynamic_cast<CatNode *>(t->child[dir]))) {
|
|
|
|
- // (a | b) | c -> a | (b | c)
|
|
|
|
- // (ab)c -> a(bc)
|
2011-03-25 09:04:51 +01:00
|
|
|
- rotate_node(t, dir);
|
2011-01-17 17:43:05 +01:00
|
|
|
- } else if (dynamic_cast<AltNode *>(t) &&
|
|
|
|
- dynamic_cast<CharSetNode *>(t->child[dir]) &&
|
|
|
|
- dynamic_cast<CharNode *>(t->child[!dir])) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- // [a] | b -> b | [a]
|
2011-01-17 17:43:05 +01:00
|
|
|
- Node *c = t->child[dir];
|
|
|
|
- t->child[dir] = t->child[!dir];
|
|
|
|
- t->child[!dir] = c;
|
|
|
|
- } else {
|
|
|
|
- break;
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- if (t->child[dir])
|
|
|
|
- normalize_tree(t->child[dir], dir);
|
|
|
|
- if (t->child[!dir])
|
|
|
|
- normalize_tree(t->child[!dir], dir);
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-//charset conversion is disabled for now,
|
|
|
|
-//it hinders tree optimization in some cases, so it need to be either
|
|
|
|
-//done post optimization, or have extra factoring rules added
|
|
|
|
-#if 0
|
|
|
|
-static Node *merge_charset(Node *a, Node *b)
|
|
|
|
-{
|
|
|
|
- if (dynamic_cast<CharNode *>(a) &&
|
|
|
|
- dynamic_cast<CharNode *>(b)) {
|
|
|
|
- Chars chars;
|
|
|
|
- chars.insert(dynamic_cast<CharNode *>(a)->c);
|
|
|
|
- chars.insert(dynamic_cast<CharNode *>(b)->c);
|
|
|
|
- CharSetNode *n = new CharSetNode(chars);
|
|
|
|
- return n;
|
|
|
|
- } else if (dynamic_cast<CharNode *>(a) &&
|
|
|
|
- dynamic_cast<CharSetNode *>(b)) {
|
|
|
|
- Chars *chars = &dynamic_cast<CharSetNode *>(b)->chars;
|
|
|
|
- chars->insert(dynamic_cast<CharNode *>(a)->c);
|
2011-03-25 09:04:51 +01:00
|
|
|
- return b;
|
2011-01-17 17:43:05 +01:00
|
|
|
- } else if (dynamic_cast<CharSetNode *>(a) &&
|
|
|
|
- dynamic_cast<CharSetNode *>(b)) {
|
|
|
|
- Chars *from = &dynamic_cast<CharSetNode *>(a)->chars;
|
|
|
|
- Chars *to = &dynamic_cast<CharSetNode *>(b)->chars;
|
|
|
|
- for (Chars::iterator i = from->begin(); i != from->end(); i++)
|
|
|
|
- to->insert(*i);
|
2011-03-25 09:04:51 +01:00
|
|
|
- return b;
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
-
|
|
|
|
- //return ???;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-static Node *alt_to_charsets(Node *t, int dir)
|
|
|
|
-{
|
|
|
|
-/*
|
|
|
|
- Node *first = NULL;
|
|
|
|
- Node *p = t;
|
|
|
|
- Node *i = t;
|
|
|
|
- for (;dynamic_cast<AltNode *>(i);) {
|
|
|
|
- if (dynamic_cast<CharNode *>(i->child[dir]) ||
|
|
|
|
- dynamic_cast<CharNodeSet *>(i->child[dir])) {
|
|
|
|
- if (!first) {
|
|
|
|
- first = i;
|
|
|
|
- p = i;
|
|
|
|
- i = i->child[!dir];
|
|
|
|
- } else {
|
|
|
|
- first->child[dir] = merge_charset(first->child[dir],
|
|
|
|
- i->child[dir]);
|
2011-03-25 09:04:51 +01:00
|
|
|
- p->child[!dir] = i->child[!dir];
|
2011-01-17 17:43:05 +01:00
|
|
|
- Node *tmp = i;
|
2011-03-25 09:04:51 +01:00
|
|
|
- i = tmp->child[!dir];
|
|
|
|
- tmp->child[!dir] = NULL;
|
2011-01-17 17:43:05 +01:00
|
|
|
- tmp->release();
|
|
|
|
- }
|
|
|
|
- } else {
|
|
|
|
- p = i;
|
|
|
|
- i = i->child[!dir];
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- // last altnode of chain check other dir as well
|
|
|
|
- if (first && (dynamic_cast<charNode *>(i) ||
|
|
|
|
- dynamic_cast<charNodeSet *>(i))) {
|
|
|
|
-
|
|
|
|
- }
|
|
|
|
-*/
|
|
|
|
-
|
|
|
|
-/*
|
|
|
|
- if (dynamic_cast<CharNode *>(t->child[dir]) ||
|
|
|
|
- dynamic_cast<CharSetNode *>(t->child[dir]))
|
|
|
|
- char_test = true;
|
|
|
|
- (char_test &&
|
|
|
|
- (dynamic_cast<CharNode *>(i->child[dir]) ||
|
|
|
|
- dynamic_cast<CharSetNode *>(i->child[dir])))) {
|
|
|
|
-*/
|
|
|
|
- return t;
|
|
|
|
-}
|
|
|
|
-#endif
|
|
|
|
-
|
|
|
|
-static Node *basic_alt_factor(Node *t, int dir)
|
|
|
|
-{
|
|
|
|
- if (!dynamic_cast<AltNode *>(t))
|
|
|
|
- return t;
|
|
|
|
-
|
|
|
|
- if (t->child[dir]->eq(t->child[!dir])) {
|
|
|
|
- // (a | a) -> a
|
2011-03-25 09:04:51 +01:00
|
|
|
- Node *tmp = t->child[dir];
|
|
|
|
- t->child[dir] = NULL;
|
2011-01-17 17:43:05 +01:00
|
|
|
- t->release();
|
|
|
|
- return tmp;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- // (ab) | (ac) -> a(b|c)
|
|
|
|
- if (dynamic_cast<CatNode *>(t->child[dir]) &&
|
|
|
|
- dynamic_cast<CatNode *>(t->child[!dir]) &&
|
|
|
|
- t->child[dir]->child[dir]->eq(t->child[!dir]->child[dir])) {
|
|
|
|
- // (ab) | (ac) -> a(b|c)
|
|
|
|
- Node *left = t->child[dir];
|
|
|
|
- Node *right = t->child[!dir];
|
|
|
|
- t->child[dir] = left->child[!dir];
|
2011-03-25 09:04:51 +01:00
|
|
|
- t->child[!dir] = right->child[!dir];
|
|
|
|
- right->child[!dir] = NULL;
|
2011-01-17 17:43:05 +01:00
|
|
|
- right->release();
|
2011-03-25 09:04:51 +01:00
|
|
|
- left->child[!dir] = t;
|
2011-01-17 17:43:05 +01:00
|
|
|
- return left;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- // a | (ab) -> a (E | b) -> a (b | E)
|
|
|
|
- if (dynamic_cast<CatNode *>(t->child[!dir]) &&
|
|
|
|
- t->child[dir]->eq(t->child[!dir]->child[dir])) {
|
|
|
|
- Node *c = t->child[!dir];
|
|
|
|
- t->child[dir]->release();
|
|
|
|
- t->child[dir] = c->child[!dir];
|
2011-03-25 09:04:51 +01:00
|
|
|
- t->child[!dir] = &epsnode;
|
2011-01-17 17:43:05 +01:00
|
|
|
- c->child[!dir] = t;
|
|
|
|
- return c;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- // ab | (a) -> a (b | E)
|
|
|
|
- if (dynamic_cast<CatNode *>(t->child[dir]) &&
|
|
|
|
- t->child[dir]->child[dir]->eq(t->child[!dir])) {
|
|
|
|
- Node *c = t->child[dir];
|
|
|
|
- t->child[!dir]->release();
|
|
|
|
- t->child[dir] = c->child[!dir];
|
2011-03-25 09:04:51 +01:00
|
|
|
- t->child[!dir] = &epsnode;
|
2011-01-17 17:43:05 +01:00
|
|
|
- c->child[!dir] = t;
|
|
|
|
- return c;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- return t;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-static Node *basic_simplify(Node *t, int dir)
|
|
|
|
-{
|
|
|
|
- if (dynamic_cast<CatNode *>(t) &&
|
2011-03-25 09:04:51 +01:00
|
|
|
- &epsnode == t->child[!dir]) {
|
2011-01-17 17:43:05 +01:00
|
|
|
- // aE -> a
|
2011-03-25 09:04:51 +01:00
|
|
|
- Node *tmp = t->child[dir];
|
|
|
|
- t->child[dir] = NULL;
|
2011-01-17 17:43:05 +01:00
|
|
|
- t->release();
|
|
|
|
- return tmp;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- return basic_alt_factor(t, dir);
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-/*
|
|
|
|
- * assumes a normalized tree. reductions shown for left normalization
|
|
|
|
- * aE -> a
|
|
|
|
- * (a | a) -> a
|
|
|
|
- ** factoring patterns
|
|
|
|
- * a | (a | b) -> (a | b)
|
|
|
|
- * a | (ab) -> a (E | b) -> a (b | E)
|
|
|
|
- * (ab) | (ac) -> a(b|c)
|
|
|
|
- *
|
|
|
|
- * returns t - if no simplifications were made
|
|
|
|
- * a new root node - if simplifications were made
|
|
|
|
- */
|
|
|
|
-Node *simplify_tree_base(Node *t, int dir, bool &mod)
|
|
|
|
-{
|
|
|
|
- if (dynamic_cast<ImportantNode *>(t))
|
|
|
|
- return t;
|
|
|
|
-
|
|
|
|
- for (int i=0; i < 2; i++) {
|
|
|
|
- if (t->child[i]) {
|
|
|
|
- Node *c = simplify_tree_base(t->child[i], dir, mod);
|
|
|
|
- if (c != t->child[i]) {
|
|
|
|
- t->child[i] = c;
|
|
|
|
- mod = true;
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- // only iterate on loop if modification made
|
|
|
|
- for (;; mod = true) {
|
|
|
|
-
|
|
|
|
- Node *tmp = basic_simplify(t, dir);
|
|
|
|
- if (tmp != t) {
|
|
|
|
- t = tmp;
|
|
|
|
- continue;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
-
|
|
|
|
- /* all tests after this must meet 2 alt node condition */
|
|
|
|
- if (!dynamic_cast<AltNode *>(t) ||
|
|
|
|
- !dynamic_cast<AltNode *>(t->child[!dir]))
|
|
|
|
- break;
|
|
|
|
-
|
|
|
|
- // a | (a | b) -> (a | b)
|
|
|
|
- // a | (b | (c | a)) -> (b | (c | a))
|
|
|
|
- Node *p = t;
|
|
|
|
- Node *i = t->child[!dir];
|
|
|
|
- for (;dynamic_cast<AltNode *>(i); p = i, i = i->child[!dir]) {
|
|
|
|
- if (t->child[dir]->eq(i->child[dir])) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- Node *tmp = t->child[!dir];
|
|
|
|
- t->child[!dir] = NULL;
|
|
|
|
- t->release();
|
|
|
|
- t = tmp;
|
2011-01-17 17:43:05 +01:00
|
|
|
- continue;
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- // last altnode of chain check other dir as well
|
|
|
|
- if (t->child[dir]->eq(p->child[!dir])) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- Node *tmp = t->child[!dir];
|
|
|
|
- t->child[!dir] = NULL;
|
|
|
|
- t->release();
|
|
|
|
- t = tmp;
|
2011-01-17 17:43:05 +01:00
|
|
|
- continue;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- //exact match didn't work, try factoring front
|
|
|
|
- //a | (ac | (ad | () -> (a (E | c)) | (...)
|
|
|
|
- //ab | (ac | (...)) -> (a (b | c)) | (...)
|
|
|
|
- //ab | (a | (...)) -> (a (b | E)) | (...)
|
|
|
|
- Node *pp;
|
|
|
|
- int count = 0;
|
|
|
|
- Node *subject = t->child[dir];
|
|
|
|
- Node *a = subject;
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (dynamic_cast<CatNode *>(subject))
|
|
|
|
- a = subject->child[dir];
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
|
|
|
- for (pp = p = t, i = t->child[!dir];
|
|
|
|
- dynamic_cast<AltNode *>(i); ) {
|
|
|
|
- if ((dynamic_cast<CatNode *>(i->child[dir]) &&
|
|
|
|
- a->eq(i->child[dir]->child[dir])) ||
|
|
|
|
- (a->eq(i->child[dir]))) {
|
|
|
|
- // extract matching alt node
|
|
|
|
- p->child[!dir] = i->child[!dir];
|
|
|
|
- i->child[!dir] = subject;
|
|
|
|
- subject = basic_simplify(i, dir);
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (dynamic_cast<CatNode *>(subject))
|
|
|
|
- a = subject->child[dir];
|
|
|
|
- else
|
|
|
|
- a = subject;
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
|
|
|
- i = p->child[!dir];
|
|
|
|
- count++;
|
|
|
|
- } else {
|
|
|
|
- pp = p; p = i; i = i->child[!dir];
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- // last altnode in chain check other dir as well
|
|
|
|
- if ((dynamic_cast<CatNode *>(i) &&
|
|
|
|
- a->eq(i->child[dir])) ||
|
|
|
|
- (a->eq(i))) {
|
|
|
|
- count++;
|
|
|
|
- if (t == p) {
|
|
|
|
- t->child[dir] = subject;
|
|
|
|
- t = basic_simplify(t, dir);
|
|
|
|
- } else {
|
|
|
|
- t->child[dir] = p->child[dir];
|
|
|
|
- p->child[dir] = subject;
|
|
|
|
- pp->child[!dir] = basic_simplify(p, dir);
|
|
|
|
- }
|
|
|
|
- } else {
|
|
|
|
- t->child[dir] = i;
|
|
|
|
- p->child[!dir] = subject;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- if (count == 0)
|
|
|
|
- break;
|
|
|
|
- }
|
|
|
|
- return t;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-int debug_tree(Node *t)
|
|
|
|
-{
|
|
|
|
- int nodes = 1;
|
|
|
|
-
|
|
|
|
- if (!dynamic_cast<ImportantNode *>(t)) {
|
|
|
|
- if (t->child[0])
|
|
|
|
- nodes += debug_tree(t->child[0]);
|
|
|
|
- if (t->child[1])
|
|
|
|
- nodes += debug_tree(t->child[1]);
|
|
|
|
- }
|
|
|
|
- return nodes;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-struct node_counts {
|
|
|
|
- int charnode;
|
|
|
|
- int charset;
|
|
|
|
- int notcharset;
|
|
|
|
- int alt;
|
|
|
|
- int plus;
|
|
|
|
- int star;
|
|
|
|
- int any;
|
|
|
|
- int cat;
|
|
|
|
-};
|
|
|
|
-
|
|
|
|
-
|
|
|
|
-static void count_tree_nodes(Node *t, struct node_counts *counts)
|
|
|
|
-{
|
|
|
|
- if (dynamic_cast<AltNode *>(t)) {
|
|
|
|
- counts->alt++;
|
|
|
|
- count_tree_nodes(t->child[0], counts);
|
|
|
|
- count_tree_nodes(t->child[1], counts);
|
|
|
|
- } else if (dynamic_cast<CatNode *>(t)) {
|
|
|
|
- counts->cat++;
|
|
|
|
- count_tree_nodes(t->child[0], counts);
|
|
|
|
- count_tree_nodes(t->child[1], counts);
|
|
|
|
- } else if (dynamic_cast<PlusNode *>(t)) {
|
|
|
|
- counts->plus++;
|
|
|
|
- count_tree_nodes(t->child[0], counts);
|
|
|
|
- } else if (dynamic_cast<StarNode *>(t)) {
|
|
|
|
- counts->star++;
|
|
|
|
- count_tree_nodes(t->child[0], counts);
|
|
|
|
- } else if (dynamic_cast<CharNode *>(t)) {
|
|
|
|
- counts->charnode++;
|
|
|
|
- } else if (dynamic_cast<AnyCharNode *>(t)) {
|
|
|
|
- counts->any++;
|
|
|
|
- } else if (dynamic_cast<CharSetNode *>(t)) {
|
|
|
|
- counts->charset++;
|
|
|
|
- } else if (dynamic_cast<NotCharSetNode *>(t)) {
|
|
|
|
- counts->notcharset++;
|
|
|
|
- }
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-#include "stdio.h"
|
|
|
|
-#include "stdint.h"
|
|
|
|
-#include "apparmor_re.h"
|
|
|
|
-
|
|
|
|
-Node *simplify_tree(Node *t, dfaflags_t flags)
|
|
|
|
-{
|
|
|
|
- bool update;
|
|
|
|
-
|
|
|
|
- if (flags & DFA_DUMP_TREE_STATS) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- struct node_counts counts = { 0, 0, 0, 0, 0, 0, 0, 0 };
|
2011-01-17 17:43:05 +01:00
|
|
|
- count_tree_nodes(t, &counts);
|
|
|
|
- fprintf(stderr, "expr tree: c %d, [] %d, [^] %d, | %d, + %d, * %d, . %d, cat %d\n", counts.charnode, counts.charset, counts.notcharset, counts.alt, counts.plus, counts.star, counts.any, counts.cat);
|
|
|
|
- }
|
|
|
|
- do {
|
|
|
|
- update = false;
|
|
|
|
- //default to right normalize first as this reduces the number
|
|
|
|
- //of trailing nodes which might follow an internal *
|
|
|
|
- //or **, which is where state explosion can happen
|
|
|
|
- //eg. in one test this makes the difference between
|
|
|
|
- // the dfa having about 7 thousands states,
|
|
|
|
- // and it having about 1.25 million states
|
|
|
|
- int dir = 1;
|
|
|
|
- if (flags & DFA_CONTROL_TREE_LEFT)
|
|
|
|
- dir = 0;
|
|
|
|
- for (int count = 0; count < 2; count++) {
|
|
|
|
- bool modified;
|
|
|
|
- do {
|
|
|
|
- modified = false;
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (flags & DFA_CONTROL_TREE_NORMAL)
|
2011-01-17 17:43:05 +01:00
|
|
|
- normalize_tree(t, dir);
|
|
|
|
- t = simplify_tree_base(t, dir, modified);
|
|
|
|
- if (modified)
|
|
|
|
- update = true;
|
|
|
|
- } while (modified);
|
|
|
|
- if (flags & DFA_CONTROL_TREE_LEFT)
|
|
|
|
- dir++;
|
|
|
|
- else
|
|
|
|
- dir--;
|
|
|
|
- }
|
|
|
|
- } while(update);
|
|
|
|
- if (flags & DFA_DUMP_TREE_STATS) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- struct node_counts counts = { 0, 0, 0, 0, 0, 0, 0, 0 };
|
2011-01-17 17:43:05 +01:00
|
|
|
- count_tree_nodes(t, &counts);
|
|
|
|
- fprintf(stderr, "simplified expr tree: c %d, [] %d, [^] %d, | %d, + %d, * %d, . %d, cat %d\n", counts.charnode, counts.charset, counts.notcharset, counts.alt, counts.plus, counts.star, counts.any, counts.cat);
|
|
|
|
- }
|
|
|
|
- return t;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-
|
|
|
|
-%}
|
|
|
|
-
|
|
|
|
-%union {
|
|
|
|
- char c;
|
|
|
|
- Node *node;
|
|
|
|
- Chars *cset;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-%{
|
|
|
|
- void regexp_error(Node **, const char *, const char *);
|
|
|
|
-# define YYLEX_PARAM &text
|
|
|
|
- int regexp_lex(YYSTYPE *, const char **);
|
|
|
|
-
|
|
|
|
- static inline Chars*
|
|
|
|
- insert_char(Chars* cset, uchar a)
|
|
|
|
- {
|
|
|
|
- cset->insert(a);
|
|
|
|
- return cset;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- static inline Chars*
|
|
|
|
- insert_char_range(Chars* cset, uchar a, uchar b)
|
|
|
|
- {
|
|
|
|
- if (a > b)
|
|
|
|
- swap(a, b);
|
|
|
|
- for (uchar i = a; i <= b; i++)
|
|
|
|
- cset->insert(i);
|
|
|
|
- return cset;
|
|
|
|
- }
|
|
|
|
-%}
|
|
|
|
-
|
|
|
|
-%pure-parser
|
|
|
|
-/* %error-verbose */
|
|
|
|
-%parse-param {Node **root}
|
|
|
|
-%parse-param {const char *text}
|
|
|
|
-%name-prefix = "regexp_"
|
|
|
|
-
|
|
|
|
-%token <c> CHAR
|
|
|
|
-%type <c> regex_char cset_char1 cset_char cset_charN
|
|
|
|
-%type <cset> charset cset_chars
|
|
|
|
-%type <node> regexp expr terms0 terms qterm term
|
|
|
|
-
|
|
|
|
-/**
|
|
|
|
- * Note: destroy all nodes upon failure, but *not* the start symbol once
|
|
|
|
- * parsing succeeds!
|
|
|
|
- */
|
|
|
|
-%destructor { $$->release(); } expr terms0 terms qterm term
|
|
|
|
-
|
|
|
|
-%%
|
|
|
|
-
|
|
|
|
-/* FIXME: Does not parse "[--]", "[---]", "[^^-x]". I don't actually know
|
|
|
|
- which precise grammer Perl regexps use, and rediscovering that
|
|
|
|
- is proving to be painful. */
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
-regexp : /* empty */ { *root = $$ = &epsnode; }
|
2011-01-17 17:43:05 +01:00
|
|
|
- | expr { *root = $$ = $1; }
|
|
|
|
- ;
|
|
|
|
-
|
|
|
|
-expr : terms
|
|
|
|
- | expr '|' terms0 { $$ = new AltNode($1, $3); }
|
2011-03-25 09:04:51 +01:00
|
|
|
- | '|' terms0 { $$ = new AltNode(&epsnode, $2); }
|
2011-01-17 17:43:05 +01:00
|
|
|
- ;
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
-terms0 : /* empty */ { $$ = &epsnode; }
|
2011-01-17 17:43:05 +01:00
|
|
|
- | terms
|
|
|
|
- ;
|
|
|
|
-
|
|
|
|
-terms : qterm
|
|
|
|
- | terms qterm { $$ = new CatNode($1, $2); }
|
|
|
|
- ;
|
|
|
|
-
|
|
|
|
-qterm : term
|
|
|
|
- | term '*' { $$ = new StarNode($1); }
|
|
|
|
- | term '+' { $$ = new PlusNode($1); }
|
|
|
|
- ;
|
|
|
|
-
|
|
|
|
-term : '.' { $$ = new AnyCharNode; }
|
|
|
|
- | regex_char { $$ = new CharNode($1); }
|
|
|
|
- | '[' charset ']' { $$ = new CharSetNode(*$2);
|
|
|
|
- delete $2; }
|
|
|
|
- | '[' '^' charset ']'
|
|
|
|
- { $$ = new NotCharSetNode(*$3);
|
|
|
|
- delete $3; }
|
|
|
|
- | '[' '^' '^' cset_chars ']'
|
|
|
|
- { $4->insert('^');
|
|
|
|
- $$ = new NotCharSetNode(*$4);
|
|
|
|
- delete $4; }
|
|
|
|
- | '(' regexp ')' { $$ = $2; }
|
|
|
|
- ;
|
|
|
|
-
|
|
|
|
-regex_char : CHAR
|
|
|
|
- | '^' { $$ = '^'; }
|
|
|
|
- | '-' { $$ = '-'; }
|
|
|
|
- | ']' { $$ = ']'; }
|
|
|
|
- ;
|
|
|
|
-
|
|
|
|
-charset : cset_char1 cset_chars
|
|
|
|
- { $$ = insert_char($2, $1); }
|
|
|
|
- | cset_char1 '-' cset_charN cset_chars
|
|
|
|
- { $$ = insert_char_range($4, $1, $3); }
|
|
|
|
- ;
|
|
|
|
-
|
|
|
|
-cset_chars : /* nothing */ { $$ = new Chars; }
|
|
|
|
- | cset_chars cset_charN
|
|
|
|
- { $$ = insert_char($1, $2); }
|
|
|
|
- | cset_chars cset_charN '-' cset_charN
|
|
|
|
- { $$ = insert_char_range($1, $2, $4); }
|
|
|
|
- ;
|
|
|
|
-
|
|
|
|
-cset_char1 : cset_char
|
|
|
|
- | ']' { $$ = ']'; }
|
|
|
|
- | '-' { $$ = '-'; }
|
|
|
|
- ;
|
|
|
|
-
|
|
|
|
-cset_charN : cset_char
|
|
|
|
- | '^' { $$ = '^'; }
|
|
|
|
- ;
|
|
|
|
-
|
|
|
|
-cset_char : CHAR
|
|
|
|
- | '[' { $$ = '['; }
|
|
|
|
- | '*' { $$ = '*'; }
|
|
|
|
- | '+' { $$ = '+'; }
|
|
|
|
- | '.' { $$ = '.'; }
|
|
|
|
- | '|' { $$ = '|'; }
|
|
|
|
- | '(' { $$ = '('; }
|
|
|
|
- | ')' { $$ = ')'; }
|
|
|
|
- ;
|
|
|
|
-
|
|
|
|
-%%
|
|
|
|
-
|
|
|
|
-#include <string.h>
|
|
|
|
-#include <getopt.h>
|
|
|
|
-#include <assert.h>
|
|
|
|
-#include <arpa/inet.h>
|
|
|
|
-
|
|
|
|
-#include <iostream>
|
|
|
|
-#include <fstream>
|
|
|
|
-
|
|
|
|
-#include "../immunix.h"
|
|
|
|
-
|
|
|
|
-/* Traverse the syntax tree depth-first in an iterator-like manner. */
|
|
|
|
-class depth_first_traversal {
|
2011-03-25 09:04:51 +01:00
|
|
|
- stack<Node *> pos;
|
|
|
|
- void push_left(Node *node)
|
|
|
|
- {
|
|
|
|
- pos.push(node);
|
|
|
|
-
|
|
|
|
- while (dynamic_cast<InnerNode *>(node)) {
|
|
|
|
- pos.push(node->child[0]);
|
|
|
|
- node = node->child[0];
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
-
|
2011-01-17 17:43:05 +01:00
|
|
|
-public:
|
|
|
|
- depth_first_traversal(Node *node) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- push_left(node);
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- Node *operator*()
|
|
|
|
- {
|
2011-03-25 09:04:51 +01:00
|
|
|
- return pos.top();
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- Node* operator->()
|
|
|
|
- {
|
2011-03-25 09:04:51 +01:00
|
|
|
- return pos.top();
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- operator bool()
|
|
|
|
- {
|
2011-03-25 09:04:51 +01:00
|
|
|
- return !pos.empty();
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- void operator++(int)
|
|
|
|
- {
|
2011-03-25 09:04:51 +01:00
|
|
|
- Node *last = pos.top();
|
|
|
|
- pos.pop();
|
|
|
|
-
|
|
|
|
- if (!pos.empty()) {
|
|
|
|
- /* no need to dynamic cast, as we just popped a node so the top node
|
|
|
|
- * must be an inner node */
|
|
|
|
- InnerNode *node = (InnerNode *)(pos.top());
|
|
|
|
-
|
|
|
|
- if (node->child[1] && node->child[1] != last) {
|
|
|
|
- push_left(node->child[1]);
|
|
|
|
- }
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- }
|
|
|
|
-};
|
|
|
|
-
|
|
|
|
-ostream& operator<<(ostream& os, Node& node)
|
|
|
|
-{
|
|
|
|
- node.dump(os);
|
|
|
|
- return os;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-ostream& operator<<(ostream& os, uchar c)
|
|
|
|
-{
|
|
|
|
- const char *search = "\a\033\f\n\r\t|*+[](). ",
|
|
|
|
- *replace = "aefnrt|*+[](). ", *s;
|
|
|
|
-
|
|
|
|
- if ((s = strchr(search, c)) && *s != '\0')
|
|
|
|
- os << '\\' << replace[s - search];
|
|
|
|
- else if (c < 32 || c >= 127)
|
|
|
|
- os << '\\' << '0' << char('0' + (c >> 6))
|
|
|
|
- << char('0' + ((c >> 3) & 7)) << char('0' + (c & 7));
|
|
|
|
- else
|
|
|
|
- os << (char)c;
|
|
|
|
- return os;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-int
|
|
|
|
-octdigit(char c)
|
|
|
|
-{
|
|
|
|
- if (c >= '0' && c <= '7')
|
|
|
|
- return c - '0';
|
|
|
|
- return -1;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-int
|
|
|
|
-hexdigit(char c)
|
|
|
|
-{
|
|
|
|
- if (c >= '0' && c <= '9')
|
|
|
|
- return c - '0';
|
|
|
|
- else if (c >= 'A' && c <= 'F')
|
|
|
|
- return 10 + c - 'A';
|
|
|
|
- else if (c >= 'a' && c <= 'f')
|
|
|
|
-		return 10 + c - 'a';
|
|
|
|
- else
|
|
|
|
- return -1;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-int
|
|
|
|
-regexp_lex(YYSTYPE *val, const char **pos)
|
|
|
|
-{
|
|
|
|
- int c;
|
|
|
|
-
|
|
|
|
- val->c = **pos;
|
|
|
|
- switch(*(*pos)++) {
|
|
|
|
- case '\0':
|
|
|
|
- (*pos)--;
|
|
|
|
- return 0;
|
|
|
|
-
|
|
|
|
- case '*': case '+': case '.': case '|': case '^': case '-':
|
|
|
|
- case '[': case ']': case '(' : case ')':
|
|
|
|
- return *(*pos - 1);
|
|
|
|
-
|
|
|
|
- case '\\':
|
|
|
|
- val->c = **pos;
|
|
|
|
- switch(*(*pos)++) {
|
|
|
|
- case '\0':
|
|
|
|
- (*pos)--;
|
|
|
|
- /* fall through */
|
|
|
|
- case '\\':
|
|
|
|
- val->c = '\\';
|
|
|
|
- break;
|
|
|
|
-
|
|
|
|
- case '0':
|
|
|
|
- val->c = 0;
|
|
|
|
- if ((c = octdigit(**pos)) >= 0) {
|
|
|
|
- val->c = c;
|
|
|
|
- (*pos)++;
|
|
|
|
- }
|
|
|
|
- if ((c = octdigit(**pos)) >= 0) {
|
|
|
|
- val->c = (val->c << 3) + c;
|
|
|
|
- (*pos)++;
|
|
|
|
- }
|
|
|
|
- if ((c = octdigit(**pos)) >= 0) {
|
|
|
|
- val->c = (val->c << 3) + c;
|
|
|
|
- (*pos)++;
|
|
|
|
- }
|
|
|
|
- break;
|
|
|
|
-
|
|
|
|
- case 'x':
|
|
|
|
- val->c = 0;
|
|
|
|
- if ((c = hexdigit(**pos)) >= 0) {
|
|
|
|
- val->c = c;
|
|
|
|
- (*pos)++;
|
|
|
|
- }
|
|
|
|
- if ((c = hexdigit(**pos)) >= 0) {
|
|
|
|
- val->c = (val->c << 4) + c;
|
|
|
|
- (*pos)++;
|
|
|
|
- }
|
|
|
|
- break;
|
|
|
|
-
|
|
|
|
- case 'a':
|
|
|
|
- val->c = '\a';
|
|
|
|
- break;
|
|
|
|
-
|
|
|
|
- case 'e':
|
|
|
|
- val->c = 033 /* ESC */;
|
|
|
|
- break;
|
|
|
|
-
|
|
|
|
- case 'f':
|
|
|
|
- val->c = '\f';
|
|
|
|
- break;
|
|
|
|
-
|
|
|
|
- case 'n':
|
|
|
|
- val->c = '\n';
|
|
|
|
- break;
|
|
|
|
-
|
|
|
|
- case 'r':
|
|
|
|
- val->c = '\r';
|
|
|
|
- break;
|
|
|
|
-
|
|
|
|
- case 't':
|
|
|
|
- val->c = '\t';
|
|
|
|
- break;
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- return CHAR;
|
|
|
|
-}
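A small stand-alone check (illustrative only; copies of the helpers above, not part of the patch) of the escape decoding regexp_lex() performs: "\0" consumes up to three further octal digits and "\x" up to two hex digits.

#include <stdio.h>

static int octdigit(char c)
{
	return (c >= '0' && c <= '7') ? c - '0' : -1;
}

static int hexdigit(char c)
{
	if (c >= '0' && c <= '9')
		return c - '0';
	if (c >= 'a' && c <= 'f')
		return 10 + c - 'a';
	if (c >= 'A' && c <= 'F')
		return 10 + c - 'A';
	return -1;
}

int main(void)
{
	int c = 0;

	/* "\0101": three octal digits accumulate as in the case '0' branch */
	for (const char *p = "101"; octdigit(*p) >= 0; p++)
		c = (c << 3) + octdigit(*p);
	printf("\\0101 -> %d ('%c')\n", c, c);	/* 65 ('A') */

	/* "\x2e": two hex digits accumulate as in the case 'x' branch */
	c = (hexdigit('2') << 4) + hexdigit('e');
	printf("\\x2e  -> %d ('%c')\n", c, c);	/* 46 ('.') */
	return 0;
}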
|
|
|
|
-
|
|
|
|
-void
|
2011-03-25 09:04:51 +01:00
|
|
|
-regexp_error(Node ** __attribute__((unused)),
|
|
|
|
- const char *text __attribute__((unused)),
|
|
|
|
- const char *error __attribute__((unused)))
|
2011-01-17 17:43:05 +01:00
|
|
|
-{
|
|
|
|
- /* We don't want the library to print error messages. */
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-/**
|
|
|
|
- * Assign a consecutive number to each node. This is only needed for
|
|
|
|
- * pretty-printing the debug output.
|
2011-03-25 09:04:51 +01:00
|
|
|
- *
|
|
|
|
- * The epsnode is labeled 0. Start labeling at 1
|
2011-01-17 17:43:05 +01:00
|
|
|
- */
|
|
|
|
-void label_nodes(Node *root)
|
|
|
|
-{
|
2011-03-25 09:04:51 +01:00
|
|
|
- int nodes = 1;
|
2011-01-17 17:43:05 +01:00
|
|
|
- for (depth_first_traversal i(root); i; i++)
|
|
|
|
- i->label = nodes++;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-/**
|
|
|
|
- * Text-dump a state (for debugging).
|
|
|
|
- */
|
2011-03-25 09:04:51 +01:00
|
|
|
-ostream& operator<<(ostream& os, const NodeSet& state)
|
2011-01-17 17:43:05 +01:00
|
|
|
-{
|
|
|
|
- os << '{';
|
|
|
|
- if (!state.empty()) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- NodeSet::iterator i = state.begin();
|
2011-01-17 17:43:05 +01:00
|
|
|
- for(;;) {
|
|
|
|
- os << (*i)->label;
|
|
|
|
- if (++i == state.end())
|
|
|
|
- break;
|
|
|
|
- os << ',';
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- os << '}';
|
|
|
|
- return os;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-/**
|
|
|
|
- * Text-dump the syntax tree (for debugging).
|
|
|
|
- */
|
|
|
|
-void dump_syntax_tree(ostream& os, Node *node) {
|
|
|
|
- for (depth_first_traversal i(node); i; i++) {
|
|
|
|
- os << i->label << '\t';
|
|
|
|
- if ((*i)->child[0] == 0)
|
|
|
|
- os << **i << '\t' << (*i)->followpos << endl;
|
|
|
|
- else {
|
|
|
|
- if ((*i)->child[1] == 0)
|
|
|
|
- os << (*i)->child[0]->label << **i;
|
|
|
|
- else
|
|
|
|
- os << (*i)->child[0]->label << **i
|
|
|
|
- << (*i)->child[1]->label;
|
|
|
|
- os << '\t' << (*i)->firstpos
|
|
|
|
- << (*i)->lastpos << endl;
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- os << endl;
|
|
|
|
-}
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
-/* Comparison operator for sets of <NodeSet *>.
|
|
|
|
- * Compare set hashes, and if the sets have the same hash
|
|
|
|
- * fall back to an element-wise comparison of the sets of <Node *>; that comparison
|
|
|
|
- * allows us to distinguish the Sets of <Node *> we have already seen from
|
|
|
|
- * new ones when constructing the DFA.
|
|
|
|
- */
|
|
|
|
-struct deref_less_than {
|
|
|
|
- bool operator()(pair <unsigned long, NodeSet *> const & lhs, pair <unsigned long, NodeSet *> const & rhs) const
|
|
|
|
- {
|
|
|
|
- if (lhs.first == rhs.first)
|
|
|
|
- return *(lhs.second) < *(rhs.second);
|
|
|
|
- else
|
|
|
|
- return lhs.first < rhs.first;
|
|
|
|
- }
|
2011-01-17 17:43:05 +01:00
|
|
|
-};
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
-unsigned long hash_NodeSet(const NodeSet *ns)
|
|
|
|
-{
|
|
|
|
- unsigned long hash = 5381;
|
|
|
|
-
|
|
|
|
- for (NodeSet::iterator i = ns->begin(); i != ns->end(); i++) {
|
|
|
|
- hash = ((hash << 5) + hash) + (unsigned long) *i;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- return hash;
|
|
|
|
-}
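The two helpers above (deref_less_than and hash_NodeSet) exist so duplicate proto-states can be found cheaply: the djb2 hash over the node pointers is compared first, and only sets whose hashes collide are deep-compared. A minimal stand-alone sketch of that idea, using std::set<int> as a stand-in for NodeSet (names here are illustrative, not the parser's API):

#include <iostream>
#include <map>
#include <set>
#include <utility>
using namespace std;

typedef set<int> ToySet;

/* djb2 over the elements, mirroring hash_NodeSet() above */
static unsigned long toy_hash(const ToySet *s)
{
	unsigned long hash = 5381;
	for (ToySet::const_iterator i = s->begin(); i != s->end(); i++)
		hash = ((hash << 5) + hash) + (unsigned long)*i;
	return hash;
}

/* compare the cheap hash first, deep-compare only on collision */
struct toy_deref_less_than {
	bool operator()(pair<unsigned long, ToySet *> const &lhs,
			pair<unsigned long, ToySet *> const &rhs) const
	{
		if (lhs.first == rhs.first)
			return *(lhs.second) < *(rhs.second);
		return lhs.first < rhs.first;
	}
};

int main(void)
{
	map<pair<unsigned long, ToySet *>, int, toy_deref_less_than> seen;
	ToySet a, b;
	a.insert(1); a.insert(2);
	b.insert(1); b.insert(2);	/* same contents, different object */

	seen.insert(make_pair(make_pair(toy_hash(&a), &a), 1));
	/* the lookup with b still hits a's entry: equal hash, equal contents */
	cout << (seen.find(make_pair(toy_hash(&b), &b)) != seen.end()) << endl;
	return 0;
}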
|
|
|
|
-
|
|
|
|
-class State;
|
2011-01-17 17:43:05 +01:00
|
|
|
-/**
|
2011-03-25 09:04:51 +01:00
|
|
|
- * State cases are identical to NodeCases except they map to State *
|
|
|
|
- * instead of NodeSet.
|
|
|
|
- * Out-edges from a state to another: we store the follow State
|
|
|
|
- * for each input character that is not a default match in cases and
|
|
|
|
- * default matches in otherwise as well as in all matching explicit cases
|
|
|
|
- * This avoids enumerating all the explicit transitions for default matches.
|
2011-01-17 17:43:05 +01:00
|
|
|
- */
|
2011-03-25 09:04:51 +01:00
|
|
|
-typedef struct Cases {
|
|
|
|
- typedef map<uchar, State *>::iterator iterator;
|
|
|
|
- iterator begin() { return cases.begin(); }
|
|
|
|
- iterator end() { return cases.end(); }
|
|
|
|
-
|
|
|
|
- Cases() : otherwise(0) { }
|
|
|
|
- map<uchar, State *> cases;
|
|
|
|
- State *otherwise;
|
|
|
|
-} Cases;
|
|
|
|
-
|
2011-01-17 17:43:05 +01:00
|
|
|
-typedef list<State *> Partition;
|
2011-03-25 09:04:51 +01:00
|
|
|
-
|
|
|
|
-uint32_t accept_perms(NodeSet *state, uint32_t *audit_ctl, int *error);
|
|
|
|
-
|
|
|
|
-/*
|
|
|
|
- * State - DFA individual state information
|
|
|
|
- * label: a unique label to identify the state used for pretty printing
|
|
|
|
- *        the non-matching state is set up to have label == 0 and
|
|
|
|
- *        the start state is set up to have label == 1
|
|
|
|
- * audit: the audit permission mask for the state
|
|
|
|
- * accept: the accept permissions for the state
|
|
|
|
- * cases: set of transitions from this state
|
|
|
|
- * partition: Is a temporary work variable used during dfa minimization.
|
|
|
|
- * it can be replaced with a map, but that is slower and uses more
|
|
|
|
- * memory.
|
|
|
|
- * nodes: Is a temporary work variable used during dfa creation. It can
|
|
|
|
- * be replaced by using the nodemap, but that is slower
|
|
|
|
- */
|
|
|
|
-class State {
|
|
|
|
-public:
|
|
|
|
- State() : label (0), audit(0), accept(0), cases(), nodes(NULL) { };
|
|
|
|
- State(int l): label (l), audit(0), accept(0), cases(), nodes(NULL) { };
|
|
|
|
- State(int l, NodeSet *n) throw (int):
|
|
|
|
- label(l), audit(0), accept(0), cases(), nodes(n)
|
|
|
|
- {
|
|
|
|
- int error;
|
|
|
|
-
|
|
|
|
- /* Compute permissions associated with the State. */
|
|
|
|
- accept = accept_perms(nodes, &audit, &error);
|
|
|
|
- if (error) {
|
|
|
|
-			cerr << "Failing on accept perms " << error << "\n";
|
|
|
|
- throw error;
|
|
|
|
- }
|
|
|
|
- };
|
|
|
|
-
|
|
|
|
- int label;
|
|
|
|
- uint32_t audit, accept;
|
|
|
|
- Cases cases;
|
|
|
|
- union {
|
|
|
|
- Partition *partition;
|
|
|
|
- NodeSet *nodes;
|
|
|
|
- };
|
|
|
|
-};
|
|
|
|
-
|
|
|
|
-ostream& operator<<(ostream& os, const State& state)
|
|
|
|
-{
|
|
|
|
- /* dump the state label */
|
|
|
|
- os << '{';
|
|
|
|
- os << state.label;
|
|
|
|
- os << '}';
|
|
|
|
- return os;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-typedef map<pair<unsigned long, NodeSet *>, State *, deref_less_than > NodeMap;
|
2011-01-17 17:43:05 +01:00
|
|
|
-/* Transitions in the DFA. */
|
2011-03-25 09:04:51 +01:00
|
|
|
-
|
|
|
|
-/* dfa_stats - structure to group various stats about dfa creation
|
|
|
|
- * duplicates - how many duplicate NodeSets were encountered and discarded
|
|
|
|
- * proto_max - maximum length of a NodeSet encountered during dfa construction
|
|
|
|
- * proto_sum - sum of NodeSet length during dfa construction. Used to find
|
|
|
|
- * average length.
|
|
|
|
- */
|
|
|
|
-typedef struct dfa_stats {
|
|
|
|
- unsigned int duplicates, proto_max, proto_sum;
|
|
|
|
-} dfa_stats_t;
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
|
|
|
-class DFA {
|
2011-03-25 09:04:51 +01:00
|
|
|
- void dump_node_to_dfa(void);
|
|
|
|
- State* add_new_state(NodeMap &nodemap, pair <unsigned long, NodeSet *> index, NodeSet *nodes, dfa_stats_t &stats);
|
|
|
|
- void update_state_transitions(NodeMap &nodemap, list <State *> &work_queue, State *state, dfa_stats_t &stats);
|
|
|
|
- State *find_target_state(NodeMap &nodemap, list <State *> &work_queue,
|
|
|
|
- NodeSet *nodes, dfa_stats_t &stats);
|
2011-01-17 17:43:05 +01:00
|
|
|
-public:
|
|
|
|
- DFA(Node *root, dfaflags_t flags);
|
|
|
|
- virtual ~DFA();
|
|
|
|
- void remove_unreachable(dfaflags_t flags);
|
2011-03-25 09:04:51 +01:00
|
|
|
- bool same_mappings(State *s1, State *s2);
|
2011-01-17 17:43:05 +01:00
|
|
|
- size_t hash_trans(State *s);
|
|
|
|
- void minimize(dfaflags_t flags);
|
|
|
|
- void dump(ostream& os);
|
|
|
|
- void dump_dot_graph(ostream& os);
|
2011-03-25 09:04:51 +01:00
|
|
|
- void dump_uniq_perms(const char *s);
|
2011-01-17 17:43:05 +01:00
|
|
|
- map<uchar, uchar> equivalence_classes(dfaflags_t flags);
|
|
|
|
- void apply_equivalence_classes(map<uchar, uchar>& eq);
|
|
|
|
- Node *root;
|
|
|
|
- State *nonmatching, *start;
|
2011-03-25 09:04:51 +01:00
|
|
|
- Partition states;
|
2011-01-17 17:43:05 +01:00
|
|
|
-};
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
-State* DFA::add_new_state(NodeMap &nodemap, pair <unsigned long, NodeSet *> index, NodeSet *nodes, dfa_stats_t &stats)
|
|
|
|
-{
|
|
|
|
- State *state = new State(nodemap.size(), nodes);
|
|
|
|
- states.push_back(state);
|
|
|
|
- nodemap.insert(make_pair(index, state));
|
|
|
|
- stats.proto_sum += nodes->size();
|
|
|
|
- if (nodes->size() > stats.proto_max)
|
|
|
|
- stats.proto_max = nodes->size();
|
|
|
|
- return state;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-State *DFA::find_target_state(NodeMap &nodemap, list <State *> &work_queue,
|
|
|
|
- NodeSet *nodes, dfa_stats_t &stats)
|
|
|
|
-{
|
|
|
|
- State *target;
|
|
|
|
-
|
|
|
|
- pair <unsigned long, NodeSet *> index = make_pair(hash_NodeSet(nodes), nodes);
|
|
|
|
-
|
|
|
|
- map<pair <unsigned long, NodeSet *>, State *, deref_less_than>::iterator x = nodemap.find(index);
|
|
|
|
-
|
|
|
|
- if (x == nodemap.end()) {
|
|
|
|
- /* set of nodes isn't known so create new state, and nodes to
|
|
|
|
- * state mapping
|
|
|
|
- */
|
|
|
|
- target = add_new_state(nodemap, index, nodes, stats);
|
|
|
|
- work_queue.push_back(target);
|
|
|
|
- } else {
|
|
|
|
- /* set of nodes already has a mapping so free this one */
|
|
|
|
- stats.duplicates++;
|
|
|
|
- delete (nodes);
|
|
|
|
- target = x->second;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- return target;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-void DFA::update_state_transitions(NodeMap &nodemap,
|
|
|
|
- list <State *> &work_queue, State *state,
|
|
|
|
- dfa_stats_t &stats)
|
|
|
|
-{
|
|
|
|
- /* Compute possible transitions for state->nodes. This is done by
|
|
|
|
- * iterating over all the nodes in state->nodes and combining the
|
|
|
|
- * transitions.
|
|
|
|
- *
|
|
|
|
- * The resultant transition set is a mapping of characters to
|
|
|
|
- * sets of nodes.
|
|
|
|
- */
|
|
|
|
- NodeCases cases;
|
|
|
|
- for (NodeSet::iterator i = state->nodes->begin(); i != state->nodes->end(); i++)
|
|
|
|
- (*i)->follow(cases);
|
|
|
|
-
|
|
|
|
- /* Now for each set of nodes in the computed transitions, make
|
|
|
|
- * sure that there is a state that maps to it, and add the
|
|
|
|
- * matching case to the state.
|
|
|
|
- */
|
|
|
|
-
|
|
|
|
- /* check the default transition first */
|
|
|
|
- if (cases.otherwise)
|
|
|
|
- state->cases.otherwise = find_target_state(nodemap, work_queue,
|
|
|
|
- cases.otherwise,
|
|
|
|
-							   stats);
|
|
|
|
-
|
|
|
|
- /* For each transition from *from, check if the set of nodes it
|
|
|
|
- * transitions to already has been mapped to a state
|
|
|
|
- */
|
|
|
|
- for (NodeCases::iterator j = cases.begin(); j != cases.end(); j++) {
|
|
|
|
- State *target;
|
|
|
|
- target = find_target_state(nodemap, work_queue, j->second,
|
|
|
|
- stats);
|
|
|
|
-
|
|
|
|
- /* Don't insert transition that the default transition
|
|
|
|
- * already covers
|
|
|
|
- */
|
|
|
|
- if (target != state->cases.otherwise)
|
|
|
|
- state->cases.cases[j->first] = target;
|
|
|
|
- }
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-
|
|
|
|
-/* WARNING: This routine can only be called from within DFA creation as
|
|
|
|
- * the nodes value is only valid during dfa construction.
|
|
|
|
- */
|
|
|
|
-void DFA::dump_node_to_dfa(void)
|
|
|
|
-{
|
|
|
|
- cerr << "Mapping of States to expr nodes\n"
|
|
|
|
- " State <= Nodes\n"
|
|
|
|
- "-------------------\n";
|
|
|
|
- for (Partition::iterator i = states.begin(); i != states.end(); i++)
|
|
|
|
- cerr << " " << (*i)->label << " <= " << *(*i)->nodes << "\n";
|
|
|
|
-}
|
|
|
|
-
|
2011-01-17 17:43:05 +01:00
|
|
|
-/**
|
|
|
|
- * Construct a DFA from a syntax tree.
|
|
|
|
- */
|
|
|
|
-DFA::DFA(Node *root, dfaflags_t flags) : root(root)
|
|
|
|
-{
|
2011-03-25 09:04:51 +01:00
|
|
|
- dfa_stats_t stats = { 0, 0, 0 };
|
|
|
|
- int i = 0;
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (flags & DFA_DUMP_PROGRESS)
|
|
|
|
- fprintf(stderr, "Creating dfa:\r");
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- for (depth_first_traversal i(root); i; i++) {
|
|
|
|
- (*i)->compute_nullable();
|
|
|
|
- (*i)->compute_firstpos();
|
|
|
|
- (*i)->compute_lastpos();
|
|
|
|
- }
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (flags & DFA_DUMP_PROGRESS)
|
|
|
|
- fprintf(stderr, "Creating dfa: followpos\r");
|
|
|
|
- for (depth_first_traversal i(root); i; i++) {
|
|
|
|
- (*i)->compute_followpos();
|
|
|
|
- }
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- NodeMap nodemap;
|
|
|
|
- NodeSet *emptynode = new NodeSet;
|
|
|
|
- nonmatching = add_new_state(nodemap,
|
|
|
|
- make_pair(hash_NodeSet(emptynode), emptynode),
|
|
|
|
- emptynode, stats);
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- NodeSet *first = new NodeSet(root->firstpos);
|
|
|
|
- start = add_new_state(nodemap, make_pair(hash_NodeSet(first), first),
|
|
|
|
- first, stats);
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- /* the work_queue contains the states that need to have their
|
|
|
|
- * transitions computed. This could be done with a recursive
|
|
|
|
- * algorithm instead of a work_queue, but it would be slightly slower
|
|
|
|
- * and consume more memory.
|
|
|
|
- *
|
|
|
|
- * TODO: currently the work_queue is treated in a breadth first
|
|
|
|
- * search manner. Test using the work_queue in a depth first
|
|
|
|
- * manner, this may help reduce the number of entries on the
|
|
|
|
- * work_queue at any given time, thus reducing peak memory use.
|
|
|
|
- */
|
|
|
|
- list<State *> work_queue;
|
|
|
|
- work_queue.push_back(start);
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- while (!work_queue.empty()) {
|
|
|
|
- if (i % 1000 == 0 && (flags & DFA_DUMP_PROGRESS))
|
|
|
|
- fprintf(stderr, "\033[2KCreating dfa: queue %ld\tstates %ld\teliminated duplicates %d\r", work_queue.size(), states.size(), stats.duplicates);
|
|
|
|
- i++;
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- State *from = work_queue.front();
|
|
|
|
- work_queue.pop_front();
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- /* Update 'from's transitions, and if it transitions to any
|
|
|
|
- * unknown State create it and add it to the work_queue
|
|
|
|
- */
|
|
|
|
- update_state_transitions(nodemap, work_queue, from, stats);
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
-	} /* while (!work_queue.empty()) */
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
-	/* Clean up the Sets of nodes used in computing the DFA as they are no longer
|
|
|
|
- * needed.
|
|
|
|
- */
|
|
|
|
- for (depth_first_traversal i(root); i; i++) {
|
|
|
|
- (*i)->firstpos.clear();
|
|
|
|
- (*i)->lastpos.clear();
|
|
|
|
- (*i)->followpos.clear();
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- if (flags & DFA_DUMP_NODE_TO_DFA)
|
|
|
|
- dump_node_to_dfa();
|
|
|
|
-
|
|
|
|
- for (NodeMap::iterator i = nodemap.begin(); i != nodemap.end(); i++)
|
|
|
|
- delete i->first.second;
|
|
|
|
- nodemap.clear();
|
|
|
|
-
|
|
|
|
- if (flags & (DFA_DUMP_STATS))
|
|
|
|
- fprintf(stderr, "\033[2KCreated dfa: states %ld,\teliminated duplicates %d,\tprotostate sets: longest %u, avg %u\n", states.size(), stats.duplicates, stats.proto_max, (unsigned int) (stats.proto_sum/states.size()));
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
|
|
|
-}
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
-
|
2011-01-17 17:43:05 +01:00
|
|
|
-DFA::~DFA()
|
|
|
|
-{
|
2011-03-25 09:04:51 +01:00
|
|
|
- for (Partition::iterator i = states.begin(); i != states.end(); i++)
|
2011-01-17 17:43:05 +01:00
|
|
|
- delete *i;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-class MatchFlag : public AcceptNode {
|
|
|
|
-public:
|
|
|
|
-MatchFlag(uint32_t flag, uint32_t audit) : flag(flag), audit(audit) {}
|
|
|
|
- ostream& dump(ostream& os)
|
|
|
|
- {
|
|
|
|
- return os << '<' << flag << '>';
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- uint32_t flag;
|
|
|
|
- uint32_t audit;
|
|
|
|
- };
|
|
|
|
-
|
|
|
|
-class ExactMatchFlag : public MatchFlag {
|
|
|
|
-public:
|
|
|
|
- ExactMatchFlag(uint32_t flag, uint32_t audit) : MatchFlag(flag, audit) {}
|
|
|
|
-};
|
|
|
|
-
|
|
|
|
-class DenyMatchFlag : public MatchFlag {
|
|
|
|
-public:
|
|
|
|
- DenyMatchFlag(uint32_t flag, uint32_t quiet) : MatchFlag(flag, quiet) {}
|
|
|
|
-};
|
|
|
|
-
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
-void DFA::dump_uniq_perms(const char *s)
|
2011-01-17 17:43:05 +01:00
|
|
|
-{
|
2011-03-25 09:04:51 +01:00
|
|
|
- set < pair<uint32_t, uint32_t> > uniq;
|
|
|
|
- for (Partition::iterator i = states.begin(); i != states.end(); i++)
|
|
|
|
- uniq.insert(make_pair((*i)->accept, (*i)->audit));
|
|
|
|
-
|
|
|
|
- cerr << "Unique Permission sets: " << s << " (" << uniq.size() << ")\n";
|
|
|
|
- cerr << "----------------------\n";
|
|
|
|
- for (set< pair<uint32_t, uint32_t> >::iterator i = uniq.begin();
|
|
|
|
- i != uniq.end(); i++) {
|
|
|
|
- cerr << " " << hex << i->first << " " << i->second << dec <<"\n";
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
-}
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
-
|
2011-01-17 17:43:05 +01:00
|
|
|
-/* Remove dead or unreachable states */
|
|
|
|
-void DFA::remove_unreachable(dfaflags_t flags)
|
|
|
|
-{
|
|
|
|
- set <State *> reachable;
|
|
|
|
- list <State *> work_queue;
|
|
|
|
-
|
|
|
|
- /* find the set of reachable states */
|
|
|
|
- reachable.insert(nonmatching);
|
|
|
|
- work_queue.push_back(start);
|
|
|
|
- while (!work_queue.empty()) {
|
|
|
|
- State *from = work_queue.front();
|
|
|
|
- work_queue.pop_front();
|
|
|
|
- reachable.insert(from);
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (from->cases.otherwise &&
|
|
|
|
- (reachable.find(from->cases.otherwise) == reachable.end()))
|
|
|
|
- work_queue.push_back(from->cases.otherwise);
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- for (Cases::iterator j = from->cases.begin();
|
|
|
|
- j != from->cases.end(); j++) {
|
2011-01-17 17:43:05 +01:00
|
|
|
- if (reachable.find(j->second) == reachable.end())
|
|
|
|
- work_queue.push_back(j->second);
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- /* walk the set of states and remove any that aren't reachable */
|
|
|
|
- if (reachable.size() < states.size()) {
|
|
|
|
- int count = 0;
|
2011-03-25 09:04:51 +01:00
|
|
|
- Partition::iterator i;
|
|
|
|
- Partition::iterator next;
|
2011-01-17 17:43:05 +01:00
|
|
|
- for (i = states.begin(); i != states.end(); i = next) {
|
|
|
|
- next = i;
|
|
|
|
- next++;
|
|
|
|
- if (reachable.find(*i) == reachable.end()) {
|
|
|
|
- if (flags & DFA_DUMP_UNREACHABLE) {
|
|
|
|
- cerr << "unreachable: "<< **i;
|
|
|
|
- if (*i == start)
|
|
|
|
- cerr << " <==";
|
2011-03-25 09:04:51 +01:00
|
|
|
- if ((*i)->accept) {
|
|
|
|
- cerr << " (0x" << hex << (*i)->accept
|
|
|
|
- << " " << (*i)->audit << dec << ')';
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- cerr << endl;
|
|
|
|
- }
|
2011-03-25 09:04:51 +01:00
|
|
|
- State *current = *i;
|
|
|
|
- states.erase(i);
|
|
|
|
- delete(current);
|
|
|
|
- count++;
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- if (count && (flags & DFA_DUMP_STATS))
|
|
|
|
- cerr << "DFA: states " << states.size() << " removed "
|
|
|
|
- << count << " unreachable states\n";
|
|
|
|
- }
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-/* test if two states have the same transitions under partition_map */
|
2011-03-25 09:04:51 +01:00
|
|
|
-bool DFA::same_mappings(State *s1, State *s2)
|
2011-01-17 17:43:05 +01:00
|
|
|
-{
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (s1->cases.otherwise && s1->cases.otherwise != nonmatching) {
|
|
|
|
- if (!s2->cases.otherwise || s2->cases.otherwise == nonmatching)
|
2011-01-17 17:43:05 +01:00
|
|
|
- return false;
|
2011-03-25 09:04:51 +01:00
|
|
|
- Partition *p1 = s1->cases.otherwise->partition;
|
|
|
|
- Partition *p2 = s2->cases.otherwise->partition;
|
2011-01-17 17:43:05 +01:00
|
|
|
- if (p1 != p2)
|
|
|
|
- return false;
|
2011-03-25 09:04:51 +01:00
|
|
|
- } else if (s2->cases.otherwise && s2->cases.otherwise != nonmatching) {
|
2011-01-17 17:43:05 +01:00
|
|
|
- return false;
|
|
|
|
- }
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (s1->cases.cases.size() != s2->cases.cases.size())
|
2011-01-17 17:43:05 +01:00
|
|
|
- return false;
|
2011-03-25 09:04:51 +01:00
|
|
|
- for (Cases::iterator j1 = s1->cases.begin(); j1 != s1->cases.end();
|
2011-01-17 17:43:05 +01:00
|
|
|
- j1++){
|
2011-03-25 09:04:51 +01:00
|
|
|
- Cases::iterator j2 = s2->cases.cases.find(j1->first);
|
|
|
|
- if (j2 == s2->cases.end())
|
2011-01-17 17:43:05 +01:00
|
|
|
- return false;
|
2011-03-25 09:04:51 +01:00
|
|
|
- Partition *p1 = j1->second->partition;
|
|
|
|
- Partition *p2 = j2->second->partition;
|
2011-01-17 17:43:05 +01:00
|
|
|
- if (p1 != p2)
|
|
|
|
- return false;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- return true;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-/* Do simple djb2 hashing against a State's transition cases
|
|
|
|
- * this provides a rough initial guess at state equivalence as if a state
|
|
|
|
- * has a different number of transitions or has transitions on different
|
|
|
|
- * cases they will never be equivalent.
|
|
|
|
- * Note: this only hashes based off of the alphabet (not destination)
|
|
|
|
- * as different destinations could end up being equivalent
|
|
|
|
- */
|
|
|
|
-size_t DFA::hash_trans(State *s)
|
|
|
|
-{
|
|
|
|
- unsigned long hash = 5381;
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- for (Cases::iterator j = s->cases.begin(); j != s->cases.end(); j++){
|
2011-01-17 17:43:05 +01:00
|
|
|
- hash = ((hash << 5) + hash) + j->first;
|
2011-03-25 09:04:51 +01:00
|
|
|
- State *k = j->second;
|
|
|
|
- hash = ((hash << 5) + hash) + k->cases.cases.size();
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (s->cases.otherwise && s->cases.otherwise != nonmatching) {
|
2011-01-17 17:43:05 +01:00
|
|
|
- hash = ((hash << 5) + hash) + 5381;
|
2011-03-25 09:04:51 +01:00
|
|
|
- State *k = s->cases.otherwise;
|
|
|
|
- hash = ((hash << 5) + hash) + k->cases.cases.size();
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- hash = (hash << 8) | s->cases.cases.size();
|
2011-01-17 17:43:05 +01:00
|
|
|
- return hash;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-/* minimize the number of dfa states */
|
|
|
|
-void DFA::minimize(dfaflags_t flags)
|
|
|
|
-{
|
|
|
|
- map <pair <uint64_t, size_t>, Partition *> perm_map;
|
|
|
|
- list <Partition *> partitions;
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- /* Set up the initial partitions
|
|
|
|
-	 * minimum of: 1 non-accepting, and 1 accepting.
|
|
|
|
- * if trans hashing is used the accepting and non-accepting partitions
|
|
|
|
- * can be further split based on the number and type of transitions
|
|
|
|
- * a state makes.
|
|
|
|
- * If permission hashing is enabled the accepting partitions can
|
|
|
|
- * be further divided by permissions. This can result in not
|
|
|
|
-	 * obtaining a truly minimized dfa but comes close, and can speed up
|
|
|
|
- * minimization.
|
2011-01-17 17:43:05 +01:00
|
|
|
- */
|
|
|
|
- int accept_count = 0;
|
2011-03-25 09:04:51 +01:00
|
|
|
- int final_accept = 0;
|
|
|
|
- for (Partition::iterator i = states.begin(); i != states.end(); i++) {
|
|
|
|
- uint64_t perm_hash = 0;
|
|
|
|
- if (flags & DFA_CONTROL_MINIMIZE_HASH_PERMS) {
|
|
|
|
- /* make every unique perm create a new partition */
|
|
|
|
- perm_hash = ((uint64_t)(*i)->audit)<<32 |
|
|
|
|
- (uint64_t)(*i)->accept;
|
|
|
|
- } else if ((*i)->audit || (*i)->accept) {
|
|
|
|
-			/* combine all perms together into a single partition */
|
|
|
|
- perm_hash = 1;
|
|
|
|
- } /* else not an accept state so 0 for perm_hash */
|
|
|
|
-
|
|
|
|
- size_t trans_hash = 0;
|
|
|
|
- if (flags & DFA_CONTROL_MINIMIZE_HASH_TRANS)
|
|
|
|
- trans_hash = hash_trans(*i);
|
|
|
|
- pair <uint64_t, size_t> group = make_pair(perm_hash, trans_hash);
|
2011-01-17 17:43:05 +01:00
|
|
|
- map <pair <uint64_t, size_t>, Partition *>::iterator p = perm_map.find(group);
|
|
|
|
- if (p == perm_map.end()) {
|
|
|
|
- Partition *part = new Partition();
|
|
|
|
- part->push_back(*i);
|
|
|
|
- perm_map.insert(make_pair(group, part));
|
|
|
|
- partitions.push_back(part);
|
2011-03-25 09:04:51 +01:00
|
|
|
- (*i)->partition = part;
|
|
|
|
- if (perm_hash)
|
2011-01-17 17:43:05 +01:00
|
|
|
- accept_count++;
|
|
|
|
- } else {
|
2011-03-25 09:04:51 +01:00
|
|
|
- (*i)->partition = p->second;
|
2011-01-17 17:43:05 +01:00
|
|
|
- p->second->push_back(*i);
|
|
|
|
- }
|
2011-03-25 09:04:51 +01:00
|
|
|
-
|
2011-01-17 17:43:05 +01:00
|
|
|
- if ((flags & DFA_DUMP_PROGRESS) &&
|
|
|
|
- (partitions.size() % 1000 == 0))
|
2011-03-25 09:04:51 +01:00
|
|
|
- cerr << "\033[2KMinimize dfa: partitions " << partitions.size() << "\tinit " << partitions.size() << " (accept " << accept_count << ")\r";
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- /* perm_map is no longer needed so free the memory it is using.
|
|
|
|
- * Don't remove - doing it manually here helps reduce peak memory usage.
|
|
|
|
- */
|
|
|
|
- perm_map.clear();
|
|
|
|
-
|
2011-01-17 17:43:05 +01:00
|
|
|
- int init_count = partitions.size();
|
|
|
|
- if (flags & DFA_DUMP_PROGRESS)
|
2011-03-25 09:04:51 +01:00
|
|
|
- cerr << "\033[2KMinimize dfa: partitions " << partitions.size() << "\tinit " << init_count << " (accept " << accept_count << ")\r";
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
|
|
|
- /* Now do repartitioning until each partition contains the set of
|
|
|
|
- * states that are the same. This will happen when the partition
|
|
|
|
-	 * splitting stabilizes, with a worst case of 1 state per partition,
|
|
|
|
-	 * i.e. already minimized.
|
|
|
|
- */
|
|
|
|
- Partition *new_part;
|
|
|
|
- int new_part_count;
|
|
|
|
- do {
|
|
|
|
- new_part_count = 0;
|
|
|
|
- for (list <Partition *>::iterator p = partitions.begin();
|
|
|
|
- p != partitions.end(); p++) {
|
|
|
|
- new_part = NULL;
|
|
|
|
- State *rep = *((*p)->begin());
|
|
|
|
- Partition::iterator next;
|
|
|
|
- for (Partition::iterator s = ++(*p)->begin();
|
|
|
|
- s != (*p)->end(); ) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (same_mappings(rep, *s)) {
|
2011-01-17 17:43:05 +01:00
|
|
|
- ++s;
|
|
|
|
- continue;
|
|
|
|
- }
|
|
|
|
- if (!new_part) {
|
|
|
|
- new_part = new Partition;
|
|
|
|
- list <Partition *>::iterator tmp = p;
|
|
|
|
- partitions.insert(++tmp, new_part);
|
|
|
|
- new_part_count++;
|
|
|
|
- }
|
|
|
|
- new_part->push_back(*s);
|
|
|
|
- s = (*p)->erase(s);
|
|
|
|
- }
|
|
|
|
- /* remapping partition_map for new_part entries
|
|
|
|
- * Do not do this above as it messes up same_mappings
|
|
|
|
- */
|
|
|
|
- if (new_part) {
|
|
|
|
- for (Partition::iterator m = new_part->begin();
|
|
|
|
- m != new_part->end(); m++) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- (*m)->partition = new_part;
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- if ((flags & DFA_DUMP_PROGRESS) &&
|
|
|
|
- (partitions.size() % 100 == 0))
|
2011-03-25 09:04:51 +01:00
|
|
|
- cerr << "\033[2KMinimize dfa: partitions " << partitions.size() << "\tinit " << init_count << " (accept " << accept_count << ")\r";
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- } while(new_part_count);
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (partitions.size() == states.size()) {
|
|
|
|
- if (flags & DFA_DUMP_STATS)
|
|
|
|
- cerr << "\033[2KDfa minimization no states removed: partitions " << partitions.size() << "\tinit " << init_count << " (accept " << accept_count << ")\n";
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
|
|
|
-
|
|
|
|
- goto out;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- /* Remap the dfa so it uses the representative states
|
|
|
|
- * Use the first state of a partition as the representative state
|
|
|
|
-	 * At this point all states within a partition have transitions
|
2011-03-25 09:04:51 +01:00
|
|
|
- * to states within the same partitions, however this can slow
|
|
|
|
- * down compressed dfa compression as there are more states,
|
2011-01-17 17:43:05 +01:00
|
|
|
- */
|
|
|
|
- for (list <Partition *>::iterator p = partitions.begin();
|
|
|
|
- p != partitions.end(); p++) {
|
|
|
|
- /* representative state for this partition */
|
|
|
|
- State *rep = *((*p)->begin());
|
|
|
|
-
|
|
|
|
- /* update representative state's transitions */
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (rep->cases.otherwise) {
|
|
|
|
- Partition *partition = rep->cases.otherwise->partition;
|
|
|
|
- rep->cases.otherwise = *partition->begin();
|
|
|
|
- }
|
|
|
|
- for (Cases::iterator c = rep->cases.begin();
|
|
|
|
- c != rep->cases.end(); c++) {
|
|
|
|
- Partition *partition = c->second->partition;
|
|
|
|
- c->second = *partition->begin();
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
-//if ((*p)->size() > 1)
|
|
|
|
-//cerr << rep->label << ": ";
|
|
|
|
- /* clear the state label for all non representative states,
|
|
|
|
- * and accumulate permissions */
|
|
|
|
- for (Partition::iterator i = ++(*p)->begin(); i != (*p)->end(); i++) {
|
|
|
|
-//cerr << " " << (*i)->label;
|
|
|
|
- (*i)->label = -1;
|
|
|
|
- rep->accept |= (*i)->accept;
|
|
|
|
- rep->audit |= (*i)->audit;
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (rep->accept || rep->audit)
|
|
|
|
- final_accept++;
|
|
|
|
-//if ((*p)->size() > 1)
|
|
|
|
-//cerr << "\n";
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (flags & DFA_DUMP_STATS)
|
|
|
|
- cerr << "\033[2KMinimized dfa: final partitions " << partitions.size() << " (accept " << final_accept << ")" << "\tinit " << init_count << " (accept " << accept_count << ")\n";
|
|
|
|
-
|
|
|
|
-
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
|
|
|
- /* make sure nonmatching and start state are up to date with the
|
|
|
|
- * mappings */
|
|
|
|
- {
|
2011-03-25 09:04:51 +01:00
|
|
|
- Partition *partition = nonmatching->partition;
|
2011-01-17 17:43:05 +01:00
|
|
|
- if (*partition->begin() != nonmatching) {
|
|
|
|
- nonmatching = *partition->begin();
|
|
|
|
- }
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- partition = start->partition;
|
2011-01-17 17:43:05 +01:00
|
|
|
- if (*partition->begin() != start) {
|
|
|
|
- start = *partition->begin();
|
|
|
|
- }
|
|
|
|
- }
|
2011-03-25 09:04:51 +01:00
|
|
|
-
|
2011-01-17 17:43:05 +01:00
|
|
|
- /* Now that the states have been remapped, remove all states
|
2011-03-25 09:04:51 +01:00
|
|
|
-	 * that are not the representative states for their partition; they
|
|
|
|
- * will have a label == -1
|
2011-01-17 17:43:05 +01:00
|
|
|
- */
|
2011-03-25 09:04:51 +01:00
|
|
|
- for (Partition::iterator i = states.begin(); i != states.end(); ) {
|
|
|
|
- if ((*i)->label == -1) {
|
2011-01-17 17:43:05 +01:00
|
|
|
- State *s = *i;
|
2011-03-25 09:04:51 +01:00
|
|
|
- i = states.erase(i);
|
2011-01-17 17:43:05 +01:00
|
|
|
- delete(s);
|
2011-03-25 09:04:51 +01:00
|
|
|
- } else
|
|
|
|
- i++;
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
-
|
|
|
|
-out:
|
|
|
|
- /* Cleanup */
|
|
|
|
- while (!partitions.empty()) {
|
|
|
|
- Partition *p = partitions.front();
|
|
|
|
- partitions.pop_front();
|
|
|
|
- delete(p);
|
|
|
|
- }
|
|
|
|
-}
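The loop above is a Moore-style partition refinement. A self-contained toy in the same spirit (invented four-state machine over a single input symbol; stand-in code, not the parser's API) shows why the splitting terminates once no partition changes:

#include <cstdio>
#include <map>
#include <utility>
using namespace std;

int main(void)
{
	/* toy DFA on one symbol: 0->1->2->3, 3->3; only state 3 accepts */
	const int N = 4;
	int next[N] = { 1, 2, 3, 3 };
	int part[N] = { 0, 0, 0, 1 };	/* initial split: non-accepting / accepting */

	for (bool changed = true; changed; ) {
		/* a state's signature: (own partition, partition of its successor) */
		map<pair<int, int>, int> sig;
		int newpart[N];
		for (int s = 0; s < N; s++) {
			pair<int, int> key = make_pair(part[s], part[next[s]]);
			map<pair<int, int>, int>::iterator i = sig.find(key);
			if (i == sig.end()) {
				int id = (int)sig.size();
				sig.insert(make_pair(key, id));
				newpart[s] = id;
			} else
				newpart[s] = i->second;
		}
		changed = false;
		for (int s = 0; s < N; s++) {
			if (newpart[s] != part[s])
				changed = true;
			part[s] = newpart[s];
		}
	}
	/* every state ends up in its own partition here: each sits a different
	 * distance from the accepting state, so none are equivalent */
	for (int s = 0; s < N; s++)
		printf("state %d -> partition %d\n", s, part[s]);
	return 0;
}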
|
|
|
|
-
|
|
|
|
-/**
|
|
|
|
- * text-dump the DFA (for debugging).
|
|
|
|
- */
|
|
|
|
-void DFA::dump(ostream& os)
|
|
|
|
-{
|
2011-03-25 09:04:51 +01:00
|
|
|
- for (Partition::iterator i = states.begin(); i != states.end(); i++) {
|
|
|
|
- if (*i == start || (*i)->accept) {
|
2011-01-17 17:43:05 +01:00
|
|
|
- os << **i;
|
|
|
|
- if (*i == start)
|
|
|
|
- os << " <==";
|
2011-03-25 09:04:51 +01:00
|
|
|
- if ((*i)->accept) {
|
|
|
|
- os << " (0x" << hex << (*i)->accept << " " << (*i)->audit << dec << ')';
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- os << endl;
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- os << endl;
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- for (Partition::iterator i = states.begin(); i != states.end(); i++) {
|
|
|
|
- if ((*i)->cases.otherwise)
|
|
|
|
- os << **i << " -> " << (*i)->cases.otherwise << endl;
|
|
|
|
- for (Cases::iterator j = (*i)->cases.begin(); j != (*i)->cases.end(); j++) {
|
|
|
|
- os << **i << " -> " << j->second << ": " << j->first << endl;
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- os << endl;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-/**
|
|
|
|
- * Create a dot (graphviz) graph from the DFA (for debugging).
|
|
|
|
- */
|
|
|
|
-void DFA::dump_dot_graph(ostream& os)
|
|
|
|
-{
|
|
|
|
- os << "digraph \"dfa\" {" << endl;
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- for (Partition::iterator i = states.begin(); i != states.end(); i++) {
|
2011-01-17 17:43:05 +01:00
|
|
|
- if (*i == nonmatching)
|
|
|
|
- continue;
|
|
|
|
-
|
|
|
|
- os << "\t\"" << **i << "\" [" << endl;
|
|
|
|
- if (*i == start) {
|
|
|
|
- os << "\t\tstyle=bold" << endl;
|
|
|
|
- }
|
2011-03-25 09:04:51 +01:00
|
|
|
- uint32_t perms = (*i)->accept;
|
2011-01-17 17:43:05 +01:00
|
|
|
- if (perms) {
|
|
|
|
- os << "\t\tlabel=\"" << **i << "\\n("
|
|
|
|
- << perms << ")\"" << endl;
|
|
|
|
- }
|
|
|
|
- os << "\t]" << endl;
|
|
|
|
- }
|
2011-03-25 09:04:51 +01:00
|
|
|
- for (Partition::iterator i = states.begin(); i != states.end(); i++) {
|
|
|
|
- Cases& cases = (*i)->cases;
|
2011-01-17 17:43:05 +01:00
|
|
|
- Chars excluded;
|
|
|
|
-
|
|
|
|
- for (Cases::iterator j = cases.begin(); j != cases.end(); j++) {
|
|
|
|
- if (j->second == nonmatching)
|
|
|
|
- excluded.insert(j->first);
|
|
|
|
- else {
|
2011-03-25 09:04:51 +01:00
|
|
|
- os << "\t\"" << **i << "\" -> \"";
|
|
|
|
- os << j->second << "\" [" << endl;
|
|
|
|
- os << "\t\tlabel=\"" << j->first << "\"" << endl;
|
|
|
|
- os << "\t]" << endl;
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- }
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (cases.otherwise && cases.otherwise != nonmatching) {
|
|
|
|
- os << "\t\"" << **i << "\" -> \"" << cases.otherwise
|
2011-01-17 17:43:05 +01:00
|
|
|
- << "\" [" << endl;
|
|
|
|
- if (!excluded.empty()) {
|
|
|
|
- os << "\t\tlabel=\"[^";
|
|
|
|
- for (Chars::iterator i = excluded.begin();
|
|
|
|
- i != excluded.end();
|
|
|
|
- i++) {
|
|
|
|
- os << *i;
|
|
|
|
- }
|
|
|
|
- os << "]\"" << endl;
|
|
|
|
- }
|
|
|
|
- os << "\t]" << endl;
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- os << '}' << endl;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-/**
|
|
|
|
- * Compute character equivalence classes in the DFA to save space in the
|
|
|
|
- * transition table.
|
|
|
|
- */
|
|
|
|
-map<uchar, uchar> DFA::equivalence_classes(dfaflags_t flags)
|
|
|
|
-{
|
|
|
|
- map<uchar, uchar> classes;
|
|
|
|
- uchar next_class = 1;
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- for (Partition::iterator i = states.begin(); i != states.end(); i++) {
|
|
|
|
- Cases& cases = (*i)->cases;
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
|
|
|
- /* Group edges to the same next state together */
|
|
|
|
- map<const State *, Chars> node_sets;
|
|
|
|
- for (Cases::iterator j = cases.begin(); j != cases.end(); j++)
|
|
|
|
- node_sets[j->second].insert(j->first);
|
|
|
|
-
|
|
|
|
- for (map<const State *, Chars>::iterator j = node_sets.begin();
|
|
|
|
- j != node_sets.end();
|
|
|
|
- j++) {
|
|
|
|
- /* Group edges to the same next state together by class */
|
|
|
|
- map<uchar, Chars> node_classes;
|
|
|
|
- bool class_used = false;
|
|
|
|
- for (Chars::iterator k = j->second.begin();
|
|
|
|
- k != j->second.end();
|
|
|
|
- k++) {
|
|
|
|
- pair<map<uchar, uchar>::iterator, bool> x =
|
|
|
|
- classes.insert(make_pair(*k, next_class));
|
|
|
|
- if (x.second)
|
|
|
|
- class_used = true;
|
|
|
|
- pair<map<uchar, Chars>::iterator, bool> y =
|
|
|
|
- node_classes.insert(make_pair(x.first->second, Chars()));
|
|
|
|
- y.first->second.insert(*k);
|
|
|
|
- }
|
|
|
|
- if (class_used) {
|
|
|
|
- next_class++;
|
|
|
|
- class_used = false;
|
|
|
|
- }
|
|
|
|
- for (map<uchar, Chars>::iterator k = node_classes.begin();
|
|
|
|
- k != node_classes.end();
|
|
|
|
- k++) {
|
|
|
|
- /**
|
|
|
|
- * If any other characters are in the same class, move
|
|
|
|
- * the characters in this class into their own new class
|
|
|
|
- */
|
|
|
|
- map<uchar, uchar>::iterator l;
|
|
|
|
- for (l = classes.begin(); l != classes.end(); l++) {
|
|
|
|
- if (l->second == k->first &&
|
|
|
|
- k->second.find(l->first) == k->second.end()) {
|
|
|
|
- class_used = true;
|
|
|
|
- break;
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- if (class_used) {
|
|
|
|
- for (Chars::iterator l = k->second.begin();
|
|
|
|
- l != k->second.end();
|
|
|
|
- l++) {
|
|
|
|
- classes[*l] = next_class;
|
|
|
|
- }
|
|
|
|
- next_class++;
|
|
|
|
- class_used = false;
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- if (flags & DFA_DUMP_EQUIV_STATS)
|
|
|
|
- fprintf(stderr, "Equiv class reduces to %d classes\n", next_class - 1);
|
|
|
|
- return classes;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-/**
|
|
|
|
- * Text-dump the equivalence classes (for debugging).
|
|
|
|
- */
|
|
|
|
-void dump_equivalence_classes(ostream& os, map<uchar, uchar>& eq)
|
|
|
|
-{
|
|
|
|
- map<uchar, Chars> rev;
|
|
|
|
-
|
|
|
|
- for (map<uchar, uchar>::iterator i = eq.begin(); i != eq.end(); i++) {
|
|
|
|
- Chars& chars = rev.insert(make_pair(i->second,
|
|
|
|
- Chars())).first->second;
|
|
|
|
- chars.insert(i->first);
|
|
|
|
- }
|
|
|
|
- os << "(eq):" << endl;
|
|
|
|
- for (map<uchar, Chars>::iterator i = rev.begin(); i != rev.end(); i++) {
|
|
|
|
- os << (int)i->first << ':';
|
|
|
|
- Chars& chars = i->second;
|
|
|
|
- for (Chars::iterator j = chars.begin(); j != chars.end(); j++) {
|
|
|
|
- os << ' ' << *j;
|
|
|
|
- }
|
|
|
|
- os << endl;
|
|
|
|
- }
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-/**
|
|
|
|
- * Replace characters with classes (which are also represented as
|
|
|
|
- * characters) in the DFA transition table.
|
|
|
|
- */
|
|
|
|
-void DFA::apply_equivalence_classes(map<uchar, uchar>& eq)
|
|
|
|
-{
|
|
|
|
- /**
|
|
|
|
- * Note: We only transform the transition table; the nodes continue to
|
|
|
|
- * contain the original characters.
|
|
|
|
- */
|
2011-03-25 09:04:51 +01:00
|
|
|
- for (Partition::iterator i = states.begin(); i != states.end(); i++) {
|
2011-01-17 17:43:05 +01:00
|
|
|
- map<uchar, State *> tmp;
|
2011-03-25 09:04:51 +01:00
|
|
|
- tmp.swap((*i)->cases.cases);
|
2011-01-17 17:43:05 +01:00
|
|
|
- for (Cases::iterator j = tmp.begin(); j != tmp.end(); j++)
|
2011-03-25 09:04:51 +01:00
|
|
|
- (*i)->cases.cases.insert(make_pair(eq[j->first], j->second));
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
-}
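A tiny stand-alone illustration (invented data, not the parser's API) of what the equivalence classes buy: bytes that always lead to the same places collapse into one class, and remapping a state's cases through eq, as apply_equivalence_classes() does above, shrinks the transition map accordingly.

#include <iostream>
#include <map>
#include <utility>
using namespace std;

int main(void)
{
	map<unsigned char, unsigned char> eq;
	for (int c = 'a'; c <= 'z'; c++)
		eq[(unsigned char)c] = 1;	/* all lowercase letters -> class 1 */
	eq[(unsigned char)'/'] = 2;		/* '/' behaves differently -> class 2 */

	/* one state's transitions keyed by raw byte ... */
	map<unsigned char, int> cases;
	for (int c = 'a'; c <= 'z'; c++)
		cases[(unsigned char)c] = 7;	/* every letter goes to state 7 */
	cases[(unsigned char)'/'] = 3;

	/* ... and the same transitions keyed by class instead */
	map<unsigned char, int> remapped;
	for (map<unsigned char, int>::iterator j = cases.begin(); j != cases.end(); j++)
		remapped.insert(make_pair(eq[j->first], j->second));

	cout << cases.size() << " -> " << remapped.size() << " entries" << endl;	/* 27 -> 2 */
	return 0;
}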
|
|
|
|
-
|
|
|
|
-/**
|
|
|
|
- * Flip the children of all cat nodes. This causes strings to be matched
|
|
|
|
- * back to front.
|
|
|
|
- */
|
|
|
|
-void flip_tree(Node *node)
|
|
|
|
-{
|
|
|
|
- for (depth_first_traversal i(node); i; i++) {
|
|
|
|
- if (CatNode *cat = dynamic_cast<CatNode *>(*i)) {
|
|
|
|
- swap(cat->child[0], cat->child[1]);
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-class TransitionTable {
|
|
|
|
- typedef vector<pair<const State *, size_t> > DefaultBase;
|
|
|
|
- typedef vector<pair<const State *, const State *> > NextCheck;
|
|
|
|
-public:
|
|
|
|
- TransitionTable(DFA& dfa, map<uchar, uchar>& eq, dfaflags_t flags);
|
|
|
|
- void dump(ostream& os);
|
|
|
|
- void flex_table(ostream& os, const char *name);
|
|
|
|
- void init_free_list(vector <pair<size_t, size_t> > &free_list, size_t prev, size_t start);
|
|
|
|
- bool fits_in(vector <pair<size_t, size_t> > &free_list,
|
|
|
|
- size_t base, Cases& cases);
|
|
|
|
- void insert_state(vector <pair<size_t, size_t> > &free_list,
|
|
|
|
- State *state, DFA& dfa);
|
|
|
|
-
|
|
|
|
-private:
|
|
|
|
- vector<uint32_t> accept;
|
|
|
|
- vector<uint32_t> accept2;
|
|
|
|
- DefaultBase default_base;
|
|
|
|
- NextCheck next_check;
|
|
|
|
- map<const State *, size_t> num;
|
|
|
|
- map<uchar, uchar>& eq;
|
|
|
|
- uchar max_eq;
|
|
|
|
- size_t first_free;
|
|
|
|
-};
|
|
|
|
-
|
|
|
|
-
|
|
|
|
-void TransitionTable::init_free_list(vector <pair<size_t, size_t> > &free_list,
|
|
|
|
- size_t prev, size_t start) {
|
|
|
|
- for (size_t i = start; i < free_list.size(); i++) {
|
|
|
|
- if (prev)
|
|
|
|
- free_list[prev].second = i;
|
|
|
|
- free_list[i].first = prev;
|
|
|
|
- prev = i;
|
|
|
|
- }
|
|
|
|
- free_list[free_list.size() -1].second = 0;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-/**
|
|
|
|
- * Construct the transition table.
|
|
|
|
- */
|
|
|
|
-TransitionTable::TransitionTable(DFA& dfa, map<uchar, uchar>& eq,
|
|
|
|
- dfaflags_t flags)
|
|
|
|
- : eq(eq)
|
|
|
|
-{
|
|
|
|
-
|
|
|
|
- if (flags & DFA_DUMP_TRANS_PROGRESS)
|
2011-03-25 09:04:51 +01:00
|
|
|
- fprintf(stderr, "Compressing trans table:\r");
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
|
|
|
-
|
|
|
|
- if (eq.empty())
|
|
|
|
- max_eq = 255;
|
|
|
|
- else {
|
|
|
|
- max_eq = 0;
|
|
|
|
- for(map<uchar, uchar>::iterator i = eq.begin(); i != eq.end(); i++) {
|
|
|
|
- if (i->second > max_eq)
|
|
|
|
- max_eq = i->second;
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- /* Do initial setup adding up all the transitions and sorting by
|
|
|
|
- * transition count.
|
|
|
|
- */
|
|
|
|
- size_t optimal = 2;
|
|
|
|
- multimap <size_t, State *> order;
|
|
|
|
- vector <pair<size_t, size_t> > free_list;
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- for (Partition::iterator i = dfa.states.begin(); i != dfa.states.end(); i++) {
|
|
|
|
- if (*i == dfa.start || *i == dfa.nonmatching)
|
2011-01-17 17:43:05 +01:00
|
|
|
- continue;
|
2011-03-25 09:04:51 +01:00
|
|
|
- optimal += (*i)->cases.cases.size();
|
2011-01-17 17:43:05 +01:00
|
|
|
- if (flags & DFA_CONTROL_TRANS_HIGH) {
|
|
|
|
- size_t range = 0;
|
2011-03-25 09:04:51 +01:00
|
|
|
- if ((*i)->cases.cases.size())
|
|
|
|
- range = (*i)->cases.cases.rbegin()->first - (*i)->cases.begin()->first;
|
|
|
|
- size_t ord = ((256 - (*i)->cases.cases.size()) << 8) |
|
2011-01-17 17:43:05 +01:00
|
|
|
- (256 - range);
|
|
|
|
- /* reverse sort by entry count, most entries first */
|
2011-03-25 09:04:51 +01:00
|
|
|
- order.insert(make_pair(ord, *i));
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- /* Insert the dummy nonmatching transition by hand */
|
|
|
|
- next_check.push_back(make_pair(dfa.nonmatching, dfa.nonmatching));
|
|
|
|
- default_base.push_back(make_pair(dfa.nonmatching, 0));
|
|
|
|
- num.insert(make_pair(dfa.nonmatching, num.size()));
|
|
|
|
-
|
|
|
|
- accept.resize(dfa.states.size());
|
|
|
|
- accept2.resize(dfa.states.size());
|
|
|
|
- next_check.resize(optimal);
|
|
|
|
- free_list.resize(optimal);
|
|
|
|
-
|
|
|
|
- accept[0] = 0;
|
|
|
|
- accept2[0] = 0;
|
|
|
|
- first_free = 1;
|
|
|
|
- init_free_list(free_list, 0, 1);
|
|
|
|
-
|
|
|
|
- insert_state(free_list, dfa.start, dfa);
|
|
|
|
- accept[1] = 0;
|
|
|
|
- accept2[1] = 0;
|
|
|
|
- num.insert(make_pair(dfa.start, num.size()));
|
|
|
|
-
|
|
|
|
- int count = 2;
|
|
|
|
-
|
|
|
|
- if (!(flags & DFA_CONTROL_TRANS_HIGH)) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- for (Partition::iterator i = dfa.states.begin(); i != dfa.states.end();
|
2011-01-17 17:43:05 +01:00
|
|
|
- i++) {
|
|
|
|
- if (*i != dfa.nonmatching && *i != dfa.start) {
|
|
|
|
- insert_state(free_list, *i, dfa);
|
2011-03-25 09:04:51 +01:00
|
|
|
- accept[num.size()] = (*i)->accept;
|
|
|
|
- accept2[num.size()] = (*i)->audit;
|
2011-01-17 17:43:05 +01:00
|
|
|
- num.insert(make_pair(*i, num.size()));
|
|
|
|
- }
|
|
|
|
- if (flags & (DFA_DUMP_TRANS_PROGRESS)) {
|
|
|
|
- count++;
|
|
|
|
- if (count % 100 == 0)
|
2011-03-25 09:04:51 +01:00
|
|
|
- fprintf(stderr, "\033[2KCompressing trans table: insert state: %d/%ld\r", count, dfa.states.size());
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- } else {
|
|
|
|
- for (multimap <size_t, State *>::iterator i = order.begin();
|
|
|
|
- i != order.end(); i++) {
|
|
|
|
- if (i->second != dfa.nonmatching && i->second != dfa.start) {
|
|
|
|
- insert_state(free_list, i->second, dfa);
|
2011-03-25 09:04:51 +01:00
|
|
|
- accept[num.size()] = i->second->accept;
|
|
|
|
- accept2[num.size()] = i->second->audit;
|
2011-01-17 17:43:05 +01:00
|
|
|
- num.insert(make_pair(i->second, num.size()));
|
|
|
|
- }
|
|
|
|
- if (flags & (DFA_DUMP_TRANS_PROGRESS)) {
|
|
|
|
- count++;
|
|
|
|
- if (count % 100 == 0)
|
2011-03-25 09:04:51 +01:00
|
|
|
- fprintf(stderr, "\033[2KCompressing trans table: insert state: %d/%ld\r", count, dfa.states.size());
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- if (flags & (DFA_DUMP_TRANS_STATS | DFA_DUMP_TRANS_PROGRESS)) {
|
|
|
|
- ssize_t size = 4 * next_check.size() + 6 * dfa.states.size();
|
2011-03-25 09:04:51 +01:00
|
|
|
- fprintf(stderr, "\033[2KCompressed trans table: states %ld, next/check %ld, optimal next/check %ld avg/state %.2f, compression %ld/%ld = %.2f %%\n", dfa.states.size(), next_check.size(), optimal, (float)next_check.size()/(float)dfa.states.size(), size, 512 * dfa.states.size(), 100.0 - ((float) size * 100.0 / (float)(512 * dfa.states.size())));
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-
|
|
|
|
-/**
|
|
|
|
- * Does <cases> fit into position <base> of the transition table?
|
|
|
|
- */
|
2011-03-25 09:04:51 +01:00
|
|
|
-bool TransitionTable::fits_in(vector <pair<size_t, size_t> > &free_list __attribute__((unused)),
|
2011-01-17 17:43:05 +01:00
|
|
|
- size_t pos, Cases& cases)
|
|
|
|
-{
|
|
|
|
- size_t c, base = pos - cases.begin()->first;
|
|
|
|
- for (Cases::iterator i = cases.begin(); i != cases.end(); i++) {
|
|
|
|
- c = base + i->first;
|
|
|
|
-		/* if it overflows the next_check array it still fits, as we will
|
|
|
|
- * resize */
|
|
|
|
- if (c >= next_check.size())
|
|
|
|
- return true;
|
|
|
|
- if (next_check[c].second)
|
|
|
|
- return false;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- return true;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-/**
|
|
|
|
- * Insert <state> of <dfa> into the transition table.
|
|
|
|
- */
|
|
|
|
-void TransitionTable::insert_state(vector <pair<size_t, size_t> > &free_list,
|
|
|
|
- State *from, DFA& dfa)
|
|
|
|
-{
|
|
|
|
- State *default_state = dfa.nonmatching;
|
|
|
|
- size_t base = 0;
|
|
|
|
- int resize;
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- Cases& cases = from->cases;
|
2011-01-17 17:43:05 +01:00
|
|
|
- size_t c = cases.begin()->first;
|
|
|
|
- size_t prev = 0;
|
|
|
|
- size_t x = first_free;
|
|
|
|
-
|
|
|
|
- if (cases.otherwise)
|
|
|
|
- default_state = cases.otherwise;
|
|
|
|
- if (cases.cases.empty())
|
|
|
|
- goto do_insert;
|
|
|
|
-
|
|
|
|
-repeat:
|
|
|
|
- resize = 0;
|
|
|
|
- /* get the first free entry that won't underflow */
|
|
|
|
- while (x && (x < c)) {
|
|
|
|
- prev = x;
|
|
|
|
- x = free_list[x].second;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- /* try inserting until we succeed. */
|
|
|
|
- while (x && !fits_in(free_list, x, cases)) {
|
|
|
|
- prev = x;
|
|
|
|
- x = free_list[x].second;
|
|
|
|
- }
|
|
|
|
- if (!x) {
|
|
|
|
- resize = 256 - cases.begin()->first;
|
|
|
|
- x = free_list.size();
|
|
|
|
- /* set prev to last free */
|
|
|
|
- } else if (x + 255 - cases.begin()->first >= next_check.size()) {
|
|
|
|
- resize = (255 - cases.begin()->first - (next_check.size() - 1 - x));
|
|
|
|
- for (size_t y = x; y; y = free_list[y].second)
|
|
|
|
- prev = y;
|
|
|
|
- }
|
|
|
|
- if (resize) {
|
|
|
|
- /* expand next_check and free_list */
|
|
|
|
- size_t old_size = free_list.size();
|
|
|
|
- next_check.resize(next_check.size() + resize);
|
|
|
|
- free_list.resize(free_list.size() + resize);
|
|
|
|
- init_free_list(free_list, prev, old_size);
|
|
|
|
- if (!first_free)
|
|
|
|
-			first_free = old_size;
|
|
|
|
- if (x == old_size)
|
|
|
|
- goto repeat;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- base = x - c;
|
|
|
|
- for (Cases::iterator j = cases.begin(); j != cases.end(); j++) {
|
|
|
|
- next_check[base + j->first] = make_pair(j->second, from);
|
|
|
|
- size_t prev = free_list[base + j->first].first;
|
|
|
|
- size_t next = free_list[base + j->first].second;
|
|
|
|
- if (prev)
|
|
|
|
- free_list[prev].second = next;
|
|
|
|
- if (next)
|
|
|
|
- free_list[next].first = prev;
|
|
|
|
- if (base + j->first == first_free)
|
|
|
|
- first_free = next;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
-do_insert:
|
|
|
|
- default_base.push_back(make_pair(default_state, base));
|
|
|
|
-}
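For orientation, this is the usual comb-compression layout: each state gets a base offset, packed (next, check) cells are shared between states, and a cell only applies to a state whose check entry names it; otherwise the default transition is taken. A minimal stand-in reader (invented two-state tables; the real consumer is whatever loads the flex-style tables written below, not code in this file):

#include <cstdio>
#include <stdint.h>
#include <stddef.h>

static const uint16_t def_tab[]   = { 0, 0 };		/* default state, per state */
static const uint32_t base_tab[]  = { 0, 1 };		/* base offset, per state   */
static const uint16_t next_tab[]  = { 0, 1, 1 };	/* packed target states     */
static const uint16_t check_tab[] = { 0, 0, 1 };	/* owning state, per cell   */
static const size_t table_size = 3;

/* (a real reader would first map c through the equivalence classes) */
static uint16_t toy_next_state(uint16_t state, unsigned char c)
{
	size_t pos = (size_t)base_tab[state] + c;
	if (pos < table_size && check_tab[pos] == state)
		return next_tab[pos];	/* explicit transition owned by this state */
	return def_tab[state];		/* otherwise fall back to the default      */
}

int main(void)
{
	/* state 0 on byte 1 owns cell 1 -> state 1;
	 * state 1 on byte 0 lands on cell 1 too, but check says the cell
	 * belongs to state 0, so state 1 takes its default -> state 0 */
	printf("%u %u\n", (unsigned)toy_next_state(0, 1), (unsigned)toy_next_state(1, 0));
	return 0;
}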
|
|
|
|
-
|
|
|
|
-/**
|
|
|
|
- * Text-dump the transition table (for debugging).
|
|
|
|
- */
|
|
|
|
-void TransitionTable::dump(ostream& os)
|
|
|
|
-{
|
|
|
|
- map<size_t, const State *> st;
|
|
|
|
- for (map<const State *, size_t>::iterator i = num.begin();
|
|
|
|
- i != num.end();
|
|
|
|
- i++) {
|
|
|
|
- st.insert(make_pair(i->second, i->first));
|
|
|
|
- }
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- os << "size=" << default_base.size() << " (accept, default, base): {state} -> {default state}" << endl;
|
2011-01-17 17:43:05 +01:00
|
|
|
- for (size_t i = 0; i < default_base.size(); i++) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- os << i << ": ";
|
2011-01-17 17:43:05 +01:00
|
|
|
- os << "(" << accept[i] << ", "
|
|
|
|
- << num[default_base[i].first] << ", "
|
|
|
|
- << default_base[i].second << ")";
|
|
|
|
- if (st[i])
|
|
|
|
- os << " " << *st[i];
|
|
|
|
- if (default_base[i].first)
|
|
|
|
- os << " -> " << *default_base[i].first;
|
|
|
|
- os << endl;
|
|
|
|
- }
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- os << "size=" << next_check.size() << " (next, check): {check state} -> {next state} : offset from base" << endl;
|
2011-01-17 17:43:05 +01:00
|
|
|
- for (size_t i = 0; i < next_check.size(); i++) {
|
|
|
|
- if (!next_check[i].second)
|
|
|
|
- continue;
|
|
|
|
-
|
|
|
|
- os << i << ": ";
|
|
|
|
- if (next_check[i].second) {
|
|
|
|
- os << "(" << num[next_check[i].first] << ", "
|
|
|
|
- << num[next_check[i].second] << ")" << " "
|
|
|
|
- << *next_check[i].second << " -> "
|
|
|
|
- << *next_check[i].first << ": ";
|
|
|
|
-
|
|
|
|
- size_t offs = i - default_base[num[next_check[i].second]].second;
|
|
|
|
- if (eq.size())
|
|
|
|
- os << offs;
|
|
|
|
- else
|
|
|
|
- os << (uchar)offs;
|
|
|
|
- }
|
|
|
|
- os << endl;
|
|
|
|
- }
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-#if 0
|
|
|
|
-template<class Iter>
|
|
|
|
-class FirstIterator {
|
|
|
|
-public:
|
|
|
|
- FirstIterator(Iter pos) : pos(pos) { }
|
|
|
|
- typename Iter::value_type::first_type operator*() { return pos->first; }
|
|
|
|
- bool operator!=(FirstIterator<Iter>& i) { return pos != i.pos; }
|
|
|
|
- void operator++() { ++pos; }
|
|
|
|
- ssize_t operator-(FirstIterator<Iter> i) { return pos - i.pos; }
|
|
|
|
-private:
|
|
|
|
- Iter pos;
|
|
|
|
-};
|
|
|
|
-
|
|
|
|
-template<class Iter>
|
|
|
|
-FirstIterator<Iter> first_iterator(Iter iter)
|
|
|
|
-{
|
|
|
|
- return FirstIterator<Iter>(iter);
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-template<class Iter>
|
|
|
|
-class SecondIterator {
|
|
|
|
-public:
|
|
|
|
- SecondIterator(Iter pos) : pos(pos) { }
|
|
|
|
- typename Iter::value_type::second_type operator*() { return pos->second; }
|
|
|
|
- bool operator!=(SecondIterator<Iter>& i) { return pos != i.pos; }
|
|
|
|
- void operator++() { ++pos; }
|
|
|
|
- ssize_t operator-(SecondIterator<Iter> i) { return pos - i.pos; }
|
|
|
|
-private:
|
|
|
|
- Iter pos;
|
|
|
|
-};
|
|
|
|
-
|
|
|
|
-template<class Iter>
|
|
|
|
-SecondIterator<Iter> second_iterator(Iter iter)
|
|
|
|
-{
|
|
|
|
- return SecondIterator<Iter>(iter);
|
|
|
|
-}
|
|
|
|
-#endif
|
|
|
|
-
|
|
|
|
-/**
|
|
|
|
- * Create a flex-style binary dump of the DFA tables. The table format
|
|
|
|
- * was partly reverse engineered from the flex sources and from
|
|
|
|
- * examining the tables that flex creates with its --tables-file option.
|
|
|
|
- * (Only the -Cf and -Ce formats are currently supported.)
|
|
|
|
- */
|
|
|
|
-
|
|
|
|
-#include "flex-tables.h"
|
|
|
|
-#include "regexp.h"
|
|
|
|
-
|
|
|
|
-static inline size_t pad64(size_t i)
|
|
|
|
-{
|
|
|
|
- return (i + (size_t)7) & ~(size_t)7;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-string fill64(size_t i)
|
|
|
|
-{
|
|
|
|
- const char zeroes[8] = { };
|
|
|
|
- string fill(zeroes, (i & 7) ? 8 - (i & 7) : 0);
|
|
|
|
- return fill;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-template<class Iter>
|
|
|
|
-size_t flex_table_size(Iter pos, Iter end)
|
|
|
|
-{
|
|
|
|
- return pad64(sizeof(struct table_header) + sizeof(*pos) * (end - pos));
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-template<class Iter>
|
|
|
|
-void write_flex_table(ostream& os, int id, Iter pos, Iter end)
|
|
|
|
-{
|
2011-03-25 09:04:51 +01:00
|
|
|
- struct table_header td = { 0, 0, 0, 0 };
|
2011-01-17 17:43:05 +01:00
|
|
|
- size_t size = end - pos;
|
|
|
|
-
|
|
|
|
- td.td_id = htons(id);
|
|
|
|
- td.td_flags = htons(sizeof(*pos));
|
|
|
|
- td.td_lolen = htonl(size);
|
|
|
|
- os.write((char *)&td, sizeof(td));
|
|
|
|
-
|
|
|
|
- for (; pos != end; ++pos) {
|
|
|
|
- switch(sizeof(*pos)) {
|
|
|
|
- case 4:
|
|
|
|
- os.put((char)(*pos >> 24));
|
|
|
|
- os.put((char)(*pos >> 16));
|
|
|
|
- case 2:
|
|
|
|
- os.put((char)(*pos >> 8));
|
|
|
|
- case 1:
|
|
|
|
- os.put((char)*pos);
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- os << fill64(sizeof(td) + sizeof(*pos) * size);
|
|
|
|
-}
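
A standalone sketch (not the patch's code) of the byte-emission idiom used in write_flex_table() above: the switch deliberately falls through from case 4 down to case 1, so each table entry is written big-endian with the width selected by sizeof(*pos).

    #include <cassert>
    #include <cstdint>
    #include <sstream>
    #include <string>

    /* emit the low `width` bytes of v in big-endian order */
    static void put_be(std::ostream &os, uint32_t v, size_t width)
    {
        switch (width) {          /* deliberate fall-through, as above */
        case 4:
            os.put((char)(v >> 24));
            os.put((char)(v >> 16));
            /* fall through */
        case 2:
            os.put((char)(v >> 8));
            /* fall through */
        case 1:
            os.put((char)v);
        }
    }

    int main()
    {
        std::ostringstream os;
        put_be(os, 0x01020304, 4);
        assert(os.str() == std::string("\x01\x02\x03\x04", 4));
        return 0;
    }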
|
|
|
|
-
|
|
|
|
-void TransitionTable::flex_table(ostream& os, const char *name)
|
|
|
|
-{
|
|
|
|
- const char th_version[] = "notflex";
|
2011-03-25 09:04:51 +01:00
|
|
|
- struct table_set_header th = { 0, 0, 0, 0 };
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
|
|
|
- /**
|
|
|
|
- * Change the following two data types to adjust the maximum flex
|
|
|
|
- * table size.
|
|
|
|
- */
|
|
|
|
- typedef uint16_t state_t;
|
|
|
|
- typedef uint32_t trans_t;
|
|
|
|
-
|
|
|
|
- if (default_base.size() >= (state_t)-1) {
|
|
|
|
- cerr << "Too many states (" << default_base.size() << ") for "
|
|
|
|
- "type state_t" << endl;
|
|
|
|
- exit(1);
|
|
|
|
- }
|
|
|
|
- if (next_check.size() >= (trans_t)-1) {
|
|
|
|
- cerr << "Too many transitions (" << next_check.size() << ") for "
|
|
|
|
- "type trans_t" << endl;
|
|
|
|
- exit(1);
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- /**
|
|
|
|
- * Create copies of the data structures so that we can dump the tables
|
|
|
|
- * using the generic write_flex_table() routine.
|
|
|
|
- */
|
|
|
|
- vector<uint8_t> equiv_vec;
|
|
|
|
- if (eq.size()) {
|
|
|
|
- equiv_vec.resize(256);
|
|
|
|
- for (map<uchar, uchar>::iterator i = eq.begin(); i != eq.end(); i++) {
|
|
|
|
- equiv_vec[i->first] = i->second;
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- vector<state_t> default_vec;
|
|
|
|
- vector<trans_t> base_vec;
|
|
|
|
- for (DefaultBase::iterator i = default_base.begin();
|
|
|
|
- i != default_base.end();
|
|
|
|
- i++) {
|
|
|
|
- default_vec.push_back(num[i->first]);
|
|
|
|
- base_vec.push_back(i->second);
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- vector<state_t> next_vec;
|
|
|
|
- vector<state_t> check_vec;
|
|
|
|
- for (NextCheck::iterator i = next_check.begin();
|
|
|
|
- i != next_check.end();
|
|
|
|
- i++) {
|
|
|
|
- next_vec.push_back(num[i->first]);
|
|
|
|
- check_vec.push_back(num[i->second]);
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- /* Write the actual flex parser table. */
|
|
|
|
-
|
|
|
|
- size_t hsize = pad64(sizeof(th) + sizeof(th_version) + strlen(name) + 1);
|
|
|
|
- th.th_magic = htonl(YYTH_REGEXP_MAGIC);
|
|
|
|
- th.th_hsize = htonl(hsize);
|
|
|
|
- th.th_ssize = htonl(hsize +
|
|
|
|
- flex_table_size(accept.begin(), accept.end()) +
|
|
|
|
- flex_table_size(accept2.begin(), accept2.end()) +
|
|
|
|
- (eq.size() ?
|
|
|
|
- flex_table_size(equiv_vec.begin(), equiv_vec.end()) : 0) +
|
|
|
|
- flex_table_size(base_vec.begin(), base_vec.end()) +
|
|
|
|
- flex_table_size(default_vec.begin(), default_vec.end()) +
|
|
|
|
- flex_table_size(next_vec.begin(), next_vec.end()) +
|
|
|
|
- flex_table_size(check_vec.begin(), check_vec.end()));
|
|
|
|
- os.write((char *)&th, sizeof(th));
|
|
|
|
- os << th_version << (char)0 << name << (char)0;
|
|
|
|
- os << fill64(sizeof(th) + sizeof(th_version) + strlen(name) + 1);
|
|
|
|
-
|
|
|
|
-
|
|
|
|
- write_flex_table(os, YYTD_ID_ACCEPT, accept.begin(), accept.end());
|
|
|
|
- write_flex_table(os, YYTD_ID_ACCEPT2, accept2.begin(), accept2.end());
|
|
|
|
- if (eq.size())
|
|
|
|
- write_flex_table(os, YYTD_ID_EC, equiv_vec.begin(), equiv_vec.end());
|
|
|
|
- write_flex_table(os, YYTD_ID_BASE, base_vec.begin(), base_vec.end());
|
|
|
|
- write_flex_table(os, YYTD_ID_DEF, default_vec.begin(), default_vec.end());
|
|
|
|
- write_flex_table(os, YYTD_ID_NXT, next_vec.begin(), next_vec.end());
|
|
|
|
- write_flex_table(os, YYTD_ID_CHK, check_vec.begin(), check_vec.end());
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-#if 0
|
|
|
|
-typedef set<ImportantNode *> AcceptNodes;
|
|
|
|
-map<ImportantNode *, AcceptNodes> dominance(DFA& dfa)
|
|
|
|
-{
|
|
|
|
- map<ImportantNode *, AcceptNodes> is_dominated;
|
|
|
|
-
|
|
|
|
- for (States::iterator i = dfa.states.begin(); i != dfa.states.end(); i++) {
|
|
|
|
- AcceptNodes set1;
|
|
|
|
- for (State::iterator j = (*i)->begin(); j != (*i)->end(); j++) {
|
|
|
|
- if (AcceptNode *accept = dynamic_cast<AcceptNode *>(*j))
|
|
|
|
- set1.insert(accept);
|
|
|
|
- }
|
|
|
|
- for (AcceptNodes::iterator j = set1.begin(); j != set1.end(); j++) {
|
|
|
|
- pair<map<ImportantNode *, AcceptNodes>::iterator, bool> x =
|
|
|
|
- is_dominated.insert(make_pair(*j, set1));
|
|
|
|
- if (!x.second) {
|
|
|
|
- AcceptNodes &set2(x.first->second), set3;
|
|
|
|
- for (AcceptNodes::iterator l = set2.begin();
|
|
|
|
- l != set2.end();
|
|
|
|
- l++) {
|
|
|
|
- if (set1.find(*l) != set1.end())
|
|
|
|
- set3.insert(*l);
|
|
|
|
- }
|
|
|
|
- set3.swap(set2);
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- return is_dominated;
|
|
|
|
-}
|
|
|
|
-#endif
|
|
|
|
-
|
|
|
|
-void dump_regexp_rec(ostream& os, Node *tree)
|
|
|
|
-{
|
|
|
|
- if (tree->child[0])
|
|
|
|
- dump_regexp_rec(os, tree->child[0]);
|
|
|
|
- os << *tree;
|
|
|
|
- if (tree->child[1])
|
|
|
|
- dump_regexp_rec(os, tree->child[1]);
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-void dump_regexp(ostream& os, Node *tree)
|
|
|
|
-{
|
|
|
|
- dump_regexp_rec(os, tree);
|
|
|
|
- os << endl;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-#include <sstream>
|
|
|
|
-#include <ext/stdio_filebuf.h>
|
|
|
|
-
|
|
|
|
-struct aare_ruleset {
|
|
|
|
- int reverse;
|
|
|
|
- Node *root;
|
|
|
|
-};
|
|
|
|
-
|
|
|
|
-extern "C" aare_ruleset_t *aare_new_ruleset(int reverse)
|
|
|
|
-{
|
|
|
|
- aare_ruleset_t *container = (aare_ruleset_t *) malloc(sizeof(aare_ruleset_t));
|
|
|
|
- if (!container)
|
|
|
|
- return NULL;
|
|
|
|
-
|
|
|
|
- container->root = NULL;
|
|
|
|
- container->reverse = reverse;
|
|
|
|
-
|
|
|
|
- return container;
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-extern "C" void aare_delete_ruleset(aare_ruleset_t *rules)
|
|
|
|
-{
|
|
|
|
- if (rules) {
|
|
|
|
- if (rules->root)
|
|
|
|
- rules->root->release();
|
|
|
|
- free(rules);
|
|
|
|
- }
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-static inline int diff_qualifiers(uint32_t perm1, uint32_t perm2)
|
|
|
|
-{
|
|
|
|
- return ((perm1 & AA_EXEC_TYPE) && (perm2 & AA_EXEC_TYPE) &&
|
|
|
|
- (perm1 & AA_EXEC_TYPE) != (perm2 & AA_EXEC_TYPE));
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-/**
|
|
|
|
- * Compute the permission flags that this state corresponds to. If we
|
|
|
|
- * have any exact matches, then they override the execute and safe
|
|
|
|
- * execute flags.
|
|
|
|
- */
|
2011-03-25 09:04:51 +01:00
|
|
|
-uint32_t accept_perms(NodeSet *state, uint32_t *audit_ctl, int *error)
|
2011-01-17 17:43:05 +01:00
|
|
|
-{
|
|
|
|
- uint32_t perms = 0, exact_match_perms = 0, audit = 0, exact_audit = 0,
|
|
|
|
- quiet = 0, deny = 0;
|
|
|
|
-
|
|
|
|
- if (error)
|
|
|
|
- *error = 0;
|
2011-03-25 09:04:51 +01:00
|
|
|
- for (NodeSet::iterator i = state->begin(); i != state->end(); i++) {
|
2011-01-17 17:43:05 +01:00
|
|
|
- MatchFlag *match;
|
|
|
|
- if (!(match= dynamic_cast<MatchFlag *>(*i)))
|
|
|
|
- continue;
|
|
|
|
- if (dynamic_cast<ExactMatchFlag *>(match)) {
|
|
|
|
- /* exact match only ever happens with x */
|
|
|
|
- if (!is_merged_x_consistent(exact_match_perms,
|
|
|
|
- match->flag) && error)
|
|
|
|
- *error = 1;;
|
|
|
|
- exact_match_perms |= match->flag;
|
|
|
|
- exact_audit |= match->audit;
|
|
|
|
- } else if (dynamic_cast<DenyMatchFlag *>(match)) {
|
|
|
|
- deny |= match->flag;
|
|
|
|
- quiet |= match->audit;
|
|
|
|
- } else {
|
|
|
|
- if (!is_merged_x_consistent(perms, match->flag) && error)
|
|
|
|
- *error = 1;
|
|
|
|
- perms |= match->flag;
|
|
|
|
- audit |= match->audit;
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
-//if (audit || quiet)
|
|
|
|
-//fprintf(stderr, "perms: 0x%x, audit: 0x%x exact: 0x%x eaud: 0x%x deny: 0x%x quiet: 0x%x\n", perms, audit, exact_match_perms, exact_audit, deny, quiet);
|
|
|
|
-
|
|
|
|
- perms |= exact_match_perms &
|
|
|
|
- ~(AA_USER_EXEC_TYPE | AA_OTHER_EXEC_TYPE);
|
|
|
|
-
|
|
|
|
- if (exact_match_perms & AA_USER_EXEC_TYPE) {
|
|
|
|
- perms = (exact_match_perms & AA_USER_EXEC_TYPE) |
|
|
|
|
- (perms & ~AA_USER_EXEC_TYPE);
|
|
|
|
- audit = (exact_audit & AA_USER_EXEC_TYPE) |
|
|
|
|
- (audit & ~ AA_USER_EXEC_TYPE);
|
|
|
|
- }
|
|
|
|
- if (exact_match_perms & AA_OTHER_EXEC_TYPE) {
|
|
|
|
- perms = (exact_match_perms & AA_OTHER_EXEC_TYPE) |
|
|
|
|
- (perms & ~AA_OTHER_EXEC_TYPE);
|
|
|
|
- audit = (exact_audit & AA_OTHER_EXEC_TYPE) |
|
|
|
|
- (audit & ~AA_OTHER_EXEC_TYPE);
|
|
|
|
- }
|
|
|
|
- if (perms & AA_USER_EXEC & deny)
|
|
|
|
- perms &= ~AA_USER_EXEC_TYPE;
|
|
|
|
-
|
|
|
|
- if (perms & AA_OTHER_EXEC & deny)
|
|
|
|
- perms &= ~AA_OTHER_EXEC_TYPE;
|
|
|
|
-
|
|
|
|
- perms &= ~deny;
|
|
|
|
-
|
|
|
|
- if (audit_ctl)
|
|
|
|
- *audit_ctl = PACK_AUDIT_CTL(audit, quiet & deny);
|
|
|
|
-
|
|
|
|
-// if (perms & AA_ERROR_BIT) {
|
|
|
|
-// fprintf(stderr, "error bit 0x%x\n", perms);
|
|
|
|
-// exit(255);
|
|
|
|
-//}
|
|
|
|
-
|
|
|
|
- //if (perms & AA_EXEC_BITS)
|
|
|
|
- //fprintf(stderr, "accept perm: 0x%x\n", perms);
|
|
|
|
- /*
|
|
|
|
- if (perms & ~AA_VALID_PERMS)
|
|
|
|
- yyerror(_("Internal error accumulated invalid perm 0x%llx\n"), perms);
|
|
|
|
- */
|
|
|
|
-
|
|
|
|
-//if (perms & AA_CHANGE_HAT)
|
|
|
|
-// fprintf(stderr, "change_hat 0x%x\n", perms);
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (*error)
|
|
|
|
- fprintf(stderr, "profile has merged rule with conflicting x modifiers\n");
|
|
|
|
-
|
2011-01-17 17:43:05 +01:00
|
|
|
- return perms;
|
|
|
|
-}
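
A sketch of the override rule accept_perms() applies, using hypothetical bitmask values rather than the real AA_* constants: exec-type bits gathered from exact-match rules replace the exec-type bits gathered from wildcard rules, while the remaining bits still accumulate.

    #include <cassert>
    #include <cstdint>

    int main()
    {
        const uint32_t EXEC_TYPE = 0x0f;   /* hypothetical exec-type field */
        uint32_t perms = 0x13;             /* accumulated from wildcard rules */
        uint32_t exact = 0x05;             /* accumulated from exact-match rules */

        /* non-exec bits still accumulate */
        perms |= exact & ~EXEC_TYPE;
        /* but exec-type bits from exact matches win outright */
        if (exact & EXEC_TYPE)
            perms = (exact & EXEC_TYPE) | (perms & ~EXEC_TYPE);

        assert(perms == 0x15);             /* 0x10 kept, exec field replaced by 0x05 */
        return 0;
    }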
|
|
|
|
-
|
|
|
|
-extern "C" int aare_add_rule(aare_ruleset_t *rules, char *rule, int deny,
|
2011-03-25 09:04:51 +01:00
|
|
|
- uint32_t perms, uint32_t audit, dfaflags_t flags)
|
2011-01-17 17:43:05 +01:00
|
|
|
-{
|
2011-03-25 09:04:51 +01:00
|
|
|
- return aare_add_rule_vec(rules, deny, perms, audit, 1, &rule, flags);
|
2011-01-17 17:43:05 +01:00
|
|
|
-}
|
|
|
|
-
|
|
|
|
-#define FLAGS_WIDTH 2
|
|
|
|
-#define MATCH_FLAGS_SIZE (sizeof(uint32_t) * 8 - 1)
|
|
|
|
-MatchFlag *match_flags[FLAGS_WIDTH][MATCH_FLAGS_SIZE];
|
|
|
|
-DenyMatchFlag *deny_flags[FLAGS_WIDTH][MATCH_FLAGS_SIZE];
|
2011-01-25 13:16:44 +01:00
|
|
|
-#define EXEC_MATCH_FLAGS_SIZE (AA_EXEC_COUNT *2 * 2 * 2) /* double for each of ix pux, unsafe x bits * u::o */
|
|
|
|
-MatchFlag *exec_match_flags[FLAGS_WIDTH][EXEC_MATCH_FLAGS_SIZE]; /* mods + unsafe + ix + pux * u::o*/
|
|
|
|
-ExactMatchFlag *exact_match_flags[FLAGS_WIDTH][EXEC_MATCH_FLAGS_SIZE];/* mods + unsafe + ix + pux *u::o*/
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
|
|
|
-extern "C" void aare_reset_matchflags(void)
|
|
|
|
-{
|
|
|
|
- uint32_t i, j;
|
|
|
|
-#define RESET_FLAGS(group, size) { \
|
|
|
|
- for (i = 0; i < FLAGS_WIDTH; i++) { \
|
|
|
|
- for (j = 0; j < size; j++) { \
|
2011-03-25 09:04:51 +01:00
|
|
|
- if ((group)[i][j]) delete (group)[i][j]; \
|
2011-01-17 17:43:05 +01:00
|
|
|
- (group)[i][j] = NULL; \
|
|
|
|
- } \
|
|
|
|
- } \
|
|
|
|
-}
|
|
|
|
- RESET_FLAGS(match_flags,MATCH_FLAGS_SIZE);
|
|
|
|
- RESET_FLAGS(deny_flags,MATCH_FLAGS_SIZE);
|
|
|
|
- RESET_FLAGS(exec_match_flags,EXEC_MATCH_FLAGS_SIZE);
|
|
|
|
- RESET_FLAGS(exact_match_flags,EXEC_MATCH_FLAGS_SIZE);
|
|
|
|
-#undef RESET_FLAGS
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-extern "C" int aare_add_rule_vec(aare_ruleset_t *rules, int deny,
|
|
|
|
- uint32_t perms, uint32_t audit,
|
2011-03-25 09:04:51 +01:00
|
|
|
- int count, char **rulev,
|
|
|
|
- dfaflags_t flags)
|
2011-01-17 17:43:05 +01:00
|
|
|
-{
|
|
|
|
- Node *tree = NULL, *accept;
|
|
|
|
- int exact_match;
|
|
|
|
-
|
|
|
|
- assert(perms != 0);
|
|
|
|
-
|
|
|
|
- if (regexp_parse(&tree, rulev[0]))
|
|
|
|
- return 0;
|
|
|
|
- for (int i = 1; i < count; i++) {
|
|
|
|
- Node *subtree = NULL;
|
|
|
|
- Node *node = new CharNode(0);
|
|
|
|
- if (!node)
|
|
|
|
- return 0;
|
|
|
|
- tree = new CatNode(tree, node);
|
|
|
|
- if (regexp_parse(&subtree, rulev[i]))
|
|
|
|
- return 0;
|
|
|
|
- tree = new CatNode(tree, subtree);
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- /*
|
|
|
|
- * Check if we have an expression with or without wildcards. This
|
|
|
|
- * determines how exec modifiers are merged in accept_perms() based
|
|
|
|
- * on how we split permission bitmasks here.
|
|
|
|
- */
|
|
|
|
- exact_match = 1;
|
|
|
|
- for (depth_first_traversal i(tree); i; i++) {
|
|
|
|
- if (dynamic_cast<StarNode *>(*i) ||
|
|
|
|
- dynamic_cast<PlusNode *>(*i) ||
|
|
|
|
- dynamic_cast<AnyCharNode *>(*i) ||
|
|
|
|
- dynamic_cast<CharSetNode *>(*i) ||
|
|
|
|
- dynamic_cast<NotCharSetNode *>(*i))
|
|
|
|
- exact_match = 0;
|
|
|
|
- }
|
|
|
|
-
|
|
|
|
- if (rules->reverse)
|
|
|
|
- flip_tree(tree);
|
|
|
|
-
|
|
|
|
-
|
2011-01-25 13:16:44 +01:00
|
|
|
-/* 0x7f == 4 bits x mods + 1 bit unsafe mask + 1 bit ix, + 1 pux after shift */
|
|
|
|
-#define EXTRACT_X_INDEX(perm, shift) (((perm) >> (shift + 7)) & 0x7f)
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
|
|
|
-//if (perms & ALL_AA_EXEC_TYPE && (!perms & AA_EXEC_BITS))
|
|
|
|
-// fprintf(stderr, "adding X rule without MAY_EXEC: 0x%x %s\n", perms, rulev[0]);
|
|
|
|
-
|
|
|
|
-//if (perms & ALL_EXEC_TYPE)
|
|
|
|
-// fprintf(stderr, "adding X rule %s 0x%x\n", rulev[0], perms);
|
|
|
|
-
|
|
|
|
-//if (audit)
|
|
|
|
-//fprintf(stderr, "adding rule with audit bits set: 0x%x %s\n", audit, rulev[0]);
|
|
|
|
-
|
|
|
|
-//if (perms & AA_CHANGE_HAT)
|
|
|
|
-// fprintf(stderr, "adding change_hat rule %s\n", rulev[0]);
|
|
|
|
-
|
|
|
|
-/* the permissions set is assumed to be non-empty if any audit
|
|
|
|
- * bits are specified */
|
|
|
|
- accept = NULL;
|
|
|
|
- for (unsigned int n = 0; perms && n < (sizeof(perms) * 8) ; n++) {
|
|
|
|
- uint32_t mask = 1 << n;
|
|
|
|
-
|
|
|
|
- if (perms & mask) {
|
|
|
|
- int ai = audit & mask ? 1 : 0;
|
|
|
|
- perms &= ~mask;
|
|
|
|
-
|
|
|
|
- Node *flag;
|
|
|
|
- if (mask & ALL_AA_EXEC_TYPE)
|
|
|
|
- /* these cases are covered by EXEC_BITS */
|
|
|
|
- continue;
|
|
|
|
- if (deny) {
|
|
|
|
- if (deny_flags[ai][n]) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- flag = deny_flags[ai][n];
|
2011-01-17 17:43:05 +01:00
|
|
|
- } else {
|
|
|
|
-//fprintf(stderr, "Adding deny ai %d mask 0x%x audit 0x%x\n", ai, mask, audit & mask);
|
|
|
|
- deny_flags[ai][n] = new DenyMatchFlag(mask, audit&mask);
|
2011-03-25 09:04:51 +01:00
|
|
|
- flag = deny_flags[ai][n];
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- } else if (mask & AA_EXEC_BITS) {
|
|
|
|
- uint32_t eperm = 0;
|
|
|
|
- uint32_t index = 0;
|
|
|
|
- if (mask & AA_USER_EXEC) {
|
|
|
|
- eperm = mask | (perms & AA_USER_EXEC_TYPE);
|
|
|
|
- index = EXTRACT_X_INDEX(eperm, AA_USER_SHIFT);
|
|
|
|
- } else {
|
|
|
|
- eperm = mask | (perms & AA_OTHER_EXEC_TYPE);
|
|
|
|
- index = EXTRACT_X_INDEX(eperm, AA_OTHER_SHIFT) + (AA_EXEC_COUNT << 2);
|
|
|
|
- }
|
|
|
|
-//fprintf(stderr, "index %d eperm 0x%x\n", index, eperm);
|
|
|
|
- if (exact_match) {
|
|
|
|
- if (exact_match_flags[ai][index]) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- flag = exact_match_flags[ai][index];
|
2011-01-17 17:43:05 +01:00
|
|
|
- } else {
|
|
|
|
- exact_match_flags[ai][index] = new ExactMatchFlag(eperm, audit&mask);
|
2011-03-25 09:04:51 +01:00
|
|
|
- flag = exact_match_flags[ai][index];
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- } else {
|
|
|
|
- if (exec_match_flags[ai][index]) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- flag = exec_match_flags[ai][index];
|
2011-01-17 17:43:05 +01:00
|
|
|
- } else {
|
|
|
|
- exec_match_flags[ai][index] = new MatchFlag(eperm, audit&mask);
|
2011-03-25 09:04:51 +01:00
|
|
|
- flag = exec_match_flags[ai][index];
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- } else {
|
|
|
|
- if (match_flags[ai][n]) {
|
2011-03-25 09:04:51 +01:00
|
|
|
- flag = match_flags[ai][n];
|
2011-01-17 17:43:05 +01:00
|
|
|
- } else {
|
|
|
|
- match_flags[ai][n] = new MatchFlag(mask, audit&mask);
|
2011-03-25 09:04:51 +01:00
|
|
|
- flag = match_flags[ai][n];
|
2011-01-17 17:43:05 +01:00
|
|
|
- }
|
|
|
|
- }
|
|
|
|
- if (accept)
|
|
|
|
- accept = new AltNode(accept, flag);
|
|
|
|
- else
|
|
|
|
- accept = flag;
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (flags & DFA_DUMP_RULE_EXPR) {
|
|
|
|
- cerr << "rule: ";
|
|
|
|
- cerr << rulev[0];
|
|
|
|
- for (int i = 1; i < count; i++) {
|
|
|
|
- cerr << "\\x00";
|
|
|
|
- cerr << rulev[i];
|
|
|
|
- }
|
|
|
|
- cerr << " -> ";
|
|
|
|
- tree->dump(cerr);
|
|
|
|
- cerr << "\n\n";
|
|
|
|
- }
|
|
|
|
-
|
2011-01-17 17:43:05 +01:00
|
|
|
- if (rules->root)
|
|
|
|
- rules->root = new AltNode(rules->root, new CatNode(tree, accept));
|
|
|
|
- else
|
|
|
|
- rules->root = new CatNode(tree, accept);
|
|
|
|
-
|
|
|
|
- return 1;
|
|
|
|
-
|
|
|
|
-}
|
|
|
|
-
|
|
|
|
-/* create a dfa from the ruleset
|
|
|
|
- * returns: buffer contain dfa tables, @size set to the size of the tables
|
|
|
|
- * else NULL on failure
|
|
|
|
- */
|
|
|
|
-extern "C" void *aare_create_dfa(aare_ruleset_t *rules, size_t *size, dfaflags_t flags)
|
|
|
|
-{
|
|
|
|
- char *buffer = NULL;
|
|
|
|
-
|
|
|
|
- label_nodes(rules->root);
|
|
|
|
- if (flags & DFA_DUMP_TREE) {
|
|
|
|
- cerr << "\nDFA: Expression Tree\n";
|
|
|
|
- rules->root->dump(cerr);
|
|
|
|
- cerr << "\n\n";
|
|
|
|
- }
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (flags & DFA_CONTROL_TREE_SIMPLE) {
|
2011-01-17 17:43:05 +01:00
|
|
|
- rules->root = simplify_tree(rules->root, flags);
|
|
|
|
-
|
|
|
|
- if (flags & DFA_DUMP_SIMPLE_TREE) {
|
|
|
|
- cerr << "\nDFA: Simplified Expression Tree\n";
|
|
|
|
- rules->root->dump(cerr);
|
|
|
|
- cerr << "\n\n";
|
|
|
|
- }
|
|
|
|
- }
|
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- stringstream stream;
|
|
|
|
- try {
|
|
|
|
- DFA dfa(rules->root, flags);
|
|
|
|
- if (flags & DFA_DUMP_UNIQ_PERMS)
|
|
|
|
- dfa.dump_uniq_perms("dfa");
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (flags & DFA_CONTROL_MINIMIZE) {
|
|
|
|
- dfa.minimize(flags);
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (flags & DFA_DUMP_MIN_UNIQ_PERMS)
|
|
|
|
- dfa.dump_uniq_perms("minimized dfa");
|
|
|
|
- }
|
|
|
|
- if (flags & DFA_CONTROL_REMOVE_UNREACHABLE)
|
|
|
|
- dfa.remove_unreachable(flags);
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (flags & DFA_DUMP_STATES)
|
|
|
|
- dfa.dump(cerr);
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (flags & DFA_DUMP_GRAPH)
|
|
|
|
- dfa.dump_dot_graph(cerr);
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- map<uchar, uchar> eq;
|
|
|
|
- if (flags & DFA_CONTROL_EQUIV) {
|
|
|
|
- eq = dfa.equivalence_classes(flags);
|
|
|
|
- dfa.apply_equivalence_classes(eq);
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
2011-03-25 09:04:51 +01:00
|
|
|
- if (flags & DFA_DUMP_EQUIV) {
|
|
|
|
- cerr << "\nDFA equivalence class\n";
|
|
|
|
- dump_equivalence_classes(cerr, eq);
|
|
|
|
- }
|
|
|
|
- } else if (flags & DFA_DUMP_EQUIV)
|
|
|
|
- cerr << "\nDFA did not generate an equivalence class\n";
|
|
|
|
-
|
|
|
|
- TransitionTable transition_table(dfa, eq, flags);
|
|
|
|
- if (flags & DFA_DUMP_TRANS_TABLE)
|
|
|
|
- transition_table.dump(cerr);
|
|
|
|
- transition_table.flex_table(stream, "");
|
|
|
|
- } catch (int error) {
|
|
|
|
- *size = 0;
|
|
|
|
- return NULL;
|
|
|
|
- }
|
2011-01-17 17:43:05 +01:00
|
|
|
-
|
|
|
|
- stringbuf *buf = stream.rdbuf();
|
|
|
|
-
|
|
|
|
- buf->pubseekpos(0);
|
|
|
|
- *size = buf->in_avail();
|
|
|
|
-
|
|
|
|
- buffer = (char *)malloc(*size);
|
|
|
|
- if (!buffer)
|
|
|
|
- return NULL;
|
|
|
|
- buf->sgetn(buffer, *size);
|
|
|
|
- return buffer;
|
|
|
|
-}
|
|
|
|
--- /dev/null
|
|
|
|
+++ b/parser/libapparmor_re/regexp.yy
|
2011-03-25 09:04:51 +01:00
|
|
|
@@ -0,0 +1,3082 @@
|
2011-01-17 17:43:05 +01:00
|
|
|
+/*
|
|
|
|
+ * regexp.y -- Regular Expression Matcher Generator
|
|
|
|
+ * (C) 2006, 2007 Andreas Gruenbacher <agruen@suse.de>
|
|
|
|
+ *
|
|
|
|
+ * Implementation based on the Lexical Analysis chapter of:
|
|
|
|
+ * Alfred V. Aho, Ravi Sethi, Jeffrey D. Ullman:
|
|
|
|
+ * Compilers: Principles, Techniques, and Tools (The "Dragon Book"),
|
|
|
|
+ * Addison-Wesley, 1986.
|
|
|
|
+ *
|
|
|
|
+ * This program is free software; you can redistribute it and/or modify
|
|
|
|
+ * it under the terms of the GNU General Public License version 2 as
|
|
|
|
+ * published by the Free Software Foundation.
|
|
|
|
+ *
|
|
|
|
+ * See http://www.gnu.org for more details.
|
|
|
|
+ */
|
|
|
|
+
|
|
|
|
+%{
|
|
|
|
+ /* #define DEBUG_TREE */
|
|
|
|
+
|
|
|
|
+ #include <list>
|
|
|
|
+ #include <vector>
|
2011-03-25 09:04:51 +01:00
|
|
|
+ #include <stack>
|
2011-01-17 17:43:05 +01:00
|
|
|
+ #include <set>
|
|
|
|
+ #include <map>
|
|
|
|
+ #include <ostream>
|
|
|
|
+ #include <iostream>
|
|
|
|
+ #include <fstream>
|
|
|
|
+
|
|
|
|
+ using namespace std;
|
|
|
|
+
|
|
|
|
+ typedef unsigned char uchar;
|
|
|
|
+ typedef set<uchar> Chars;
|
|
|
|
+
|
|
|
|
+ ostream& operator<<(ostream& os, uchar c);
|
|
|
|
+
|
|
|
|
+ /* Compute the union of two sets. */
|
|
|
|
+ template<class T>
|
|
|
|
+ set<T> operator+(const set<T>& a, const set<T>& b)
|
|
|
|
+ {
|
|
|
|
+ set<T> c(a);
|
|
|
|
+ c.insert(b.begin(), b.end());
|
|
|
|
+ return c;
|
|
|
|
+ }
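
A usage sketch of the set-union operator defined above (restated here so the snippet compiles on its own); this is how firstpos/lastpos sets are merged by the node classes further down.

    #include <cassert>
    #include <set>

    template<class T>
    std::set<T> operator+(const std::set<T>& a, const std::set<T>& b)
    {
        std::set<T> c(a);
        c.insert(b.begin(), b.end());
        return c;
    }

    int main()
    {
        std::set<int> a, b;
        a.insert(1); a.insert(2);
        b.insert(2); b.insert(3);
        std::set<int> c = a + b;      /* union: duplicates collapse */
        assert(c.size() == 3);
        return 0;
    }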
|
|
|
|
+
|
|
|
|
+ /**
|
2011-03-25 09:04:51 +01:00
|
|
|
+ * When creating DFAs from regex trees, a DFA state is constructed from
|
|
|
|
+ * a set of important nodes in the syntax tree. This includes AcceptNodes,
|
|
|
|
+ * which indicate that when a match ends in a particular state, the
|
|
|
|
+ * regular expressions that the AcceptNode belongs to match.
|
2011-01-17 17:43:05 +01:00
|
|
|
+ */
|
|
|
|
+ class ImportantNode;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ typedef set <ImportantNode *> NodeSet;
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
|
|
|
+ /**
|
2011-03-25 09:04:51 +01:00
|
|
|
+ * Out-edges from one state to another: we store the follow-set of Nodes
|
2011-01-17 17:43:05 +01:00
|
|
|
+ * for each input character that is not a default match in
|
|
|
|
+ * cases (i.e., following a CharNode or CharSetNode), and default
|
|
|
|
+ * matches in otherwise as well as in all matching explicit cases
|
|
|
|
+ * (i.e., following an AnyCharNode or NotCharSetNode). This avoids
|
|
|
|
+ * enumerating all the explicit transitions for default matches.
|
|
|
|
+ */
|
2011-03-25 09:04:51 +01:00
|
|
|
+ typedef struct NodeCases {
|
|
|
|
+ typedef map<uchar, NodeSet *>::iterator iterator;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ iterator begin() { return cases.begin(); }
|
|
|
|
+ iterator end() { return cases.end(); }
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ NodeCases() : otherwise(0) { }
|
|
|
|
+ map<uchar, NodeSet *> cases;
|
|
|
|
+ NodeSet *otherwise;
|
|
|
|
+ } NodeCases;
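
A simplified sketch (plain STL types, not the NodeCases/NodeSet classes above) of the invariant the follow() methods below maintain: each explicit per-character case starts from a copy of `otherwise`, and a default match is also folded into every explicit case already present.

    #include <cassert>
    #include <map>
    #include <set>

    typedef std::set<int> Follow;                 /* stand-in for NodeSet */

    int main()
    {
        std::map<unsigned char, Follow> cases;    /* explicit transitions */
        Follow otherwise;                         /* default transition */

        /* a CharNode 'a' with follow-set {1}: new case seeded from otherwise */
        cases['a'] = otherwise;
        cases['a'].insert(1);

        /* an AnyCharNode with follow-set {2}: added to otherwise and to
         * every explicit case already present */
        otherwise.insert(2);
        for (std::map<unsigned char, Follow>::iterator i = cases.begin();
             i != cases.end(); ++i)
            i->second.insert(2);

        assert(cases['a'].size() == 2);           /* {1, 2} */
        assert(otherwise.size() == 1);            /* {2} */
        return 0;
    }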
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
|
|
|
+
|
|
|
|
+ /* An abstract node in the syntax tree. */
|
|
|
|
+ class Node {
|
|
|
|
+ public:
|
|
|
|
+ Node() :
|
2011-03-25 09:04:51 +01:00
|
|
|
+ nullable(false) { child[0] = child[1] = 0; }
|
2011-01-17 17:43:05 +01:00
|
|
|
+ Node(Node *left) :
|
2011-03-25 09:04:51 +01:00
|
|
|
+ nullable(false) { child[0] = left; child[1] = 0; }
|
2011-01-17 17:43:05 +01:00
|
|
|
+ Node(Node *left, Node *right) :
|
2011-03-25 09:04:51 +01:00
|
|
|
+ nullable(false) { child[0] = left; child[1] = right; }
|
2011-01-17 17:43:05 +01:00
|
|
|
+ virtual ~Node()
|
|
|
|
+ {
|
|
|
|
+ if (child[0])
|
|
|
|
+ child[0]->release();
|
|
|
|
+ if (child[1])
|
|
|
|
+ child[1]->release();
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ /**
|
|
|
|
+ * See the "Dragon Book" for an explanation of nullable, firstpos,
|
|
|
|
+ * lastpos, and followpos.
|
|
|
|
+ */
|
|
|
|
+ virtual void compute_nullable() { }
|
|
|
|
+ virtual void compute_firstpos() = 0;
|
|
|
|
+ virtual void compute_lastpos() = 0;
|
|
|
|
+ virtual void compute_followpos() { }
|
|
|
|
+ virtual int eq(Node *other) = 0;
|
|
|
|
+ virtual ostream& dump(ostream& os) = 0;
|
|
|
|
+
|
|
|
|
+ bool nullable;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ NodeSet firstpos, lastpos, followpos;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ /* child 0 is left, child 1 is right */
|
|
|
|
+ Node *child[2];
|
|
|
|
+
|
|
|
|
+ unsigned int label; /* unique number for debug etc */
|
|
|
|
+ /**
|
2011-03-25 09:04:51 +01:00
|
|
|
+ * We indirectly release Nodes through a virtual function because
|
|
|
|
+ * accept and Eps Nodes are shared, and must be treated specially.
|
|
|
|
+ * We could use full reference counting here but the indirect release
|
|
|
|
+ * is sufficient and has less overhead
|
2011-01-17 17:43:05 +01:00
|
|
|
+ */
|
2011-03-25 09:04:51 +01:00
|
|
|
+ virtual void release(void) {
|
|
|
|
+ delete this;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ };
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ class InnerNode : public Node {
|
|
|
|
+ public:
|
|
|
|
+ InnerNode() : Node() { };
|
|
|
|
+ InnerNode(Node *left) : Node(left) {};
|
|
|
|
+ InnerNode(Node *left, Node *right) : Node(left, right) { };
|
|
|
|
+ };
|
|
|
|
+
|
|
|
|
+ class OneChildNode : public InnerNode {
|
|
|
|
+ public:
|
|
|
|
+ OneChildNode(Node *left) : InnerNode(left) { };
|
|
|
|
+ };
|
|
|
|
+
|
|
|
|
+ class TwoChildNode : public InnerNode {
|
|
|
|
+ public:
|
|
|
|
+ TwoChildNode(Node *left, Node *right) : InnerNode(left, right) { };
|
|
|
|
+ };
|
|
|
|
+
|
|
|
|
+ class LeafNode : public Node {
|
|
|
|
+ public:
|
|
|
|
+ LeafNode() : Node() { };
|
|
|
|
+
|
|
|
|
+ };
|
|
|
|
+
|
2011-01-17 17:43:05 +01:00
|
|
|
+ /* Match nothing (//). */
|
2011-03-25 09:04:51 +01:00
|
|
|
+ class EpsNode : public LeafNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ public:
|
2011-03-25 09:04:51 +01:00
|
|
|
+ EpsNode() : LeafNode()
|
2011-01-17 17:43:05 +01:00
|
|
|
+ {
|
|
|
|
+ nullable = true;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ label = 0;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
2011-03-25 09:04:51 +01:00
|
|
|
+ void release(void)
|
|
|
|
+ {
|
|
|
|
+ /* don't delete Eps nodes because there is a single static instance
|
|
|
|
+ * shared by all trees. Look for epsnode in the code
|
|
|
|
+ */
|
|
|
|
+ }
|
|
|
|
+
|
2011-01-17 17:43:05 +01:00
|
|
|
+ void compute_firstpos()
|
|
|
|
+ {
|
|
|
|
+ }
|
|
|
|
+ void compute_lastpos()
|
|
|
|
+ {
|
|
|
|
+ }
|
|
|
|
+ int eq(Node *other) {
|
|
|
|
+ if (dynamic_cast<EpsNode *>(other))
|
|
|
|
+ return 1;
|
|
|
|
+ return 0;
|
|
|
|
+ }
|
|
|
|
+ ostream& dump(ostream& os)
|
|
|
|
+ {
|
|
|
|
+ return os << "[]";
|
|
|
|
+ }
|
|
|
|
+ };
|
|
|
|
+
|
|
|
|
+ /**
|
|
|
|
+ * Leaf nodes in the syntax tree are important to us: they describe the
|
|
|
|
+ * characters that the regular expression matches. We also consider
|
|
|
|
+ * AcceptNodes important: they indicate when a regular expression matches.
|
|
|
|
+ */
|
2011-03-25 09:04:51 +01:00
|
|
|
+ class ImportantNode : public LeafNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ public:
|
2011-03-25 09:04:51 +01:00
|
|
|
+ ImportantNode() : LeafNode() { }
|
2011-01-17 17:43:05 +01:00
|
|
|
+ void compute_firstpos()
|
|
|
|
+ {
|
|
|
|
+ firstpos.insert(this);
|
|
|
|
+ }
|
|
|
|
+ void compute_lastpos() {
|
|
|
|
+ lastpos.insert(this);
|
|
|
|
+ }
|
2011-03-25 09:04:51 +01:00
|
|
|
+ virtual void follow(NodeCases& cases) = 0;
|
|
|
|
+ };
|
|
|
|
+
|
|
|
|
+ /* common base class for all the different classes that contain
|
|
|
|
+ * character information.
|
|
|
|
+ */
|
|
|
|
+ class CNode : public ImportantNode {
|
|
|
|
+ public:
|
|
|
|
+ CNode() : ImportantNode() { }
|
|
|
|
+
|
2011-01-17 17:43:05 +01:00
|
|
|
+ };
|
|
|
|
+
|
|
|
|
+ /* Match one specific character (/c/). */
|
2011-03-25 09:04:51 +01:00
|
|
|
+ class CharNode : public CNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ public:
|
|
|
|
+ CharNode(uchar c) : c(c) { }
|
2011-03-25 09:04:51 +01:00
|
|
|
+ void follow(NodeCases& cases)
|
2011-01-17 17:43:05 +01:00
|
|
|
+ {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ NodeSet **x = &cases.cases[c];
|
2011-01-17 17:43:05 +01:00
|
|
|
+ if (!*x) {
|
|
|
|
+ if (cases.otherwise)
|
2011-03-25 09:04:51 +01:00
|
|
|
+ *x = new NodeSet(*cases.otherwise);
|
2011-01-17 17:43:05 +01:00
|
|
|
+ else
|
2011-03-25 09:04:51 +01:00
|
|
|
+ *x = new NodeSet;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ (*x)->insert(followpos.begin(), followpos.end());
|
|
|
|
+ }
|
|
|
|
+ int eq(Node *other) {
|
|
|
|
+ CharNode *o = dynamic_cast<CharNode *>(other);
|
|
|
|
+ if (o) {
|
|
|
|
+ return c == o->c;
|
|
|
|
+ }
|
|
|
|
+ return 0;
|
|
|
|
+ }
|
|
|
|
+ ostream& dump(ostream& os)
|
|
|
|
+ {
|
|
|
|
+ return os << c;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ uchar c;
|
|
|
|
+ };
|
|
|
|
+
|
|
|
|
+ /* Match a set of characters (/[abc]/). */
|
2011-03-25 09:04:51 +01:00
|
|
|
+ class CharSetNode : public CNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ public:
|
|
|
|
+ CharSetNode(Chars& chars) : chars(chars) { }
|
2011-03-25 09:04:51 +01:00
|
|
|
+ void follow(NodeCases& cases)
|
2011-01-17 17:43:05 +01:00
|
|
|
+ {
|
|
|
|
+ for (Chars::iterator i = chars.begin(); i != chars.end(); i++) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ NodeSet **x = &cases.cases[*i];
|
2011-01-17 17:43:05 +01:00
|
|
|
+ if (!*x) {
|
|
|
|
+ if (cases.otherwise)
|
2011-03-25 09:04:51 +01:00
|
|
|
+ *x = new NodeSet(*cases.otherwise);
|
2011-01-17 17:43:05 +01:00
|
|
|
+ else
|
2011-03-25 09:04:51 +01:00
|
|
|
+ *x = new NodeSet;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ (*x)->insert(followpos.begin(), followpos.end());
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ int eq(Node *other) {
|
|
|
|
+ CharSetNode *o = dynamic_cast<CharSetNode *>(other);
|
|
|
|
+ if (!o || chars.size() != o->chars.size())
|
|
|
|
+ return 0;
|
|
|
|
+
|
|
|
|
+ for (Chars::iterator i = chars.begin(), j = o->chars.begin();
|
|
|
|
+ i != chars.end() && j != o->chars.end();
|
|
|
|
+ i++, j++) {
|
|
|
|
+ if (*i != *j)
|
|
|
|
+ return 0;
|
|
|
|
+ }
|
|
|
|
+ return 1;
|
|
|
|
+ }
|
|
|
|
+ ostream& dump(ostream& os)
|
|
|
|
+ {
|
|
|
|
+ os << '[';
|
|
|
|
+ for (Chars::iterator i = chars.begin(); i != chars.end(); i++)
|
|
|
|
+ os << *i;
|
|
|
|
+ return os << ']';
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ Chars chars;
|
|
|
|
+ };
|
|
|
|
+
|
|
|
|
+ /* Match all except one character (/[^abc]/). */
|
2011-03-25 09:04:51 +01:00
|
|
|
+ class NotCharSetNode : public CNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ public:
|
|
|
|
+ NotCharSetNode(Chars& chars) : chars(chars) { }
|
2011-03-25 09:04:51 +01:00
|
|
|
+ void follow(NodeCases& cases)
|
2011-01-17 17:43:05 +01:00
|
|
|
+ {
|
|
|
|
+ if (!cases.otherwise)
|
2011-03-25 09:04:51 +01:00
|
|
|
+ cases.otherwise = new NodeSet;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ for (Chars::iterator j = chars.begin(); j != chars.end(); j++) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ NodeSet **x = &cases.cases[*j];
|
2011-01-17 17:43:05 +01:00
|
|
|
+ if (!*x)
|
2011-03-25 09:04:51 +01:00
|
|
|
+ *x = new NodeSet(*cases.otherwise);
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ /**
|
|
|
|
+ * Note: Add to the nonmatching characters after copying away the
|
|
|
|
+ * old otherwise state for the matching characters.
|
|
|
|
+ */
|
|
|
|
+ cases.otherwise->insert(followpos.begin(), followpos.end());
|
2011-03-25 09:04:51 +01:00
|
|
|
+ for (NodeCases::iterator i = cases.begin(); i != cases.end(); i++) {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ if (chars.find(i->first) == chars.end())
|
|
|
|
+ i->second->insert(followpos.begin(), followpos.end());
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ int eq(Node *other) {
|
|
|
|
+ NotCharSetNode *o = dynamic_cast<NotCharSetNode *>(other);
|
|
|
|
+ if (!o || chars.size() != o->chars.size())
|
|
|
|
+ return 0;
|
|
|
|
+
|
|
|
|
+ for (Chars::iterator i = chars.begin(), j = o->chars.begin();
|
|
|
|
+ i != chars.end() && j != o->chars.end();
|
|
|
|
+ i++, j++) {
|
|
|
|
+ if (*i != *j)
|
|
|
|
+ return 0;
|
|
|
|
+ }
|
|
|
|
+ return 1;
|
|
|
|
+ }
|
|
|
|
+ ostream& dump(ostream& os)
|
|
|
|
+ {
|
|
|
|
+ os << "[^";
|
|
|
|
+ for (Chars::iterator i = chars.begin(); i != chars.end(); i++)
|
|
|
|
+ os << *i;
|
|
|
|
+ return os << ']';
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ Chars chars;
|
|
|
|
+ };
|
|
|
|
+
|
|
|
|
+ /* Match any character (/./). */
|
2011-03-25 09:04:51 +01:00
|
|
|
+ class AnyCharNode : public CNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ public:
|
|
|
|
+ AnyCharNode() { }
|
2011-03-25 09:04:51 +01:00
|
|
|
+ void follow(NodeCases& cases)
|
2011-01-17 17:43:05 +01:00
|
|
|
+ {
|
|
|
|
+ if (!cases.otherwise)
|
2011-03-25 09:04:51 +01:00
|
|
|
+ cases.otherwise = new NodeSet;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ cases.otherwise->insert(followpos.begin(), followpos.end());
|
2011-03-25 09:04:51 +01:00
|
|
|
+ for (NodeCases::iterator i = cases.begin(); i != cases.end(); i++)
|
2011-01-17 17:43:05 +01:00
|
|
|
+ i->second->insert(followpos.begin(), followpos.end());
|
|
|
|
+ }
|
|
|
|
+ int eq(Node *other) {
|
|
|
|
+ if (dynamic_cast<AnyCharNode *>(other))
|
|
|
|
+ return 1;
|
|
|
|
+ return 0;
|
|
|
|
+ }
|
|
|
|
+ ostream& dump(ostream& os) {
|
|
|
|
+ return os << ".";
|
|
|
|
+ }
|
|
|
|
+ };
|
|
|
|
+
|
|
|
|
+ /**
|
|
|
|
+ * Indicate that a regular expression matches. An AcceptNode itself
|
|
|
|
+ * doesn't match anything, so it will never generate any transitions.
|
|
|
|
+ */
|
|
|
|
+ class AcceptNode : public ImportantNode {
|
|
|
|
+ public:
|
|
|
|
+ AcceptNode() {}
|
2011-03-25 09:04:51 +01:00
|
|
|
+ void release(void)
|
2011-01-17 17:43:05 +01:00
|
|
|
+ {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ /* don't delete AcceptNodes via release as they are shared,
|
|
|
|
+ * and will be deleted when the table they are stored in is deleted
|
|
|
|
+ */
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
2011-03-25 09:04:51 +01:00
|
|
|
+
|
|
|
|
+ void follow(NodeCases& cases __attribute__((unused)))
|
2011-01-17 17:43:05 +01:00
|
|
|
+ {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ /* Nothing to follow. */
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
2011-03-25 09:04:51 +01:00
|
|
|
+ /* requires accept nodes to be shared, so they compare equal by pointer */
|
2011-01-17 17:43:05 +01:00
|
|
|
+ int eq(Node *other) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (dynamic_cast<AcceptNode *>(other))
|
|
|
|
+ return (this == other);
|
2011-01-17 17:43:05 +01:00
|
|
|
+ return 0;
|
|
|
|
+ }
|
|
|
|
+ };
|
|
|
|
+
|
|
|
|
+ /* Match a node zero or more times. (This is a unary operator.) */
|
2011-03-25 09:04:51 +01:00
|
|
|
+ class StarNode : public OneChildNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ public:
|
|
|
|
+ StarNode(Node *left) :
|
2011-03-25 09:04:51 +01:00
|
|
|
+ OneChildNode(left)
|
2011-01-17 17:43:05 +01:00
|
|
|
+ {
|
|
|
|
+ nullable = true;
|
|
|
|
+ }
|
|
|
|
+ void compute_firstpos()
|
|
|
|
+ {
|
|
|
|
+ firstpos = child[0]->firstpos;
|
|
|
|
+ }
|
|
|
|
+ void compute_lastpos()
|
|
|
|
+ {
|
|
|
|
+ lastpos = child[0]->lastpos;
|
|
|
|
+ }
|
|
|
|
+ void compute_followpos()
|
|
|
|
+ {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ NodeSet from = child[0]->lastpos, to = child[0]->firstpos;
|
|
|
|
+ for(NodeSet::iterator i = from.begin(); i != from.end(); i++) {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ (*i)->followpos.insert(to.begin(), to.end());
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ int eq(Node *other) {
|
|
|
|
+ if (dynamic_cast<StarNode *>(other))
|
|
|
|
+ return child[0]->eq(other->child[0]);
|
|
|
|
+ return 0;
|
|
|
|
+ }
|
|
|
|
+ ostream& dump(ostream& os)
|
|
|
|
+ {
|
|
|
|
+ os << '(';
|
|
|
|
+ child[0]->dump(os);
|
|
|
|
+ return os << ")*";
|
|
|
|
+ }
|
|
|
|
+ };
|
|
|
|
+
|
|
|
|
+ /* Match a node one or more times. (This is a unary operator.) */
|
2011-03-25 09:04:51 +01:00
|
|
|
+ class PlusNode : public OneChildNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ public:
|
|
|
|
+ PlusNode(Node *left) :
|
2011-03-25 09:04:51 +01:00
|
|
|
+ OneChildNode(left) { }
|
2011-01-17 17:43:05 +01:00
|
|
|
+ void compute_nullable()
|
|
|
|
+ {
|
|
|
|
+ nullable = child[0]->nullable;
|
|
|
|
+ }
|
|
|
|
+ void compute_firstpos()
|
|
|
|
+ {
|
|
|
|
+ firstpos = child[0]->firstpos;
|
|
|
|
+ }
|
|
|
|
+ void compute_lastpos()
|
|
|
|
+ {
|
|
|
|
+ lastpos = child[0]->lastpos;
|
|
|
|
+ }
|
|
|
|
+ void compute_followpos()
|
|
|
|
+ {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ NodeSet from = child[0]->lastpos, to = child[0]->firstpos;
|
|
|
|
+ for(NodeSet::iterator i = from.begin(); i != from.end(); i++) {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ (*i)->followpos.insert(to.begin(), to.end());
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ int eq(Node *other) {
|
|
|
|
+ if (dynamic_cast<PlusNode *>(other))
|
|
|
|
+ return child[0]->eq(other->child[0]);
|
|
|
|
+ return 0;
|
|
|
|
+ }
|
|
|
|
+ ostream& dump(ostream& os)
|
|
|
|
+ {
|
|
|
|
+ os << '(';
|
|
|
|
+ child[0]->dump(os);
|
|
|
|
+ return os << ")+";
|
|
|
|
+ }
|
|
|
|
+ };
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ /* Match a pair of consecutive nodes. */
|
|
|
|
+ class CatNode : public TwoChildNode {
|
|
|
|
+ public:
|
|
|
|
+ CatNode(Node *left, Node *right) :
|
|
|
|
+ TwoChildNode(left, right) { }
|
|
|
|
+ void compute_nullable()
|
|
|
|
+ {
|
|
|
|
+ nullable = child[0]->nullable && child[1]->nullable;
|
|
|
|
+ }
|
|
|
|
+ void compute_firstpos()
|
|
|
|
+ {
|
|
|
|
+ if (child[0]->nullable)
|
|
|
|
+ firstpos = child[0]->firstpos + child[1]->firstpos;
|
|
|
|
+ else
|
|
|
|
+ firstpos = child[0]->firstpos;
|
|
|
|
+ }
|
|
|
|
+ void compute_lastpos()
|
|
|
|
+ {
|
|
|
|
+ if (child[1]->nullable)
|
|
|
|
+ lastpos = child[0]->lastpos + child[1]->lastpos;
|
|
|
|
+ else
|
|
|
|
+ lastpos = child[1]->lastpos;
|
|
|
|
+ }
|
|
|
|
+ void compute_followpos()
|
|
|
|
+ {
|
|
|
|
+ NodeSet from = child[0]->lastpos, to = child[1]->firstpos;
|
|
|
|
+ for(NodeSet::iterator i = from.begin(); i != from.end(); i++) {
|
|
|
|
+ (*i)->followpos.insert(to.begin(), to.end());
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ int eq(Node *other) {
|
|
|
|
+ if (dynamic_cast<CatNode *>(other)) {
|
|
|
|
+ if (!child[0]->eq(other->child[0]))
|
|
|
|
+ return 0;
|
|
|
|
+ return child[1]->eq(other->child[1]);
|
|
|
|
+ }
|
|
|
|
+ return 0;
|
|
|
|
+ }
|
|
|
|
+ ostream& dump(ostream& os)
|
|
|
|
+ {
|
|
|
|
+ child[0]->dump(os);
|
|
|
|
+ child[1]->dump(os);
|
|
|
|
+ return os;
|
|
|
|
+ //return os << ' ';
|
|
|
|
+ }
|
|
|
|
+ };
|
|
|
|
+
|
2011-01-17 17:43:05 +01:00
|
|
|
+ /* Match one of two alternative nodes. */
|
2011-03-25 09:04:51 +01:00
|
|
|
+ class AltNode : public TwoChildNode {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ public:
|
|
|
|
+ AltNode(Node *left, Node *right) :
|
2011-03-25 09:04:51 +01:00
|
|
|
+ TwoChildNode(left, right) { }
|
2011-01-17 17:43:05 +01:00
|
|
|
+ void compute_nullable()
|
|
|
|
+ {
|
|
|
|
+ nullable = child[0]->nullable || child[1]->nullable;
|
|
|
|
+ }
|
|
|
|
+ void compute_lastpos()
|
|
|
|
+ {
|
|
|
|
+ lastpos = child[0]->lastpos + child[1]->lastpos;
|
|
|
|
+ }
|
|
|
|
+ void compute_firstpos()
|
|
|
|
+ {
|
|
|
|
+ firstpos = child[0]->firstpos + child[1]->firstpos;
|
|
|
|
+ }
|
|
|
|
+ int eq(Node *other) {
|
|
|
|
+ if (dynamic_cast<AltNode *>(other)) {
|
|
|
|
+ if (!child[0]->eq(other->child[0]))
|
|
|
|
+ return 0;
|
|
|
|
+ return child[1]->eq(other->child[1]);
|
|
|
|
+ }
|
|
|
|
+ return 0;
|
|
|
|
+ }
|
|
|
|
+ ostream& dump(ostream& os)
|
|
|
|
+ {
|
|
|
|
+ os << '(';
|
|
|
|
+ child[0]->dump(os);
|
|
|
|
+ os << '|';
|
|
|
|
+ child[1]->dump(os);
|
|
|
|
+ os << ')';
|
|
|
|
+ return os;
|
|
|
|
+ }
|
|
|
|
+ };
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+/* Use a single static EpsNode as it carries no node specific information */
|
|
|
|
+static EpsNode epsnode;
|
|
|
|
+
|
2011-01-17 17:43:05 +01:00
|
|
|
+/*
|
2011-03-25 09:04:51 +01:00
|
|
|
+ * Normalize the regex parse tree for factoring and cancellations. Normalization
|
|
|
|
+ * reorganizes internal (alt and cat) nodes into a fixed "normalized" form that
|
|
|
|
+ * simplifies factoring code, in that it produces a canonicalized form for
|
|
|
|
+ * the direction being normalized so that the factoring code does not have
|
|
|
|
+ * to consider as many cases.
|
|
|
|
+ *
|
2011-01-17 17:43:05 +01:00
|
|
|
+ * left normalization (dir == 0) uses these rules
|
|
|
|
+ * (E | a) -> (a | E)
|
|
|
|
+ * (a | b) | c -> a | (b | c)
|
|
|
|
+ * (ab)c -> a(bc)
|
|
|
|
+ *
|
|
|
|
+ * right normalization (dir == 1) uses the same rules but reversed
|
|
|
|
+ * (a | E) -> (E | a)
|
|
|
|
+ * a | (b | c) -> (a | b) | c
|
|
|
|
+ * a(bc) -> (ab)c
|
2011-03-25 09:04:51 +01:00
|
|
|
+ *
|
|
|
|
+ * Note: This is written iteratively for a given node (the top node stays
|
|
|
|
+ * fixed and the children are rotated) instead of recursively.
|
|
|
|
+ * For a given node under examination, rotate nodes from
|
|
|
|
+ * dir to !dir until no node in the dir direction meets the criteria.
|
|
|
|
+ * Then recurse to the children (which will have a different node type)
|
|
|
|
+ * to make sure they are normalized.
|
|
|
|
+ * Normalization of a child node is guaranteed to not affect the
|
|
|
|
+ * normalization of the parent.
|
|
|
|
+ *
|
|
|
|
+ * For cat nodes the depth-first traversal order is guaranteed to be
|
|
|
|
+ * maintained. This is not necessary for altnodes.
|
|
|
|
+ *
|
|
|
|
+ * Eg. For left normalization
|
|
|
|
+ *
|
|
|
|
+ * |1 |1
|
|
|
|
+ * / \ / \
|
|
|
|
+ * |2 T -> a |2
|
|
|
|
+ * / \ / \
|
|
|
|
+ * |3 c b |3
|
|
|
|
+ * / \ / \
|
|
|
|
+ * a b c T
|
|
|
|
+ *
|
2011-01-17 17:43:05 +01:00
|
|
|
+ */
|
2011-03-25 09:04:51 +01:00
|
|
|
+static void rotate_node(Node *t, int dir) {
|
|
|
|
+ // (a | b) | c -> a | (b | c)
|
|
|
|
+ // (ab)c -> a(bc)
|
|
|
|
+ Node *left = t->child[dir];
|
|
|
|
+ t->child[dir] = left->child[dir];
|
|
|
|
+ left->child[dir] = left->child[!dir];
|
|
|
|
+ left->child[!dir] = t->child[!dir];
|
|
|
|
+ t->child[!dir] = left;
|
|
|
|
+}
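
A throwaway sketch (tiny ad-hoc struct, not the Node class above) showing the effect of the rotation rotate_node() performs for left normalization: (a op b) op c becomes a op (b op c), with the top node staying in place.

    #include <cassert>
    #include <cstring>

    struct T { const char *v; T *child[2]; };

    static void rotate(T *t, int dir)             /* same moves as rotate_node() */
    {
        T *left = t->child[dir];
        t->child[dir] = left->child[dir];
        left->child[dir] = left->child[!dir];
        left->child[!dir] = t->child[!dir];
        t->child[!dir] = left;
    }

    int main()
    {
        T a = { "a", { 0, 0 } }, b = { "b", { 0, 0 } }, c = { "c", { 0, 0 } };
        T inner = { "op", { &a, &b } };           /* (a op b) */
        T top = { "op", { &inner, &c } };         /* (a op b) op c */

        rotate(&top, 0);                          /* left normalization: dir == 0 */

        assert(std::strcmp(top.child[0]->v, "a") == 0);            /* a op (b op c) */
        assert(std::strcmp(top.child[1]->child[0]->v, "b") == 0);
        assert(std::strcmp(top.child[1]->child[1]->v, "c") == 0);
        return 0;
    }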
|
|
|
|
+
|
2011-01-17 17:43:05 +01:00
|
|
|
+void normalize_tree(Node *t, int dir)
|
|
|
|
+{
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (dynamic_cast<LeafNode *>(t))
|
2011-01-17 17:43:05 +01:00
|
|
|
+ return;
|
|
|
|
+
|
|
|
|
+ for (;;) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if ((&epsnode == t->child[dir]) &&
|
|
|
|
+ (&epsnode != t->child[!dir]) &&
|
|
|
|
+ dynamic_cast<TwoChildNode *>(t)) {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ // (E | a) -> (a | E)
|
|
|
|
+ // Ea -> aE
|
|
|
|
+ Node *c = t->child[dir];
|
|
|
|
+ t->child[dir] = t->child[!dir];
|
|
|
|
+ t->child[!dir] = c;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ // Don't break here as 'a' may be a tree that
|
|
|
|
+ // can be pulled up.
|
2011-01-17 17:43:05 +01:00
|
|
|
+ } else if ((dynamic_cast<AltNode *>(t) &&
|
|
|
|
+ dynamic_cast<AltNode *>(t->child[dir])) ||
|
|
|
|
+ (dynamic_cast<CatNode *>(t) &&
|
|
|
|
+ dynamic_cast<CatNode *>(t->child[dir]))) {
|
|
|
|
+ // (a | b) | c -> a | (b | c)
|
|
|
|
+ // (ab)c -> a(bc)
|
2011-03-25 09:04:51 +01:00
|
|
|
+ rotate_node(t, dir);
|
2011-01-17 17:43:05 +01:00
|
|
|
+ } else if (dynamic_cast<AltNode *>(t) &&
|
|
|
|
+ dynamic_cast<CharSetNode *>(t->child[dir]) &&
|
|
|
|
+ dynamic_cast<CharNode *>(t->child[!dir])) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ // [a] | b -> b | [a]
|
2011-01-17 17:43:05 +01:00
|
|
|
+ Node *c = t->child[dir];
|
|
|
|
+ t->child[dir] = t->child[!dir];
|
|
|
|
+ t->child[!dir] = c;
|
|
|
|
+ } else {
|
|
|
|
+ break;
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ if (t->child[dir])
|
|
|
|
+ normalize_tree(t->child[dir], dir);
|
|
|
|
+ if (t->child[!dir])
|
|
|
|
+ normalize_tree(t->child[!dir], dir);
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+//charset conversion is disabled for now,
|
|
|
|
+//it hinders tree optimization in some cases, so it needs to be either
|
|
|
|
+//done post optimization, or have extra factoring rules added
|
|
|
|
+#if 0
|
|
|
|
+static Node *merge_charset(Node *a, Node *b)
|
|
|
|
+{
|
|
|
|
+ if (dynamic_cast<CharNode *>(a) &&
|
|
|
|
+ dynamic_cast<CharNode *>(b)) {
|
|
|
|
+ Chars chars;
|
|
|
|
+ chars.insert(dynamic_cast<CharNode *>(a)->c);
|
|
|
|
+ chars.insert(dynamic_cast<CharNode *>(b)->c);
|
|
|
|
+ CharSetNode *n = new CharSetNode(chars);
|
|
|
|
+ return n;
|
|
|
|
+ } else if (dynamic_cast<CharNode *>(a) &&
|
|
|
|
+ dynamic_cast<CharSetNode *>(b)) {
|
|
|
|
+ Chars *chars = &dynamic_cast<CharSetNode *>(b)->chars;
|
|
|
|
+ chars->insert(dynamic_cast<CharNode *>(a)->c);
|
2011-03-25 09:04:51 +01:00
|
|
|
+ return b;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ } else if (dynamic_cast<CharSetNode *>(a) &&
|
|
|
|
+ dynamic_cast<CharSetNode *>(b)) {
|
|
|
|
+ Chars *from = &dynamic_cast<CharSetNode *>(a)->chars;
|
|
|
|
+ Chars *to = &dynamic_cast<CharSetNode *>(b)->chars;
|
|
|
|
+ for (Chars::iterator i = from->begin(); i != from->end(); i++)
|
|
|
|
+ to->insert(*i);
|
2011-03-25 09:04:51 +01:00
|
|
|
+ return b;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ //return ???;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+static Node *alt_to_charsets(Node *t, int dir)
|
|
|
|
+{
|
|
|
|
+/*
|
|
|
|
+ Node *first = NULL;
|
|
|
|
+ Node *p = t;
|
|
|
|
+ Node *i = t;
|
|
|
|
+ for (;dynamic_cast<AltNode *>(i);) {
|
|
|
|
+ if (dynamic_cast<CharNode *>(i->child[dir]) ||
|
|
|
|
+ dynamic_cast<CharNodeSet *>(i->child[dir])) {
|
|
|
|
+ if (!first) {
|
|
|
|
+ first = i;
|
|
|
|
+ p = i;
|
|
|
|
+ i = i->child[!dir];
|
|
|
|
+ } else {
|
|
|
|
+ first->child[dir] = merge_charset(first->child[dir],
|
|
|
|
+ i->child[dir]);
|
2011-03-25 09:04:51 +01:00
|
|
|
+ p->child[!dir] = i->child[!dir];
|
2011-01-17 17:43:05 +01:00
|
|
|
+ Node *tmp = i;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ i = tmp->child[!dir];
|
|
|
|
+ tmp->child[!dir] = NULL;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ tmp->release();
|
|
|
|
+ }
|
|
|
|
+ } else {
|
|
|
|
+ p = i;
|
|
|
|
+ i = i->child[!dir];
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ // last altnode of chain check other dir as well
|
|
|
|
+ if (first && (dynamic_cast<charNode *>(i) ||
|
|
|
|
+ dynamic_cast<charNodeSet *>(i))) {
|
|
|
|
+
|
|
|
|
+ }
|
|
|
|
+*/
|
|
|
|
+
|
|
|
|
+/*
|
|
|
|
+ if (dynamic_cast<CharNode *>(t->child[dir]) ||
|
|
|
|
+ dynamic_cast<CharSetNode *>(t->child[dir]))
|
|
|
|
+ char_test = true;
|
|
|
|
+ (char_test &&
|
|
|
|
+ (dynamic_cast<CharNode *>(i->child[dir]) ||
|
|
|
|
+ dynamic_cast<CharSetNode *>(i->child[dir])))) {
|
|
|
|
+*/
|
|
|
|
+ return t;
|
|
|
|
+}
|
|
|
|
+#endif
|
|
|
|
+
|
|
|
|
+static Node *basic_alt_factor(Node *t, int dir)
|
|
|
|
+{
|
|
|
|
+ if (!dynamic_cast<AltNode *>(t))
|
|
|
|
+ return t;
|
|
|
|
+
|
|
|
|
+ if (t->child[dir]->eq(t->child[!dir])) {
|
|
|
|
+ // (a | a) -> a
|
2011-03-25 09:04:51 +01:00
|
|
|
+ Node *tmp = t->child[dir];
|
|
|
|
+ t->child[dir] = NULL;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ t->release();
|
|
|
|
+ return tmp;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ // (ab) | (ac) -> a(b|c)
|
|
|
|
+ if (dynamic_cast<CatNode *>(t->child[dir]) &&
|
|
|
|
+ dynamic_cast<CatNode *>(t->child[!dir]) &&
|
|
|
|
+ t->child[dir]->child[dir]->eq(t->child[!dir]->child[dir])) {
|
|
|
|
+ // (ab) | (ac) -> a(b|c)
|
|
|
|
+ Node *left = t->child[dir];
|
|
|
|
+ Node *right = t->child[!dir];
|
|
|
|
+ t->child[dir] = left->child[!dir];
|
2011-03-25 09:04:51 +01:00
|
|
|
+ t->child[!dir] = right->child[!dir];
|
|
|
|
+ right->child[!dir] = NULL;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ right->release();
|
2011-03-25 09:04:51 +01:00
|
|
|
+ left->child[!dir] = t;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ return left;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ // a | (ab) -> a (E | b) -> a (b | E)
|
|
|
|
+ if (dynamic_cast<CatNode *>(t->child[!dir]) &&
|
|
|
|
+ t->child[dir]->eq(t->child[!dir]->child[dir])) {
|
|
|
|
+ Node *c = t->child[!dir];
|
|
|
|
+ t->child[dir]->release();
|
|
|
|
+ t->child[dir] = c->child[!dir];
|
2011-03-25 09:04:51 +01:00
|
|
|
+ t->child[!dir] = &epsnode;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ c->child[!dir] = t;
|
|
|
|
+ return c;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ // ab | (a) -> a (b | E)
|
|
|
|
+ if (dynamic_cast<CatNode *>(t->child[dir]) &&
|
|
|
|
+ t->child[dir]->child[dir]->eq(t->child[!dir])) {
|
|
|
|
+ Node *c = t->child[dir];
|
|
|
|
+ t->child[!dir]->release();
|
|
|
|
+ t->child[dir] = c->child[!dir];
|
2011-03-25 09:04:51 +01:00
|
|
|
+ t->child[!dir] = &epsnode;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ c->child[!dir] = t;
|
|
|
|
+ return c;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ return t;
|
|
|
|
+}
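
A string-level sketch (not operating on the Node tree) of the common-prefix factoring that basic_alt_factor() performs on the tree: ab | ac becomes a(b|c).

    #include <cassert>
    #include <string>

    int main()
    {
        std::string left = "ab", right = "ac";

        /* find the shared leading part of the two alternatives */
        std::string::size_type k = 0;
        while (k < left.size() && k < right.size() && left[k] == right[k])
            ++k;

        std::string factored = left.substr(0, k) + "(" +
                               left.substr(k) + "|" + right.substr(k) + ")";
        assert(factored == "a(b|c)");
        return 0;
    }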
|
|
|
|
+
|
|
|
|
+static Node *basic_simplify(Node *t, int dir)
|
|
|
|
+{
|
|
|
|
+ if (dynamic_cast<CatNode *>(t) &&
|
2011-03-25 09:04:51 +01:00
|
|
|
+ &epsnode == t->child[!dir]) {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ // aE -> a
|
2011-03-25 09:04:51 +01:00
|
|
|
+ Node *tmp = t->child[dir];
|
|
|
|
+ t->child[dir] = NULL;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ t->release();
|
|
|
|
+ return tmp;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ return basic_alt_factor(t, dir);
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+/*
|
|
|
|
+ * assumes a normalized tree. reductions shown for left normalization
|
|
|
|
+ * aE -> a
|
|
|
|
+ * (a | a) -> a
|
|
|
|
+ ** factoring patterns
|
|
|
|
+ * a | (a | b) -> (a | b)
|
|
|
|
+ * a | (ab) -> a (E | b) -> a (b | E)
|
|
|
|
+ * (ab) | (ac) -> a(b|c)
|
|
|
|
+ *
|
|
|
|
+ * returns t - if no simplifications were made
|
|
|
|
+ * a new root node - if simplifications were made
|
|
|
|
+ */
|
|
|
|
+Node *simplify_tree_base(Node *t, int dir, bool &mod)
|
|
|
|
+{
|
|
|
|
+ if (dynamic_cast<ImportantNode *>(t))
|
|
|
|
+ return t;
|
|
|
|
+
|
|
|
|
+ for (int i=0; i < 2; i++) {
|
|
|
|
+ if (t->child[i]) {
|
|
|
|
+ Node *c = simplify_tree_base(t->child[i], dir, mod);
|
|
|
|
+ if (c != t->child[i]) {
|
|
|
|
+ t->child[i] = c;
|
|
|
|
+ mod = true;
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ // only iterate on loop if modification made
|
|
|
|
+ for (;; mod = true) {
|
|
|
|
+
|
|
|
|
+ Node *tmp = basic_simplify(t, dir);
|
|
|
|
+ if (tmp != t) {
|
|
|
|
+ t = tmp;
|
|
|
|
+ continue;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ /* all tests after this must meet 2 alt node condition */
|
|
|
|
+ if (!dynamic_cast<AltNode *>(t) ||
|
|
|
|
+ !dynamic_cast<AltNode *>(t->child[!dir]))
|
|
|
|
+ break;
|
|
|
|
+
|
|
|
|
+ // a | (a | b) -> (a | b)
|
|
|
|
+ // a | (b | (c | a)) -> (b | (c | a))
|
|
|
|
+ Node *p = t;
|
|
|
|
+ Node *i = t->child[!dir];
|
|
|
|
+ for (;dynamic_cast<AltNode *>(i); p = i, i = i->child[!dir]) {
|
|
|
|
+ if (t->child[dir]->eq(i->child[dir])) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ Node *tmp = t->child[!dir];
|
|
|
|
+ t->child[!dir] = NULL;
|
|
|
|
+ t->release();
|
|
|
|
+ t = tmp;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ continue;
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ // last altnode of chain check other dir as well
|
|
|
|
+ if (t->child[dir]->eq(p->child[!dir])) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ Node *tmp = t->child[!dir];
|
|
|
|
+ t->child[!dir] = NULL;
|
|
|
|
+ t->release();
|
|
|
|
+ t = tmp;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ continue;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ //exact match didn't work, try factoring front
|
|
|
|
+ //a | (ac | (ad | () -> (a (E | c)) | (...)
|
|
|
|
+ //ab | (ac | (...)) -> (a (b | c)) | (...)
|
|
|
|
+ //ab | (a | (...)) -> (a (b | E)) | (...)
|
|
|
|
+ Node *pp;
|
|
|
|
+ int count = 0;
|
|
|
|
+ Node *subject = t->child[dir];
|
|
|
|
+ Node *a = subject;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (dynamic_cast<CatNode *>(subject))
|
|
|
|
+ a = subject->child[dir];
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
|
|
|
+ for (pp = p = t, i = t->child[!dir];
|
|
|
|
+ dynamic_cast<AltNode *>(i); ) {
|
|
|
|
+ if ((dynamic_cast<CatNode *>(i->child[dir]) &&
|
|
|
|
+ a->eq(i->child[dir]->child[dir])) ||
|
|
|
|
+ (a->eq(i->child[dir]))) {
|
|
|
|
+ // extract matching alt node
|
|
|
|
+ p->child[!dir] = i->child[!dir];
|
|
|
|
+ i->child[!dir] = subject;
|
|
|
|
+ subject = basic_simplify(i, dir);
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (dynamic_cast<CatNode *>(subject))
|
|
|
|
+ a = subject->child[dir];
|
|
|
|
+ else
|
|
|
|
+ a = subject;
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
|
|
|
+ i = p->child[!dir];
|
|
|
|
+ count++;
|
|
|
|
+ } else {
|
|
|
|
+ pp = p; p = i; i = i->child[!dir];
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ // last altnode in chain check other dir as well
|
|
|
|
+ if ((dynamic_cast<CatNode *>(i) &&
|
|
|
|
+ a->eq(i->child[dir])) ||
|
|
|
|
+ (a->eq(i))) {
|
|
|
|
+ count++;
|
|
|
|
+ if (t == p) {
|
|
|
|
+ t->child[dir] = subject;
|
|
|
|
+ t = basic_simplify(t, dir);
|
|
|
|
+ } else {
|
|
|
|
+ t->child[dir] = p->child[dir];
|
|
|
|
+ p->child[dir] = subject;
|
|
|
|
+ pp->child[!dir] = basic_simplify(p, dir);
|
|
|
|
+ }
|
|
|
|
+ } else {
|
|
|
|
+ t->child[dir] = i;
|
|
|
|
+ p->child[!dir] = subject;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ if (count == 0)
|
|
|
|
+ break;
|
|
|
|
+ }
|
|
|
|
+ return t;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+int debug_tree(Node *t)
|
|
|
|
+{
|
|
|
|
+ int nodes = 1;
|
|
|
|
+
|
|
|
|
+ if (!dynamic_cast<ImportantNode *>(t)) {
|
|
|
|
+ if (t->child[0])
|
|
|
|
+ nodes += debug_tree(t->child[0]);
|
|
|
|
+ if (t->child[1])
|
|
|
|
+ nodes += debug_tree(t->child[1]);
|
|
|
|
+ }
|
|
|
|
+ return nodes;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+struct node_counts {
|
|
|
|
+ int charnode;
|
|
|
|
+ int charset;
|
|
|
|
+ int notcharset;
|
|
|
|
+ int alt;
|
|
|
|
+ int plus;
|
|
|
|
+ int star;
|
|
|
|
+ int any;
|
|
|
|
+ int cat;
|
|
|
|
+};
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+static void count_tree_nodes(Node *t, struct node_counts *counts)
|
|
|
|
+{
|
|
|
|
+ if (dynamic_cast<AltNode *>(t)) {
|
|
|
|
+ counts->alt++;
|
|
|
|
+ count_tree_nodes(t->child[0], counts);
|
|
|
|
+ count_tree_nodes(t->child[1], counts);
|
|
|
|
+ } else if (dynamic_cast<CatNode *>(t)) {
|
|
|
|
+ counts->cat++;
|
|
|
|
+ count_tree_nodes(t->child[0], counts);
|
|
|
|
+ count_tree_nodes(t->child[1], counts);
|
|
|
|
+ } else if (dynamic_cast<PlusNode *>(t)) {
|
|
|
|
+ counts->plus++;
|
|
|
|
+ count_tree_nodes(t->child[0], counts);
|
|
|
|
+ } else if (dynamic_cast<StarNode *>(t)) {
|
|
|
|
+ counts->star++;
|
|
|
|
+ count_tree_nodes(t->child[0], counts);
|
|
|
|
+ } else if (dynamic_cast<CharNode *>(t)) {
|
|
|
|
+ counts->charnode++;
|
|
|
|
+ } else if (dynamic_cast<AnyCharNode *>(t)) {
|
|
|
|
+ counts->any++;
|
|
|
|
+ } else if (dynamic_cast<CharSetNode *>(t)) {
|
|
|
|
+ counts->charset++;
|
|
|
|
+ } else if (dynamic_cast<NotCharSetNode *>(t)) {
|
|
|
|
+ counts->notcharset++;
|
|
|
|
+ }
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+#include "stdio.h"
|
|
|
|
+#include "stdint.h"
|
|
|
|
+#include "apparmor_re.h"
|
|
|
|
+
|
|
|
|
+Node *simplify_tree(Node *t, dfaflags_t flags)
|
|
|
|
+{
|
|
|
|
+ bool update;
|
|
|
|
+
|
|
|
|
+ if (flags & DFA_DUMP_TREE_STATS) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ struct node_counts counts = { 0, 0, 0, 0, 0, 0, 0, 0 };
|
2011-01-17 17:43:05 +01:00
|
|
|
+ count_tree_nodes(t, &counts);
|
|
|
|
+ fprintf(stderr, "expr tree: c %d, [] %d, [^] %d, | %d, + %d, * %d, . %d, cat %d\n", counts.charnode, counts.charset, counts.notcharset, counts.alt, counts.plus, counts.star, counts.any, counts.cat);
|
|
|
|
+ }
|
|
|
|
+ do {
|
|
|
|
+ update = false;
|
|
|
|
+ //default to right normalize first as this reduces the number
|
|
|
|
+ //of trailing nodes which might follow an internal *
|
|
|
|
+ //or **, which is where state explosion can happen
|
|
|
|
+ //eg. in one test this makes the difference between
|
|
|
|
+ // the dfa having about 7 thousand states,
|
|
|
|
+ // and it having about 1.25 million states
|
|
|
|
+ int dir = 1;
|
|
|
|
+ if (flags & DFA_CONTROL_TREE_LEFT)
|
|
|
|
+ dir = 0;
|
|
|
|
+ for (int count = 0; count < 2; count++) {
|
|
|
|
+ bool modified;
|
|
|
|
+ do {
|
|
|
|
+ modified = false;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (flags & DFA_CONTROL_TREE_NORMAL)
|
2011-01-17 17:43:05 +01:00
|
|
|
+ normalize_tree(t, dir);
|
|
|
|
+ t = simplify_tree_base(t, dir, modified);
|
|
|
|
+ if (modified)
|
|
|
|
+ update = true;
|
|
|
|
+ } while (modified);
|
|
|
|
+ if (flags & DFA_CONTROL_TREE_LEFT)
|
|
|
|
+ dir++;
|
|
|
|
+ else
|
|
|
|
+ dir--;
|
|
|
|
+ }
|
|
|
|
+ } while(update);
|
|
|
|
+ if (flags & DFA_DUMP_TREE_STATS) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ struct node_counts counts = { 0, 0, 0, 0, 0, 0, 0, 0 };
|
2011-01-17 17:43:05 +01:00
|
|
|
+ count_tree_nodes(t, &counts);
|
|
|
|
+ fprintf(stderr, "simplified expr tree: c %d, [] %d, [^] %d, | %d, + %d, * %d, . %d, cat %d\n", counts.charnode, counts.charset, counts.notcharset, counts.alt, counts.plus, counts.star, counts.any, counts.cat);
|
|
|
|
+ }
|
|
|
|
+ return t;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+%}
|
|
|
|
+
|
|
|
|
+%union {
|
|
|
|
+ char c;
|
|
|
|
+ Node *node;
|
|
|
|
+ Chars *cset;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+%{
|
|
|
|
+ void regexp_error(Node **, const char *, const char *);
|
|
|
|
+# define YYLEX_PARAM &text
|
|
|
|
+ int regexp_lex(YYSTYPE *, const char **);
|
|
|
|
+
|
|
|
|
+ static inline Chars*
|
|
|
|
+ insert_char(Chars* cset, uchar a)
|
|
|
|
+ {
|
|
|
|
+ cset->insert(a);
|
|
|
|
+ return cset;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ static inline Chars*
|
|
|
|
+ insert_char_range(Chars* cset, uchar a, uchar b)
|
|
|
|
+ {
|
|
|
|
+ if (a > b)
|
|
|
|
+ swap(a, b);
|
|
|
|
+ for (uchar i = a; i <= b; i++)
|
|
|
|
+ cset->insert(i);
|
|
|
|
+ return cset;
|
|
|
|
+ }
|
|
|
|
+%}
|
|
|
|
+
|
|
|
|
+%pure-parser
|
|
|
|
+/* %error-verbose */
|
|
|
|
+%parse-param {Node **root}
|
|
|
|
+%parse-param {const char *text}
|
|
|
|
+%name-prefix = "regexp_"
|
|
|
|
+
|
|
|
|
+%token <c> CHAR
|
|
|
|
+%type <c> regex_char cset_char1 cset_char cset_charN
|
|
|
|
+%type <cset> charset cset_chars
|
|
|
|
+%type <node> regexp expr terms0 terms qterm term
|
|
|
|
+
|
|
|
|
+/**
|
|
|
|
+ * Note: destroy all nodes upon failure, but *not* the start symbol once
|
|
|
|
+ * parsing succeeds!
|
|
|
|
+ */
|
|
|
|
+%destructor { $$->release(); } expr terms0 terms qterm term
|
|
|
|
+
|
|
|
|
+%%
|
|
|
|
+
|
|
|
|
+/* FIXME: Does not parse "[--]", "[---]", "[^^-x]". I don't actually know
|
|
|
|
+   which precise grammar Perl regexps use, and rediscovering that
|
|
|
|
+ is proving to be painful. */
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+regexp : /* empty */ { *root = $$ = &epsnode; }
|
2011-01-17 17:43:05 +01:00
|
|
|
+ | expr { *root = $$ = $1; }
|
|
|
|
+ ;
|
|
|
|
+
|
|
|
|
+expr : terms
|
|
|
|
+ | expr '|' terms0 { $$ = new AltNode($1, $3); }
|
2011-03-25 09:04:51 +01:00
|
|
|
+ | '|' terms0 { $$ = new AltNode(&epsnode, $2); }
|
2011-01-17 17:43:05 +01:00
|
|
|
+ ;
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+terms0 : /* empty */ { $$ = &epsnode; }
|
2011-01-17 17:43:05 +01:00
|
|
|
+ | terms
|
|
|
|
+ ;
|
|
|
|
+
|
|
|
|
+terms : qterm
|
|
|
|
+ | terms qterm { $$ = new CatNode($1, $2); }
|
|
|
|
+ ;
|
|
|
|
+
|
|
|
|
+qterm : term
|
|
|
|
+ | term '*' { $$ = new StarNode($1); }
|
|
|
|
+ | term '+' { $$ = new PlusNode($1); }
|
|
|
|
+ ;
|
|
|
|
+
|
|
|
|
+term : '.' { $$ = new AnyCharNode; }
|
|
|
|
+ | regex_char { $$ = new CharNode($1); }
|
|
|
|
+ | '[' charset ']' { $$ = new CharSetNode(*$2);
|
|
|
|
+ delete $2; }
|
|
|
|
+ | '[' '^' charset ']'
|
|
|
|
+ { $$ = new NotCharSetNode(*$3);
|
|
|
|
+ delete $3; }
|
|
|
|
+ | '[' '^' '^' cset_chars ']'
|
|
|
|
+ { $4->insert('^');
|
|
|
|
+ $$ = new NotCharSetNode(*$4);
|
|
|
|
+ delete $4; }
|
|
|
|
+ | '(' regexp ')' { $$ = $2; }
|
|
|
|
+ ;
|
|
|
|
+
|
|
|
|
+regex_char : CHAR
|
|
|
|
+ | '^' { $$ = '^'; }
|
|
|
|
+ | '-' { $$ = '-'; }
|
|
|
|
+ | ']' { $$ = ']'; }
|
|
|
|
+ ;
|
|
|
|
+
|
|
|
|
+charset : cset_char1 cset_chars
|
|
|
|
+ { $$ = insert_char($2, $1); }
|
|
|
|
+ | cset_char1 '-' cset_charN cset_chars
|
|
|
|
+ { $$ = insert_char_range($4, $1, $3); }
|
|
|
|
+ ;
|
|
|
|
+
|
|
|
|
+cset_chars : /* nothing */ { $$ = new Chars; }
|
|
|
|
+ | cset_chars cset_charN
|
|
|
|
+ { $$ = insert_char($1, $2); }
|
|
|
|
+ | cset_chars cset_charN '-' cset_charN
|
|
|
|
+ { $$ = insert_char_range($1, $2, $4); }
|
|
|
|
+ ;
|
|
|
|
+
|
|
|
|
+cset_char1 : cset_char
|
|
|
|
+ | ']' { $$ = ']'; }
|
|
|
|
+ | '-' { $$ = '-'; }
|
|
|
|
+ ;
|
|
|
|
+
|
|
|
|
+cset_charN : cset_char
|
|
|
|
+ | '^' { $$ = '^'; }
|
|
|
|
+ ;
|
|
|
|
+
|
|
|
|
+cset_char : CHAR
|
|
|
|
+ | '[' { $$ = '['; }
|
|
|
|
+ | '*' { $$ = '*'; }
|
|
|
|
+ | '+' { $$ = '+'; }
|
|
|
|
+ | '.' { $$ = '.'; }
|
|
|
|
+ | '|' { $$ = '|'; }
|
|
|
|
+ | '(' { $$ = '('; }
|
|
|
|
+ | ')' { $$ = ')'; }
|
|
|
|
+ ;
|
|
|
|
+
|
|
|
|
+%%
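For illustration only (a sketch, not part of the patch): following the productions above, a pattern such as "ab*" reduces through regex_char, term, qterm and terms into the same constructors the semantic actions call, i.e. a CatNode whose right child is a StarNode. Assuming the Node classes declared in apparmor_re.h:

	/* the tree the parser would build for "ab*" */
	Node *a = new CharNode('a');
	Node *bstar = new StarNode(new CharNode('b'));
	Node *tree = new CatNode(a, bstar);	/* matches "a", "ab", "abb", ... */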
|
|
|
|
+
|
|
|
|
+#include <string.h>
|
|
|
|
+#include <getopt.h>
|
|
|
|
+#include <assert.h>
|
|
|
|
+#include <arpa/inet.h>
|
|
|
|
+
|
|
|
|
+#include <iostream>
|
|
|
|
+#include <fstream>
|
|
|
|
+
|
|
|
|
+#include "../immunix.h"
|
|
|
|
+
|
|
|
|
+/* Traverse the syntax tree depth-first in an iterator-like manner. */
|
|
|
|
+class depth_first_traversal {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ stack<Node *> pos;
|
|
|
|
+ void push_left(Node *node)
|
|
|
|
+ {
|
|
|
|
+ pos.push(node);
|
|
|
|
+
|
|
|
|
+ while (dynamic_cast<InnerNode *>(node)) {
|
|
|
|
+ pos.push(node->child[0]);
|
|
|
|
+ node = node->child[0];
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+
|
2011-01-17 17:43:05 +01:00
|
|
|
+public:
|
|
|
|
+ depth_first_traversal(Node *node) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ push_left(node);
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ Node *operator*()
|
|
|
|
+ {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ return pos.top();
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ Node* operator->()
|
|
|
|
+ {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ return pos.top();
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ operator bool()
|
|
|
|
+ {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ return !pos.empty();
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ void operator++(int)
|
|
|
|
+ {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ Node *last = pos.top();
|
|
|
|
+ pos.pop();
|
|
|
|
+
|
|
|
|
+ if (!pos.empty()) {
|
|
|
|
+ /* no need to dynamic cast, as we just popped a node so the top node
|
|
|
|
+ * must be an inner node */
|
|
|
|
+ InnerNode *node = (InnerNode *)(pos.top());
|
|
|
|
+
|
|
|
|
+ if (node->child[1] && node->child[1] != last) {
|
|
|
|
+ push_left(node->child[1]);
|
|
|
|
+ }
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+};
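The class above walks the tree child-before-parent (post-order), which is the order the nullable/firstpos/lastpos computations further down require. A standalone sketch of the same stack-driven order, using a hypothetical two-child TinyNode type (illustration only, not part of the patch):

	#include <stack>
	#include <iostream>

	struct TinyNode {
		char name;
		TinyNode *child[2];
	};

	int main()
	{
		TinyNode a = { 'a', { 0, 0 } }, b = { 'b', { 0, 0 } };
		TinyNode cat = { '.', { &a, &b } };	/* CatNode-like inner node */

		std::stack<TinyNode *> pos;
		for (TinyNode *n = &cat; n; n = n->child[0])	/* push_left */
			pos.push(n);

		while (!pos.empty()) {
			TinyNode *last = pos.top();
			std::cout << last->name << ' ';		/* prints: a b . */
			pos.pop();
			if (!pos.empty()) {
				TinyNode *parent = pos.top();
				if (parent->child[1] && parent->child[1] != last)
					for (TinyNode *n = parent->child[1]; n; n = n->child[0])
						pos.push(n);
			}
		}
		std::cout << '\n';
		return 0;
	}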
|
|
|
|
+
|
|
|
|
+ostream& operator<<(ostream& os, Node& node)
|
|
|
|
+{
|
|
|
|
+ node.dump(os);
|
|
|
|
+ return os;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+ostream& operator<<(ostream& os, uchar c)
|
|
|
|
+{
|
|
|
|
+ const char *search = "\a\033\f\n\r\t|*+[](). ",
|
|
|
|
+ *replace = "aefnrt|*+[](). ", *s;
|
|
|
|
+
|
|
|
|
+ if ((s = strchr(search, c)) && *s != '\0')
|
|
|
|
+ os << '\\' << replace[s - search];
|
|
|
|
+ else if (c < 32 || c >= 127)
|
|
|
|
+ os << '\\' << '0' << char('0' + (c >> 6))
|
|
|
|
+ << char('0' + ((c >> 3) & 7)) << char('0' + (c & 7));
|
|
|
|
+ else
|
|
|
|
+ os << (char)c;
|
|
|
|
+ return os;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+int
|
|
|
|
+octdigit(char c)
|
|
|
|
+{
|
|
|
|
+ if (c >= '0' && c <= '7')
|
|
|
|
+ return c - '0';
|
|
|
|
+ return -1;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+int
|
|
|
|
+hexdigit(char c)
|
|
|
|
+{
|
|
|
|
+ if (c >= '0' && c <= '9')
|
|
|
|
+ return c - '0';
|
|
|
|
+ else if (c >= 'A' && c <= 'F')
|
|
|
|
+ return 10 + c - 'A';
|
|
|
|
+ else if (c >= 'a' && c <= 'f')
|
|
|
|
+		return 10 + c - 'a';
|
|
|
|
+ else
|
|
|
|
+ return -1;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+int
|
|
|
|
+regexp_lex(YYSTYPE *val, const char **pos)
|
|
|
|
+{
|
|
|
|
+ int c;
|
|
|
|
+
|
|
|
|
+ val->c = **pos;
|
|
|
|
+ switch(*(*pos)++) {
|
|
|
|
+ case '\0':
|
|
|
|
+ (*pos)--;
|
|
|
|
+ return 0;
|
|
|
|
+
|
|
|
|
+ case '*': case '+': case '.': case '|': case '^': case '-':
|
|
|
|
+ case '[': case ']': case '(' : case ')':
|
|
|
|
+ return *(*pos - 1);
|
|
|
|
+
|
|
|
|
+ case '\\':
|
|
|
|
+ val->c = **pos;
|
|
|
|
+ switch(*(*pos)++) {
|
|
|
|
+ case '\0':
|
|
|
|
+ (*pos)--;
|
|
|
|
+ /* fall through */
|
|
|
|
+ case '\\':
|
|
|
|
+ val->c = '\\';
|
|
|
|
+ break;
|
|
|
|
+
|
|
|
|
+ case '0':
|
|
|
|
+ val->c = 0;
|
|
|
|
+ if ((c = octdigit(**pos)) >= 0) {
|
|
|
|
+ val->c = c;
|
|
|
|
+ (*pos)++;
|
|
|
|
+ }
|
|
|
|
+ if ((c = octdigit(**pos)) >= 0) {
|
|
|
|
+ val->c = (val->c << 3) + c;
|
|
|
|
+ (*pos)++;
|
|
|
|
+ }
|
|
|
|
+ if ((c = octdigit(**pos)) >= 0) {
|
|
|
|
+ val->c = (val->c << 3) + c;
|
|
|
|
+ (*pos)++;
|
|
|
|
+ }
|
|
|
|
+ break;
|
|
|
|
+
|
|
|
|
+ case 'x':
|
|
|
|
+ val->c = 0;
|
|
|
|
+ if ((c = hexdigit(**pos)) >= 0) {
|
|
|
|
+ val->c = c;
|
|
|
|
+ (*pos)++;
|
|
|
|
+ }
|
|
|
|
+ if ((c = hexdigit(**pos)) >= 0) {
|
|
|
|
+ val->c = (val->c << 4) + c;
|
|
|
|
+ (*pos)++;
|
|
|
|
+ }
|
|
|
|
+ break;
|
|
|
|
+
|
|
|
|
+ case 'a':
|
|
|
|
+ val->c = '\a';
|
|
|
|
+ break;
|
|
|
|
+
|
|
|
|
+ case 'e':
|
|
|
|
+ val->c = 033 /* ESC */;
|
|
|
|
+ break;
|
|
|
|
+
|
|
|
|
+ case 'f':
|
|
|
|
+ val->c = '\f';
|
|
|
|
+ break;
|
|
|
|
+
|
|
|
|
+ case 'n':
|
|
|
|
+ val->c = '\n';
|
|
|
|
+ break;
|
|
|
|
+
|
|
|
|
+ case 'r':
|
|
|
|
+ val->c = '\r';
|
|
|
|
+ break;
|
|
|
|
+
|
|
|
|
+ case 't':
|
|
|
|
+ val->c = '\t';
|
|
|
|
+ break;
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ return CHAR;
|
|
|
|
+}
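A worked example of the escape handling above (illustration only): for the text "\x41" the lexer returns CHAR with val->c == 0x41 ('A'), and "\0101" yields the same byte through the octal branch (1*64 + 0*8 + 1 == 65). A hypothetical check built on the digit helpers defined earlier in this file:

	static void check_escape_digits(void)
	{
		/* the three octal digits of "\0101" */
		assert(((octdigit('1') << 6) | (octdigit('0') << 3) | octdigit('1')) == 0x41);
		/* the two hex digits of "\x41" */
		assert(((hexdigit('4') << 4) | hexdigit('1')) == 0x41);
	}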
|
|
|
|
+
|
|
|
|
+void
|
2011-03-25 09:04:51 +01:00
|
|
|
+regexp_error(Node ** __attribute__((unused)),
|
|
|
|
+ const char *text __attribute__((unused)),
|
|
|
|
+ const char *error __attribute__((unused)))
|
2011-01-17 17:43:05 +01:00
|
|
|
+{
|
|
|
|
+ /* We don't want the library to print error messages. */
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+/**
|
|
|
|
+ * Assign a consecutive number to each node. This is only needed for
|
|
|
|
+ * pretty-printing the debug output.
|
2011-03-25 09:04:51 +01:00
|
|
|
+ *
|
|
|
|
+ * The epsnode is labeled 0; start labeling at 1.
|
2011-01-17 17:43:05 +01:00
|
|
|
+ */
|
|
|
|
+void label_nodes(Node *root)
|
|
|
|
+{
|
2011-03-25 09:04:51 +01:00
|
|
|
+ int nodes = 1;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ for (depth_first_traversal i(root); i; i++)
|
|
|
|
+ i->label = nodes++;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+/**
|
|
|
|
+ * Text-dump a state (for debugging).
|
|
|
|
+ */
|
2011-03-25 09:04:51 +01:00
|
|
|
+ostream& operator<<(ostream& os, const NodeSet& state)
|
2011-01-17 17:43:05 +01:00
|
|
|
+{
|
|
|
|
+ os << '{';
|
|
|
|
+ if (!state.empty()) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ NodeSet::iterator i = state.begin();
|
2011-01-17 17:43:05 +01:00
|
|
|
+ for(;;) {
|
|
|
|
+ os << (*i)->label;
|
|
|
|
+ if (++i == state.end())
|
|
|
|
+ break;
|
|
|
|
+ os << ',';
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ os << '}';
|
|
|
|
+ return os;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+/**
|
|
|
|
+ * Text-dump the syntax tree (for debugging).
|
|
|
|
+ */
|
|
|
|
+void dump_syntax_tree(ostream& os, Node *node) {
|
|
|
|
+ for (depth_first_traversal i(node); i; i++) {
|
|
|
|
+ os << i->label << '\t';
|
|
|
|
+ if ((*i)->child[0] == 0)
|
|
|
|
+ os << **i << '\t' << (*i)->followpos << endl;
|
|
|
|
+ else {
|
|
|
|
+ if ((*i)->child[1] == 0)
|
|
|
|
+ os << (*i)->child[0]->label << **i;
|
|
|
|
+ else
|
|
|
|
+ os << (*i)->child[0]->label << **i
|
|
|
|
+ << (*i)->child[1]->label;
|
|
|
|
+ os << '\t' << (*i)->firstpos
|
|
|
|
+ << (*i)->lastpos << endl;
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ os << endl;
|
|
|
|
+}
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+/* Comparison operator for <hash, NodeSet *> pairs.
|
|
|
|
+ * Compare set hashes, and if the sets have the same hash
|
|
|
|
+ * fall back to comparing the pointed-to sets of <Node *>; this comparison
|
|
|
|
+ * allows us to distinguish the sets of <Node *> we have already seen from
|
|
|
|
+ * new ones when constructing the DFA.
|
|
|
|
+ */
|
|
|
|
+struct deref_less_than {
|
|
|
|
+ bool operator()(pair <unsigned long, NodeSet *> const & lhs, pair <unsigned long, NodeSet *> const & rhs) const
|
|
|
|
+ {
|
|
|
|
+ if (lhs.first == rhs.first)
|
|
|
|
+ return *(lhs.second) < *(rhs.second);
|
|
|
|
+ else
|
|
|
|
+ return lhs.first < rhs.first;
|
|
|
|
+ }
|
2011-01-17 17:43:05 +01:00
|
|
|
+};
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+unsigned long hash_NodeSet(const NodeSet *ns)
|
|
|
|
+{
|
|
|
|
+ unsigned long hash = 5381;
|
|
|
|
+
|
|
|
|
+ for (NodeSet::iterator i = ns->begin(); i != ns->end(); i++) {
|
|
|
|
+ hash = ((hash << 5) + hash) + (unsigned long) *i;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ return hash;
|
|
|
|
+}
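Taken together, deref_less_than and hash_NodeSet() let the construction below key states by a (hash, set) pair, so a node set that was built again as a fresh object still maps back to the State created for its first occurrence. A standalone sketch of that lookup pattern using plain integer sets and hypothetical names (not part of the patch):

	#include <iostream>
	#include <map>
	#include <set>
	#include <utility>

	typedef std::set<int> IntSet;

	static unsigned long hash_set(const IntSet *s)
	{
		unsigned long hash = 5381;	/* djb2, as above */
		for (IntSet::const_iterator i = s->begin(); i != s->end(); ++i)
			hash = ((hash << 5) + hash) + (unsigned long)*i;
		return hash;
	}

	struct deref_less {
		bool operator()(const std::pair<unsigned long, IntSet *> &l,
				const std::pair<unsigned long, IntSet *> &r) const
		{
			if (l.first == r.first)
				return *l.second < *r.second;
			return l.first < r.first;
		}
	};

	int main()
	{
		std::map<std::pair<unsigned long, IntSet *>, int, deref_less> seen;
		IntSet *a = new IntSet; a->insert(1); a->insert(2);
		IntSet *b = new IntSet; b->insert(1); b->insert(2);	/* same contents */

		seen[std::make_pair(hash_set(a), a)] = 1;
		/* found even though b is a different object */
		std::cout << seen.count(std::make_pair(hash_set(b), b)) << "\n";	/* 1 */
		delete a; delete b;
		return 0;
	}

Note the difference from the real code: hash_NodeSet() hashes the node pointers themselves, which is sufficient there because equal NodeSets contain the same Node pointers.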
|
|
|
|
+
|
|
|
|
+class State;
|
2011-01-17 17:43:05 +01:00
|
|
|
+/**
|
2011-03-25 09:04:51 +01:00
|
|
|
+ * State cases are identical to NodeCases except they map to State *
|
|
|
|
+ * instead of NodeSet.
|
|
|
|
+ * Out-edges from a state to another: we store the follow State
|
|
|
|
+ * for each input character that is not a default match in cases, and
|
|
|
|
+ * default matches in otherwise, as well as in all matching explicit cases.
|
|
|
|
+ * This avoids enumerating all the explicit transitions for default matches.
|
2011-01-17 17:43:05 +01:00
|
|
|
+ */
|
2011-03-25 09:04:51 +01:00
|
|
|
+typedef struct Cases {
|
|
|
|
+ typedef map<uchar, State *>::iterator iterator;
|
|
|
|
+ iterator begin() { return cases.begin(); }
|
|
|
|
+ iterator end() { return cases.end(); }
|
|
|
|
+
|
|
|
|
+ Cases() : otherwise(0) { }
|
|
|
|
+ map<uchar, State *> cases;
|
|
|
|
+ State *otherwise;
|
|
|
|
+} Cases;
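A hypothetical helper (not in the patch) showing how a Cases entry is meant to be consulted: an explicit per-character transition wins, anything else falls through to the default:

	static State *next_for(Cases &c, uchar ch)
	{
		Cases::iterator i = c.cases.find(ch);
		if (i != c.cases.end())
			return i->second;
		return c.otherwise;	/* may be NULL: no out-edge for ch */
	}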
|
|
|
|
+
|
2011-01-17 17:43:05 +01:00
|
|
|
+typedef list<State *> Partition;
|
2011-03-25 09:04:51 +01:00
|
|
|
+
|
|
|
|
+uint32_t accept_perms(NodeSet *state, uint32_t *audit_ctl, int *error);
|
|
|
|
+
|
|
|
|
+/*
|
|
|
|
+ * State - DFA individual state information
|
|
|
|
+ * label: a unique label to identify the state used for pretty printing
|
|
|
|
+ *        the non-matching state is set up to have label == 0 and
|
|
|
|
+ *        the start state is set up to have label == 1
|
|
|
|
+ * audit: the audit permission mask for the state
|
|
|
|
+ * accept: the accept permissions for the state
|
|
|
|
+ * cases: set of transitions from this state
|
|
|
|
+ *  partition: Is a temporary work variable used during dfa minimization.
|
|
|
|
+ * it can be replaced with a map, but that is slower and uses more
|
|
|
|
+ * memory.
|
|
|
|
+ * nodes: Is a temporary work variable used during dfa creation. It can
|
|
|
|
+ * be replaced by using the nodemap, but that is slower
|
|
|
|
+ */
|
|
|
|
+class State {
|
|
|
|
+public:
|
|
|
|
+ State() : label (0), audit(0), accept(0), cases(), nodes(NULL) { };
|
|
|
|
+ State(int l): label (l), audit(0), accept(0), cases(), nodes(NULL) { };
|
|
|
|
+ State(int l, NodeSet *n) throw (int):
|
|
|
|
+ label(l), audit(0), accept(0), cases(), nodes(n)
|
|
|
|
+ {
|
|
|
|
+ int error;
|
|
|
|
+
|
|
|
|
+ /* Compute permissions associated with the State. */
|
|
|
|
+ accept = accept_perms(nodes, &audit, &error);
|
|
|
|
+ if (error) {
|
|
|
|
+			cerr << "Failing on accept perms " << error << "\n";
|
|
|
|
+ throw error;
|
|
|
|
+ }
|
|
|
|
+ };
|
|
|
|
+
|
|
|
|
+ int label;
|
|
|
|
+ uint32_t audit, accept;
|
|
|
|
+ Cases cases;
|
|
|
|
+ union {
|
|
|
|
+ Partition *partition;
|
|
|
|
+ NodeSet *nodes;
|
|
|
|
+ };
|
|
|
|
+};
|
|
|
|
+
|
|
|
|
+ostream& operator<<(ostream& os, const State& state)
|
|
|
|
+{
|
|
|
|
+ /* dump the state label */
|
|
|
|
+ os << '{';
|
|
|
|
+ os << state.label;
|
|
|
|
+ os << '}';
|
|
|
|
+ return os;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+typedef map<pair<unsigned long, NodeSet *>, State *, deref_less_than > NodeMap;
|
2011-01-17 17:43:05 +01:00
|
|
|
+/* Transitions in the DFA. */
|
2011-03-25 09:04:51 +01:00
|
|
|
+
|
|
|
|
+/* dfa_stats - structure to group various stats about dfa creation
|
|
|
|
+ * duplicates - how many duplicate NodeSets were encountered and discarded
|
|
|
|
+ * proto_max - maximum length of a NodeSet encountered during dfa construction
|
|
|
|
+ * proto_sum - sum of NodeSet length during dfa construction. Used to find
|
|
|
|
+ * average length.
|
|
|
|
+ */
|
|
|
|
+typedef struct dfa_stats {
|
|
|
|
+ unsigned int duplicates, proto_max, proto_sum;
|
|
|
|
+} dfa_stats_t;
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
|
|
|
+class DFA {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ void dump_node_to_dfa(void);
|
|
|
|
+ State* add_new_state(NodeMap &nodemap, pair <unsigned long, NodeSet *> index, NodeSet *nodes, dfa_stats_t &stats);
|
|
|
|
+ void update_state_transitions(NodeMap &nodemap, list <State *> &work_queue, State *state, dfa_stats_t &stats);
|
|
|
|
+ State *find_target_state(NodeMap &nodemap, list <State *> &work_queue,
|
|
|
|
+ NodeSet *nodes, dfa_stats_t &stats);
|
2011-01-17 17:43:05 +01:00
|
|
|
+public:
|
|
|
|
+ DFA(Node *root, dfaflags_t flags);
|
|
|
|
+ virtual ~DFA();
|
|
|
|
+ void remove_unreachable(dfaflags_t flags);
|
2011-03-25 09:04:51 +01:00
|
|
|
+ bool same_mappings(State *s1, State *s2);
|
2011-01-17 17:43:05 +01:00
|
|
|
+ size_t hash_trans(State *s);
|
|
|
|
+ void minimize(dfaflags_t flags);
|
|
|
|
+ void dump(ostream& os);
|
|
|
|
+ void dump_dot_graph(ostream& os);
|
2011-03-25 09:04:51 +01:00
|
|
|
+ void dump_uniq_perms(const char *s);
|
2011-01-17 17:43:05 +01:00
|
|
|
+ map<uchar, uchar> equivalence_classes(dfaflags_t flags);
|
|
|
|
+ void apply_equivalence_classes(map<uchar, uchar>& eq);
|
|
|
|
+ Node *root;
|
|
|
|
+ State *nonmatching, *start;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ Partition states;
|
2011-01-17 17:43:05 +01:00
|
|
|
+};
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+State* DFA::add_new_state(NodeMap &nodemap, pair <unsigned long, NodeSet *> index, NodeSet *nodes, dfa_stats_t &stats)
|
|
|
|
+{
|
|
|
|
+ State *state = new State(nodemap.size(), nodes);
|
|
|
|
+ states.push_back(state);
|
|
|
|
+ nodemap.insert(make_pair(index, state));
|
|
|
|
+ stats.proto_sum += nodes->size();
|
|
|
|
+ if (nodes->size() > stats.proto_max)
|
|
|
|
+ stats.proto_max = nodes->size();
|
|
|
|
+ return state;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+State *DFA::find_target_state(NodeMap &nodemap, list <State *> &work_queue,
|
|
|
|
+ NodeSet *nodes, dfa_stats_t &stats)
|
|
|
|
+{
|
|
|
|
+ State *target;
|
|
|
|
+
|
|
|
|
+ pair <unsigned long, NodeSet *> index = make_pair(hash_NodeSet(nodes), nodes);
|
|
|
|
+
|
|
|
|
+ map<pair <unsigned long, NodeSet *>, State *, deref_less_than>::iterator x = nodemap.find(index);
|
|
|
|
+
|
|
|
|
+ if (x == nodemap.end()) {
|
|
|
|
+ /* set of nodes isn't known so create new state, and nodes to
|
|
|
|
+ * state mapping
|
|
|
|
+ */
|
|
|
|
+ target = add_new_state(nodemap, index, nodes, stats);
|
|
|
|
+ work_queue.push_back(target);
|
|
|
|
+ } else {
|
|
|
|
+ /* set of nodes already has a mapping so free this one */
|
|
|
|
+ stats.duplicates++;
|
|
|
|
+ delete (nodes);
|
|
|
|
+ target = x->second;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ return target;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+void DFA::update_state_transitions(NodeMap &nodemap,
|
|
|
|
+ list <State *> &work_queue, State *state,
|
|
|
|
+ dfa_stats_t &stats)
|
|
|
|
+{
|
|
|
|
+ /* Compute possible transitions for state->nodes. This is done by
|
|
|
|
+ * iterating over all the nodes in state->nodes and combining the
|
|
|
|
+ * transitions.
|
|
|
|
+ *
|
|
|
|
+ * The resultant transition set is a mapping of characters to
|
|
|
|
+ * sets of nodes.
|
|
|
|
+ */
|
|
|
|
+ NodeCases cases;
|
|
|
|
+ for (NodeSet::iterator i = state->nodes->begin(); i != state->nodes->end(); i++)
|
|
|
|
+ (*i)->follow(cases);
|
|
|
|
+
|
|
|
|
+ /* Now for each set of nodes in the computed transitions, make
|
|
|
|
+ * sure that there is a state that maps to it, and add the
|
|
|
|
+ * matching case to the state.
|
|
|
|
+ */
|
|
|
|
+
|
|
|
|
+ /* check the default transition first */
|
|
|
|
+ if (cases.otherwise)
|
|
|
|
+ state->cases.otherwise = find_target_state(nodemap, work_queue,
|
|
|
|
+ cases.otherwise,
|
|
|
|
+							   stats);
|
|
|
|
+
|
|
|
|
+ /* For each transition from *from, check if the set of nodes it
|
|
|
|
+ * transitions to already has been mapped to a state
|
|
|
|
+ */
|
|
|
|
+ for (NodeCases::iterator j = cases.begin(); j != cases.end(); j++) {
|
|
|
|
+ State *target;
|
|
|
|
+ target = find_target_state(nodemap, work_queue, j->second,
|
|
|
|
+ stats);
|
|
|
|
+
|
|
|
|
+ /* Don't insert transition that the default transition
|
|
|
|
+ * already covers
|
|
|
|
+ */
|
|
|
|
+ if (target != state->cases.otherwise)
|
|
|
|
+ state->cases.cases[j->first] = target;
|
|
|
|
+ }
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+/* WARNING: This routine can only be called from within DFA creation as
|
|
|
|
+ * the nodes value is only valid during dfa construction.
|
|
|
|
+ */
|
|
|
|
+void DFA::dump_node_to_dfa(void)
|
|
|
|
+{
|
|
|
|
+ cerr << "Mapping of States to expr nodes\n"
|
|
|
|
+ " State <= Nodes\n"
|
|
|
|
+ "-------------------\n";
|
|
|
|
+ for (Partition::iterator i = states.begin(); i != states.end(); i++)
|
|
|
|
+ cerr << " " << (*i)->label << " <= " << *(*i)->nodes << "\n";
|
|
|
|
+}
|
|
|
|
+
|
2011-01-17 17:43:05 +01:00
|
|
|
+/**
|
|
|
|
+ * Construct a DFA from a syntax tree.
|
|
|
|
+ */
|
|
|
|
+DFA::DFA(Node *root, dfaflags_t flags) : root(root)
|
|
|
|
+{
|
2011-03-25 09:04:51 +01:00
|
|
|
+ dfa_stats_t stats = { 0, 0, 0 };
|
|
|
|
+ int i = 0;
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (flags & DFA_DUMP_PROGRESS)
|
|
|
|
+ fprintf(stderr, "Creating dfa:\r");
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ for (depth_first_traversal i(root); i; i++) {
|
|
|
|
+ (*i)->compute_nullable();
|
|
|
|
+ (*i)->compute_firstpos();
|
|
|
|
+ (*i)->compute_lastpos();
|
|
|
|
+ }
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (flags & DFA_DUMP_PROGRESS)
|
|
|
|
+ fprintf(stderr, "Creating dfa: followpos\r");
|
|
|
|
+ for (depth_first_traversal i(root); i; i++) {
|
|
|
|
+ (*i)->compute_followpos();
|
|
|
|
+ }
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ NodeMap nodemap;
|
|
|
|
+ NodeSet *emptynode = new NodeSet;
|
|
|
|
+ nonmatching = add_new_state(nodemap,
|
|
|
|
+ make_pair(hash_NodeSet(emptynode), emptynode),
|
|
|
|
+ emptynode, stats);
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ NodeSet *first = new NodeSet(root->firstpos);
|
|
|
|
+ start = add_new_state(nodemap, make_pair(hash_NodeSet(first), first),
|
|
|
|
+ first, stats);
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ /* the work_queue contains the states that need to have their
|
|
|
|
+ * transitions computed. This could be done with a recursive
|
|
|
|
+ * algorithm instead of a work_queue, but it would be slightly slower
|
|
|
|
+ * and consume more memory.
|
|
|
|
+ *
|
|
|
|
+ * TODO: currently the work_queue is treated in a breadth first
|
|
|
|
+ * search manner. Test using the work_queue in a depth first
|
|
|
|
+ * manner, this may help reduce the number of entries on the
|
|
|
|
+ * work_queue at any given time, thus reducing peak memory use.
|
|
|
|
+ */
|
|
|
|
+ list<State *> work_queue;
|
|
|
|
+ work_queue.push_back(start);
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ while (!work_queue.empty()) {
|
|
|
|
+ if (i % 1000 == 0 && (flags & DFA_DUMP_PROGRESS))
|
|
|
|
+ fprintf(stderr, "\033[2KCreating dfa: queue %ld\tstates %ld\teliminated duplicates %d\r", work_queue.size(), states.size(), stats.duplicates);
|
|
|
|
+ i++;
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ State *from = work_queue.front();
|
|
|
|
+ work_queue.pop_front();
|
|
|
|
+
|
|
|
|
+ /* Update 'from's transitions, and if it transitions to any
|
|
|
|
+ * unknown State create it and add it to the work_queue
|
|
|
|
+ */
|
|
|
|
+ update_state_transitions(nodemap, work_queue, from, stats);
|
|
|
|
+
|
|
|
|
+	} /* while (!work_queue.empty()) */
|
|
|
|
+
|
|
|
|
+ /* cleanup Sets of nodes used computing the DFA as they are no longer
|
|
|
|
+ * needed.
|
|
|
|
+ */
|
|
|
|
+ for (depth_first_traversal i(root); i; i++) {
|
|
|
|
+ (*i)->firstpos.clear();
|
|
|
|
+ (*i)->lastpos.clear();
|
|
|
|
+ (*i)->followpos.clear();
|
|
|
|
+ }
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (flags & DFA_DUMP_NODE_TO_DFA)
|
|
|
|
+ dump_node_to_dfa();
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ for (NodeMap::iterator i = nodemap.begin(); i != nodemap.end(); i++)
|
|
|
|
+ delete i->first.second;
|
|
|
|
+ nodemap.clear();
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (flags & (DFA_DUMP_STATS))
|
|
|
|
+ fprintf(stderr, "\033[2KCreated dfa: states %ld,\teliminated duplicates %d,\tprotostate sets: longest %u, avg %u\n", states.size(), stats.duplicates, stats.proto_max, (unsigned int) (stats.proto_sum/states.size()));
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
|
|
|
+}
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+
|
2011-01-17 17:43:05 +01:00
|
|
|
+DFA::~DFA()
|
|
|
|
+{
|
2011-03-25 09:04:51 +01:00
|
|
|
+ for (Partition::iterator i = states.begin(); i != states.end(); i++)
|
2011-01-17 17:43:05 +01:00
|
|
|
+ delete *i;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+class MatchFlag : public AcceptNode {
|
|
|
|
+public:
|
|
|
|
+	MatchFlag(uint32_t flag, uint32_t audit) : flag(flag), audit(audit) {}
|
|
|
|
+ ostream& dump(ostream& os)
|
|
|
|
+ {
|
|
|
|
+ return os << '<' << flag << '>';
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ uint32_t flag;
|
|
|
|
+ uint32_t audit;
|
|
|
|
+ };
|
|
|
|
+
|
|
|
|
+class ExactMatchFlag : public MatchFlag {
|
|
|
|
+public:
|
|
|
|
+ ExactMatchFlag(uint32_t flag, uint32_t audit) : MatchFlag(flag, audit) {}
|
|
|
|
+};
|
|
|
|
+
|
|
|
|
+class DenyMatchFlag : public MatchFlag {
|
|
|
|
+public:
|
|
|
|
+ DenyMatchFlag(uint32_t flag, uint32_t quiet) : MatchFlag(flag, quiet) {}
|
|
|
|
+};
|
|
|
|
+
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+void DFA::dump_uniq_perms(const char *s)
|
2011-01-17 17:43:05 +01:00
|
|
|
+{
|
2011-03-25 09:04:51 +01:00
|
|
|
+ set < pair<uint32_t, uint32_t> > uniq;
|
|
|
|
+ for (Partition::iterator i = states.begin(); i != states.end(); i++)
|
|
|
|
+ uniq.insert(make_pair((*i)->accept, (*i)->audit));
|
|
|
|
+
|
|
|
|
+ cerr << "Unique Permission sets: " << s << " (" << uniq.size() << ")\n";
|
|
|
|
+ cerr << "----------------------\n";
|
|
|
|
+ for (set< pair<uint32_t, uint32_t> >::iterator i = uniq.begin();
|
|
|
|
+ i != uniq.end(); i++) {
|
|
|
|
+ cerr << " " << hex << i->first << " " << i->second << dec <<"\n";
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+}
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+
|
2011-01-17 17:43:05 +01:00
|
|
|
+/* Remove dead or unreachable states */
|
|
|
|
+void DFA::remove_unreachable(dfaflags_t flags)
|
|
|
|
+{
|
|
|
|
+ set <State *> reachable;
|
|
|
|
+ list <State *> work_queue;
|
|
|
|
+
|
|
|
|
+ /* find the set of reachable states */
|
|
|
|
+ reachable.insert(nonmatching);
|
|
|
|
+ work_queue.push_back(start);
|
|
|
|
+ while (!work_queue.empty()) {
|
|
|
|
+ State *from = work_queue.front();
|
|
|
|
+ work_queue.pop_front();
|
|
|
|
+ reachable.insert(from);
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (from->cases.otherwise &&
|
|
|
|
+ (reachable.find(from->cases.otherwise) == reachable.end()))
|
|
|
|
+ work_queue.push_back(from->cases.otherwise);
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ for (Cases::iterator j = from->cases.begin();
|
|
|
|
+ j != from->cases.end(); j++) {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ if (reachable.find(j->second) == reachable.end())
|
|
|
|
+ work_queue.push_back(j->second);
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ /* walk the set of states and remove any that aren't reachable */
|
|
|
|
+ if (reachable.size() < states.size()) {
|
|
|
|
+ int count = 0;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ Partition::iterator i;
|
|
|
|
+ Partition::iterator next;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ for (i = states.begin(); i != states.end(); i = next) {
|
|
|
|
+ next = i;
|
|
|
|
+ next++;
|
|
|
|
+ if (reachable.find(*i) == reachable.end()) {
|
|
|
|
+ if (flags & DFA_DUMP_UNREACHABLE) {
|
|
|
|
+ cerr << "unreachable: "<< **i;
|
|
|
|
+ if (*i == start)
|
|
|
|
+ cerr << " <==";
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if ((*i)->accept) {
|
|
|
|
+ cerr << " (0x" << hex << (*i)->accept
|
|
|
|
+ << " " << (*i)->audit << dec << ')';
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ cerr << endl;
|
|
|
|
+ }
|
2011-03-25 09:04:51 +01:00
|
|
|
+ State *current = *i;
|
|
|
|
+ states.erase(i);
|
|
|
|
+ delete(current);
|
|
|
|
+ count++;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ if (count && (flags & DFA_DUMP_STATS))
|
|
|
|
+ cerr << "DFA: states " << states.size() << " removed "
|
|
|
|
+ << count << " unreachable states\n";
|
|
|
|
+ }
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+/* test if two states have the same transitions under partition_map */
|
2011-03-25 09:04:51 +01:00
|
|
|
+bool DFA::same_mappings(State *s1, State *s2)
|
2011-01-17 17:43:05 +01:00
|
|
|
+{
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (s1->cases.otherwise && s1->cases.otherwise != nonmatching) {
|
|
|
|
+ if (!s2->cases.otherwise || s2->cases.otherwise == nonmatching)
|
2011-01-17 17:43:05 +01:00
|
|
|
+ return false;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ Partition *p1 = s1->cases.otherwise->partition;
|
|
|
|
+ Partition *p2 = s2->cases.otherwise->partition;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ if (p1 != p2)
|
|
|
|
+ return false;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ } else if (s2->cases.otherwise && s2->cases.otherwise != nonmatching) {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ return false;
|
|
|
|
+ }
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (s1->cases.cases.size() != s2->cases.cases.size())
|
2011-01-17 17:43:05 +01:00
|
|
|
+ return false;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ for (Cases::iterator j1 = s1->cases.begin(); j1 != s1->cases.end();
|
2011-01-17 17:43:05 +01:00
|
|
|
+ j1++){
|
2011-03-25 09:04:51 +01:00
|
|
|
+ Cases::iterator j2 = s2->cases.cases.find(j1->first);
|
|
|
|
+ if (j2 == s2->cases.end())
|
2011-01-17 17:43:05 +01:00
|
|
|
+ return false;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ Partition *p1 = j1->second->partition;
|
|
|
|
+ Partition *p2 = j2->second->partition;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ if (p1 != p2)
|
|
|
|
+ return false;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ return true;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+/* Do simple djb2 hashing against a State's transition cases.
|
|
|
|
+ * This provides a rough initial guess at state equivalence: if a state
|
|
|
|
+ * has a different number of transitions or has transitions on different
|
|
|
|
+ * cases they will never be equivalent.
|
|
|
|
+ * Note: this only hashes based off of the alphabet (not destination)
|
|
|
|
+ * as different destinations could end up being equivalent.
|
|
|
|
+ */
|
|
|
|
+size_t DFA::hash_trans(State *s)
|
|
|
|
+{
|
|
|
|
+ unsigned long hash = 5381;
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ for (Cases::iterator j = s->cases.begin(); j != s->cases.end(); j++){
|
2011-01-17 17:43:05 +01:00
|
|
|
+ hash = ((hash << 5) + hash) + j->first;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ State *k = j->second;
|
|
|
|
+ hash = ((hash << 5) + hash) + k->cases.cases.size();
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (s->cases.otherwise && s->cases.otherwise != nonmatching) {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ hash = ((hash << 5) + hash) + 5381;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ State *k = s->cases.otherwise;
|
|
|
|
+ hash = ((hash << 5) + hash) + k->cases.cases.size();
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ hash = (hash << 8) | s->cases.cases.size();
|
2011-01-17 17:43:05 +01:00
|
|
|
+ return hash;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+/* minimize the number of dfa states */
|
|
|
|
+void DFA::minimize(dfaflags_t flags)
|
|
|
|
+{
|
|
|
|
+ map <pair <uint64_t, size_t>, Partition *> perm_map;
|
|
|
|
+ list <Partition *> partitions;
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ /* Set up the initial partitions
|
|
|
|
+	 * minimum of 1 non-accepting and 1 accepting partition.
|
|
|
|
+	 * If trans hashing is used, the accepting and non-accepting partitions
|
|
|
|
+ * can be further split based on the number and type of transitions
|
|
|
|
+ * a state makes.
|
|
|
|
+	 * If permission hashing is enabled, the accepting partitions can
|
|
|
|
+ * be further divided by permissions. This can result in not
|
|
|
|
+	 * obtaining a truly minimized dfa, but comes close and can speed up
|
|
|
|
+ * minimization.
|
2011-01-17 17:43:05 +01:00
|
|
|
+ */
|
|
|
|
+ int accept_count = 0;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ int final_accept = 0;
|
|
|
|
+ for (Partition::iterator i = states.begin(); i != states.end(); i++) {
|
|
|
|
+ uint64_t perm_hash = 0;
|
|
|
|
+ if (flags & DFA_CONTROL_MINIMIZE_HASH_PERMS) {
|
|
|
|
+ /* make every unique perm create a new partition */
|
|
|
|
+ perm_hash = ((uint64_t)(*i)->audit)<<32 |
|
|
|
|
+ (uint64_t)(*i)->accept;
|
|
|
|
+ } else if ((*i)->audit || (*i)->accept) {
|
|
|
|
+			/* combine all perms together into a single partition */
|
|
|
|
+ perm_hash = 1;
|
|
|
|
+ } /* else not an accept state so 0 for perm_hash */
|
|
|
|
+
|
|
|
|
+ size_t trans_hash = 0;
|
|
|
|
+ if (flags & DFA_CONTROL_MINIMIZE_HASH_TRANS)
|
|
|
|
+ trans_hash = hash_trans(*i);
|
|
|
|
+ pair <uint64_t, size_t> group = make_pair(perm_hash, trans_hash);
|
2011-01-17 17:43:05 +01:00
|
|
|
+ map <pair <uint64_t, size_t>, Partition *>::iterator p = perm_map.find(group);
|
|
|
|
+ if (p == perm_map.end()) {
|
|
|
|
+ Partition *part = new Partition();
|
|
|
|
+ part->push_back(*i);
|
|
|
|
+ perm_map.insert(make_pair(group, part));
|
|
|
|
+ partitions.push_back(part);
|
2011-03-25 09:04:51 +01:00
|
|
|
+ (*i)->partition = part;
|
|
|
|
+ if (perm_hash)
|
2011-01-17 17:43:05 +01:00
|
|
|
+ accept_count++;
|
|
|
|
+ } else {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ (*i)->partition = p->second;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ p->second->push_back(*i);
|
|
|
|
+ }
|
2011-03-25 09:04:51 +01:00
|
|
|
+
|
2011-01-17 17:43:05 +01:00
|
|
|
+ if ((flags & DFA_DUMP_PROGRESS) &&
|
|
|
|
+ (partitions.size() % 1000 == 0))
|
2011-03-25 09:04:51 +01:00
|
|
|
+ cerr << "\033[2KMinimize dfa: partitions " << partitions.size() << "\tinit " << partitions.size() << " (accept " << accept_count << ")\r";
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ /* perm_map is no longer needed so free the memory it is using.
|
|
|
|
+ * Don't remove - doing it manually here helps reduce peak memory usage.
|
|
|
|
+ */
|
|
|
|
+ perm_map.clear();
|
|
|
|
+
|
2011-01-17 17:43:05 +01:00
|
|
|
+ int init_count = partitions.size();
|
|
|
|
+ if (flags & DFA_DUMP_PROGRESS)
|
2011-03-25 09:04:51 +01:00
|
|
|
+ cerr << "\033[2KMinimize dfa: partitions " << partitions.size() << "\tinit " << init_count << " (accept " << accept_count << ")\r";
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
|
|
|
+ /* Now do repartitioning until each partition contains the set of
|
|
|
|
+ * states that are the same. This will happen when the partition
|
|
|
|
+	 * splitting stabilizes, with a worst case of 1 state per partition,
|
|
|
|
+ * ie. already minimized.
|
|
|
|
+ */
|
|
|
|
+ Partition *new_part;
|
|
|
|
+ int new_part_count;
|
|
|
|
+ do {
|
|
|
|
+ new_part_count = 0;
|
|
|
|
+ for (list <Partition *>::iterator p = partitions.begin();
|
|
|
|
+ p != partitions.end(); p++) {
|
|
|
|
+ new_part = NULL;
|
|
|
|
+ State *rep = *((*p)->begin());
|
|
|
|
+ Partition::iterator next;
|
|
|
|
+ for (Partition::iterator s = ++(*p)->begin();
|
|
|
|
+ s != (*p)->end(); ) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (same_mappings(rep, *s)) {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ ++s;
|
|
|
|
+ continue;
|
|
|
|
+ }
|
|
|
|
+ if (!new_part) {
|
|
|
|
+ new_part = new Partition;
|
|
|
|
+ list <Partition *>::iterator tmp = p;
|
|
|
|
+ partitions.insert(++tmp, new_part);
|
|
|
|
+ new_part_count++;
|
|
|
|
+ }
|
|
|
|
+ new_part->push_back(*s);
|
|
|
|
+ s = (*p)->erase(s);
|
|
|
|
+ }
|
|
|
|
+ /* remapping partition_map for new_part entries
|
|
|
|
+ * Do not do this above as it messes up same_mappings
|
|
|
|
+ */
|
|
|
|
+ if (new_part) {
|
|
|
|
+ for (Partition::iterator m = new_part->begin();
|
|
|
|
+ m != new_part->end(); m++) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ (*m)->partition = new_part;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ if ((flags & DFA_DUMP_PROGRESS) &&
|
|
|
|
+ (partitions.size() % 100 == 0))
|
2011-03-25 09:04:51 +01:00
|
|
|
+ cerr << "\033[2KMinimize dfa: partitions " << partitions.size() << "\tinit " << init_count << " (accept " << accept_count << ")\r";
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ } while(new_part_count);
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (partitions.size() == states.size()) {
|
|
|
|
+ if (flags & DFA_DUMP_STATS)
|
|
|
|
+ cerr << "\033[2KDfa minimization no states removed: partitions " << partitions.size() << "\tinit " << init_count << " (accept " << accept_count << ")\n";
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
|
|
|
+
|
|
|
|
+ goto out;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ /* Remap the dfa so it uses the representative states
|
|
|
|
+ * Use the first state of a partition as the representative state
|
|
|
|
+	 * At this point all states within a partition have transitions
|
2011-03-25 09:04:51 +01:00
|
|
|
+	 * to states within the same partition; however, this can slow
|
|
|
|
+	 * down dfa table compression, as there are more states.
|
2011-01-17 17:43:05 +01:00
|
|
|
+ */
|
|
|
|
+ for (list <Partition *>::iterator p = partitions.begin();
|
|
|
|
+ p != partitions.end(); p++) {
|
|
|
|
+ /* representative state for this partition */
|
|
|
|
+ State *rep = *((*p)->begin());
|
|
|
|
+
|
|
|
|
+ /* update representative state's transitions */
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (rep->cases.otherwise) {
|
|
|
|
+ Partition *partition = rep->cases.otherwise->partition;
|
|
|
|
+ rep->cases.otherwise = *partition->begin();
|
|
|
|
+ }
|
|
|
|
+ for (Cases::iterator c = rep->cases.begin();
|
|
|
|
+ c != rep->cases.end(); c++) {
|
|
|
|
+ Partition *partition = c->second->partition;
|
|
|
|
+ c->second = *partition->begin();
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+//if ((*p)->size() > 1)
|
|
|
|
+//cerr << rep->label << ": ";
|
|
|
|
+ /* clear the state label for all non representative states,
|
|
|
|
+ * and accumulate permissions */
|
|
|
|
+ for (Partition::iterator i = ++(*p)->begin(); i != (*p)->end(); i++) {
|
|
|
|
+//cerr << " " << (*i)->label;
|
|
|
|
+ (*i)->label = -1;
|
|
|
|
+ rep->accept |= (*i)->accept;
|
|
|
|
+ rep->audit |= (*i)->audit;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (rep->accept || rep->audit)
|
|
|
|
+ final_accept++;
|
|
|
|
+//if ((*p)->size() > 1)
|
|
|
|
+//cerr << "\n";
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (flags & DFA_DUMP_STATS)
|
|
|
|
+ cerr << "\033[2KMinimized dfa: final partitions " << partitions.size() << " (accept " << final_accept << ")" << "\tinit " << init_count << " (accept " << accept_count << ")\n";
|
|
|
|
+
|
|
|
|
+
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
|
|
|
+ /* make sure nonmatching and start state are up to date with the
|
|
|
|
+ * mappings */
|
|
|
|
+ {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ Partition *partition = nonmatching->partition;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ if (*partition->begin() != nonmatching) {
|
|
|
|
+ nonmatching = *partition->begin();
|
|
|
|
+ }
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ partition = start->partition;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ if (*partition->begin() != start) {
|
|
|
|
+ start = *partition->begin();
|
|
|
|
+ }
|
|
|
|
+ }
|
2011-03-25 09:04:51 +01:00
|
|
|
+
|
2011-01-17 17:43:05 +01:00
|
|
|
+ /* Now that the states have been remapped, remove all states
|
2011-03-25 09:04:51 +01:00
|
|
|
+	 * that are not the representative states for their partition; they
|
|
|
|
+ * will have a label == -1
|
2011-01-17 17:43:05 +01:00
|
|
|
+ */
|
2011-03-25 09:04:51 +01:00
|
|
|
+ for (Partition::iterator i = states.begin(); i != states.end(); ) {
|
|
|
|
+ if ((*i)->label == -1) {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ State *s = *i;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ i = states.erase(i);
|
2011-01-17 17:43:05 +01:00
|
|
|
+ delete(s);
|
2011-03-25 09:04:51 +01:00
|
|
|
+ } else
|
|
|
|
+ i++;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+out:
|
|
|
|
+ /* Cleanup */
|
|
|
|
+ while (!partitions.empty()) {
|
|
|
|
+ Partition *p = partitions.front();
|
|
|
|
+ partitions.pop_front();
|
|
|
|
+ delete(p);
|
|
|
|
+ }
|
|
|
|
+}
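The loop above is a partition-refinement (Moore-style) minimization specialized to this State/Partition representation. A standalone toy version of the same idea (hypothetical, not part of the patch): start from accepting vs. non-accepting and keep splitting a partition while two of its members disagree on which partition some input leads to.

	#include <iostream>

	int main()
	{
		const int N = 4, A = 2;		/* states, alphabet size */
		int delta[N][A] = { {1, 2}, {3, 2}, {3, 1}, {3, 3} };
		bool accept[N] = { false, false, false, true };

		int part[N];			/* partition id per state */
		for (int s = 0; s < N; s++)
			part[s] = accept[s] ? 1 : 0;

		bool changed = true;
		while (changed) {
			changed = false;
			int newpart[N];
			for (int s = 0; s < N; s++)
				newpart[s] = -1;
			int next_id = 0;
			for (int s = 0; s < N; s++) {
				if (newpart[s] != -1)
					continue;
				newpart[s] = next_id;
				for (int t = s + 1; t < N; t++) {
					if (newpart[t] != -1 || part[t] != part[s])
						continue;
					bool same = true;
					for (int c = 0; c < A; c++)
						if (part[delta[s][c]] != part[delta[t][c]])
							same = false;
					if (same)
						newpart[t] = next_id;
				}
				next_id++;
			}
			for (int s = 0; s < N; s++) {
				if (newpart[s] != part[s])
					changed = true;
				part[s] = newpart[s];
			}
		}

		for (int s = 0; s < N; s++)
			std::cout << "state " << s << " -> partition " << part[s] << "\n";
		return 0;	/* states 1 and 2 end up sharing a partition */
	}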
|
|
|
|
+
|
|
|
|
+/**
|
|
|
|
+ * text-dump the DFA (for debugging).
|
|
|
|
+ */
|
|
|
|
+void DFA::dump(ostream& os)
|
|
|
|
+{
|
2011-03-25 09:04:51 +01:00
|
|
|
+ for (Partition::iterator i = states.begin(); i != states.end(); i++) {
|
|
|
|
+ if (*i == start || (*i)->accept) {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ os << **i;
|
|
|
|
+ if (*i == start)
|
|
|
|
+ os << " <==";
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if ((*i)->accept) {
|
|
|
|
+ os << " (0x" << hex << (*i)->accept << " " << (*i)->audit << dec << ')';
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ os << endl;
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ os << endl;
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ for (Partition::iterator i = states.begin(); i != states.end(); i++) {
|
|
|
|
+ if ((*i)->cases.otherwise)
|
|
|
|
+ os << **i << " -> " << (*i)->cases.otherwise << endl;
|
|
|
|
+ for (Cases::iterator j = (*i)->cases.begin(); j != (*i)->cases.end(); j++) {
|
|
|
|
+ os << **i << " -> " << j->second << ": " << j->first << endl;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ os << endl;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+/**
|
|
|
|
+ * Create a dot (graphviz) graph from the DFA (for debugging).
|
|
|
|
+ */
|
|
|
|
+void DFA::dump_dot_graph(ostream& os)
|
|
|
|
+{
|
|
|
|
+ os << "digraph \"dfa\" {" << endl;
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ for (Partition::iterator i = states.begin(); i != states.end(); i++) {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ if (*i == nonmatching)
|
|
|
|
+ continue;
|
|
|
|
+
|
|
|
|
+ os << "\t\"" << **i << "\" [" << endl;
|
|
|
|
+ if (*i == start) {
|
|
|
|
+ os << "\t\tstyle=bold" << endl;
|
|
|
|
+ }
|
2011-03-25 09:04:51 +01:00
|
|
|
+ uint32_t perms = (*i)->accept;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ if (perms) {
|
|
|
|
+ os << "\t\tlabel=\"" << **i << "\\n("
|
|
|
|
+ << perms << ")\"" << endl;
|
|
|
|
+ }
|
|
|
|
+ os << "\t]" << endl;
|
|
|
|
+ }
|
2011-03-25 09:04:51 +01:00
|
|
|
+ for (Partition::iterator i = states.begin(); i != states.end(); i++) {
|
|
|
|
+ Cases& cases = (*i)->cases;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ Chars excluded;
|
|
|
|
+
|
|
|
|
+ for (Cases::iterator j = cases.begin(); j != cases.end(); j++) {
|
|
|
|
+ if (j->second == nonmatching)
|
|
|
|
+ excluded.insert(j->first);
|
|
|
|
+ else {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ os << "\t\"" << **i << "\" -> \"";
|
|
|
|
+ os << j->second << "\" [" << endl;
|
|
|
|
+ os << "\t\tlabel=\"" << j->first << "\"" << endl;
|
|
|
|
+ os << "\t]" << endl;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ }
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (cases.otherwise && cases.otherwise != nonmatching) {
|
|
|
|
+ os << "\t\"" << **i << "\" -> \"" << cases.otherwise
|
2011-01-17 17:43:05 +01:00
|
|
|
+ << "\" [" << endl;
|
|
|
|
+ if (!excluded.empty()) {
|
|
|
|
+ os << "\t\tlabel=\"[^";
|
|
|
|
+ for (Chars::iterator i = excluded.begin();
|
|
|
|
+ i != excluded.end();
|
|
|
|
+ i++) {
|
|
|
|
+ os << *i;
|
|
|
|
+ }
|
|
|
|
+ os << "]\"" << endl;
|
|
|
|
+ }
|
|
|
|
+ os << "\t]" << endl;
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ os << '}' << endl;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+/**
|
|
|
|
+ * Compute character equivalence classes in the DFA to save space in the
|
|
|
|
+ * transition table.
|
|
|
|
+ */
|
|
|
|
+map<uchar, uchar> DFA::equivalence_classes(dfaflags_t flags)
|
|
|
|
+{
|
|
|
|
+ map<uchar, uchar> classes;
|
|
|
|
+ uchar next_class = 1;
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ for (Partition::iterator i = states.begin(); i != states.end(); i++) {
|
|
|
|
+ Cases& cases = (*i)->cases;
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
|
|
|
+ /* Group edges to the same next state together */
|
|
|
|
+ map<const State *, Chars> node_sets;
|
|
|
|
+ for (Cases::iterator j = cases.begin(); j != cases.end(); j++)
|
|
|
|
+ node_sets[j->second].insert(j->first);
|
|
|
|
+
|
|
|
|
+ for (map<const State *, Chars>::iterator j = node_sets.begin();
|
|
|
|
+ j != node_sets.end();
|
|
|
|
+ j++) {
|
|
|
|
+ /* Group edges to the same next state together by class */
|
|
|
|
+ map<uchar, Chars> node_classes;
|
|
|
|
+ bool class_used = false;
|
|
|
|
+ for (Chars::iterator k = j->second.begin();
|
|
|
|
+ k != j->second.end();
|
|
|
|
+ k++) {
|
|
|
|
+ pair<map<uchar, uchar>::iterator, bool> x =
|
|
|
|
+ classes.insert(make_pair(*k, next_class));
|
|
|
|
+ if (x.second)
|
|
|
|
+ class_used = true;
|
|
|
|
+ pair<map<uchar, Chars>::iterator, bool> y =
|
|
|
|
+ node_classes.insert(make_pair(x.first->second, Chars()));
|
|
|
|
+ y.first->second.insert(*k);
|
|
|
|
+ }
|
|
|
|
+ if (class_used) {
|
|
|
|
+ next_class++;
|
|
|
|
+ class_used = false;
|
|
|
|
+ }
|
|
|
|
+ for (map<uchar, Chars>::iterator k = node_classes.begin();
|
|
|
|
+ k != node_classes.end();
|
|
|
|
+ k++) {
|
|
|
|
+ /**
|
|
|
|
+ * If any other characters are in the same class, move
|
|
|
|
+ * the characters in this class into their own new class
|
|
|
|
+ */
|
|
|
|
+ map<uchar, uchar>::iterator l;
|
|
|
|
+ for (l = classes.begin(); l != classes.end(); l++) {
|
|
|
|
+ if (l->second == k->first &&
|
|
|
|
+ k->second.find(l->first) == k->second.end()) {
|
|
|
|
+ class_used = true;
|
|
|
|
+ break;
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ if (class_used) {
|
|
|
|
+ for (Chars::iterator l = k->second.begin();
|
|
|
|
+ l != k->second.end();
|
|
|
|
+ l++) {
|
|
|
|
+ classes[*l] = next_class;
|
|
|
|
+ }
|
|
|
|
+ next_class++;
|
|
|
|
+ class_used = false;
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ if (flags & DFA_DUMP_EQUIV_STATS)
|
|
|
|
+ fprintf(stderr, "Equiv class reduces to %d classes\n", next_class - 1);
|
|
|
|
+ return classes;
|
|
|
|
+}
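The incremental splitting above approximates a simpler notion, sketched below with a toy table (hypothetical, not part of the patch): two input bytes can share a class when every state sends them to the same next state, i.e. their columns in the transition table are identical.

	#include <iostream>
	#include <map>
	#include <utility>
	#include <vector>

	int main()
	{
		/* toy table: 3 states x 4 input symbols */
		int delta[3][4] = { {1, 1, 2, 0},
				    {1, 1, 0, 0},
				    {2, 2, 2, 1} };

		std::map<std::vector<int>, int> classes;	/* column -> class id */
		for (int c = 0; c < 4; c++) {
			std::vector<int> column(3);
			for (int s = 0; s < 3; s++)
				column[s] = delta[s][c];
			std::map<std::vector<int>, int>::iterator it = classes.find(column);
			if (it == classes.end())
				it = classes.insert(std::make_pair(column, (int)classes.size() + 1)).first;
			std::cout << "input " << c << " -> class " << it->second << "\n";
		}
		return 0;	/* inputs 0 and 1 share class 1; inputs 2 and 3 get their own */
	}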
|
|
|
|
+
|
|
|
|
+/**
|
|
|
|
+ * Text-dump the equivalence classes (for debugging).
|
|
|
|
+ */
|
|
|
|
+void dump_equivalence_classes(ostream& os, map<uchar, uchar>& eq)
|
|
|
|
+{
|
|
|
|
+ map<uchar, Chars> rev;
|
|
|
|
+
|
|
|
|
+ for (map<uchar, uchar>::iterator i = eq.begin(); i != eq.end(); i++) {
|
|
|
|
+ Chars& chars = rev.insert(make_pair(i->second,
|
|
|
|
+ Chars())).first->second;
|
|
|
|
+ chars.insert(i->first);
|
|
|
|
+ }
|
|
|
|
+ os << "(eq):" << endl;
|
|
|
|
+ for (map<uchar, Chars>::iterator i = rev.begin(); i != rev.end(); i++) {
|
|
|
|
+ os << (int)i->first << ':';
|
|
|
|
+ Chars& chars = i->second;
|
|
|
|
+ for (Chars::iterator j = chars.begin(); j != chars.end(); j++) {
|
|
|
|
+ os << ' ' << *j;
|
|
|
|
+ }
|
|
|
|
+ os << endl;
|
|
|
|
+ }
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+/**
|
|
|
|
+ * Replace characters with classes (which are also represented as
|
|
|
|
+ * characters) in the DFA transition table.
|
|
|
|
+ */
|
|
|
|
+void DFA::apply_equivalence_classes(map<uchar, uchar>& eq)
|
|
|
|
+{
|
|
|
|
+ /**
|
|
|
|
+ * Note: We only transform the transition table; the nodes continue to
|
|
|
|
+ * contain the original characters.
|
|
|
|
+ */
|
2011-03-25 09:04:51 +01:00
|
|
|
+ for (Partition::iterator i = states.begin(); i != states.end(); i++) {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ map<uchar, State *> tmp;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ tmp.swap((*i)->cases.cases);
|
2011-01-17 17:43:05 +01:00
|
|
|
+ for (Cases::iterator j = tmp.begin(); j != tmp.end(); j++)
|
2011-03-25 09:04:51 +01:00
|
|
|
+ (*i)->cases.cases.insert(make_pair(eq[j->first], j->second));
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+/**
|
|
|
|
+ * Flip the children of all cat nodes. This causes strings to be matched
|
|
|
|
+ * back to front (i.e. in reverse).
|
|
|
|
+ */
|
|
|
|
+void flip_tree(Node *node)
|
|
|
|
+{
|
|
|
|
+ for (depth_first_traversal i(node); i; i++) {
|
|
|
|
+ if (CatNode *cat = dynamic_cast<CatNode *>(*i)) {
|
|
|
|
+ swap(cat->child[0], cat->child[1]);
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+class TransitionTable {
|
|
|
|
+ typedef vector<pair<const State *, size_t> > DefaultBase;
|
|
|
|
+ typedef vector<pair<const State *, const State *> > NextCheck;
|
|
|
|
+public:
|
|
|
|
+ TransitionTable(DFA& dfa, map<uchar, uchar>& eq, dfaflags_t flags);
|
|
|
|
+ void dump(ostream& os);
|
|
|
|
+ void flex_table(ostream& os, const char *name);
|
|
|
|
+ void init_free_list(vector <pair<size_t, size_t> > &free_list, size_t prev, size_t start);
|
|
|
|
+ bool fits_in(vector <pair<size_t, size_t> > &free_list,
|
|
|
|
+ size_t base, Cases& cases);
|
|
|
|
+ void insert_state(vector <pair<size_t, size_t> > &free_list,
|
|
|
|
+ State *state, DFA& dfa);
|
|
|
|
+
|
|
|
|
+private:
|
|
|
|
+ vector<uint32_t> accept;
|
|
|
|
+ vector<uint32_t> accept2;
|
|
|
|
+ DefaultBase default_base;
|
|
|
|
+ NextCheck next_check;
|
|
|
|
+ map<const State *, size_t> num;
|
|
|
|
+ map<uchar, uchar>& eq;
|
|
|
|
+ uchar max_eq;
|
|
|
|
+ size_t first_free;
|
|
|
|
+};
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+void TransitionTable::init_free_list(vector <pair<size_t, size_t> > &free_list,
|
|
|
|
+ size_t prev, size_t start) {
|
|
|
|
+ for (size_t i = start; i < free_list.size(); i++) {
|
|
|
|
+ if (prev)
|
|
|
|
+ free_list[prev].second = i;
|
|
|
|
+ free_list[i].first = prev;
|
|
|
|
+ prev = i;
|
|
|
|
+ }
|
|
|
|
+ free_list[free_list.size() -1].second = 0;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+/**
|
|
|
|
+ * Construct the transition table.
|
|
|
|
+ */
|
|
|
|
+TransitionTable::TransitionTable(DFA& dfa, map<uchar, uchar>& eq,
|
|
|
|
+ dfaflags_t flags)
|
|
|
|
+ : eq(eq)
|
|
|
|
+{
|
|
|
|
+
|
|
|
|
+ if (flags & DFA_DUMP_TRANS_PROGRESS)
|
2011-03-25 09:04:51 +01:00
|
|
|
+ fprintf(stderr, "Compressing trans table:\r");
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
|
|
|
+
|
|
|
|
+ if (eq.empty())
|
|
|
|
+ max_eq = 255;
|
|
|
|
+ else {
|
|
|
|
+ max_eq = 0;
|
|
|
|
+ for(map<uchar, uchar>::iterator i = eq.begin(); i != eq.end(); i++) {
|
|
|
|
+ if (i->second > max_eq)
|
|
|
|
+ max_eq = i->second;
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ /* Do initial setup adding up all the transitions and sorting by
|
|
|
|
+ * transition count.
|
|
|
|
+ */
|
|
|
|
+ size_t optimal = 2;
|
|
|
|
+ multimap <size_t, State *> order;
|
|
|
|
+ vector <pair<size_t, size_t> > free_list;
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ for (Partition::iterator i = dfa.states.begin(); i != dfa.states.end(); i++) {
|
|
|
|
+ if (*i == dfa.start || *i == dfa.nonmatching)
|
2011-01-17 17:43:05 +01:00
|
|
|
+ continue;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ optimal += (*i)->cases.cases.size();
|
2011-01-17 17:43:05 +01:00
|
|
|
+ if (flags & DFA_CONTROL_TRANS_HIGH) {
|
|
|
|
+ size_t range = 0;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if ((*i)->cases.cases.size())
|
|
|
|
+ range = (*i)->cases.cases.rbegin()->first - (*i)->cases.begin()->first;
|
|
|
|
+ size_t ord = ((256 - (*i)->cases.cases.size()) << 8) |
|
2011-01-17 17:43:05 +01:00
|
|
|
+ (256 - range);
|
|
|
|
+ /* reverse sort by entry count, most entries first */
|
2011-03-25 09:04:51 +01:00
|
|
|
+ order.insert(make_pair(ord, *i));
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ /* Insert the dummy nonmatching transition by hand */
|
|
|
|
+ next_check.push_back(make_pair(dfa.nonmatching, dfa.nonmatching));
|
|
|
|
+ default_base.push_back(make_pair(dfa.nonmatching, 0));
|
|
|
|
+ num.insert(make_pair(dfa.nonmatching, num.size()));
|
|
|
|
+
|
|
|
|
+ accept.resize(dfa.states.size());
|
|
|
|
+ accept2.resize(dfa.states.size());
|
|
|
|
+ next_check.resize(optimal);
|
|
|
|
+ free_list.resize(optimal);
|
|
|
|
+
|
|
|
|
+ accept[0] = 0;
|
|
|
|
+ accept2[0] = 0;
|
|
|
|
+ first_free = 1;
|
|
|
|
+ init_free_list(free_list, 0, 1);
|
|
|
|
+
|
|
|
|
+ insert_state(free_list, dfa.start, dfa);
|
|
|
|
+ accept[1] = 0;
|
|
|
|
+ accept2[1] = 0;
|
|
|
|
+ num.insert(make_pair(dfa.start, num.size()));
|
|
|
|
+
|
|
|
|
+ int count = 2;
|
|
|
|
+
|
|
|
|
+ if (!(flags & DFA_CONTROL_TRANS_HIGH)) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ for (Partition::iterator i = dfa.states.begin(); i != dfa.states.end();
|
2011-01-17 17:43:05 +01:00
|
|
|
+ i++) {
|
|
|
|
+ if (*i != dfa.nonmatching && *i != dfa.start) {
|
|
|
|
+ insert_state(free_list, *i, dfa);
|
2011-03-25 09:04:51 +01:00
|
|
|
+ accept[num.size()] = (*i)->accept;
|
|
|
|
+ accept2[num.size()] = (*i)->audit;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ num.insert(make_pair(*i, num.size()));
|
|
|
|
+ }
|
|
|
|
+ if (flags & (DFA_DUMP_TRANS_PROGRESS)) {
|
|
|
|
+ count++;
|
|
|
|
+ if (count % 100 == 0)
|
2011-03-25 09:04:51 +01:00
|
|
|
+ fprintf(stderr, "\033[2KCompressing trans table: insert state: %d/%ld\r", count, dfa.states.size());
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ } else {
|
|
|
|
+ for (multimap <size_t, State *>::iterator i = order.begin();
|
|
|
|
+ i != order.end(); i++) {
|
|
|
|
+ if (i->second != dfa.nonmatching && i->second != dfa.start) {
|
|
|
|
+ insert_state(free_list, i->second, dfa);
|
2011-03-25 09:04:51 +01:00
|
|
|
+ accept[num.size()] = i->second->accept;
|
|
|
|
+ accept2[num.size()] = i->second->audit;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ num.insert(make_pair(i->second, num.size()));
|
|
|
|
+ }
|
|
|
|
+ if (flags & (DFA_DUMP_TRANS_PROGRESS)) {
|
|
|
|
+ count++;
|
|
|
|
+ if (count % 100 == 0)
|
2011-03-25 09:04:51 +01:00
|
|
|
+ fprintf(stderr, "\033[2KCompressing trans table: insert state: %d/%ld\r", count, dfa.states.size());
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ if (flags & (DFA_DUMP_TRANS_STATS | DFA_DUMP_TRANS_PROGRESS)) {
|
|
|
|
+ ssize_t size = 4 * next_check.size() + 6 * dfa.states.size();
|
2011-03-25 09:04:51 +01:00
|
|
|
+ fprintf(stderr, "\033[2KCompressed trans table: states %ld, next/check %ld, optimal next/check %ld avg/state %.2f, compression %ld/%ld = %.2f %%\n", dfa.states.size(), next_check.size(), optimal, (float)next_check.size()/(float)dfa.states.size(), size, 512 * dfa.states.size(), 100.0 - ((float) size * 100.0 / (float)(512 * dfa.states.size())));
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+/**
|
|
|
|
+ * Does <cases> fit into position <base> of the transition table?
|
|
|
|
+ */
|
2011-03-25 09:04:51 +01:00
|
|
|
+bool TransitionTable::fits_in(vector <pair<size_t, size_t> > &free_list __attribute__((unused)),
|
2011-01-17 17:43:05 +01:00
|
|
|
+ size_t pos, Cases& cases)
|
|
|
|
+{
|
|
|
|
+ size_t c, base = pos - cases.begin()->first;
|
|
|
|
+ for (Cases::iterator i = cases.begin(); i != cases.end(); i++) {
|
|
|
|
+ c = base + i->first;
|
|
|
|
+		/* if it overflows the next_check array, treat it as fitting, since
|
|
|
|
+		 * we will resize the array */
|
|
|
|
+ if (c >= next_check.size())
|
|
|
|
+ return true;
|
|
|
|
+ if (next_check[c].second)
|
|
|
|
+ return false;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ return true;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+/**
|
|
|
|
+ * Insert <state> of <dfa> into the transition table.
|
|
|
|
+ */
|
|
|
|
+void TransitionTable::insert_state(vector <pair<size_t, size_t> > &free_list,
|
|
|
|
+ State *from, DFA& dfa)
|
|
|
|
+{
|
|
|
|
+ State *default_state = dfa.nonmatching;
|
|
|
|
+ size_t base = 0;
|
|
|
|
+ int resize;
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ Cases& cases = from->cases;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ size_t c = cases.begin()->first;
|
|
|
|
+ size_t prev = 0;
|
|
|
|
+ size_t x = first_free;
|
|
|
|
+
|
|
|
|
+ if (cases.otherwise)
|
|
|
|
+ default_state = cases.otherwise;
|
|
|
|
+ if (cases.cases.empty())
|
|
|
|
+ goto do_insert;
|
|
|
|
+
|
|
|
|
+repeat:
|
|
|
|
+ resize = 0;
|
|
|
|
+ /* get the first free entry that won't underflow */
|
|
|
|
+ while (x && (x < c)) {
|
|
|
|
+ prev = x;
|
|
|
|
+ x = free_list[x].second;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ /* try inserting until we succeed. */
|
|
|
|
+ while (x && !fits_in(free_list, x, cases)) {
|
|
|
|
+ prev = x;
|
|
|
|
+ x = free_list[x].second;
|
|
|
|
+ }
|
|
|
|
+ if (!x) {
|
|
|
|
+ resize = 256 - cases.begin()->first;
|
|
|
|
+ x = free_list.size();
|
|
|
|
+ /* set prev to last free */
|
|
|
|
+ } else if (x + 255 - cases.begin()->first >= next_check.size()) {
|
|
|
|
+ resize = (255 - cases.begin()->first - (next_check.size() - 1 - x));
|
|
|
|
+ for (size_t y = x; y; y = free_list[y].second)
|
|
|
|
+ prev = y;
|
|
|
|
+ }
|
|
|
|
+ if (resize) {
|
|
|
|
+ /* expand next_check and free_list */
|
|
|
|
+ size_t old_size = free_list.size();
|
|
|
|
+ next_check.resize(next_check.size() + resize);
|
|
|
|
+ free_list.resize(free_list.size() + resize);
|
|
|
|
+ init_free_list(free_list, prev, old_size);
|
|
|
|
+ if (!first_free)
|
|
|
|
+			first_free = old_size;
|
|
|
|
+ if (x == old_size)
|
|
|
|
+ goto repeat;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ base = x - c;
|
|
|
|
+ for (Cases::iterator j = cases.begin(); j != cases.end(); j++) {
|
|
|
|
+ next_check[base + j->first] = make_pair(j->second, from);
|
|
|
|
+ size_t prev = free_list[base + j->first].first;
|
|
|
|
+ size_t next = free_list[base + j->first].second;
|
|
|
|
+ if (prev)
|
|
|
|
+ free_list[prev].second = next;
|
|
|
|
+ if (next)
|
|
|
|
+ free_list[next].first = prev;
|
|
|
|
+ if (base + j->first == first_free)
|
|
|
|
+ first_free = next;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+do_insert:
|
|
|
|
+ default_base.push_back(make_pair(default_state, base));
|
|
|
|
+}
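insert_state() packs each state's transitions into the shared next/check arrays at some base offset, with default_base recording the fallback transition. The matching lookup is done by whatever consumes these tables (it is not in this file); conceptually it is the classic comb-vector scheme, sketched here with plain arrays (hypothetical, not part of the patch):

	/* next state for (s, c): use the packed slot only if check[] says the
	 * slot was written for s, otherwise take s's default transition. */
	static int comb_next(const int *base, const int *def,
			     const int *next, const int *check,
			     int s, unsigned char c)
	{
		int pos = base[s] + c;
		if (check[pos] == s)
			return next[pos];
		return def[s];
	}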
|
|
|
|
+
|
|
|
|
+/**
|
|
|
|
+ * Text-dump the transition table (for debugging).
|
|
|
|
+ */
|
|
|
|
+void TransitionTable::dump(ostream& os)
|
|
|
|
+{
|
|
|
|
+ map<size_t, const State *> st;
|
|
|
|
+ for (map<const State *, size_t>::iterator i = num.begin();
|
|
|
|
+ i != num.end();
|
|
|
|
+ i++) {
|
|
|
|
+ st.insert(make_pair(i->second, i->first));
|
|
|
|
+ }
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ os << "size=" << default_base.size() << " (accept, default, base): {state} -> {default state}" << endl;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ for (size_t i = 0; i < default_base.size(); i++) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ os << i << ": ";
|
2011-01-17 17:43:05 +01:00
|
|
|
+ os << "(" << accept[i] << ", "
|
|
|
|
+ << num[default_base[i].first] << ", "
|
|
|
|
+ << default_base[i].second << ")";
|
|
|
|
+ if (st[i])
|
|
|
|
+ os << " " << *st[i];
|
|
|
|
+ if (default_base[i].first)
|
|
|
|
+ os << " -> " << *default_base[i].first;
|
|
|
|
+ os << endl;
|
|
|
|
+ }
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ os << "size=" << next_check.size() << " (next, check): {check state} -> {next state} : offset from base" << endl;
|
2011-01-17 17:43:05 +01:00
|
|
|
+ for (size_t i = 0; i < next_check.size(); i++) {
|
|
|
|
+ if (!next_check[i].second)
|
|
|
|
+ continue;
|
|
|
|
+
|
|
|
|
+ os << i << ": ";
|
|
|
|
+ if (next_check[i].second) {
|
|
|
|
+ os << "(" << num[next_check[i].first] << ", "
|
|
|
|
+ << num[next_check[i].second] << ")" << " "
|
|
|
|
+ << *next_check[i].second << " -> "
|
|
|
|
+ << *next_check[i].first << ": ";
|
|
|
|
+
|
|
|
|
+ size_t offs = i - default_base[num[next_check[i].second]].second;
|
|
|
|
+ if (eq.size())
|
|
|
|
+ os << offs;
|
|
|
|
+ else
|
|
|
|
+ os << (uchar)offs;
|
|
|
|
+ }
|
|
|
|
+ os << endl;
|
|
|
|
+ }
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+#if 0
|
|
|
|
+template<class Iter>
|
|
|
|
+class FirstIterator {
|
|
|
|
+public:
|
|
|
|
+ FirstIterator(Iter pos) : pos(pos) { }
|
|
|
|
+ typename Iter::value_type::first_type operator*() { return pos->first; }
|
|
|
|
+ bool operator!=(FirstIterator<Iter>& i) { return pos != i.pos; }
|
|
|
|
+ void operator++() { ++pos; }
|
|
|
|
+ ssize_t operator-(FirstIterator<Iter> i) { return pos - i.pos; }
|
|
|
|
+private:
|
|
|
|
+ Iter pos;
|
|
|
|
+};
|
|
|
|
+
|
|
|
|
+template<class Iter>
|
|
|
|
+FirstIterator<Iter> first_iterator(Iter iter)
|
|
|
|
+{
|
|
|
|
+ return FirstIterator<Iter>(iter);
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+template<class Iter>
|
|
|
|
+class SecondIterator {
|
|
|
|
+public:
|
|
|
|
+ SecondIterator(Iter pos) : pos(pos) { }
|
|
|
|
+ typename Iter::value_type::second_type operator*() { return pos->second; }
|
|
|
|
+ bool operator!=(SecondIterator<Iter>& i) { return pos != i.pos; }
|
|
|
|
+ void operator++() { ++pos; }
|
|
|
|
+ ssize_t operator-(SecondIterator<Iter> i) { return pos - i.pos; }
|
|
|
|
+private:
|
|
|
|
+ Iter pos;
|
|
|
|
+};
|
|
|
|
+
|
|
|
|
+template<class Iter>
|
|
|
|
+SecondIterator<Iter> second_iterator(Iter iter)
|
|
|
|
+{
|
|
|
|
+ return SecondIterator<Iter>(iter);
|
|
|
|
+}
|
|
|
|
+#endif
|
|
|
|
+
|
|
|
|
+/**
|
|
|
|
+ * Create a flex-style binary dump of the DFA tables. The table format
|
|
|
|
+ * was partly reverse engineered from the flex sources and from
|
|
|
|
+ * examining the tables that flex creates with its --tables-file option.
|
|
|
|
+ * (Only the -Cf and -Ce formats are currently supported.)
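+ *
+ * Rough layout, as produced by the code below (each section padded to a
+ * 64-bit boundary):
+ *
+ *   table_set_header, version string, name string, padding
+ *   then for each table: table_header, table data, padding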
|
|
|
|
+ */
|
|
|
|
+
|
|
|
|
+#include "flex-tables.h"
|
|
|
|
+#include "regexp.h"
|
|
|
|
+
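+/*
+ * Table sections in the output are aligned to 64-bit boundaries: pad64()
+ * rounds a size up to the next multiple of 8 (e.g. pad64(13) == 16), and
+ * fill64() returns the matching zero padding for a given stream position.
+ */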
|
|
|
|
+static inline size_t pad64(size_t i)
|
|
|
|
+{
|
|
|
|
+ return (i + (size_t)7) & ~(size_t)7;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+string fill64(size_t i)
|
|
|
|
+{
|
|
|
|
+ const char zeroes[8] = { };
|
|
|
|
+ string fill(zeroes, (i & 7) ? 8 - (i & 7) : 0);
|
|
|
|
+ return fill;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+template<class Iter>
|
|
|
|
+size_t flex_table_size(Iter pos, Iter end)
|
|
|
|
+{
|
|
|
|
+ return pad64(sizeof(struct table_header) + sizeof(*pos) * (end - pos));
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+template<class Iter>
|
|
|
|
+void write_flex_table(ostream& os, int id, Iter pos, Iter end)
|
|
|
|
+{
|
2011-03-25 09:04:51 +01:00
|
|
|
+ struct table_header td = { 0, 0, 0, 0 };
|
2011-01-17 17:43:05 +01:00
|
|
|
+ size_t size = end - pos;
|
|
|
|
+
|
|
|
|
+ td.td_id = htons(id);
|
|
|
|
+ td.td_flags = htons(sizeof(*pos));
|
|
|
|
+ td.td_lolen = htonl(size);
|
|
|
|
+ os.write((char *)&td, sizeof(td));
|
|
|
|
+
|
|
|
|
+ for (; pos != end; ++pos) {
|
|
|
|
+ switch(sizeof(*pos)) {
|
|
|
|
+ case 4:
|
|
|
|
+ os.put((char)(*pos >> 24));
|
|
|
|
+ os.put((char)(*pos >> 16));
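+ /* fall through */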
|
|
|
|
+ case 2:
|
|
|
|
+ os.put((char)(*pos >> 8));
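+ /* fall through */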
|
|
|
|
+ case 1:
|
|
|
|
+ os.put((char)*pos);
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ os << fill64(sizeof(td) + sizeof(*pos) * size);
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+void TransitionTable::flex_table(ostream& os, const char *name)
|
|
|
|
+{
|
|
|
|
+ const char th_version[] = "notflex";
|
2011-03-25 09:04:51 +01:00
|
|
|
+ struct table_set_header th = { 0, 0, 0, 0 };
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
|
|
|
+ /**
|
|
|
|
+ * Change the following two data types to adjust the maximum flex
|
|
|
|
+ * table size.
|
|
|
|
+ */
|
|
|
|
+ typedef uint16_t state_t;
|
|
|
|
+ typedef uint32_t trans_t;
|
|
|
|
+
|
|
|
|
+ if (default_base.size() >= (state_t)-1) {
|
|
|
|
+ cerr << "Too many states (" << default_base.size() << ") for "
|
|
|
|
+ "type state_t" << endl;
|
|
|
|
+ exit(1);
|
|
|
|
+ }
|
|
|
|
+ if (next_check.size() >= (trans_t)-1) {
|
|
|
|
+ cerr << "Too many transitions (" << next_check.size() << ") for "
|
|
|
|
+ "type trans_t" << endl;
|
|
|
|
+ exit(1);
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ /**
|
|
|
|
+ * Create copies of the data structures so that we can dump the tables
|
|
|
|
+ * using the generic write_flex_table() routine.
|
|
|
|
+ */
|
|
|
|
+ vector<uint8_t> equiv_vec;
|
|
|
|
+ if (eq.size()) {
|
|
|
|
+ equiv_vec.resize(256);
|
|
|
|
+ for (map<uchar, uchar>::iterator i = eq.begin(); i != eq.end(); i++) {
|
|
|
|
+ equiv_vec[i->first] = i->second;
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ vector<state_t> default_vec;
|
|
|
|
+ vector<trans_t> base_vec;
|
|
|
|
+ for (DefaultBase::iterator i = default_base.begin();
|
|
|
|
+ i != default_base.end();
|
|
|
|
+ i++) {
|
|
|
|
+ default_vec.push_back(num[i->first]);
|
|
|
|
+ base_vec.push_back(i->second);
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ vector<state_t> next_vec;
|
|
|
|
+ vector<state_t> check_vec;
|
|
|
|
+ for (NextCheck::iterator i = next_check.begin();
|
|
|
|
+ i != next_check.end();
|
|
|
|
+ i++) {
|
|
|
|
+ next_vec.push_back(num[i->first]);
|
|
|
|
+ check_vec.push_back(num[i->second]);
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ /* Write the actual flex parser table. */
|
|
|
|
+
|
|
|
|
+ size_t hsize = pad64(sizeof(th) + sizeof(th_version) + strlen(name) + 1);
|
|
|
|
+ th.th_magic = htonl(YYTH_REGEXP_MAGIC);
|
|
|
|
+ th.th_hsize = htonl(hsize);
|
|
|
|
+ th.th_ssize = htonl(hsize +
|
|
|
|
+ flex_table_size(accept.begin(), accept.end()) +
|
|
|
|
+ flex_table_size(accept2.begin(), accept2.end()) +
|
|
|
|
+ (eq.size() ?
|
|
|
|
+ flex_table_size(equiv_vec.begin(), equiv_vec.end()) : 0) +
|
|
|
|
+ flex_table_size(base_vec.begin(), base_vec.end()) +
|
|
|
|
+ flex_table_size(default_vec.begin(), default_vec.end()) +
|
|
|
|
+ flex_table_size(next_vec.begin(), next_vec.end()) +
|
|
|
|
+ flex_table_size(check_vec.begin(), check_vec.end()));
|
|
|
|
+ os.write((char *)&th, sizeof(th));
|
|
|
|
+ os << th_version << (char)0 << name << (char)0;
|
|
|
|
+ os << fill64(sizeof(th) + sizeof(th_version) + strlen(name) + 1);
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ write_flex_table(os, YYTD_ID_ACCEPT, accept.begin(), accept.end());
|
|
|
|
+ write_flex_table(os, YYTD_ID_ACCEPT2, accept2.begin(), accept2.end());
|
|
|
|
+ if (eq.size())
|
|
|
|
+ write_flex_table(os, YYTD_ID_EC, equiv_vec.begin(), equiv_vec.end());
|
|
|
|
+ write_flex_table(os, YYTD_ID_BASE, base_vec.begin(), base_vec.end());
|
|
|
|
+ write_flex_table(os, YYTD_ID_DEF, default_vec.begin(), default_vec.end());
|
|
|
|
+ write_flex_table(os, YYTD_ID_NXT, next_vec.begin(), next_vec.end());
|
|
|
|
+ write_flex_table(os, YYTD_ID_CHK, check_vec.begin(), check_vec.end());
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+#if 0
|
|
|
|
+typedef set<ImportantNode *> AcceptNodes;
|
|
|
|
+map<ImportantNode *, AcceptNodes> dominance(DFA& dfa)
|
|
|
|
+{
|
|
|
|
+ map<ImportantNode *, AcceptNodes> is_dominated;
|
|
|
|
+
|
|
|
|
+ for (States::iterator i = dfa.states.begin(); i != dfa.states.end(); i++) {
|
|
|
|
+ AcceptNodes set1;
|
|
|
|
+ for (State::iterator j = (*i)->begin(); j != (*i)->end(); j++) {
|
|
|
|
+ if (AcceptNode *accept = dynamic_cast<AcceptNode *>(*j))
|
|
|
|
+ set1.insert(accept);
|
|
|
|
+ }
|
|
|
|
+ for (AcceptNodes::iterator j = set1.begin(); j != set1.end(); j++) {
|
|
|
|
+ pair<map<ImportantNode *, AcceptNodes>::iterator, bool> x =
|
|
|
|
+ is_dominated.insert(make_pair(*j, set1));
|
|
|
|
+ if (!x.second) {
|
|
|
|
+ AcceptNodes &set2(x.first->second), set3;
|
|
|
|
+ for (AcceptNodes::iterator l = set2.begin();
|
|
|
|
+ l != set2.end();
|
|
|
|
+ l++) {
|
|
|
|
+ if (set1.find(*l) != set1.end())
|
|
|
|
+ set3.insert(*l);
|
|
|
|
+ }
|
|
|
|
+ set3.swap(set2);
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ return is_dominated;
|
|
|
|
+}
|
|
|
|
+#endif
|
|
|
|
+
|
|
|
|
+void dump_regexp_rec(ostream& os, Node *tree)
|
|
|
|
+{
|
|
|
|
+ if (tree->child[0])
|
|
|
|
+ dump_regexp_rec(os, tree->child[0]);
|
|
|
|
+ os << *tree;
|
|
|
|
+ if (tree->child[1])
|
|
|
|
+ dump_regexp_rec(os, tree->child[1]);
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+void dump_regexp(ostream& os, Node *tree)
|
|
|
|
+{
|
|
|
|
+ dump_regexp_rec(os, tree);
|
|
|
|
+ os << endl;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+#include <sstream>
|
|
|
|
+#include <ext/stdio_filebuf.h>
|
|
|
|
+
|
|
|
|
+struct aare_ruleset {
|
|
|
|
+ int reverse;
|
|
|
|
+ Node *root;
|
|
|
|
+};
|
|
|
|
+
|
|
|
|
+extern "C" aare_ruleset_t *aare_new_ruleset(int reverse)
|
|
|
|
+{
|
|
|
|
+ aare_ruleset_t *container = (aare_ruleset_t *) malloc(sizeof(aare_ruleset_t));
|
|
|
|
+ if (!container)
|
|
|
|
+ return NULL;
|
|
|
|
+
|
|
|
|
+ container->root = NULL;
|
|
|
|
+ container->reverse = reverse;
|
|
|
|
+
|
|
|
|
+ return container;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+extern "C" void aare_delete_ruleset(aare_ruleset_t *rules)
|
|
|
|
+{
|
|
|
|
+ if (rules) {
|
|
|
|
+ if (rules->root)
|
|
|
|
+ rules->root->release();
|
|
|
|
+ free(rules);
|
|
|
|
+ }
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+static inline int diff_qualifiers(uint32_t perm1, uint32_t perm2)
|
|
|
|
+{
|
|
|
|
+ return ((perm1 & AA_EXEC_TYPE) && (perm2 & AA_EXEC_TYPE) &&
|
|
|
|
+ (perm1 & AA_EXEC_TYPE) != (perm2 & AA_EXEC_TYPE));
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+/**
|
|
|
|
+ * Compute the permission flags that this state corresponds to. If we
|
|
|
|
+ * have any exact matches, then they override the execute and safe
|
|
|
|
+ * execute flags.
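+ *
+ * Illustration: if an exact-match rule set "ux" for the user class while a
+ * wildcard rule set "px", the exact match wins and only the "ux" exec type
+ * survives for that class (see the AA_USER_EXEC_TYPE handling below).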
|
|
|
|
+ */
|
2011-03-25 09:04:51 +01:00
|
|
|
+uint32_t accept_perms(NodeSet *state, uint32_t *audit_ctl, int *error)
|
2011-01-17 17:43:05 +01:00
|
|
|
+{
|
|
|
|
+ uint32_t perms = 0, exact_match_perms = 0, audit = 0, exact_audit = 0,
|
|
|
|
+ quiet = 0, deny = 0;
|
|
|
|
+
|
|
|
|
+ if (error)
|
|
|
|
+ *error = 0;
|
2011-03-25 09:04:51 +01:00
|
|
|
+ for (NodeSet::iterator i = state->begin(); i != state->end(); i++) {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ MatchFlag *match;
|
|
|
|
+ if (!(match = dynamic_cast<MatchFlag *>(*i)))
|
|
|
|
+ continue;
|
|
|
|
+ if (dynamic_cast<ExactMatchFlag *>(match)) {
|
|
|
|
+ /* exact match only ever happens with x */
|
|
|
|
+ if (!is_merged_x_consistent(exact_match_perms,
|
|
|
|
+ match->flag) && error)
|
|
|
|
+ *error = 1;
|
|
|
|
+ exact_match_perms |= match->flag;
|
|
|
|
+ exact_audit |= match->audit;
|
|
|
|
+ } else if (dynamic_cast<DenyMatchFlag *>(match)) {
|
|
|
|
+ deny |= match->flag;
|
|
|
|
+ quiet |= match->audit;
|
|
|
|
+ } else {
|
|
|
|
+ if (!is_merged_x_consistent(perms, match->flag) && error)
|
|
|
|
+ *error = 1;
|
|
|
|
+ perms |= match->flag;
|
|
|
|
+ audit |= match->audit;
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+//if (audit || quiet)
|
|
|
|
+//fprintf(stderr, "perms: 0x%x, audit: 0x%x exact: 0x%x eaud: 0x%x deny: 0x%x quiet: 0x%x\n", perms, audit, exact_match_perms, exact_audit, deny, quiet);
|
|
|
|
+
|
|
|
|
+ perms |= exact_match_perms &
|
|
|
|
+ ~(AA_USER_EXEC_TYPE | AA_OTHER_EXEC_TYPE);
|
|
|
|
+
|
|
|
|
+ if (exact_match_perms & AA_USER_EXEC_TYPE) {
|
|
|
|
+ perms = (exact_match_perms & AA_USER_EXEC_TYPE) |
|
|
|
|
+ (perms & ~AA_USER_EXEC_TYPE);
|
|
|
|
+ audit = (exact_audit & AA_USER_EXEC_TYPE) |
|
|
|
|
+ (audit & ~AA_USER_EXEC_TYPE);
|
|
|
|
+ }
|
|
|
|
+ if (exact_match_perms & AA_OTHER_EXEC_TYPE) {
|
|
|
|
+ perms = (exact_match_perms & AA_OTHER_EXEC_TYPE) |
|
|
|
|
+ (perms & ~AA_OTHER_EXEC_TYPE);
|
|
|
|
+ audit = (exact_audit & AA_OTHER_EXEC_TYPE) |
|
|
|
|
+ (audit & ~AA_OTHER_EXEC_TYPE);
|
|
|
|
+ }
|
|
|
|
+ if (perms & AA_USER_EXEC & deny)
|
|
|
|
+ perms &= ~AA_USER_EXEC_TYPE;
|
|
|
|
+
|
|
|
|
+ if (perms & AA_OTHER_EXEC & deny)
|
|
|
|
+ perms &= ~AA_OTHER_EXEC_TYPE;
|
|
|
|
+
|
|
|
|
+ perms &= ~deny;
|
|
|
|
+
|
|
|
|
+ if (audit_ctl)
|
|
|
|
+ *audit_ctl = PACK_AUDIT_CTL(audit, quiet & deny);
|
|
|
|
+
|
|
|
|
+// if (perms & AA_ERROR_BIT) {
|
|
|
|
+// fprintf(stderr, "error bit 0x%x\n", perms);
|
|
|
|
+// exit(255);
|
|
|
|
+//}
|
|
|
|
+
|
|
|
|
+ //if (perms & AA_EXEC_BITS)
|
|
|
|
+ //fprintf(stderr, "accept perm: 0x%x\n", perms);
|
|
|
|
+ /*
|
|
|
|
+ if (perms & ~AA_VALID_PERMS)
|
|
|
|
+ yyerror(_("Internal error accumulated invalid perm 0x%llx\n"), perms);
|
|
|
|
+ */
|
|
|
|
+
|
|
|
|
+//if (perms & AA_CHANGE_HAT)
|
|
|
|
+// fprintf(stderr, "change_hat 0x%x\n", perms);
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (error && *error)
|
|
|
|
+ fprintf(stderr, "profile has merged rule with conflicting x modifiers\n");
|
|
|
|
+
|
2011-01-17 17:43:05 +01:00
|
|
|
+ return perms;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+extern "C" int aare_add_rule(aare_ruleset_t *rules, char *rule, int deny,
|
2011-03-25 09:04:51 +01:00
|
|
|
+ uint32_t perms, uint32_t audit, dfaflags_t flags)
|
2011-01-17 17:43:05 +01:00
|
|
|
+{
|
2011-03-25 09:04:51 +01:00
|
|
|
+ return aare_add_rule_vec(rules, deny, perms, audit, 1, &rule, flags);
|
2011-01-17 17:43:05 +01:00
|
|
|
+}
|
|
|
|
+
|
|
|
|
+#define FLAGS_WIDTH 2
|
|
|
|
+#define MATCH_FLAGS_SIZE (sizeof(uint32_t) * 8 - 1)
|
|
|
|
+MatchFlag *match_flags[FLAGS_WIDTH][MATCH_FLAGS_SIZE];
|
|
|
|
+DenyMatchFlag *deny_flags[FLAGS_WIDTH][MATCH_FLAGS_SIZE];
|
2011-01-25 13:16:44 +01:00
|
|
|
+#define EXEC_MATCH_FLAGS_SIZE (AA_EXEC_COUNT * 2 * 2 * 2) /* doubled for each of ix, pux, unsafe x bits, * u::o */
|
|
|
|
+MatchFlag *exec_match_flags[FLAGS_WIDTH][EXEC_MATCH_FLAGS_SIZE]; /* mods + unsafe + ix + pux, * u::o */
|
|
|
|
+ExactMatchFlag *exact_match_flags[FLAGS_WIDTH][EXEC_MATCH_FLAGS_SIZE]; /* mods + unsafe + ix + pux, * u::o */
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
|
|
|
+extern "C" void aare_reset_matchflags(void)
|
|
|
|
+{
|
|
|
|
+ uint32_t i, j;
|
|
|
|
+#define RESET_FLAGS(group, size) { \
|
|
|
|
+ for (i = 0; i < FLAGS_WIDTH; i++) { \
|
|
|
|
+ for (j = 0; j < size; j++) { \
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if ((group)[i][j]) delete (group)[i][j]; \
|
2011-01-17 17:43:05 +01:00
|
|
|
+ (group)[i][j] = NULL; \
|
|
|
|
+ } \
|
|
|
|
+ } \
|
|
|
|
+}
|
|
|
|
+ RESET_FLAGS(match_flags,MATCH_FLAGS_SIZE);
|
|
|
|
+ RESET_FLAGS(deny_flags,MATCH_FLAGS_SIZE);
|
|
|
|
+ RESET_FLAGS(exec_match_flags,EXEC_MATCH_FLAGS_SIZE);
|
|
|
|
+ RESET_FLAGS(exact_match_flags,EXEC_MATCH_FLAGS_SIZE);
|
|
|
|
+#undef RESET_FLAGS
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+extern "C" int aare_add_rule_vec(aare_ruleset_t *rules, int deny,
|
|
|
|
+ uint32_t perms, uint32_t audit,
|
2011-03-25 09:04:51 +01:00
|
|
|
+ int count, char **rulev,
|
|
|
|
+ dfaflags_t flags)
|
2011-01-17 17:43:05 +01:00
|
|
|
+{
|
|
|
|
+ Node *tree = NULL, *accept;
|
|
|
|
+ int exact_match;
|
|
|
|
+
|
|
|
|
+ assert(perms != 0);
|
|
|
|
+
|
|
|
|
+ if (regexp_parse(&tree, rulev[0]))
|
|
|
|
+ return 0;
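+ /* A multi-part rule (count > 1) is joined into a single expression with
+ * a literal \0 separator (CharNode(0)) between the parts; each part is
+ * parsed separately and concatenated onto the tree. */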
|
|
|
|
+ for (int i = 1; i < count; i++) {
|
|
|
|
+ Node *subtree = NULL;
|
|
|
|
+ Node *node = new CharNode(0);
|
|
|
|
+ if (!node)
|
|
|
|
+ return 0;
|
|
|
|
+ tree = new CatNode(tree, node);
|
|
|
|
+ if (regexp_parse(&subtree, rulev[i]))
|
|
|
|
+ return 0;
|
|
|
|
+ tree = new CatNode(tree, subtree);
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ /*
|
|
|
|
+ * Check if we have an expression with or without wildcards. This
|
|
|
|
+ * determines how exec modifiers are merged in accept_perms() based
|
|
|
|
+ * on how we split permission bitmasks here.
|
|
|
|
+ */
|
|
|
|
+ exact_match = 1;
|
|
|
|
+ for (depth_first_traversal i(tree); i; i++) {
|
|
|
|
+ if (dynamic_cast<StarNode *>(*i) ||
|
|
|
|
+ dynamic_cast<PlusNode *>(*i) ||
|
|
|
|
+ dynamic_cast<AnyCharNode *>(*i) ||
|
|
|
|
+ dynamic_cast<CharSetNode *>(*i) ||
|
|
|
|
+ dynamic_cast<NotCharSetNode *>(*i))
|
|
|
|
+ exact_match = 0;
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ if (rules->reverse)
|
|
|
|
+ flip_tree(tree);
|
|
|
|
+
|
|
|
|
+
|
2011-01-25 13:16:44 +01:00
|
|
|
+/* 0x7f == 4 bits of x mods + 1 bit unsafe mask + 1 bit ix + 1 bit pux, after shift */
|
|
|
|
+#define EXTRACT_X_INDEX(perm, shift) (((perm) >> (shift + 7)) & 0x7f)
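+/* Illustrative: EXTRACT_X_INDEX(perm, AA_USER_SHIFT) pulls out those 7 x
+ * bits for the user class; the result is used below to index the
+ * per-exec-type MatchFlag caches (with an AA_EXEC_COUNT-based offset for
+ * the "other" class). */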
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
|
|
|
+//if (perms & ALL_AA_EXEC_TYPE && (!perms & AA_EXEC_BITS))
|
|
|
|
+// fprintf(stderr, "adding X rule without MAY_EXEC: 0x%x %s\n", perms, rulev[0]);
|
|
|
|
+
|
|
|
|
+//if (perms & ALL_EXEC_TYPE)
|
|
|
|
+// fprintf(stderr, "adding X rule %s 0x%x\n", rulev[0], perms);
|
|
|
|
+
|
|
|
|
+//if (audit)
|
|
|
|
+//fprintf(stderr, "adding rule with audit bits set: 0x%x %s\n", audit, rulev[0]);
|
|
|
|
+
|
|
|
|
+//if (perms & AA_CHANGE_HAT)
|
|
|
|
+// fprintf(stderr, "adding change_hat rule %s\n", rulev[0]);
|
|
|
|
+
|
|
|
|
+/* the permissions set is assumed to be non-empty if any audit
|
|
|
|
+ * bits are specified */
|
|
|
|
+ accept = NULL;
|
|
|
|
+ for (unsigned int n = 0; perms && n < (sizeof(perms) * 8) ; n++) {
|
|
|
|
+ uint32_t mask = 1 << n;
|
|
|
|
+
|
|
|
|
+ if (perms & mask) {
|
|
|
|
+ int ai = audit & mask ? 1 : 0;
|
|
|
|
+ perms &= ~mask;
|
|
|
|
+
|
|
|
|
+ Node *flag;
|
|
|
|
+ if (mask & ALL_AA_EXEC_TYPE)
|
|
|
|
+ /* these cases are covered by EXEC_BITS */
|
|
|
|
+ continue;
|
|
|
|
+ if (deny) {
|
|
|
|
+ if (deny_flags[ai][n]) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ flag = deny_flags[ai][n];
|
2011-01-17 17:43:05 +01:00
|
|
|
+ } else {
|
|
|
|
+//fprintf(stderr, "Adding deny ai %d mask 0x%x audit 0x%x\n", ai, mask, audit & mask);
|
|
|
|
+ deny_flags[ai][n] = new DenyMatchFlag(mask, audit&mask);
|
2011-03-25 09:04:51 +01:00
|
|
|
+ flag = deny_flags[ai][n];
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ } else if (mask & AA_EXEC_BITS) {
|
|
|
|
+ uint32_t eperm = 0;
|
|
|
|
+ uint32_t index = 0;
|
|
|
|
+ if (mask & AA_USER_EXEC) {
|
|
|
|
+ eperm = mask | (perms & AA_USER_EXEC_TYPE);
|
|
|
|
+ index = EXTRACT_X_INDEX(eperm, AA_USER_SHIFT);
|
|
|
|
+ } else {
|
|
|
|
+ eperm = mask | (perms & AA_OTHER_EXEC_TYPE);
|
|
|
|
+ index = EXTRACT_X_INDEX(eperm, AA_OTHER_SHIFT) + (AA_EXEC_COUNT << 2);
|
|
|
|
+ }
|
|
|
|
+//fprintf(stderr, "index %d eperm 0x%x\n", index, eperm);
|
|
|
|
+ if (exact_match) {
|
|
|
|
+ if (exact_match_flags[ai][index]) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ flag = exact_match_flags[ai][index];
|
2011-01-17 17:43:05 +01:00
|
|
|
+ } else {
|
|
|
|
+ exact_match_flags[ai][index] = new ExactMatchFlag(eperm, audit&mask);
|
2011-03-25 09:04:51 +01:00
|
|
|
+ flag = exact_match_flags[ai][index];
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ } else {
|
|
|
|
+ if (exec_match_flags[ai][index]) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ flag = exec_match_flags[ai][index];
|
2011-01-17 17:43:05 +01:00
|
|
|
+ } else {
|
|
|
|
+ exec_match_flags[ai][index] = new MatchFlag(eperm, audit&mask);
|
2011-03-25 09:04:51 +01:00
|
|
|
+ flag = exec_match_flags[ai][index];
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ } else {
|
|
|
|
+ if (match_flags[ai][n]) {
|
2011-03-25 09:04:51 +01:00
|
|
|
+ flag = match_flags[ai][n];
|
2011-01-17 17:43:05 +01:00
|
|
|
+ } else {
|
|
|
|
+ match_flags[ai][n] = new MatchFlag(mask, audit&mask);
|
2011-03-25 09:04:51 +01:00
|
|
|
+ flag = match_flags[ai][n];
|
2011-01-17 17:43:05 +01:00
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+ if (accept)
|
|
|
|
+ accept = new AltNode(accept, flag);
|
|
|
|
+ else
|
|
|
|
+ accept = flag;
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (flags & DFA_DUMP_RULE_EXPR) {
|
|
|
|
+ cerr << "rule: ";
|
|
|
|
+ cerr << rulev[0];
|
|
|
|
+ for (int i = 1; i < count; i++) {
|
|
|
|
+ cerr << "\\x00";
|
|
|
|
+ cerr << rulev[i];
|
|
|
|
+ }
|
|
|
|
+ cerr << " -> ";
|
|
|
|
+ tree->dump(cerr);
|
|
|
|
+ cerr << "\n\n";
|
|
|
|
+ }
|
|
|
|
+
|
2011-01-17 17:43:05 +01:00
|
|
|
+ if (rules->root)
|
|
|
|
+ rules->root = new AltNode(rules->root, new CatNode(tree, accept));
|
|
|
|
+ else
|
|
|
|
+ rules->root = new CatNode(tree, accept);
|
|
|
|
+
|
|
|
|
+ return 1;
|
|
|
|
+
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+/* create a dfa from the ruleset
|
|
|
|
+ * returns: buffer containing the dfa tables, with @size set to the size of the tables
|
|
|
|
+ * else NULL on failure
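+ *
+ * Processing steps, as implemented below: label the expression tree,
+ * optionally simplify it, build the DFA, optionally minimize it and drop
+ * unreachable states, optionally apply equivalence classes, then build the
+ * transition table and serialize it in flex format into the buffer.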
|
|
|
|
+ */
|
|
|
|
+extern "C" void *aare_create_dfa(aare_ruleset_t *rules, size_t *size, dfaflags_t flags)
|
|
|
|
+{
|
|
|
|
+ char *buffer = NULL;
|
|
|
|
+
|
|
|
|
+ label_nodes(rules->root);
|
|
|
|
+ if (flags & DFA_DUMP_TREE) {
|
|
|
|
+ cerr << "\nDFA: Expression Tree\n";
|
|
|
|
+ rules->root->dump(cerr);
|
|
|
|
+ cerr << "\n\n";
|
|
|
|
+ }
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (flags & DFA_CONTROL_TREE_SIMPLE) {
|
2011-01-17 17:43:05 +01:00
|
|
|
+ rules->root = simplify_tree(rules->root, flags);
|
|
|
|
+
|
|
|
|
+ if (flags & DFA_DUMP_SIMPLE_TREE) {
|
|
|
|
+ cerr << "\nDFA: Simplified Expression Tree\n";
|
|
|
|
+ rules->root->dump(cerr);
|
|
|
|
+ cerr << "\n\n";
|
|
|
|
+ }
|
|
|
|
+ }
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ stringstream stream;
|
|
|
|
+ try {
|
|
|
|
+ DFA dfa(rules->root, flags);
|
|
|
|
+ if (flags & DFA_DUMP_UNIQ_PERMS)
|
|
|
|
+ dfa.dump_uniq_perms("dfa");
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (flags & DFA_CONTROL_MINIMIZE) {
|
|
|
|
+ dfa.minimize(flags);
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (flags & DFA_DUMP_MIN_UNIQ_PERMS)
|
|
|
|
+ dfa.dump_uniq_perms("minimized dfa");
|
|
|
|
+ }
|
|
|
|
+ if (flags & DFA_CONTROL_REMOVE_UNREACHABLE)
|
|
|
|
+ dfa.remove_unreachable(flags);
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (flags & DFA_DUMP_STATES)
|
|
|
|
+ dfa.dump(cerr);
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (flags & DFA_DUMP_GRAPH)
|
|
|
|
+ dfa.dump_dot_graph(cerr);
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ map<uchar, uchar> eq;
|
|
|
|
+ if (flags & DFA_CONTROL_EQUIV) {
|
|
|
|
+ eq = dfa.equivalence_classes(flags);
|
|
|
|
+ dfa.apply_equivalence_classes(eq);
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+ if (flags & DFA_DUMP_EQUIV) {
|
|
|
|
+ cerr << "\nDFA equivalence class\n";
|
|
|
|
+ dump_equivalence_classes(cerr, eq);
|
|
|
|
+ }
|
|
|
|
+ } else if (flags & DFA_DUMP_EQUIV)
|
|
|
|
+ cerr << "\nDFA did not generate an equivalence class\n";
|
|
|
|
+
|
|
|
|
+ TransitionTable transition_table(dfa, eq, flags);
|
|
|
|
+ if (flags & DFA_DUMP_TRANS_TABLE)
|
|
|
|
+ transition_table.dump(cerr);
|
|
|
|
+ transition_table.flex_table(stream, "");
|
|
|
|
+ } catch (int error) {
|
|
|
|
+ *size = 0;
|
|
|
|
+ return NULL;
|
|
|
|
+ }
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
|
|
|
+ stringbuf *buf = stream.rdbuf();
|
|
|
|
+
|
|
|
|
+ buf->pubseekpos(0);
|
|
|
|
+ *size = buf->in_avail();
|
|
|
|
+
|
|
|
|
+ buffer = (char *)malloc(*size);
|
|
|
|
+ if (!buffer)
|
|
|
|
+ return NULL;
|
|
|
|
+ buf->sgetn(buffer, *size);
|
|
|
|
+ return buffer;
|
|
|
|
+}
|
|
|
|
--- a/parser/parser_alias.c
|
|
|
|
+++ b/parser/parser_alias.c
|
2011-03-25 09:04:51 +01:00
|
|
|
@@ -15,6 +15,7 @@
|
2011-01-17 17:43:05 +01:00
|
|
|
* along with this program; if not, contact Novell, Inc.
|
|
|
|
*/
|
|
|
|
|
|
|
|
+#define _GNU_SOURCE 1
|
|
|
|
#include <search.h>
|
|
|
|
#include <stdlib.h>
|
|
|
|
#include <stdarg.h>
|
|
|
|
--- a/parser/parser_main.c
|
|
|
|
+++ b/parser/parser_main.c
|
2011-03-25 09:04:51 +01:00
|
|
|
@@ -41,7 +41,6 @@
|
2011-01-17 17:43:05 +01:00
|
|
|
#include <sys/stat.h>
|
|
|
|
|
|
|
|
#include "parser.h"
|
|
|
|
-#include "parser_version.h"
|
|
|
|
#include "parser_include.h"
|
|
|
|
#include "libapparmor_re/apparmor_re.h"
|
|
|
|
|
2011-09-09 11:06:14 +02:00
|
|
|
@@ -138,7 +137,7 @@ static int debug = 0;
|
2011-01-17 17:43:05 +01:00
|
|
|
|
|
|
|
static void display_version(void)
|
|
|
|
{
|
|
|
|
- printf("%s version " PARSER_VERSION "\n%s\n", parser_title,
|
|
|
|
+ printf("%s version " PACKAGE_VERSION "\n%s\n", parser_title,
|
|
|
|
parser_copyright);
|
|
|
|
}
|
|
|
|
|
|
|
|
--- a/parser/parser_policy.c
|
|
|
|
+++ b/parser/parser_policy.c
|
2011-03-25 09:04:51 +01:00
|
|
|
@@ -19,6 +19,7 @@
|
|
|
|
* Ltd.
|
2011-01-17 17:43:05 +01:00
|
|
|
*/
|
|
|
|
|
|
|
|
+#define _GNU_SOURCE 1
|
|
|
|
#include <stdio.h>
|
|
|
|
#include <stdlib.h>
|
|
|
|
#include <stdarg.h>
|
|
|
|
--- a/parser/parser_regex.c
|
|
|
|
+++ b/parser/parser_regex.c
|
2011-03-25 09:04:51 +01:00
|
|
|
@@ -22,6 +22,8 @@
|
|
|
|
#include <linux/limits.h>
|
2011-01-17 17:43:05 +01:00
|
|
|
#define _(s) gettext(s)
|
|
|
|
|
|
|
|
+#include <pcre.h>
|
|
|
|
+
|
|
|
|
/* #define DEBUG */
|
|
|
|
|
|
|
|
#include "parser.h"
|
|
|
|
--- a/parser/parser_symtab.c
|
|
|
|
+++ b/parser/parser_symtab.c
|
2011-03-25 09:04:51 +01:00
|
|
|
@@ -15,6 +15,7 @@
|
2011-01-17 17:43:05 +01:00
|
|
|
* along with this program; if not, contact Novell, Inc.
|
|
|
|
*/
|
|
|
|
|
|
|
|
+#define _GNU_SOURCE 1
|
|
|
|
#include <search.h>
|
|
|
|
#include <stdlib.h>
|
|
|
|
#include <stdarg.h>
|
|
|
|
--- a/parser/po/Makefile
|
|
|
|
+++ b/parser/po/Makefile
|
2011-03-25 09:04:51 +01:00
|
|
|
@@ -12,9 +12,11 @@ all:
|
2011-01-17 17:43:05 +01:00
|
|
|
|
|
|
|
DISABLED_LANGS=
|
|
|
|
|
|
|
|
-include ../common/Make-po.rules
|
|
|
|
-../common/Make-po.rules:
|
|
|
|
- make -C .. common/Make.rules
|
|
|
|
+NAME="apparmor-parser"
|
|
|
|
+
|
|
|
|
+include ../../common/Make-po.rules
|
|
|
|
+../../common/Make-po.rules:
|
|
|
|
+ make -C ../.. common/Make.rules
|
|
|
|
|
|
|
|
XGETTEXT_ARGS+=--language=C --keyword=_ $(shell if [ -f ${NAME}.pot ] ; then echo -n -j ; fi)
|
|
|
|
|
|
|
|
--- /dev/null
|
|
|
|
+++ b/po/Makefile.am
|
|
|
|
@@ -0,0 +1,2 @@
|
|
|
|
+
|
|
|
|
+
|
|
|
|
--- a/profiles/Makefile
|
|
|
|
+++ b/profiles/Makefile
|
2011-09-09 11:06:14 +02:00
|
|
|
@@ -20,7 +20,7 @@
|
2011-03-25 09:04:51 +01:00
|
|
|
# Makefile for LSM-based AppArmor profiles
|
2011-01-17 17:43:05 +01:00
|
|
|
|
|
|
|
NAME=apparmor-profiles
|
2011-09-09 11:06:14 +02:00
|
|
|
-ALL: local
|
|
|
|
+all: local
|
|
|
|
COMMONDIR=../common/
|
2011-01-17 17:43:05 +01:00
|
|
|
|
2011-09-09 11:06:14 +02:00
|
|
|
include common/Make.rules
|
2011-01-17 17:43:05 +01:00
|
|
|
--- /dev/null
|
|
|
|
+++ b/tests/Makefile.am
|
|
|
|
@@ -0,0 +1 @@
|
|
|
|
+SUBDIRS = regression
|
|
|
|
--- /dev/null
|
|
|
|
+++ b/tests/regression/Makefile.am
|
|
|
|
@@ -0,0 +1 @@
|
|
|
|
+SUBDIRS = subdomain
|
|
|
|
--- /dev/null
|
|
|
|
+++ b/tests/regression/subdomain/Makefile.am
|
|
|
|
@@ -0,0 +1,109 @@
|
|
|
|
+TESTS = access \
|
|
|
|
+ capabilities \
|
|
|
|
+ changeprofile \
|
|
|
|
+ changehat \
|
|
|
|
+ changehat_pthread \
|
|
|
|
+ changehat_fork \
|
|
|
|
+ changehat_misc \
|
|
|
|
+ chdir \
|
|
|
|
+ clone \
|
|
|
|
+ deleted \
|
|
|
|
+ environ \
|
|
|
|
+ exec \
|
|
|
|
+ exec_qual \
|
|
|
|
+ fchdir \
|
|
|
|
+ fork \
|
|
|
|
+ i18n \
|
|
|
|
+ link \
|
|
|
|
+ link_subset \
|
|
|
|
+ mkdir \
|
|
|
|
+ mmap \
|
|
|
|
+ mount \
|
|
|
|
+ mult_mount \
|
|
|
|
+ named_pipe \
|
|
|
|
+ net_raw \
|
|
|
|
+ open \
|
|
|
|
+ openat \
|
|
|
|
+ pipe \
|
|
|
|
+ ptrace \
|
|
|
|
+ pwrite \
|
|
|
|
+ regex \
|
|
|
|
+ rename \
|
|
|
|
+ readdir \
|
|
|
|
+ rw \
|
|
|
|
+ swap \
|
|
|
|
+ sd_flags \
|
|
|
|
+ setattr \
|
|
|
|
+ symlink \
|
|
|
|
+ syscall \
|
|
|
|
+ unix_fd_server \
|
|
|
|
+ unlink \
|
|
|
|
+ xattrs \
|
|
|
|
+ longpath
|
|
|
|
+
|
|
|
|
+check_PROGRAMS = $(TESTS)
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+SOURCES = \
|
|
|
|
+ access.c \
|
|
|
|
+ changeprofile.c \
|
|
|
|
+ changehat.c \
|
|
|
|
+ changehat_fork.c \
|
|
|
|
+ changehat_misc.c \
|
|
|
|
+ changehat_misc2.c \
|
|
|
|
+ changehat_twice.c \
|
|
|
|
+ changehat_fail.c \
|
|
|
|
+ changehat_wrapper.c \
|
|
|
|
+ changehat_pthread.c \
|
|
|
|
+ chdir.c \
|
|
|
|
+ chgrp.c \
|
|
|
|
+ chmod.c \
|
|
|
|
+ chown.c \
|
|
|
|
+ clone.c \
|
|
|
|
+ deleted.c \
|
|
|
|
+ environ.c \
|
|
|
|
+ env_check.c \
|
|
|
|
+ exec.c \
|
|
|
|
+ exec_qual.c \
|
|
|
|
+ exec_qual2.c \
|
|
|
|
+ fchdir.c \
|
|
|
|
+ fchgrp.c \
|
|
|
|
+ fchmod.c \
|
|
|
|
+ fchown.c \
|
|
|
|
+ fork.c \
|
|
|
|
+ link.c \
|
|
|
|
+ link_subset.c \
|
|
|
|
+ mmap.c \
|
|
|
|
+ mkdir.c \
|
|
|
|
+ mount.c \
|
|
|
|
+ named_pipe.c \
|
|
|
|
+ net_raw.c \
|
|
|
|
+ open.c \
|
|
|
|
+ openat.c \
|
|
|
|
+ pipe.c \
|
|
|
|
+ ptrace.c \
|
|
|
|
+ ptrace_helper.c \
|
|
|
|
+ pwrite.c \
|
|
|
|
+ rename.c \
|
|
|
|
+ readdir.c \
|
|
|
|
+ rw.c \
|
|
|
|
+ symlink.c \
|
|
|
|
+ syscall_mknod.c \
|
|
|
|
+ swap.c \
|
|
|
|
+ syscall_chroot.c \
|
|
|
|
+ syscall_mlockall.c \
|
|
|
|
+ syscall_ptrace.c \
|
|
|
|
+ syscall_reboot.c \
|
|
|
|
+ syscall_setpriority.c \
|
|
|
|
+ syscall_sethostname.c \
|
|
|
|
+ syscall_setdomainname.c \
|
|
|
|
+ syscall_setscheduler.c \
|
|
|
|
+ syscall_sysctl.c \
|
|
|
|
+ sysctl_proc.c \
|
|
|
|
+ tcp.c \
|
|
|
|
+ unix_fd_client.c \
|
|
|
|
+ unix_fd_server.c \
|
|
|
|
+ unlink.c \
|
|
|
|
+ xattrs.c
|
|
|
|
+
|
|
|
|
+changehat_pthread_LDFLAGS = -pthread
|
|
|
|
--- /dev/null
|
2011-03-25 09:04:51 +01:00
|
|
|
+++ b/utils/Immunix/Makefile.am
|
|
|
|
@@ -0,0 +1,3 @@
|
|
|
|
+perlmoddir = $(VENDOR_PERL)/Immunix
|
|
|
|
+
|
2011-09-09 11:06:14 +02:00
|
|
|
+perlmod_DATA = AppArmor.pm Repository.pm Config.pm Reports.pm Severity.pm SubDomain.pm
|
2011-03-25 09:04:51 +01:00
|
|
|
--- /dev/null
|
2011-01-17 17:43:05 +01:00
|
|
|
+++ b/utils/Makefile.PL
|
|
|
|
@@ -0,0 +1,15 @@
|
|
|
|
+#!/usr/bin/perl -w
|
|
|
|
+
|
|
|
|
+use ExtUtils::MakeMaker;
|
|
|
|
+
|
|
|
|
+use vars qw($CFLAGS $OBJECT $VERSION $OPTIMIZE);
|
|
|
|
+
|
|
|
|
+WriteMakefile(
|
|
|
|
+ 'NAME' => 'AppArmor',
|
|
|
|
+ 'MAKEFILE' => 'Makefile.perl',
|
|
|
|
+ 'FIRST_MAKEFILE' => 'Makefile.perl',
|
|
|
|
+ 'ABSTRACT' => q[AppArmor utility interface],
|
|
|
|
+ 'EXE_FILES'=> ['genprof', 'logprof', 'autodep', 'audit',
|
|
|
|
+ 'complain', 'enforce', 'unconfined', 'aa-eventd',
|
|
|
|
+ 'apparmor_status', 'apparmor_notify'],
|
|
|
|
+);
|
|
|
|
--- /dev/null
|
|
|
|
+++ b/utils/Makefile.am
|
2011-03-25 09:04:51 +01:00
|
|
|
@@ -0,0 +1,36 @@
|
|
|
|
+dist_man_MANS = aa-autodep.8 aa-complain.8 aa-enforce.8 aa-logprof.8 \
|
|
|
|
+ aa-genprof.8 aa-unconfined.8 aa-audit.8 aa-status.8 \
|
|
|
|
+ aa-decode.8 aa-notify.8 logprof.conf.5
|
2011-01-17 17:43:05 +01:00
|
|
|
+noinst_DATA = $(addsuffix .html,$(dist_man_MANS))
|
|
|
|
+
|
2011-03-25 09:04:51 +01:00
|
|
|
+sbin_SCRIPTS = aa-genprof aa-logprof aa-autodep aa-audit aa-complain \
|
|
|
|
+ aa-enforce aa-unconfined aa-eventd aa-status aa-decode \
|
|
|
|
+ aa-notify
|
2011-01-17 17:43:05 +01:00
|
|
|
+
|
|
|
|
+etc_apparmor_DATA = logprof.conf notify.conf severity.db
|
|
|
|
+
|
|
|
|
+install-data-local:
|
|
|
|
+ $(mkinstalldirs) $(DESTDIR)/var/log/apparmor
|
|
|
|
+
|
|
|
|
+CLEANFILES = $(dist_man_MANS) Makefile.perl blib
|
|
|
|
+
|
|
|
|
+PODARGS = --center=AppArmor --release=NOVELL/SUSE
|
|
|
|
+
|
|
|
|
+pod2man = pod2man $(PODARGS) --section $(subst .,,$(suffix $<)) $< > $@
|
|
|
|
+
|
|
|
|
+.pod.5:
|
|
|
|
+ $(pod2man)
|
|
|
|
+.pod.7:
|
|
|
|
+ $(pod2man)
|
|
|
|
+.pod.8:
|
|
|
|
+ $(pod2man)
|
|
|
|
+
|
|
|
|
+pod2html = pod2html --header --css ../common/apparmor.css --infile=$< --outfile=$@
|
|
|
|
+
|
|
|
|
+%.5.html : %.pod
|
|
|
|
+ $(pod2html)
|
|
|
|
+%.7.html : %.pod
|
|
|
|
+ $(pod2html)
|
|
|
|
+%.8.html : %.pod
|
|
|
|
+ $(pod2html)
|
2011-03-25 09:04:51 +01:00
|
|
|
+SUBDIRS = po Immunix
|
2011-01-17 17:43:05 +01:00
|
|
|
--- a/utils/po/Makefile
|
|
|
|
+++ b/utils/po/Makefile
|
2011-03-25 09:04:51 +01:00
|
|
|
@@ -18,10 +18,12 @@ all:
|
2011-01-17 17:43:05 +01:00
|
|
|
# As translations get added, they will automatically be included, unless
|
|
|
|
# the lang is explicitly added to DISABLED_LANGS; e.g. DISABLED_LANGS=en es
|
|
|
|
|
|
|
|
+NAME="apparmor-utils"
|
|
|
|
+
|
|
|
|
DISABLED_LANGS=
|
|
|
|
|
|
|
|
-include ../common/Make-po.rules
|
|
|
|
-../common/Make-po.rules:
|
|
|
|
- make -C .. common/Make.rules
|
|
|
|
+include ../../common/Make-po.rules
|
|
|
|
+../../common/Make-po.rules:
|
|
|
|
+ make -C ../.. common/Make.rules
|
|
|
|
|
|
|
|
XGETTEXT_ARGS+=--language=perl
|