From 6c19989b85ff8e39d571e18f173552689da4b9ef Mon Sep 17 00:00:00 2001
From: Tim Daly
Date: Sat, 20 May 2017 21:50:43 -0400
Subject: [PATCH] bookvolbib type inferencing for Common Lisp
Goal: Proving Axiom Correct
\index{Baker, Henry G.}
\begin{chunk}{axiom.bib}
@misc{Bake90,
author = "Baker, Henry G.",
title = "The Nimble Type Inferencer for Common Lisp-84",
link = "\url{http://home.pipeline.com/~hbaker1/TInference.html}",
year = "1990",
abstract =
"We describe a framework and an algorithm for doing type inference
analysis on programs written in full Common Lisp-84 (Common Lisp
without the CLOS object-oriented extensions). The objective of type
inference is to determine tight lattice upper bounds on the range of
runtime data types for Common Lisp program variables and
temporaries. Depending upon the lattice used, type inference can also
provide range analysis information for numeric variables. This lattice
upper bound information can be used by an optimizing compiler to
choose more restrictive, and hence more efficient, representations for
these program variables. Our analysis also produces tighter control
flow information, which can be used to eliminate redundant tests which
result in dead code. The overall goal of type inference is to
mechanically extract from Common Lisp programs the same degree of
representation information that is usually provided by the programmer
in traditional strongly-typed languages. In this way, we can provide
some classes of Common Lisp programs execution time efficiency
expected only for more strongly-typed compiled languages.
The Nimble type inference system follows the traditional
lattice/algebraic data flow techniques [Kaplan80], rather than the
logical/theorem-proving unification techniques of ML [Milner78]. It
can handle polymorphic variables and functions in a natural way, and
provides for ``case-based'' analysis that is quite similar to that used
intuitively by programmers. Additionally, this inference system can
deduce the termination of some simple loops, thus providing
surprisingly tight upper lattice bounds for many loop variables.
By using a higher resolution lattice, more precise typing of primitive
functions, polymorphic types and case analysis, the Nimble type
inference algorithm can often produce sharper bounds than
unification-based type inference techniques. At the present time,
however, our treatment of higher-order data structures and functions
is not as elegant as that of the unification techniques."
}
\end{chunk}

 books/bookvolbib.pamphlet      |  45 ++++++++++++++++++
 changelog                      |   2 +
 patch                          | 100 +++++++++++++++++
 src/axiom-website/patches.html |   2 +
 4 files changed, 91 insertions(+), 58 deletions(-)
diff --git a/books/bookvolbib.pamphlet b/books/bookvolbib.pamphlet
index c47dd22..d8a82d6 100644
--- a/books/bookvolbib.pamphlet
+++ b/books/bookvolbib.pamphlet
@@ -14194,6 +14194,51 @@ Proc ISSAC 97 pp172-175 (1997)
\subsection{B} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Baker, Henry G.}
+\begin{chunk}{axiom.bib}
+@misc{Bake90,
+ author = "Baker, Henry G.",
+ title = "The Nimble Type Inferencer for Common Lisp-84",
+ link = "\url{http://home.pipeline.com/~hbaker1/TInference.html}",
+ year = "1990",
+ abstract =
+ "We describe a framework and an algorithm for doing type inference
+ analysis on programs written in full Common Lisp-84 (Common Lisp
+ without the CLOS object-oriented extensions). The objective of type
+ inference is to determine tight lattice upper bounds on the range of
+ runtime data types for Common Lisp program variables and
+ temporaries. Depending upon the lattice used, type inference can also
+ provide range analysis information for numeric variables. This lattice
+ upper bound information can be used by an optimizing compiler to
+ choose more restrictive, and hence more efficient, representations for
+ these program variables. Our analysis also produces tighter control
+ flow information, which can be used to eliminate redundant tests which
+ result in dead code. The overall goal of type inference is to
+ mechanically extract from Common Lisp programs the same degree of
+ representation information that is usually provided by the programmer
+ in traditional strongly-typed languages. In this way, we can provide
+ some classes of Common Lisp programs execution time efficiency
+ expected only for more strongly-typed compiled languages.
+
+ The Nimble type inference system follows the traditional
+ lattice/algebraic data flow techniques [Kaplan80], rather than the
+ logical/theorem-proving unification techniques of ML [Milner78]. It
+ can handle polymorphic variables and functions in a natural way, and
+ provides for ``case-based'' analysis that is quite similar to that used
+ intuitively by programmers. Additionally, this inference system can
+ deduce the termination of some simple loops, thus providing
+ surprisingly tight upper lattice bounds for many loop variables.
+
+ By using a higher resolution lattice, more precise typing of primitive
+ functions, polymorphic types and case analysis, the Nimble type
+ inference algorithm can often produce sharper bounds than
+ unification-based type inference techniques. At the present time,
+ however, our treatment of higher-order data structures and functions
+ is not as elegant as that of the unification techniques."
+}
+
+\end{chunk}
+
\index{Beaumont, James}
\index{Bradford, Russell}
\index{Davenport, James H.}
diff --git a/changelog b/changelog
index 5b461da..5cca925 100644
--- a/changelog
+++ b/changelog
@@ -1,3 +1,5 @@
+20170520 tpd src/axiom-website/patches.html 20170520.02.tpd.patch
+20170520 tpd bookvolbib type inferencing for Common Lisp
20170520 tpd src/axiom-website/patches.html 20170520.01.tpd.patch
20170520 tpd bookvolbib cylindrical algorithmic decomposition references
20170518 tpd src/axiomwebsite/patches.html 20170518.02.tpd.patch
diff --git a/patch b/patch
index 831a166..7ae8830 100644
--- a/patch
+++ b/patch
@@ -1,64 +1,48 @@
bookvolbib cylindrical algorithmic decomposition references
+bookvolbib type inferencing for Common Lisp
Goal: Axiom Literate Programming
+Goal: Proving Axiom Correct
\index{Richardson, Daniel}
+\index{Baker, Henry G.}
\begin{chunk}{axiom.bib}
@InCollection{Rich98,
 author = "Richardson, Daniel",
 title = "Local Theories and Cylindrical Decomposition",
 booktitle = "Quantifier Elimination and Cylindrical Algebraic Decomposition",
 publisher = "Springer",
 year = "1998",
 isbn = "3211827943",
 abstract =
 "There are many interesting problems which can be expressed in the
 language of elementary algebra, or in one of its extensions, but which
 do not really depend on the coordinate system, and in which the
 variables can be restricted to an arbitrary small neighborhood of some
 point. It seems that it ought to be possible to use cylindrical
 decomposition techniques to solve such problems, taking advantage
 of their special features. This article attempts to do this, but
 many unsolved problems remain.",
 keywords = "axiomref"
}

\end{chunk}

\index{Weispfenning, V.}
\begin{chunk}{axiom.bib}
@InCollection{Weis98,
 author = "Weispfenning, V.",
 title = "A New Approach to Quantifier Elimination for Real Algebra",
 booktitle = "Quantifier Elimination and Cylindrical Algebraic Decomposition",
 publisher = "Springer",
 year = "1998",
 isbn = "3211827943",
 abstract =
 "Quantifier elimination for the elementary formal theory of real
 numbers is a facinating area of research at the intersection of
 various field of mathematics and computer science, such as
 mathematical logic, commutative algebra and algebraic geometry,
 computer algebra, computational geometry and complexity
 theory. Originally the method of quantifier elimination was invented
 (among others by Th. Skolem) in mathematical logic as a technical tool
 for solving the decision problem for a formalized mathematical
 theory. For the elementary formal theory of real numbers (or more
 accurately of real closed fields) such a quantifier elimination
 procedure was established in the 1930s by A. Tarski, using an
 extension of Sturm's theorem of the 1830s for counting the number of
 real zeros of a univariate polynomial in a given interval. Since then
 an abundance of new decision and quantifier elimination methods for
 this theory with variations and optimizations has been published with
 the aim both of establishing the theoretical complexity of the problem
 and of finding methods that are of practical importance (see Arnon
 1988a and the discussion and references in Renegar 1992a, 1992b, 1992c
 for a comparison of these methods). For subproblems such as
 elimination of quantifiers with respect to variables, that are
 linearly or quadratically restricted, specialized methods have been
 developed with good success (see Weispfenning 1988, Loos and
 Weispfenning 1993; Hong 1992d; Weispfenning 1997).",
 keywords = "axiomref"
+@misc{Bake90,
+ author = "Baker, Henry G.",
+ title = "The Nimble Type Inferencer for Common Lisp-84",
+ link = "\url{http://home.pipeline.com/~hbaker1/TInference.html}",
+ year = "1990",
+ abstract =
+ "We describe a framework and an algorithm for doing type inference
+ analysis on programs written in full Common Lisp-84 (Common Lisp
+ without the CLOS object-oriented extensions). The objective of type
+ inference is to determine tight lattice upper bounds on the range of
+ runtime data types for Common Lisp program variables and
+ temporaries. Depending upon the lattice used, type inference can also
+ provide range analysis information for numeric variables. This lattice
+ upper bound information can be used by an optimizing compiler to
+ choose more restrictive, and hence more efficient, representations for
+ these program variables. Our analysis also produces tighter control
+ flow information, which can be used to eliminate redundant tests which
+ result in dead code. The overall goal of type inference is to
+ mechanically extract from Common Lisp programs the same degree of
+ representation information that is usually provided by the programmer
+ in traditional strongly-typed languages. In this way, we can provide
+ some classes of Common Lisp programs execution time efficiency
+ expected only for more strongly-typed compiled languages.
+
+ The Nimble type inference system follows the traditional
+ lattice/algebraic data flow techniques [Kaplan80], rather than the
+ logical/theorem-proving unification techniques of ML [Milner78]. It
+ can handle polymorphic variables and functions in a natural way, and
+ provides for ``case-based'' analysis that is quite similar to that used
+ intuitively by programmers. Additionally, this inference system can
+ deduce the termination of some simple loops, thus providing
+ surprisingly tight upper lattice bounds for many loop variables.
+
+ By using a higher resolution lattice, more precise typing of primitive
+ functions, polymorphic types and case analysis, the Nimble type
+ inference algorithm can often produce sharper bounds than
+ unification-based type inference techniques. At the present time,
+ however, our treatment of higher-order data structures and functions
+ is not as elegant as that of the unification techniques."
}
\end{chunk}
diff --git a/src/axiom-website/patches.html b/src/axiom-website/patches.html
index e31b9e1..70e2b9d 100644
--- a/src/axiom-website/patches.html
+++ b/src/axiom-website/patches.html
@@ -5734,6 +5734,8 @@ Makefile fix makefile.ubuntu64 chunk
download.html update download table for BSD, ubuntu, ubuntu64
20170520.01.tpd.patch
bookvolbib cylindrical algorithmic decomposition references
+20170520.02.tpd.patch
+bookvolbib type inferencing for Common Lisp

1.7.5.4