From 1b8a0a11ac70cc54d36976e4609b1d58528e7ca7 Mon Sep 17 00:00:00 2001
From: Tim Daly
Date: Fri, 21 Sep 2018 14:34:17 -0400
Subject: [PATCH] books/bookvolbib add references
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Goal: Proving Axiom Sane
\index{O'Hearn, Peter W.}
\begin{chunk}{axiom.bib}
@inproceedings{Hear18,
author = "O'Hearn, Peter W.",
title = {{Continuous Reasoning: Scaling the Impact of Formal Methods}},
booktitle = "LICS '18",
year = "2018",
publisher = "ACM",
isbn = "978-1-4503-5583-4",
abstract =
"This paper describes work in continuous reasoning, where formal
reasoning about a (changing) codebase is done in a fashion which
mirrors the iterative, continuous model of software development that
is increasingly practiced in industry. We suggest that advances in
continuous reasoning will allow formal reasoning to scale to more
programs, and more programmers. The paper describes the rationale for
continuous reasoning, outlines some success cases from within
industry, and proposes directions for work by the scientific
community.",
paper = "Hear18.pdf",
keywords = "printed"
}
\end{chunk}
\index{Lester, David R.}
\begin{chunk}{axiom.bib}
@phdthesis{Lest89,
author = "Lester, David R.",
title = {{Combinator Graph Reduction: A Congruence and its Applications}},
year = "1989",
school = "Oxford University",
isbn = "0902928554",
abstract =
"The G-machine is an efficient implementation of lazy functional
languages developed by Augustsson and Johnsson. This thesis may be
read as a formal mathematical proof that the G-machine is correct with
respect to a denotational semantic specification of a simple
language. It also has more general implications. A simple lazy
functional language is defined both denotationally and operationally;
both are defined to handle erroneous results. The operational
semantics models combinator graph reduction, and is based on reduction
to weak head normal form. The two semantic definitions are shown to be
congruent. Because of error handling the language is not
confluent. Complete strictness is shown to be a necessary and
sufficient condition for changing lazy function calls to strict
ones. As strictness analyses are usually used with confluent
languages, methods are discussed to restore this property. The
operational semantic model uses indirection nodes to implement
sharing. An alternative, which is without indirection nodes, is shown
to be operationally equivalent for terminating programs. The G-machine
is shown to be a representation of the combinator graph reduction
operational model. It may be represented by the composition of a small
set of combinators which correspond to an abstract machine instruction
set. Using a modified form of graph isomorphism, alternative sequences
of instructions are shown to be isomorphic, and hence may be used
interchangeably.",
paper = "Lest89.pdf"
}
\end{chunk}
\index{Jones, Simon Peyton}
\begin{chunk}{axiom.bib}
@book{Jone00,
author = "Jones, Simon Peyton",
title = {{Implementing Functional Languages: A Tutorial}},
publisher = "University of Glasgow",
year = "2000",
link = "\url{https://www.microsoft.com/en-us/research/wp-content/uploads/1992/01/student.pdf}",
paper = "Jone00.pdf"
}
\end{chunk}
\index{Hudak, Paul}
\begin{chunk}{axiom.bib}
@misc{Huda89,
author = "Hudak, Paul",
title = {{The Conception, Evolution, and Application of Functional
Programming Languages}},
link = "\url{http://haskell.cs.yale.edu/wp-content/uploads/2011/01/cs.pdf}",
year = "1989",
abstract =
"The foundations of functional programming languages are examined from
both historical and technical perspectives. Their evolution is traced
through several critical periods: early work on lambda calculus and
combinatory calculus, Lisp, Iswim, FP, ML, and modern functional
languages such as Miranda 1 and Haskell. The fundamental premises on
which the functional programming methodology stands are critically
analyzed with respect to philosophical, theoretical, and pragmatic
concerns. Particular attention is paid to the main features that
characterize modern functional languages: higher-order functions, lazy
evaluation, equations and pattern-matching, strong static typing and
type inference, and data abstraction. In addition, current research
areas---such as parallelism, nondeterminism, input/output, and
state-oriented computations---are examined with the goal of predicting
the future development and application of functional languages.",
paper = "Huda89.pdf",
keywords = "printed"
}
\end{chunk}
\index{Grayson, Daniel R.}
\begin{chunk}{axiom.bib}
@misc{Gray18,
author = "Grayson, Daniel R.",
title = {{An Introduction to Univalent Foundations for Mathematicians}},
link = "\url{http://arxiv.org/pdf/1711.01477v3}",
year = "2018",
abstract =
"We offer an introduction for mathematicians to the univalent
foundations of Vladimir Voevodsky, aiming to explain how he chose to
encode mathematics in type theory and how the encoding reveals a
potentially viable foundation for all of modern mathematics that can
serve as an alternative to set theory.",
paper = "Gray18.pdf",
keywords = "printed"
}
\end{chunk}
\index{Breazu-Tannen, Val}
\index{Coquand, Thierry}
\index{Gunter, Carl A.}
\index{Scedrov, Andre}
\begin{chunk}{axiom.bib}
@article{Tann93,
author = "Breazu-Tannen, Val and Coquand, Thierry and Gunter, Carl A.
and Scedrov, Andre",
title = {{Inheritance as Implicit Coercion}},
journal = "Information and Computation",
volume = "93",
pages = "172--221",
year = "1991",
abstract =
"We present a method for providing semantic interpretations for
languages with a type system featuring inheritance polymorphism. Our
approach is illustrated on an extension of the language Fun of
Cardelli and Wegner, which we interpret via a translation into an
extended polymorphic lambda calculus. Our goal is to interpret
inheritances in Fun via coercion functions which are definable in the
target of the translation. Existing techniques in the theory of
semantic domains can be then used to interpret the extended
polymorphic lambda calculus, thus providing many models for the
original language. This technique makes it possible to model a rich
type discipline which includes parametric polymorphism and recursive
types as well as inheritance. A central difficulty in providing
interpretations for explicit type disciplines featuring inheritance in
the sense discussed in this paper arises from the fact that programs
can typecheck in more than one way. Since interpretations follow the
typechecking derivations, coherence theorems are required: that is,
one must prove that the meaning of a program does not depend on the
way it was typechecked. Proofs of such theorems for our proposed
interpretation are the basic technical results of this paper.
Interestingly, proving coherence in the presence of recursive
types, variants, and abstract types forced us to re-examine fundamental
equational properties that arise in proof theory (in the form of
commutative reductions) and domain theory (in the form of strict vs
non-strict functions).",
paper = "Tann93.pdf",
keywords = "printed"
}
\end{chunk}
\index{Kauers, Manuel}
\begin{chunk}{axiom.bib}
@inproceedings{Kaue08b,
author = "Kauers, Manuel",
title = {{Computer Algebra for Special Function Inequalities}},
booktitle = "Tapas in Experimental Mathematics",
pages = "215--235",
year = "2008",
abstract =
"Recent computer proofs for some special function inequalities are
presented. The algorithmic ideas underlying these computer proofs
are described, and the conceptual difference to existing
algorithms for proving special function identities is discussed.",
paper = "Kaue08b.pdf",
keywords = "printed"
}
\end{chunk}
\index{Callaghan, Paul}
\begin{chunk}{axiom.bib}
@article{Call08,
author = "Callaghan, Paul",
title = {{Coercive Subtyping via Mappings of Reduction Behaviour}},
journal = "Electronic Notes in Theoretical Computer Science",
volume = "196",
pages = "53--68",
year = "2008",
abstract =
"This paper reports preliminary work on a novel approach to
Coercive Subtyping that is based on relationships between
reduction behaviour of source and target types in coerced
terms. Coercive subtyping is a subset of record-based subtyping,
allowing so-called coercion functions to carry the subtyping. This
allows many novel and powerful forms of subtyping and
abbreviation, with applications including interfaces to theorem
provers and programming with dependent type systems. However, the
use of coercion functions introduces non-trivial overheads, and
requires difficult proof of properties such as coherence in order
to guarantee sensible results. These points restrict the
practicality of coercive subtyping. We begin with the idea that
coercing a value $v$ from type $U$ to a type $T$ intuitively means
that we wish to compute with $v$ as if it was a value in $T$, not
that $v$ must be converted into a value in $T$. Instead, we
explore how to compute on $U$ in terms of computation on $T$, and
develop a framework for mapping computations on some $T$ on
computations on some $U$ via a simple extension of the elimination
rule of $T$. By exposing how computations on different types are
related, we gain insight on and make progress with several aspects
of coercive subtyping, including (a) distinguishing classes of
coercion and finding reasons to deprecate use of some classes; (b)
alternative techniques for improving key properties of coercions;
(c) greater efficiency from implementations of coercions.",
paper = "Call08.pdf",
keywords = "printed"
}
\end{chunk}
\index{Harper, Robert}
\begin{chunk}{axiom.bib}
@misc{Harp18,
author = "Harper, Robert",
title = {{Computational Type Theory}},
year = "2018",
link = "\url{http://www.cs.uoregon.edu/research/summerschool/summer18/topics.php}",
comment = "OPLSS 2018"
}
\end{chunk}
\index{Aspinall, David}
\index{Compagnoni, Adriana}
\begin{chunk}{axiom.bib}
@article{Aspi01,
author = "Aspinall, David and Compagnoni, Adriana",
title = {{Subtyping Dependent Types}},
journal = "Theoretical Computer Science",
volume = "266",
number = "1--2",
pages = "273--309",
year = "2001",
abstract =
"The need for subtyping in type systems with dependent
types has been realized for some years. But it is hard to
prove that systems combining the two features have
fundamental properties such as subject reduction. Here we
investigate a subtyping extension of the system $\lambda$P
which is an abstract version of the type system of the
Edinburgh Logical Framework LF. By using an equivalent
formulation, we establish some important properties of
the new system $\lambda{\rm P}_{\le}$ including subject
reduction. Our analysis culminates in a complete and
terminating algorithm which establishes the decidability
of typechecking.",
paper = "Aspi01.pdf",
keywords = "printed"
}
\end{chunk}
\index{Gordon, Michael J.}
\index{Milner, Arthur J.}
\index{Wadsworth, Christopher P.}
\begin{chunk}{axiom.bib}
@book{Gord79,
author = "Gordon, Michael J. and Milner, Arthur J. and
Wadsworth, Christopher P.",
title = {{Edinburgh LCF}},
series = "Lecture Notes in Computer Science",
volume = "78",
publisher = "Springer-Verlag",
year = "1979",
isbn = "978-3-540-09724-2"
}
\end{chunk}
\index{Gordon, Michael J.}
\index{Milner, Arthur J.}
\index{Wadsworth, Christopher P.}
\begin{chunk}{axiom.bib}
@incollection{Gord79a,
author = "Gordon, Michael J. and Milner, Arthur J. and
Wadsworth, Christopher P.",
title = {{Front Matter}},
booktitle = "Edinburgh LCF",
publisher = "Springer-Verlag",
year = "1979",
isbn = "978-3-540-09724-2",
pages = "1--8",
paper = "Gord79a.pdf"
}
\end{chunk}
\index{Gordon, Michael J.}
\index{Milner, Arthur J.}
\index{Wadsworth, Christopher P.}
\begin{chunk}{axiom.bib}
@incollection{Gord79b,
author = "Gordon, Michael J. and Milner, Arthur J. and
Wadsworth, Christopher P.",
title = {{Introduction}},
booktitle = "Edinburgh LCF",
publisher = "Springer-Verlag",
year = "1979",
isbn = "978-3-540-09724-2",
pages = "1--12",
paper = "Gord79b.pdf"
}
\end{chunk}
\index{Gordon, Michael J.}
\index{Milner, Arthur J.}
\index{Wadsworth, Christopher P.}
\begin{chunk}{axiom.bib}
@incollection{Gord79c,
author = "Gordon, Michael J. and Milner, Arthur J. and
Wadsworth, Christopher P.",
title = {{ML}},
booktitle = "Edinburgh LCF",
publisher = "Springer-Verlag",
year = "1979",
isbn = "978-3-540-09724-2",
pages = "13--61",
paper = "Gord79c.pdf"
}
\end{chunk}
\index{Gordon, Michael J.}
\index{Milner, Arthur J.}
\index{Wadsworth, Christopher P.}
\begin{chunk}{axiom.bib}
@incollection{Gord79d,
author = "Gordon, Michael J. and Milner, Arthur J. and
Wadsworth, Christopher P.",
title = {{PPLAMBDA}},
booktitle = "Edinburgh LCF",
publisher = "Springer-Verlag",
year = "1979",
isbn = "978-3-540-09724-2",
pages = "62--86",
paper = "Gord79d.pdf"
}
\end{chunk}
\index{Gordon, Michael J.}
\index{Milner, Arthur J.}
\index{Wadsworth, Christopher P.}
\begin{chunk}{axiom.bib}
@incollection{Gord79e,
author = "Gordon, Michael J. and Milner, Arthur J. and
Wadsworth, Christopher P.",
title = {{APPENDIX}},
booktitle = "Edinburgh LCF",
publisher = "Springer-Verlag",
year = "1979",
isbn = "978-3-540-09724-2",
pages = "87--159",
paper = "Gord79e.pdf"
}
\end{chunk}
\index{Reynolds, John C.}
\begin{chunk}{axiom.bib}
@misc{Reyn94,
author = {Reynolds, John C.},
title = {{An Introduction to the Polymorphic Lambda Calculus}},
year = {1994},
paper = {Reyn94.pdf},
keywords = {printed}
}
\end{chunk}
\index{Meili, Mario}
\begin{chunk}{axiom.bib}
@misc{Meilxx,
author = "Meili, Mario",
title = {{Polymorphic Lambda Calculus}},
year = "unknown",
link = "\url{http://wiki.ifs.hsr.ch/SemProgAnTr/files/mmeili_polymorphic_lambda_calculus_final.pdf}",
paper = "Meilxx.pdf",
keywords = "printed"
}
\end{chunk}
\index{Wadler, Philip}
\begin{chunk}{axiom.bib}
@article{Wadl03,
author = "Wadler, Philip",
title = {{The Girard-Reynolds Isomorphism}},
journal = "Information and Computation",
volume = "186",
number = "2",
pages = "260--280",
year = "2003",
abstract =
"The second-order polymorphic lambda calculus, F2, was
independently discovered by Girard and Reynolds. Girard
additionally proved a {\sl representation} theorem: every function
on natural numbers that can be proved total in second-order
intuitionistic propositional logic, P2, can be represented in
F2. Reynolds additionally proved an {\sl abstraction} theorem: for
a suitable notion of logical relation, every term in F2 takes
related arguments into related results. We observe that the
essence of Girard's result is a projection from P2 into F2, and
that the essence of Reynolds's result is an embedding of F2 into
P2, and that the Reynolds embedding followed by the Girard
projection is the identity. The Girard projection discards all
first-order quantifiers, so it seems unreasonable to expect that
the Girard projection followed by the Reynolds embedding should
also be the identity. However, we show that in the presence of
Reynolds's parametricity property that this is indeed the case,
for propositions corresponding to inductive definitions of
naturals, products, sums, and fixpoint types.",
paper = "Wadl03.pdf",
keywords = "printed"
}
\end{chunk}
\index{Barendregt, Hendrik Pieter}
\begin{chunk}{axiom.bib}
@article{Bare91,
author = "Barendregt, Hendrik Pieter",
title = {{An Introduction to Generalized Type Systems}},
journal = "Journal of Functional Programming",
volume = "1",
number = "2",
year = "1991",
pages = "125--154",
abstract =
"Programming languages often come with type systems. Some of these are
simple, others are sophisticated. As a stylistic representation of
types in programming languages several versions of typed lambda
calculus are studied. During the last 20 years many of these systems
have appeared, so there is some need of classification. Working
towards a taxonomy, Barendregt (1991) gives a fine-structure of the
theory of constructions (Coquand and Huet 1988) in the form of a
canonical cube of eight type systems ordered by inclusion. Berardi
(1988) and Terlouw (1988) have independently generalized the method of
constructing systems in the $\lambda$-cube. Moreover, Berardi (1988, 1990)
showed that the generalized type systems are flexible enough to
describe many logical systems. In that way the well-known
propositions-as-types interpretation obtains a nice canonical form.",
paper = "Bare91.pdf",
keywords = "printed"
}
\end{chunk}
\index{Arkoudas, Konstantine}
\index{Musser, David}
\begin{chunk}{axiom.bib}
@book{Arko17,
author = "Arkoudas, Konstantine and Musser, David",
title = {{Fundamental Proof Methods in Computer Science}},
publisher = "MIT Press",
year = "2017",
isbn = "978-0-262-03553-8"
}
\end{chunk}
\index{Dahl, O.-J.}
\index{Dijkstra, E.W.}
\index{Hoare, C.A.R.}
\begin{chunk}{axiom.bib}
@book{Dahl72a,
author = "Dahl, O.-J. and Dijkstra, E.W. and Hoare, C.A.R.",
title = {{Structured Programming}},
publisher = "Academic Press",
year = "1972",
isbn = "0-12-200556-2"
}
\end{chunk}
\index{Nipkow, Tobias}
\index{Tabacznyj, Christophe}
\index{Paulson, Lawrence C.}
\index{Chaieb, Amine}
\index{Rasmussen, Thomas M.}
\index{Avigad, Jeremy}
\begin{chunk}{axiom.bib}
@misc{Nipk18,
author = "Nipkow, Tobias and Tabacznyj, Christophe and
Paulson, Lawrence C. and Chaieb, Amine and Rasmussen, Thomas M.
and Avigad, Jeremy",
title = {{GCD in Isabelle}},
link = "\url{http://isabelle.in.tum.de/dist/library/HOL/HOL/GCD.html}",
year = "2018"
}
\end{chunk}
\index{McCarthy, John}
\index{Abrahams, Paul W.}
\index{Edwards, Daniel J.}
\index{Hart, Timothy P.}
\index{Levin, Michael I.}
\begin{chunk}{axiom.bib}
@book{Mcca62,
author = "McCarthy, John and Abrahams, Paul W. and Edwards, Daniel J.
and Hart, Timothy P. and Levin, Michael I.",
title = {{LISP 1.5 Programmer's Manual}},
link = "\url{http://www.softwarepreservation.org/projects/LISP/book/LISP%201.5%20Programmers%20Manual.pdf}",
publisher = "MIT Press",
isbn = "0-262-13011-4",
year = "1962"
}
\end{chunk}

 books/axiom.bib                |  428 ++++++++
 books/bookvol13.pamphlet       |  313 ++++++
 books/bookvolbib.pamphlet      |  603 ++++++++++++
 changelog                      |    4 +
 patch                          | 1881 +++++++++
 src/axiom-website/patches.html |    2 +
 6 files changed, 1691 insertions(+), 1540 deletions(-)
diff --git a/books/axiom.bib b/books/axiom.bib
index 4883459..ac9040a 100644
--- a/books/axiom.bib
+++ b/books/axiom.bib
@@ -1446,6 +1446,15 @@ paper = "Brea89.pdf"
paper = "Jone87.pdf"
}
+@book{Jone00,
+ author = "Jones, Simon Peyton",
+ title = {{Implementing Functional Languages: A Tutorial}},
+ publisher = "University of Glasgow",
+ year = "2000",
+ link = "\url{https://www.microsoft.com/en-us/research/wp-content/uploads/1992/01/student.pdf}",
+ paper = "Jone00.pdf"
+}
+
@book{Joua90,
author = "Jouannaud, JeanPierre and Kirchner, Claude",
title = {{Solving Equations in Abstract Algebras: A Rulebased Survey of
@@ -2338,7 +2347,8 @@ paper = "Brea89.pdf"
the coherence of a class of lambdacalculusbased languages that use
the intersection type discipline, including both a purely functional
programming language and the Algollike programming language Forsythe.",
- paper = "Reyn91.pdf"
+ paper = "Reyn91.pdf",
+ keywords = "printed"
}
@book{Robi96,
@@ -6826,6 +6836,21 @@ paper = "Brea89.pdf"
publisher = "Dover Publications"
}
+@inproceedings{Kaue08b,
+ author = "Kauers, Manuel",
+ title = {{Computer Algebra for Special Function Inequalities}},
+ booktitle = "Tapas in Experimental Mathematics",
+ pages = "215235",
+ year = "2008",
+ abstract =
+ "Recent coputer proofs for some special function inequalities are
+ presented. The algorithmic ideas underlying these computer proofs
+ are described, and the conceptual difference to existing
+ algorithms for proving special function identities is discussed.",
+ paper = "Kaue08b.pdf",
+ keywords = "printed"
+}
+
@misc{Ng68,
author = "Ng, Edward W. and Geller, Murray",
title = {{A Table of Integrals of the Error functions}},
@@ -9445,6 +9470,297 @@ paper = "Brea89.pdf"
link = "\url{http://www.wolfram.com}"
}
+@book{Arko17,
+ author = "Arkoudas, Konstantine and Musser, David",
+ title = {{Fundamental Proof Methods in Computer Science}},
+ publisher = "MIT Press",
+ year = "2017",
+ isbn = "9780262035538"
+}
+
+@article{Aspi01,
+ author = "Aspinall, David and Compagnoni, Adriana",
+ title = {{Subtyping Dependent Types}},
+ journal = "Theoretical Computer Science",
+ volume = "266",
+ number = "12",
+ pages = "273309",
+ year = "2001",
+ abstract =
+ "The need for subtyping in typesystems with dependent
+ types has been realized for some years. But it is hard to
+ prove that systems combining the two features have
+ fundamental properties such as subject reduction. Here we in
+ vestigate a subtyping extension of the system $\lambda$P
+ which is an abstract version of the type system of the
+ Edinburgh Logical Framework LF. By using an equivalent
+ formulation, we establish some important properties of
+ the new system $\lambda{\rm P}_{\le}$ including subject
+ reduction. Our analysis culminates in a complete and
+ terminating algorithm which establishes the decidability
+ of typechecking",
+ paper = "Aspi01.pdf",
+ keywords = "printed"
+}
+
+@article{Bare91,
+ author = "Barendregt, Hendrik Pieter",
+ title = {{An Introduction to Generalized Type Systems}},
+ journal = "Journal of Functional Programming",
+ volume = "1",
+ number = "2",
+ year = "1991",
+ pages = "125154",
+ abstract =
+ "Programming languages often come with type systems. Some of these are
+ simple, others are sophisticated. As a stylistic representation of
+ types in programming languages several versions of typed lambda
+ calculus are studied. During the last 20 years many of these systems
+ have appeared, so there is some need of classification. Working
+ towards a taxonomy, Barendregt (1991) gives a finestructure of the
+ theory of constructions (Coquand and Huet 1988) in the form of a
+ canonical cube of eight type systems ordered by inclusion. Berardi
+ (1988) and Terlouw (1988) have independently generalized the method of
+ constructing systems in the λcube. Moreover, Berardi (1988, 1990)
+ showed that the generalized type systems are flexible enough to
+ describe many logical systems. In that way the wellknown
+ propositionsastypes interpretation obtains a nice canonical form.",
+ paper = "Bare91.pdf",
+ keywords = "printed"
+}
+
+@article{Call08,
+ author = "Callaghan, Paul",
+ title = {{Coercive Subtyping via Mappings of Reduction Behaviour}},
+ journal = "Electronic Notes in Theoretical Computer Science",
+ volume = "196",
+ pages = "5368",
+ year = "2008",
+ abstract =
+ "This paper reports preliminary work on a novel approach to
+ Coercive Subtyping that is based on relationships between
+ reduction behaviour of source and target types in coerced
+ terms. Coercive subtyping is a subset of recordbased subtyping,
+ allowing socalled coercion functions to carry the subtyping. This
+ allows many novel and powerful forms of subtyping and
+ abbreviation, with applicaions including interfaces to theorem
+ provers and programming with dependent type systems. However, the
+ use of coercion functions introduces nontrivial overheads, and
+ requires difficult proof of properties such as coherence in order
+ to guarantee sensible results. These points restrict the
+ practicality of coercive subtyping. We begin with the idea that
+ coercing a value $v$ from type $U$ to a type %T% intuitively means
+ that we wish to compute with $v$ as if it was a value in $T$, not
+ that $v$ must be converted into a value in $T$. Instead, we
+ explore how to compute on $U$ in terms of computation on $T$, and
+ develop a framework for mapping computations on some $T$ on
+ computations on some $U$ via a simple extension of the elimination
+ rule of $T$. By exposing how computations on different types are
+ relatd, we gain insight on and make progress with several aspects
+ of coercive subtyping, including (a) distinguishing classes of
+ coercion and finding reasons to deprecate use of some classes; (b)
+ alternative techniques for improving key properties of coercions;
+ (c) greater efficiency from implementations of coercions.",
+ paper = "Call08.pdf",
+ keywords = "printed"
+}
+
+@book{Dahl72a,
+ author = "Dahl, O.J. and Dijkstra, E.W. and Hoare, C.A.R",
+ title = {{Structured Programming}},
+ publisher = "Academic Press",
+ year = "1972",
+ isbn = "0122005562"
+}
+
+@incollection{Gord79a,
+ author = "Gordon, Michael J. and Milner, Arthur J. and
+ Wadsworth, Christopher P.",
+ title = "Front Matter",
+ booktitle = "Edinburgh LCF",
+ publisher = "SpringerVerlag",
+ year = "1979",
+ isbn = "9783540097242",
+ pages = "18",
+ paper = "Gord79a.pdf"
+}
+
+@incollection{Gord79b,
+ author = "Gordon, Michael J. and Milner, Arthur J. and
+ Wadsworth, Christopher P.",
+ title = "Introduction",
+ booktitle = "Edinburgh LCF",
+ publisher = "SpringerVerlag",
+ year = "1979",
+ isbn = "9783540097242",
+ pages = "112",
+ paper = "Gord79b.pdf"
+}
+
+@incollection{Gord79c,
+ author = "Gordon, Michael J. and Milner, Arthur J. and
+ Wadsworth, Christopher P.",
+ title = "ML",
+ booktitle = "Edinburgh LCF",
+ publisher = "SpringerVerlag",
+ year = "1979",
+ isbn = "9783540097242",
+ pages = "1361",
+ paper = "Gord79c.pdf"
+}
+
+@incollection{Gord79d,
+ author = "Gordon, Michael J. and Milner, Arthur J. and
+ Wadsworth, Christopher P.",
+ title = "PPLAMBDA",
+ booktitle = "Edinburgh LCF",
+ publisher = "SpringerVerlag",
+ year = "1979",
+ isbn = "9783540097242",
+ pages = "6286",
+ paper = "Gord79d.pdf"
+}
+
+@incollection{Gord79e,
+ author = "Gordon, Michael J. and Milner, Arthur J. and
+ Wadsworth, Christopher P.",
+ title = "APPENDIX",
+ booktitle = "Edinburgh LCF",
+ publisher = "SpringerVerlag",
+ year = "1979",
+ isbn = "9783540097242",
+ pages = "87159",
+ paper = "Gord79e.pdf"
+}
+
+@misc{Gray18,
+ author = "Grayson, Daniel R.",
+ title = {{An Introduction to Univalent Foundations for Mathematicians}},
+ link = "\url{http://arxiv.org/pdfs/1711.01477v3}",
+ year = "2018",
+ abstract =
+ "We offer an introduction for mathematicians to the univalent
+ foundations of Vladimir Voevodsky, aiming to explain how he chose to
+ encode mathematics in type theory and how the encoding reveals a
+ potentially viable foundation for all of modern mathematics that can
+ serve as an alternative to set theory",
+ paper = "Gray18.pdf",
+ keywords = "printed"
+}
+
+@misc{Harp18,
+ author = "Harper, Robert",
+ title = {{Computational Type Theory}},
+ year = "2018",
+ link =
+ "\url{http://www.cs.uoregon.edu/research/summerschool/summer18/topics.php}",
+ comment = "OPLSS 2018"
+}
+
+@book{Mcca62,
+ author = "McCarthy, John and Abrahams, Paul W. and Edwards, Daniel J.
+ and Hart, Timothy P. and Levin, Michael I.",
+ title = {{LISP 1.5 Programmer's Manual}},
+ link = "\url{http://www.softwarepreservation.org/projects/LISP/book/LISP%201.5%20Programmers%20Manual.pdf}",
+ publisher = "MIT Press",
+ isbn = "0262130114",
+ year = "1962"
+}
+
+@misc{Meilxx,
+ author = "Meili, Mario",
+ title = {{Polymorphic Lambda Calculus}},
+ year = "unknown",
+ link = "\url{http://wiki.ifs.hsr.ch/SemProgAnTr/files/mmeili_polymorphic_lambda_calculus_final.pdf}",
+ paper = "Meilxx.pdf",
+ keywords = "printed"
+}
+
+@misc{Nipk18,
+ author = "Nipkow, Tobias and Tabacznyj, Christophe and
+ Paulson, Lawrence C. and Chaieb, Amine and Rasmussen, Thomas M.
+ and Avigad, Jeremy",
+ title = {{GCD in Isabelle}},
+ link = "\url{http://isabelle.in.tum.de/dist/library/HOL/HOL/GCD.html}",
+ year = "2018"
+}
+
+@misc{Reyn94,
+ author = "Reynolds, John C.",
+ title = {{An Introduction to the Polymorphic Lambda Calculus}},
+ year = "1994",
+ paper = "Reyn94.pdf",
+ keywords = "printed"
+}
+
+@article{Tann93,
+ author = "BreazuTannen, Val and Coquand, Thierry and Gunter, Carl A.
+ and Scedrov, Andre",
+ title = {{Inheritance as Implicit Coercion}},
+ journal = "Information and Computation",
+ volume = "93",
+ pages = "172221",
+ year = "1991",
+ abstract =
+ "We present a method for providing semantic interpretations for
+ languages with a type system featuring inheritance polymorphism. Our
+ approach is illustrated on an extension of the language Fun of
+ Cardelli and Wegner, which we interpret via a translation into an
+ extended polymorphic lambda calculus. Our goal is to interpret
+ inheritances in Fun via coercion functions which are definable in the
+ target of the translation. Existing techniques in the theory of
+ semantic domains can be then used to interpret the extended
+ polymorphic lambda calculus, thus providing many models for the
+ original language. This technique makes it possible to model a rich
+ type discipline which includes parametric polymorphism and recursive
+ types as well as inheritance. A central difficulty in providing
+ interpretations for explit type disciplines featuring inheritance in
+ the sense discussed in this paper arises from the fact that programs
+ can typecheck in more than one way. Since interpretations follow the
+ typechecking derivations, coherence theorems are required: that is,
+ one must prove that the meaning of a program does not depend on the
+ way it was typechecked. Proofs of such theorems for our proposed
+ interpretation are the basic technical results of this paper.
+ Interestingly, proving coherence in the presence of recursive
+ types, variants, and abstract types forced us to reexamine fundamental
+ equational properties that arise in proof theory (in the form of
+ commutative reductions) and domain theory (in the form of strict vs
+ nonstrict functions).",
+ paper = "Tann93.pdf",
+ keywords = "printed"
+}
+
+@article{Wadl03,
+ author = "Wadler, Philip",
+ title = {{The GirardReynolds Isomorphism}},
+ journal = "Information and Computation",
+ volume = "186",
+ number = "2",
+ pages = "260280",
+ year = "2003",
+ abstract =
+ "The secondorder polymorphic lambda calculus, F2, was
+ independently discovered by Girard and Reynolds. Girard
+ additionally proved a {\sl representation} theorem: every function
+ on natural numbers that can be proved total in secondorder
+ intutionistic propositional logic, P2, can be represented in
+ F2. Reynolds additionally proved an {\sl abstraction} theorem: for
+ a suitable notion of logical relation, every term in F2 takes
+ related arguments into related results. We observe that the
+ essence of Girard's result is a projection from P2 into F2, and
+ that the essence of Reynolds's result is an embedding of F2 into
+ P2, and that the Reynolds embedding followed by the Girard
+ projection is the identity. The Girard projection discards all
+ firstorder quantifiers, so it seems unreasonable to expect that
+ the Girard projection followed by the Reynolds embedding should
+ also be the identity. However, we show that in the presence of
+ Reynolds's parametricity property that this is indeed the case,
+ for propositions corresponding to inductive definitions of
+ naturals, products, sums, and fixpoint types.",
+ paper = "Wadl03.pdf",
+ keywords = "printed"
+}
+
@inproceedings{Abad90,
author = "Abadi, Martin and Cardelli, Luca and Curien, PierreLouis
and Levy, JeanJacques",
@@ -11962,26 +12278,6 @@ paper = "Brea89.pdf"
paper = "Gord79.pdf"
}
-@article{Gord79a,
- author = "Gordon, Michael J. and Milner, Arthur j. and
- Wadsworth, Christopher P.",
- title = {{Edinburgh LCF, A Mechanised Logic of Computation: ML}},
- journal = "LNCS",
- volume = "78",
- year = "1979",
- paper = "Gord79a.pdf"
-}
-
-@article{Gord79b,
- author = "Gordon, Michael J. and Milner, Arthur j. and
- Wadsworth, Christopher P.",
- title = {{Edinburgh LCF, A Mechanised Logic of Computation: PPLAMBDA}},
- journal = "LNCS",
- volume = "78",
- year = "1979",
- paper = "Gord79b.pdf"
-}
-
@inproceedings{Gord89,
author = "Gordon, Michael J.C.",
title = {{Mechanizing Programming Logics in Higher Order Logic}},
@@ 12356,6 +12652,31 @@ paper = "Brea89.pdf"
pages = "83174"
}
+@misc{Huda89,
+ author = "Hudak, Paul",
+ title = {{The Conception, Evolution, and Application of Functional
+ Programming Languages}},
+ link = "\url{http://haskell.cs.yale.edu/wpcontent/uploads/2011/01/cs.pdf}",
+ year = "1989",
+ abstract =
+ "The foundations of functional programming languages are examined from
+ both historical and technical perspectives. Their evolution is traced
+ through several critical periods: early work on lambda calculus and
+ combinatory calculus, Lisp, Iswim, FP, ML, and modern functional
+ languages such as Miranda 1 and Haskell. The fundamental premises on
+ which the functional programming methodology stands are critically
+ analyzed with respect to philosophical, theoretical, and pragmatic
+ concerns. Particular attention is paid to the main features that
+ characterize modern functional languages: higherorder functions, lazy
+ evaluation, equations and patternmatching, strong static typing and
+ type inference, and data abstraction. In addition, current research
+ areas—such as parallelism, nondeterminism, input/output, and
+ stateoriented computations—are examined with the goal of predicting
+ the future development and application of functional languages.",
+ paper = "Huda89.pdf",
+ keywords = "printed"
+}
+
@article{Huet75,
author = "Huet, Gerard P.",
title = {{A Unification Algorithm for typed $\lambda$Calculus}},
@@ 12393,7 +12714,8 @@ paper = "Brea89.pdf"
denotational semantics for the programming language. We establish the
correctness of the transformation method itself, and give techniques
pertaining to its actual implementation. The paper is illustrated with
 recursion removal examples."
+ recursion removal examples.",
+ paper = "Huet78.pdf"
}
@inproceedings{Hugh90,
@@ 13000,6 +13322,39 @@ paper = "Brea89.pdf"
paper = "Lero09.pdf"
}
+@phdthesis{Lest89,
+ author = "Lester, David R.",
+ title = {{Combinator Graph Reduction: A Congruence and its Applications}},
+ year = "1989",
+ school = "Oxford University",
+ isbn = "0902928554",
+ abstract =
+ "The GMachine is an efficient implementation of lazy functional
+ languages developed by Augustsson and Johnsson. This thesis may be
+ read as a formal mathematical proof that the Gmachine is correct with
+ respect to a denotational semantic specification of a simple
+ language. It also has more general implications. A simple lazy
+ functional language is defined both denotationally and operationally;
+ both are defined to handle erroneous results. The operational
+ semantics models combinator graph reduction, and is based on reduction
+ to weak head normal form. The two semantic definitions are shown to be
+ congruent. Because of error handling the language is not
+ confluent. Complete strictness is shown to be a necessary and
+ sufficient condition for changing lazy function calls to strict
+ ones. As strictness analyses are usually used with confluent
+ languages, methods are discussed to restore this property. The
+ operational semantic model uses indirection nodes to implement
+ sharing. An alternative, which is without indirection nodes, is shown
+ to be operationally equivalent for terminating programs. The Gmachine
+ is shown to be a representation of the combinator graph reduction
+ operational model. It may be represented by the composition of a small
+ set of combinators which correspond to an abstract machine instruction
+ set. Using a modified form of graph isomorphism, alternative sequences
+ of instructions are shown to be isomorphic, and hence may be used
+ interchangeably.",
+ paper = "Lest89.pdf"
+}
+
@inproceedings{Leto04,
author = "Letouzey, Pierre",
title = {{A New Extraction for Coq}},
@@ 14112,7 +14467,7 @@ paper = "Brea89.pdf"
@book{Pier91b,
author = "Pierce, Benjamin C.",
 title = {{ Basic Category Theory for Computer Scientists}},
+ title = {{Basic Category Theory for Computer Scientists}},
publisher = "MIT Press",
year = "1991",
isbn = "0262660717"
@@ 15257,7 +15612,7 @@ paper = "Brea89.pdf"
pages = "201226",
year = "2007",
abstract =
 "eanYves Girard and John Reynolds independently discovered the
+ "JeanYves Girard and John Reynolds independently discovered the
secondorder polymorphic lambda calculus, F2. Girard additionally
proved a Representation Theorem : every function on natural numbers
that can be proved total in secondorder intuitionistic predicate
@@ 19082,8 +19437,29 @@ paper = "Brea89.pdf"
paper = "Harr06a.pdf"
}
+@inproceedings{Hear18,
+ author = "O'Hearn, Peter W.",
+ title = {{Continuous Reasoning: Scaling the Impact of Formal Methods}},
+ booktitle = "LICS 18",
+ year = "2018",
+ publisher = "ACM",
+ isbn = "9781450355834",
+ abstract =
+ "This paper describes work in continuous reasoning, where formal
+ reasoning about a (changing) codebase is done in a fashion which
+ mirrors the iterative, continuous model of software development that
+ is increasingly practiced in industry. We suggest that advances in
+ continuous reasoning will allow formal reasoning to scale to more
+ programs, and more programmers. The paper describes the rationale for
+ continuous reasoning, outlines some success cases from within
+ industry, and proposes directions for work by the scientific
+ community.",
+ paper = "Hear18.pdf",
+ keywords = "printed"
+}
+
@misc{Hear12,
 author = "Hearn, Peter W.O.",
+ author = "O'Hearn, Peter W.",
title = {{A Primer on Separation Logic (and Automatic Program
Verification and Analysis}},
year = "2012",
@@ 34398,7 +34774,7 @@ paper = "Brea89.pdf"
coercion, which is based on rewrite rules. From this definition, an
efficient coercion algorithm for SCRATCHPAD is constructed using graph
techniques.",
 keywords = "axiomref",
+ keywords = "axiomref, printed",
beebe = "Fortenbacher:1990:ETI"
}
diff git a/books/bookvol13.pamphlet b/books/bookvol13.pamphlet
index f367e6d..4eaca41 100644
 a/books/bookvol13.pamphlet
+++ b/books/bookvol13.pamphlet
@@ 3,6 +3,7 @@
\usepackage{bbold}
\usepackage{amsmath}
\usepackage{turnstile}
+\usepackage{mathrsfs}
\input{bookheader.tex}
\mainmatter
\setcounter{chapter}{0} % Chapter 1
@@ 247,9 +248,9 @@ Axiom tries to encode mathematical algorithms. Unlike other systems it
is built on the scaffold of group theory which provides a sound
mathematical foundation. As such, it seems only reasonable that the
algorithms in Axiom can be proven correct, hence the project to Prove
Axiom Correct (PAC).
+Axiom Sane (PAS).
The PAC project will not succeed. This is perfectly obvious from the
+The PAS project will not succeed. This is perfectly obvious from the
outset. But, given the law of the excluded middle (that is,
$A \lor ~A$) is not applicable in this case, the fact that the project
"does not succeed" does not imply failure. Learning will occur.
@@ 3903,7 +3904,7 @@ function(al) $F_x$ with domain $[0,x]$ such that for all $y\le x$
then $F(b)$ is defined to be $F_b(b)$ for the unique such $F_b$ with
${\rm dom}(F_b)=[0,b]$.
\section{Homann \cite{Homa94}}
+\subsection{Homann \cite{Homa94}}
algorithm schemata
Name: gcd(?a,?b)=?g\\
@@ 4190,6 +4191,312 @@ $f(r){\rm deg}(P))$ and lcof($P$) for its
+{\bf leading coefficient} $a_p={\rm cof}_{{\rm deg}(P)}(P)$. By
+convention ${\rm lcof}(0)=1$.
+
+Suppose that $P$ and $Q$ are two polynomials in $D[X]$. The polynomial
+$Q$ is a {\bf divisor of} $P$ if $P=AQ$ for some $A\in K[X]$. Thus,
+while every $P$ divides 0, 0 divides 0 and no other polynomial.
+
+If $Q\ne 0$, the {\bf remainder} in the {\bf euclidean division of}
+$P$ {\bf by} $Q$, denoted ${\rm Rem}(P,Q)$, is the unique polynomial
+$R\in K[X]$ of degree smaller than the degree of $Q$ such that
+$P=AQ+R$ with $A\in K[X]$. The {\bf quotient} in the euclidean
+division of $P$ by $Q$, denoted ${\rm Quo}(P,Q)$, is $A$.
+
+{\bf Exercise} Prove that, if $Q\ne 0$, there exists a unique pair
+$(R,A)$ of polynomials in $K[X]$ such that $P=AQ+R$,
+${\rm deg}(R)<{\rm deg}(Q)$.
+
+Clearly, ${\rm Rem}(aP,bQ)=a{\rm Rem}(P,Q)$, for any $a,b\in K$
+with $b\ne 0$. At a root $x$ of $Q$, ${\rm Rem}(P,Q)(x)=P(x)$.
+
+{\bf Exercise}:
+Prove that $x$ is a root of $P$ in $K$ if and only if $Xx$ is a
+divisor of $P$ in $K[X]$.
+
+{\bf Exercise}:
+Prove that if $C$ is algebraically closed, every $P\in C[X]$ can be
+written uniquely as
+\[P=a(Xx_1)^{\mu_1}\cdots(Xx_k)^{\mu_k}\]
+with $x_1,\ldots,x_k$ distinct elements of $C$.
+
+A {\bf greatest common divisor of} $P$ {\bf and} $Q$, denoted
+$gcd(P,Q)$, is a polynomial $G\in K[X]$ such that $G$ is a divisor of
+both $P$ and $Q$, and any divisor of both $P$ and $Q$ is a divisor of
+$G$. Observe that this definition implies that $P$ is a greatest
+common divisor of $P$ and 0. Clearly, any two greatest common divisors
+(say $G_1$, $G_2$) of $P$ and $Q$ must divide each other and have
+equal degree. Hence $G_1=aG_2$ for some $a\in K$. Thus, any two
+greatest common divisors of $P$ and $Q$ are proportional by an element
+in $K\backslash \{0\}$. Two polynomials are {\bf coprime} if their greatest
+common divisor is an element of $K\backslash \{0\}$.
+
+A {\bf least common multiple of} $P$ {\bf and} $Q$, lcm($P,Q$) is a
+polynomial $G\in K[X]$ such that $G$ is a multiple of both $P$ and
+$Q$, and any multiple of both $P$ and $Q$ is a multiple of both
+$G$. Clearly, any two least common multiples $L_1$, $L_2$ of $P$ and
+$Q$ must divide each other and have equal degree. Hence,
+$L_1=aL_2$ for some $a\in K$. Thus, any two least common multiple of
+$P$ and $Q$ are proportional by an element in $K\backslash \{0\}$.
+
+It follows immediately from the definitions that:
+
+{\bf Proposition}: {\sl Let $P\in K[X]$ and $Q\in K[X]$, not both
+zero. Then $PQ/G$ is a least common multiple of $P$ and $Q$.}
+
+{\bf Corollary}:
+\[{\rm deg}({\rm lcm}(P,Q))={\rm deg}(P)+{\rm deg}(Q)
+{\rm deg}({\rm gcd}(P,Q))\]
+
+We now prove that greatest common divisors and least common multiple
+exist by using euclidean division repeatedly.
+
+{\bf Definition} {\bf [Signed remainder sequence]} Given
+$P,Q\in K[X]$, not both 0, we define the
+{\bf signed remainder sequence of} $P$ {\bf and} $Q$.
+\[{\rm SRemS}(P,Q)={\rm SRemS}_0(P,Q), {\rm SRemS}_1(P,Q),
+\ldots,{\rm SRemS}_k(P,Q)\]
+by
+\[\begin{array}{rcl}
+{\rm SRemS}_0(P,Q) & = & P\\
+{\rm SRemS}_1(P,Q) & = & Q\\
+{\rm SRemS}_2(P,Q) & = &
+{\rm Rem}({\rm SRemS}_0(P,Q), {\rm SRemS}_1(P,Q))\\
+\vdots\\
+{\rm SRemS}_k(P,Q) & = &
+{\rm Rem}({\rm SRemS}_{k2}(P,Q),
+{\rm SRemS}_{k1}(P,Q))\ne 0\\
+{\rm SRemS}_{k+1}(P,Q) & = &
+{\rm Rem}({\rm SRemS}_{k1}(P,Q),
+{\rm SRemS}_k(P,Q))=0\\
+\end{array}\]
+
+The signs introduced here are unimportant in the algebraically closed
+case. They play an important role when we consider analogous problems
+over real closed fields.
+
+In the above, each ${\rm SRemS}_i(P,Q)$ is the negative of the
+remainder in the euclidean division of
+${\rm SRemS}_{i2}(P,Q)$ by ${\rm SRemS}_{i1}(P,Q)$ for
+$2\le i\le k+1$, and the sequence ends with
+${\rm SRemS}_k(P,Q)$ when ${\rm SRemS}_{k+1}(P,Q)=0$,
+for $k\ge 0$.
+
+{\bf Proposition}: {\sl The polynomial ${\rm SRemS}_k(P,Q)$ is a
+greatest common divisor of $P$ and $Q$}
+
+{\bf Proof}: Observe that if a polynomial $A$ divides two polynomials
+$B,C$ then it also divides $UB+VC$ for arbitrary polynomials
+$U,V$. Since
+\[{\rm SRemS}_{k+1}(P,Q)=
+{\rm Rem}({\rm SRemS}_{k1}(P,Q),{\rm SRemS}_k(P,Q))=0\]
+${\rm SRemS}_k(P,Q)$ divides ${\rm SRemS}_{k1}(P,Q)$ and since,
+\[{\rm SRemS}_{k2}(P,Q)=
+{\rm SRemS}_k(P,Q)+A{\rm SRemS}_{k1}(P,Q)\]
+
+${\rm SRemS}_k(P,Q)$ divides ${\rm SRemS}_{k2}(P,Q)$ using the above
+observation. Continuing this process one obtains that
+${\rm SRemS}_k(P,Q)$ divides ${\rm SRemS}_1(P,Q)=Q$ and
+${\rm SRemS}_0(P,Q)=P$
+
+Also, if any polynomial divides ${\rm SRemS}_0(P,Q)$,
+${\rm SRemS}_1(P,Q)$ (that is $P$, $Q$ then it divides
+${\rm SRemS}_2(P,Q)$ and hence ${\rm SRemS}_3(P,Q)$ and so on.
+Hence, it divides ${\rm SRemS}_k(P,Q)$.
+
+Note that the signed remainder sequence of $P$ and 0 is $P$ and when
+$Q$ is not 0, the signed remainder sequence of 0 and $Q$ is 0, $Q$.
+
+Also, note that by unwinding the definitions of the
+${\rm SRemS}_i(P,Q)$, we can express
+${\rm SRemS}_k(P,Q)=gcd(P,Q)$ as $UP+VQ$ for some polynomials $U$, $V$
+in $K[X]$. We prove bounds on the degrees of $U$, $V$ by elucidating
+the preceding remark.
+
+{\bf Proposition}: {\sl If $G$ is a greatest common divisor of $P$ and
+$Q$, then there exist $U$ and $V$ with}
+\[UP+VQ=G\]
+{\sl Moreover, if ${\rm deg}(G)=g$, $U$ and $V$ can be chosen so that
+${\rm deg}(U) < q-g$ and ${\rm deg}(V) < p-g$.}
+
+\[\begin{array}{rcl}
+{\rm deg}({\rm SRemU}_{i-1}(P,Q)) & = & q-d_{i-2}\\
+{\rm deg}({\rm SRemU}_i(P,Q)) & = & q-d_{i-1}\\
+{\rm deg}(A_{i+1}{\rm SRemU}_i(P,Q)) & = & d_{i-1} - d_i + q - d_{i-1}\\
+& = & q - d_i > q - d_{i-2}\\
+\end{array}\]
+
+Hence, ${\rm deg}({\rm SRemU}_{i+1}) = q - d_i$. Similarly,
+
+\[\begin{array}{rcl}
+{\rm deg}({\rm SRemV}_{i1}(P,Q)) & = & pd_{i2}\\
+{\rm deg}({\rm SRemV}_i(P,Q)) & = & pd_{i1}\\
+{\rm deg}(A_{i+1}{\rm SRemV}_i(P,Q)) & = & d_{i1}  d_i + p  d_{i1}\\
+& = & p  d_i > p  d_{i2}\\
+\end{array}\]
+
+Hence, ${\rm deg}({\rm SRemV}_{i+1}) = p  d_i$.
+
+{\bf Proof}: The claim follows from the Lemma and Proposition since
+${\rm SRemS}_k(P,Q)$ is the gcd of $P$ and $Q$, taking
+\[U={\rm SRemU}_k(P,Q), V={\rm SRemV}_k(P,Q)\]
+and noting that $pd_{k1} < pg$, $qd_{k1} < qg$.
+
+The extended signed remainder sequence also provides a least common
+multiple of $P$ and $Q$.
+
+{\bf Proposition} {\sl The equality}
+\[{\rm SRemU}_{k+1}(P,Q) P = {\rm SRemV}_{k+1}(P,Q) Q\]
+{\sl holds and ${\rm SRemU}_{k+1}(P,Q)P = {\rm SRemV}_{k+1}(P,Q) Q$
+is a least common multiple of $P$ and $Q$.}
+
+{\bf Proof:} Since $d_k={\rm deg}({\rm gcd}(P,Q))$,
+${\rm deg}({\rm SRemU}_{k+1}(P,Q))=qd_k$,
+${\rm deg}({\rm SRemV}_{k+1}(P,Q))=p-d_k$ and
+\[{\rm SRemU}_{k+1}(P,Q) P + {\rm SRemV}_{k+1}(P,Q) Q = 0\]
+it follows that
+\[{\rm SRemU}_{k+1}(P,Q) P  {\rm SRemV}_{k+1}(P,Q) Q \]
+is a common multiple of $P$ and $Q$ of degree $p+qd_k$, hence a least
+common multiple of $P$ and $Q$.
+
+{\bf Definition} {\bf [Greatest common divisor of a family]}
+A {\bf greatest common divisor of a finite family of polynomials} is a
+divisor of all the polynomials in the family that is also a multiple of
+any polynomial that divides every polynomial in the family. A greatest
+common divisor of a family can be obtained inductively on the number
+of elements of the family by
+\[\begin{array}{rcl}
+{\rm gcd}(\emptyset) & = & 0\\
+{\rm gcd}(\mathcal{P} \cup \{P\}) & = &
+{\rm gcd}(P,{\rm gcd}(\mathcal{P}))
+\end{array}
+\]
+
+Note that
+\begin{itemize}
+\item $x\in C$ is a root of every polynomial in $\mathcal{P}$
+if and only if it is the root of gcd($\mathcal{P}$)
+\item $x\in C$ is not a root of any polynomial in $\mathcal{Q}$
+if and only if
+it is not a root of $\prod_{Q\in \mathcal{Q}}Q$
+(with the convention that the product of the empty family is 1).
+\item every root of $P$ in $C$ is a root of $Q$ if and only if
+${\rm gcd}(P,Q^{deg(\mathcal{P})})$
+(with the convention that $Q^{deg(0)}=0)$.
+\end{itemize}
+
+With these observations the following lemma is clear:
+
+{\bf Lemma} {\sl If $\mathcal{P}$,$\mathcal{Q}$
+are two finite subsets of D[X], then there is
+an $x\in C$ such that
+\[\left(\bigwedge_{P\in \mathcal{P}} P(x)=0\right)\land
+\left(\bigwedge_{Q\in \mathcal{Q}} Q(x)\ne 0\right)
+\]
+if and only if
+\[{\rm deg}({\rm gcd}({\rm gcd}(\mathcal{P}),
+\prod_{Q\in \mathcal{Q}} Q^d))\ne {\rm deg}({\rm gcd}(\mathcal{P}))
+\]
+where $d$ is any integer greater than
+${\rm deg}({\rm gcd}(\mathcal{P}))$.
+
+Note that when $\mathcal{Q}=\emptyset$,
+since $\prod_{Q\in \emptyset}Q=1$, the lemma says
+that there is an $x\in C$ such that
+$\bigwedge_{P\in \mathcal{P}}P(x)=0$ if and
+only if ${\rm deg}({\rm gcd}(\mathcal{P}))\ne 0$. Note also that when
+$\mathcal{P}=\emptyset$, the lemma says that there is an $x\in C$ such that
+$\bigwedge_{Q\in \mathcal{Q}}Q(x)\ne 0$ if and only if
+${\rm deg}(\prod_{Q\in \mathcal{Q}}Q)\ge 0$, i.e. $1\ne \mathcal{Q}$.
+
\subsection{Berger and Schwichtenberg \cite{Berg95}}
The Greatest Common Divisor: A Case Study for
diff git a/books/bookvolbib.pamphlet b/books/bookvolbib.pamphlet
index 036457f..09e432f 100644
 a/books/bookvolbib.pamphlet
+++ b/books/bookvolbib.pamphlet
@@ 2138,6 +2138,19 @@ paper = "Brea89.pdf"
\end{chunk}
+\index{Jones, Simon Peyton}
+\begin{chunk}{axiom.bib}
+@book{Jone00,
+ author = "Jones, Simon Peyton",
+ title = {{Implementing Functional Languages: A Tutorial}},
+ publisher = "University of Glasgow",
+ year = "2000",
+ link = "\url{https://www.microsoft.com/enus/research/wpcontent/uploads/1992/01/student.pdf}",
+ paper = "Jone00.pdf"
+}
+
+\end{chunk}
+
\index{Jouannaud, JeanPierre}
\index{Kirchner, Claude}
\begin{chunk}{axiom.bib}
@@ 3273,7 +3286,8 @@ paper = "Brea89.pdf"
the coherence of a class of lambdacalculusbased languages that use
the intersection type discipline, including both a purely functional
programming language and the Algollike programming language Forsythe.",
 paper = "Reyn91.pdf"
+ paper = "Reyn91.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 9268,6 +9282,25 @@ when shown in factored form.
\end{chunk}
+\index{Kauers, Manuel}
+\begin{chunk}{axiom.bib}
+@inproceedings{Kaue08b,
+ author = "Kauers, Manuel",
+ title = {{Computer Algebra for Special Function Inequalities}},
+ booktitle = "Tapas in Experimental Mathematics",
+ pages = "215235",
+ year = "2008",
+ abstract =
+ "Recent computer proofs for some special function inequalities are
+ presented. The algorithmic ideas underlying these computer proofs
+ are described, and the conceptual difference to existing
+ algorithms for proving special function identities is discussed.",
+ paper = "Kaue08b.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Ng, Edward W.}
\index{Geller, Murray}
\begin{chunk}{axiom.bib}
@@ 12502,6 +12535,440 @@ when shown in factored form.
\end{chunk}
+\section{Coercion Survey  Fall 2018}
+
+\subsection{A} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\index{Arkoudas, Konstantine}
+\index{Musser, David}
+\begin{chunk}{axiom.bib}
+@book{Arko17,
+ author = "Arkoudas, Konstantine and Musser, David",
+ title = {{Fundamental Proof Methods in Computer Science}},
+ publisher = "MIT Press",
+ year = "2017",
+ isbn = "9780262035538"
+}
+
+\end{chunk}
+
+\index{Aspinall, David}
+\index{Compagnoni, Adriana}
+\begin{chunk}{axiom.bib}
+@article{Aspi01,
+ author = "Aspinall, David and Compagnoni, Adriana",
+ title = {{Subtyping Dependent Types}},
+ journal = "Theoretical Computer Science",
+ volume = "266",
+ number = "12",
+ pages = "273309",
+ year = "2001",
+ abstract =
+ "The need for subtyping in typesystems with dependent
+ types has been realized for some years. But it is hard to
+ prove that systems combining the two features have
+ fundamental properties such as subject reduction. Here we in
+ vestigate a subtyping extension of the system $\lambda$P
+ which is an abstract version of the type system of the
+ Edinburgh Logical Framework LF. By using an equivalent
+ formulation, we establish some important properties of
+ the new system $\lambda{\rm P}_{\le}$ including subject
+ reduction. Our analysis culminates in a complete and
+ terminating algorithm which establishes the decidability
+ of typechecking",
+ paper = "Aspi01.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\subsection{B} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\index{Barendregt, Hendrik Pieter}
+\begin{chunk}{axiom.bib}
+@article{Bare91,
+ author = "Barendregt, Hendrik Pieter",
+ title = {{An Introduction to Generalized Type Systems}},
+ journal = "Journal of Functional Programming",
+ volume = "1",
+ number = "2",
+ year = "1991",
+ pages = "125154",
+ abstract =
+ "Programming languages often come with type systems. Some of these are
+ simple, others are sophisticated. As a stylistic representation of
+ types in programming languages several versions of typed lambda
+ calculus are studied. During the last 20 years many of these systems
+ have appeared, so there is some need of classification. Working
+ towards a taxonomy, Barendregt (1991) gives a finestructure of the
+ theory of constructions (Coquand and Huet 1988) in the form of a
+ canonical cube of eight type systems ordered by inclusion. Berardi
+ (1988) and Terlouw (1988) have independently generalized the method of
+ constructing systems in the λcube. Moreover, Berardi (1988, 1990)
+ showed that the generalized type systems are flexible enough to
+ describe many logical systems. In that way the wellknown
+ propositionsastypes interpretation obtains a nice canonical form.",
+ paper = "Bare91.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\subsection{C} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\index{Callaghan, Paul}
+\begin{chunk}{axiom.bib}
+@article{Call08,
+ author = "Callaghan, Paul",
+ title = {{Coercive Subtyping via Mappings of Reduction Behaviour}},
+ journal = "Electronic Notes in Theoretical Computer Science",
+ volume = "196",
+ pages = "5368",
+ year = "2008",
+ abstract =
+ "This paper reports preliminary work on a novel approach to
+ Coercive Subtyping that is based on relationships between
+ reduction behaviour of source and target types in coerced
+ terms. Coercive subtyping is a subset of recordbased subtyping,
+ allowing socalled coercion functions to carry the subtyping. This
+ allows many novel and powerful forms of subtyping and
+ abbreviation, with applications including interfaces to theorem
+ provers and programming with dependent type systems. However, the
+ use of coercion functions introduces nontrivial overheads, and
+ requires difficult proof of properties such as coherence in order
+ to guarantee sensible results. These points restrict the
+ practicality of coercive subtyping. We begin with the idea that
+ coercing a value $v$ from type $U$ to a type $T$ intuitively means
+ that we wish to compute with $v$ as if it was a value in $T$, not
+ that $v$ must be converted into a value in $T$. Instead, we
+ explore how to compute on $U$ in terms of computation on $T$, and
+ develop a framework for mapping computations on some $T$ on
+ computations on some $U$ via a simple extension of the elimination
+ rule of $T$. By exposing how computations on different types are
+ related, we gain insight on and make progress with several aspects
+ of coercive subtyping, including (a) distinguishing classes of
+ coercion and finding reasons to deprecate use of some classes; (b)
+ alternative techniques for improving key properties of coercions;
+ (c) greater efficiency from implementations of coercions.",
+ paper = "Call08.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\subsection{D} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\index{Dahl, O.J.}
+\index{Dijkstra, E.W.}
+\index{Hoare, C.A.R}
+\begin{chunk}{axiom.bib}
+@book{Dahl72a,
+ author = "Dahl, O.J. and Dijkstra, E.W. and Hoare, C.A.R",
+ title = {{Structured Programming}},
+ publisher = "Academic Press",
+ year = "1972",
+ isbn = "0122005562"
+}
+
+\end{chunk}
+
+\subsection{E} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\subsection{F} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\subsection{G} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\index{Gordon, Michael J.}
+\index{Milner, Arthur J.}
+\index{Wadsworth, Christopher P.}
+\begin{chunk}{axiom.bib}
+@incollection{Gord79a,
+ author = "Gordon, Michael J. and Milner, Arthur J. and
+ Wadsworth, Christopher P.",
+ title = "Front Matter",
+ booktitle = "Edinburgh LCF",
+ publisher = "SpringerVerlag",
+ year = "1979",
+ isbn = "9783540097242",
+ pages = "18",
+ paper = "Gord79a.pdf"
+}
+
+\end{chunk}
+
+\index{Gordon, Michael J.}
+\index{Milner, Arthur J.}
+\index{Wadsworth, Christopher P.}
+\begin{chunk}{axiom.bib}
+@incollection{Gord79b,
+ author = "Gordon, Michael J. and Milner, Arthur J. and
+ Wadsworth, Christopher P.",
+ title = "Introduction",
+ booktitle = "Edinburgh LCF",
+ publisher = "SpringerVerlag",
+ year = "1979",
+ isbn = "9783540097242",
+ pages = "112",
+ paper = "Gord79b.pdf"
+}
+
+\end{chunk}
+
+\index{Gordon, Michael J.}
+\index{Milner, Arthur J.}
+\index{Wadsworth, Christopher P.}
+\begin{chunk}{axiom.bib}
+@incollection{Gord79c,
+ author = "Gordon, Michael J. and Milner, Arthur J. and
+ Wadsworth, Christopher P.",
+ title = "ML",
+ booktitle = "Edinburgh LCF",
+ publisher = "SpringerVerlag",
+ year = "1979",
+ isbn = "9783540097242",
+ pages = "1361",
+ paper = "Gord79c.pdf"
+}
+
+\end{chunk}
+
+\index{Gordon, Michael J.}
+\index{Milner, Arthur J.}
+\index{Wadsworth, Christopher P.}
+\begin{chunk}{axiom.bib}
+@incollection{Gord79d,
+ author = "Gordon, Michael J. and Milner, Arthur J. and
+ Wadsworth, Christopher P.",
+ title = "PPLAMBDA",
+ booktitle = "Edinburgh LCF",
+ publisher = "SpringerVerlag",
+ year = "1979",
+ isbn = "9783540097242",
+ pages = "6286",
+ paper = "Gord79d.pdf"
+}
+
+\end{chunk}
+
+\index{Gordon, Michael J.}
+\index{Milner, Arthur J.}
+\index{Wadsworth, Christopher P.}
+\begin{chunk}{axiom.bib}
+@incollection{Gord79e,
+ author = "Gordon, Michael J. and Milner, Arthur J. and
+ Wadsworth, Christopher P.",
+ title = "APPENDIX",
+ booktitle = "Edinburgh LCF",
+ publisher = "SpringerVerlag",
+ year = "1979",
+ isbn = "9783540097242",
+ pages = "87159",
+ paper = "Gord79e.pdf"
+}
+
+\end{chunk}
+
+\index{Grayson, Daniel R.}
+\begin{chunk}{axiom.bib}
+@misc{Gray18,
+ author = "Grayson, Daniel R.",
+ title = {{An Introduction to Univalent Foundations for Mathematicians}},
+ link = "\url{http://arxiv.org/pdfs/1711.01477v3}",
+ year = "2018",
+ abstract =
+ "We offer an introduction for mathematicians to the univalent
+ foundations of Vladimir Voevodsky, aiming to explain how he chose to
+ encode mathematics in type theory and how the encoding reveals a
+ potentially viable foundation for all of modern mathematics that can
+ serve as an alternative to set theory",
+ paper = "Gray18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\subsection{H} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\index{Harper, Robert}
+\begin{chunk}{axiom.bib}
+@misc{Harp18,
+ author = "Harper, Robert",
+ title = {{Computational Type Theory}},
+ year = "2018",
+ link =
+ "\url{http://www.cs.uoregon.edu/research/summerschool/summer18/topics.php}",
+ comment = "OPLSS 2018"
+}
+
+\end{chunk}
+
+\subsection{I} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\subsection{J} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\subsection{K} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\subsection{L} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\subsection{M} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\index{McCarthy, John}
+\index{Abrahams, Paul W.}
+\index{Edwards, Daniel J.}
+\index{Hart, Timothy P.}
+\index{Levin, Michael I.}
+\begin{chunk}{axiom.bib}
+@book{Mcca62,
+ author = "McCarthy, John and Abrahams, Paul W. and Edwards, Daniel J.
+ and Hart, Timothy P. and Levin, Michael I.",
+ title = {{LISP 1.5 Programmer's Manual}},
+ link = "\url{http://www.softwarepreservation.org/projects/LISP/book/LISP%201.5%20Programmers%20Manual.pdf}",
+ publisher = "MIT Press",
+ isbn = "0262130114",
+ year = "1962"
+}
+
+\end{chunk}
+
+\index{Meili, Mario}
+\begin{chunk}{axiom.bib}
+@misc{Meilxx,
+ author = "Meili, Mario",
+ title = {{Polymorphic Lambda Calculus}},
+ year = "unknown",
+ link = "\url{http://wiki.ifs.hsr.ch/SemProgAnTr/files/mmeili_polymorphic_lambda_calculus_final.pdf}",
+ paper = "Meilxx.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\subsection{N} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\index{Nipkow, Tobias}
+\index{Tabacznyj, Christophe}
+\index{Paulson, Lawrence C.}
+\index{Chaieb, Amine}
+\index{Rasmussen, Thomas M.}
+\index{Avigad, Jeremy}
+\begin{chunk}{axiom.bib}
+@misc{Nipk18,
+ author = "Nipkow, Tobias and Tabacznyj, Christophe and
+ Paulson, Lawrence C. and Chaieb, Amine and Rasmussen, Thomas M.
+ and Avigad, Jeremy",
+ title = {{GCD in Isabelle}},
+ link = "\url{http://isabelle.in.tum.ed/dist/library/HOL/HOL/GCD.html}",
+ year = "2018"
+}
+
+\end{chunk}
+
+\subsection{O} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\subsection{P} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\subsection{Q} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\subsection{R} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\index{Reynolds, John C.}
+\begin{chunk}{axiom.bib}
+@misc{Reyn94,
+ author = "Reynolds, John C.",
+ title = {{An Introduction to the Polymorphic Lambda Calculus}},
+ year = "1994",
+ paper = "Reyn94.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\subsection{S} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\subsection{T} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\index{BreazuTannen, Val}
+\index{Coquand, Thierry}
+\index{Gunter, Carl A.}
+\index{Scedrov, Andre}
+\begin{chunk}{axiom.bib}
+@article{Tann93,
+ author = "BreazuTannen, Val and Coquand, Thierry and Gunter, Carl A.
+ and Scedrov, Andre",
+ title = {{Inheritance as Implicit Coercion}},
+ journal = "Information and Computation",
+ volume = "93",
+ pages = "172221",
+ year = "1991",
+ abstract =
+ "We present a method for providing semantic interpretations for
+ languages with a type system featuring inheritance polymorphism. Our
+ approach is illustrated on an extension of the language Fun of
+ Cardelli and Wegner, which we interpret via a translation into an
+ extended polymorphic lambda calculus. Our goal is to interpret
+ inheritances in Fun via coercion functions which are definable in the
+ target of the translation. Existing techniques in the theory of
+ semantic domains can be then used to interpret the extended
+ polymorphic lambda calculus, thus providing many models for the
+ original language. This technique makes it possible to model a rich
+ type discipline which includes parametric polymorphism and recursive
+ types as well as inheritance. A central difficulty in providing
+ interpretations for explicit type disciplines featuring inheritance in
+ the sense discussed in this paper arises from the fact that programs
+ can typecheck in more than one way. Since interpretations follow the
+ typechecking derivations, coherence theorems are required: that is,
+ one must prove that the meaning of a program does not depend on the
+ way it was typechecked. Proofs of such theorems for our proposed
+ interpretation are the basic technical results of this paper.
+ Interestingly, proving coherence in the presence of recursive
+ types, variants, and abstract types forced us to reexamine fundamental
+ equational properties that arise in proof theory (in the form of
+ commutative reductions) and domain theory (in the form of strict vs
+ nonstrict functions).",
+ paper = "Tann93.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\subsection{U} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\subsection{V} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\subsection{W} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\index{Wadler, Philip}
+\begin{chunk}{axiom.bib}
+@misc{Wadl03,
+ author = "Wadler, Philip",
+ title = {{The GirardReynolds Isomorphism}},
+ journal = "Information and Computation",
+ volume = "186",
+ number = "2",
+ pages = "260280",
+ year = "2003",
+ abstract =
+ "The secondorder polymorphic lambda calculus, F2, was
+ independently discovered by Girard and Reynolds. Girard
+ additionally proved a {\sl representation} theorem: every function
+ on natural numbers that can be proved total in secondorder
+ intuitionistic propositional logic, P2, can be represented in
+ F2. Reynolds additionally proved an {\sl abstraction} theorem: for
+ a suitable notion of logical relation, every term in F2 takes
+ related arguments into related results. We observe that the
+ essence of Girard's result is a projection from P2 into F2, and
+ that the essence of Reynolds's result is an embedding of F2 into
+ P2, and that the Reynolds embedding followed by the Girard
+ projection is the identity. The Girard projection discards all
+ firstorder quantifiers, so it seems unreasonable to expect that
+ the Girard projection followed by the Reynolds embedding should
+ also be the identity. However, we show that in the presence of
+ Reynolds's parametricity property that this is indeed the case,
+ for propositions corresponding to inductive definitions of
+ naturals, products, sums, and fixpoint types.",
+ paper = "Wadl03.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\subsection{X} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\subsection{Y} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\subsection{Z} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
\section{Proving Axiom Correct  Spring 2018}
\subsection{A} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ 15627,38 +16094,6 @@ when shown in factored form.
\end{chunk}
\index{Gordon, Michael J.}
\index{Milner, Arthur j.}
\index{Wadsworth, Christopher P.}
\begin{chunk}{axiom.bib}
@article{Gord79a,
 author = "Gordon, Michael J. and Milner, Arthur j. and
 Wadsworth, Christopher P.",
 title = {{Edinburgh LCF, A Mechanised Logic of Computation: ML}},
 journal = "LNCS",
 volume = "78",
 year = "1979",
 paper = "Gord79a.pdf"
}

\end{chunk}

\index{Gordon, Michael J.}
\index{Milner, Arthur j.}
\index{Wadsworth, Christopher P.}
\begin{chunk}{axiom.bib}
@article{Gord79b,
 author = "Gordon, Michael J. and Milner, Arthur j. and
 Wadsworth, Christopher P.",
 title = {{Edinburgh LCF, A Mechanised Logic of Computation: PPLAMBDA}},
 journal = "LNCS",
 volume = "78",
 year = "1979",
 paper = "Gord79b.pdf"
}

\end{chunk}

\index{Gordon, Michael J.C.}
\begin{chunk}{axiom.bib}
@inproceedings{Gord89,
@@ 16127,6 +16562,35 @@ when shown in factored form.
\end{chunk}
+\index{Hudak, Paul}
+\begin{chunk}{axiom.bib}
+@misc{Huda89,
+ author = "Hudak, Paul",
+ title = {{The Conception, Evolution, and Application of Functional
+ Programming Languages}},
+ link = "\url{http://haskell.cs.yale.edu/wp-content/uploads/2011/01/cs.pdf}",
+ year = "1989",
+ abstract =
+ "The foundations of functional programming languages are examined from
+ both historical and technical perspectives. Their evolution is traced
+ through several critical periods: early work on lambda calculus and
+ combinatory calculus, Lisp, Iswim, FP, ML, and modern functional
+ languages such as Miranda 1 and Haskell. The fundamental premises on
+ which the functional programming methodology stands are critically
+ analyzed with respect to philosophical, theoretical, and pragmatic
+ concerns. Particular attention is paid to the main features that
+ characterize modern functional languages: higherorder functions, lazy
+ evaluation, equations and patternmatching, strong static typing and
+ type inference, and data abstraction. In addition, current research
+ areas—such as parallelism, nondeterminism, input/output, and
+ stateoriented computations—are examined with the goal of predicting
+ the future development and application of functional languages.",
+ paper = "Huda89.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Huet, G\'erard P.}
\begin{chunk}{axiom.bib}
@article{Huet75,
@@ 16171,7 +16635,8 @@ when shown in factored form.
denotational semantics for the programming language. We establish the
correctness of the transformation method itself, and give techniques
pertaining to its actual implementation. The paper is illustrated with
 recursion removal examples."
+ recursion removal examples.",
+ paper = "Huet78.pdf"
}
\end{chunk}
@@ 16944,6 +17409,43 @@ when shown in factored form.
\end{chunk}
+\index{Lester, David R.}
+\begin{chunk}{axiom.bib}
+@phdthesis{Lest89,
+ author = "Lester, David R.",
+ title = {{Combinator Graph Reduction: A Congruence and its Applications}},
+ year = "1989",
+ school = "Oxford University",
+ isbn = "0902928554",
+ abstract =
+ "The GMachine is an efficient implementation of lazy functional
+ languages developed by Augustsson and Johnsson. This thesis may be
+ read as a formal mathematical proof that the Gmachine is correct with
+ respect to a denotational semantic specification of a simple
+ language. It also has more general implications. A simple lazy
+ functional language is defined both denotationally and operationally;
+ both are defined to handle erroneous results. The operational
+ semantics models combinator graph reduction, and is based on reduction
+ to weak head normal form. The two semantic definitions are shown to be
+ congruent. Because of error handling the language is not
+ confluent. Complete strictness is shown to be a necessary and
+ sufficient condition for changing lazy function calls to strict
+ ones. As strictness analyses are usually used with confluent
+ languages, methods are discussed to restore this property. The
+ operational semantic model uses indirection nodes to implement
+ sharing. An alternative, which is without indirection nodes, is shown
+ to be operationally equivalent for terminating programs. The Gmachine
+ is shown to be a representation of the combinator graph reduction
+ operational model. It may be represented by the composition of a small
+ set of combinators which correspond to an abstract machine instruction
+ set. Using a modified form of graph isomorphism, alternative sequences
+ of instructions are shown to be isomorphic, and hence may be used
+ interchangeably.",
+ paper = "Lest89.pdf"
+}
+
+\end{chunk}
+
\index{Letouzey, Pierre}
\begin{chunk}{axiom.bib}
@inproceedings{Leto04,
@@ 18307,7 +18809,7 @@ when shown in factored form.
\begin{chunk}{axiom.bib}
@book{Pier91b,
author = "Pierce, Benjamin C.",
 title = {{ Basic Category Theory for Computer Scientists}},
+ title = {{Basic Category Theory for Computer Scientists}},
publisher = "MIT Press",
year = "1991",
isbn = "0262660717"
@@ 19691,7 +20193,7 @@ when shown in factored form.
pages = "201226",
year = "2007",
abstract =
 "eanYves Girard and John Reynolds independently discovered the
+ "JeanYves Girard and John Reynolds independently discovered the
secondorder polymorphic lambda calculus, F2. Girard additionally
proved a Representation Theorem : every function on natural numbers
that can be proved total in secondorder intuitionistic predicate
@@ 24595,8 +25097,33 @@ when shown in factored form.
\index{Hearn, Peter W.O.}
\begin{chunk}{axiom.bib}
+@inproceedings{Hear18,
+ author = "O'Hearn, Peter W.",
+ title = {{Continuous Reasoning: Scaling the Impact of Formal Methods}},
+ booktitle = "LICS 18",
+ year = "2018",
+ publisher = "ACM",
+ isbn = "9781450355834",
+ abstract =
+ "This paper describes work in continuous reasoning, where formal
+ reasoning about a (changing) codebase is done in a fashion which
+ mirrors the iterative, continuous model of software development that
+ is increasingly practiced in industry. We suggest that advances in
+ continuous reasoning will allow formal reasoning to scale to more
+ programs, and more programmers. The paper describes the rationale for
+ continuous reasoning, outlines some success cases from within
+ industry, and proposes directions for work by the scientific
+ community.",
+ paper = "Hear18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Hearn, Peter W.O.}
+\begin{chunk}{axiom.bib}
@misc{Hear12,
 author = "Hearn, Peter W.O.",
+ author = "O'Hearn, Peter W.",
title = {{A Primer on Separation Logic (and Automatic Program
Verification and Analysis}},
year = "2012",
@@ 46639,7 +47166,7 @@ LCCN QA155.7.E4 I57 1984
coercion, which is based on rewrite rules. From this definition, an
efficient coercion algorithm for SCRATCHPAD is constructed using graph
techniques.",
 keywords = "axiomref",
+ keywords = "axiomref, printed",
beebe = "Fortenbacher:1990:ETI"
}
diff git a/changelog b/changelog
index cdb997f..f1bec19 100644
 a/changelog
+++ b/changelog
@@ 1,3 +1,7 @@
+20180921 tpd src/axiomwebsite/patches.html 20180921.01.tpd.patch
+20180921 tpd books/bookvol13 add review
+20180921 tpd books/bookvolbib add references
+20180921 tpd books/axiom.bib add references
20180811 tpd src/axiomwebsite/patches.html 20180811.01.tpd.patch
20180811 tpd books/bookvolbib add references
20180717 tpd src/axiomwebsite/patches.html 20180717.01.tpd.patch
diff git a/patch b/patch
index e9e6b12..dc33b19 100644
 a/patch
+++ b/patch
@@ 2,1594 +2,529 @@ books/bookvolbib add references
Goal: Proving Axiom Sane
\index{Scott, Dana S.}
+\index{Hearn, Peter W.O.}
\begin{chunk}{axiom.bib}
@article{Scot93,
 author = "Scott, Dana S.",
 title = {{A TypeTheoretical Alternative to ISWIM, CUCH, OWHY}},
 journal = "Theoretical Computer Science",
 volume = 121,
 number = "12",
 year = "1993",
 pages = "411440",
 abstract =
 "The paper (first written in 1969 and circulated privately) concerns
 the definition, axiomatization, and applications of the hereditarily
 monotone and continuous functionals generated from the integers and
 the Booleans (plus “undefined” elements). The system is formulated as
 a typed system of combinators (or as a typed λcalculus) with a
 recursion operator (the least fixedpoint operator), and its proof
 rules are contrasted to a certain extent with those of the untyped
 λcalculus. For publication (1993), a new preface has been added, and
 many bibliographical references and comments in footnotes have been
 appended.",
 paper = "Scot93.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Winkler, Franz}
\begin{chunk}{axiom.bib}
@inproceedings{Wink85,
 author = "Winkler, Franz",
 title = {{Reducing the Complexity of the KnuthBendix Completion
 Algorithm: A ``Unification'' of Different Approaches}},
 booktitle = "European Conference on Computer Algebra (EUROCAL 85)",
 pages = "378389",
 publisher = "Springer",
 year = "1985",
 isbn = "9783540159834",
 abstract =
 "The KnuthBendix completion procedure for rewrite rule systems is
 of wide applicability in symbolic and algebraic computation.
 Attempts to reduce the complexity of this completion algorithm are
 reported in the literature. Already in their seminal 1967 paper
 D.E. Knuth and P.B. Bendix have suggested to keep all the rules
 iterreduced during the execution of the algorithm. G. Huet has
 presented a version of the completion algorithm in which every
 rewrite rule is kept in reduced form with respect to all the other
 rules in the system. Borrowing an idea of Buchberger's for the
 completion of bases of polynomial ideals the author has proposed
 in 1983 a criterion for detecting ``unnecessary'' critical
 pairs. If a critical pair is recognized as unnecessary then one
 need not apply the costly process of computing normal forms to
 it. It has been unclear whether these approaches can be
 combined. We demonstrate that it is possible to keep all the
 rewrite rules interreduced and still use a criterion for
 eliminating unnecessary critical pairs.",
 paper = "Wink85.pdf",
 keywords = "printed"
}

\end{chunk}

\index{KandriRody, Abdelilah}
\index{Kapur, Deepak}
\index{Winkler, Franz}
\begin{chunk}{axiom.bib}
@inproceedings{Kand89,
 author = "KandriRody, Abdelilah and Kapur, Deepak and Winkler, Franz",
 title = {{KnuthBendix Procedure and Buchberger Algorithm  A Synthesis}},
 booktitle = "ISSAC'89",
 publisher = "ACM Press",
 pages = 5567",
 year = "1989",
 isbn = "0897913256",
 abstract =
 "The KnuthBendix procedure for the completion of a rewrite rule
 system and the Buchberger algorithm for computing a Groebner basis
 of a polynomial ideal are very similar in two respects: they both
 start wtih an arbitrary specification of an algebraic structure
 (axioms for an equational theory and a basis for a polynomial
 ideal, respectively) which is transformed to a very special
 specification of this algebraic structure (a complete rewrite rule
 system and a Groebner basis of the polynomial ideal, respectively).
 This special specification allows to decide many problems concerning
 the given algebraic structure. Moreover, both algorithms achieve
 their goals by employing the same basic concepts: formation of
 critical pairs and completion.

 Although the two methods are obviously related, the exact nature
 of this relation remains to be clarified. Based on previous work
 we show how the KnuthBendix procedure and the Buchberger algorithm
 can be seen as special cases of a more general completion procedure.",
 paper = "Kand89.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Winkler, Franz}
\begin{chunk}{axiom.bib}
@misc{Wink92,
 author = "Winkler, Franz",
 title = {{Computer Algebra}},
 booktitle = "Encyclopedia of Physical Science and Technology",
 volume = "4",
 year = "1992",
 publisher = "Academic Press",
 paper = "Wink92.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Winkler, Franz}
begin{chunk}{axiom.bib}
@inproceedings{Wink95,
 author = "Winkler, Franz",
 title = {{Computer Algebra  Problems and Developments}},
 booktitle = "Computers in Industry 2 (SCAFI'92)",
 pages = "117",
 year = "1995",
 isbn = "9780471955290",
 paper = "Wink95.pdf"
}

\end{chunk}

\index{Winkler, Franz}
\begin{chunk}{axiom.bib}
@techreport{Wink99,
 author = "Winkler, Franz",
 title = {{Advances and Problems in Algebraic Computation}},
 type = "technical report",
 number = "9949",
 year = "1999",
 institution = "RISC, Linz",
+@inproceedings{Hear18,
+ author = "O'Hearn, Peter W.",
+ title = {{Continuous Reasoning: Scaling the Impact of Formal Methods}},
+ booktitle = "LICS 18",
+ year = "2018",
+ publisher = "ACM",
+ isbn = "9781450355834",
abstract =
 "In the last years there has been dramatic progress in all areas
 of algebraic computation. Many working mathematiians have access
 to and actually use algebraic software systems for their research
 No end of this development is in sight. We report on some active
 topics in algebraic computation, namely the theory of integration
 and symbolic solution of systems of differential equations, the
 solution of systems of algebraic equations, the factorization of
 polynomials, and the design and analysis of algebraic curves and
 surfaces.",
 paper = "Wink99.pdf",
+ "This paper describes work in continuous reasoning, where formal
+ reasoning about a (changing) codebase is done in a fashion which
+ mirrors the iterative, continuous model of software development that
+ is increasingly practiced in industry. We suggest that advances in
+ continuous reasoning will allow formal reasoning to scale to more
+ programs, and more programmers. The paper describes the rationale for
+ continuous reasoning, outlines some success cases from within
+ industry, and proposes directions for work by the scientific
+ community.",
+ paper = "Hear18.pdf",
keywords = "printed"
}
\end{chunk}
\index{Winkler, Franz}
+\index{Lester, David R.}
\begin{chunk}{axiom.bib}
@inproceedings{Wink06,
 author = "Winkler, Franz",
 title = {{Computer Algebra and Geometry  Some Interactions}},
 booktitle = "Proc. of Coll. on Constructive Algebra and System
 Theory (CAST)",
 year = "2006",
 publisher = "unknown",
 isbn = "906984477x",
 pages = "127138",
 abstract =
 "Algebraic curves and surfaces have been studied intensively in
 algebraic geometry for centuries. Thus, there exists a huge amount
 of theoretical knowledge about these geometric objects. Recently,
 algebraic curves and surfaces play an important and ever
 increasing role in computer aided geometric design, computer
 vision, computer aided manufacturing, coding theory, and
 cryptography, just to name a few application areas. Consequently,
 theoretical results need to be adapted to practical needs. We need
 efficient algorithms for generating, representing, manipulating,
 analyzing, rendering algebraic curves and surfaces. Exact computer
 algebraic methods can be employed effectively for dealing with
 these geometric problems.",
 paper = "Wink06.pdf"
}

\end{chunk}

\index{Davies, Rowan}
\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@article{Davi01,
 author = "Davies, Rowan and Pfenning, Frank",
 title = {{A Modal Analysis of Staged Computation}},
 journal = "J. ACM",
 volume = "48",
 number = "3",
 pages = "555604",
 year = "2001",
 abstract =
 "We show that a type system based on the intuitionistic modal logic S4
 provides an expressive framework for specifying and analyzing
 computation stages in the context of typed $\lambda$calculi and
 functional languages. We directly demonstrate the sense in which our
 $\lambda_e^{\rightarrow\box}$calculus captures staging, and also give
 a conservative embedding of Nielson and Nielson's twolevel functional
 language in our functional language MiniML${}^\box$, thus proving that
 bindingtime correctness is equivalent to modal correctness on this
 fragment. In addition, MiniML${}^\box$ can also express immediate
 evaluation and sharing of code across multiple stages, thus supporting
 runtime code generation as well as partial evaluation.",
 paper = "Davi01.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Pfenning, Frank}
\index{Davies, Rowan}
\begin{chunk}{axiom.bib}
@article{Pfen01,
 author = "Pfenning, Frank and Davies, Rowan",
 title = {{A Judgemental Reconstruction of Modal Logic}},
 journal = "Mathematical Structures in Computer Science",
 volume = "11",
 pages = "511540",
 year = "2001",
 abstract =
 "We reconsider the foundations of modal logic, following MartinLof's
 methodology of distinguishing judgments from propositions. We give
 constructive meaning explanations for necessity and possibility which
 yields a simple and uniform system of natural deduction for
 intuitionistic modal logic which does not exhibit anomalies found in
 other proposals. We also give a new presentation of lax logic and nd
 that the lax modality is already expressible using possibility and
 necessity. Through a computational interpretation of proofs in modal
 logic we further obtain a new formulation of Moggi's monadic
 metalanguage.",
 paper = "Pfen01.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Lamport, Leslie}
\begin{chunk}{axiom.bib}
@misc{Lamp03,
 author = "Lamport, Leslie",
 title = {{The Future of Computing: Logic or Biology}},
 link =
 "\url{https://lamport.azurewebsites.net/pubs/futureofcomputing.pdf}",
 year = "2003",
 paper = "Lamp03.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Glymour, Clark}
\index{Serafin, Luke}
\begin{chunk}{axiom.bib}
@misc{Glym15,
 author = "Glymour, Clark and Serafin, Luke",
 title = {{Mathematical Metaphysics}},
 link = "\url{http://shelf1.library.cmu.edu/HSS/2015/a1626190.pdf}",
 year = "2015",
 paper = "Glym15.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Reynolds, John C.}
\begin{chunk}{axiom.bib}
@article{Reyn93,
 author = "Reynolds, John C.",
 title = {{The Discoveries of Continuations}},
 journal = "Lisp and Symbolic Computation",
 volume = "6",
 pages = "233248",
 year = "1993",
 abstract =
 "We give a brief account of the discoveries of continuations and
 related concepts.",
 paper = "Reyn93.pdf",
 keywords = "printed"
+@phdthesis{Lest89,
+ author = "Lester, David R.",
+ title = {{Combinator Graph Reduction: A Congruence and its Applications}},
+ year = "1989",
+ school = "Oxford University",
+ isbn = "0902928554",
+ abstract =
+ "The GMachine is an efficient implementation of lazy functional
+ languages developed by Augustsson and Johnsson. This thesis may be
+ read as a formal mathematical proof that the Gmachine is correct with
+ respect to a denotational semantic specification of a simple
+ language. It also has more general implications. A simple lazy
+ functional language is defined both denotationally and operationally;
+ both are defined to handle erroneous results. The operational
+ semantics models combinator graph reduction, and is based on reduction
+ to weak head normal form. The two semantic definitions are shown to be
+ congruent. Because of error handling the language is not
+ confluent. Complete strictness is shown to be a necessary and
+ sufficient condition for changing lazy function calls to strict
+ ones. As strictness analyses are usually used with confluent
+ languages, methods are discussed to restore this property. The
+ operational semantic model uses indirection nodes to implement
+ sharing. An alternative, which is without indirection nodes, is shown
+ to be operationally equivalent for terminating programs. The Gmachine
+ is shown to be a representation of the combinator graph reduction
+ operational model. It may be represented by the composition of a small
+ set of combinators which correspond to an abstract machine instruction
+ set. Using a modified form of graph isomorphism, alternative sequences
+ of instructions are shown to be isomorphic, and hence may be used
+ interchangeably.",
+ paper = "Lest89.pdf"
+}
+
+\end{chunk}
+
+\index{Jones, Simon Peyton}
+\begin{chunk}{axiom.bib}
+@book{Jone00,
+ author = "Jones, Simon Peyton",
+ title = {{Implementing Functional Languages: A Tutorial}},
+ publisher = "University of Glasgow",
+ year = "2000",
+ link = "\url{https://www.microsoft.com/en-us/research/wp-content/uploads/1992/01/student.pdf}",
+ paper = "Jone00.pdf"
}
\end{chunk}
\index{Oles, Frank Joseph}
+\index{Hudak, Paul}
\begin{chunk}{axiom.bib}
@phdthesis{Oles82,
 author = "Oles, Frank Joseph",
 title = {{A CategoryTheoretic Approach to the Semantics of
+@misc{Huda89,
+ author = "Hudak, Paul",
+ title = {{The Conception, Evolution, and Application of Functional
Programming Languages}},
 school = "Syracuse University",
 year = "1982",
 abstract =
 "Here we create a framework for handling the semantics of fully
 typed programming languages with imperative features, higherorder
 ALGOLlike procedures, block structure, and implicit
 conversions. Our approach involves introduction of a new family of
 programming languages, the {\sl coercive typed $\lambda$calculi},
 denoted by $L$ in the body of the dissertation. By appropriately
 choosing the linguistic constants (i.e. generators) of $L$, we can
 view phrases of variants of ALGOL as syntactically sugared phrases
 of $L$.

 This dissertation breaks into three parts. In the first part,
 consisting of the first chapter, we supply basic definitions and
 motivate the idea that functor categories arise naturally in the
 explanation of block structure and stack discipline. The second
 part, consisting of the next three chapters, is dedicated to the
 general theory of the semantics of the coercive typed
 $\lambda$calculus; the interplay between posets, algebras, and
 Cartesian closed categories is particularly intense here. The
 remaining four chapters make up the final part, in which we apply
 the general theory to give both direct and continuation semantics
 for desugared variants of ALGOL. To do so, it is necessary to show
 certain functor categories are Cartesian closed and to describe a
 category $\Sigma$ of store shapes. An interesting novelty in the
 presentation of continuation semantics is the view that commands
 form a procedural, rather than a primitive, phrase type.",
 paper = "Oles82.pdf"
}

\end{chunk}

\index{Abdali, S. Kamal}
\index{Winkler, Franz}
\begin{chunk}{axiom.bib}
@techreport{Abda81,
 author = "Abdali, S. Kamal and Winkler, Franz",
 title = {{A LambdaCalculus Model for Generating Verification Conditions}},
 type = "technical report",
 institution = "Rensselaer Polytechnic Institute",
 number = "CS8104",
 year = "1981",
 abstract =
 "A lambdacalculusbased method is developed for the automatic
 generation of verification conditions. A programming language is
 specified in which inductive assertions associated with a program
 are incorporated within the body of the program by means of assert
 and maintainwhile statements. This programming language includes
 the following features: assignments, conditionals, loops,
 compounds, ALGOLtype block structure. A model is developed which
 consists of rules to translate each statement in this programming
 language into the lambacalculus. The model is such that the
 lambdaexpression translation of any program reduces to a list
 (tuple) of lambdaexpression representations of all verification
 conditions of the program. A proof of this property is given.",
 paper = "Abda81.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Abdali, S. Kamal}
\begin{chunk}{axiom.bib}
@book{Abda73,
 author = "Abdali, S. Kamal",
 title = {{A Simple LambdaCalculus Model of Programming Languages}},
 isbn = "9781332196029",
 publisher = "Forgotten Books",
 year = "1973",
 abstract =
 "We present a simple correspondence between a large subset of
 ALGOL 60 language and lambdacalculus. With the aid of this
 correspondence, a program can be translated into a single
 lambdaexpression. In general, the representation of a program is
 specified by means of a system of simultaneous conversion
 relations among the representations of the program
 constituents. Highlevel programming language features are treated
 directly, not in terms of the representations of machinelevel
 operations. The model includes inputoutput in such a way that
 when the representation of a (convergent) program is applied to
 the input item representations, the resulting combination reduces
 to a tuple of the representations of the output items. This model
 does not introduce any imperative notions into the calculus; the
 descriptive programming constructs, such as expressions, and the
 imperative ones, such as assignment and jumps, are translated
 alike into pure lambda expressions. The applicability of the model
 to the problems of proving program equivalence and correctness is
 illustrated by means of simple examples."
 paper = "Abda73.pdf"
}

\end{chunk}

\index{Abdali, S. Kamal}
\begin{chunk}{axiom.bib}
@article{Abda74,
 author = "Abdali, S. Kamal",
 title = {{A LambdaCalculus Model of Programming Languages 
 I. Simple Constructs}},
 journal = "J. of Computer Languages",
 volume = "1",
 pages = "287301",
 year = "1974",
 abstract =
 "A simple correspondence is presented between the elementary
 constructs of programming languages and the lambdacalculus. The
 correspondence is obtained by using intuitive, functional
 interpretation of programming language constructs, completely
 avoiding the notions of machine memory and address. In particular,
 the concepts of program variable and assignments are accounted for
 in terms of the concepts of mathematical variable and
 substitution, respectively. Lambdaexpression representations are
 given of assignments, conditional and compound statements,
 inputoutput, and blocks. Algol 60 is used as the illustrative
 language.",
 paper = "Abda74.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Abdali, S. Kamal}
\begin{chunk}{axiom.bib}
@article{Abda74a,
 author = "Abdali, S. Kamal",
 title = {{A LambdaCalculus Model of Programming Languages 
 II. Jumps and Procedures}},
 journal = "J. of Computer Languages",
 volume = "1",
 pages = "303320",
 year = "1974",
 abstract =
 "The correspondence between programming languages and the
 lambdacalculus presented in Part I of the paper is extended there
 to include iteration statements, jumps, and procedures. Programs
 containing loops are represented by lambdaexpressions whose
 components are specified recursively by means of systems of
 simultaneous conversion relations. The representation of
 callbyname and sideeffects in a program is accomplished without
 any resort to the concepts of memory andaddress by making use of a
 number of variables in addition to those declared by the programs;
 with the aid of these additional variables, the parameter linkage
 operations can be simulated by pure substitution. The
 applicability of the model to the problems of proving program
 correctness and equivalence is demonstrated by means of examples.",
 paper = "Abda74a.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Abdali, S. Kamal}
\begin{chunk}{axiom.bib}
@phdthesis{Abda74b,
 author = "Abdali, S. Kamal",
 title = {{A Combinatory Logic Model of Programming Languages}},
 school = "University of Wisconsin",
 year = "1974",
 abstract =
 "A simple correspondence is presented between a large subset of the
 ALGOL 60 language and the combinatory logic. With the aid of this
 correspondence, a program can be translated into a single combinatory
 object. The combinatory object representing a program is specified,
 in general, by means of a system of reduction relations among the
 representations of the program constituents. This object denotes, in
 terms of the combinatory logic, the function that the program is
 intended to compute.

 The model has been derived by using intuitive, functional
 interpretations of the constructs of programming languages, completely
 avoiding the notions of machine command and address. In particular,
 the concepts of program variable, assignment, and procedure have been
 accounted for in terms of the concepts of mathematical variable,
 substitution, and function, respectively.

 Highlevel programming language features are represented in the
 combinatory logic directly, not in terms of the representations of
 machinelevel operations. Inputoutput is treated in such a
 manner that when the representation of a program is applied to the
 representations of the input items, the resulting combination reduces
 to a tuple of the representations of the output items.

 The applicability of the model to the problems of
 proving program equivalence and correctness is illustrated
 by means of examples.",
 paper = "Abda74b.pdf"
}

\end{chunk}

\index{Krall, Andreas}
\begin{chunk}{axiom.bib}
@misc{Kralxx,
 author = "Krall, Andreas",
 title = {{Implementation Techniques for Prolog}},
 link = "\url{https://pdfs.semanticscholar.org/fdbf/aa46bf6ab2148595f638fe9afe97033583ee.pdf}",
 year = "unknown",
+ link = "\url{http://haskell.cs.yale.edu/wp-content/uploads/2011/01/cs.pdf}",
+ year = "1989",
abstract =
 "This paper is a short survey about currently used implementation
 techniques for Prolog. It gives an introduction to unification and
 resolution in Prolog and presents the memory model and a basic
 execution model. These models are expanded to the Vienna Abstract
 Machine (VAM) with its two versions, the VAM${}_{2p}$ and the
 VAM${}_{1p}$, and the most famous abstract machine, the Warren
 Abstract Machine (WAM). The continuation passing style model of
 Prolog, binary Prolog, leands to the BinWAM. Abstract
 interpretation can be applied to gather information about a
 program. This information is used in the generation of very
 specialized machine code and in optimizations like clause indexing
 and instruction scheduling on each kind of abstract machine.",
 paper = "Kralxx.pdf",
+ "The foundations of functional programming languages are examined from
+ both historical and technical perspectives. Their evolution is traced
+ through several critical periods: early work on lambda calculus and
+ combinatory calculus, Lisp, Iswim, FP, ML, and modern functional
+ languages such as Miranda 1 and Haskell. The fundamental premises on
+ which the functional programming methodology stands are critically
+ analyzed with respect to philosophical, theoretical, and pragmatic
+ concerns. Particular attention is paid to the main features that
+ characterize modern functional languages: higherorder functions, lazy
+ evaluation, equations and patternmatching, strong static typing and
+ type inference, and data abstraction. In addition, current research
+ areas—such as parallelism, nondeterminism, input/output, and
+ stateoriented computations—are examined with the goal of predicting
+ the future development and application of functional languages.",
+ paper = "Huda89.pdf",
keywords = "printed"
}
\end{chunk}
\index{Abadi, Martin}
\index{Cardelli, Luca}
\index{Curien, PierreLouis}
\index{Levy, JeanJacques}
+\index{Grayson, Daniel R.}
\begin{chunk}{axiom.bib}
@inproceedings{Abad90,
 author = "Abadi, Martin and Cardelli, Luca and Curien, PierreLouis
 and Levy, JeanJacques",
 title = {{Explicit Substitutions}},
 booktitle = "Symp. of Principles of Programming Languages",
 publisher = "ACM",
 year = "1990",
 link = "\url{http://www.hpl.hp.com/techreports/CompaqDEC/SRCRR54.pdf}",
+@misc{Gray18,
+ author = "Grayson, Daniel R.",
+ title = {{An Introduction to Univalent Foundations for Mathematicians}},
+ link = "\url{http://arxiv.org/pdf/1711.01477v3}",
+ year = "2018",
abstract =
 "The $lambda\sigma$calculus is a refinement of the
 $lambda$calculus where substitutions are manipulated
 explicitly. The $\lambda\sigma$calculus provides a setting for
 studying the theory of substitutions, with pleasant mathematical
 properties. It is also a useful bridge between the classical
 $\lambda$calculus and concrete implementations.",
 paper = "Abad90.pdf",
+ "We offer an introduction for mathematicians to the univalent
+ foundations of Vladimir Voevodsky, aiming to explain how he chose to
+ encode mathematics in type theory and how the encoding reveals a
+ potentially viable foundation for all of modern mathematics that can
+ serve as an alternative to set theory.",
+ paper = "Gray18.pdf",
keywords = "printed"
}
\end{chunk}
\index{Beeson, Michael}
+\index{Breazu-Tannen, Val}
+\index{Coquand, Thierry}
+\index{Gunter, Carl A.}
+\index{Scedrov, Andre}
\begin{chunk}{axiom.bib}
@article{Bees80a,
 author = "Beeson, Michael",
 title = {{Formalizing Constructive Mathematics: Why and How?}},
 journal = "Lecture Notes in Mathematics",
 volume = "873",
 pages = "146190",
 year = "1980",
 paper = "Bees80a.pdf"
}

\end{chunk}

\index{Goodman, Nicolas D.}
\begin{chunk}{axiom.bib}
@article{Good80,
 author = "Goodman, Nicolas D.",
 title = {{Reflections on Bishop's Philosophy of Mathematics}},
 journal = "Lecture Notes in Mathematics",
 volume = "873",
 pages = "135145",
 year = "1980",
 paper = "Good80.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Rosenkranz, Markus}
\begin{chunk}{axiom.bib}
@book{Rose07,
 author = "Rosenkranz, Markus",
 title = {{Gr\"obner Bases in Symbolic Analysis}},
 publisher = "Walter de Gruyter, Berlin Germany",
 year = "2007",
 isbn = "9783110193237",
 keywords = "axiomref, TPDref"
}

\end{chunk}

\index{Daly, Timothy}
\begin{chunk}{axiom.bib}
@misc{Daly06,
 author = "Daly, Timothy",
 title = {{Literate Programming}},
 link = "\url{http://lambdatheultimate.org/node/1336}",
 year = "2006",
 keywords = "axiomref, TPDref"
}

\end{chunk}

\index{Prengel, Alex}
\begin{chunk}{axiom.bib}
@misc{Pren16,
 author = "Prengel, Alex",
 title = {{MIT axiommath_v8.14}},
 link = "\url{http://web.mit.edu/axiommath_v8.14/}",
 year = "2016",
 keywords = "axiomref, TPDref"
}

\end{chunk}

\index{Jenks, Richard D.}
\begin{chunk}{axiom.bib}
@article{Jenk11,
 author = "Jenks, Richard D.",
 title = {{The 2011 Richard D. Jenks Memorial Prize}},
 journal = "ACM Communications in Computer Algebra",
 volume = "45",
 number = "4",
 year = "2011",
 abstract =
 "The 2011 Richard D. Jenks Memorial Prize for Excellence in
 Software Engineering Applied to Computer Algebra was presented by
 members of the Prize Selection Committee and its chair, Erich
 Kaltofen, at ISSAC and SNC in San Jose, CA, on June 9, 2011 to the
 Maple Project at the University of Waterloo.",
 paper = "Jenk11.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Jenks, Richard D.}
\begin{chunk}{axiom.bib}
@article{Jenk13,
 author = "Jenks, Richard D.",
 title = {{The 2013 Richard D. Jenks Memorial Prize}},
 journal = "ACM Communications in Computer Algebra",
 volume = "47",
 number = "4",
 year = "2013",
 abstract =
 "The 2013 Richard D. Jenks Memorial Prize for Excellence in
 Software Engineering Applied to Computer Algebra was announced by
 members of the Prize Selection Committee, Mark Giesbrecht
 representing its chair Erich Kaltofen, at ISSAC in Boston, MA, on
 June 28, 2013 to have been awarded to Professor William Arthur
 Stein of the Sage Project at the University of Washington.",
 paper = "Jenk13.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Jenks, Richard D.}
\begin{chunk}{axiom.bib}
@article{Jenk15,
 author = "Jenks, Richard D.",
 title = {{The 2015 Richard D. Jenks Memorial Prize}},
 journal = "ACM Communications in Computer Algebra",
 volume = "49",
 number = "4",
 year = "2015",
 abstract =
 "The 2015 Richard D. Jenks Memorial Prize was awarded on October
 30, 2015 at the Fields Institute in Toronto during the Major
 Thematic Program on Computer Algebra to Professor Victor Shoup for
 NTL: A Library for doing Number Theory.",
 paper = "Jenk15.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Jenks, Richard D.}
\begin{chunk}{axiom.bib}
@article{Jenk17,
 author = "Jenks, Richard D.",
 title = {{The 2017 Richard D. Jenks Memorial Prize}},
 journal = "ACM Communications in Computer Algebra",
 volume = "51",
 number = "4",
 year = "2017",
+@article{Tann93,
+ author = "BreazuTannen, Val and Coquand, Thierry and Gunter, Carl A.
+ and Scedrov, Andre",
+ title = {{Inheritance as Implicit Coercion}},
+ journal = "Information and Computation",
+ volume = "93",
+ pages = "172221",
+ year = "1991",
abstract =
 "The 2017 Richard D. Jenks Memorial Prize for Excellence in
 Software Engineering Applied to Computer Algebra has been awarded
 to Stephen Wolfram for WolframAlpha and Mathematica.",
 paper = "Jenk17.pdf",
 keywords = "axiomref"
}

\end{chunk}

\index{Lazard, Daniel}
\begin{chunk}{axiom.bib}
@article{Laza01,
 author = "Lazard, Daniel",
 title = {{Solving Systems of Algebraic Equations}},
 journal = "ACM SIGSAM Bulletin",
 volume = "35",
 number = "3",
 year = "2001",
+ "We present a method for providing semantic interpretations for
+ languages with a type system featuring inheritance polymorphism. Our
+ approach is illustrated on an extension of the language Fun of
+ Cardelli and Wegner, which we interpret via a translation into an
+ extended polymorphic lambda calculus. Our goal is to interpret
+ inheritances in Fun via coercion functions which are definable in the
+ target of the translation. Existing techniques in the theory of
+ semantic domains can be then used to interpret the extended
+ polymorphic lambda calculus, thus providing many models for the
+ original language. This technique makes it possible to model a rich
+ type discipline which includes parametric polymorphism and recursive
+ types as well as inheritance. A central difficulty in providing
+ interpretations for explicit type disciplines featuring inheritance in
+ the sense discussed in this paper arises from the fact that programs
+ can typecheck in more than one way. Since interpretations follow the
+ typechecking derivations, coherence theorems are required: that is,
+ one must prove that the meaning of a program does not depend on the
+ way it was typechecked. Proofs of such theorems for our proposed
+ interpretation are the basic technical results of this paper.
+ Interestingly, proving coherence in the presence of recursive
+ types, variants, and abstract types forced us to reexamine fundamental
+ equational properties that arise in proof theory (in the form of
+ commutative reductions) and domain theory (in the form of strict vs
+ nonstrict functions).",
+ paper = "Tann93.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kauers, Manuel}
+\begin{chunk}{axiom.bib}
+@inproceedings{Kaue08b,
+ author = "Kauers, Manuel",
+ title = {{Computer Algebra for Special Function Inequalities}},
+ booktitle = "Tapas in Experimental Mathematics",
+ pages = "215235",
+ year = "2008",
abstract =
 "Let $f_1,\ldots,f_k$ be $k$ multivariate polynomials which have
 a finite number of common zeros in the algebraic closure of the ground
 field, counting the common zeros at infinity. An algorithm is
 given and proved which reduces the computations of these zeros to
 the resolution of a single univariate equation whose degree is the
 number of common zeros. This algorithm gives the whole algebraic
 and geometric structure of the set of zeros (multiplicities,
 conjugate zeros,...). When all the polynomials have the same degree,
 the complexity of this algorithm is polynomial relative to the generic
 number of solutions.",
 paper = "Laza01.pdf"
}

\end{chunk}

\index{Abramov, S.A.}
\begin{chunk}{axiom.bib}
@article{Abra00,
 author = "Abramov, S.A.",
 title = {{A Note on the Number of Division Steps in the Euclidean
 Algorithm}},
 journal = "ACM SIGSAM",
 volume = "34",
 number = "4",
 year = "2000",
 paper = "Abra00.pdf",
+ "Recent computer proofs for some special function inequalities are
+ presented. The algorithmic ideas underlying these computer proofs
+ are described, and the conceptual difference to existing
+ algorithms for proving special function identities is discussed.",
+ paper = "Kaue08b.pdf",
keywords = "printed"
}
\end{chunk}
\index{Essex, Christopher}
\index{Davison, Matt}
\index{Schulzky, Christian}
\begin{chunk}{axiom.bib}
@article{Esse00,
 author = "Essex, Christopher and Davison, Matt and Schulzky, Christian",
 title = {{Numerical Monsters}},
 journal = "ACM SIGSAM",
 volume = "34",
 number = "4",
 year = "2000",
 abstract =
 "When the results of certain computer calculations are shown to be not
 simply incorrect but dramatically incorrect, we have a powerful reason
 to be cautious about all computerbased calculations. In this paper we
 present a 'Rogue's Gallery' of simple calculations whose correct
 solutions are obvious to humans but whose numerical solutions are
 incorrect in pathological ways. We call these calculations, which can
 be guaranteed to wreak numerical mayhem across both software
 packages and hardware platforms, 'Numerical Monsters'. Our monsters
 can be used to provide deep insights into how computer calculations
 fail, and we use them to engender appreciation for the subject of
 numerical analysis in our students. Although these monsters are based
 on wellunderstood numerical pathologies, even experienced numerical
 analysts will find surprises in their behaviour and can use the
 lessons they bring to become even better masters of their tools.",
 paper = "Esse00.pdf"
}

\end{chunk}

\index{Buchberger, Bruno}
+\index{Callaghan, Paul}
\begin{chunk}{axiom.bib}
@article{Buch02,
 author = "Buchberger, Bruno",
 title = {{Computer Algebra: The End of Mathematics?}},
 journal = "ACM SIGSAM",
 volume = "36",
 number = "1",
 year = "2002",
 abstract =
 "Mathematical software systems, such as Mathematica, Maple, Derive,
 and so on, are substantially based on enormous advances in the area of
 mathematics known as Computer Algebra or Symbolic Mathematics. In
 fact, everything taught in high school and in the first semesters of a
 university mathematical education, is available in these systems 'at
 the touch of the button'. Will mathematics become unnecessary because
 of this? In the three sections of this essay, I answer this question
 for nonmathematicians, for mathematicians and for (future) students
 of mathematics.",
 paper = "Buch02.pdf"
}

\end{chunk}

\index{Barnett, Michael P.}
\begin{chunk}{axiom.bib}
@article{Barn08,
 author = "Barnett, Michael P.",
 title = {{Reasoning in Symbolic Computation}},
 journal = "ACM Communications in Symbolic Algebra",
 volume = "42",
 number = "1",
+@article{Call08,
+ author = "Callaghan, Paul",
+ title = {{Coercive Subtyping via Mappings of Reduction Behaviour}},
+ journal = "Electronic Notes in Theoretical Computer Science",
+ volume = "196",
+ pages = "5368",
year = "2008",
abstract =
 "I discuss notations for some styles of mathematical reasoning that
 include analogy. These notations extend the conventions of the
 mathematica package mathscape that I reported recently in the
 Journal of Symbolic Computation. The paper introduces the reasoning
 objects that I call 'precursors' and 'consequences lists'.",
 paper = "Barn08.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Lichtblau, Daniel}
\begin{chunk}{axiom.bib}
@article{Lich11,
 author = "Lichtblau, Daniel",
 title = {{Symbolic Definite (and Indefinite) Integration: Methods and
 Open Issues}},
 journal = "ACM Comm. in Computer Algebra",
 volume = "45",
 number = "1",
 year = "2011",
 link = "\url{http://www.sigsam.org/bulletin/articles/175/issue175.pdf}",
 abstract = "
 The computation of definite integrals presents one with a variety of
 choices. There are various methods such as NewtonLeibniz or Slater's
 convolution method. There are questions such as whether to split or
 merge sums, how to search for singularities on the path of
 integration, when to issue conditional results, how to assess
 (possibly conditional) convergence, and more. These various
 considerations moreover interact with one another in a multitude of
 ways. Herein we discuss these various issues and illustrate
 with examples.",
 paper = "Lich11.pdf"
+ "This paper reports preliminary work on a novel approach to
+ Coercive Subtyping that is based on relationships between
+ reduction behaviour of source and target types in coerced
+ terms. Coercive subtyping is a subset of recordbased subtyping,
+ allowing socalled coercion functions to carry the subtyping. This
+ allows many novel and powerful forms of subtyping and
+ abbreviation, with applications including interfaces to theorem
+ provers and programming with dependent type systems. However, the
+ use of coercion functions introduces nontrivial overheads, and
+ requires difficult proof of properties such as coherence in order
+ to guarantee sensible results. These points restrict the
+ practicality of coercive subtyping. We begin with the idea that
+ coercing a value $v$ from type $U$ to a type $T$ intuitively means
+ that we wish to compute with $v$ as if it was a value in $T$, not
+ that $v$ must be converted into a value in $T$. Instead, we
+ explore how to compute on $U$ in terms of computation on $T$, and
+ develop a framework for mapping computations on some $T$ on
+ computations on some $U$ via a simple extension of the elimination
+ rule of $T$. By exposing how computations on different types are
+ related, we gain insight on and make progress with several aspects
+ of coercive subtyping, including (a) distinguishing classes of
+ coercion and finding reasons to deprecate use of some classes; (b)
+ alternative techniques for improving key properties of coercions;
+ (c) greater efficiency from implementations of coercions.",
+ paper = "Call08.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Harper, Robert}
+\begin{chunk}{axiom.bib}
+@misc{Harp18,
+ author = "Harper, Robert",
+ title = {{Computational Type Theory}},
+ year = "2018",
+ link =
+ "\url{http://www.cs.uoregon.edu/research/summerschool/summer18/topics.php}",
+ comment = "OPLSS 2018"
}
\end{chunk}
\index{Zengler, Christoph}
\index{Kubler, Andreas}
\index{Kuchlin, Wolfgang}
+\index{Aspinall, David}
+\index{Compagnoni, Adriana}
\begin{chunk}{axiom.bib}
@article{Zeng11,
 author = "Zengler, Christoph and Kubler, Andreas and Kuchlin, Wolfgang",
 title = {{New Approaches to Boolean Quantifier Elimination}},
 journal = "ACM Comm. in Computer Algebra",
 volume = "45",
 number = "2",
 year = "2011",
+@article{Aspi01,
+ author = "Aspinall, David and Compagnoni, Adriana",
+ title = {{Subtyping Dependent Types}},
+ journal = "Theoretical Computer Science",
+ volume = "266",
+ number = "12",
+ pages = "273309",
+ year = "2001",
abstract =
 "We present four different approaches for existential Boolean
 quantifier elimination, based on model enumeration, resolution,
 knowledge compilation with projection, and substitution. We point out
 possible applications in the area of verification and we present
 preliminary benchmark results of the different approaches.",
 paper = "Zeng11.pdf",
+ "The need for subtyping in typesystems with dependent
+ types has been realized for some years. But it is hard to
+ prove that systems combining the two features have
+ fundamental properties such as subject reduction. Here we
+ investigate a subtyping extension of the system $\lambda$P
+ which is an abstract version of the type system of the
+ Edinburgh Logical Framework LF. By using an equivalent
+ formulation, we establish some important properties of
+ the new system $\lambda{\rm P}_{\le}$ including subject
+ reduction. Our analysis culminates in a complete and
+ terminating algorithm which establishes the decidability
+ of typechecking.",
+ paper = "Aspi01.pdf",
keywords = "printed"
}
\end{chunk}
\index{Stoutemyer, David R.}
+\index{Gordon, Michael J.}
+\index{Milner, Arthur J.}
+\index{Wadsworth, Christopher P.}
\begin{chunk}{axiom.bib}
@article{Stou11,
 author = "Stoutemyer, David R.",
 title = {{Ways to Implement Computer Algebra Compactly}},
 journal = "ACM Comm. in Computer Algebra",
 volume = "45",
 number = "4",
 year = "2011",
 abstract =
 "Computer algebra had to be implemented compactly to fit on early
 personal computers and handheld calculators. Compact implementation
 is still important for portable handheld devices. Also, compact
 implementation increases comprehensibility while decreasing
 development and maintenance time and cost, regardless of the platform. This
 article describes several ways to achieve compact implementations,
 including:
 \begin{itemize}
 \item Exploit evaluation followed by interpolation to avoid
 implementing a parser, such as in PicoMath
 \item Use contiguous storage as an expression stack to avoid garbage
 collection and pointerspace overhead, such as in Calculus Demon
 and TIMathEngine
 \item Use various techniques for saving code space for linkedstorage
 representation of expressions and functions, such as muMath and Derive
 \end{itemize}",
 paper = "Stou11.pdf",
 keywords = "axiomref"
+@book{Gord79,
+ author = "Gordon, Michael J. and Milner, Arthur J. and
+ Wadsworth, Christopher P.",
+ title = "Edinburgh LCF",
+ publisher = "SpringerVerlag",
+ year = "1979",
+ isbn = "9783540097242"
}
\end{chunk}
\index{Roche, Daniel S.}
+\index{Gordon, Michael J.}
+\index{Milner, Arthur J.}
+\index{Wadsworth, Christopher P.}
\begin{chunk}{axiom.bib}
@article{Roch11,
 author = "Roche, Daniel S.",
 title = {{Chunky and EqualSpaced Polynomial Multiplication}},
 journal = "J. Symbolic Computation",
 volume = "46",
 pages = "791806",
 year = "2011",
 abstract =
 "Finding the product of two polynomials is an essential and basic
 problem in computer algebra. While most previous results have
 focused on the worstcase complexity, we instead employ the
 technique of adaptive analysis to give an improvement in many
 ‘‘easy’’ cases. We present two adaptive measures and methods
 for polynomial multiplication, and also show how to effectively
 combine them to gain both advantages. One useful feature of these
 algorithms is that they essentially provide a gradient between
 existing ‘‘sparse’’ and ‘‘dense’’ methods. We prove that these
 approaches provide significant improvements in many cases but
 in the worst case are still comparable to the fastest existing
 algorithms.",
 paper = "Roch11.pdf"
+@incollection{Gord79a,
+ author = "Gordon, Michael J. and Milner, Arthur J. and
+ Wadsworth, Christopher P.",
+ title = "Front Matter",
+ booktitle = "Edinburgh LCF",
+ publisher = "SpringerVerlag",
+ year = "1979",
+ isbn = "9783540097242",
+ pages = "18",
+ paper = "Gord79a.pdf"
}
\end{chunk}
\index{Bailey, David H.}
\index{Borwein, Jonathan M.}
\index{Kaiser, Alexander D.}
+\index{Gordon, Michael J.}
+\index{Milner, Arthur J.}
+\index{Wadsworth, Christopher P.}
\begin{chunk}{axiom.bib}
@article{Bail14,
 author = "Bailey, David H. and Borwein, Jonathan M. and
 Kaiser, Alexander D.",
 title = {{Automated Simplification of Large Symbolic Expressions}},
 journal = "J. Symbolic Computation",
 volume = "60",
 pages = "120136",
 year = "2014",
 abstract =
 "We present a set of algorithms for automated simplification of
 symbolic constants of the form
 $\sum_i\alpha_i x_i$ with $\alpha_i$ rational and $x_i$
 complex. The included algorithms, called SimplifySum and
 implemented in Mathematica , remove redundant terms, attempt to make
 terms and the full expression real, and remove terms using repeated
 application of the multipair PSLQ integer relation detection
 algorithm. Also included are facilities for making substitutions
 according to userspecified identities. We illustrate this toolset by
 giving some realworld examples of its usage, including one, for
 instance, where the tool reduced a symbolic expression of
 approximately 100 000 characters in size enough to enable manual
 manipulation to one with just four simple terms.",
 paper = "Bail14.pdf"
+@incollection{Gord79b,
+ author = "Gordon, Michael J. and Milner, Arthur J. and
+ Wadsworth, Christopher P.",
+ title = "Introduction",
+ booktitle = "Edinburgh LCF",
+ publisher = "SpringerVerlag",
+ year = "1979",
+ isbn = "9783540097242",
+ pages = "112",
+ paper = "Gord79b.pdf"
}
\end{chunk}
\index{Bailey, David H.}
\index{Borwein, Jonathan M.}
+\index{Gordon, Michael J.}
+\index{Milner, Arthur J.}
+\index{Wadsworth, Christopher P.}
\begin{chunk}{axiom.bib}
@article{Bail11,
 author = "Bailey, David H. and Borwein, Jonathan M.",
 title = {{Highprecision Numerical Integration: Progress and Challenges}},
 journal = "J. Symbolic Computation",
 number = "46",
 pages = "741754",
 year = "2011",
 abstract =
 "One of the most fruitful advances in the field of experimental
 mathematics has been the development of practical methods for very
 highprecision numerical integration, a quest initiated by Keith
 Geddes and other researchers in the 1980s and 1990s.These
 techniques, when coupled with equally powerful integer relation
 detection methods, have resulted in the analytic evaluation of many
 integrals that previously were beyond the realm of symbolic
 techniques.This paper presents a survey of the current stateoftheart
 in this area (including results by the present authors and others),
 mentions some new results, and then sketches what challenges lie
 ahead.",
 paper = "Bail11.pdf"
+@incollection{Gord79c,
+ author = "Gordon, Michael J. and Milner, Arthur J. and
+ Wadsworth, Christopher P.",
+ title = "ML",
+ booktitle = "Edinburgh LCF",
+ publisher = "SpringerVerlag",
+ year = "1979",
+ isbn = "9783540097242",
+ pages = "1361",
+ paper = "Gord79c.pdf"
}
\end{chunk}
\index{Huang, Zongyan}
\index{England, Matthew}
\index{Wilson, David}
\index{Davenport, James H.}
\index{Paulson, Lawrence C.}
+\index{Gordon, Michael J.}
+\index{Milner, Arthur J.}
+\index{Wadsworth, Christopher P.}
\begin{chunk}{axiom.bib}
@article{Huan14,
 author = "Huang, Zongyan and England, Matthew and Wilson, David and
 Davenport, James H. and Paulson, Lawrence C.",
 title = {{A Comparison of Three Heuristics to Choose the Variable
 Ordering for Cylindrical Algebraic Decomposition}},
 journal = "ACM Comm. in Computer Algebra",
 volume = "48",
 number = "3",
 year = "2014",
 abstract =
 "Cylindrical algebraic decomposition (CAD) is a key tool for problems
 in real algebraic geometry and beyond. When using CAD there is often a
 choice over the variable ordering to use, with some problems
 infeasible in one ordering but simple in another. Here we discuss a
 recent experiment comparing three heuristics for making this choice on
 thousands of examples.",
 paper = "Huan14.pdf"
+@incollection{Gord79d,
+ author = "Gordon, Michael J. and Milner, Arthur J. and
+ Wadsworth, Christopher P.",
+ title = "PPLAMBDA",
+ booktitle = "Edinburgh LCF",
+ publisher = "SpringerVerlag",
+ year = "1979",
+ isbn = "9783540097242",
+ pages = "6286",
+ paper = "Gord79d.pdf"
}
\end{chunk}
\index{Hamada, Tatsuyoshi}
+\index{Gordon, Michael J.}
+\index{Milner, Arthur J.}
+\index{Wadsworth, Christopher P.}
\begin{chunk}{axiom.bib}
@article{Hama14,
 author = "Hamada, Tatsuyoshi",
 title = {{MathLibre: Personalizable Computer Environment for
 Mathematical Research}},
 volume = "48",
 number = "3",
 year = "2014",
 abstract =
 "MathLibre is a project to archive free mathematical software and free
 mathematical documents and offer them on Live Linux system. MathLibre
 Project is the direct descendant of KNOPPIX/Math Project. It provides
 a desktop for mathematicians that can be set up easily and quickly.",
 paper = "Hama14.pdf"
+@incollection{Gord79e,
+ author = "Gordon, Michael J. and Milner, Arthur J. and
+ Wadsworth, Christopher P.",
+ title = "APPENDIX",
+ booktitle = "Edinburgh LCF",
+ publisher = "SpringerVerlag",
+ year = "1979",
+ isbn = "9783540097242",
+ pages = "87159",
+ paper = "Gord79e.pdf"
}
\end{chunk}
\index{Fateman, Richard}
\begin{chunk}{axiom.bib}
@article{Fate14a,
 author = "Fateman, Richard",
 title = {{Algorithm Differentiation in Lisp: ADIL}},
 journal = "ACM Comm. in Computer Algebra",
 volume = "48",
 number = "3",
 year = "2014",
 abstract =
 "Algorithm differentiation (AD) is a technique used to transform a
 program $F$ computing a numerical function of one argument $F(x)$ into
 another program $G(p)$ that returns a pair,
 $\langle F (p), F^\prime(p)\rangle$ where by $F^\prime(p)$
 we mean the derivative of $F$ with respect to its argument $x$,
 evaluated at $x = p$. That is, we have a program AD that takes as input
 a program, and returns another: $G := AD(F)$. Over the years AD
 programs have been developed to allow $F$ to be expressed in some
 specialized variant of a popular programming language L (FORTRAN, C,
 Matlab, Python) and where $G$ is delivered in that language $L$ or some
 other. Alternatively, executing $F(p)$ is some environment will deliver
 $\langle F^\prime(p), F(p)\rangle$ directly. AD tools have also
 been incorporated in computer algebra systems (CAS) such as
 Maple. A CAS is hardly necessary for the task of writing the AD
 program, since the main requirement is a set of tools for
 manipulation of an internal (typically tree) form of a program.
 In Lisp, a normal program is already in this form and so the AD
 program in Lisp (ADIL), the target $F$ and the product $G$ can
 all be expressed compactly in Lisp. In spite of the brevity and
 extensibility of the ADIL program, we can provide
 features which are unsupported in other AD programs. In particular,
 recursive functions are easily accommodated. Our perspective here is
 to point out that for scientists who write programs in Lisp or any
 language that can be converted to Lisp, AD is easily at hand.",
 paper = "Fate14a.pdf"
}

\end{chunk}

\index{England, M.}
\index{ChebTerrab, E.}
\index{Bradford, R.}
\index{Davenport, J.H.}
\index{Wilson, D.}
\begin{chunk}{axiom.bib}
@article{Engl14c,
 author = "England, M. and ChebTerrab, E. and Bradford, R. and
 Davenport, J.H. and Wilson, D.",
 title = {{Branch Cuts in MAPLE 17}},
 journal = "ACM Comm. in Computer Algebra",
 volume = "48",
 number = "1",
 year = "2014",
 abstract =
 "Accurate and comprehensible knowledge about the position of branch
 cuts is essential for correctly working with multivalued functions,
 such as the square root and logarithm. We discuss the new tools in
 Maple 17 for calculating and visualising the branch cuts of such
 functions, and others built up from them. The cuts are described in an
 intuitive and accurate form, offering substantial improvement on the
 descriptions previously available.",
 paper = "Engl14c.pdf"
}

\end{chunk}

\index{Stoutemyer, David R.}
\begin{chunk}{axiom.bib}
@article{Stou13,
 author = "Stoutemyer, David R.",
 title = {{A Computer Algebra User Interface Manifesto}},
 journal = "ACM Communications in Computer Algebra",
 volume = "47",
 number = "4",
 year = "2013",
 abstract =
 "Many computer algebra systems have more than 1000 builtin functions,
 making expertise difficult. Using mock dialog boxes, this article
 describes a proposed interactive generalpurpose wizard for organizing
 optional transformations and allowing easy fine grain control over the
 form of the result – even by amateurs. This wizard integrates ideas
 including:
 \begin{itemize}
 \item flexible subexpression selection;
 \item complete control over the ordering of variables and
 commutative operands, with wellchosen defaults;
 \item interleaving the choice of successively less main
 variables with applicable function choices to provide detailed
 control without incurring a combinatorial number of applicable
 alternatives at any one level;
 \item quick applicability tests to reduce the listing of
 inapplicable transformations;
 \item using an organizing principle to order the alternatives
 in a helpful manner;
 \item labeling quicklycomputed alternatives in dialog boxes
 with a preview of their results, using ellipsis elisions if
 necessary or helpful;
 \item allowing the user to retreat from a sequence of choices
 to explore other branches of the tree of alternatives – or to
 return quickly to branches already visited;
 \item allowing the user to accumulate more than one of the
 alternative forms;
 \item integrating direct manipulation into the wizard; and
 \item supporting not only the usual inputresult pair mode, but
 also the useful alternative derivational and in situ replacement
 modes in a unified window.
 \end{itemize}",
 paper = "Stou13.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Sorenson, Jonathan}
+\index{Reynolds, John C.}
\begin{chunk}{axiom.bib}
@article{Sore94,
 author = "Sorenson, Jonathan",
 title = {{Two Fast GCD Algorithms}},
 journal = "Journal of Algorithms",
 volume = "16",
 pages = "110144",
+@misc{Reyn94,
+ author = "Reynolds, John C.",
+ title = {{An Introduction to the Polymorphic Lambda Calculus}},
year = "1994",
 abstract =
 "We present two new algorithms for computing geatest common
 divisors: the right and leftshift $k$ary GCD algorithms. These
 algorithms are generalizations of the binary and leftshift binary
 algorithms. Interestingly, both new algorithms have sequential
 versions that are practical and efficient and parallel versions
 that rival the best previous parallel GCD algorithms. We show that
 sequential versions of both algorithms take $O(n^2/log~n)$ bit
 operations in the worst case to compute the GCD of two nbit
 integers. This compares favorably to the Euclidean and moth
 precision versions of these GCD algorithms, and we found that both
 $k$ary algorithms are faster than the Euclidean and binary
 algorithms on inputs of 100 to 1000 decimal digits in length. We
 show that parallel versions of both algorithms match the
 complexity of the best previous parallel GCD algorithm due to Chor
 and Goldreich. Specifically, if $log~n\le k \le 2^n$ and $k$ is a
 power of two, then both algorithms run in $O(n/log~n+log^2~n)$
 time using a number of processors bounded by a polynomial in $n$
 and $k$ on a common CRCW PRAM. We also discuss extended versions
 of both algorithms.",
 paper = "Sore94.pdf",
+ paper = "Reyn94.pdf",
keywords = "printed"
}
\end{chunk}
\index{Weber, Kenneth}
\index{Trevisan, Vilmar}
\index{Martins, Luiz Felipe}
\begin{chunk}{axiom.bib}
@article{Webe05a,
 author = "Weber, Kenneth and Trevisan, Vilmar and Martins, Luiz Felipe",
 title = {{A Modular Integer GCD Algorithm}},
 journal = "J. of Algorithms",
 volume = "54",
 year = "2005",
 pages = "152167",
 abstract =
 "This paper describes the first algorithm to compute the greatest
 common divisor (GCD) of two $n$bit integers using a modular
 representation for intermediate values $U$, $V$ and also for the
 result. It is based on a reduction step, similar to one used in
 the accelerated algorithm when $U$ and $V$ are close to the same
 size, that replaces $U$ by $(U-bV)/p$, where $p$ is one of the
 prime moduli and $b$ is the unique integer in the interval
 $(-p/2,p/2)$ such that $b\equiv UV^{-1} (mod~p)$. When the
 algorithm is executed on a bit common CRCW PRAM with
 $O(n~log~n~log~log~log~n)$ processors, it takes $(O(n)$ time in
 the worst case. A heuristic model of the average case yields
 $O(n/log~n)$ time on the same number of processors.",
 paper = "Webe05a.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Schonhage, A.}
\begin{chunk}{axiom.bib}
@article{Scho88,
 author = "Schonhage, A.",
 title = {{Probabilistic Computation of Integer Polynomial GCDs}},
 journal = "J. of Algorithms",
 volume = "9",
 pages = "365371",
 year = "1988",
 abstract =
 "We describe a probabilistic algorithm for the computation of the
 gcd of two univariate integer polynomials of degrees $\le n$ with
 their $l^1$norms being bounded by $2^h$ and estimate its expected
 running time by a worstcase bound of
 $O(n(n+h)^{l+\epsilon(l)})$.",
 paper = "Scho88.pdf",
 keywords = "printed"
}

\end{chunk}

\index{LeGuin, Ursula K.}
\begin{chunk}{axiom.bib}
@book{Legu76,
 author = "LeGuin, Ursula K.",
 title = {{The Left Hand of Darkness}},
 publisher = "Penguin Random House",
 year = "1976",
 isbn = "9781101665398"
}

\end{chunk}

\index{Shulman, Michael}
+\index{Meili, Mario}
\begin{chunk}{axiom.bib}
@misc{Shul18,
 author = "Shulman, Michael",
 title = {{Linear Logic for Constructive Mathematics}},
 link = "\url{https://arxiv.org/pdf/1805.07518.pdf}",
 year = "2018",
 abstract =
 "We show that numerous distinctive concepts of constructive
 mathematics arise automatically from an interpretation of 'linear
 higher-order logic' into intuitionistic higher-order logic via a Chu
 construction. This includes apartness relations, complemented
 subsets, anti-subgroups and anti-ideals, strict and non-strict order
 pairs, cut-valued metrics, and apartness spaces. We also explain the
 constructive bifurcation of classical concepts using the choice
 between multiplicative and additive linear connectives. Linear logic
 thus systematically 'constructivizes' classical definitions and deals
 automatically with the resulting bookkeeping, and could potentially
 be used directly as a basis for constructive mathematics in place
 of intuitionistic logic.",
 paper = "Shul18.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Khan, Muhammad Taimoor}
\begin{chunk}{axiom.bib}
@techreport{Khan13,
 author = "Khan, Muhammad Taimoor",
 title = {{On the Formal Verification of Maple Programs}},
 year = "2013",
 type = "technical report",
 number = "1307",
 institution = "RISC-Linz",
 abstract =
 "In this paper, we present an example-based demonstration of our
 recent results on the formal verification of programs written in the
 computer algebra language (Mini)Maple (a slightly modified subset of
 Maple). The main goal of this work is to develop a verification
 framework for behavioral analysis of MiniMaple programs. For
 verification, we translate an annotated MiniMaple program into the
 language Why3ML of the intermediate verification tool Why3 developed
 at LRI, France. We generate verification conditions by the
 corresponding component of Why3 and later prove the correctness of
 these conditions by various automatic and interactive theorem
 provers supported by the Why3 back-end. We have used the verification
 framework to verify some parts of the main test example of our
 verification framework, the Maple package DifferenceDifferential
 developed at our institute to compute bivariate difference-differential
 polynomials using relative Gr{\"o}bner bases.",
 paper = "Khan13.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Daly, Timothy}
\begin{chunk}{axiom.bib}
@misc{Daly03,
 author = "Daly, Timothy",
 title = {{Axiom Website: http://axiom-developer.org}},
 link = "\url{http://axiom-developer.org}",
 keywords = "axiomref",
 year = "2003"
}

\end{chunk}

\index{Dijkstra, Edsger W.}
\begin{chunk}{axiom.bib}
@article{Dijk75,
 author = "Dijkstra, Edsger W.",
 title = {{Guarded Commands, Nondeterminacy and Formal Derivation
 of Programs}},
 journal = "Comm. of the ACM",
 volume = "18",
 number = "8",
 year = "1975",
 pages = "453--457",
 abstract =
 "So-called 'guarded commands' are introduced as a building block
 for alternative and repetitive constructs that allow
 nondeterministic program components for which at least the
 activity evoked, but possibly even the final state, is not
 necessarily uniquely determined by the initial state. For the
 formal derivation of programs expressed in terms of these
 constructs, a calculus will be shown.",
 paper = "Dijk75.pdf",
 keywords = "printed"
}

\end{chunk}

\index{van Emden, M.H.}
\index{Kowalski, R.A.}
\begin{chunk}{axiom.bib}
@article{Emde76,
 author = "van Emden, M.H. and Kowalski, R.A.",
 title = {{The Semantics of Predicate Logic as a Programming Language}},
 journal = "J. Association for Computing Machinery",
 volume = "23",
 number = "4",
 year = "1976",
 pages = "733--742",
 abstract =
 "Sentences in first-order predicate logic can be usefully interpreted
 as programs. In this paper the operational and fixpoint semantics of
 predicate logic programs are defined, and the connections with the
 proof theory and model theory of logic are investigated. It is
 concluded that operational semantics is a part of proof theory and
 that fixpoint semantics is a special case of model-theoretic
 semantics.",
 paper = "Emde76.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Tarski, Alfred}
\begin{chunk}{axiom.bib}
@article{Tars69,
 author = "Tarski, Alfred",
 title = {{Truth and Proof}},
 journal = "Scientific American",
 volume = "220",
 number = "6",
 pages = "63--77",
 year = "1969",
 paper = "Tars69.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Apt, Krzysztof R.}
\index{Bezem, Marc}
\begin{chunk}{axiom.bib}
@inbook{Aptx99,
 author = "Apt, Krzysztof R. and Bezem, Marc",
 title = {{Formulas as Programs}},
 booktitle = "The Logic Programming Paradigm",
 year = "1999",
 publisher = "Springer Berlin Heidelberg",
 pages = "75--107",
 isbn = "9783642600852",
 abstract =
 "We provide here a computational interpretation of first-order
 logic based on a constructive interpretation of satisfiability
 w.r.t. a fixed but arbitrary interpretation. In this approach the
 formulas themselves are programs. This contrasts with the
 so-called formulas as types approach in which the proofs of the
 formulas are typed terms that can be taken as programs. This view
 of computing is inspired by logic programming and constraint logic
 programming but differs from them in a number of crucial aspects.

 Formulas as programs is argued to yield a realistic approach to
 programming that has been realized in the implemented programming
 language Alma-0 Apt, Brunekreef, Partington and Schaerf (1998)
 that combines the advantages of imperative and logic
 programming. The work here reported can also be used to reason
 about the correctness of non-recursive Alma-0 programs that do not
 include destructive assignment.",
 paper = "Aptx99.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Meili, Mario}
\begin{chunk}{axiom.bib}
@misc{Meilxx,
 author = "Meili, Mario",
 title = {{Polymorphic Lambda Calculus}},
 year = "unknown",
 link = "\url{http://wiki.ifs.hsr.ch/SemProgAnTr/files/mmeili_polymorphic_lambda_calculus_final.pdf}",
 paper = "Meilxx.pdf",
 keywords = "printed"
}

\end{chunk}
\index{Mazur, Barry}
\begin{chunk}{axiom.bib}
@misc{Mazu07,
 author = "Mazur, Barry",
 title = {{When is One Thing Equal to Some Other Thing?}},
 link = "\url{http://www.math.harvard.edu/~mazur/preprints/when_is_one.pdf}",
 year = "2007",
 paper = "Mazu07.pdf"
}

\end{chunk}

\index{Kant, Immanuel}
\begin{chunk}{axiom.bib}
@book{Kant98,
 author = "Kant, Immanuel",
 title = {{The Critique of Pure Reason}},
 publisher = "Cambridge University Press",
 year = "1998",
 isbn = "0521354021",
 link = "\url{http://strangebeautiful.com/other-texts/kant-first-critique-cambridge.pdf}",
 paper = "Kant98.pdf"
}

\end{chunk}

\index{Smith, A.}
\begin{chunk}{axiom.bib}
@techreport{Smit90,
 author = "Smith, A.",
 title = {{The Knuth-Bendix Completion Algorithm and its Specification in Z}},
 type = "technical report",
 institution = "Ministry of Defence, RSRE Malvern WORCS",
 year = "1990",
 number = "RSRE Memorandum 4323",
 abstract =
 "Proving that something is a consequence of a set of axioms can be
 very difficult. The Knuth-Bendix completion algorithm attempts to
 automate this process when the axioms are equations. The algorithm
 is bound up in the area of term rewriting, and so this memorandum
 contains an introduction to the theory of term rewriting, followed
 by an overview of the algorithm. Finally a formal specification of
 the algorithm is given using the language Z.",
 paper = "Smit90.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Wadler, Philip}
\begin{chunk}{axiom.bib}
@article{Wadl03,
 author = "Wadler, Philip",
 title = {{The Girard-Reynolds Isomorphism}},
 journal = "Information and Computation",
 volume = "186",
 number = "2",
 pages = "260--280",
 year = "2003",
 abstract =
 "The second-order polymorphic lambda calculus, F2, was
 independently discovered by Girard and Reynolds. Girard
 additionally proved a {\sl representation} theorem: every function
 on natural numbers that can be proved total in second-order
 intuitionistic propositional logic, P2, can be represented in
 F2. Reynolds additionally proved an {\sl abstraction} theorem: for
 a suitable notion of logical relation, every term in F2 takes
 related arguments into related results. We observe that the
 essence of Girard's result is a projection from P2 into F2, and
 that the essence of Reynolds's result is an embedding of F2 into
 P2, and that the Reynolds embedding followed by the Girard
 projection is the identity. The Girard projection discards all
 first-order quantifiers, so it seems unreasonable to expect that
 the Girard projection followed by the Reynolds embedding should
 also be the identity. However, we show that in the presence of
 Reynolds's parametricity property that this is indeed the case,
 for propositions corresponding to inductive definitions of
 naturals, products, sums, and fixpoint types.",
 paper = "Wadl03.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Barendregt, Hendrik Pieter}
\begin{chunk}{axiom.bib}
@article{Bare91,
 author = "Barendregt, Hendrik Pieter",
 title = {{An Introduction to Generalized Type Systems}},
 journal = "Journal of Functional Programming",
 volume = "1",
 number = "2",
 year = "1991",
 pages = "125--154",
 abstract =
 "Programming languages often come with type systems. Some of these are
 simple, others are sophisticated. As a stylistic representation of
 types in programming languages several versions of typed lambda
 calculus are studied. During the last 20 years many of these systems
 have appeared, so there is some need of classification. Working
 towards a taxonomy, Barendregt (1991) gives a fine-structure of the
 theory of constructions (Coquand and Huet 1988) in the form of a
 canonical cube of eight type systems ordered by inclusion. Berardi
 (1988) and Terlouw (1988) have independently generalized the method of
 constructing systems in the $\lambda$-cube. Moreover, Berardi (1988, 1990)
 showed that the generalized type systems are flexible enough to
 describe many logical systems. In that way the well-known
 propositions-as-types interpretation obtains a nice canonical form.",
 paper = "Bare91.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Dick, A.J.J.}
\begin{chunk}{axiom.bib}
@techreport{Dick88,
 author = "Dick, A.J.J.",
 title = {{Automated Equational Reasoning and the Knuth-Bendix Algorithm:
 An Informal Introduction}},
 year = "1988",
 type = "technical report",
 institution = "Rutherford Appleton Laboratory",
 number = "RAL-88-043",
 paper = "Dick88.pdf",
 keywords = "printed"
}

\end{chunk}
\index{Dick, Jeremy}
\begin{chunk}{axiom.bib}
@article{Dick91,
 author = "Dick, Jeremy",
 title = {{An Introduction to Knuth-Bendix Completion}},
 journal = "The Computer Journal",
 volume = "34",
 number = "1",
 year = "1991",
 abstract =
 "An informal introduction is given to the underlying concepts of
 term rewriting. Topics covered are Knuth-Bendix completion,
 completion modulo equations, unfailing completion and theorem
 proving by completion.",
 paper = "Dick91.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Knuth, Donald E.}
\index{Bendix, Peter B.}
\begin{chunk}{axiom.bib}
@incollection{Knut70,
 author = "Knuth, Donald E. and Bendix, Peter B.",
 title = {{Simple Word Problems in Universal Algebras}},
 booktitle = "Computational Problems in Abstract Algebra",
 editor = "Leech, John",
 publisher = "Pergamon Press",
 isbn = "080129757",
 year = "1970",
 pages = "263--298",
 paper = "Knut70.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Arkoudas, Konstantine}
\index{Musser, David}
\begin{chunk}{axiom.bib}
@book{Arko17,
 author = "Arkoudas, Konstantine and Musser, David",
 title = {{Fundamental Proof Methods in Computer Science}},
 publisher = "MIT Press",
 year = "2017",
 isbn = "9780262035538"
}

\end{chunk}
\index{Leech, John}
\begin{chunk}{axiom.bib}
@book{Leec70,
 author = "Leech, John",
 title = {{Computational Problems in Abstract Algebra}},
 publisher = "Pergamon Press",
 year = "1970",
 isbn = "080129757",
 paper = "Leec70.pdf"
}

\end{chunk}

\index{Dahl, O.J.}
\index{Dijkstra, E.W.}
\index{Hoare, C.A.R.}
\begin{chunk}{axiom.bib}
@book{Dahl72a,
 author = "Dahl, O.J. and Dijkstra, E.W. and Hoare, C.A.R.",
 title = {{Structured Programming}},
 publisher = "Academic Press",
 year = "1972",
 isbn = "0122005562"
}

\end{chunk}
\index{Dershowitz, Nachum}
\begin{chunk}{axiom.bib}
@article{Ders87,
 author = "Dershowitz, Nachum",
 title = {{Termination of Rewriting}},
 journal = "J. Symbolic Computation",
 volume = "3",
 year = "1987",
 pages = "69--116",
 abstract =
 "This survey describes methods for proving that systems of rewrite
 rules are terminating programs. We illustrate the use in termination
 proofs of various kinds of orderings on terms, including polynomial
 interpretations and path orderings. The effect of restrictions, such
 as linearity, on the form of rules is also considered. In general,
 however, termination is an undecidable property of rewrite systems.",
 paper = "Ders87.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Nipkow, Tobias}
\index{Tabacznyj, Christophe}
\index{Paulson, Lawrence C.}
\index{Chaieb, Amine}
\index{Rasmussen, Thomas M.}
\index{Avigad, Jeremy}
\begin{chunk}{axiom.bib}
@misc{Nipk18,
 author = "Nipkow, Tobias and Tabacznyj, Christophe and
 Paulson, Lawrence C. and Chaieb, Amine and Rasmussen, Thomas M.
 and Avigad, Jeremy",
 title = {{GCD in Isabelle}},
 link = "\url{http://isabelle.in.tum.de/dist/library/HOL/HOL/GCD.html}",
 year = "2018"
}

\end{chunk}
\index{Atkey, Robert}
\begin{chunk}{axiom.bib}
@misc{Atke18,
 author = "Atkey, Robert",
 title = {{Parameterised Notions of Computation}},
 link = "\url{https://bentnib.org/paramnotions-jfp.pdf}",
 year = "2018",
 abstract =
 "Moggi's Computational Monads and Power et al's equivalent notion of
 Freyd category have captured a large range of computational effects
 present in programming languages. Examples include non-termination,
 non-determinism, exceptions, continuations, side-effects and
 input/output. We present generalisations of both computational monads
 and Freyd categories, which we call parameterised monads and
 parameterised Freyd categories, that also capture computational
 effects with parameters. Examples of such are composable
 continuations, side-effects where the type of the state varies and
 input/output where the range of inputs and outputs varies. By also
 considering structured parameterisation, we extend the range of
 effects to cover separated side-effects and multiple independent
 streams of I/O. We also present two typed $\lambda$-calculi that
 soundly and completely model our categorical definitions --- with and
 without symmetric monoidal parameterisation --- and act as prototypical
 languages with parameterised effects.",
 paper = "Atke18.pdf",
 keywords = "printed"
}

\end{chunk}

\index{McCarthy, John}
\index{Abrahams, Paul W.}
\index{Edwards, Daniel J.}
\index{Hart, Timothy P.}
\index{Levin, Michael I.}
\begin{chunk}{axiom.bib}
@book{Mcca62,
 author = "McCarthy, John and Abrahams, Paul W. and Edwards, Daniel J.
 and Hart, Timothy P. and Levin, Michael I.",
 title = {{LISP 1.5 Programmer's Manual}},
 link = "\url{http://www.softwarepreservation.org/projects/LISP/book/LISP%201.5%20Programmers%20Manual.pdf}",
 publisher = "MIT Press",
 isbn = "0262130114",
 year = "1962"
}

\end{chunk}

\index{Bozman, Gerald}
\index{Buco, William}
\index{Daly, Timothy}
\index{Tetzlaff, William}
\begin{chunk}{axiom.bib}
@article{Bozm84,
 author = "Bozman, Gerald and Buco, William and Daly, Timothy and
 Tetzlaff, William",
 title = {{Analysis of Free Storage Algorithms -- Revisited}},
 journal = "IBM Systems Journal",
 volume = "23",
 number = "1",
 year = "1984",
 abstract =
 "Most research in free-storage management has centered around
 strategies that search a linked list and strategies that partition
 storage into predetermined sizes. Such algorithms are analyzed in
 terms of CPU efficiency and storage efficiency. The subject of this
 study is the free-storage management in the Virtual Machine/System
 Product (VM/SP) system control program. As a part of this study,
 simulations were done of established, and proposed, dynamic storage
 algorithms for the VM/SP operating system. Empirical evidence is given
 that simplifying statistical assumptions about the distribution of
 interarrival times and holding times has high predictive
 ability. Algorithms such as first-fit, modified first-fit, and
 best-fit are found to be CPU-inefficient. Buddy systems are found to
 be very fast but suffer from a high degree of internal
 fragmentation. A form of extended subpooling is shown to be as fast as
 buddy systems with improved storage efficiency. This algorithm was
 implemented for VM/SP, and then measured. Results for this algorithm
 are given for several production VM/SP systems.",
 paper = "Bozm84.pdf"
}

\end{chunk}
\ No newline at end of file
diff --git a/src/axiom-website/patches.html b/src/axiom-website/patches.html
index a794758..857322e 100644
--- a/src/axiom-website/patches.html
+++ b/src/axiomwebsite/patches.html
@@ -5966,6 +5966,8 @@ src/axiom-website/currentstate.html fix last update
src/axiomwebsite/documentation.html redirect downloads to github
20180811.01.tpd.patch
books/bookvolbib add references
+20180921.01.tpd.patch
+books/bookvolbib add references

1.9.1