From ae437738d95f1eaa82e9f119fd395593610100df Mon Sep 17 00:00:00 2001
From: Tim Daly
Date: Wed, 6 Jun 2018 22:40:28 -0400
Subject: [PATCH] books/bookvolbib add references
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Goal: Proving Axiom Correct
\index{Kohlhase, Michael}
\index{De Feo, Luca}
\index{Muller, Dennis}
\index{Pfeiffer, Markus}
\index{Rabe, Florian}
\index{Thiery, Nicolas M.}
\index{Vasilyev, Victor}
\index{Wiesing, Tom}
\begin{chunk}{axiom.bib}
@inproceedings{Kohl17,
author = "Kohlhase, Michael and De Feo, Luca and Muller, Dennis and
Pfeiffer, Markus and Rabe, Florian and Thiery, Nicolas M.
and Vasilyev, Victor and Wiesing, Tom",
title = {{Knowledge-Based Interoperability for Mathematical Software
Systems}},
booktitle = "7th Int. Conf. on Mathematical Aspects of Computer and
Information Sciences",
publisher = "Springer",
year = "2017",
pages = "195--210",
isbn = "9783319724539",
abstract =
"There is a large ecosystem of mathematical software systems.
Individually, these are optimized for particular domains and
functionalities, and together they cover many needs of practical and
theoretical mathematics. However, each system specializes on one area,
and it remains very difficult to solve problems that need to involve
multiple systems. Some integrations exist, but they are ad hoc and have
scalability and maintainability issues. In particular, there is not
yet an interoperability layer that combines the various systems into a
virtual research environment (VRE) for mathematics.
The OpenDreamKit project aims at building a toolkit for such VREs. It
suggests using a central system-agnostic formalization of mathematics
(Math-in-the-Middle, MitM) as the needed interoperability layer. In
this paper, we conduct the first major case study that instantiates
the MitM paradigm for a concrete domain as well as a concrete set of
systems. Specifically, we integrate GAP, Sage, and Singular to perform
computation in group and ring theory.
Our work involves massive practical efforts, including a novel
formalization of computational group theory, improvements to the
involved software systems, and a novel mediating system that sits at
the center of a star-shaped integration layout between mathematical
software systems.",
paper = "Kohl17.pdf",
keywords = "printed"
}
\end{chunk}
\index{Dehaye, Paul-Olivier}
\index{Iancu, Mihnea}
\index{Kohlhase, Michael}
\index{Konovalov, Alexander}
\index{Lelievre, Samuel}
\index{Muller, Dennis}
\index{Pfeiffer, Markus}
\index{Rabe, Florian}
\index{Thiery, Nicolas M.}
\index{Wiesing, Tom}
\begin{chunk}{axiom.bib}
@inproceedings{Deha16,
author = "Dehaye, Paul-Olivier and Iancu, Mihnea and Kohlhase, Michael
and Konovalov, Alexander and Lelievre, Samuel and
Muller, Dennis and Pfeiffer, Markus and Rabe, Florian and
Thiery, Nicolas M. and Wiesing, Tom",
title = {{Interoperability in the OpenDreamKit Project: The
Math-in-the-Middle Approach}},
booktitle = "Intelligent Computer Mathematics",
pages = "117--131",
year = "2016",
isbn = "9783319425467",
publisher = "Springer",
abstract =
"OpenDreamKit -- 'Open Digital Research Environment Toolkit for the
Advancement of Mathematics' -- is an H2020 EU Research Infrastructure
project that aims at supporting, over the period 2015--2019, the
ecosystem of open-source mathematical software systems. OpenDreamKit
will deliver a flexible toolkit enabling research groups to set up
Virtual Research Environments, customised to meet the varied needs of
research projects in pure mathematics and applications.
An important step in the OpenDreamKit endeavor is to foster the
interoperability between a variety of systems, ranging from
computer algebra systems over mathematical databases to
front-ends. This is the mission of the integration work
package. We report on experiments and future plans with the
Math-in-the-Middle approach. This architecture consists of a
central mathematical ontology that documents the domain and fixes a
joint vocabulary, or even a language, going beyond existing
systems such as OpenMath, combined with specifications of the
functionalities of the various systems. Interaction between
systems can then be enriched by pivoting around this architecture.",
paper = "Deha16.pdf",
keywords = "printed, axiomref"
}
\end{chunk}
\index{de Bruijn, N.G.}
\begin{chunk}{axiom.bib}
@techreport{Brui68a,
author = "de Bruijn, N.G.",
title = {{AUTOMATH, A Language for Mathematics}},
year = "1968",
type = "technical report",
number = "68-WSK-05",
institution = "Technische Hogeschool Eindhoven",
paper = "Brui68a.pdf"
}
\end{chunk}
\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@article{Farm07a,
author = "Farmer, William M.",
title = {{Chiron: A Multi-Paradigm Logic}},
journal = "Studies in Logic, Grammar and Rhetoric",
volume = "10",
number = "23",
year = "2007",
abstract =
"Chiron is a derivative of von-Neumann-Bernays-G{\"o}del (NBG) set
theory that is intended to be a practical, general-purpose logic for
mechanizing mathematics. It supports several reasoning paradigms by
integrating NBG set theory with elements of type theory, a scheme for
handling undefinedness, and a facility for reasoning about the syntax
of expressions. This paper gives a quick, informal presentation of
the syntax and semantics of Chiron and then discusses some of the
benefits Chiron provides as a multi-paradigm logic.",
paper = "Farm07a.pdf",
keywords = "axiomref, printed"
}
\end{chunk}
\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@techreport{Farm13,
author = "Farmer, William M.",
title = {{Chiron: A Set Theory with Types, Undefinedness, Quotation,
and Evaluation}},
type = "technical report",
institution = "McMaster University",
number = "SQRL Report No. 38",
year = "2013",
link = "\url{https://arxiv.org/pdf/1305.6206.pdf}",
abstract =
"Chiron is a derivative of von-Neumann-Bernays-G{\"o}del (NBG) set
theory that is intended to be a practical, general-purpose logic for
mechanizing mathematics. Unlike traditional set theories such as
Zermelo-Fraenkel (ZF) and NBG, Chiron is equipped with a type system,
lambda notation, and definite and indefinite description. The type
system includes a universal type, dependent types, dependent function
types, subtypes, and possibly empty types. Unlike traditional logics
such as first-order logic and simple type theory, Chiron admits
undefined terms that result, for example, from a function applied to
an argument outside its domain or from an improper definite or
indefinite description. The most noteworthy part of Chiron is its
facility for reasoning about the syntax of expressions. Quotation is
used to refer to a set called a construction that represents the
syntactic structure of an expression, and evaluation is used to refer
to the value of the expression that a construction represents. Using
quotation and evaluation, syntactic side conditions, schemas,
syntactic transformations used in deduction and computation rules, and
other such things can be directly expressed in Chiron. This paper
presents the syntax and semantics of Chiron, some definitions and
simple examples illustrating its use, a proof system for Chiron, and a
notion of an interpretation of one theory of Chiron in another.",
paper = "Farm13.pdf"
}
\end{chunk}
\index{Kripke, Saul}
\begin{chunk}{axiom.bib}
@article{Krip75,
author = "Kripke, Saul",
title = {{Outline of a Theory of Truth}},
journal = "Journal of Philosophy",
volume = "72",
number = "19",
year = "1975",
pages = "690--716",
paper = "Krip75.pdf",
keywords = "printed"
}
\end{chunk}
\index{Kleene, Stephen Cole}
\begin{chunk}{axiom.bib}
@book{Klee52,
author = "Kleene, Stephen Cole",
title = {{Introduction to Metamathematics}},
year = "1952",
publisher = "Ishi Press International",
isbn = "9780923891572",
paper = "Klee52.pdf"
}
\end{chunk}
\index{Smith, Peter}
\begin{chunk}{axiom.bib}
@misc{Smit15,
author = "Smith, Peter",
title = {{Some Big Books on Mathematical Logic}},
year = "2015",
paper = "Smit15.pdf",
keywords = "printed"
}
\end{chunk}
\index{Gonthier, Georges}
\begin{chunk}{axiom.bib}
@article{Gont09a,
author = "Gonthier, Georges",
title = {{Software Engineering for Mathematics}},
journal = "LNCS",
volume = "5625",
year = "2009",
pages = "27--27",
abstract =
"Despite its mathematical origins, progress in computer assisted
reasoning has mostly been driven by applications in computer science,
like hardware or protocol security verification. Paradoxically, it has
yet to gain widespread acceptance in its original domain of
application, mathematics; this is commonly attributed to a 'lack of
libraries': attempts to formalize advanced mathematics get bogged down
into the formalization of an unwieldy large set of basic results.
This problem is actually a symptom of a deeper issue: the main
function of computer proof systems, checking proofs down to their
finest details, is at odds with mathematical practice, which ignores
or defers details in order to apply and combine abstractions in
creative and elegant ways. Mathematical texts commonly leave logically
important parts of proofs as 'exercises to the reader', and are rife
with 'abuses of notation that make mathematics tractable' (according
to Bourbaki). This (essential) flexibility cannot be readily
accommodated by the narrow concept of 'proof library' used by most
proof assistants and based on 19th century first-order logic: a
collection of constants, definitions, and lemmas.
This mismatch is familiar to software engineers, who have been
struggling for the past 50 years to reconcile the flexibility needed
to produce sensible user requirements with the precision needed to
implement them correctly with computer code. Over the last 20 years
object and components have replaced traditional data and procedure
libraries, partly bridging this gap and making it possible to build
significantly larger computer systems.
These techniques can be implemented in computer proof systems by
exploiting advances in mathematical logic. Higher-order logics allow
the direct manipulation of functions; this can be used to assign
behaviour, such as simplification rules, to symbols, similarly to
objects. Advanced type systems can assign a secondary, contextual
meaning to expressions, using mechanisms such as type classes,
similarly to the metadata in software components. The two can be combined
to perform reflection, where an entire statement gets quoted as
metadata and then proved algorithmically by some decision procedure.
We propose to use a more modest, small-scale form of reflection, to
implement mathematical components. We use the type-derived metadata to
indicate how symbols, definitions and lemmas should be used in other
theories, and functions to implement this usage — roughly, formalizing
some of the exercise section of a textbook. We have applied
successfully this more engineered approach to computer proofs in our
past work on the Four Color Theorem, the Cayley-Hamilton Theorem, and
our ongoing long-term effort on the Odd Order Theorem, which is the
starting point of the proof of the Classification of Finite Simple
Groups (the famous 'monster theorem' whose proof spans 10,000 pages in
400 articles).",
paper = "Gont09a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Carette, Jacques}
\begin{chunk}{axiom.bib}
@article{Care10,
author = "Carette, Jacques",
title = {{Mechanized Mathematics}},
journal = "LNCS",
volume = "6167",
year = "2010",
pages = "157--157",
abstract =
"In the 50 years since McCarthy’s 'Recursive Functions of Symbolic
Expressions and Their Computation by Machine', what have we
learned about the realization of Leibniz’s dream of just being
able to utter 'Calculemus!' (Let us calculate!) when faced with a
mathematical dilemma?
In this talk, I will first present what I see as the most
important lessons from the past which need to be heeded by modern
designers. From the present, I will look at the context in which
computers are used, and derive further requirements. In
particular, now that computers are no longer the exclusive
playground for highly educated scientists, usability is now more
important than ever, and justifiably so.
I will also examine what I see as some principal failings of
current systems, primarily to understand some major mistakes to
avoid. These failings will be analyzed to extract what seems to be
the root mistake, and I will present my favourite solutions.
Furthermore, various technologies have matured since the creation
of many of our systems, and whenever appropriate, these should be
used. For example, our understanding of the structure of
mathematics has significantly increased, yet this is barely
reflected in our libraries. The extreme focus on efficiency by the
computer algebra community, and correctness by the (interactive)
theorem proving community should no longer be considered viable
long term strategies. But how does one effectively bridge that
gap?
I personally find that a number of (programming) language-based
solutions are particularly effective, and I will emphasize
these. Solutions to some of these problems will be illustrated
with code from a prototype of MathScheme 2.0, the system I am
developing with Bill Farmer and our research group.",
paper = "Care10.pdf",
keywords = "printed"
}
\end{chunk}
\index{Zeilberger, Doron}
\begin{chunk}{axiom.bib}
@article{Zeil10,
author = "Zeilberger, Doron",
title = {{Against Rigor}},
journal = "LNCS",
volume = "6167",
year = "2010",
pages = "262--262",
abstract =
"The ancient Greeks gave (western) civilization quite a few
gifts, but we should beware of Greeks bearing gifts. The gifts of theatre
and democracy were definitely good ones, and perhaps even the gift of
philosophy, but the 'gift' of the so-called 'axiomatic method' and the
notion of 'rigorous' proof did much more harm than good. If we want
to maximize Mathematical Knowledge, and its Management, we have to
return to Euclid this dubious gift, and give up our fanatical insistence
on perfect rigor. Of course, we should not go to the other extreme, of
demanding that everything should be non-rigorous. We should encourage
diversity of proof-styles and rigor levels, and remember that nothing is
absolutely sure in this world, and there does not exist an absolutely
rigorous proof, nor absolute certainty, and 'truth' has many shades and
levels.",
paper = "Zeil10.pdf",
keywords = "printed"
}
\end{chunk}
\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@phdthesis{Pfen87,
author = "Pfenning, Frank",
title = {{Proof Transformations in Higher-Order Logic}},
year = "1987",
school = "Carnegie Mellon University",
link = "\url{http://www-cgi.cs.cmu.edu/~fp/papers/thesis87.pdf}",
abstract =
"We investigate the problem of translating between different styles of
proof systems in higher-order logic: analytic proofs which are well
suited for automated theorem proving, and non-analytic deductions
which are well suited for the mathematician. Analytic proofs are
represented as expansion proofs, $H$, a form of the sequent calculus we
define, non-analytic proofs are represented by natural deductions. A
non-deterministic translation algorithm between expansion proofs and
$H$-deductions is presented and its correctness is proven. We also
present an algorithm for translation in the other direction and prove
its correctness. A cut-elimination algorithm for expansion proofs is
given and its partial correctness is proven. Strong termination of
this algorithm remains a conjecture for the full higher-order
system, but is proven for the first-order fragment. We extend the
translations to a non-analytic proof system which contains a primitive
notion of equality, while leaving the notion of expansion proof
unaltered. This is possible, since a non-extensional equality is
definable in our system of type theory. Next we extend analytic and
non-analytic proof systems and the translations between them to
include extensionality. Finally, we show how the methods and notions
used so far apply to the problem of translating expansion proofs into
natural deductions. Much care is taken to specify this translation in
a modular way (through tactics) which leaves a large number of choices
and therefore a lot of room for heuristics to produce elegant natural
deductions. A practically very useful extension, called symmetric
simplification, produces natural deductions which make use of lemmas
and are often much more intuitive than the normal deductions which
would be created by earlier algorithms.",
paper = "Pfen87.pdf"
}
\end{chunk}
\index{Comon, Hubert}
\begin{chunk}{axiom.bib}
@incollection{Como01,
author = "Comon, Hubert",
title = {{Inductionless Induction}},
booktitle = "Handbook of Automated Reasoning",
publisher = "Elsevier",
year = "2001",
comment = "Chapter 14",
paper = "Como01.ps"
}
\end{chunk}
\index{Schmitt, P.H.}
\begin{chunk}{axiom.bib}
@article{Schm86,
author = "Schmitt, P.H.",
title = {{Computational Aspects of Three-Valued Logic}},
journal = "LNCS",
volume = "230",
year = "1986",
abstract =
"This paper investigates a three-valued logic $L_3$, that has been
introduced in the study of natural language semantics. A complete
proof system based on a three-valued analogon of negative resolution
is presented. A subclass of $L_3$ corresponding to Horn clauses in
two-valued logic is defined. Its model theoretic properties are
studied and it is shown to admit a PROLOG-style proof procedure.",
paper = "Schm86.pdf"
}
\end{chunk}
\index{Comon, Hubert}
\index{Lescanne, Pierre}
\begin{chunk}{axiom.bib}
@article{Como89,
author = "Comon, Hubert and Lescanne, Pierre",
title = {{Equational Problems and Disunification}},
journal = "J. Symbolic Computation",
volume = "7",
number = "3-4",
year = "1989",
pages = "371--425",
abstract =
"Roughly speaking, an equational problem is a first order formula
whose only predicate symbol is $=$. We propose some rules for the
transformation of equational problems and study their correctness in
various models. Then, we give completeness results with respect to
some 'simple' problems called solved forms. Such completeness results
still hold when adding some control which moreover ensures
termination. The termination proofs are given for a 'weak' control and
thus hold for the (large) class of algorithms obtained by restricting
the scope of the rules. Finally, it must be noted that a by-product of
our method is a decision procedure for the validity in the Herbrand
Universe of any first order formula with the only predicate symbol $=$.",
paper = "Como89.pdf"
}
\end{chunk}
\index{Peyton Jones, Simon}
\begin{chunk}{axiom.bib}
@inproceedings{Jone96,
author = "{Peyton Jones}, Simon",
title = {{Compiling Haskell by Program Transformation: A Report from
the Trenches}},
booktitle = "Proc. European Symposium on Programming",
year = "1996",
publisher = "Springer",
abstract =
"Many compilers do some of their work by means of
correctness-preserving, and hopefully performance-improving, program
transformations. The Glasgow Haskell Compiler (GHC) takes this idea
of 'compilation by transformation' as its war-cry, trying to express
as much as possible of the compilation process in the form of
program transformations.
This paper reports on our practical experience of the
transformational approach to compilation, in the context of a
substantial compiler.",
paper = "Jone96.pdf"
}
\end{chunk}
\index{Hanus, Michael}
\begin{chunk}{axiom.bib}
@article{Hanu90,
author = "Hanus, Michael",
title = {{Compiling Logic Programs with Equality}},
journal = "LNCS",
volume = "456",
year = "1990",
pages = "387--401",
abstract =
"Horn clause logic with equality is an amalgamation of functional and
logic programming languages. A sound and complete operational
semantics for logic programs with equality is based on resolution to
solve literals, and rewriting and narrowing to evaluate functional
expressions. This paper proposes a technique for compiling programs
with these inference rules into programs of a low-level abstract
machine which can be efficiently executed on conventional
architectures. The presented approach is based on an extension of the
Warren abstract machine (WAM). In our approach pure logic programs
without function definitions are compiled in the same way as in the
WAM-approach, and for logic programs with function definitions
particular instructions are generated for occurrences of functions
inside clause bodies. In order to obtain an efficient implementation
of functional computations, a stack of occurrences of function symbols
in goals is managed by the abstract machine. The compiler generates
the necessary instructions for the efficient manipulation of the
occurrence stack from the given equational logic programs.",
paper = "Hanu90.pdf"
}
\end{chunk}
\index{Ballerin, Clemens}
\begin{chunk}{axiom.bib}
@phdthesis{Ball99a,
author = "Ballerin, Clemens",
title = {{Computer Algebra and Theorem Proving}},
school = "Darwin College, University of Cambridge",
year = "1999",
abstract =
"Is the use of computer algebra technology beneficial for
mechanised reasoning in and about mathematical domains? Usually
it is assumed that it is. Many works in this area, however, either
have little reasoning content, or use symbolic computation only to
simplify expressions. In work that has achieved more, the used
methods do not scale up. They trust the computer algebra system
either too much or too little.
Computer algebra systems are not as rigorous as many provers. They
are not logically sound reasoning systems, but collections of
algorithms. We classify soundness problems that occur in computer
algebra systems. While many algorithms and their implementations
are perfectly trustworthy, the semantics of symbols is often
unclear and leads to errors. On the other hand, more robust
approaches to interface external reasoners to provers are not
always practical because the mathematical depth of proofs
algorithms in computer algebra are based on can be enormous.
Our own approach takes both trustworthiness of the overall system
and efficiency into account. It relies on using only reliable
parts of a computer algebra system, which can be achieved by
choosing a suitable library, and deriving specifications for these
algorithms from their literature.
We design and implement an interface between the prover Isabelle
and the computer algebra library Sumit and use it to prove
non-trivial theorems from coding theory. This is based on the
mechanisation of the algebraic theories of rings and
polynomials. Coding theory is an area where proofs do have a
substantial amount of computational content. Also, it is realistic
to assume that the verification of an encoding or decoding device
could be undertaken in, and indeed, be simplified by, such a
system.
The reason why semantics of symbols is often unclear in current
computer algebra systems is not mathematical difficulty, but the
design of those systems. For Gaussian elimination we show how the
soundness problem can be fixed by a small extension, and without
losing efficiency. This is a prerequisite for the efficient use of
the algorithm in a prover.",
paper = "Ball99a.pdf",
keywords = "axiomref, printed"
}
\end{chunk}
\index{Nederpelt, Rob}
\index{Geuvers, Herman}
\begin{chunk}{axiom.bib}
@book{Nede14,
author = "Nederpelt, Rob and Geuvers, Herman",
title = {{Type Theory and Formal Proof}},
year = "2014",
publisher = "Cambridge University Press",
isbn = "9781107036505"
}
\end{chunk}
\index{Robinson, Alan}
\index{Voronkov, Andrei}
\begin{chunk}{axiom.bib}
@book{Robi01,
editor = "Robinson, Alan and Voronkov, Andrei",
title = {{Handbook of Automated Reasoning (2 Volumes)}},
year = "2001",
publisher = "MIT Press",
isbn = "0262182238"
}
\end{chunk}
\index{Barendregt, Henk}
\index{Wiedijk, Freek}
\begin{chunk}{axiom.bib}
@article{Bare05,
author = "Barendregt, Henk and Wiedijk, Freek",
title = {{The Challenge of Computer Mathematics}},
journal = "Phil. Trans. Royal Society",
volume = "363",
pages = "2351--2375",
year = "2005",
abstract =
"Progress in the foundations of mathematics has made it possible to
formulate all thinkable mathematical concepts, algorithms and proofs
in one language and in an impeccable way. This is not in spite of, but
partially based on the famous results of G{\"o}del and Turing. In this way
statements are about mathematical objects and algorithms, proofs show
the correctness of statements and computations, and computations are
dealing with objects and proofs. Interactive computer systems for a
full integration of defining, computing and proving are based on
this. The human defines concepts, constructs algorithms and provides
proofs, while the machine checks that the definitions are well formed
and the proofs and computations are correct. Results formalized so far
demonstrate the feasibility of this 'computer mathematics'. Also there
are very good applications. The challenge is to make the systems more
mathematician-friendly, by building libraries and tools. The eventual
goal is to help humans to learn, develop, communicate, referee and
apply mathematics.",
paper = "Bare05.pdf",
keywords = "printed"
}
\end{chunk}
\index{Klaeren, Herbert A.}
\begin{chunk}{axiom.bib}
@article{Klae84,
author = "Klaeren, Herbert A.",
title = {{A Constructive Method for Abstract Algebraic Software
Specification}},
journal = "Theoretical Computer Science",
volume = "30",
year = "1984",
pages = "139--204",
abstract =
"A constructive method for abstract algebraic software
specification is presented, where the operations are not
implicitly specified by equations but by an explicit recursion on
the generating operations of an algebra characterizing the
underlying data structure. The underlying algebra itself may be
equationally specified since we cannot assume that all data
structures will correspond to free algebras. This implies that we
distinguish between generating and defined operations and that the
underlying algebra has a mechanism of well-founded decomposition
w.r.t. the generating operations. We show that the explicit
specification of operations using so-called structural recursive
schemata offers advantages over purely equational specifications,
especially concerning the safeness of enrichments, the ease of
semantics description and the separation between the underlying
data structure and the operations defined on it.",
keywords = "printed"
}
\end{chunk}
\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@misc{Pfen04,
author = "Pfenning, Frank",
title = {{Automated Theorem Proving}},
comment = "Draft of Spring 2004",
year = "2004",
keywords = "printed"
}
\end{chunk}
\index{Coquand, Thierry}
\index{Huet, Gerard}
\begin{chunk}{axiom.bib}
@article{Coqu88,
author = "Coquand, Thierry and Huet, Gerard",
title = {{The Calculus of Constructions}},
journal = "Information and Computation",
volume = "76",
number = "2-3",
pages = "95--120",
year = "1988",
paper = "Coqu88.pdf",
keywords = "printed"
}
\end{chunk}
\index{Okasaki, Chris}
\begin{chunk}{axiom.bib}
@phdthesis{Okas96,
author = "Okasaki, Chris",
title = {{Purely Functional Data Structures}},
school = "Carnegie Mellon University",
year = "1996",
comment = "CMU-CS-96-177",
isbn = "9780521663502",
abstract =
"When a C programmer needs an efficient data structure for a
particular problem, he or she can often simply look one up in any of
a number of good textbooks or handbooks. Unfortunately, programmers
in functional languages such as Standard ML or Haskell do not have
this luxury. Although some data structures designed for imperative
languages such as C can be quite easily adapted to a functional
setting, most cannot, usually because they depend in crucial ways on
assignments, which are disallowed, or at least discouraged, in
functional languages. To address this imbalance, we describe several
techniques for designing functional data structures, and numerous
original data structures based on these techniques, including multiple
variations of lists, queues, double-ended queues, and heaps, many
supporting more exotic features such as random access or efficient
catenation.
In addition, we expose the fundamental role of lazy evaluation in
amortized functional data structures. Traditional methods of
amortization break down when old versions of a data structure, not
just the most recent, are available for further processing. This
property is known as persistence , and is taken for granted in
functional languages. On the surface, persistence and amortization
appear to be incompatible, but we show how lazy evaluation can be used
to resolve this conflict, yielding amortized data structures that are
efficient even when used persistently. Turning this relationship
between lazy evaluation and amortization around, the notion of
amortization also provides the first practical techniques for
analyzing the time requirements of non-trivial lazy programs.
Finally, our data structures offer numerous hints to programming
language designers, illustrating the utility of combining strict and
lazy evaluation in a single language, and providing non-trivial
examples using polymorphic recursion and higher-order, recursive
modules.",
paper = "Okas96.pdf"
}
\end{chunk}
\index{Letouzey, Pierre}
\begin{chunk}{axiom.bib}
@inproceedings{Leto04,
author = "Letouzey, Pierre",
title = {{A New Extraction for Coq}},
booktitle = "Types for Proofs and Programs. TYPES 2002",
publisher = "Springer",
pages = "617--635",
year = "2004",
abstract =
"We present here a new extraction mechanism for the Coq proof
assistant. By extraction, we mean automatic generation of
functional code from Coq proofs, in order to produce certified
programs. In former versions of Coq, the extraction mechanism
suffered several limitations and in particular worked only with a
subset of the language. We first discuss difficulties encountered
and solutions proposed to remove these limitations. Then we give a
proof of correctness for a theoretical model of the new
extraction. Finally we describe the actual implementation.",
paper = "Leto04.pdf",
keywords = "printed"
}
\end{chunk}
\index{Abrams, Marshall D.}
\index{Zelkowitz, Marvin V.}
\begin{chunk}{axiom.bib}
@article{Abra95,
author = "Abrams, Marshall D. and Zelkowitz, Marvin V.",
title = {{Striving for Correctness}},
journal = "Computers and Security",
volume = "14",
year = "1995",
pages = "719--738",
paper = "Abra95.pdf"
}
\end{chunk}
\index{Parnas, David L.}
\begin{chunk}{axiom.bib}
@article{Parn72,
author = "Parnas, David L.",
title = {{A Technique for Software Module Specification with
Examples}},
journal = "CACM",
volume = "15",
number = "5",
year = "1972",
pages = "330--336",
abstract =
"This paper presents an approach to writing specifications for
parts of software systems. The main goal is to provide
specifications sufficiently precise and complete that other pieces
of software can be written to interact with the piece specified
without additional information. The secondary goal is to include
in the specification no more information than necessary to meet
the first goal. The technique is illustrated by means of a variety
of examples from a tutorial system.",
paper = "Parn72.pdf",
keywords = "printed"
}
\end{chunk}
\index{Johnson, C.W.}
\begin{chunk}{axiom.bib}
@article{John96,
author = "Johnson, C.W.",
title = {{Literate Specifications}},
journal = "Software Engineering Journal",
volume = "11",
number = "4",
year = "1996",
pages = "225--237",
paper = "John96.pdf",
abstract =
"The increasing scale and complexity of software is imposing serious
burdens on many industries. Formal notations, such as Z, VDM and
temporal logic, have been developed to address these problems. There
are, however, a number of limitations that restrict the use of
mathematical specifications for large-scale software development. Many
projects take months or years to complete. This creates difficulties
because abstract mathematical requirements cannot easily be used by
new members of a development team to understand past design
decisions. Formal specifications describe what software must do, they
do not explain why it must do it. In order to avoid these limitations,
a literate approach to software engineering is proposed. This
technique integrates a formal specification language and a semi-formal
design rationale. The Z schema calculus is used to represent what a
system must do. In contrast, the Questions, Options and Criteria
notation is used to represent the justifications that lie behind
development decisions. Empirical evidence is presented that suggests
the integration of these techniques provides significant benefits over
previous approaches to mathematical analysis and design techniques. A
range of tools is described that have been developed to support our
literate approach to the specification of large-scale software systems.",
keywords = "printed"
}
\end{chunk}
\index{Finney, Kate}
\begin{chunk}{axiom.bib}
@article{Finn96,
author = "Finney, Kate",
title = {{Mathematical Notation in Formal Specifications: Too
Difficult for the Masses?}},
journal = "IEEE Trans. on Software Engineering",
volume = "22",
number = "2",
year = "1996",
pages = "158--159",
abstract =
"The phrase 'not much mathematics required' can imply a
variety of skill levels. When this phrase is applied to computer
scientists, software engineers, and clients in the area of formal
specification, the word 'much' can be widely misinterpreted with
disastrous consequences. A small experiment in reading
specifications revealed that students already trained in discrete
mathematics and the specification notation performed very poorly;
much worse than could reasonably be expected if formal methods
proponents are to be believed.",
paper = "Finn96.pdf",
keywords = "printed"
}
\end{chunk}
\index{Moore, Andrew P.}
\index{Payne Jr., Charles N.}
\begin{chunk}{axiom.bib}
@inproceedings{Moor96,
author = "Moore, Andrew P. and Payne Jr., Charles N.",
title = {{Increasing Assurance with Literate Programming Techniques}},
booktitle = "Conf. on Computer Assurance COMPASS'96",
publisher = "National Institute of Standards and Technology",
year = "1996",
pages = "187198",
abstract =
"The assurance argument that a trusted system satisfies its
information security requirements must be convincing, because the
argument supports the accreditation decision to allow the computer to
process classified information in an operational
environment. Assurance is achieved through understanding, but some
evidence that supports the assurance argument can be difficult to
understand. This paper describes a novel application of a technique,
called literate programming [11], that significantly improves the
readability of the assurance argument while maintaining its
consistency with formal specifications that are input to specification
and verification systems. We describe an application of this
technique to a simple example and discuss the lessons learned from
this effort.",
paper = "Moor96.pdf",
keywords = "printed"
}
\end{chunk}
\index{Nissanke, Nimal}
\begin{chunk}{axiom.bib}
@book{Niss99,
author = "Nissanke, Nimal",
title = {{Formal Specification}},
publisher = "Springer",
year = "1999",
isbn = "9781852330026",
paper = "Niss99.pdf"
}
\end{chunk}
\index{Vienneau, Robert L.}
\begin{chunk}{axiom.bib}
@misc{Vien93,
author = "Vienneau, Robert L.",
title = {{A Review of Formal Methods}},
year = "1993",
paper = "Vien93.pdf",
keywords = "printed"
}
\end{chunk}
\index{Murthy, S.G.K}
\index{Sekharam, K. Raja}
\begin{chunk}{axiom.bib}
@article{Murt09,
author = "Murthy, S.G.K and Sekharam, K. Raja",
title = {{Software Reliability through Theorem Proving}},
journal = "Defence Science Journal",
volume = "59",
number = "3",
year = "2009",
pages = "314317",
abstract =
"Improving software reliability of missioncritical systems is
widely recognised as one of the major challenges. Early detection
of errors in software requirements, designs and implementation,
need rigorous verification and validation techniques. Several
techniques comprising static and dynamic testing approaches are
used to improve reliability of mission critical software; however
it is hard to balance development time and budget with software
reliability. Particularly using dynamic testing techniques, it is
hard to ensure software reliability, as exhaustive testing is not
possible. On the other hand, formal verification techniques
utilise mathematical logic to prove correctness of the software
based on given specifications, which in turn improves the
reliability of the software. Theorem proving is a powerful formal
verification technique that enhances the software reliability for
mission critical aerospace applications. This paper discusses the
issues related to software reliability and theorem proving used to
enhance software reliability through formal verification
technique, based on the experiences with STeP tool, using the
conventional and internationally accepted methodologies, models,
theorem proving techniques available in the tool without proposing
a new model.",
paper = "Murt09.pdf",
keywords = "printed"
}
\end{chunk}
\index{Paulson, Lawrence C.}
\begin{chunk}{axiom.bib}
@incollection{Paul90a,
author = "Paulson, Lawrence C.",
title = {{Isabelle: The Next 700 Theorem Provers}},
booktitle = "Logic and Computer Science",
publisher = "Academic Press",
pages = "361386",
year = "1990",
paper = "Paul90a.pdf"
}
\end{chunk}
\index{Beeson, M.}
\begin{chunk}{axiom.bib}
@article{Bees16,
author = "Beeson, M.",
title = {{Mixing Computations and Proofs}},
journal = "J. of Formalized Reasoning",
volume = "9",
number = "1",
pages = "7199",
year = "2016",
abstract =
"We examine the relationship between proof and computation in
mathematics, especially in formalized mathematics. We compare the
various approaches to proofs with a significant computational
component, including (i) verifying the algorithms, (ii) verifying the
results of the unverified algorithms, and (iii) trusting an external
computation.",
paper = "Bees16.pdf",
keywords = "printed"
}
\end{chunk}
\index{Beeson, Michael}
\begin{chunk}{axiom.bib}
@misc{Bees14,
author = "Beeson, M.",
title = {{Mixing Computations and Proofs}},
comment = "slides",
year = "2014",
link = "\url{http://www.cs.ru.nl/qed20/slides/beesonqed20.pdf}",
paper = "Bees14.pdf"
}
\end{chunk}
\index{Gunter, Elsa L.}
\begin{chunk}{axiom.bib}
@inproceedings{Gunt89a,
author = "Gunter, Elsa L.",
booktitle = "Int. Workshop on Extensions of Logic Programming",
publisher = "Springer",
year = "1989",
pages = "223244",
abstract =
"In this article, we discuss several possible extensions to
traditional logic programming languages. The specific extensions
proposed here fall into two categories: logical extensions and the
addition of constructs to allow for increased control. There is a
unifying theme to the proposed logical extensions, and that is the
scoped introduction of extensions to a programming context. More
specifically, these extensions are the ability to introduce variables
whose scope is limited to the term in which they occur (i.e. $\lambda$-bound
variables within $\lambda$-terms), the ability to introduce into a goal a
fresh constant whose scope is limited to the derivation of that goal,
and the ability to introduce into a goal a program clause whose scope
is limited to the derivation of that goal. The purpose of the
additions for increased control is to facilitate the raising and
handling of failures.
To motivate these various extensions, we have repeatedly appealed to
examples related to the construction of a generic theorem prover. It
is our thesis that this problem domain is specific enough to lend
focus when one is considering various language constructs, and yet
complex enough to encompass many of the general difficulties found in
other areas of symbolic computation.",
paper = "Gunt89a.pdf"
}
\end{chunk}
\index{Paulson, Lawrence C.}
\index{Smith, Andrew W.}
\begin{chunk}{axiom.bib}
@inproceedings{Paul89,
author = "Paulson, Lawrence C. and Smith, Andrew W.",
title = {{Logic Programming, Functional Programming, and
Inductive Definitions}},
booktitle = "Int. Workshop on Extensions of Logic Programming",
publisher = "Springer",
year = "1989",
pages = "283309",
abstract =
"The unification of logic and functional programming, like the Holy
Grail, is sought by countless people [6, 14]. In reporting our
attempt, we first discuss the motivation. We argue that logic
programming is still immature, compared with functional programming,
because few logic programs are both useful and pure. Functions can
help to purify logic programming, for they can eliminate certain uses
of the cut and can express certain negations positively.
More generally, we suggest that the traditional paradigm  logic
programming as firstorder logic  is seriously out of step with
practice. We offer an alternative paradigm. We view the logic program
as an inductive definition of sets and relations. This view explains
certain uses of Negation as Failure, and explains why most attempts to
extend PROLOG to larger fragments of first-order logic have not been
successful. It suggests a logic language with functions,
incorporating equational unification.
We have implemented a prototype of this language. It is very slow,
but complete, and appears to be faster than some earlier
implementations of narrowing. Our experiments illustrate the
programming style and shed light on the further development of such
languages.",
paper = "Paul89.pdf"
}
\end{chunk}
\index{Weirich, Jim}
\begin{chunk}{axiom.bib}
@misc{Weir12,
author = "Weirich, Jim",
title = {{Y Not?  Adventures in Functional Programming}},
year = "2012",
link = "\url{https://www.infoq.com/presentations/YCombinator}",
abstract = "Explains the YCombinator"
}
\end{chunk}
\index{Beeson, Michael}
\begin{chunk}{axiom.bib}
@article{Bees06,
author = "Beeson, Michael",
title = {{Mathematical Induction in OtterLambda}},
journal = "J. Automated Reasoning",
volume = "36",
number = "4",
pages = "311-344",
abstract =
"Otterlambda is Otter modified by adding code to implement an
algorithm for lambda unification. Otter is a resolutionbased,
clauselanguage firstorder prover that accumulates deduced clauses
and uses strategies to control the deduction and retention of
clauses. This is the first time that such a firstorder prover has
been combined in one program with a unification algorithm capable of
instantiating variables to lambda terms to assist in the
deductions. The resulting prover has all the advantages of the
proofsearch algorithm of Otter (speed, variety of inference rules,
excellent handling of equality) and also the power of lambda
unification. We illustrate how these capabilities work well together
by using Otterlambda to find proofs by mathematical induction. Lambda
unification instantiates the induction schema to find a useful
instance of induction, and then Otter's firstorder reasoning can be
used to carry out the base case and induction step. If necessary,
induction can be used for those, too. We present and discuss a variety
of examples of inductive proofs found by Otterlambda: some in pure
Peano arithmetic, some in Peano arithmetic with defined predicates,
some in theories combining algebra and the natural numbers, some
involving algebraic simplification (used in the induction step) by
simplification code from MathXpert, and some involving list induction
instead of numerical induction. These examples demonstrate the
feasibility and usefulness of adding lambda unification to a
firstorder prover.",
paper = "Bees06.pdf",
keywords = "printed"
}
\end{chunk}
\index{Barendregt, Henk}
\index{Barendsen, Erik}
\begin{chunk}{axiom.bib}
@article{Bare02,
author = "Barendregt, Henk and Barendsen, Erik",
title = {{Autarkic Computations in Formal Proofs}},
journal = "J. Automated Reasoning",
volume = "28",
pages = "321336",
year = "2002",
abstract =
"Formal proofs in mathematics and computer science are being studied
because these objects can be verified by a very simple computer
program. An important open problem is whether these formal proofs can
be generated with an effort not much greater than writing a
mathematical paper in, say, LATEX. Modern systems for proof
development make the formalization of reasoning relatively
easy. However, formalizing computations in such a manner that the
results can be used in formal proofs is not immediate. In this paper
we show how to obtain formal proofs of statements such as Prime(61) in
the context of Peano arithmetic or $(x + 1)(x + 1) = x^2 + 2x + 1$ in
the context of rings. We hope that the method will help bridge the gap
between the efficient systems of computer algebra and the reliable
systems of proof development.",
paper = "Bare02.pdf",
keywords = "printed"
}
\end{chunk}
\index{Beeson, Michael}
\begin{chunk}{axiom.bib}
@article{Bees95,
author = "Beeson, Michael",
title = {{Using Nonstandard Analysis to Ensure the Correctness of
Symbolic Computations}},
journal = "Int. J. of Foundations of Computer Science",
volume = "6",
number = "3",
pages = "299338",
year = "1995",
abstract =
"Nonstandard analysis has been put to use in a theoremprover,
where it assists in the analysis of formulae involving limits. The
theoremprover in question is used in the computer program
Mathpert to ensure the correctness of calculations in
calculus. Although nonstandard analysis is widely viewed as
nonconstructive, it can alternately be viewed as a method of
reducing logical manipulation (of formulae with quantifiers) to
computation (with rewrite rules). We give a logical theory of
nonstandard analysis which is implemented in Mathpert. We describe
a procedure for 'elimination of infinitesimals' (also implemented
in Mathpert) and prove its correctness.",
paper = "Bees95.pdf",
keywords = "printed"
}
\end{chunk}
\index{Beeson, Michael}
\index{Wiedijk, Freek}
\begin{chunk}{axiom.bib}
@article{Bees02,
author = "Beeson, Michael and Wiedijk, Freek",
title = {{The Meaning of Infinity in Calculus and Computer Algebra
Systems}},
journal = "LNCS",
volume = "2385",
year = "2002",
abstract =
"We use filters of open sets to provide a semantics justifying the
use of infinity in informal limit calculations in calculus, and in
the same kind of calculations in computer algebra. We compare the
behavior of these filters to the way Mathematica behaves when
calculating with infinity.
We stress the need to have a proper semantics for computer algebra
expressions, especially if one wants to use results and methods
from computer algebra in theorem provers. The computer algebra
method under discussion in this paper is the use of rewrite rules
to evaluate limits involving infinity.",
paper = "Bees02.pdf",
keywords = "printed"
}
\end{chunk}
\index{Zimmer, Jurgen}
\index{Dennis, Louise A.}
\begin{chunk}{axiom.bib}
@article{Zimm02,
author = "Zimmer, Jurgen and Dennis, Louise A.",
title = {{Inductive Theorem Proving and Computer Algebra in the
MathWeb Software Bus}},
journal = "LNCS",
volume = "2385",
year = "2002",
abstract =
"Reasoning systems have reached a high degree of maturity in the last
decade. However, even the most successful systems are usually not
general purpose problem solvers but are typically specialised on
problems in a certain domain. The MathWeb Software Bus (MathWebSB) is a
system for combining reasoning specialists via a common software bus.
We describe the integration of the $\lambda$Clam system, a reasoning
specialist for proofs by induction, into the MathWebSB. Due to this
integration, $\lambda$Clam now offers its theorem proving expertise
to other systems in the MathWebSB. On the other hand,
$\lambda$Clam can use the
services of any reasoning specialist already integrated. We focus on
the latter and describe first experiments on proving theorems by
induction using the computational power of the Maple system within
$\lambda$Clam.",
paper = "Zimm02.pdf",
keywords = "printed"
}
\end{chunk}
\index{Campbell, J.A.}
\begin{chunk}{axiom.bib}
@article{Camp02,
author = "Campbell, J.A.",
title = {{Indefinite Integration as a Testbed for Developments in
Multiagent Systems}},
journal = "LNCS",
volume = "2385",
year = "2002",
abstract =
"Coordination of multiple autonomous agents to solve problems that
require each of them to contribute their limited expertise in the
construction of a solution is often ensured by the use of numerical
methods such as votecounting, payoff functions, game theory and
economic criteria. In areas where there are no obvious numerical methods
for agents to use in assessing other agents’ contributions, many
questions still remain open for research. The paper reports a study of
one such area: heuristic indefinite integration in terms of agents with
different single heuristic abilities which must cooperate in finding
indefinite integrals. It examines the reasons for successes and lack
of success in performance, and draws some general conclusions about
the usefulness of indefinite integration as a field for realistic
tests of methods for multiagent systems where the usefulness of
'economic' criteria is limited. In this connection, the role of
numerical taxonomy is emphasised.",
paper = "Camp02.pdf",
keywords = "printed"
}
\end{chunk}
\index{Wiedijk, Freek}
\begin{chunk}{axiom.bib}
@misc{Wied18,
author = "Wiedijk, Freek",
title = {{Formalizing 100 Theorems}},
link = "\url{http://www.cs.ru.nl/~freek/100/}",
year = "2018"
}
\end{chunk}
\index{Wiedijk, Freek}
\begin{chunk}{axiom.bib}
@misc{Wied08,
author = "Wiedijk, Freek",
title = {{Formal Proof -- Getting Started}},
link = "\url{https://www.ams.org/notices/200811/tx081101408p.pdf}",
year = "2008",
paper = "Wied08.pdf"
}
\end{chunk}
\index{Kaliszyk, Cezary}
\begin{chunk}{axiom.bib}
@article{Kali08,
author = "Kaliszyk, Cezary",
title = {{Automating Side Conditions in Formalized Partial Functions}},
journal = "LNCS",
volume = "5144",
year = "2008",
abstract =
"Assumptions about the domains of partial functions are necessary in
stateoftheart proof assistants. On the other hand when
mathematicians write about partial functions they tend not to
explicitly write the side conditions. We present an approach to
formalizing partiality in real and complex analysis in total
frameworks that allows keeping the side conditions hidden from the
user as long as they can be computed and simplified
automatically. This framework simplifies defining and operating on
partial functions in formalized real analysis in HOL Light. Our
framework allows simplifying expressions under partiality conditions
in a proof assistant in a manner that resembles computer algebra
systems.",
paper = "Kali08.pdf"
}
\end{chunk}
\index{Kaliszyk, Cezary}
\index{Wiedijk, Freek}
\begin{chunk}{axiom.bib}
@article{Kali08a,
author = "Kaliszyk, Cezary and Wiedijk, Freek",
title = {{Merging Procedural and Declarative Proof}},
journal = "LNCS",
volume = "5497",
year = "2008",
abstract =
"There are two different styles for writing natural deduction proofs:
the 'Gentzen' style in which a proof is a tree with the conclusion at
the root and the assumptions at the leaves, and the 'Fitch' style
(also called ‘flag’ style) in which a proof consists of lines that are
grouped together in nested boxes.
In the world of proof assistants these two kinds of natural deduction
correspond to procedural proofs (tactic scripts that work on one or
more subgoals, like those of the Coq, HOL and PVS systems), and
declarative proofs (like those of the Mizar and Isabelle/Isar
languages).
In this paper we give an algorithm for converting tree style proofs to
flag style proofs. We then present a rewrite system that simplifies
the results.
This algorithm can be used to convert arbitrary procedural proofs to
declarative proofs. It does not work on the level of the proof terms
(the basic inferences of the system), but on the level of the
statements that the user sees in the goals when constructing the
proof.
The algorithm from this paper has been implemented in the ProofWeb
interface to Coq. In ProofWeb a proof that is given as a Coq proof
script (even with arbitrary Coq tactics) can be displayed both as a
tree style and as a flag style proof.",
paper = "Kali08a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Akbarpour, Behzad}
\index{Paulson, Lawrence C.}
\begin{chunk}{axiom.bib}
@article{Akba08a,
author = "Akbarpour, Behzad and Paulson, Lawrence C.",
title = {{MetiTarski: An Automatic Prover for the Elementary Functions}},
journal = "LNCS",
volume = "5144",
year = "2008",
abstract =
"Many inequalities involving the functions ln, exp, sin, cos, etc.,
can be proved automatically by MetiTarski: a resolution theorem prover
(Metis) modified to call a decision procedure (QEPCAD) for the theory
of real closed fields. The decision procedure simplifies clauses by
deleting literals that are inconsistent with other algebraic facts,
while deleting as redundant clauses that follow algebraically from
other clauses. MetiTarski includes special code to simplify
arithmetic expressions.",
paper = "Akba08a.pdf"
}
\end{chunk}
\index{Cramer, Marcos}
\begin{chunk}{axiom.bib}
@misc{Cram14,
author = "Cramer, Marcos",
title = {{Modelling the usage of partial functions and undefined
terms using presupposition theory}},
year = "2014",
isbn = "9781848901308",
publisher = "College Publications",
pages = "7188",
abstract =
"We describe how the linguistic theory of presuppositions can be used
to analyse and model the usage of partial functions and undefined
terms in mathematical texts. We compare our account to other accounts
of partial functions and undefined terms, showing how our account
models the actual usage of partial functions and undefined terms more
faithfully than existing accounts. The model described in this paper
has been developed for the Naproche system, a computer system for
proofchecking mathematical texts written in controlled natural
language, and has largely been implemented in this system.",
paper = "Cram14.pdf",
keywords = "printed"
}
\end{chunk}
\index{Freundt, Sebastian}
\index{Horn, Peter}
\index{Konovalov, Alexander}
\index{Linton, Steve}
\index{Roozemond, Dan}
\begin{chunk}{axiom.bib}
@article{Freu08,
author = "Freundt, Sebastian and Horn, Peter and Konovalov, Alexander
and Linton, Steve and Roozemond, Dan",
title = {{Symbolic Computation Software Composability}},
journal = "LNCS",
volume = "5144",
year = "2008",
abstract =
"We present three examples of the composition of Computer Algebra
Systems to illustrate the progress on a composability infrastructure
as part of the SCIEnce (Symbolic Computation Infrastructure for
Europe) project 1 . One of the major results of the project so far is
an OpenMath based protocol called SCSCP (Symbolic Computation Software
Composability Protocol). SCSCP enables the various software packages
for example to exchange mathematical objects, request calculations,
and store and retrieve remote objects, either locally or across the
internet. The three examples show the current state of the GAP, KANT,
and MuPAD software packages, and give a demonstration of exposing
Macaulay using a newly developed framework.",
paper = "Freu08.pdf",
keywords = "printed"
}
\end{chunk}
\index{Kozen, Dexter}
\index{Palsberg, Jens}
\index{Schwartzbach, Michael I.}
\begin{chunk}{axiom.bib}
@article{Koze93,
author = "Kozen, Dexter and Palsberg, Jens and Schwartzbach, Michael I.",
title = {{Efficient Recursive Subtyping}},
journal = "Mathematical Structures in Computer Science",
volume = "5",
number = "1",
pages = "113125",
year = "1995",
abstract =
"Subtyping in the presence of recursive types for the
$\lambda$calculus was studied by Amadio and Cardelli in 1991. In that
paper they showed that the problem of deciding whether one recursive
type is a subtype of another is decidable in exponential time. In
this paper we give an $O(n^2)$ algorithm. Our algorithm is based on a
simplification of the definition of the subtype relation, which allows
us to reduce the problem to the emptiness problem for a certain finite
automaton with quadratically many states. It is known that equality
of recursive types and the covariant B{\"o}hm order can be decided
efficiently by means of finite automata, since they are just language
equality and language inclusion, respectively. Our results extend the
automatatheoretic approach to handle orderings based on
contravariance.",
paper = "Koze93.pdf"
}
\end{chunk}
\index{Martelli, Alberto}
\index{Montanari, Ugo}
\begin{chunk}{axiom.bib}
@article{Mart82,
author = "Martelli, Alberto and Montanari, Ugo",
title = {{An Efficient Unification Algorithm}},
journal = "ACM TOPLAS",
volume = "4",
number = "2",
pages = "258282",
year = "1982",
abstract =
"The unification problem in firstorder predicate calculus is
described in general terms as the solution of a system of equations,
and a nondeterministic algorithm is given. A new unification
algorithm, characterized by having the acyclicity test efficiently
embedded into it, is derived from the nondeterministic one, and a
PASCAL implementation is given. A comparison with other wellknown
unification algorithms shows that the algorithm described here
performs well in all cases.",
paper = "Mart82.pdf"
}
\end{chunk}
\index{Pottier, Francois}
\begin{chunk}{axiom.bib}
@phdthesis{Pott98,
author = "Pottier, Francois",
title = {{Type Inference in the Presence of Subtyping: From Theory
to Practice}},
school = "Universite Paris 7",
year = "1998",
comment = "INRIA Research Report RR3483",
abstract =
"From a purely theoretical point of view, type inference for a
functional language with parametric polymorphism and subtyping
poses little difficulty. Indeed, it suffices to generalize the
inference algorithm used in the ML language, so as to deal with
type inequalities, rather than equalities. However, the number of
such inequalities is linear in the program size  whence, from a
practical point of view, a serious efficiency and readability
problem.
To solve this problem, one must simplify the inferred
constraints. So, after studying the logical properties of
subtyping constraints, this work proposes several simplifcation
algorithms. They combine seamlessly, yielding a homogeneous, fully
formal framework, which directly leads to an efficient
implementation. Although this theoretical study is performed in a
simplified setting, numerous extensions are possible. Thus, this
framework is realistic and should allow a practical appearance of
subtyping in languages with type inference.",
paper = "Pott98.pdf"
}
\end{chunk}
\index{McCarthy, John}
\begin{chunk}{axiom.bib}
@article{Mcca60,
author = "McCarthy, John",
title = {{Recursive Functions of Symbolic Expressions and Their
Computation by Machine, Part I}},
journal = "CACM",
volume = "3",
pages = "184195",
year = "1960",
paper = "Mcca60.pdf",
keywords = "printed"
}
\end{chunk}
\index{Barendregt, Henk}
\begin{chunk}{axiom.bib}
@incollection{Bare92,
author = "Barendregt, Henk",
title = {{Lambda Calculi with Types}},
booktitle = "Handbook of Logic in Computer Science (vol 2)",
publisher = "Oxford University Press",
year = "1992",
paper = "Bare92.pdf"
}
\end{chunk}
\index{Church, Alonzo}
\begin{chunk}{axiom.bib}
@article{Chur28,
author = "Church, Alonzo",
title = {{On the Law of the Excluded Middle}},
journal = "Bull. of the American Mathematical Society",
volume = "34",
number = "1",
pages = "7578",
year = "1928",
paper = "Chur28.pdf"
}
\end{chunk}
\index{Mitchell, John}
\begin{chunk}{axiom.bib}
@inproceedings{Mitc84,
author = "Mitchell, John",
title = {{Type Inference and Type Containment}},
booktitle = "Proc. Int. Symp. on Semantics of Data Types",
publisher = "Springer",
isbn = "3540133461",
year = "1984",
pages = "257277",
abstract =
"Type inference, the process of assigning types to untyped
expressions, may be motivated by the design of a typed language or
semantical considerations on the meanings of types and expressions.
A typed language GR with polymorphic functions leads to the GR
inference rules. With the addition of an 'oracle' rule for equations
between expressions, the GR rules become complete for a general class
of semantic models of type inference. These inference models are
models of untyped lambda calculus with extra structure similar to
models of the typed language GR. A more specialized set of type
inference rules, the GRS rules, characterize semantic typing when the
functional type $\sigma \rightarrow \tau$ is interpreted as all elements of the
model that map $\sigma$ to $\tau$ and the polymorphic type $\forall t.\sigma$ is
interpreted as the intersection of all $\sigma(\tau)$. Both inference systems
may be reformulated using rules for deducing containments between
types. The advantage of the type inference rules based on containments
is that proofs correspond more closely to the structure of terms.",
paper = "Mitc84.pdf"
}
\end{chunk}
\index{Griesmer, James}
\begin{chunk}{axiom.bib}
@article{Grie11,
author = "Griesmer, James",
title = {{James Griesmer 19292011}},
journal = "ACM Communications in Computer Algebra",
volume = "46",
number = "1/2",
year = "2012",
pages = "1011",
comment = "Obituary",
paper = "Grie11.pdf"
}
\end{chunk}
\index{Daly, Timothy}
\index{Kastner, John}
\index{Mays, Eric}
\begin{chunk}{axiom.bib}
@inproceedings{Daly88,
author = "Daly, Timothy and Kastner, John and Mays, Eric",
title = {{Integrating Rules and Inheritance Networks in a Knowledge-based
Financial Marketing Consultation System}},
booktitle = "Proc. HICSS 21",
volume = "3",
number = "58",
pages = "496500",
year = "1988",
comment = "KROPS",
abstract =
"This paper describes the integrated use of rule-based inference and
an objectcentered knowledge representation (inheritance network) in a
financial marketing consultation system. The rules provide a highly
flexible pattern match capability and inference cycle for control. The
inheritance network provides a convenient way to represent the
conceptual structure of the domain. By merging the two techniques, our
financial computation can be shared at the most general level, and
rule inference is carried out at any appropriate level of
generalization. Since domain knowledge is represented independently
from control knowledge, knowledge about a particular problem solving
technique is decoupled from the conditions for its invocation. A large
financial marketing system has been built and examples are given to
show our combined use of rules and inheritance networks.",
paper = "Daly88.pdf"
}
\end{chunk}

books/axiom.bib  1601 +++++++++++++++++++++++++
books/bookvolbib.pamphlet  1917 ++++++++++++++++++++++++++++++
changelog  2 +
patch  2428 ++++++++++++++++++++++
src/axiomwebsite/patches.html  2 +
5 files changed, 4835 insertions(+), 1115 deletions()
diff git a/books/axiom.bib b/books/axiom.bib
index 6ffadf6..80ce5f3 100644
 a/books/axiom.bib
+++ b/books/axiom.bib
@@ 797,7 +797,8 @@ paper = "Brea89.pdf"
the axiomatization are investigated. It is shown that the problem of
type inference is undecidable but a narrowing strategy for
semidecision procedures is described and studied.",
 paper = "Como91.pdf"
+ paper = "Como91.pdf",
+ keywords = "axiomref, printed"
}
@article{Cool92,
@@ 1082,7 +1083,8 @@ paper = "Brea89.pdf"
author = "Girard, JeanYves",
title = {{Proofs and Types}},
publisher = "Cambridge University Press",
 year = "1989"
+ year = "1989",
+ paper = "Gira89.pdf"
}
@misc{Gode58,
@@ 5352,7 +5354,8 @@ paper = "Brea89.pdf"
Eigenvalue/Eigenvector Problem}},
school = "University of California, Berkeley",
year = "1997",
 link = "\url{http://www.eecs.berkeley.edu/Pubs/TechRpts/1997/CSD97971.pdf}",
+ link =
+ "\url{http://www.eecs.berkeley.edu/Pubs/TechRpts/1997/CSD97971.pdf}",
abstract =
"Computing the eigenvalues and orthogonal eigenvectors of an $n\times n$
symmetric tridiagonal matrix is an important task that arises while
@@ 7797,7 +7800,7 @@ paper = "Brea89.pdf"
proving; and formal logical reasoning about the correctness of
programs.",
paper = "Chli17.pdf",
 keywords = "printed"
+ keywords = "printed, reviewed"
}
@article{Coen04,
@@ 7948,7 +7951,8 @@ paper = "Brea89.pdf"
"We present an extensive set of mathematical propositions and proofs
in order to demonstrate the power of expression of the theory of
constructions.",
 paper = "Coqu85.pdf, printed"
+ paper = "Coqu85.pdf",
+ keywords = "printed"
}
@inproceedings{Croc14,
@@ 7976,7 +7980,7 @@ paper = "Brea89.pdf"
abstract = "Wherein existing methods for building secure systems are
examined and found wanting",
paper = "Cyph17.pdf",
 keywords = "printed"
+ keywords = "printed, reviewed"
}
@article{Fass04,
@@ 9305,6 +9309,35 @@ paper = "Brea89.pdf"
link = "\url{http://www.wolfram.com}"
}
+@article{Abra95,
+ author = "Abrams, Marshall D. and Zelkowitz, Marvin V.",
+ title = {{Striving for Correctness}},
+ journal = "Computers and Security",
+ volume = "14",
+ year = "1995",
+ pages = "719738",
+ paper = "Abra95.pdf",
+ keywords = "printed"
+}
+
+@article{Akba08a,
+ author = "Akbarpour, Behzad and Paulson, Lawrence C.",
+ title = {{MetiTarski: An Automatic Prover for the Elementary Functions}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "Many inequalities involving the functions ln, exp, sin, cos, etc.,
+ can be proved automatically by MetiTarski: a resolution theorem prover
+ (Metis) modified to call a decision procedure (QEPCAD) for the theory
+ of real closed fields. The decision procedure simplifies clauses by
+ deleting literals that are inconsistent with other algebraic facts,
+ while deleting as redundant clauses that follow algebraically from
+ other clauses. MetiTarski includes special code to simplify
+ arithmetic expressions.",
+ paper = "Akba08a.pdf"
+}
+
@misc{Anon18,
author = "Anonymous",
title = {{A Compiler From Scratch}},
@@ 9440,6 +9473,68 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@incollection{Bare92,
+ author = "Barendregt, Henk",
+ title = {{Lambda Calculi with Types}},
+ booktitle = "Handbook of Logic in Computer Science (vol 2)",
+ publisher = "Oxford University Press",
+ year = "1992",
+ paper = "Bare92.pdf"
+}
+
+@article{Bare02,
+ author = "Barendregt, Henk and Barendsen, Erik",
+ title = {{Autarkic Computations in Formal Proofs}},
+ journal = "J. Automated Reasoning",
+ volume = "28",
+ pages = "321336",
+ year = "2002",
+ abstract =
+ "Formal proofs in mathematics and computer science are being studied
+ because these objects can be verified by a very simple computer
+ program. An important open problem is whether these formal proofs can
+ be generated with an effort not much greater than writing a
+ mathematical paper in, say, LATEX. Modern systems for proof
+ development make the formalization of reasoning relatively
+ easy. However, formalizing computations in such a manner that the
+ results can be used in formal proofs is not immediate. In this paper
+ we show how to obtain formal proofs of statements such as Prime(61) in
+ the context of Peano arithmetic or $(x + 1)(x + 1) = x 2 + 2x + 1$ in
+ the context of rings. We hope that the method will help bridge the gap
+ between the efficient systems of computer algebra and the reliable
+ systems of proof development.",
+ paper = "Bare02.pdf",
+ keywords = "printed"
+}
+
+@article{Bare05,
+ author = "Barendregt, Henk and Wiedijk, Freek",
+ title = {{The Challenge of Computer Mathematics}},
+ journal = "Phil. Trans. Royal Society",
+ volume = "363",
+ pages = "23512375",
+ year = "2005",
+ abstract =
+ "Progress in the foundations of mathematics has made it possible to
+ formulate all thinkable mathematical concepts, algorithms and proofs
+ in one language and in an impeccable way. This is not in spite of, but
+ partially based on the famous results of Godel and Turing. In this way
+ statements are about mathematical objects and algorithms, proofs show
+ the correctness of statements and computations, and computations are
+ dealing with objects and proofs. Interactive computer systems for a
+ full integration of defining, computing and proving are based on
+ this. The human defines concepts, constructs algorithms and provides
+ proofs, while the machine checks that the definitions are well formed
+ and the proofs and computations are correct. Results formalized so far
+ demonstrate the feasibility of this ‘computer mathematics’. Also there
+ are very good applications. The challenge is to make the systems more
+ mathematicianfriendly, by building libraries and tools. The eventual
+ goal is to help humans to learn, develop, communicate, referee and
+ apply mathematics.",
+ paper = "Bare05.pdf",
+ keywords = "printed"
+}
+
@techreport{Barr96a,
author = "Barras, Bruno",
title = {{Coq en Coq}},
@@ 9481,7 +9576,7 @@ paper = "Brea89.pdf"
Caml program which performs type inference in CC and use this code
to build a smallscale certified proofchecker.",
paper = "Barr18.pdf",
 keywords = "printed"
+ keywords = "printed, reviewed"
}
@inproceedings{Bent07,
@@ 9564,6 +9659,33 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Camp02,
+ author = "Campbell, J.A.",
+ title = {{Indefinite Integration as a Testbed for Developments in
+ Multiagent Systems}},
+ journal = "LNCS",
+ volume = "2385",
+ year = "2002",
+ abstract =
+ "Coordination of multiple autonomous agents to solve problems that
+ require each of them to contribute their limited expertise in the
+ construction of a solution is often ensured by the use of numerical
+ methods such as votecounting, payoff functions, game theory and
+ economic criteria. In areas where there are no obvious numerical methods
+ for agents to use in assessing other agents’ contributions, many
+ questions still remain open for research. The paper reports a study of
+ one such area: heuristic indefinite integration in terms of agents with
+ different single heuristic abilities which must cooperate in finding
+ indefinite integrals. It examines the reasons for successes and lack
+ of success in performance, and draws some general conclusions about
+ the usefulness of indefinite integration as a field for realistic
+ tests of methods for multiagent systems where the usefulness of
+ 'economic' criteria is limited. In this connection, the role of
+ numerical taxonomy is emphasised.",
+ paper = "Camp02.pdf",
+ keywords = "printed"
+}
+
@article{Card84,
author = "Cardelli, Luca",
title = {{A Semantics of Multiple Inheritance}},
@@ 9649,6 +9771,15 @@ paper = "Brea89.pdf"
paper = "Cart78.pdf"
}
+@article{Char08,
+ author = "Charnley, John and Colton, Simon",
+ title = {{A Global Workspace Framework for Combining Reasoning Systems}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ paper = "Char08.pdf"
+}
+
@article{Chli07,
author = "Chlipala, Adam J.",
title = {{A Certified TypePreserving Compiler from the Lambda Calculus
@@ 9690,6 +9821,64 @@ paper = "Brea89.pdf"
paper = "Clin72.pdf"
}
+@incollection{Como91a,
+ author = "Comon, Hubert",
+ title = {{Disunification: a Survey}},
+ booktitle = "Computational Logic",
+ publisher = "MIT Press",
+ year = "1991",
+ abstract =
+ "Solving an equation in an algebra of terms is known as
+ unification. Solving more complex formulas combining equations and
+ involving in particular negation is called {\sl
+ disunification}. With such a broad definition, many works fall
+ into the scope of disunification. The goal of this paper is to
+ survey these works and bring them together in a same framework.",
+ paper = "Como91a.ps"
+}
+
+@incollection{Como01,
+ author = "Comon, Hubert",
+ title = {{Inductionless Induction}},
+ booktitle = "Handbook of Automated Reasoning",
+ comment = "Chapter 14",
+ publisher = "Elsevier",
+ year = "2001",
+ paper = "Como01.ps"
+}
+
+@incollection{Coqu88,
+ author = "Coquand, Thierry and Huet, Gerard",
+ title = {{The Calculus of Constructions}},
+ booktitle = "Information and Computation, Volume 76",
+ year = "1988",
+ publisher = "Academic Press",
+ paper = "Coqu88.pdf",
+ keywords = "printed"
+}
+
+@misc{Cram14,
+ author = "Cramer, Marcos",
+ title = {{Modelling the usage of partial functions and undefined
+ terms using presupposition theory}},
+ year = "2014",
+ isbn = "9781848901308",
+ publisher = "College Publications",
+ pages = "7188",
+ abstract =
+ "We describe how the linguistic theory of presuppositions can be used
+ to analyse and model the usage of partial functions and undefined
+ terms in mathematical texts. We compare our account to other accounts
+ of partial functions and undefined terms, showing how our account
+ models the actual usage of partial functions and undefined terms more
+ faithfully than existing accounts. The model described in this paper
+ has been developed for the Naproche system, a computer system for
+ proofchecking mathematical texts written in controlled natural
+ language, and has largely been implemented in this system.",
+ paper = "Cram14.pdf",
+ keywords = "printed"
+}
+
@inproceedings{Crar99,
author = "Crary, Karl and Harper, Robert and Puri, Sidd",
title = {{What is a Recursive Module}},
@@ 9756,6 +9945,17 @@ paper = "Brea89.pdf"
paper = "Crar08.pdf"
}
+@article{Chur28,
+ author = "Church, Alonzo",
+ title = {{On the Law of the Excluded Middle}},
+ journal = "Bull. of the American Mathematical Society",
+ volume = "34",
+ number = "1",
+ pages = "7578",
+ year = "1928",
+ paper = "Chur28.pdf"
+}
+
@incollection{Dahl72,
author = "Dahl, OleJohan and Hoare, C.A.R",
title = {{Hierachical Program Structure}},
@@ 9866,6 +10066,42 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@inproceedings{Deha16,
+ author = "Dehaye, PaulOlivier and Iancu, Mihnea and Kohlhase, Michael
+ and Konovalov, Alexander and Lelievre, Samuel and
+ Muller, Dennis and Pfeiffer, Markus and Rabe, Florian and
+ Thiery, Nicolas M. and Wiesing, Tom",
+ title = {{Interoperability in the OpenDreamKit project: The
+ MathintheMiddle Approach}},
+ booktitle = "Intelligent Computer Mathematics",
+ pages = "117131",
+ year = "2016",
+ isbn = "9783319425467",
+ publisher = "Springer",
+ abstract =
+ "OpenDreamKit  'Open Digital Research Environment Toolkit for the
+ Advancement of Mathematics'  is an H2020 EU Research Infrastructure
+ project that aims at supporting, over the period 20152019, the
+ ecosystem of opensource mathematical software systems. OpenDreamKit
+ will deliver a flexible toolkit enabling research groups to set up
+ Virtual Research Environments, customised to meet the varied needs of
+ research projects in pure mathematics and applications.
+
+ An important step in the OpenDreamKit endeavor is to foster the
+ interoperability between a variety of systems, ranging from
+ computer algebra systems over mathematical databases to
+ frontends. This is the mission of the integration work
+ package. We report on experiments and future plans with the
+ MathintheMiddle approach. This architecture consists of a
+ central mathematical ontology that documents the domain and fixes a
+ joint vocabulary, or even a language, going beyond existing
+ systems such as OpenMath, combined with specifications of the
+ functionalities of the various systems. Interaction between
+ systems can then be enriched by pivoting around this architecture.",
+ paper = "Deha16.pdf",
+ keywords = "printed, axiomref"
+}
+
@misc{Dijk71,
author = "Dijkstra, E.W.",
title = {{A Short Introduction to the Art of Programming}},
@@ 10137,6 +10373,60 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Farm07a,
+ author = "Farmer, William M.",
+ title = {{Chiron: A MultiParadigm Logic}},
+ journal = "Studies in Logic, Grammar and Rhetoric",
+ volume = "10",
+ number = "23",
+ year = "2007",
+ abstract =
+ "Chiron is a derivative of vonNeumannBernaysGödel ( NBG ) set
+ theory that is intended to be a practical, generalpurpose logic for
+ mechanizing mathematics. It supports several reasoning paradigms by
+ integrating NBG set theory with elements of type theory, a scheme for
+ handling undefinedness, and a facility for reasoning about the syntax
+ of expressions. This paper gives a quick, informal presentation of
+ the syntax and semantics of Chiron and then discusses some of the
+ benefits Chiron provides as a multiparadigm logic.",
+ paper = "Farm07a.pdf",
+ keywords = "axiomref, printed"
+}
+
+@techreport{Farm13,
+ author = "Farmer, William M.",
+ title = {{Chiron: A Set Theory with Types, Undefinedness, Quotation,
+ and Evaluation}},
+ type = "technical report",
+ institution = "McMaster University",
+ number = "SQRL Report No. 38",
+ year = "2013",
+ link = "\url{https://arxiv.org/pdf/1305.6206.pdf}",
+ abstract =
+ "Chiron is a derivative of vonNeumannBernaysGodel (NBG) set
+ theory that is intended to be a practical, generalpurpose logic for
+ mechanizing mathematics. Unlike traditional set theories such as
+ ZermeloFraenkel (ZF) and NBG, Chiron is equipped with a type system,
+ lambda notation, and definite and indefinite description. The type
+ system includes a universal type, dependent types, dependent function
+ types, subtypes, and possibly empty types. Unlike traditional logics
+ such as firstorder logic and simple type theory, Chiron admits
+ undefined terms that result, for example, from a function applied to
+ an argument outside its domain or from an improper definite or
+ indefinite description. The most noteworthy part of Chiron is its
+ facility for reasoning about the syntax of expressions. Quotation is
+ used to refer to a set called a construction that represents the
+ syntactic structure of an expression, and evaluation is used to refer
+ to the value of the expression that a construction represents. Using
+ quotation and evaluation, syntactic side conditions, schemas,
+ syntactic transformations used in deduction and computation rules, and
+ other such things can be directly expressed in Chiron. This paper
+ presents the syntax and semantics of Chiron, some definitions and
+ simple examples illustrating its use, a proof system for Chiron, and a
+ notion of an interpretation of one theory of Chiron in another.",
+ paper = "Farm13.pdf"
+}
+
@misc{Farm14,
author = "Farmer, William M. and Larjani, Pouya",
title = {{Frameworks for Reasoning about Syntax that Utilize
@@ 10225,6 +10515,29 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Finn96,
+ author = "Finney, Kate",
+ title = {{Mathematical Notation in Formal Specifications: Too
+ Difficult for the Masses?}},
+ journal = "IEEE Trans. on Software Engineering",
+ volume = "22",
+ number = "2",
+ year = "1996",
+ pages = "158159",
+ abstract =
+ "The phrase 'not much mathematics required' can imply a
+ variety of skill levels. When this phrase is applied to computer
+ scientists, software engineers, and clients in the area of formal
+ specification, the word 'much' can be widely misinterpreted with
+ disastrous consequences. A small experiment in reading
+ specifications revealed that students already trained in discrete
+ mathematics and the specification notation performed very poorly;
+ much worse than could reasonably be expected if formal methods
+ proponents are to be believed.",
+ paper = "Finn96.pdf",
+ keywords = "printed"
+}
+
@misc{Flan03,
author = "Flanagan, Cormac and Sabry, Amr and Duba, Bruce F. and
Felleisen, Matthias",
@@ 10277,6 +10590,29 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Freu08,
+ author = "Freundt, Sebastian and Horn, Peter and Konovalov, Alexander
+ and Linton, Steve and Roozemond, Dan",
+ title = {{Symbolic Computation Software Composability}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "We present three examples of the composition of Computer Algebra
+ Systems to illustrate the progress on a composability infrastructure
+ as part of the SCIEnce (Symbolic Computation Infrastructure for
+ Europe) project 1 . One of the major results of the project so far is
+ an OpenMath based protocol called SCSCP (Symbolic Computation Software
+ Composability Protocol). SCSCP enables the various software packages
+ for example to exchange mathematical objects, request calculations,
+ and store and retrieve remote objects, either locally or across the
+ internet. The three examples show the current state of the GAP, KANT,
+ and MuPAD software packages, and give a demonstration of exposing
+ Macaulay using a newly developed framework.",
+ paper = "Freu08.pdf",
+ keywords = "printed"
+}
+
@article{Foxx03,
author = "Fox, Anthony",
title = {{Formal Specification and Verification of ARM6}},
@@ 10351,6 +10687,67 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Gont09a,
+ author = "Gonthier, Georges",
+ title = {{Software Engineering for Mathematics}},
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
+ pages = "2727",
+ abstract =
+ "Despite its mathematical origins, progress in computer assisted
+ reasoning has mostly been driven by applications in computer science,
+ like hardware or protocol security verification. Paradoxically, it has
+ yet to gain widespread acceptance in its original domain of
+ application, mathematics; this is commonly attributed to a 'lack of
+ libraries': attempts to formalize advanced mathematics get bogged down
+ into the formalization of an unwieldy large set of basic results.
+
+ This problem is actually a symptom of a deeper issue: the main
+ function of computer proof systems, checking proofs down to their
+ finest details, is at odds with mathematical practice, which ignores
+ or defers details in order to apply and combine abstractions in
+ creative and elegant ways. Mathematical texts commonly leave logically
+ important parts of proofs as 'exercises to the reader', and are rife
+ with 'abuses of notation that make mathematics tractable' (according
+ to Bourbaki). This (essential) flexibility cannot be readily
+ accommodated by the narrow concept of 'proof library' used by most
+ proof assistants and based on 19th century firstorder logic: a
+ collection of constants, definitions, and lemmas.
+
+ This mismatch is familiar to software engineers, who have been
+ struggling for the past 50 years to reconcile the flexibility needed
+ to produce sensible user requirements with the precision needed to
+ implement them correctly with computer code. Over the last 20 years
+ object and components have replaced traditional data and procedure
+ libraries, partly bridging this gap and making it possible to build
+ significantly larger computer systems.
+
+ These techniques can be implemented in computer proof systems by
+ exploiting advances in mathematical logic. Higherorder logics allow
+ the direct manipulation of functions; this can be used to assign
+ behaviour, such as simplification rules, to symbols, similarly to
+ objects. Advanced type systems can assign a secondary, contextual
+ meaning to expressions, using mechanisms such as type classes,
+ similarly to the metadata in software components. The two can be combined
+ to perform reflection, where an entire statement gets quoted as
+ metadata and then proved algorithmically by some decision procedure.
+
+ We propose to use a more modest, smallscale form of reflection, to
+ implement mathematical components. We use the typederived metadata to
+ indicate how symbols, definitions and lemmas should be used in other
+ theories, and functions to implement this usage — roughly, formalizing
+ some of the exercise section of a textbook. We have applied
+ successfully this more engineered approach to computer proofs in our
+ past work on the Four Color Theorem, the CayleyHamilton Theorem, and
+ our ongoing longterm effort on the Odd Order Theorem, which is the
+ starting point of the proof of the Classification of Finite Simple
+ Groups (the famous 'monster theorem' whose proof spans 10,000 pages in
+ 400 articles).",
+ paper = "Gont09a.pdf",
+ keywords = "printed"
+}
+
@article{Gord79,
author = "Gordon, Michael J. and Milner, Arthur j. and
Wadsworth, Christopher P.",
@@ 10464,6 +10861,66 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@techreport{Gunt89,
+ author = "Gunter, Elsa L.",
+ title = {{Doing Algebra in Simple Type Theory}},
+ type = "technical report",
+ institution = "University of Pennsylvania",
+ year = "1989",
+ number = "MSCIS8938",
+ abstract =
+ "To fully utilize the power of higherorder logic in interactive
+ theorem proving, it is desirable to be able to develop abstract areas
+ of Mathematics such as algebra and topology in an automated
+ setting. Theorem provers capable of higher order reasoning have
+ generally had some form of type theory as theory object
+ language. But mathematicians have tended to use the language of set
+ theory to give definitions and prove theorems in algebra and
+ topology. In this paper,we give an incremental description of how to
+ express various basic algebraic concepts in terms of simple type
+ theory. We present a method for representing algebras, subalgebras,
+ quotient algebras, homomorphisms and isomorphisms in simple type theory,
+ using group theory as an example in each case. Following this we
+ discuss how to automatically apply such an abstract theory to concrete
+ examples. Finally, we conclude with some observations about a
+ potential inconvenience associated with this method of representation,
+ and discuss a difficulty inherent in any attempt to remove this
+ inconvenience.",
+ paper = "Gunt89.pdf",
+ keywords = "printed"
+}
+
+@inproceedings{Gunt89a,
+ author = "Gunter, Elsa L.",
+ title = {{Extensions to Logic Programming Motivated by the
+ Construction of a Generic Theorem Prover}},
+ booktitle = "Int. Workshop on Extensions of Logic Programming",
+ publisher = "Springer",
+ year = "1989",
+ pages = "223244",
+ abstract =
+ "In this article, we discuss several possible extensions to
+ traditional logic programming languages. The specific extensions
+ proposed here fall into two categories: logical extensions and the
+ addition of constructs to allow for increased control. There is a
+ unifying theme to the proposed logical extensions, and that is the
+ scoped introduction of extensions to a programming context. More
+ specifically, these extensions are the ability to introduce variables
+ whose scope is limited to the term in which they occur (i.e.
+ $\lambda$-bound variables within $\lambda$-terms), the ability to
+ introduce into a goal a
+ fresh constant whose scope is limited to the derivation of that goal,
+ and the ability to introduce into a goal a program clause whose scope
+ is limited to the derivation of that goal. The purpose of the
+ additions for increased control is to facilitate the raising and
+ handling of failures.
+
+ To motivate these various extensions, we have repeatedly appealed to
+ examples related to the construction of a generic theorem prover. It
+ is our thesis that this problem domain is specific enough to lend
+ focus when one is considering various language constructs, and yet
+ complex enough to encompass many of the general difficulties found in
+ other areas of symbolic computation.",
+ paper = "Gunt89a.pdf"
+}
+
@article{Gutt95,
author = "Guttman, Joshua D. and Ramsdell, John D. and Wand, Mitchell",
title = {{VLISP: A Verified Implementation of Scheme}},
@@ 10489,6 +10946,34 @@ paper = "Brea89.pdf"
year = "1995"
}
+@article{Hanu90,
+ author = "Hanus, Michael",
+ title = {{Compiling Logic Programs with Equality}},
+ journal = "LNCS",
+ volume = "456",
+ year = "1990",
+ pages = "387401",
+ abstract =
+ "Horn clause logic with equality is an amalgamation of functional and
+ logic programming languages. A sound and complete operational
+ semantics for logic programs with equality is based on resolution to
+ solve literals, and rewriting and narrowing to evaluate functional
+ expressions. This paper proposes a technique for compiling programs
+ with these inference rules into programs of a lowlevel abstract
+ machine which can be efficiently executed on conventional
+ architectures. The presented approach is based on an extension of the
+ Warren abstract machine (WAM). In our approach pure logic programs
+ without function definitions are compiled in the same way as in the
+ WAMapproach, and for logic programs with function definitions
+ particular instructions are generated for occurrences of functions
+ inside clause bodies. In order to obtain an efficient implementation
+ of functional computations, a stack of occurrences of function symbols
+ in goals is managed by the abstract machine. The compiler generates
+ the necessary instructions for the efficient manipulation of the
+ occurrence stack from the given equational logic programs.",
+ paper = "Hanu90.pdf"
+}
+
@inproceedings{Harp93b,
author = "Harper, Robert and Lillibridge, Mark",
title = {{Explicit Polymorphism and CPS Conversion}},
@@ 10521,6 +11006,15 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Hatt97,
+ author = "Hatton, Les",
+ title = {{Software Failure: Follies and Fallacies}},
+ journal = "IEEE Review",
+ volume = "43",
+ number = "2",
+ year = "1997",
+}
+
@incollection{Hoar72,
author = "Hoare, C.A.R",
title = {{Notes on Data Structuring}},
@@ 10594,6 +11088,60 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{John96,
+ author = "Johnson, C.W.",
+ title = {{Literate Specifications}},
+ journal = "Software Engineering Journal",
+ volume = "11",
+ number = "4",
+ year = "1996",
+ pages = "225237",
+ paper = "John96.pdf",
+ abstract =
+ "The increasing scale and complexity of software is imposing serious
+ burdens on many industries. Formal notations, such as Z, VDM and
+ temporal logic, have been developed to address these problems. There
+ are, however, a number of limitations that restrict the use of
+ mathematical specifications far largescale software development. Many
+ projects take months or years to complete. This creates difficulties
+ because abstract mathematical requirements cannot easily be used by
+ new members of a development team to understand past design
+ decisions. Formal specifications describe what software must do, they
+ do not explain why it must do it. In order to avoid these limitations,
+ a literate approach to software engineering is proposed. This
+ technique integrates a formal specification language and a semiformal
+ design rationale. The Z schema calculus is used to represent what a
+ system must do. In contrast, the Questions, Options and Criteria
+ notation is used to represent the justifications that lie behind
+ development decisions. Empirical evidence is presented that suggests
+ the integration of these techniques provides significant benefits over
+ previous approaches to mathematical analysis and design techniques. A
+ range of tools is described that have been developed to support our
+ literate approach to the specification of largescale software systems.",
+ keywords = "printed"
+}
+
+@inproceedings{Jone96,
+ author = "Peyton Jones, Simon",
+ title = {{Compiling Haskell by Program Transformation: A Report from
+ the Trenches}},
+ booktitle = "Proc. European Symposium on Programming",
+ year = "1996",
+ publisher = "Eurographics",
+ abstract =
+ "Many compilers do some of their work by means of
+ correctnesspreseving, and hopefully performanceimproving, program
+ transformations. The Glasgow Haskell Compiler (GHC) takes this idea
+ of 'compilation by transformation' as its warcry, trying to express
+ as much as possible of the compilation process in the form of
+ program transformations.
+
+ This paper reports on our practical experience of the
+ transformational approach to compilation, in the context of a
+ substantial compiler.",
+ paper = "Jone96.pdf"
+}
+
@article{Kaes88,
author = "Kaes, Stefan",
title = {{Parametric Overloading in Polymorphic Programming Languages}},
@@ 10710,6 +11258,107 @@ paper = "Brea89.pdf"
paper = "Kell13.pdf"
}
+@article{Klae84,
+ author = "Klaeren, Herbert A.",
+ title = {{A Constructive Method for Abstract Algebraic Software
+ Specification}},
+ journal = "Theoretical Computer Science",
+ volume = "30",
+ year = "1984",
+ pages = "139204",
+ abstract =
+ "A constructive method for abstract algebraic software
+ specification is presented, where the operations are not
+ implicitly specified by equations but by an explicit recursion on
+ the generating operations of an algebra characterizing the
+ underlying data structure. The underlying algebra itself may be
+ equationally specified since we cannot assume that all data
+ structures will correspond to free algebras. This implies that we
+ distinguish between generating and defined operations and that the
+ underlying algebra has a mechanism of wellfounded decomposition
+ w.r.t. the generating operations. We show that the explicit
+ specification of operations using socalled structural recursive
+ schemata offers advantages over purely equational specifications,
+ especially concerning the safeness of enrichments, the ease of
+ semantics description and the separation between the underlying
+ data structure and the operations defined on it.",
+ keywords = "printed"
+}
+
+@book{Klee52,
+ author = "Kleene, Stephen Cole",
+ title = {{Introduction to MetaMathematics}},
+ year = "1952",
+ publisher = "Ishi Press International",
+ isbn = "9780923891572",
+ paper = "Klee52.pdf"
+}
+
+@inproceedings{Kohl17,
+ author = "Kohlhase, Michael and De Feo, Luca and Muller, Dennis and
+ Pfeiffer, Markus and Rabe, Florian and Thiery, Nicolas M.
+ and Vasilyev, Victor and Wiesing, Tom",
+ title = {{KnowledgeBased Interoperability for Mathematical Software
+ Systems}},
+ booktitle = "7th Int. Conf. on Mathematical Aspects of Computer and
+ Information Sciences",
+ publisher = "Springer",
+ year = "2017",
+ pages = "195210",
+ isbn = "9783319724539",
+ abstract =
+ "There is a large ecosystem of mathematical software systems.
+ Individually, these are optimized for particular domains and
+ functionalities, and together they cover many needs of practical and
+ theoretical mathematics. However, each system specializes on one area,
+ and it remains very difficult to solve problems that need to involve
+ multiple systems. Some integrations exist, but they are adhoc and have
+ scalability and maintainability issues. In particular, there is not
+ yet an interoperability layer that combines the various systems into a
+ virtual research environment (VRE) for mathematics.
+
+ The OpenDreamKit project aims at building a toolkit for such VREs. It
+ suggests using a central systemagnostic formalization of mathematics
+ (MathintheMiddle, MitM) as the needed interoperability layer. In
+ this paper, we conduct the first major case study that instantiates
+ the MitM paradigm for a concrete domain as well as a concrete set of
+ systems. Specifically, we integrate GAP, Sage, and Singular to perform
+ computation in group and ring theory.
+
+ Our work involves massive practical efforts, including a novel
+ formalization of computational group theory, improvements to the
+ involved software systems, and a novel mediating system that sits at
+ the center of a starshaped integration layout between mathematical
+ software systems.",
+ paper = "Kohl17.pdf",
+ keywords = "printed"
+}
+
+@article{Koze93,
+ author = "Kozen, Dexter and Palsberg, Jens and Schwartzbach, Michael I.",
+ title = {{Efficient Recursive Subtyping}},
+ journal = "Mathematical Structures in Computer Science",
+ volume = "5",
+ number = "1",
+ pages = "113125",
+ year = "1995",
+ abstract =
+ "Subtyping in the presence of recursive types for the
+ $\lambda$calculus was studied by Amadio and Cardelli in 1991. In that
+ paper they showed that the problem of deciding whether one recursive
+ type is a subtype of another is decidable in exponential time. In
+ this paper we give an $O(n^2)$ algorithm. Our algorithm is based on a
+ simplification of the definition of the subtype relation, which allows
+ us to reduce the problem to the emptiness problem for a certain finite
+ automaton with quadratically many states. It is known that equality
+ of recursive types and the covariant Böhm order can be decided
+ efficiently by means of finite automata, since they are just language
+ equality and language inclusion, respectively. Our results extend the
+ automatatheoretic approach to handle orderings based on
+ contravariance.",
+ paper = "Koze93.pdf"
+}
+
@inproceedings{Kran86,
author = "Kranz, David and Kelsey, Richard and Rees, Jonathan and
Hudak, Paul and Philbin, James and Adams, Norman",
@@ 10729,6 +11378,18 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Krip75,
+ author = "Kripke, Saul",
+ title = {{Outline of a Theory of Truth}},
+ journal = "Journal of Philosophy",
+ volume = "72",
+ number = "19",
+ year = "1975",
+ pages = "690716",
+ paper = "Krip75.pdf",
+ keywords = "printed"
+}
+
@article{Lamp81,
author = "Lamport, Leslie and Owicki, Susan",
title = {{Program Logics and Program Verification}},
@@ 10761,6 +11422,27 @@ paper = "Brea89.pdf"
paper = "Lero09.pdf"
}
+@inproceedings{Leto04,
+ author = "Letouzey, Pierre",
+ title = {{A New Extraction for Coq}},
+ booktitle = "Types for Proofs and Programs. TYPES 2002",
+ publisher = "Springer",
+ pages = "617635",
+ year = "2004",
+ abstract =
+ "We present here a new extraction mechanism for the Coq proof
+ assistant. By extraction, we mean automatic generation of
+ functional code from Coq proofs, in order to produce certified
+ programs. In former versions of Coq, the extraction mechanism
+ suffered several limitations and in particular worked only with a
+ subset of the language. We first discuss difficulties encountered
+ and solutions proposed to remove these limitations. Then we give a
+ proof of correctness for a theoretical model of the new
+ extraction. Finally we describe the actual implementation.",
+ paper = "Leto04.pdf",
+ keywords = "printed"
+}
+
@article{Mano03,
author = "Manolios, Panagiotis and Moore, J Strother",
title = {{Partial Functions in ACL2}},
@@ 10787,6 +11469,38 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Mart82,
+ author = "Martelli, Alberto and Montanari, Ugo",
+ title = {{An Efficient Unification Algorithm}},
+ journal = "ACM TOPLAS",
+ volume = "4",
+ number = "2",
+ pages = "258282",
+ year = "1982",
+ abstract =
+ "The unification problem in firstorder predicate calculus is
+ described in general terms as the solution of a system of equations,
+ and a nondeterministic algorithm is given. A new unification
+ algorithm, characterized by having the acyclicity test efficiently
+ embedded into it, is derived from the nondeterministic one, and a
+ PASCAL implementation is given. A comparison with other wellknown
+ unification algorithms shows that the algorithm described here
+ performs well in all cases.",
+ paper = "Mart82.pdf"
+}
+
+@article{Mcca60,
+ author = "McCarthy, John",
+ title = {{Recursive Functions of Symbolic Expressions and Their
+ Computation by Machine, Part I}},
+ journal = "CACM",
+ volume = "3",
+ pages = "184195",
+ year = "1960",
+ paper = "Mcca60.pdf",
+ keywords = "printed"
+}
+
@incollection{Mcca63,
author = "McCarthy, John",
title = {{A Basis for a Mathematical Theory of Computation}},
@@ 10906,6 +11620,34 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@inproceedings{Mitc84,
+ author = "Mitchell, John",
+ title = {{Type Inference and Type Containment}},
+ booktitle = "Proc. Int. Symp. on Semantics of Data Types",
+ publisher = "Springer",
+ isbn = "3540133461",
+ year = "1984",
+ pages = "257277",
+ abstract =
+ "Type inference, the process of assigning types to untyped
+ expressions, may be motivated by the design of a typed language or
+ semantical considerations on the meanings of types and expressions.
+ A typed language GR with polymorphic functions leads to the GR
+ inference rules. With the addition of an 'oracle' rule for equations
+ between expressions, the GR rules become complete for a general class
+ of semantic models of type inference. These inference models are
+ models of untyped lambda calculus with extra structure similar to
+ models of the typed language GR. A more specialized set of type
+ inference rules, the GRS rules, characterize semantic typing when the
+ functional type $\sigma \rightarrow \tau$ is interpreted as all elements of the model that
+ map $\sigma$ to $\tau$ and the polymorphic type $\forall t.\sigma$ is interpreted as the
+ intersection of all $\sigma[\tau/t]$. Both inference systems may be reformulated
+ using rules for deducing containments between types. The advantage of
+ the type inference rules based on containments is that proofs
+ correspond more closely to the structure of terms.",
+ paper = "Mitc84.pdf"
+}
+
@inproceedings{Mell15,
author = "Mellies, PaulAndre and Zeilberger, Noam",
title = {{Functors are Type Refinement Systems}},
@@ 10935,6 +11677,31 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@inproceedings{Moor96,
+ author = "Moore, Andrew P. and Payne Jr., Charles N.",
+ title = {{Increasing Assurance with Literate Programming Techniques}},
+ booktitle = "Conf. on Computer Assurance COMPASS'96",
+ publisher = "National Institute of Standards and Technology",
+ year = "1996",
+ pages = "187198",
+ abstract =
+ "The assurance argument that a trusted system satisfies its
+ information security requirements must be convincing, because the
+ argument supports the accreditation decision to allow the computer to
+ process classified information in an operational
+ environment. Assurance is achieved through understanding, but some
+ evidence that supports the assurance argument can be difficult to
+ understand. This paper describes a novel applica tion of a technique,
+ called literate programming [ll], that significantly improves the
+ readability of the assur ance argument while maintaining its
+ consistency with formal specifications that are input to specification
+ and verification systems. We describe an application c,f this
+ technique to a simple example and discuss the lessons learned from
+ this effort.",
+ paper = "Moor96.pdf",
+ keywords = "printed"
+}
+
@book{Morg98,
author = "Morgan, Carroll",
title = {{Programming from Specifcations, 2nd Ed.}},
@@ 11002,6 +11769,40 @@ paper = "Brea89.pdf"
paper = "Moss84.pdf"
}
+@article{Murt09,
+ author = "Murthy, S.G.K and Sekharam, K. Raja",
+ title = {{Software Reliability through Theorem Proving}},
+ journal = "Defence Science Journal",
+ volume = "59",
+ number = "3",
+ year = "2009",
+ pages = "314317",
+ abstract =
+ "Improving software reliability of missioncritical systems is
+ widely recognised as one of the major challenges. Early detection
+ of errors in software requirements, designs and implementation,
+ need rigorous verification and validation techniques. Several
+ techniques comprising static and dynamic testing approaches are
+ used to improve reliability of mission critical software; however
+ it is hard to balance development time and budget with software
+ reliability. Particularly using dynamic testing techniques, it is
+ hard to ensure software reliability, as exhaustive testing is not
+ possible. On the other hand, formal verification techniques
+ utilise mathematical logic to prove correctness of the software
+ based on given specifications, which in turn improves the
+ reliability of the software. Theorem proving is a powerful formal
+ verification technique that enhances the software reliability for
+ mission critical aerospace applications. This paper discusses the
+ issues related to software reliability and theorem proving used to
+ enhance software reliability through formal verification
+ technique, based on the experiences with STeP tool, using the
+ conventional and internationally accepted methodologies, models,
+ theorem proving techniques available in the tool without proposing
+ a new model.",
+ paper = "Murt09.pdf",
+ keywords = "printed"
+}
+
@article{Myre14,
author = "Myreen, Magnus O. and Davis, Jared",
title = {{The Reflective Milawa Theorem Prover is Sound}},
@@ 11143,6 +11944,23 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@book{Nede14,
+ author = "Nederpelt, Rob and Geuvers, Herman",
+ title = {{Type Theory and Formal Proof}},
+ year = "2014",
+ publisher = "Cambridge University Press",
+ isbn = "9781107036505"
+}
+
+@book{Niss99,
+ author = "Nissanke, Nimal",
+ title = {{Formal Specification}},
+ publisher = "Springer",
+ year = "1999",
+ isbn = "9781852330026",
+ paper = "Niss99.pdf"
+}
+
@article{Odon81,
author = "O'Donnell, Michael J.",
title = {{A Critique of the Foundations of Hoarestyle Programming Logics}},
@@ 11174,9 +11992,118 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@phdthesis{Okas96,
+ author = "Okasaki, Chris",
+ title = {{Purely Functional Data Structures}},
+ school = "Carnegie Mellon University",
+ year = "1996",
+ link = "\url{}",
+ comment = "CMUCS96177",
+ isbn = "9780521663502",
+ abstract =
+ "When a C programmer needs an efficient data structure for a
+ particular problem, he or she can often simply look one up in any of
+ a number of good textbooks or handbooks. Unfortunately, programmers
+ in functional languages such as Standard ML or Haskell do not have
+ this luxury. Although some data structures designed for imperative
+ languages such as C can be quite easily adapted to a functional
+ setting, most cannot, usually because they depend in crucial ways on
+ assignments, which are disallowed, or at least discouraged, in
+ functional languages. To address this imbalance, we describe several
+ techniques for designing functional data structures, and numerous
+ original data structures based on these techniques, including multiple
+ variations of lists, queues, doubleended queues, and heaps, many
+ supporting more exotic features such as random access or efficient
+ catenation.
+
+ In addition, we expose the fundamental role of lazy evaluation in
+ amortized functional data structures. Traditional methods of
+ amortization break down when old versions of a data structure, not
+ just the most recent, are available for further processing. This
+ property is known as persistence , and is taken for granted in
+ functional languages. On the surface, persistence and amortization
+ appear to be incompatible, but we show how lazy evaluation can be used
+ to resolve this conflict, yielding amortized data structures that are
+ efficient even when used persistently. Turning this relationship
+ between lazy evaluation and amortization around, the notion of
+ amortization also provides the first practical techniques for
+ analyzing the time requirements of nontrivial lazy programs.
+
+ Finally, our data structures offer numerous hints to programming
+ language designers, illustrating the utility of combining strict and
+ lazy evaluation in a single language, and providing nontrivial
+ examples using polymorphic recursion and higherorder, recursive
+ modules.",
+ paper = "Okas96.pdf"
+}
+
+@article{Parn72,
+ author = "Parnas, David L.",
+ title = {{A Technique for Software Module Specification with
+ Examples}},
+ journal = "CACM",
+ volume = "15",
+ number = "5",
+ year = "1972",
+ pages = "330336",
+ abstract =
+ "This paper presents an approach to writing specifications for
+ parts of software systems. The main goal is to provide
+ specifications sufficiently precise and complete that other pieces
+ of software can be written to interact with the piece specified
+ without additional information. The secondary goal is to include
+ in the specification no more information than necessary to meet
+ the first goal. The technique is illustrated by means of a variety
+ of examples from a tutorial system.",
+ paper = "Parn72.pdf",
+ keywords = "printed"
+}
+
+@incollection{Parn10,
+ author = "Parnas, David",
+ title = {{Precise Documentation: The Key to Better Software}},
+ booktitle = "The Future of Software Engineering",
+ year = "2010",
+ pages = "125148",
+ abstract =
+ "The prime cause of the sorry 'state of the art' in software
+ development is our failure to produce good design documentation. Poor
+ documentation is the cause of many errors and reduces efficiency in
+ every phase of a software product's development and use. Most software
+ developers believe that 'documentation' refers to a collection of
+ wordy, unstructured, introductory descriptions, thousands of pages
+ that nobody wanted to write and nobody trusts. In contrast, Engineers
+ in more traditional disciplines think of precise blueprints, circuit
+ diagrams, and mathematical specifications of component
+ properties. Software developers do not know how to produce precise
+ documents for software. Software developments also think that
+ documentation is something written after the software has been
+ developed. In other fields of Engineering much of the documentation is
+ written before and during the development. It represents forethought
+ not afterthought. Among the benefits of better documentation would be:
+ easier reuse of old designs, better communication about requirements,
+ more useful design reviews, easier integration of separately written
+ modules, more effective code inspection, more effective testing, and
+ more efficient corrections and improvements. This paper explains how
+ to produce and use precise software documentation and illustrate the
+ methods with several examples.",
+ paper = "Parn10.pdf",
+ keywords = "printed"
+}
+
+@incollection{Paul90a,
+ author = "Paulson, Lawrence C.",
+ title = {{Isabelle: The Next 700 Theorem Provers}},
+ booktitle = "Logic and Computer Science",
+ publisher = "Academic Press",
+ pages = "361386",
+ year = "1990",
+ paper = "Paul90a.pdf"
+}
+
@book{Paul96,
author = "Paulson, L.C.",
 title = {{ML for the WOrking Programmer 2nd Edition}},
+ title = {{ML for the Working Programmer 2nd Edition}},
year = "1996",
publisher = "Cambridge University Press",
isbn = "052156543X"
@@ 11205,6 +12132,51 @@ paper = "Brea89.pdf"
year = "2017"
}
+@misc{Pfen04,
+ author = "Pfenning, Frank",
+ title = {{Automated Theorem Proving}},
+ year = "2004",
+ comment = "Draft of Spring 2004",
+ keywords = "printed"
+}
+
+@phdthesis{Pfen87,
+ author = "Pfenning, Frank",
+ title = {{Proof Transformations in HigherOrder Logic}},
+ year = "1987",
+ school = "Carnegie Mellon University",
+ link = "\url{http://wwwcgi.cs.cmu.edu/~fp/papers/thesis87.pdf}",
+ abstract =
+ "We investigate the problem of translating between different styles of
+ proof systems in higherorder logic: analytic proofs which are well
+ suited for automated theorem proving, and nonanalytic deductions
+ which are well suited for the mathematician. Analytic proofs are
+ represented as expansion proofs, $H$ , a form of the sequent calculus we
+ define, nonanalytic proofs are represented by natural deductions. A
+ nondeterministic translation algorithm between expansion proofs and
+ $H$deductions is presented and its correctness is proven. We also
+ present an algorithm for translation in the other direction and prove
+ its correctness. A cutelimination algorithm for expansion proofs is
+ given and its partial correctness is proven. Strong termination of
+ this algorithm remains a conjecture for the full higherorder
+ system, but is proven for the firstorder fragment. We extend the
+ translations to a nonanalytic proof system which contains a primitive
+ notion of equality, while leaving the notion of expansion proof
+ unaltered. This is possible, since a nonextensional equality is
+ definable in our system of type theory. Next we extend analytic and
+ nonanalytic proof systems and the translations between them to
+ include extensionality. Finally, we show how the methods and notions
+ used so far apply to the problem of translating expansion proofs into
+ natural deductions. Much care is taken to specify this translation in
+ a modular way (through tactics) which leaves a large number of choices
+ and therefore a lot of room for heuristics to produce elegant natural
+ deductions. A practically very useful extension, called symmetric
+ simplification, produces natural deductions which make use of lemmas
+ and are often much more intuitive than the normal deductions which
+ would be created by earlier algorithms.",
+ paper = "Pfen87.pdf"
+}
+
@inproceedings{Pfen88a,
author = "Pfenning, Frank and Elliott, Conal",
title = {{HigherOrder Abstract Syntax}},
@@ 11344,6 +12316,35 @@ paper = "Brea89.pdf"
paper = "Poll94a.pdf"
}
+@phdthesis{Pott98,
+ author = "Pottier, Francois",
+ title = {{Type Inference in the Presence of Subtyping: From Theory
+ to Practice}},
+ school = "Universite Paris 7",
+ year = "1998",
+ comment = "INRIA Research Report RR3483",
+ abstract =
+ "From a purely theoretical point of view, type inference for a
+ functional language with parametric polymorphism and subtyping
+ poses little difficulty. Indeed, it suffices to generalize the
+ inference algorithm used in the ML language, so as to deal with
+ type inequalities, rather than equalities. However, the number of
+ such inequalities is linear in the program size  whence, from a
+ practical point of view, a serious efficiency and readability
+ problem.
+
+ To solve this problem, one must simplify the inferred
+ constraints. So, after studying the logical properties of
+ subtyping constraints, this work proposes several simplification
+ algorithms. They combine seamlessly, yielding a homogeneous, fully
+ formal framework, which directly leads to an efficient
+ implementation. Although this theoretical study is performed in a
+ simplified setting, numerous extensions are possible. Thus, this
+ framework is realistic and should allow a practical appearance of
+ subtyping in languages with type inference.",
+ paper = "Pott98.pdf"
+}
+
@misc{Remy17,
author = "Remy, Didier",
title = {{Type Systems for Programming Languages}},
@@ 11375,6 +12376,14 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@book{Robi01,
+ author = "Robinson, Alan and Voronkov, Andrei",
+ title = {{Handbook of Automated Reasoning (2 Volumes)}},
+ year = "2001",
+ publisher = "MIT Press",
+ isbn = "0262182238"
+}
+
@techreport{Sabr92,
author = "Sabry, Amr and Felleisen, Matthias",
title = {{Reasoning About Programs in ContinuationPassing Style}},
@@ 11431,6 +12440,22 @@ paper = "Brea89.pdf"
keywords = "printed, axiomref"
}
+@article{Schm86,
+ author = "Schmitt, P.H.",
+ title = {{Computational Aspects of ThreeValued Logic}},
+ journal = "LNCS",
+ volume = "230",
+ year = "1986",
+ abstract =
+ "This paper investigates a threevalued logic $L_3$, that has been
+ introduced in the study of natural language semantics. A complete
+ proof system based on a threevalued analogon of negative resolution
+ is presented. A subclass of $L_3$ corresponding to Horn clauses in
+ twovalued logic is defined. Its model theoretic properties are
+ studied and it is shown to admit a PROLOGstyle proof procedure.",
+ paper = "Schm86.pdf"
+}
+
@article{Scot71,
author = "Scott, Dana S. and Strachey, C.",
title = {{Towards a Mathematical Semantics for Computer Languages}},
@@ 11493,6 +12518,15 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@misc{Smit15,
+ author = "Smith, Peter",
+ title = {{Some Big Books on Mathematical Logic}},
+ year = "2015",
+ link = "\url{}",
+ paper = "Smit15.pdf",
+ keywords = "printed"
+}
+
@article{Soko87,
author = "Sokolowski, Stefan",
title = {{Soundness of Hoare's Logic: An Automated Proof Using LCF}},
@@ 11662,10 +12696,16 @@ paper = "Brea89.pdf"
\subsection{V} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\subsection{W} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\index{Wadler, Philip}
+\index{Vienneau, Robert L.}
\begin{chunk}{axiom.bib}
+@misc{Vien93,
+ author = "Vienneau, Robert L.",
+ title = {{A Review of Formal Methods}},
+ year = "1993",
+ paper = "Vien93.pdf",
+ keywords = "printed"
+}
+
@article{Wadl95,
author = "Wadler, Philip",
title = {{Monads for Functional Programming}},
@@ 11810,6 +12850,29 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@misc{Weir12,
+ author = "Weirich, Jim",
+ title = {{Y Not?  Adventures in Functional Programming}},
+ year = "2012",
+ link = "\url{https://www.infoq.com/presentations/YCombinator}",
+ abstract = "Explains the YCombinator"
+}
+
+@misc{Wied18,
+ author = "Wiedijk, Freek",
+ title = {{Formalizing 100 Theorems}},
+ link = "\url{http://www.cs.ru.nl/~freek/100/}",
+ year = "2018"
+}
+
+@misc{Wied08,
+ author = "Wiedijk, Freek",
+ title = {{Formal Proof -- Getting Started}},
+ link = "\url{https://www.ams.org/notices/200811/tx081101408p.pdf}",
+ year = "2008",
+ paper = "Wied08.pdf"
+}
+
@phdthesis{Zeil09,
author = "Zeilberger, Noam",
title = {{The Logical Basis of Evaluation Order and PatternMatching}},
@@ 11857,6 +12920,31 @@ paper = "Brea89.pdf"
paper = "Zeil09.pdf"
}
+@article{Zeil10,
+ author = "Zeilberger, Doron",
+ title = {{Against Rigor}},
+ journal = "LNCS",
+ volume = "6167",
+ year = "2010",
+ pages = "262262",
+ abstract =
+ "The ancient Greeks gave (western) civilization quite a few
+ gifts, but we should beware of Greeks bearing gifts. The gifts of theatre
+ and democracy were definitely good ones, and perhaps even the gift of
+ philosophy, but the 'gift' of the socalled 'axiomatic method' and the
+ notion of 'rigorous' proof did much more harm than good. If we want
+ to maximize Mathematical Knowledge, and its Management, we have to
+ return to Euclid this dubious gift, and giveup our fanatical insistence
+ on perfect rigor. Of course, we should not go to the other extreme, of
+ demanding that everything should be nonrigorous. We should encourage
+ diversity of proofstyles and rigor levels, and remember that nothing is
+ absolutely sure in this world, and there does not exist an absolutely
+ rigorous proof, nor absolute certainty, and 'truth' has many shades and
+ levels.",
+ paper = "Zeil10.pdf",
+ keywords = "printed"
+}
+
@misc{Zeil16,
author = "Zeilberger, Noam",
title = {{Towards a Mathematical Science of Programming}},
@@ 11872,6 +12960,32 @@ paper = "Brea89.pdf"
paper = "Zeil16a.pdf"
}
+@article{Zimm02,
+ author = "Zimmer, Jurgen and Dennis, Louise A.",
+ title = {{Inductive Theorem Proving and Computer Algebra in the
+ MathWeb Software Bus}},
+ journal = "LNCS",
+ volume = "2385",
+ year = "2002",
+ abstract =
+ "Reasoning systems have reached a high degree of maturity in the last
+ decade. However, even the most successful systems are usually not
+ general purpose problem solvers but are typically specialised on
+ problems in a certain domain. The MathWeb Software Bus (MathWebSB) is a
+ system for combining reasoning specialists via a common software bus.
+ We describe the integration of the $\lambda$Clam system, a reasoning
+ specialist for proofs by induction, into the MathWebSB. Due to this
+ integration, $\lambda$Clam now offers its theorem proving expertise
+ to other systems in the MathWebSB. On the other hand,
+ $\lambda$Clam can use the
+ services of any reasoning specialist already integrated. We focus on
+ the latter and describe first experiments on proving theorems by
+ induction using the computational power of the Maple system within
+ $\lambda$Clam.",
+ paper = "Zimm02.pdf",
+ keywords = "printed"
+}
+
@incollection{Soze06,
author = "Sozeau, Matthieu",
title = {{Subset Coercions in Coq}},
@@ 12078,7 +13192,7 @@ paper = "Brea89.pdf"
@InProceedings{Adam01,
author = "Adams, Andrew A. and Dunstan, Martin and Gottlieben, Hanne and
Kelsey, Tom and Martin, Ursula and Owre, Sam",
 title = {{Computer algebra meets automated theorem proving: Integrating
+ title = {{Computer Algebra meets Automated Theorem Proving: Integrating
Maple and PVS}},
booktitle = "Theorem proving in higher order logics",
series = "TPHOLs 2001",
@@ 12390,6 +13504,56 @@ paper = "Brea89.pdf"
keywords = "axiomref, CASProof, printed"
}
+@phdthesis{Ball99a,
+ author = "Ballarin, Clemens",
+ title = {{Computer Algebra and Theorem Proving}},
+ school = "Darwin College, University of Cambridge",
+ year = "1999",
+ abstract =
+ "Is the use of computer algebra technology beneficial for
+ mechanised reasoning in and about mathematical domains? Usually
+ it is assumed that it is. Many works in this area, however, either
+ have little reasoning content, or use symbolic computation only to
+ simplify expressions. In work that has achieved more, the used
+ methods do not scale up. They trust the computer algebra system
+ either too much or too little.
+
+ Computer algebra systems are not as rigorous as many provers. They
+ are not logically sound reasoning systems, but collections of
+ algorithms. We classify soundness problems that occur in computer
+ algebra systems. While many algorithms and their implementations
+ are perfectly trustworthy, the semantics of symbols is often
+ unclear and leads to errors. On the other hand, more robust
+ approaches to interface external reasoners to provers are not
+ always practical because the mathematical depth of proofs
+ algorithms in computer algebra are based on can be enormous.
+
+ Our own approach takes both trustworthiness of the overall system
+ and efficiency into account. It relies on using only reliable
+ parts of a computer algebra system, which can be achieved by
+ choosing a suitable library, and deriving specifications for these
+ algorithms from their literature.
+
+ We design and implement an interface between the prover Isabelle
+ and the computer algebra library Sumit and use it to prove
+ nontrivial theorems from coding theory. This is based on the
+ mechanisation of the algebraic theories of rings and
+ polynomials. Coding theory is an area where proofs do have a
+ substantial amount of computational content. Also, it is realistic
+ to assume that the verification of an encoding or decoding device
+ could be undertaken in, and indeed, be simplified by, such a
+ system.
+
+ The reason why semantics of symbols is often unclear in current
+ computer algebra systems is not mathematical difficulty, but the
+ design of those systems. For Gaussian elimination we show how the
+ soundness problem can be fixed by a small extension, and without
+ losing efficiency. This is a prerequisite for the efficient use of
+ the algorithm in a prover.",
+ paper = "Ball99a.pdf",
+ keywords = "axiomref, printed"
+}
+
@article{Bare01,
author = "Barendregt, Henk and Cohen, Arjeh M.",
title = {{Electronic Communication of Mathematics and the Interaction
@@ 12487,7 +13651,7 @@ paper = "Brea89.pdf"
keywords = "axiomref"
}
@InCollection{Bees89,
+@InCollection{Bees89a,
author = "Beeson, Michael J.",
title = {{Logic and Computation in MATHPERT: An Expert System for
Learning Mathematics}},
@@ 12525,7 +13689,8 @@ paper = "Brea89.pdf"
limitations of Prolog, and ``Schubert's Steamroller.'' An extension of
GENTZEN also provides a decision procedure for intuitionistic
propositional calculus (but at some cost in efficiency).",
 paper = "Bees89a.pdf"
+ paper = "Bees89a.pdf",
+ keywords = "printed"
}
@article{Bees92,
@@ 12541,7 +13706,33 @@ paper = "Brea89.pdf"
and onevariable calculus. It permits students to direct the
stepbystep solution of problems, and is capable of helping them
by solving the problems itself if necessary.",
 paper = "Bees92.pdf"
+ paper = "Bees92.pdf",
+ keywords = "printed"
+}
+
+@article{Bees95,
+ author = "Beeson, Michael",
+ title = {{Using Nonstandard Analysis to Ensure the Correctness of
+ Symbolic Computations}},
+ journal = "Int. J. of Foundations of Computer Science",
+ volume = "6",
+ number = "3",
+ pages = "299338",
+ year = "1995",
+ abstract =
+ "Nonstandard analysis has been put to use in a theoremprover,
+ where it assists in the analysis of formulae involving limits. The
+ theoremprover in question is used in the computer program
+ Mathpert to ensure the correctness of calculations in
+ calculus. Although nonstandard analysis is widely viewed as
+ nonconstructive, it can alternately be viewed as a method of
+ reducing logical manipulation (of formulae with quantifiers) to
+ computation (with rewrite rules). We give a logical theory of
+ nonstandard analysis which is implemented in Mathpert. We describe
+ a procedure for 'elimination of infinitesimals' (also implemented
+ in Mathpert) and prove its correctness.",
+ paper = "Bees95.pdf",
+ keywords = "printed"
}
@misc{Bees98,
@@ 12565,6 +13756,95 @@ paper = "Brea89.pdf"
paper = "Bees98.pdf"
}
+@article{Bees02,
+ author = "Beeson, Michael and Wiedijk, Freek",
+ title = {{The Meaning of Infinity in Calculus and Computer Algebra
+ Systems}},
+ journal = "LNCS",
+ volume = "2385",
+ year = "2002",
+ abstract =
+ "We use filters of open sets to provide a semantics justifying the
+ use of infinity in informal limit calculations in calculus, and in
+ the same kind of calculations in computer algebra. We compare the
+ behavior of these filters to the way Mathematica behaves when
+ calculating with infinity.
+
+ We stress the need to have a proper semantics for computer algebra
+ expressions, especially if one wants to use results and methods
+ from computer algebra in theorem provers. The computer algebra
+ method under discussion in this paper is the use of rewrite rules
+ to evaluate limits involving infinity.",
+ paper = "Bees02.pdf",
+ keywords = "printed"
+}
+
+@article{Bees06,
+ author = "Beeson, Michael",
+ title = {{Mathematical Induction in OtterLambda}},
+ journal = "J. Automated Reasoning",
+ volume = "36",
+ number = "4",
+ pages = "311-344",
+ abstract =
+ "Otterlambda is Otter modified by adding code to implement an
+ algorithm for lambda unification. Otter is a resolutionbased,
+ clauselanguage firstorder prover that accumulates deduced clauses
+ and uses strategies to control the deduction and retention of
+ clauses. This is the first time that such a firstorder prover has
+ been combined in one program with a unification algorithm capable of
+ instantiating variables to lambda terms to assist in the
+ deductions. The resulting prover has all the advantages of the
+ proofsearch algorithm of Otter (speed, variety of inference rules,
+ excellent handling of equality) and also the power of lambda
+ unification. We illustrate how these capabilities work well together
+ by using Otterlambda to find proofs by mathematical induction. Lambda
+ unification instantiates the induction schema to find a useful
+ instance of induction, and then Otter's firstorder reasoning can be
+ used to carry out the base case and induction step. If necessary,
+ induction can be used for those, too. We present and discuss a variety
+ of examples of inductive proofs found by Otterlambda: some in pure
+ Peano arithmetic, some in Peano arithmetic with defined predicates,
+ some in theories combining algebra and the natural numbers, some
+ involving algebraic simplification (used in the induction step) by
+ simplification code from MathXpert, and some involving list induction
+ instead of numerical induction. These examples demonstrate the
+ feasibility and usefulness of adding lambda unification to a
+ firstorder prover.",
+ paper = "Bees06.pdf",
+ keywords = "printed"
+}
+
+@misc{Bees14,
+ author = "Beeson, M.",
+ title = {{Mixing Computations and Proofs}},
+ comment = "slides",
+ year = "2014",
+ link = "\url{http://www.cs.ru.nl/qed20/slides/beesonqed20.pdf}",
+ paper = "Bees14.pdf"
+}
+
+@article{Bees16,
+ author = "Beeson, M.",
+ title = {{Mixing Computations and Proofs}},
+ journal = "J. of Formalized Reasoning",
+ volume = "9",
+ number = "1",
+ pages = "7199",
+ year = "2016",
+ link =
+ "\url{http://www.michaelbeeson.com/research/papers/ProofAndComputation.pdf}",
+ abstract =
+ "We examine the relationship between proof and computation in
+ mathematics, especially in formalized mathematics. We compare the
+ various approaches to proofs with a significant computational
+ component, including (i) verifying the algorithms, (ii) verifying the
+ results of the unverified algorithms, and (iii) trusting an external
+ computation.",
+ paper = "Bees16.pdf",
+ keywords = "printed"
+}
+
@article{Benk03,
author = "Benke, Marcin and Dybjer, Peter and Jansson, Patrik",
title = {{Universes for generic programs and proofs in dependent type
@@ 12926,6 +14206,37 @@ paper = "Brea89.pdf"
theorem provers and possile extensions of the ideas are considered."
}
+@article{Bove05,
+ author = "Bove, Ana and Capretta, Venanzio",
+ title = {{Modelling General Recursion in Type Theory}},
+ journal = "Math. Struct. in Comp. Science",
+ volume = "15",
+ pages = "671708",
+ year = "2005",
+ abstract =
+ "Constructive type theory is an expressive programming language in
+ which both algorithms and proofs can be represented. A limitation of
+ constructive type theory as a programming language is that only
+ terminating programs can be defined in it. Hence, general recursive
+ algorithms have no direct formalisation in type theory since they
+ contain recursive calls that satisfy no syntactic condition
+ guaranteeing termination. In this work, we present a method to
+ formalise general recursive algorithms in type theory. Given a general
+ recursive algorithm, our method is to define an inductive
+ specialpurpose accessibility predicate that characterises the inputs
+ on which the algorithm terminates. The typetheoretic version of the
+ algorithm is then defined by structural recursion on the proof that
+ the input values satisfy this predicate. The method separates the
+ computational and logical parts of the definitions and thus the
+ resulting typetheoretic algorithms are clear, compact and easy to
+ understand. They are as simple as their equivalents in a functional
+ programming language, where there is no restriction on recursive
+ calls. Here, we give a formal definition of the method and discuss its
+ power and its limitations.",
+ paper = "Bove05.pdf",
+ keywords = "printed"
+}
+
@misc{Bove08,
author = "Bove, Ana and Dybjer, Peter",
title = {{Dependent Types at Work}},
@@ 13082,6 +14393,16 @@ paper = "Brea89.pdf"
keywords = "CASProof, printed"
}
+@techreport{Brui68a,
+ author = "de Bruijn, N.G.",
+ title = {{AUTOMATH, A Language for Mathematics}},
+ year = "1968",
+ type = "technical report",
+ number = "68WSK05",
+ institution = "Technische Hogeschool Eindhoven",
+ paper = "Brui68a.pdf"
+}
+
@article{Buch97,
author = "Buchberger, Bruno",
title = {{Mathematica: doing mathematics by computer?}},
@@ 13471,6 +14792,52 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@article{Care10,
+ author = "Carette, Jacques",
+ title = {{Mechanized Mathematics}},
+ journal = "LNCS",
+ volume = "6167",
+ year = "2010",
+ pages = "157--157",
+ abstract =
+ "In the 50 years since McCarthy’s 'Recursive Functions of Symbolic
+ Expressions and Their Computation by Machine', what have we
+ learned about the realization of Leibniz’s dream of just being
+ able to utter 'Calculemus!' (Let us calculate!) when faced with a
+ mathematical dilemma?
+
+ In this talk, I will first present what I see as the most
+ important lessons from the past which need to be heeded by modern
+ designers. From the present, I will look at the context in which
+ computers are used, and derive further requirements. In
+ particular, now that computers are no longer the exclusive
+ playground for highly educated scientists, usability is now more
+ important than ever, and justifiably so.
+
+ I will also examine what I see as some principal failings of
+ current systems, primarily to understand some major mistakes to
+ avoid. These failings will be analyzed to extract what seems to be
+ the root mistake, and I will present my favourite solutions.
+
+ Furthermore, various technologies have matured since the creation
+ of many of our systems, and whenever appropriate, these should be
+ used. For example, our understanding of the structure of
+ mathematics has significantly increased, yet this is barely
+ reflected in our libraries. The extreme focus on efficiency by the
+ computer algebra community, and correctness by the (interactive)
+ theorem proving community should no longer be considered viable
+ long term strategies. But how does one effectively bridge that
+ gap?
+
+ I personally find that a number of (programming) languagebased
+ solutions are particularly effective, and I will emphasize
+ these. Solutions to some of these problems will be illustrated
+ with code from a prototype of MathScheme 2.0, the system I am
+ developing with Bill Farmer and our research group.",
+ paper = "Care10.pdf",
+ keywords = "printed"
+}
+
@misc{Care11a,
author = "Carette, Jacques and Farmer, William M. and Jeremic, Filip and
Maccio, Vincent and O'Connor, Russell and Tran, Quang M.",
@@ 13689,6 +15056,29 @@ paper = "Brea89.pdf"
keywords = "axiomref"
}
+@article{Como89,
+ author = "Comon, Hubert and Lescanne, Pierre",
+ title = {{Equational Problems and Disunification}},
+ journal = "J. Symbolic Computation",
+ volume = "7",
+ number = "3-4",
+ year = "1989",
+ pages = "371--425",
+ abstract =
+ "Roughly speaking, an equational problem is a first order formula
+ whose only predicate symbol is $=$. We propose some rules for the
+ transformation of equational problems and study their correctness in
+ various models. Then, we give completeness results with respect to
+ some 'simple' problems called solved forms. Such completeness results
+ still hold when adding some control which moreover ensures
+ termination. The termination proofs are given for a 'weak' control and
+ thus hold for the (large) class of algorithms obtained by restricting
+ the scope of the rules. Finally, it must be noted that a byproduct of
+ our method is a decision procedure for the validity in the Herbrand
+ Universe of any first order formula with the only predicate symbol $=$.",
+ paper = "Como89.pdf"
+}
+
@book{Cons85,
author = "Constable, R.L. and Allen, S.F. and Bromley, H.M. and Cremer, J.F.
and Harper, R.W. and Howe, D.J. and Knoblock, T.B. and
@@ 13913,6 +15303,22 @@ paper = "Brea89.pdf"
paper = "Dave08.pdf"
}
+@article{Dave08a,
+ author = "Davenport, James H.",
+ title = {{AISC Meets Natural Typography}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "McDermott [12,13] introduced the concept 'Artificial Intelligence
+ meets Natural Stupidity'. In this paper, we explore how Artificial
+ Intelligence and Symbolic Computation can meet Natural Typography, and
+ how the conventions for expressing mathematics that humans
+ understand can cause us difficulties when designing mechanised
+ systems.",
+ paper = "Dave08a.pdf"
+}
+
@misc{Dave17a,
author = "Davenport, James",
title = {{Computer Algebra and Formal Proof}},
@@ 14658,7 +16064,8 @@ paper = "Brea89.pdf"
title = {{From LCF to HOL: a short history}},
year = "1996",
link = "\url{http://www.cl.cam.ac.uk/~mjcg/papers/HolHistory.pdf}",
 paper = "Gord96.pdf"
+ paper = "Gord96.pdf",
+ keywords = "printed"
}
@book{Gord93,
@@ 14905,7 +16312,8 @@ paper = "Brea89.pdf"
introduction to the concepts involved in reflection and a survey of
relevant work. To this end, a rather extensive bibliography is
provided.",
 paper = "Harr95.pdf"
+ paper = "Harr95.pdf",
+ keywords = "printed"
}
@phdthesis{Harr96,
@@ 15252,6 +16660,65 @@ paper = "Brea89.pdf"
keywords = "axiomref, CASProof, printed"
}
+@article{Kali08,
+ author = "Kaliszyk, Cezary",
+ title = {{Automating Side Conditions in Formalized Partial Functions}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "Assumptions about the domains of partial functions are necessary in
+ stateoftheart proof assistants. On the other hand when
+ mathematicians write about partial functions they tend not to
+ explicitly write the side conditions. We present an approach to
+ formalizing partiality in real and complex analysis in total
+ frameworks that allows keeping the side conditions hidden from the
+ user as long as they can be computed and simplified
+ automatically. This framework simplifies defining and operating on
+ partial functions in formalized real analysis in HOL Light. Our
+ framework allows simplifying expressions under partiality conditions
+ in a proof assistant in a manner that resembles computer algebra
+ systems.",
+ paper = "Kali08.pdf"
+}
+
+@article{Kali08a,
+ author = "Kaliszyk, Cezary and Wiedijk, Freek",
+ title = {{Merging Procedural and Declarative Proof}},
+ journal = "LNCS",
+ volume = "5497",
+ year = "2008",
+ abstract =
+ "There are two different styles for writing natural deduction proofs:
+ the 'Gentzen' style in which a proof is a tree with the conclusion at
+ the root and the assumptions at the leaves, and the 'Fitch' style
+ (also called ‘flag’ style) in which a proof consists of lines that are
+ grouped together in nested boxes.
+
+ In the world of proof assistants these two kinds of natural deduction
+ correspond to procedural proofs (tactic scripts that work on one or
+ more subgoals, like those of the Coq, HOL and PVS systems), and
+ declarative proofs (like those of the Mizar and Isabelle/Isar
+ languages).
+
+ In this paper we give an algorithm for converting tree style proofs to
+ flag style proofs. We then present a rewrite system that simplifies
+ the results.
+
+ This algorithm can be used to convert arbitrary procedural proofs to
+ declarative proofs. It does not work on the level of the proof terms
+ (the basic inferences of the system), but on the level of the
+ statements that the user sees in the goals when constructing the
+ proof.
+
+ The algorithm from this paper has been implemented in the ProofWeb
+ interface to Coq. In ProofWeb a proof that is given as a Coq proof
+ script (even with arbitrary Coq tactics) can be displayed both as a
+ tree style and as a flag style proof.",
+ paper = "Kali08a.pdf",
+ keywords = "printed"
+}
+
@phdthesis{Kali09,
author = "Kaliszyk, Cezary",
title = {{Correctness and Availability: Building Computer Algebra on top
@@ 15631,7 +17098,7 @@ paper = "Brea89.pdf"
work, these implementations could only be verified with significant
manual effort.",
paper = "Wony18.pdf",
 keywords = "printed"
+ keywords = "printed, reviewed"
}
@misc{Lond74,
@@ 16055,7 +17522,7 @@ paper = "Brea89.pdf"
dependent type language extension. The library is implemented in the
existing Haskell, by 'hiding' a certain part of the existing Prelude.",
paper = "Mesh10.pdf",
 keywords = "axiomref"
+ keywords = "axiomref, printed, reviewed"
}
@article{Mesh14,
@@ 17234,7 +18701,8 @@ paper = "Brea89.pdf"
viable: both that {\tt hstocoq} applies to existing Haskell
code, and that the output it produces is amenable to
verification.",
 paper = "Spec18.pdf"
+ paper = "Spec18.pdf",
+ keywords = "printed, reviewed"
}
@misc{Stac17a,
@@ 17390,7 +18858,7 @@ paper = "Brea89.pdf"
@inproceedings{Wadl88,
author = "Wadler, Philip and Blott, Stephen",
 title = {{How to make adhoc polymorphism less ad hoc}},
+ title = {{How to Make Ad-hoc Polymorphism Less Ad Hoc}},
booktitle = "Proc 16th ACM SIGPLANSIGACT Symp. on Princ. of Prog. Lang",
isbn = "0897912942",
pages = "6076",
@@ 28650,6 +30118,17 @@ paper = "Brea89.pdf"
keywords = "axiomref"
}
+@book{Dave18,
+ author = "Davenport, James H",
+ title = {{Computer Algebra: Systems and Algorithms for Algebraic
+ Computation (updated)}},
+ publisher = "Davenport",
+ year = "2018",
+ link = "\url{http://staff.bath.ac.uk/masjhd/JHDCA.pdf}",
+ paper = "Dave18.pdf",
+ keywords = "axiomref"
+}
+
@misc{Deckxx,
author = "Decker, Wolfram",
title = {{Some Introductory Remarks on Computer Algebra}},
@@ 29100,7 +30579,7 @@ paper = "Brea89.pdf"
concepts, in structured generic programming as practiced in
computational mathematical systems.",
paper = "Reis12.pdf",
 keywords = "axiomref"
+ keywords = "axiomref, printed"
}
@phdthesis{Doye97,
@@ 32307,6 +33786,7 @@ paper = "Brea89.pdf"
state of the art commercial software as well as research prototypes
are presented, followed by a description of present research
directions.",
+ paper = "Kajl94.pdf",
keywords = "axiomref"
}
@@ 34977,7 +36457,7 @@ paper = "Brea89.pdf"
@article{Sant95,
author = "Santas, Philip S.",
 title = {{A type system for computer algebra}},
+ title = {{A Type System for Computer Algebra}},
journal = "J. Symbolic Computation",
volume = "19",
number = "13",
@@ 35724,7 +37204,7 @@ paper = "Brea89.pdf"
@article{Spit11,
author = "Spitters, Bas and van der Weegen, Eelis",
 title = {{Type classes for mathematics in type theory}},
+ title = {{Type Classes for Mathematics in Type Theory}},
journal = "Math. Struct. Comput. Sci.",
volume = "21",
number = "4",
@@ 38571,6 +40051,33 @@ paper = "Brea89.pdf"
keywords = "printed"
}
+@inproceedings{Daly88,
+ author = "Daly, Timothy and Kastner, John and Mays, Eric",
+ title = {{Integrating Rules and Inheritance Networks in a Knowledge-based
+ Financial Marketing Consultation System}},
+ booktitle = "Proc. HICSS 21",
+ volume = "3",
+ number = "58",
+ pages = "496--500",
+ year = "1988",
+ comment = "KROPS",
+ abstract =
+ "This paper describes the integrated use of rule-based inference and
+ an objectcentered knowledge representation (inheritance network) in a
+ financial marketing consultation system. The rules provide a highly
+ flexible pattern match capability and inference cycle for control. The
+ inheritance network provides a convenient way to represent the
+ conceptual structure of the domain. By merging the two techniques, our
+ financial computation can be shared at the most general level, and
+ rule inference is carried out at any appropriate level of
+ generalization. Since domain knowledge is represented independently
+ from control knowledge, knowledge about a particular problem solving
+ technique is decoupled from the conditions for its invocation. A large
+ financial marketing system has been built and examples are given to
+ show our combined use of rules and inheritance networks.",
+ paper = "Daly88.pdf"
+}
+
@misc{Dave80b,
author = "Davenport, J.H. and Jenks, R.D.",
title = {{SCRATCHPAD/370: Modes and Domains}},
@@ 39333,6 +40840,18 @@ paper = "Brea89.pdf"
paper = "Gray98.pdf"
}
+@article{Grie11,
+ author = "Griesmer, James",
+ title = {{James Griesmer 19292011}},
+ journal = "ACM Communications in Computer Algebra",
+ volume = "46",
+ number = "1/2",
+ year = "2012",
+ pages = "10--11",
+ comment = "Obituary",
+ paper = "Grie11.pdf"
+}
+
@inproceedings{Gris76,
author = "Griss, Martin L.",
title = {{The Definition and Use of Data Structures in REDUCE}},
@@ 41120,6 +42639,40 @@ paper = "Brea89.pdf"
isbn = "026216082X"
}
+@inproceedings{Paul89,
+ author = "Paulson, Lawrence C. and Smith, Andrew W.",
+ title = {{Logic Programming, Functional Programming, and
+ Inductive Definitions}},
+ booktitle = "Int. Workshop on Extensions of Logic Programming",
+ publisher = "Springer",
+ year = "1989",
+ pages = "283--309",
+ abstract =
+ "The unification of logic and functional programming, like the Holy
+ Grail, is sought by countless people [6, 14]. In reporting our
+ attempt, we first discuss the motivation. We argue that logic
+ programming is still immature, compared with functional programming,
+ because few logic programs are both useful and pure. Functions can
+ help to purify logic programming, for they can eliminate certain uses
+ of the cut and can express certain negations positively.
+
+ More generally, we suggest that the traditional paradigm  logic
+ programming as firstorder logic  is seriously out of step with
+ practice. We offer an alternative paradigm. We view the logic program
+ as an inductive definition of sets and relations. This view explains
+ certain uses of Negation as Failure, and explains why most attempts to
+ extend PROLOG to larger fragments of first-order logic have not been
+ successful. It suggests a logic language with functions,
+ incorporating equational unification.
+
+ We have implemented a prototype of this language. It is very slow,
+ but complete, and appears to be faster than some earlier
+ implementations of narrowing. Our experiments illustrate the
+ programming style and shed light on the further development of such
+ languages.",
+ paper = "Paul89.pdf"
+}
+
@article{Paul90,
author = "Paulson, Lawrence C.",
title = {{A Formulation of the Simple Theory of Types (for
diff git a/books/bookvolbib.pamphlet b/books/bookvolbib.pamphlet
index 4458978..0d0cd30 100644
 a/books/bookvolbib.pamphlet
+++ b/books/bookvolbib.pamphlet
@@ 1272,7 +1272,8 @@ paper = "Brea89.pdf"
the axiomatization are investigated. It is shown that the problem of
type inference is undecidable but a narrowing strategy for
semidecision procedures is described and studied.",
 paper = "Como91.pdf"
+ paper = "Como91.pdf",
+ keywords = "axiomref, printed"
}
\end{chunk}
@@ 1653,7 +1654,8 @@ paper = "Brea89.pdf"
author = "Girard, JeanYves",
title = {{Proofs and Types}},
publisher = "Cambridge University Press",
 year = "1989"
+ year = "1989",
+ paper = "Gira89.pdf"
}
\end{chunk}
@@ 7354,7 +7356,8 @@ when shown in factored form.
Eigenvalue/Eigenvector Problem}},
school = "University of California, Berkeley",
year = "1997",
 link = "\url{http://www.eecs.berkeley.edu/Pubs/TechRpts/1997/CSD97971.pdf}",
+ link =
+ "\url{http://www.eecs.berkeley.edu/Pubs/TechRpts/1997/CSD97971.pdf}",
abstract =
"Computing the eigenvalues and orthogonal eigenvectors of an $n\times n$
symmetric tridiagonal matrix is an important task that arises while
@@ 10480,7 +10483,7 @@ when shown in factored form.
proving; and formal logical reasoning about the correctness of
programs.",
paper = "Chli17.pdf",
 keywords = "printed"
+ keywords = "printed, reviewed"
}
\end{chunk}
@@ 10667,7 +10670,8 @@ when shown in factored form.
"We present an extensive set of mathematical propositions and proofs
in order to demonstrate the power of expression of the theory of
constructions.",
 paper = "Coqu85.pdf, printed"
+ paper = "Coqu85.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 10702,7 +10706,7 @@ when shown in factored form.
abstract = "Wherein existing methods for building secure systems are
examined and found wanting",
paper = "Cyph17.pdf",
 keywords = "printed"
+ keywords = "printed, reviewed"
}
\end{chunk}
@@ 12344,6 +12348,45 @@ when shown in factored form.
\subsection{A} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Abrams, Marshall D.}
+\index{Zelkowitz, Marvin V.}
+\begin{chunk}{axiom.bib}
+@article{Abra95,
+ author = "Abrams, Marshall D. and Zelkowitz, Marvin V.",
+ title = {{Striving for Correctness}},
+ journal = "Computers and Security",
+ volume = "14",
+ year = "1995",
+ pages = "719--738",
+ paper = "Abra95.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Akbarpour, Behzad}
+\index{Paulson, Lawrence C.}
+\begin{chunk}{axiom.bib}
+@article{Akba08a,
+ author = "Akbarpour, Behzad and Paulson, Lawrence C.",
+ title = {{MetiTarski: An Automatic Prover for the Elementary Functions}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "Many inequalities involving the functions ln, exp, sin, cos, etc.,
+ can be proved automatically by MetiTarski: a resolution theorem prover
+ (Metis) modified to call a decision procedure (QEPCAD) for the theory
+ of real closed fields. The decision procedure simplifies clauses by
+ deleting literals that are inconsistent with other algebraic facts,
+ while deleting as redundant clauses that follow algebraically from
+ other clauses. MetiTarski includes special code to simplify
+ arithmetic expressions.",
+ paper = "Akba08a.pdf"
+}
+
+\end{chunk}
+
\begin{chunk}{axiom.bib}
@misc{Anon18,
author = "Anonymous",
@@ 12511,6 +12554,82 @@ when shown in factored form.
\end{chunk}
+\index{Barendregt, Henk}
+\begin{chunk}{axiom.bib}
+@incollection{Bare92,
+ author = "Barendregt, Henk",
+ title = {{Lambda Calculi with Types}},
+ booktitle = "Handbook of Logic in Computer Science (vol 2)",
+ publisher = "Oxford University Press",
+ year = "1992",
+ paper = "Bare92.pdf"
+}
+
+\end{chunk}
+
+\index{Barendregt, Henk}
+\index{Barendsen, Erik}
+\begin{chunk}{axiom.bib}
+@article{Bare02,
+ author = "Barendregt, Henk and Barendsen, Erik",
+ title = {{Autarkic Computations in Formal Proofs}},
+ journal = "J. Automated Reasoning",
+ volume = "28",
+ pages = "321--336",
+ year = "2002",
+ abstract =
+ "Formal proofs in mathematics and computer science are being studied
+ because these objects can be verified by a very simple computer
+ program. An important open problem is whether these formal proofs can
+ be generated with an effort not much greater than writing a
+ mathematical paper in, say, LATEX. Modern systems for proof
+ development make the formalization of reasoning relatively
+ easy. However, formalizing computations in such a manner that the
+ results can be used in formal proofs is not immediate. In this paper
+ we show how to obtain formal proofs of statements such as Prime(61) in
+ the context of Peano arithmetic or $(x + 1)(x + 1) = x^2 + 2x + 1$ in
+ the context of rings. We hope that the method will help bridge the gap
+ between the efficient systems of computer algebra and the reliable
+ systems of proof development.",
+ paper = "Bare02.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Barendregt, Henk}
+\index{Wiedijk, Freek}
+\begin{chunk}{axiom.bib}
+@article{Bare05,
+ author = "Barendregt, Henk and Wiedijk, Freek",
+ title = {{The Challenge of Computer Mathematics}},
+ journal = "Phil. Trans. Royal Society",
+ volume = "363",
+ pages = "2351--2375",
+ year = "2005",
+ abstract =
+ "Progress in the foundations of mathematics has made it possible to
+ formulate all thinkable mathematical concepts, algorithms and proofs
+ in one language and in an impeccable way. This is not in spite of, but
+ partially based on the famous results of Godel and Turing. In this way
+ statements are about mathematical objects and algorithms, proofs show
+ the correctness of statements and computations, and computations are
+ dealing with objects and proofs. Interactive computer systems for a
+ full integration of defining, computing and proving are based on
+ this. The human defines concepts, constructs algorithms and provides
+ proofs, while the machine checks that the definitions are well formed
+ and the proofs and computations are correct. Results formalized so far
+ demonstrate the feasibility of this ‘computer mathematics’. Also there
+ are very good applications. The challenge is to make the systems more
+ mathematicianfriendly, by building libraries and tools. The eventual
+ goal is to help humans to learn, develop, communicate, referee and
+ apply mathematics.",
+ paper = "Bare05.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Barras, Bruno}
\begin{chunk}{axiom.bib}
@techreport{Barr96a,
@@ 12559,7 +12678,7 @@ when shown in factored form.
Caml program which performs type inference in CC and use this code
to build a smallscale certified proofchecker.",
paper = "Barr18.pdf",
 keywords = "printed"
+ keywords = "printed, reviewed"
}
\end{chunk}
@@ 12671,6 +12790,37 @@ when shown in factored form.
\end{chunk}
+\index{Campbell, J.A.}
+\begin{chunk}{axiom.bib}
+@article{Camp02,
+ author = "Campbell, J.A.",
+ title = {{Indefinite Integration as a Testbed for Developments in
+ Multiagent Systems}},
+ journal = "LNCS",
+ volume = "2385",
+ year = "2002",
+ abstract =
+ "Coordination of multiple autonomous agents to solve problems that
+ require each of them to contribute their limited expertise in the
+ construction of a solution is often ensured by the use of numerical
+ methods such as votecounting, payoff functions, game theory and
+ economic criteria. In areas where there are no obvious numerical methods
+ for agents to use in assessing other agents’ contributions, many
+ questions still remain open for research. The paper reports a study of
+ one such area: heuristic indefinite integration in terms of agents with
+ different single heuristic abilities which must cooperate in finding
+ indefinite integrals. It examines the reasons for successes and lack
+ of success in performance, and draws some general conclusions about
+ the usefulness of indefinite integration as a field for realistic
+ tests of methods for multiagent systems where the usefulness of
+ 'economic' criteria is limited. In this connection, the role of
+ numerical taxonomy is emphasised.",
+ paper = "Camp02.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Cardelli, Luca}
\begin{chunk}{axiom.bib}
@article{Card84,
@@ 12775,6 +12925,20 @@ when shown in factored form.
\end{chunk}
+\index{Charnley, John}
+\index{Colton, Simon}
+\begin{chunk}{axiom.bib}
+@article{Char08,
+ author = "Charnley, John and Colton, Simon",
+ title = {{A Global Workspace Framework for Combining Reasoning Systems}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ paper = "Char08.pdf"
+}
+
+\end{chunk}
+
\index{Chlipala, Adam J.}
\begin{chunk}{axiom.bib}
@article{Chli07,
@@ 12825,6 +12989,81 @@ when shown in factored form.
\end{chunk}
+\index{Comon, Hubert}
+\begin{chunk}{axiom.bib}
+@incollection{Como91a,
+ author = "Comon, Hubert",
+ title = {{Disunification: a Survey}},
+ booktitle = "Computational Logic",
+ publisher = "MIT Press",
+ year = "1991",
+ abstract =
+ "Solving an equation in an algebra of terms is known as
+ unification. Solving more complex formulas combining equations and
+ involving in particular negation is called {\sl
+ disunification}. With such a broad definition, many works fall
+ into the scope of disunification. The goal of this paper is to
+ survey these works and bring them together in a same framework.",
+ paper = "Como91a.ps"
+}
+
+\end{chunk}
+
+\index{Comon, Hubert}
+\begin{chunk}{axiom.bib}
+@incollection{Como01,
+ author = "Comon, Hubert",
+ title = {{Inductionless Induction}},
+ booktitle = "Handbook of Automated Reasoning",
+ comment = "Chapter 14",
+ publisher = "Elsevier",
+ year = "2001",
+ paper = "Como01.ps"
+}
+
+\end{chunk}
+
+\index{Coquand, Thierry}
+\index{Huet, Gerard}
+\begin{chunk}{axiom.bib}
+@incollection{Coqu88,
+ author = "Coquand, Thierry and Huet, Gerard",
+ title = {{The Calculus of Constructions}},
+ booktitle = "Information and Computation, Volume 76",
+ year = "1988",
+ publisher = "Academic Press",
+ paper = "Coqu88.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Cramer, Marcos}
+\begin{chunk}{axiom.bib}
+@misc{Cram14,
+ author = "Cramer, Marcos",
+ title = {{Modelling the usage of partial functions and undefined
+ terms using presupposition theory}},
+ year = "2014",
+ isbn = "9781848901308",
+ publisher = "College Publications",
+ pages = "71--88",
+ abstract =
+ "We describe how the linguistic theory of presuppositions can be used
+ to analyse and model the usage of partial functions and undefined
+ terms in mathematical texts. We compare our account to other accounts
+ of partial functions and undefined terms, showing how our account
+ models the actual usage of partial functions and undefined terms more
+ faithfully than existing accounts. The model described in this paper
+ has been developed for the Naproche system, a computer system for
+ proofchecking mathematical texts written in controlled natural
+ language, and has largely been implemented in this system.",
+ paper = "Cram14.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Crary, Karl}
\index{Harper, Robert}
\index{Puri, Sidd}
@@ 12906,6 +13145,21 @@ when shown in factored form.
\end{chunk}
+\index{Church, Alonzo}
+\begin{chunk}{axiom.bib}
+@article{Chur28,
+ author = "Church, Alonzo",
+ title = {{On the Law of the Excluded Middle}},
+ journal = "Bull. of the American Mathematical Society",
+ volume = "34",
+ number = "1",
+ pages = "75--78",
+ year = "1928",
+ paper = "Chur28.pdf"
+}
+
+\end{chunk}
+
\subsection{D} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Dahl, OleJohan}
@@ 13037,6 +13291,55 @@ when shown in factored form.
\end{chunk}
+\index{Dehaye, PaulOlivier}
+\index{Iancu, Mihnea}
+\index{Kohlhase, Michael}
+\index{Konovalov, Alexander}
+\index{Lelievre, Samuel}
+\index{Muller, Dennis}
+\index{Pfeiffer, Markus}
+\index{Rabe, Florian}
+\index{Thiery, Nicolas M.}
+\index{Wiesing, Tom}
+\begin{chunk}{axiom.bib}
+@inproceedings{Deha16,
+ author = "Dehaye, PaulOlivier and Iancu, Mihnea and Kohlhase, Michael
+ and Konovalov, Alexander and Lelievre, Samuel and
+ Muller, Dennis and Pfeiffer, Markus and Rabe, Florian and
+ Thiery, Nicolas M. and Wiesing, Tom",
+ title = {{Interoperability in the OpenDreamKit project: The
+ MathintheMiddle Approach}},
+ booktitle = "Intelligent Computer Mathematics",
+ pages = "117--131",
+ year = "2016",
+ isbn = "9783319425467",
+ publisher = "Springer",
+ abstract =
+ "OpenDreamKit  'Open Digital Research Environment Toolkit for the
+ Advancement of Mathematics'  is an H2020 EU Research Infrastructure
+ project that aims at supporting, over the period 20152019, the
+ ecosystem of opensource mathematical software systems. OpenDreamKit
+ will deliver a flexible toolkit enabling research groups to set up
+ Virtual Research Environments, customised to meet the varied needs of
+ research projects in pure mathematics and applications.
+
+ An important step in the OpenDreamKit endeavor is to foster the
+ interoperability between a variety of systems, ranging from
+ computer algebra systems over mathematical databases to
+ frontends. This is the mission of the integration work
+ package. We report on experiments and future plans with the
+ MathintheMiddle approach. This architecture consists of a
+ central mathematical ontology that documents the domain and fixes a
+ joint vocabulary, or even a language, going beyond existing
+ systems such as OpenMath, combined with specifications of the
+ functionalities of the various systems. Interaction between
+ systems can then be enriched by pivoting around this architecture.",
+ paper = "Deha16.pdf",
+ keywords = "printed, axiomref"
+}
+
+\end{chunk}
+
\index{Dijkstra, E.W.}
\begin{chunk}{axiom.bib}
@misc{Dijk71,
@@ 13381,6 +13684,68 @@ when shown in factored form.
\end{chunk}
\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@article{Farm07a,
+ author = "Farmer, William M.",
+ title = {{Chiron: A MultiParadigm Logic}},
+ journal = "Studies in Logic, Grammar and Rhetoric",
+ volume = "10",
+ number = "23",
+ year = "2007",
+ abstract =
+ "Chiron is a derivative of von Neumann-Bernays-G{\"o}del (NBG) set
+ theory that is intended to be a practical, generalpurpose logic for
+ mechanizing mathematics. It supports several reasoning paradigms by
+ integrating NBG set theory with elements of type theory, a scheme for
+ handling undefinedness, and a facility for reasoning about the syntax
+ of expressions. This paper gives a quick, informal presentation of
+ the syntax and semantics of Chiron and then discusses some of the
+ benefits Chiron provides as a multiparadigm logic.",
+ paper = "Farm07a.pdf",
+ keywords = "axiomref, printed"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@techreport{Farm13,
+ author = "Farmer, William M.",
+ title = {{Chiron: A Set Theory with Types, Undefinedness, Quotation,
+ and Evaluation}},
+ type = "technical report",
+ institution = "McMaster University",
+ number = "SQRL Report No. 38",
+ year = "2013",
+ link = "\url{https://arxiv.org/pdf/1305.6206.pdf}",
+ abstract =
+ "Chiron is a derivative of von Neumann-Bernays-G{\"o}del (NBG) set
+ theory that is intended to be a practical, generalpurpose logic for
+ mechanizing mathematics. Unlike traditional set theories such as
+ ZermeloFraenkel (ZF) and NBG, Chiron is equipped with a type system,
+ lambda notation, and definite and indefinite description. The type
+ system includes a universal type, dependent types, dependent function
+ types, subtypes, and possibly empty types. Unlike traditional logics
+ such as firstorder logic and simple type theory, Chiron admits
+ undefined terms that result, for example, from a function applied to
+ an argument outside its domain or from an improper definite or
+ indefinite description. The most noteworthy part of Chiron is its
+ facility for reasoning about the syntax of expressions. Quotation is
+ used to refer to a set called a construction that represents the
+ syntactic structure of an expression, and evaluation is used to refer
+ to the value of the expression that a construction represents. Using
+ quotation and evaluation, syntactic side conditions, schemas,
+ syntactic transformations used in deduction and computation rules, and
+ other such things can be directly expressed in Chiron. This paper
+ presents the syntax and semantics of Chiron, some definitions and
+ simple examples illustrating its use, a proof system for Chiron, and a
+ notion of an interpretation of one theory of Chiron in another.",
+ paper = "Farm13.pdf"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
\index{Larjani, Pouya}
\begin{chunk}{axiom.bib}
@misc{Farm14,
@@ 13482,6 +13847,33 @@ when shown in factored form.
\end{chunk}
+\index{Finney, Kate}
+\begin{chunk}{axiom.bib}
+@article{Finn96,
+ author = "Finney, Kate",
+ title = {{Mathematical Notation in Formal Specifications: Too
+ Difficult for the Masses?}},
+ journal = "IEEE Trans. on Software Engineering",
+ volume = "22",
+ number = "2",
+ year = "1996",
+ pages = "158--159",
+ abstract =
+ "The phrase 'not much mathematics required' can imply a
+ variety of skill levels. When this phrase is applied to computer
+ scientists, software engineers, and clients in the area of formal
+ specification, the word 'much' can be widely misinterpreted with
+ disastrous consequences. A small experiment in reading
+ specifications revealed that students already trained in discrete
+ mathematics and the specification notation performed very poorly;
+ much worse than could reasonably be expected if formal methods
+ proponents are to be believed.",
+ paper = "Finn96.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Flanagan, Cormac}
\index{Sabry, Amr}
\index{Duba, Bruce F.}
@@ 13554,6 +13946,37 @@ when shown in factored form.
\end{chunk}
+\index{Freundt, Sebastian}
+\index{Horn, Peter}
+\index{Konovalov, Alexander}
+\index{Linton, Steve}
+\index{Roozemond, Dan}
+\begin{chunk}{axiom.bib}
+@article{Freu08,
+ author = "Freundt, Sebastian and Horn, Peter and Konovalov, Alexander
+ and Linton, Steve and Roozemond, Dan",
+ title = {{Symbolic Computation Software Composability}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "We present three examples of the composition of Computer Algebra
+ Systems to illustrate the progress on a composability infrastructure
+ as part of the SCIEnce (Symbolic Computation Infrastructure for
+ Europe) project 1 . One of the major results of the project so far is
+ an OpenMath based protocol called SCSCP (Symbolic Computation Software
+ Composability Protocol). SCSCP enables the various software packages
+ for example to exchange mathematical objects, request calculations,
+ and store and retrieve remote objects, either locally or across the
+ internet. The three examples show the current state of the GAP, KANT,
+ and MuPAD software packages, and give a demonstration of exposing
+ Macaulay using a newly developed framework.",
+ paper = "Freu08.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Fox, Anthony}
\begin{chunk}{axiom.bib}
@article{Foxx03,
@@ 13652,6 +14075,71 @@ when shown in factored form.
\end{chunk}
+\index{Gonthier, Georges}
+\begin{chunk}{axiom.bib}
+@article{Gont09a,
+ author = "Gonthier, Georges",
+ title = {{Software Engineering for Mathematics}},
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
+ pages = "27--27",
+ abstract =
+ "Despite its mathematical origins, progress in computer assisted
+ reasoning has mostly been driven by applications in computer science,
+ like hardware or protocol security verification. Paradoxically, it has
+ yet to gain widespread acceptance in its original domain of
+ application, mathematics; this is commonly attributed to a 'lack of
+ libraries': attempts to formalize advanced mathematics get bogged down
+ into the formalization of an unwieldy large set of basic results.
+
+ This problem is actually a symptom of a deeper issue: the main
+ function of computer proof systems, checking proofs down to their
+ finest details, is at odds with mathematical practice, which ignores
+ or defers details in order to apply and combine abstractions in
+ creative and elegant ways. Mathematical texts commonly leave logically
+ important parts of proofs as 'exercises to the reader', and are rife
+ with 'abuses of notation that make mathematics tractable' (according
+ to Bourbaki). This (essential) flexibility cannot be readily
+ accommodated by the narrow concept of 'proof library' used by most
+ proof assistants and based on 19th century firstorder logic: a
+ collection of constants, definitions, and lemmas.
+
+ This mismatch is familiar to software engineers, who have been
+ struggling for the past 50 years to reconcile the flexibility needed
+ to produce sensible user requirements with the precision needed to
+ implement them correctly with computer code. Over the last 20 years
+ object and components have replaced traditional data and procedure
+ libraries, partly bridging this gap and making it possible to build
+ significantly larger computer systems.
+
+ These techniques can be implemented in computer proof systems by
+ exploiting advances in mathematical logic. Higherorder logics allow
+ the direct manipulation of functions; this can be used to assign
+ behaviour, such as simplification rules, to symbols, similarly to
+ objects. Advanced type systems can assign a secondary, contextual
+ meaning to expressions, using mechanisms such as type classes,
+ similarly to the metadata in software components. The two can be combined
+ to perform reflection, where an entire statement gets quoted as
+ metadata and then proved algorithmically by some decision procedure.
+
+ We propose to use a more modest, smallscale form of reflection, to
+ implement mathematical components. We use the typederived metadata to
+ indicate how symbols, definitions and lemmas should be used in other
+ theories, and functions to implement this usage — roughly, formalizing
+ some of the exercise section of a textbook. We have applied
+ successfully this more engineered approach to computer proofs in our
+ past work on the Four Color Theorem, the CayleyHamilton Theorem, and
+ our ongoing longterm effort on the Odd Order Theorem, which is the
+ starting point of the proof of the Classification of Finite Simple
+ Groups (the famous 'monster theorem' whose proof spans 10,000 pages in
+ 400 articles).",
+ paper = "Gont09a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Gordon, Michael J.}
\index{Milner, Arthur j.}
\index{Wadsworth, Christopher P.}
@@ 13799,6 +14287,74 @@ when shown in factored form.
\end{chunk}
+\index{Gunter, Elsa L.}
+\begin{chunk}{axiom.bib}
+@techreport{Gunt89,
+ author = "Gunter, Elsa L.",
+ title = {{Doing Algebra in Simple Type Theory}},
+ type = "technical report",
+ institution = "University of Pennsylvania",
+ year = "1989",
+ number = "MS-CIS-89-38",
+ abstract =
+ "To fully utilize the power of higherorder logic in interactive
+ theorem proving, it is desirable to be able to develop abstract areas
+ of Mathematics such as algebra and topology in an automated
+ setting. Theorem provers capable of higher-order reasoning have
+ generally had some form of type theory as theory object
+ language. But mathematicians have tended to use the language of set
+ theory to give definitions and prove theorems in algebra and
+ topology. In this paper, we give an incremental description of how to
+ express various basic algebraic concepts in terms of simple type
+ theory. We present a method for representing algebras, subalgebras,
+ quotient algebras, homomorphisms and isomorphisms in simple type theory,
+ using group theory as an example in each case. Following this we
+ discuss how to automatically apply such an abstract theory to concrete
+ examples. Finally, we conclude with some observations about a
+ potential inconvenience associated with this method of representation,
+ and discuss a difficulty inherent in any attempt to remove this
+ inconvenience.",
+ paper = "Gunt89.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Gunter, Elsa L.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Gunt89a,
+ author = "Gunter, Elsa L.",
+ title = {{Extensions to Logic Programming Motivated by the
+          Construction of a Generic Theorem Prover}},
+ booktitle = "Int. Workshop on Extensions of Logic Programming",
+ publisher = "Springer",
+ year = "1989",
+ pages = "223--244",
+ abstract =
+ "In this article, we discuss several possible extensions to
+ traditional logic programming languages. The specific extensions
+ proposed here fall into two categories: logical extensions and the
+ addition of constructs to allow for increased control. There is a
+ unifying theme to the proposed logical extensions, and that is the
+ scoped introduction of extensions to a programming context. More
+ specifically, these extensions are the ability to introduce variables
+ whose scope is limited to the term in which they occur (i.e. $\lambda$-bound
+ variables within $\lambda$-terms), the ability to introduce into a goal a
+ fresh constant whose scope is limited to the derivation of that goal,
+ and the ability to introduce into a goal a program clause whose scope
+ is limited to the derivation of that goal. The purpose of the
+ additions for increased control is to facilitate the raising and
+ handling of failures.
+
+ To motivate these various extensions, we have repeatedly appealed to
+ examples related to the construction of a generic theorem prover. It
+ is our thesis that this problem domain is specific enough to lend
+ focus when one is considering various language constructs, and yet
+ complex enough to encompass many of the general difficulties found in
+ other areas of symbolic computation.",
+ paper = "Gunt89a.pdf"
+}
+
+\end{chunk}
+
\index{Guttman, Joshua D.}
\index{Ramsdell, John D.}
\index{Wand, Mitchell}
@@ 13836,6 +14392,38 @@ when shown in factored form.
\end{chunk}
+\index{Hanus, Michael}
+\begin{chunk}{axiom.bib}
+@article{Hanu90,
+ author = "Hanus, Michael",
+ title = {{Compiling Logic Programs with Equality}},
+ journal = "LNCS",
+ volume = "456",
+ year = "1990",
+ pages = "387--401",
+ abstract =
+ "Horn clause logic with equality is an amalgamation of functional and
+ logic programming languages. A sound and complete operational
+ semantics for logic programs with equality is based on resolution to
+ solve literals, and rewriting and narrowing to evaluate functional
+ expressions. This paper proposes a technique for compiling programs
+ with these inference rules into programs of a lowlevel abstract
+ machine which can be efficiently executed on conventional
+ architectures. The presented approach is based on an extension of the
+ Warren abstract machine (WAM). In our approach pure logic programs
+ without function definitions are compiled in the same way as in the
+ WAMapproach, and for logic programs with function definitions
+ particular instructions are generated for occurrences of functions
+ inside clause bodies. In order to obtain an efficient implementation
+ of functional computations, a stack of occurrences of function symbols
+ in goals is managed by the abstract machine. The compiler generates
+ the necessary instructions for the efficient manipulation of the
+ occurrence stack from the given equational logic programs.",
+ paper = "Hanu90.pdf"
+}
+
+\end{chunk}
+
\index{Harper, Robert}
\index{Lillibridge, Mark}
\begin{chunk}{axiom.bib}
@@ 13873,6 +14461,19 @@ when shown in factored form.
\end{chunk}
+\index{Hatton, Les}
+\begin{chunk}{axiom.bib}
+@article{Hatt97,
+ author = "Hatton, Les",
+ title = {{Software Failure: Follies and Fallacies}},
+ journal = "IEEE Review",
+ volume = "43",
+ number = "2",
+ year = "1997",
+}
+
+\end{chunk}
+
\index{Hoare, C.A.R}
\begin{chunk}{axiom.bib}
@incollection{Hoar72,
@@ 13967,6 +14568,68 @@ when shown in factored form.
\subsection{J} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Johnson, C.W.}
+\begin{chunk}{axiom.bib}
+@article{John96,
+ author = "Johnson, C.W.",
+ title = {{Literate Specifications}},
+ journal = "Software Engineering Journal",
+ volume = "11",
+ number = "4",
+ year = "1996",
+ pages = "225--237",
+ paper = "John96.pdf",
+ abstract =
+ "The increasing scale and complexity of software is imposing serious
+ burdens on many industries. Formal notations, such as Z, VDM and
+ temporal logic, have been developed to address these problems. There
+ are, however, a number of limitations that restrict the use of
+ mathematical specifications for large-scale software development. Many
+ projects take months or years to complete. This creates difficulties
+ because abstract mathematical requirements cannot easily be used by
+ new members of a development team to understand past design
+ decisions. Formal specifications describe what software must do, they
+ do not explain why it must do it. In order to avoid these limitations,
+ a literate approach to software engineering is proposed. This
+ technique integrates a formal specification language and a semiformal
+ design rationale. The Z schema calculus is used to represent what a
+ system must do. In contrast, the Questions, Options and Criteria
+ notation is used to represent the justifications that lie behind
+ development decisions. Empirical evidence is presented that suggests
+ the integration of these techniques provides significant benefits over
+ previous approaches to mathematical analysis and design techniques. A
+ range of tools is described that have been developed to support our
+ literate approach to the specification of large-scale software systems.",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Peyton Jones, Simon}
+\begin{chunk}{axiom.bib}
+@inproceedings{Jone96,
+ author = "Peyton Jones, Simon",
+ title = {{Compiling Haskell by Program Transformation: A Report from
+ the Trenches}},
+ booktitle = "Proc. European Symposium on Programming",
+ year = "1996",
+ publisher = "Springer",
+ abstract =
+ "Many compilers do some of their work by means of
+ correctness-preserving, and hopefully performance-improving, program
+ transformations. The Glasgow Haskell Compiler (GHC) takes this idea
+ of 'compilation by transformation' as its war-cry, trying to express
+ as much as possible of the compilation process in the form of
+ program transformations.
+
+ This paper reports on our practical experience of the
+ transformational approach to compilation, in the context of a
+ substantial compiler.",
+ paper = "Jone96.pdf"
+}
+
+\end{chunk}
+
\subsection{K} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Kaes, Stefan}
@@ 14102,6 +14765,132 @@ when shown in factored form.
\end{chunk}
+\index{Klaeren, Herbert A.}
+\begin{chunk}{axiom.bib}
+@article{Klae84,
+ author = "Klaeren, Herbert A.",
+ title = {{A Constructive Method for Abstract Algebraic Software
+ Specification}},
+ journal = "Theoretical Computer Science",
+ volume = "30",
+ year = "1984",
+ pages = "139--204",
+ abstract =
+ "A constructive method for abstract algebraic software
+ specification is presented, where the operations are not
+ implicitly specified by equations but by an explicit recursion on
+ the generating operations of an algebra characterizing the
+ underlying data structure. The underlying algebra itself may be
+ equationally specified since we cannot assume that all data
+ structures will correspond to free algebras. This implies that we
+ distinguish between generating and defined operations and that the
+ underlying algebra has a mechanism of wellfounded decomposition
+ w.r.t. the generating operations. We show that the explicit
+ specification of operations using socalled structural recursive
+ schemata offers advantages over purely equational specifications,
+ especially concerning the safeness of enrichments, the ease of
+ semantics description and the separation between the underlying
+ data structure and the operations defined on it.",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kleene, Stephen Cole}
+\begin{chunk}{axiom.bib}
+@book{Klee52,
+ author = "Kleene, Stephen Cole",
+ title = {{Introduction to MetaMathematics}},
+ year = "1952",
+ publisher = "Ishi Press International",
+ isbn = "9780923891572",
+ paper = "Klee52.pdf"
+}
+
+\end{chunk}
+
+\index{Kohlhase, Michael}
+\index{De Feo, Luca}
+\index{Muller, Dennis}
+\index{Pfeiffer, Markus}
+\index{Rabe, Florian}
+\index{Thiery, Nicolas M.}
+\index{Vasilyev, Victor}
+\index{Wesing, Tom}
+\begin{chunk}{axiom.bib}
+@inproceedings{Kohl17,
+ author = "Kohlhase, Michael and De Feo, Luca and Muller, Dennis and
+ Pfeiffer, Markus and Rabe, Florian and Thiery, Nicolas M.
+ and Vasilyev, Victor and Wesing, Tom",
+ title = {{KnowledgeBased Interoperability for Mathematical Software
+ Systems}},
+ booktitle = "7th Int. Conf. on Mathematical Aspects of Computer and
+ Information Sciences",
+ publisher = "Springer",
+ year = "2017",
+ pages = "195--210",
+ isbn = "9783319724539",
+ abstract =
+ "There is a large ecosystem of mathematical software systems.
+ Individually, these are optimized for particular domains and
+ functionalities, and together they cover many needs of practical and
+ theoretical mathematics. However, each system specializes on one area,
+ and it remains very difficult to solve problems that need to involve
+ multiple systems. Some integrations exist, but they are ad hoc and have
+ scalability and maintainability issues. In particular, there is not
+ yet an interoperability layer that combines the various systems into a
+ virtual research environment (VRE) for mathematics.
+
+ The OpenDreamKit project aims at building a toolkit for such VREs. It
+ suggests using a central systemagnostic formalization of mathematics
+ (MathintheMiddle, MitM) as the needed interoperability layer. In
+ this paper, we conduct the first major case study that instantiates
+ the MitM paradigm for a concrete domain as well as a concrete set of
+ systems. Specifically, we integrate GAP, Sage, and Singular to perform
+ computation in group and ring theory.
+
+ Our work involves massive practical efforts, including a novel
+ formalization of computational group theory, improvements to the
+ involved software systems, and a novel mediating system that sits at
+ the center of a starshaped integration layout between mathematical
+ software systems.",
+ paper = "Kohl17.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kozen, Dexter}
+\index{Palsberg, Jens}
+\index{Schwartzbach, Michael I.}
+\begin{chunk}{axiom.bib}
+@article{Koze93,
+ author = "Kozen, Dexter and Palsberg, Jens and Schwartzbach, Michael I.",
+ title = {{Efficient Recursive Subtyping}},
+ journal = "Mathematical Structures in Computer Science",
+ volume = "5",
+ number = "1",
+ pages = "113--125",
+ year = "1995",
+ abstract =
+ "Subtyping in the presence of recursive types for the
+ $\lambda$calculus was studied by Amadio and Cardelli in 1991. In that
+ paper they showed that the problem of deciding whether one recursive
+ type is a subtype of another is decidable in exponential time. In
+ this paper we give an $O(n^2)$ algorithm. Our algorithm is based on a
+ simplification of the definition of the subtype relation, which allows
+ us to reduce the problem to the emptiness problem for a certain finite
+ automaton with quadratically many states. It is known that equality
+ of recursive types and the covariant B{\"o}hm order can be decided
+ efficiently by means of finite automata, since they are just language
+ equality and language inclusion, respectively. Our results extend the
+ automatatheoretic approach to handle orderings based on
+ contravariance.",
+ paper = "Koze93.pdf"
+}
+
+\end{chunk}
+
\index{Kranz, David}
\index{Kelsey, Richard}
\index{Rees, Jonathan}
@@ 14130,6 +14919,22 @@ when shown in factored form.
\end{chunk}
+\index{Kripke, Saul}
+\begin{chunk}{axiom.bib}
+@article{Krip75,
+ author = "Kripke, Saul",
+ title = {{Outline of a Theory of Truth}},
+ journal = "Journal of Philosophy",
+ volume = "72",
+ number = "19",
+ year = "1975",
+ pages = "690--716",
+ paper = "Krip75.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{L} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Lamport, Leslie}
@@ 14173,6 +14978,31 @@ when shown in factored form.
\end{chunk}
+\index{Letouzey, Pierre}
+\begin{chunk}{axiom.bib}
+@inproceedings{Leto04,
+ author = "Letouzey, Pierre",
+ title = {{A New Extraction for Coq}},
+ booktitle = "Types for Proofs and Programs. TYPES 2002",
+ publisher = "Springer",
+ pages = "617--635",
+ year = "2004",
+ abstract =
+ "We present here a new extraction mechanism for the Coq proof
+ assistant. By extraction, we mean automatic generation of
+ functional code from Coq proofs, in order to produce certified
+ programs. In former versions of Coq, the extraction mechanism
+ suffered several limitations and in particular worked only with a
+ subset of the language. We first discuss difficulties encountered
+ and solutions proposed to remove these limitations. Then we give a
+ proof of correctness for a theoretical model of the new
+ extraction. Finally we describe the actual implementation.",
+ paper = "Leto04.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{M} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Manolios, Panagiotis}
@@ 14206,6 +15036,47 @@ when shown in factored form.
\end{chunk}
+\index{Martelli, Alberto}
+\index{Montanari, Ugo}
+\begin{chunk}{axiom.bib}
+@article{Mart82,
+ author = "Martelli, Alberto and Montanari, Ugo",
+ title = {{An Efficient Unification Algorithm}},
+ journal = "ACM TOPLAS",
+ volume = "4",
+ number = "2",
+ pages = "258--282",
+ year = "1982",
+ abstract =
+ "The unification problem in firstorder predicate calculus is
+ described in general terms as the solution of a system of equations,
+ and a nondeterministic algorithm is given. A new unification
+ algorithm, characterized by having the acyclicity test efficiently
+ embedded into it, is derived from the nondeterministic one, and a
+ PASCAL implementation is given. A comparison with other wellknown
+ unification algorithms shows that the algorithm described here
+ performs well in all cases.",
+ paper = "Mart82.pdf"
+}
+
+\end{chunk}
+
+\index{McCarthy, John}
+\begin{chunk}{axiom.bib}
+@article{Mcca60,
+ author = "McCarthy, John",
+ title = {{Recursive Functions of Symbolic Expressions and Their
+ Computation by Machine, Part I}},
+ journal = "CACM",
+ volume = "3",
+ pages = "184--195",
+ year = "1960",
+ paper = "Mcca60.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{McCarthy, John}
\begin{chunk}{axiom.bib}
@incollection{Mcca63,
@@ 14349,6 +15220,38 @@ when shown in factored form.
\end{chunk}
+\index{Mitchell, John}
+\begin{chunk}{axiom.bib}
+@inproceedings{Mitc84,
+ author = "Mitchell, John",
+ title = {{Type Inference and Type Containment}},
+ booktitle = "Proc. Int. Symp. on Semantics of Data Types",
+ publisher = "Springer",
+ isbn = "3540133461",
+ year = "1984",
+ pages = "257--277",
+ abstract =
+ "Type inference, the process of assigning types to untyped
+ expressions, may be motivated by the design of a typed language or
+ semantical considerations on the meanings of types and expressions.
+ A typed language GR with polymorphic functions leads to the GR
+ inference rules. With the addition of an 'oracle' rule for equations
+ between expressions, the GR rules become complete for a general class
+ of semantic models of type inference. These inference models are
+ models of untyped lambda calculus with extra structure similar to
+ models of the typed language GR. A more specialized set of type
+ inference rules, the GRS rules, characterize semantic typing when the
+ functional type $\sigma \rightarrow \tau$ is interpreted as all elements of
+ the model that map $\sigma$ to $\tau$, and the polymorphic type
+ $\forall t.\sigma$ is interpreted as the intersection of all
+ $\sigma[\tau/t]$. Both inference systems may be reformulated
+ using rules for deducing containments between types. The advantage of
+ the type inference rules based on containments is that proofs
+ correspond more closely to the structure of terms.",
+ paper = "Mitc84.pdf"
+}
+
+\end{chunk}
+
\index{Mellies, PaulAndre}
\index{Zeilberger, Noam}
\begin{chunk}{axiom.bib}
@@ 14383,6 +15286,36 @@ when shown in factored form.
\end{chunk}
+\index{Moore, Andrew P.}
+\index{Payne Jr., Charles N.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Moor96,
+ author = "Moore, Andrew P. and Payne Jr., Charles N.",
+ title = {{Increasing Assurance with Literate Programming Techniques}},
+ booktitle = "Conf. on Computer Assurance COMPASS'96",
+ publisher = "National Institute of Standards and Technology",
+ year = "1996",
+ pages = "187--198",
+ abstract =
+ "The assurance argument that a trusted system satisfies its
+ information security requirements must be convincing, because the
+ argument supports the accreditation decision to allow the computer to
+ process classified information in an operational
+ environment. Assurance is achieved through understanding, but some
+ evidence that supports the assurance argument can be difficult to
+ understand. This paper describes a novel applica tion of a technique,
+ called literate programming [ll], that significantly improves the
+ readability of the assur ance argument while maintaining its
+ consistency with formal specifications that are input to specification
+ and verification systems. We describe an application c,f this
+ technique to a simple example and discuss the lessons learned from
+ this effort.",
+ paper = "Moor96.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Morgan, Carroll}
\begin{chunk}{axiom.bib}
@book{Morg98,
@@ 14465,6 +15398,45 @@ when shown in factored form.
\end{chunk}
+\index{Murthy, S.G.K}
+\index{Sekharam, K. Raja}
+\begin{chunk}{axiom.bib}
+@article{Murt09,
+ author = "Murthy, S.G.K and Sekharam, K. Raja",
+ title = {{Software Reliability through Theorem Proving}},
+ journal = "Defence Science Journal",
+ volume = "59",
+ number = "3",
+ year = "2009",
+ pages = "314--317",
+ abstract =
+ "Improving software reliability of missioncritical systems is
+ widely recognised as one of the major challenges. Early detection
+ of errors in software requirements, designs and implementation,
+ need rigorous verification and validation techniques. Several
+ techniques comprising static and dynamic testing approaches are
+ used to improve reliability of mission critical software; however
+ it is hard to balance development time and budget with software
+ reliability. Particularly using dynamic testing techniques, it is
+ hard to ensure software reliability, as exhaustive testing is not
+ possible. On the other hand, formal verification techniques
+ utilise mathematical logic to prove correctness of the software
+ based on given specifications, which in turn improves the
+ reliability of the software. Theorem proving is a powerful formal
+ verification technique that enhances the software reliability for
+ mission critical aerospace applications. This paper discusses the
+ issues related to software reliability and theorem proving used to
+ enhance software reliability through formal verification
+ technique, based on the experiences with STeP tool, using the
+ conventional and internationally accepted methodologies, models,
+ theorem proving techniques available in the tool without proposing
+ a new model.",
+ paper = "Murt09.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Myreen, Magnus O.}
\index{Davis, Jared}
\begin{chunk}{axiom.bib}
@@ 14637,6 +15609,32 @@ when shown in factored form.
\subsection{N} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Nederpelt, Rob}
+\index{Geuvers, Herman}
+\begin{chunk}{axiom.bib}
+@book{Nede14,
+ author = "Nederpelt, Rob and Geuvers, Herman",
+ title = {{Type Theory and Formal Proof}},
+ year = "2014",
+ publisher = "Cambridge University Press",
+ isbn = "9781107036505"
+}
+
+\end{chunk}
+
+\index{Nissanke, Nimal}
+\begin{chunk}{axiom.bib}
+@book{Niss99,
+ author = "Nissanke, Nimal",
+ title = {{Formal Specification}},
+ publisher = "Springer",
+ year = "1999",
+ isbn = "9781852330026",
+ paper = "Niss99.pdf"
+}
+
+\end{chunk}
+
\subsection{O} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{O'Donnell, Michael J.}
@@ 14674,13 +15672,138 @@ when shown in factored form.
\end{chunk}
+\index{Okasaki, Chris}
+\begin{chunk}{axiom.bib}
+@phdthesis{Okas96,
+ author = "Okasaki, Chris",
+ title = {{Purely Functional Data Structures}},
+ school = "Carnegie Mellon University",
+ year = "1996",
+ link = "\url{http://www.cs.cmu.edu/~rwh/theses/okasaki.pdf}",
+ comment = "CMU-CS-96-177",
+ isbn = "9780521663502",
+ abstract =
+ "When a C programmer needs an efficient data structure for a
+ particular problem, he or she can often simply look one up in any of
+ a number of good textbooks or handbooks. Unfortunately, programmers
+ in functional languages such as Standard ML or Haskell do not have
+ this luxury. Although some data structures designed for imperative
+ languages such as C can be quite easily adapted to a functional
+ setting, most cannot, usually because they depend in crucial ways on
+ assignments, which are disallowed, or at least discouraged, in
+ functional languages. To address this imbalance, we describe several
+ techniques for designing functional data structures, and numerous
+ original data structures based on these techniques, including multiple
+ variations of lists, queues, doubleended queues, and heaps, many
+ supporting more exotic features such as random access or efficient
+ catenation.
+
+ In addition, we expose the fundamental role of lazy evaluation in
+ amortized functional data structures. Traditional methods of
+ amortization break down when old versions of a data structure, not
+ just the most recent, are available for further processing. This
+ property is known as persistence , and is taken for granted in
+ functional languages. On the surface, persistence and amortization
+ appear to be incompatible, but we show how lazy evaluation can be used
+ to resolve this conflict, yielding amortized data structures that are
+ efficient even when used persistently. Turning this relationship
+ between lazy evaluation and amortization around, the notion of
+ amortization also provides the first practical techniques for
+ analyzing the time requirements of nontrivial lazy programs.
+
+ Finally, our data structures offer numerous hints to programming
+ language designers, illustrating the utility of combining strict and
+ lazy evaluation in a single language, and providing nontrivial
+ examples using polymorphic recursion and higherorder, recursive
+ modules.",
+ paper = "Okas96.pdf"
+}
+
+\end{chunk}
+
\subsection{P} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Parnas, David L.}
+\begin{chunk}{axiom.bib}
+@article{Parn72,
+ author = "Parnas, David L.",
+ title = {{A Technique for Software Module Specification with
+ Examples}},
+ journal = "CACM",
+ volume = "15",
+ number = "5",
+ year = "1972",
+ pages = "330--336",
+ abstract =
+ "This paper presents an approach to writing specifications for
+ parts of software systems. The main goal is to provide
+ specifications sufficiently precise and complete that other pieces
+ of software can be written to interact with the piece specified
+ without additional information. The secondary goal is to include
+ in the specification no more information than necessary to meet
+ the first goal. The technique is illustrated by means of a variety
+ of examples from a tutorial system.",
+ paper = "Parn72.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Parnas, David}
+\begin{chunk}{axiom.bib}
+@incollection{Parn10,
+ author = "Parnas, David",
+ title = {{Precise Documentation: The Key to Better Software}},
+ booktitle = "The Future of Software Engineering",
+ year = "2010",
+ pages = "125--148",
+ abstract =
+ "The prime cause of the sorry 'state of the art' in software
+ development is our failure to produce good design documentation. Poor
+ documentation is the cause of many errors and reduces efficiency in
+ every phase of a software product's development and use. Most software
+ developers believe that 'documentation' refers to a collection of
+ wordy, unstructured, introductory descriptions, thousands of pages
+ that nobody wanted to write and nobody trusts. In contrast, Engineers
+ in more traditional disciplines think of precise blueprints, circuit
+ diagrams, and mathematical specifications of component
+ properties. Software developers do not know how to produce precise
+ documents for software. Software developers also think that
+ documentation is something written after the software has been
+ developed. In other fields of Engineering much of the documentation is
+ written before and during the development. It represents forethought
+ not afterthought. Among the benefits of better documentation would be:
+ easier reuse of old designs, better communication about requirements,
+ more useful design reviews, easier integration of separately written
+ modules, more effective code inspection, more effective testing, and
+ more efficient corrections and improvements. This paper explains how
+ to produce and use precise software documentation and illustrate the
+ methods with several examples.",
+ paper = "Parn10.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Paulson, Lawrence C.}
+\begin{chunk}{axiom.bib}
+@incollection{Paul90a,
+ author = "Paulson, Lawrence C.",
+ title = {{Isabelle: The Next 700 Theorem Provers}},
+ booktitle = "Logic and Computer Science",
+ publisher = "Academic Press",
+ pages = "361--386",
+ year = "1990",
+ paper = "Paul90a.pdf"
+}
+
+\end{chunk}
+
\index{Paulson, L.C.}
\begin{chunk}{axiom.bib}
@book{Paul96,
author = "Paulson, L.C.",
 title = {{ML for the WOrking Programmer 2nd Edition}},
+ title = {{ML for the Working Programmer 2nd Edition}},
year = "1996",
publisher = "Cambridge University Press",
isbn = "052156543X"
@@ 14721,6 +15844,59 @@ when shown in factored form.
\end{chunk}
\index{Pfenning, Frank}
+\begin{chunk}{axiom.bib}
+@misc{Pfen04,
+ author = "Pfenning, Frank",
+ title = {{Automated Theorem Proving}},
+ year = "2004",
+ comment = "Draft of Spring 2004",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Pfenning, Frank}
+\begin{chunk}{axiom.bib}
+@phdthesis{Pfen87,
+ author = "Pfenning, Frank",
+ title = {{Proof Transformations in HigherOrder Logic}},
+ year = "1987",
+ school = "Carnegie Mellon University",
+ link = "\url{http://wwwcgi.cs.cmu.edu/~fp/papers/thesis87.pdf}",
+ abstract =
+ "We investigate the problem of translating between different styles of
+ proof systems in higherorder logic: analytic proofs which are well
+ suited for automated theorem proving, and nonanalytic deductions
+ which are well suited for the mathematician. Analytic proofs are
+ represented as expansion proofs, $H$ , a form of the sequent calculus we
+ define, nonanalytic proofs are represented by natural deductions. A
+ nondeterministic translation algorithm between expansion proofs and
+ $H$deductions is presented and its correctness is proven. We also
+ present an algorithm for translation in the other direction and prove
+ its correctness. A cutelimination algorithm for expansion proofs is
+ given and its partial correctness is proven. Strong termination of
+ this algorithm remains a conjecture for the full higherorder
+ system, but is proven for the firstorder fragment. We extend the
+ translations to a nonanalytic proof system which contains a primitive
+ notion of equality, while leaving the notion of expansion proof
+ unaltered. This is possible, since a nonextensional equality is
+ definable in our system of type theory. Next we extend analytic and
+ nonanalytic proof systems and the translations between them to
+ include extensionality. Finally, we show how the methods and notions
+ used so far apply to the problem of translating expansion proofs into
+ natural deductions. Much care is taken to specify this translation in
+ a modular way (through tactics) which leaves a large number of choices
+ and therefore a lot of room for heuristics to produce elegant natural
+ deductions. A practically very useful extension, called symmetric
+ simplification, produces natural deductions which make use of lemmas
+ and are often much more intuitive than the normal deductions which
+ would be created by earlier algorithms.",
+ paper = "Pfen87.pdf"
+}
+
+\end{chunk}
+
+\index{Pfenning, Frank}
\index{Elliott, Conal}
\begin{chunk}{axiom.bib}
@inproceedings{Pfen88a,
@@ 14882,6 +16058,39 @@ when shown in factored form.
\end{chunk}
+\index{Pottier, Francois}
+\begin{chunk}{axiom.bib}
+@phdthesis{Pott98,
+ author = "Pottier, Francois",
+ title = {{Type Inference in the Presence of Subtyping: From Theory
+ to Practice}},
+ school = "Universite Paris 7",
+ year = "1998",
+ comment = "INRIA Research Report RR3483",
+ abstract =
+ "From a purely theoretical point of view, type inference for a
+ functional language with parametric polymorphism and subtyping
+ poses little difficulty. Indeed, it suffices to generalize the
+ inference algorithm used in the ML language, so as to deal with
+ type inequalities, rather than equalities. However, the number of
+ such inequalities is linear in the program size  whence, from a
+ practical point of view, a serious efficiency and readability
+ problem.
+
+ To solve this problem, one must simplify the inferred
+ constraints. So, after studying the logical properties of
+ subtyping constraints, this work proposes several simplification
+ algorithms. They combine seamlessly, yielding a homogeneous, fully
+ formal framework, which directly leads to an efficient
+ implementation. Although this theoretical study is performed in a
+ simplified setting, numerous extensions are possible. Thus, this
+ framework is realistic and should allow a practical appearance of
+ subtyping in languages with type inference.",
+ paper = "Pott98.pdf"
+}
+
+\end{chunk}
+
\subsection{Q} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\subsection{R} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ 14925,6 +16134,19 @@ when shown in factored form.
\end{chunk}
+\index{Robinson, Alan}
+\index{Voronkov, Andrei}
+\begin{chunk}{axiom.bib}
+@book{Robi01,
+ author = "Robinson, Alan and Voronkov, Andrei",
+ title = {{Handbook of Automated Reasoning (2 Volumes)}},
+ year = "2001",
+ publisher = "MIT Press",
+ isbn = "0262182238"
+}
+
+\end{chunk}
+
\subsection{S} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Sabry, Amr}
@@ 14993,6 +16215,26 @@ when shown in factored form.
\end{chunk}
+\index{Schmitt, P.H.}
+\begin{chunk}{axiom.bib}
+@article{Schm86,
+ author = "Schmitt, P.H.",
+ title = {{Computational Aspects of ThreeValued Logic}},
+ journal = "LNCS",
+ volume = "230",
+ year = "1986",
+ abstract =
+ "This paper investigates a threevalued logic $L_3$, that has been
+ introduced in the study of natural language semantics. A complete
+ proof system based on a threevalued analogon of negative resolution
+ is presented. A subclass of $L_3$ corresponding to Horn clauses in
+ twovalued logic is defined. Its model theoretic properties are
+ studied and it is shown to admit a PROLOGstyle proof procedure.",
+ paper = "Schm86.pdf"
+}
+
+\end{chunk}
+
\index{Scott, Dana S.}
\index{Strachey, C.}
\begin{chunk}{axiom.bib}
@@ 15065,6 +16307,19 @@ when shown in factored form.
\end{chunk}
+\index{Smith, Peter}
+\begin{chunk}{axiom.bib}
+@misc{Smit15,
+ author = "Smith, Peter",
+ title = {{Some Big Books on Mathematical Logic}},
+ year = "2015",
+ link = "\url{}",
+ paper = "Smit15.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Sokolowski, Stefan}
\begin{chunk}{axiom.bib}
@article{Soko87,
@@ 15266,6 +16521,18 @@ when shown in factored form.
\subsection{V} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Vienneau, Robert L.}
+\begin{chunk}{axiom.bib}
+@misc{Vien93,
+ author = "Vienneau, Robert L.",
+ title = {{A Review of Formal Methods}},
+ year = "1993",
+ paper = "Vien93.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{W} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Wadler, Philip}
@@ 15438,6 +16705,41 @@ when shown in factored form.
\end{chunk}
+\index{Weirich, Jim}
+\begin{chunk}{axiom.bib}
+@misc{Weir12,
+ author = "Weirich, Jim",
+ title = {{Y Not?  Adventures in Functional Programming}},
+ year = "2012",
+ link = "\url{https://www.infoq.com/presentations/YCombinator}",
+ abstract = "Explains the YCombinator"
+}
+
+\end{chunk}
+
+\index{Wiedijk, Freek}
+\begin{chunk}{axiom.bib}
+@misc{Wied18,
+ author = "Wiedijk, Freek",
+ title = {{Formalizing 100 Theorems}},
+ link = "\url{http://www.cs.ru.nl/~freek/100/}",
+ year = "2018"
+}
+
+\end{chunk}
+
+\index{Wiedijk, Freek}
+\begin{chunk}{axiom.bib}
+@misc{Wied08,
+ author = "Wiedijk, Freek",
+ title = {{Formal Proof  Getting Started}},
+ link = "\url{https://www.ams.org/notices/200811/tx081101408p.pdf}",
+ year = "2008",
+ paper = "Wied08.pdf"
+}
+
+\end{chunk}
+
\subsection{X} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\subsection{Y} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ 15495,6 +16797,35 @@ when shown in factored form.
\end{chunk}
+\index{Zeilberger, Doron}
+\begin{chunk}{axiom.bib}
+@article{Zeil10,
+ author = "Zeilberger, Doron",
+ title = {{Against Rigor}},
+ journal = "LNCS",
+ volume = "6167",
+ year = "2010",
+ pages = "262262",
+ abstract =
+ "The ancient Greeks gave (western) civilization quite a few
+ gifts, but we should beware of Greeks bearing gifts. The gifts of theatre
+ and democracy were definitely good ones, and perhaps even the gift of
+ philosophy, but the 'gift' of the socalled 'axiomatic method' and the
+ notion of 'rigorous' proof did much more harm than good. If we want
+ to maximize Mathematical Knowledge, and its Management, we have to
+ return to Euclid this dubious gift, and giveup our fanatical insistence
+ on perfect rigor. Of course, we should not go to the other extreme, of
+ demanding that everything should be nonrigorous. We should encourage
+ diversity of proofstyles and rigor levels, and remember that nothing is
+ absolutely sure in this world, and there does not exist an absolutely
+ rigorous proof, nor absolute certainty, and 'truth' has many shades and
+ levels.",
+ paper = "Zeil10.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Zeilberger, Noam}
\begin{chunk}{axiom.bib}
@misc{Zeil16,
@@ 15518,6 +16849,37 @@ when shown in factored form.
\end{chunk}
+\index{Zimmer, Jurgen}
+\index{Dennis, Louise A.}
+\begin{chunk}{axiom.bib}
+@article{Zimm02,
+ author = "Zimmer, Jurgen and Dennis, Louise A.",
+ title = {{Inductive Theorem Proving and Computer Algebra in the
+ MathWeb Software Bus}},
+ journal = "LNCS",
+ volume = "2385",
+ year = "2002",
+ abstract =
+ "Reasoning systems have reached a high degree of maturity in the last
+ decade. However, even the most successful systems are usually not
+ general purpose problem solvers but are typically specialised on
+ problems in a certain domain. The MathWeb Software Bus (MathWebSB) is a
+ system for combining reasoning specialists via a common software bus.
+ We describe the integration of the $\lambda$Clam system, a reasoning
+ specialist for proofs by induction, into the MathWebSB. Due to this
+ integration, $\lambda$Clam now offers its theorem proving expertise
+ to other systems in the MathWebSB. On the other hand,
+ $\lambda$Clam can use the
+ services of any reasoning specialist already integrated. We focus on
+ the latter and describe first experiments on proving theorems by
+ induction using the computational power of the Maple system within
+ $\lambda$Clam.",
+ paper = "Zimm02.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\section{Proving Axiom Correct  Coercion in CASProof Systems} %
\index{Sozeau, Matthieu}
@@ 15808,7 +17170,7 @@ when shown in factored form.
@InProceedings{Adam01,
author = "Adams, Andrew A. and Dunstan, Martin and Gottlieben, Hanne and
Kelsey, Tom and Martin, Ursula and Owre, Sam",
 title = {{Computer algebra meets automated theorem proving: Integrating
+ title = {{Computer Algebra meets Automated Theorem Proving: Integrating
Maple and PVS}},
booktitle = "Theorem proving in higher order logics",
series = "TPHOLs 2001",
@@ 16196,6 +17558,60 @@ when shown in factored form.
\end{chunk}
+\index{Ballerin, Clemens}
+\begin{chunk}{axiom.bib}
+@phdthesis{Ball99a,
+ author = "Ballerin, Clemens",
+ title = {{Computer Algebra and Theorem Proving}},
+ school = "Darwin College, University of Cambridge",
+ year = "1999",
+ abstract =
+ "Is the use of computer algebra technology beneficial for
+ mechanised reasoning in and about mathematical domains? Usually
+ it is assumed that it is. Many works in this area, however, either
+ have little reasoning content, or use symbolic computation only to
+ simplify expressions. In work that has achieved more, the used
+ methods do not scale up. They trust the computer algebra system
+ either too much or too little.
+
+ Computer algebra systems are not as rigorous as many provers. They
+ are not logically sound reasoning systems, but collections of
+ algorithms. We classify soundness problems that occur in computer
+ algebra systems. While many algorithms and their implementations
+ are perfectly trustworthy, the semantics of symbols is often
+ unclear and leads to errors. On the other hand, more robust
+ approaches to interface external reasoners to provers are not
+ always practical because the mathematical depth of proofs
+ algorithms in computer algebra are based on can be enormous.
+
+ Our own approach takes both trustworthiness of the overall system
+ and efficiency into account. It relies on using only reliable
+ parts of a computer algebra system, which can be achieved by
+ choosing a suitable library, and deriving specifications for these
+ algorithms from their literature.
+
+ We design and implement an interface between the prover Isabelle
+ and the computer algebra library Sumit and use it to prove
+ nontrivial theorems from coding theory. This is based on the
+ mechanisation of the algebraic theories of rings and
+ polynomials. Coding theory is an area where proofs do have a
+ substantial amount of computational content. Also, it is realistic
+ to assume that the verification of an encoding or decoding device
+ could be undertaken in, and indeed, be simplified by, such a
+ system.
+
+ The reason why semantics of symbols is often unclear in current
+ computer algebra systems is not mathematical difficulty, but the
+ design of those systems. For Gaussian elimination we show how the
+ soundness problem can be fixed by a small extension, and without
+ losing efficiency. This is a prerequisite for the efficient use of
+ the algorithm in a prover.",
+ paper = "Ball99a.pdf",
+ keywords = "axiomref, printed"
+}
+
+\end{chunk}
+
\index{Barendregt, Henk}
\index{Cohen, Arjeh M.}
\begin{chunk}{axiom.bib}
@@ 16310,7 +17726,7 @@ when shown in factored form.
\index{Beeson, Michael J.}
\begin{chunk}{axiom.bib}
@InCollection{Bees89,
+@InCollection{Bees89a,
author = "Beeson, Michael J.",
title = {{Logic and Computation in MATHPERT: An Expert System for
Learning Mathematics}},
@@ 16352,7 +17768,8 @@ when shown in factored form.
limitations of Prolog, and ``Schubert's Steamroller.'' An extension of
GENTZEN also provides a decision procedure for intuitionistic
propositional calculus (but at some cost in efficiency).",
 paper = "Bees89a.pdf"
+ paper = "Bees89a.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 16372,7 +17789,37 @@ when shown in factored form.
and onevariable calculus. It permits students to direct the
stepbystep solution of problems, and is capable of helping them
by solving the problems itself if necessary.",
 paper = "Bees92.pdf"
+ paper = "Bees92.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Beeson, Michael}
+\begin{chunk}{axiom.bib}
+@article{Bees95,
+ author = "Beeson, Michael",
+ title = {{Using Nonstandard Analysis to Ensure the Correctness of
+ Symbolic Computations}},
+ journal = "Int. J. of Foundations of Computer Science",
+ volume = "6",
+ number = "3",
+ pages = "299338",
+ year = "1995",
+ abstract =
+ "Nonstandard analysis has been put to use in a theoremprover,
+ where it assists in the analysis of formulae involving limits. The
+ theoremprover in question is used in the computer program
+ Mathpert to ensure the correctness of calculations in
+ calculus. Although nonstandard analysis is widely viewed as
+ nonconstructive, it can alternately be viewed as a method of
+ reducing logical manipulation (of formulae with quantifiers) to
+ computation (with rewrite rules). We give a logical theory of
+ nonstandard analysis which is implemented in Mathpert. We describe
+ a procedure for 'elimination of infinitesimals' (also implemented
+ in Mathpert) and prove its correctness.",
+ paper = "Bees95.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 16402,6 +17849,112 @@ when shown in factored form.
\end{chunk}
+\index{Beeson, Michael}
+\index{Wiedijk, Freek}
+\begin{chunk}{axiom.bib}
+@article{Bees02,
+ author = "Beeson, Michael and Wiedijk, Freek",
+ title = {{The Meaning of Infinity in Calculus and Computer Algebra
+ Systems}},
+ journal = "LNCS",
+ volume = "2385",
+ year = "2002",
+ abstract =
+ "We use filters of open sets to provide a semantics justifying the
+ use of infinity in informal limit calculations in calculus, and in
+ the same kind of calculations in computer algebra. We compare the
+ behavior of these filters to the way Mathematica behaves when
+ calculating with infinity.
+
+ We stress the need to have a proper semantics for computer algebra
+ expressions, especially if one wants to use results and methods
+ from computer algebra in theorem provers. The computer algebra
+ method under discussion in this paper is the use of rewrite rules
+ to evaluate limits involving infinity.",
+ paper = "Bees02.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Beeson, Michael}
+\begin{chunk}{axiom.bib}
+@article{Bees06,
+ author = "Beeson, Michael",
+ title = {{Mathematical Induction in OtterLambda}},
+ journal = "J. Automated Reasoning",
+ volume = "36",
+ number = "4",
+ pages = "311-344",
+ abstract =
+ "Otterlambda is Otter modified by adding code to implement an
+ algorithm for lambda unification. Otter is a resolutionbased,
+ clauselanguage firstorder prover that accumulates deduced clauses
+ and uses strategies to control the deduction and retention of
+ clauses. This is the first time that such a firstorder prover has
+ been combined in one program with a unification algorithm capable of
+ instantiating variables to lambda terms to assist in the
+ deductions. The resulting prover has all the advantages of the
+ proofsearch algorithm of Otter (speed, variety of inference rules,
+ excellent handling of equality) and also the power of lambda
+ unification. We illustrate how these capabilities work well together
+ by using Otterlambda to find proofs by mathematical induction. Lambda
+ unification instantiates the induction schema to find a useful
+ instance of induction, and then Otter's firstorder reasoning can be
+ used to carry out the base case and induction step. If necessary,
+ induction can be used for those, too. We present and discuss a variety
+ of examples of inductive proofs found by Otterlambda: some in pure
+ Peano arithmetic, some in Peano arithmetic with defined predicates,
+ some in theories combining algebra and the natural numbers, some
+ involving algebraic simplification (used in the induction step) by
+ simplification code from MathXpert, and some involving list induction
+ instead of numerical induction. These examples demonstrate the
+ feasibility and usefulness of adding lambda unification to a
+ firstorder prover.",
+ paper = "Bees06.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Beeson, Michael}
+\begin{chunk}{axiom.bib}
+@misc{Bees14,
+ author = "Beeson, M.",
+ title = {{Mixing Computations and Proofs}},
+ comment = "slides",
+ year = "2014",
+ link = "\url{http://www.cs.ru.nl/qed20/slides/beesonqed20.pdf}",
+ paper = "Bees14.pdf"
+}
+
+\end{chunk}
+
+\index{Beeson, M.}
+\begin{chunk}{axiom.bib}
+@article{Bees16,
+ author = "Beeson, M.",
+ title = {{Mixing Computations and Proofs}},
+ journal = "J. of Formalized Reasoning",
+ volume = "9",
+ number = "1",
+ pages = "7199",
+ year = "2016",
+ link =
+ "\url{http://www.michaelbeeson.com/research/papers/ProofAndComputation.pdf}",
+ abstract =
+ "We examine the relationship between proof and computation in
+ mathematics, especially in formalized mathematics. We compare the
+ various approaches to proofs with a significant computational
+ component, including (i) verifying the algorithms, (ii) verifying the
+ results of the unverified algorithms, and (iii) trusting an external
+ computation.",
+ paper = "Bees16.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Benke, Marcin}
\index{Dybjer, Peter}
\index{Jansson, Patrik}
@@ 16856,6 +18409,42 @@ when shown in factored form.
\end{chunk}
\index{Bove, Ana}
+\index{Capretta, Venanzio}
+\begin{chunk}{axiom.bib}
+@article{Bove05,
+ author = "Bove, Ana and Capretta, Venanzio",
+ title = {{Modelling General Recursion in Type Theory}},
+ journal = "Math. Struct. in Comp. Science",
+ volume = "15",
+ pages = "671708",
+ year = "2005",
+ abstract =
+ "Constructive type theory is an expressive programming language in
+ which both algorithms and proofs can be represented. A limitation of
+ constructive type theory as a programming language is that only
+ terminating programs can be defined in it. Hence, general recursive
+ algorithms have no direct formalisation in type theory since they
+ contain recursive calls that satisfy no syntactic condition
+ guaranteeing termination. In this work, we present a method to
+ formalise general recursive algorithms in type theory. Given a general
+ recursive algorithm, our method is to define an inductive
+ specialpurpose accessibility predicate that characterises the inputs
+ on which the algorithm terminates. The typetheoretic version of the
+ algorithm is then defined by structural recursion on the proof that
+ the input values satisfy this predicate. The method separates the
+ computational and logical parts of the definitions and thus the
+ resulting typetheoretic algorithms are clear, compact and easy to
+ understand. They are as simple as their equivalents in a functional
+ programming language, where there is no restriction on recursive
+ calls. Here, we give a formal definition of the method and discuss its
+ power and its limitations.",
+ paper = "Bove05.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Bove, Ana}
\index{Dybjer, Peter}
\begin{chunk}{axiom.bib}
@misc{Bove08,
@@ 17054,6 +18643,20 @@ when shown in factored form.
\end{chunk}
+\index{de Bruijn, N.G.}
+\begin{chunk}{axiom.bib}
+@techreport{Brui68a,
+ author = "de Bruijn, N.G.",
+ title = {{AUTOMATH, A Language for Mathematics}},
+ year = "1968",
+ type = "technical report",
+ number = "68WSK05",
+ institution = "Technische Hogeschool Eindhoven",
+ paper = "Brui68a.pdf"
+}
+
+\end{chunk}
+
\index{Buchberger, Bruno}
\begin{chunk}{axiom.bib}
@article{Buch97,
@@ 17584,6 +19187,56 @@ when shown in factored form.
\end{chunk}
\index{Carette, Jacques}
+\begin{chunk}{axiom.bib}
+@article{Care10,
+ author = "Carette, Jacques",
+ title = {{Mechanized Mathematics}},
+ journal = "LNCS",
+ volume = "6167",
+ year = "2010",
+ pages = "157157",
+ abstract =
+ "In the 50 years since McCarthy’s 'Recursive Functions of Symbolic
+ Expressions and Their Computation by Machine', what have we
+ learned about the realization of Leibniz’s dream of just being
+ able to utter 'Calculemus!' (Let us calculate!) when faced with a
+ mathematical dilemma?
+
+ In this talk, I will first present what I see as the most
+ important lessons from the past which need to be heeded by modern
+ designers. From the present, I will look at the context in which
+ computers are used, and derive further requirements. In
+ particular, now that computers are no longer the exclusive
+ playground for highly educated scientists, usability is now more
+ important than ever, and justifiably so.
+
+ I will also examine what I see as some principal failings of
+ current systems, primarily to understand some major mistakes to
+ avoid. These failings will be analyzed to extract what seems to be
+ the root mistake, and I will present my favourite solutions.
+
+ Furthermore, various technologies have matured since the creation
+ of many of our systems, and whenever appropriate, these should be
+ used. For example, our understanding of the structure of
+ mathematics has significantly increased, yet this is barely
+ reflected in our libraries. The extreme focus on efficiency by the
+ computer algebra community, and correctness by the (interactive)
+ theorem proving community should no longer be considered viable
+ long term strategies. But how does one effectively bridge that
+ gap?
+
+ I personally find that a number of (programming) languagebased
+ solutions are particularly effective, and I will emphasize
+ these. Solutions to some of these problems will be illustrated
+ with code from a prototype of MathScheme 2.0, the system I am
+ developing with Bill Farmer and our research group.",
+ paper = "Care10.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Carette, Jacques}
\index{Farmer, William M.}
\index{Jeremic, Filip}
\index{Maccio, Vincent}
@@ 17874,6 +19527,34 @@ when shown in factored form.
\end{chunk}
+\index{Comon, Hubert}
+\index{Lescanne, Pierre}
+\begin{chunk}{axiom.bib}
+@article{Como89,
+ author = "Comon, Hubert and Lescanne, Pierre",
+ title = {{Equational Problems and Disunification}},
+ journal = "J. Symbolic Computation",
+ volume = "7",
+ number = "34",
+ year = "1989",
+ pages = "371425",
+ abstract =
+ "Roughly speaking, an equational problem is a first order formula
+ whose only predicate symbol is $=$. We propose some rules for the
+ transformation of equational problems and study their correctness in
+ various models. Then, we give completeness results with respect to
+ some 'simple' problems called solved forms. Such completeness results
+ still hold when adding some control which moreover ensures
+ termination. The termination proofs are given for a 'weak' control and
+ thus hold for the (large) class of algorithms obtained by restricting
+ the scope of the rules. Finally, it must be noted that a byproduct of
+ our method is a decision procedure for the validity in the Herbrand
+ Universe of any first order formula with the only predicate symbol $=$.",
+ paper = "Como89.pdf"
+}
+
+\end{chunk}
+
\index{Constable, R.L.}
\index{Allen, S.F.}
\index{Bromley, H.M.}
@@ 18181,6 +19862,25 @@ when shown in factored form.
\end{chunk}
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@article{Dave08a,
+ author = "Davenport, James H.",
+ title = {{AISC Meets Natural Typography}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "McDermott [12,13] introduced the concept 'Artificial Intelligence
+ meets Natural Stupidity'. In this paper, we explore how Artificial
+ Intelligence and Symbolic Computation can meet Natural Typography, and
+ how the conventions for expressing mathematics that humans
+ understand can cause us difficulties when designing mechanised
+ systems.",
+ paper = "Dave08a.pdf"
+}
+
+\end{chunk}
\index{Davenport, James}
\begin{chunk}{axiom.bib}
@@ 19153,7 +20853,8 @@ when shown in factored form.
title = {{From LCF to HOL: a short history}},
year = "1996",
link = "\url{http://www.cl.cam.ac.uk/~mjcg/papers/HolHistory.pdf}",
 paper = "Gord96.pdf"
+ paper = "Gord96.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 19477,7 +21178,8 @@ when shown in factored form.
introduction to the concepts involved in reflection and a survey of
relevant work. To this end, a rather extensive bibliography is
provided.",
 paper = "Harr95.pdf"
+ paper = "Harr95.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 19904,6 +21606,74 @@ when shown in factored form.
\index{Kaliszyk, Cezary}
\begin{chunk}{axiom.bib}
+@article{Kali08,
+ author = "Kaliszyk, Cezary",
+ title = {{Automating Side Conditions in Formalized Partial Functions}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "Assumptions about the domains of partial functions are necessary in
+ stateoftheart proof assistants. On the other hand when
+ mathematicians write about partial functions they tend not to
+ explicitly write the side conditions. We present an approach to
+ formalizing partiality in real and complex analysis in total
+ frameworks that allows keeping the side conditions hidden from the
+ user as long as they can be computed and simplified
+ automatically. This framework simplifies defining and operating on
+ partial functions in formalized real analysis in HOL Light. Our
+ framework allows simplifying expressions under partiality conditions
+ in a proof assistant in a manner that resembles computer algebra
+ systems.",
+ paper = "Kali08.pdf"
+}
+
+\end{chunk}
+
+\index{Kaliszyk, Cezary}
+\index{Wiedijk, Freek}
+\begin{chunk}{axiom.bib}
+@article{Kali08a,
+ author = "Kaliszyk, Cezary and Wiedijk, Freek",
+ title = {{Merging Procedural and Declarative Proof}},
+ journal = "LNCS",
+ volume = "5497",
+ year = "2008",
+ abstract =
+ "There are two different styles for writing natural deduction proofs:
+ the 'Gentzen' style in which a proof is a tree with the conclusion at
+ the root and the assumptions at the leaves, and the 'Fitch' style
+ (also called ‘flag’ style) in which a proof consists of lines that are
+ grouped together in nested boxes.
+
+ In the world of proof assistants these two kinds of natural deduction
+ correspond to procedural proofs (tactic scripts that work on one or
+ more subgoals, like those of the Coq, HOL and PVS systems), and
+ declarative proofs (like those of the Mizar and Isabelle/Isar
+ languages).
+
+ In this paper we give an algorithm for converting tree style proofs to
+ flag style proofs. We then present a rewrite system that simplifies
+ the results.
+
+ This algorithm can be used to convert arbitrary procedural proofs to
+ declarative proofs. It does not work on the level of the proof terms
+ (the basic inferences of the system), but on the level of the
+ statements that the user sees in the goals when constructing the
+ proof.
+
+ The algorithm from this paper has been implemented in the ProofWeb
+ interface to Coq. In ProofWeb a proof that is given as a Coq proof
+ script (even with arbitrary Coq tactics) can be displayed both as a
+ tree style and as a flag style proof.",
+ paper = "Kali08a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kaliszyk, Cezary}
+\begin{chunk}{axiom.bib}
@phdthesis{Kali09,
author = "Kaliszyk, Cezary",
title = {{Correctness and Availability: Building Computer Algebra on top
@@ 20383,7 +22153,7 @@ when shown in factored form.
work, these implementations could only be verified with significant
manual effort.",
paper = "Wony18.pdf",
 keywords = "printed"
+ keywords = "printed, reviewed"
}
\end{chunk}
@@ 20912,7 +22682,7 @@ when shown in factored form.
dependent type language extension. The library is implemented in the
existing Haskell, by 'hiding' a certain part of the existing Prelude.",
paper = "Mesh10.pdf",
 keywords = "axiomref"
+ keywords = "axiomref, printed, reviewed"
}
\end{chunk}
@@ 22381,7 +24151,8 @@ when shown in factored form.
viable: both that {\tt hstocoq} applies to existing Haskell
code, and that the output it produces is amenable to
verification.",
 paper = "Spec18.pdf"
+ paper = "Spec18.pdf",
+ keywords = "printed, reviewed"
}
\end{chunk}
@@ 22591,7 +24362,7 @@ when shown in factored form.
\begin{chunk}{axiom.bib}
@inproceedings{Wadl88,
author = "Wadler, Philip and Blott, Stephen",
 title = {{How to make adhoc polymorphism less ad hoc}},
+ title = {{How to Make Adhoc Polymorphism Less Ad hoc}},
booktitle = "Proc 16th ACM SIGPLANSIGACT Symp. on Princ. of Prog. Lang",
isbn = "0897912942",
pages = "6076",
@@ 39370,6 +41141,21 @@ May 1984
\end{chunk}
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@book{Dave18,
+ author = "Davenport, James H",
+ title = {{Computer Algebra: Systems and Algorithms for Algebraic
+ Computation (updated)}},
+ publisher = "Davenport",
+ year = "2018",
+ link = "\url{http://staff.bath.ac.uk/masjhd/JHDCA.pdf}",
+ paper = "Dave18.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\index{Decker, Wolfram}
\begin{chunk}{axiom.bib}
@misc{Deckxx,
@@ 39991,7 +41777,7 @@ ISBN 1581130732 LCCN QA76.95.I57 1999
concepts, in structured generic programming as practiced in
computational mathematical systems.",
paper = "Reis12.pdf",
 keywords = "axiomref"
+ keywords = "axiomref, printed"
}
\end{chunk}
@@ 45071,6 +46857,7 @@ SIGSAM Communications in Computer Algebra, 157 2006
state of the art commercial software as well as research prototypes
are presented, followed by a description of present research
directions.",
+ paper = "Kajl94.pdf",
keywords = "axiomref"
}
@@ 49530,7 +51317,7 @@ J. of Symbolic Computation 36 pp 513533 (2003)
\begin{chunk}{axiom.bib}
@article{Sant95,
author = "Santas, Philip S.",
 title = {{A type system for computer algebra}},
+ title = {{A Type System for Computer Algebra}},
journal = "J. Symbolic Computation",
volume = "19",
number = "13",
@@ 50827,7 +52614,7 @@ Kognitive Systeme, Universit\"t Karlsruhe 1992
\begin{chunk}{axiom.bib}
@article{Spit11,
author = "Spitters, Bas and van der Weegen, Eelis",
 title = {{Type classes for mathematics in type theory}},
+ title = {{Type Classes for Mathematics in Type Theory}},
journal = "Math. Struct. Comput. Sci.",
volume = "21",
number = "4",
@@ 55515,6 +57302,39 @@ Projet SAFIR, INRIA Sophia Antipolis Nov 25, 1998
\end{chunk}
+\index{Daly, Timothy}
+\index{Kastner, John}
+\index{Mays, Eric}
+\begin{chunk}{axiom.bib}
+@inproceedings{Daly88,
+ author = "Daly, Timothy and Kastner, John and Mays, Eric",
+ title = {{Integrating Rules and Inheritance Networks in a Knowledge-Based
+ Financial Marketing Consultation System}},
+ booktitle = "Proc. HICSS 21",
+ volume = "3",
+ number = "58",
+ pages = "496500",
+ year = "1988",
+ comment = "KROPS",
+ abstract =
+ "This paper describes the integrated use of rule-based inference and
+ an object-centered knowledge representation (inheritance network) in a
+ financial marketing consultation system. The rules provide a highly
+ flexible pattern match capability and inference cycle for control. The
+ inheritance network provides a convenient way to represent the
+ conceptual structure of the domain. By merging the two techniques, our
+ financial computation can be shared at the most general level, and
+ rule inference is carried out at any appropriate level of
+ generalization. Since domain knowledge is represented independently
+ from control knowledge, knowledge about a particular problem solving
+ technique is decoupled from the conditions for its invocation. A large
+ financial marketing system has been built and examples are given to
+ show our combined use of rules and inheritance networks.",
+ paper = "Daly88.pdf"
+}
+
+\end{chunk}
+
\index{Dantzig, G. B.}
\begin{chunk}{ignore}
\bibitem[Dantzig 63]{Dan63} Dantzig G B
@@ 57126,6 +58946,22 @@ Grove, IL, USA and Oxford, UK, 1992
\end{chunk}
+\index{Griesmer, James}
+\begin{chunk}{axiom.bib}
+@article{Grie11,
+ author = "Griesmer, James",
+ title = {{James Griesmer 1929--2011}},
+ journal = "ACM Communications in Computer Algebra",
+ volume = "46",
+ number = "1/2",
+ year = "2012",
+ pages = "1011",
+ comment = "Obituary",
+ paper = "Grie11.pdf"
+}
+
+\end{chunk}
+
\index{Griss, Martin L.}
\begin{chunk}{axiom.bib}
@inproceedings{Gris76,
@@ 60043,6 +61879,45 @@ Science of Computer Programming V25 No.1 Oct 1995 pp4161 Elesevier
\end{chunk}
\index{Paulson, Lawrence C.}
+\index{Smith, Andrew W.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Paul89,
+ author = "Paulson, Lawrence C. and Smith, Andrew W.",
+ title = {{Logic Programming, Functional Programming, and
+ Inductive Definitions}},
+ booktitle = "Int. Workshop on Extensions of Logic Programming",
+ publisher = "Springer",
+ year = "1989",
+ pages = "283309",
+ abstract =
+ "The unification of logic and functional programming, like the Holy
+ Grail, is sought by countless people [6, 14]. In reporting our
+ attempt, we first discuss the motivation. We argue that logic
+ programming is still immature, compared with functional programming,
+ because few logic programs are both useful and pure. Functions can
+ help to purify logic programming, for they can eliminate certain uses
+ of the cut and can express certain negations positively.
+
+ More generally, we suggest that the traditional paradigm  logic
+ programming as firstorder logic  is seriously out of step with
+ practice. We offer an alternative paradigm. We view the logic program
+ as an inductive definition of sets and relations. This view explains
+ certain uses of Negation as Failure, and explains why most attempts to
+ extend PROLOG to larger fragments of first-order logic have not been
+ successful. It suggests a logic language with functions,
+ incorporating equational unification.
+
+ We have implemented a prototype of this language. It is very slow,
+ but complete, and appears to be faster than some earlier
+ implementations of narrowing. Our experiments illustrate the
+ programming style and shed light on the further development of such
+ languages.",
+ paper = "Paul89.pdf"
+}
+
+\end{chunk}
+
+\index{Paulson, Lawrence C.}
\begin{chunk}{axiom.bib}
@article{Paul90,
author = "Paulson, Lawrence C.",
diff git a/changelog b/changelog
index a14fb5a..1861f64 100644
 a/changelog
+++ b/changelog
@@ 1,3 +1,5 @@
+20180606 tpd src/axiomwebsite/patches.html 20180606.01.tpd.patch
+20180606 tpd books/bookvolbib add references
20180516 tpd src/axiomwebsite/patches.html 20180516.01.tpd.patch
20180516 tpd books/bookvolbib add references
20180505 tpd src/axiomwebsite/patches.html 20180505.01.tpd.patch
diff git a/patch b/patch
index 8ffaf48..412bca1 100644
 a/patch
+++ b/patch
@@ 2,1391 +2,1679 @@ books/bookvolbib add references
Goal: Proving Axiom Correct
\index{Walther, J.S.}
+\index{Kohlhase, Michael}
+\index{De Feo, Luca}
+\index{Muller, Dennis}
+\index{Pfeiffer, Markus}
+\index{Rabe, Florian}
+\index{Thiery, Nicolas M.}
+\index{Vasilyev, Victor}
+\index{Wesing, Tom}
\begin{chunk}{axiom.bib}
@misc{Walt71,
 author = "Walther, J.S.",
 title = {{A Unified Algorithm for Elementary Functions}},
 link = "\url{}",
 year = "1971",
 abstract =
 "This paper describes a single unified algorithm for the calculation
 of elementary functions including multipli cation, division, sin,
 cos, tan, arctan, sinh, cosh, tanh, arctanh, In, exp and squareroot.
 The basis for the algorithm is coordinate rotation in a linear,
 circular, or hyperbolic coordinate system depending on which function
 is to be calculated. The only operations re quired are shifting,
 adding, subtracting and the recall of prestored constants. The
 limited domain of con vergence of the algorithm is calculated,
 leading to a discussion of the modifications required to extend the
 domain for floating point calculations.
+@inproceedings{Kohl17,
+ author = "Kohlhase, Michael and De Feo, Luca and Muller, Dennis and
+ Pfeiffer, Markus and Rabe, Florian and Thiery, Nicolas M.
+ and Vasilyev, Victor and Wesing, Tom",
+ title = {{KnowledgeBased Interoperability for Mathematical Software
+ Systems}},
+ booktitle = "7th Int. Conf. on Mathematical Aspects of Computer and
+ Information Sciences",
+ publisher = "Springer",
+ year = "2017",
+ pages = "195210",
+ isbn = "9783319724539",
+ abstract =
+ "There is a large ecosystem of mathematical software systems.
+ Individually, these are optimized for particular domains and
+ functionalities, and together they cover many needs of practical and
+ theoretical mathematics. However, each system specializes on one area,
+ and it remains very difficult to solve problems that need to involve
+ multiple systems. Some integrations exist, but they are ad-hoc and have
+ scalability and maintainability issues. In particular, there is not
+ yet an interoperability layer that combines the various systems into a
+ virtual research environment (VRE) for mathematics.
 A hardware floating point processor using the algo rithm was built at
 HewlettPackard Laboratories. The block diagram of the processor, the
 microprogram control used for the algorithm, and measures of actual
 performance are shown.",
 paper = "Walt71.pdf"
}

\end{chunk}

\index{Friedman, Daniel P.}
\index{Wise, David S.}
\begin{chunk}{axiom.bib}
@techreport{Frie76,
 author = "Friedman, Daniel P. and Wise, David S.",
 title = {{CONS should not Evaluate its Arguments}},
 institution = "Indiana University",
 number = "TR44",
 year = "1976",
 abstract =
 "The constructor function which allocates and fills records in
 recursive, sideeffectfree procedural languages is redefined to be a
 nonstrict (Vuillemin 1974) elementary operation. Instead of
 evaluating its arguments, it builds suspensions of them which are not
 coerced until the suspensions is accessed by strict elementary
 function. The resulting evalutation procedures are strictly more
 powerful than existing schemes for languages such as LISP. The main
 results are that Landin's streams are subsumed into McCarthy's LISP
 merely by the redefinition of elementar functions, that invocations of
 LISP's evaluator can be minimized by redefining the elemntary
 functions without redefining the interpreter, and as a strong
 conjecture, that redefining the elementary functions yields the least
 fixedpoint semantics for McCarthy's evalution scheme. This new
 insight into the role of construction functions will do much to ease
 the interface between recursive programmers and iterative programmers,
 as well as the interface between programmers and data structure
 designers.",
 paper = "Frie16.pdf",
+ The OpenDreamKit project aims at building a toolkit for such VREs. It
+ suggests using a central systemagnostic formalization of mathematics
+ (MathintheMiddle, MitM) as the needed interoperability layer. In
+ this paper, we conduct the first major case study that instantiates
+ the MitM paradigm for a concrete domain as well as a concrete set of
+ systems. Specifically, we integrate GAP, Sage, and Singular to perform
+ computation in group and ring theory.
+
+ Our work involves massive practical efforts, including a novel
+ formalization of computational group theory, improvements to the
+ involved software systems, and a novel mediating system that sits at
+ the center of a starshaped integration layout between mathematical
+ software systems.",
+ paper = "Kohl17.pdf",
keywords = "printed"
}
\end{chunk}
\index{Sarma, Gopal}
\index{Hay, Nick J.}
+\index{Dehaye, PaulOlivier}
+\index{Iancu, Mihnea}
+\index{Kohlhase, Michael}
+\index{Konovalov, Alexander}
+\index{Lelievre, Samuel}
+\index{Muller, Dennis}
+\index{Pfeiffer, Markus}
+\index{Rabe, Florian}
+\index{Thiery, Nicolas M.}
+\index{Wiesling, Tom}
\begin{chunk}{axiom.bib}
@article{Sarm17,
 author = "Sarma, Gopal and Hay, Nick J.",
 title = {{Robust Computer Algebra, Theorem Proving, and Oracle AI}},
 journal = "Informatica",
 volume = "41",
 number = "3",
 link = "\url{https://arxiv.org/pdf/1708.02553.pdf}",
 year = "2017",
+@inproceedings{Deha16,
+ author = "Dehaye, PaulOlivier and Iancu, Mihnea and Kohlhase, Michael
+ and Konovalov, Alexander and Lelievre, Samuel and
+ Muller, Dennis and Pfeiffer, Markus and Rabe, Florian and
+ Thiery, Nicolas M. and Wiesling, Tom",
+ title = {{Interoperability in the OpenDreamKit project: The
+ MathintheMiddle Approach}},
+ booktitle = "Intelligent Computer Mathematics",
+ pages = "117131",
+ year = "2016",
+ isbn = "9783319425467",
+ publisher = "Springer",
abstract =
 "In the context of superintelligent AI systems, the term 'oracle' has
 two meanings. One refers to modular systems queried for
 domainspecific tasks. Another usage, referring to a class of systems
 which may be useful for addressing the value alignment and AI control
 problems, is a superintelligent AI system that only answers questions.
 The aim of this manuscript is to survey contemporary research problems
 related to oracles which align with longterm research goals of AI
 safety. We examine existing question answering systems and argue that
 their high degree of architectural heterogeneity makes them poor
 candidates for rigorous analysis as oracles. On the other hand, we
 identify computer algebra systems (CASs) as being primitive examples
 of domainspecific oracles for mathematics and argue that efforts to
 integrate computer algebra systems with theorem provers, systems which
 have largely been developed independent of one another, provide a
 concrete set of problems related to the notion of provable safety that
 has emerged in the AI safety community. We review approaches to
 interfacing CASs with theorem provers, describe welldefined
 architectural deficiencies that have been identified with CASs, and
 suggest possible lines of research and practical software projects for
 scientists interested in AI safety.",
 paper = "Sarm17.pdf",
+ "OpenDreamKit  'Open Digital Research Environment Toolkit for the
+ Advancement of Mathematics'  is an H2020 EU Research Infrastructure
+ project that aims at supporting, over the period 20152019, the
+ ecosystem of opensource mathematical software systems. OpenDreamKit
+ will deliver a flexible toolkit enabling research groups to set up
+ Virtual Research Environments, customised to meet the varied needs of
+ research projects in pure mathematics and applications.
+
+ An important step in the OpenDreamKit endeavor is to foster the
+ interoperability between a variety of systems, ranging from
+ computer algebra systems over mathematical databases to
+ frontends. This is the mission of the integration work
+ package. We report on experiments and future plans with the
+ MathintheMiddle approach. This architecture consists of a
+ central mathematical ontology that documents the domain and fixes a
+ joint vocabulary, or even a language, going beyond existing
+ systems such as OpenMath, combined with specifications of the
+ functionalities of the various systems. Interaction between
+ systems can then be enriched by pivoting around this architecture.",
+ paper = "Deha16.pdf",
keywords = "printed, axiomref"
}
\end{chunk}
\index{Keller, Chantal}
+\index{de Bruijn, N.G.}
\begin{chunk}{axiom.bib}
@phdthesis{Kell13,
 author = "Keller, C.",
 title = {{A Matter of Trust: Skeptical Commuication Between Coq and
 External Provers}},
 school = "Ecole Polytechnique",
 year = "2013",
 link =
"\url{https://www.lri.fr/~keller/Documentsrecherche/Publications/thesis13.pdf}",
 abstract =
 "This thesis studies the cooperation between the Coq proof assistant
 and external provers through proof witnesses. We concentrate on two
 different kinds of provers that can return certicates: first, answers
 coming from SAT and SMT solvers can be checked in Coq to increase both
 the confidence in these solvers and Coq 's automation; second,
 theorems established in interactive provers based on HigherOrder
 Logic can be exported to Coq and checked again, in order to offer the
 possibility to produce formal developments which mix these two
 different logical paradigms. It ended up in two software : SMTCoq, a
 bidirectional cooperation between Coq and SAT/SMT solvers, and
 HOLLIGHTCOQ, a tool importing HOL Light theorems into Coq.

 For both tools, we took great care to define a modular and efficient
 architecture, based on three clearly separated ingredients: an
 embedding of the formalism of the external tool inside Coq which is
 carefully translated into Coq terms, a certified checker to establish
 the proofs using the certicates and a Ocaml preprocessor to transform
 proof witnesses coming from different provers into a generic
 certificate. This division allows that a change in the format of proof
 witnesses only affects the preprocessor, but no proved Coq code.
 Another fundamental component for efficiency and modularity is
 computational reflection, which exploits the computational power of
 Coq to establish generic and small proofs based on the certicates.",
 paper = "Kell13.pdf"
+@techreport{Brui68a,
+ author = "de Bruijn, N.G.",
+ title = {{AUTOMATH, A Language for Mathematics}},
+ year = "1968",
+ type = "technical report",
+ number = "68WSK05",
+ institution = "Technische Hogeschool Eindhoven",
+ paper = "Brui68a.pdf"
}
\end{chunk}
\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@article{Farm07,
+@article{Farm07a,
author = "Farmer, William M.",
 title = {{Biform Theories in Chiron}},
 journal = "LNCS",
 volume = "4573",
 pages = "6679",
+ title = {{Chiron: A MultiParadigm Logic}},
+ journal = "Studies in Logic, Grammar and Rhetoric",
+ volume = "10",
+ number = "23",
year = "2007",
abstract =
 "An axiomatic theory represents mathematical knowledge declaratively
 as a set of axioms. An algorithmic theory represents mathematical
 knowledge procedurally as a set of algorithms. A biform theory is
 simultaneously an axiomatic theory and an algorithmic theory. It
 represents mathematical knowledge both declaratively and procedurally.
 Since the algorithms of algorithmic theories manipulate th e syntax of
 expressions, biform theories — as well as algorithmic theories — are
 difficult to formalize in a traditional logic without the means to
 reason about syntax. Chiron is a derivative of
 vonNeumannBernaysG̈odel ( NBG ) set theory that is intended to be a
 practical, generalpurpose logic for mechanizing mathematics. It
 includes elements of type theory, a scheme for handling undefinedness,
 and a facility for reasoning about the syntax of expressions. It is an
 exceptionally wellsuited logic for formalizing biform theories. This
 paper defines the notion of a biform theory, gives an overview of
 Chiron, and illustrates how biform theories can be formalized in Chiron.",
 paper = "Farm07.pdf",
 keywords = "printed"
+ "Chiron is a derivative of vonNeumannBernaysG̈odel ( NBG ) set
+ theory that is intended to be a practical, generalpurpose logic for
+ mechanizing mathematics. It supports several reasoning paradigms by
+ integrating NBG set theory with elements of type theory, a scheme for
+ handling undefinedness, and a facility for reasoning about the syntax
+ of expressions. This paper gives a quick, informal presentation of
+ the syntax and semantics of Chiron and then discusses some of the
+ benefits Chiron provides as a multiparadigm logic.",
+ paper = "Farm07a.pdf",
+ keywords = "axiomref, printed"
}
\end{chunk}
\index{Carette, Jacques}
\index{Farmer, William M.}
\index{Sorge, Volker}
\begin{chunk}{axiom.bib}
@inproceedings{Care07,
 author = "Carette, Jacques and Farmer, William M. and Sorge, Volker",
 title = {{A Rational Reconstruction of a System for Experimental
 Mathematics}},
 booktitle = "14th Workshop on Automated Reasoning",
 publisher = "unknown",
 year = "2007",
+@techreport{Farm13,
+ author = "Farmer, William M.",
+ title = {{Chiron: A Set Theory with Types, Undefinedness, Quotation,
+ and Evaluation}},
+ type = "technical report",
+ institution = "McMaster University",
+ number = "SQRL Report No. 38",
+ year = "2013",
+ link = "\url{https://arxiv.org/pdf/1305.6206.pdf}",
abstract =
 "In previous papers we described the implementation of a system
 which combines mathematical object generation, transformation and
 filtering, conjecture generation, proving and disproving for
 mathematical discovery in nonassociative algebra. While the system
 has generated novel, fully verified theorems, their construction
 involved a lot of ad hoc communication between disparate systems. In
 this paper we carefully reconstruct a specification of a subprocess
 of the original system in a framework for trustable communication
 between mathematics systems put forth by us. It employs the concept
 of biform theories that enables the combined formalisation of the
 axiomatic and algorithmic theories behind the generation
 process. This allows us to gain a much better understanding of the
 original system, and exposes clear generalisation opportunities.",
 paper = "Care07.pdf",
 keywords = "printed"
+ "Chiron is a derivative of vonNeumannBernaysGodel (NBG) set
+ theory that is intended to be a practical, generalpurpose logic for
+ mechanizing mathematics. Unlike traditional set theories such as
+ ZermeloFraenkel (ZF) and NBG, Chiron is equipped with a type system,
+ lambda notation, and definite and indefinite description. The type
+ system includes a universal type, dependent types, dependent function
+ types, subtypes, and possibly empty types. Unlike traditional logics
+ such as firstorder logic and simple type theory, Chiron admits
+ undefined terms that result, for example, from a function applied to
+ an argument outside its domain or from an improper definite or
+ indefinite description. The most noteworthy part of Chiron is its
+ facility for reasoning about the syntax of expressions. Quotation is
+ used to refer to a set called a construction that represents the
+ syntactic structure of an expression, and evaluation is used to refer
+ to the value of the expression that a construction represents. Using
+ quotation and evaluation, syntactic side conditions, schemas,
+ syntactic transformations used in deduction and computation rules, and
+ other such things can be directly expressed in Chiron. This paper
+ presents the syntax and semantics of Chiron, some definitions and
+ simple examples illustrating its use, a proof system for Chiron, and a
+ notion of an interpretation of one theory of Chiron in another.",
+ paper = "Farm13.pdf"
}
\end{chunk}
\index{Farmer, William M.}
+\index{Kripke, Saul}
\begin{chunk}{axiom.bib}
@inproceedings{Farm00,
 author = "Farmer, William M.",
 title = {{A Proposal for the Development of an Interactive
 Mathematics Laboratory for Mathematics Eductions}},
 booktitle = "Workshop on Deductions Systems for Mathematics Eduation",
 pages = "2025",
 year = "2000"
 paper = "Farm00.pdf",
 keywords = "axiomref, printed"
+@article{Krip75,
+ author = "Kripke, Saul",
+ title = {{Outline of a Theory of Truth}},
+ journal = "Journal of Philosophy",
+ volume = "72",
+ number = "19",
+ year = "1975",
+ pages = "690716",
+ paper = "Krip75.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Farmer, William M.}
+\index{Kleene, Stephen Cole}
\begin{chunk}{axiom.bib}
@article{Farm04,
 author = "Farmer, William M.",
 title = {{MKM A New Interdisciplinary Field of Research}},
 journal = "SIGSAM",
 volume = "38",
 pages = "4752",
 year = "2004",
 abstract =
 "Mathematical Knowledge Management (MKM) is a new interdisciplinary
 field of research in the intersection of mathematics, computer
 science, library science, and scientific publishing. Its objective is
 to develop new and better ways of managing mathematical knowledge
 using sophisticated software tools. Its grand challenge is to create
 a universal digital mathematics library (UDML), accessible via the
 WorldWide Web, that contains essentially all mathematical knowledge
 (intended for the public). The challenges facing MKM are daunting,
 but a UDML, even just partially constructed, would transform how
 mathematics is learned and practiced.",
 paper = "Farm04.pdf",
 keywords = "printed, axiomref"
+@book{Klee52,
+ author = "Kleene, Stephen Cole",
+ title = {{Introduction to MetaMathematics}},
+ year = "1952",
+ publisher = "Ishi Press International",
+ isbn = "9780923891572",
+ paper = "Klee52.pdf"
}
\end{chunk}
\index{Farmer, William M.}
\index{Mohrenschildt, Martin v.}
+\index{Smith, Peter}
\begin{chunk}{axiom.bib}
@inproceedings{Farm00a,
 author = "Farmer, William M. and Mohrenschildt, Martin v.",
 title = {{Transformers for Symbolic Computation and Formal Deduction}},
 booktitle = "CADE17",
 pages = "3645",
 year = "2000",
 abstract =
 "A transformer is a function that maps expressions to expressions.
 Many transformational operators — such as expression evaluators and
 simplifiers, rewrite rules, rules of inference, and decision
 procedures — can be represented by transformers. Computations and
 deductions can be formed by applying sound transformers in
 sequence. This paper introduces machinery for defining sound
 transformers in the context of an axiomatic theory in a formal
 logic. The paper is intended to be a first step in a development of an
 integrated framework for symbolic computation and formal deduction.",
 paper = "Farm00a.pdf",
+@misc{Smit15,
+ author = "Smith, Peter",
+ title = {{Some Big Books on Mathematical Logic}},
+ year = "2015",
+ link = "\url{}",
+ paper = "Smit15.pdf",
keywords = "printed"
}
\end{chunk}
\index{Farmer, William M.}
\index{Larjani, Pouya}
+\index{Gonthier, Georges}
\begin{chunk}{axiom.bib}
@misc{Farm14,
 author = "Farmer, William M. and Larjani, Pouya",
 title = {{Frameworks for Reasoning about Syntax that Utilize
 Quotation and Evaluation}},
 links = "\url{http://imps.mcmaster.ca/doc/syntax.pdf}",
 year = "2014",
 abstract =
 "It is often useful, if not necessary, to reason about the syntactic
 structure of an expression in an interpreted language (i.e., a
 language with a semantics). This paper introduces a mathematical
 structure called a syntax framework that is intended to be an abstract
 model of a system for reasoning about the syntax of an interpreted
 language. Like many concrete systems for reasoning about syntax, a
 syntax framework contains a mapping of expressions in the
 interpreted language to syntactic values that represent the syntactic
 structures of the expressions; a language for reasoning about the
 syntactic values; a mechanism called quotation to refer to the
 syntactic value of an expression; and a mechanism called evaluation to
 refer to the value of the expression represented by a syntactic value.
 A syntax framework provides a basis for integrating reasoning about
 the syntax of the expressions with reasoning about what the
 expressions mean. The notion of a syntax framework is used to discuss
 how quotation and evaluation can be built into a language and to
 define what quasiquotation is. Several examples of syntax frameworks
 are presented.",
 paper = "Farm14.pdf",
+@article{Gont09a,
+ author = "Gonthier, Georges",
+ title = {{Software Engineering for Mathematics}},
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
+ pages = "2727",
+ abstract =
+ "Despite its mathematical origins, progress in computer assisted
+ reasoning has mostly been driven by applications in computer science,
+ like hardware or protocol security verification. Paradoxically, it has
+ yet to gain widespread acceptance in its original domain of
+ application, mathematics; this is commonly attributed to a 'lack of
+ libraries': attempts to formalize advanced mathematics get bogged down
+ into the formalization of an unwieldy large set of basic results.
+
+ This problem is actually a symptom of a deeper issue: the main
+ function of computer proof systems, checking proofs down to their
+ finest details, is at odds with mathematical practice, which ignores
+ or defers details in order to apply and combine abstractions in
+ creative and elegant ways. Mathematical texts commonly leave logically
+ important parts of proofs as 'exercises to the reader', and are rife
+ with 'abuses of notation that make mathematics tractable' (according
+ to Bourbaki). This (essential) flexibility cannot be readily
+ accommodated by the narrow concept of 'proof library' used by most
+ proof assistants and based on 19th century firstorder logic: a
+ collection of constants, definitions, and lemmas.
+
+ This mismatch is familiar to software engineers, who have been
+ struggling for the past 50 years to reconcile the flexibility needed
+ to produce sensible user requirements with the precision needed to
+ implement them correctly with computer code. Over the last 20 years
+ object and components have replaced traditional data and procedure
+ libraries, partly bridging this gap and making it possible to build
+ significantly larger computer systems.
+
+ These techniques can be implemented in computer proof systems by
+ exploiting advances in mathematical logic. Higherorder logics allow
+ the direct manipulation of functions; this can be used to assign
+ behaviour, such as simplification rules, to symbols, similarly to
+ objects. Advanced type systems can assign a secondary, contextual
+ meaning to expressions, using mechanisms such as type classes,
+ similarly to the metadata in software components. The two can be combined
+ to perform reflection, where an entire statement gets quoted as
+ metadata and then proved algorithmically by some decision procedure.
+
+ We propose to use a more modest, smallscale form of reflection, to
+ implement mathematical components. We use the typederived metadata to
+ indicate how symbols, definitions and lemmas should be used in other
+ theories, and functions to implement this usage — roughly, formalizing
+ some of the exercise section of a textbook. We have applied
+ successfully this more engineered approach to computer proofs in our
+ past work on the Four Color Theorem, the CayleyHamilton Theorem, and
+ our ongoing longterm effort on the Odd Order Theorem, which is the
+ starting point of the proof of the Classification of Finite Simple
+ Groups (the famous 'monster theorem' whose proof spans 10,000 pages in
+ 400 articles).",
+ paper = "Gont09a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Carette, Jacques}
\index{Farmer, William M.}
\index{O'Connor, Russell}
\begin{chunk}{axiom.bib}
@misc{Care11c,
 author = "Carette, Jacques and Farmer, William M. and O'Connor, Russell",
 title = {{MathScheme: Project Description}},
 year = "2011",
 link = "\url{http://imps.mcmaster.ca/doc/cicm2011projdesc.pdf}",
 paper = "Care11c.pdf",
+@article{Care10,
+ author = "Carette, Jacques",
+ title = {{Mechanized Mathematics}},
+ journal = "LNCS",
+ volume = "6167",
+ year = "2010",
+ pages = "157157",
+ abstract =
+ "In the 50 years since McCarthy’s 'Recursive Functions of Symbolic
+ Expressions and Their Computation by Machine', what have we
+ learned about the realization of Leibniz’s dream of just being
+ able to utter 'Calculemus!' (Let us calculate!) when faced with a
+ mathematical dilemma?
+
+ In this talk, I will first present what I see as the most
+ important lessons from the past which need to be heeded by modern
+ designers. From the present, I will look at the context in which
+ computers are used, and derive further requirements. In
+ particular, now that computers are no longer the exclusive
+ playground for highly educated scientists, usability is now more
+ important than ever, and justifiably so.
+
+ I will also examine what I see as some principal failings of
+ current systems, primarily to understand some major mistakes to
+ avoid. These failings will be analyzed to extract what seems to be
+ the root mistake, and I will present my favourite solutions.
+
+ Furthermore, various technologies have matured since the creation
+ of many of our systems, and whenever appropriate, these should be
+ used. For example, our understanding of the structure of
+ mathematics has significantly increased, yet this is barely
+ reflected in our libraries. The extreme focus on efficiency by the
+ computer algebra community, and correctness by the (interactive)
+ theorem proving community should no longer be considered viable
+ long term strategies. But how does one effectively bridge that
+ gap?
+
+ I personally find that a number of (programming) languagebased
+ solutions are particularly effective, and I will emphasize
+ these. Solutions to some of these problems will be illustrated
+ with code from a prototype of MathScheme 2.0, the system I am
+ developing with Bill Farmer and our research group.",
+ paper = "Care10.pdf",
keywords = "printed"
}
\end{chunk}
\index{Carette, Jacques}
\index{Farmer, William M.}
+\index{Zeilberger, Doron}
\begin{chunk}{axiom.bib}
@article{Care08,
 author = "Carette, Jacques and Farmer, William M.",
 title = {{HighLevel Theories}},
+@article{Zeil10,
+ author = "Zeilberger, Doron",
+ title = {{Against Rigor}},
journal = "LNCS",
 volume = "5144",
 pages = "232245"
 year = "2008",
+ volume = "6167",
+ year = "2010",
+ pages = "262262",
abstract =
 "We introduce highlevel theories in analogy with highlevel
 programming languages. The basic point is that even though one can
 define many theories via simple, lowlevel axiomatizations , that is
 neither an effective nor a comfortable way to work with such theories.
 We present an approach which is closer to what users of mathematics
 employ, while still being based on formal structures.",
 paper = "Care08.pdf",
+ "The ancient Greeks gave (western) civilization quite a few
+ gifts, but we should beware of Greeks bearing gifts. The gifts of theatre
+ and democracy were definitely good ones, and perhaps even the gift of
+ philosophy, but the 'gift' of the socalled 'axiomatic method' and the
+ notion of 'rigorous' proof did much more harm than good. If we want
+ to maximize Mathematical Knowledge, and its Management, we have to
+ return to Euclid this dubious gift, and giveup our fanatical insistence
+ on perfect rigor. Of course, we should not go to the other extreme, of
+ demanding that everything should be nonrigorous. We should encourage
+ diversity of proofstyles and rigor levels, and remember that nothing is
+ absolutely sure in this world, and there does not exist an absolutely
+ rigorous proof, nor absolute certainty, and 'truth' has many shades and
+ levels.",
+ paper = "Zeil10.pdf",
keywords = "printed"
}
\end{chunk}
\index{Carette, Jacques}
\index{O'Connor, Russell}
+\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@article{Care12,
 author = "Carette, Jacques and O'Connor, Russell",
 title = {{Theory Presentation Combinators}},
 journal = "LNCS",
 volume = "7362",
 year = "2012",
 abstract =
 "We motivate and give semantics to theory presentation combinators
 as the foundational building blocks for a scalable library of
 theories. The key observation is that the category of contexts and
 fibered categories are the ideal theoretical tools for this
 purpose.",
 paper = "Care12.pdf",
 keywords = "printed"
+@phdthesis{Pfen87,
+ author = "Pfenning, Frank",
+ title = {{Proof Transformations in HigherOrder Logic}},
+ year = "1987",
+ school = "Carnegie Mellon University",
+ link = "\url{http://wwwcgi.cs.cmu.edu/~fp/papers/thesis87.pdf}",
+ abstract =
+ "We investigate the problem of translating between different styles of
+ proof systems in higherorder logic: analytic proofs which are well
+ suited for automated theorem proving, and nonanalytic deductions
+ which are well suited for the mathematician. Analytic proofs are
+ represented as expansion proofs, $H$ , a form of the sequent calculus we
+ define, nonanalytic proofs are represented by natural deductions. A
+ nondeterministic translation algorithm between expansion proofs and
+ $H$deductions is presented and its correctness is proven. We also
+ present an algorithm for translation in the other direction and prove
+ its correctness. A cutelimination algorithm for expansion proofs is
+ given and its partial correctness is proven. Strong termination of
+ this algorithm remains a conjecture for the full higherorder
+ system, but is proven for the firstorder fragment. We extend the
+ translations to a nonanalytic proof system which contains a primitive
+ notion of equality, while leaving the notion of expansion proof
+ unaltered. This is possible, since a nonextensional equality is
+ definable in our system of type theory. Next we extend analytic and
+ nonanalytic proof systems and the translations between them to
+ include extensionality. Finally, we show how the methods and notions
+ used so far apply to the problem of translating expansion proofs into
+ natural deductions. Much care is taken to specify this translation in
+ a modular way (through tactics) which leaves a large number of choices
+ and therefore a lot of room for heuristics to produce elegant natural
+ deductions. A practically very useful extension, called symmetric
+ simplification, produces natural deductions which make use of lemmas
+ and are often much more intuitive than the normal deductions which
+ would be created by earlier algorithms.",
+ paper = "Pfen87.pdf"
}
\end{chunk}
\index{Musser, David R.}
\index{Kapur, Deepak}
+\index{Comon, Hubert}
\begin{chunk}{axiom.bib}
@article{Muss82,
 author = "Musser, David R. and Kapur, Deepak",
 title = {{Rewrite Rule Theory and Abstract Data Type Analysis}},
 journal = "Lecture Notes in Computer Science",
 volume = "144",
 pages = "7790",
 year = "1982",
 paper = "Muss82.pdf",
 keywords = "axiomref"
+@incollection{Como01,
+ author = "Comon, Hubert",
+ title = {{Inductionless Induction}},
+ booktitle = "Handbook of Automated Reasoning",
+ comment = "Chapter 14",
+ publisher = "Elsevier",
+ year = "2001",
+ paper = "Como01.ps"
}
\end{chunk}
\index{Lazard, Daniel}
+\index{Schmitt, P.H.}
\begin{chunk}{axiom.bib}
@article{Laza82,
 author = "Lazard, Daniel",
 title = {{Commutative Algebra and Computer Algebra}},
 journal = "Lecture Notes in Computer Science",
 volume = "144",
 pages = "4048",
 year = "1982",
 abstract =
 "It is well known that computer algebra deals with commutative rings
 such as the integers, the rationals, the modular integers and
 polynomials over such rings.

 On the other hand, commutative algebra is that part of mathematics
 which studies commutative rings.

 Our aim is to make this link more precise. It will appear that most of
 the constructions which appear in classical commutative algebra can be
 done explicitly in finite time. However, there are
 exceptions. Moreover, most of the known algorithms are intractable :
 The problems are generally exponential by themselves, but many
 algorithms are overoverexponential. It needs a lot of work to find
 better methods, and to implement them, with the final hope to open a
 computation domain larger than this one which is possible by hand.

 To illustrate the limits and the possibilities of computerizing
 commutative algebra, we describe an algorithm which tests the
 primality of a polynomial ideal and we give an example of a single
 algebraic extension of fields $K\subset L$ such that there exists an
 algorithm of factorization for the polynomials over $K$, but not for
 the polynomials over $L$.",
 paper = "Laza82.pdf"
+@article{Schm86,
+ author = "Schmitt, P.H.",
+ title = {{Computational Aspects of ThreeValued Logic}},
+ journal = "LNCS",
+ volume = "230",
+ year = "1986",
+ abstract =
+ "This paper investigates a threevalued logic $L_3$, that has been
+ introduced in the study of natural language semantics. A complete
+ proof system based on a threevalued analogon of negative resolution
+ is presented. A subclass of $L_3$ corresponding to Horn clauses in
+ twovalued logic is defined. Its model theoretic properties are
+ studied and it is shown to admit a PROLOGstyle proof procedure.",
+ paper = "Schm86.pdf"
}
\end{chunk}
\index{Hearn, Anthony C.}
+\index{Comon, Hubert}
+\index{Lescanne, Pierre}
\begin{chunk}{axiom.bib}
@article{Hear82,
 author = "Hearn, Anthony C.",
 title = {{REDUCE  A Case Study in Algebra System Development}},
 journal = "Lecture Notes in Computer Science",
 volume = "144",
 pages = "263272",
 year = "1982",
 paper = "Hear82.pdf",
 keywords = "axiomref"
}
+@article{Como89,
+ author = "Comon, Hubert and Lescanne, Pierre",
+ title = {{Equational Problems and Disunification}},
+ journal = "J. Symbolic Computation",
+ volume = "7",
+ number = "34",
+ year = "1989",
+ pages = "371425",
+ abstract =
+ "Roughly speaking, an equational problem is a first order formula
+ whose only predicate symbol is $=$. We propose some rules for the
+ transformation of equational problems and study their correctness in
+ various models. Then, we give completeness results with respect to
+ some 'simple' problems called solved forms. Such completeness results
+ still hold when adding some control which moreover ensures
+ termination. The termination proofs are given for a 'weak' control and
+ thus hold for the (large) class of algorithms obtained by restricting
+ the scope of the rules. Finally, it must be noted that a byproduct of
+ our method is a decision procedure for the validity in the Herbrand
+ Universe of any first order formula with the only predicate symbol $=$.",
+ paper = "Como89.pdf"
+}
\end{chunk}
\index{Padget, J.A.}
+\index{Jones, Simon Peyton}
\begin{chunk}{axiom.bib}
@article{Padg82,
 author = "Padget, J.A.",
 title = {{Escaping from Intermediate Expression Swell: A Continuing Saga}},
 journal = "Lecture Notes in Computer Science",
 volume = "144",
 pages = "256262",
 year = "1982",
 abstract =
 "The notion of a closed continuation is introduced, is presented,
 coroutines using function call and return based on this concept, are
 applications and a functional dialect of LISP shown to be merely a
 more general form of for coroutines in algebraic simplification and
 are suggested, by extension function. expression Potential evaluation
 and a specific example of their use is given in a novel attack on the
 phenomenon of intermediate expression swell in polynomial
 multiplication.",
 paper = "Padg82.pdf"
+@inproceedings{Jone96,
+ author = "Jones, Simon Peyton",
+ title = {{Compiling Haskell by Program Transformation: A Report from
+ the Trenches}},
+ booktitle = "Proc. European Symposium on Programming",
+ year = "1996",
+ publisher = "Eurographics",
+ abstract =
+ "Many compilers do some of their work by means of
+ correctness-preserving, and hopefully performance-improving, program
+ transformations. The Glasgow Haskell Compiler (GHC) takes this idea
+ of 'compilation by transformation' as its warcry, trying to express
+ as much as possible of the compilation process in the form of
+ program transformations.
+
+ This paper reports on our practical experience of the
+ transformational approach to compilation, in the context of a
+ substantial compiler.",
+ paper = "Jone96.pdf"
}
\end{chunk}
\index{Norman, Arthur}
+\index{Hanus, Michael}
\begin{chunk}{axiom.bib}
@article{Norm82,
 author = "Norman, Arthur",
 title = {{The Development of a VectorBased Algebra System}},
 journal = "Lecture Notes in Computer Science",
 volume = "144",
 pages = "237248",
 year = "1982",
 paper = "Norm82.pdf"
+@article{Hanu90,
+ author = "Hanus, Michael",
+ title = {{Compiling Logic Programs with Equality}},
+ journal = "LNCS",
+ volume = "456",
+ year = "1990",
+ pages = "387401",
+ abstract =
+ "Horn clause logic with equality is an amalgamation of functional and
+ logic programming languages. A sound and complete operational
+ semantics for logic programs with equality is based on resolution to
+ solve literals, and rewriting and narrowing to evaluate functional
+ expressions. This paper proposes a technique for compiling programs
+ with these inference rules into programs of a lowlevel abstract
+ machine which can be efficiently executed on conventional
+ architectures. The presented approach is based on an extension of the
+ Warren abstract machine (WAM). In our approach pure logic programs
+ without function definitions are compiled in the same way as in the
+ WAMapproach, and for logic programs with function definitions
+ particular instructions are generated for occurrences of functions
+ inside clause bodies. In order to obtain an efficient implementation
+ of functional computations, a stack of occurrences of function symbols
+ in goals is managed by the abstract machine. The compiler generates
+ the necessary instructions for the efficient manipulation of the
+ occurrence stack from the given equational logic programs.",
+ paper = "Hanu90.pdf"
}
\end{chunk}
\index{Bordoni, L.}
\index{Colagrossi, A.}
\index{Miola, A.}
+\index{Ballarin, Clemens}
\begin{chunk}{axiom.bib}
@article{Bord82,
 author = "Bordoni, L. and Colagrossi, A. and Miola, A.",
 title = {{Linear Algebraic Approach for Computing Polynomial Resultant}},
 journal = "Lecture Notes in Computer Science",
 volume = "144",
 pages = "231236",
 year = "1982",
 abstract =
 "This paper presents a linear algebraic method for computing the
 resultant of two polynomials. This method is based on the
 computation of a determinant of order equal to the minimum of the
 degrees of the two given polynomials. This method turns out to be
 preferable to other known linear algebraic methods both from a
 computational point of view and for a total generality respect to
 the class of the given polynomials. Some relationships of this
 method with the polynomial pseudoremainder operation are discussed.",
 paper = "Bord82.pdf"
+@phdthesis{Ball99a,
+ author = "Ballarin, Clemens",
+ title = {{Computer Algebra and Theorem Proving}},
+ school = "Darwin College, University of Cambridge",
+ year = "1999",
+ abstract =
+ "Is the use of computer algebra technology beneficial for
+ mechanised reasoning in and about mathematical domains? Usually
+ it is assumed that it is. Many works in this area, however, either
+ have little reasoning content, or use symbolic computation only to
+ simplify expressions. In work that has achieved more, the used
+ methods do not scale up. They trust the computer algebra system
+ either too much or too little.
+
+ Computer algebra systems are not as rigorous as many provers. They
+ are not logically sound reasoning systems, but collections of
+ algorithms. We classify soundness problems that occur in computer
+ algebra systems. While many algorithms and their implementations
+ are perfectly trustworthy, the semantics of symbols is often
+ unclear and leads to errors. On the other hand, more robust
+ approaches to interface external reasoners to provers are not
+ always practical because the mathematical depth of proofs
+ algorithms in computer algebra are based on can be enormous.
+
+ Our own approach takes both trustworthiness of the overall system
+ and efficiency into account. It relies on using only reliable
+ parts of a computer algebra system, which can be achieved by
+ choosing a suitable library, and deriving specifications for these
+ algorithms from their literature.
+
+ We design and implement an interface between the prover Isabelle
+ and the computer algebra library Sumit and use it to prove
+ nontrivial theorems from coding theory. This is based on the
+ mechanisation of the algebraic theories of rings and
+ polynomials. Coding theory is an area where proofs do have a
+ substantial amount of computational content. Also, it is realistic
+ to assume that the verification of an encoding or decoding device
+ could be undertaken in, and indeed, be simplified by, such a
+ system.
+
+ The reason why semantics of symbols is often unclear in current
+ computer algebra systems is not mathematical difficulty, but the
+ design of those systems. For Gaussian elimination we show how the
+ soundness problem can be fixed by a small extension, and without
+ losing efficiency. This is a prerequisite for the efficient use of
+ the algorithm in a prover.",
+ paper = "Ball99a.pdf",
+ keywords = "axiomref, printed"
}
\end{chunk}
\index{Arnon, Dennis S.}
\index{McCallum, Scott}
+\index{Nederpelt, Rob}
+\index{Geuvers, Herman}
\begin{chunk}{axiom.bib}
@article{Arno82a,
 author = "Arnon, Dennis S. and McCallum, Scott",
 title = {{Cylindrical Algebraic Decomposition by Quantifier Elimination}},
 journal = "Lecture Notes in Computer Science",
 volume = "144",
 pages = "215222",
 year = "1982",
 abstract =
 "Cylindrical algebraic decompositions were introduced as a major
 component of a new quantifier elimination algorithm for elementary
 algebra and geometry (G. Collins, 1973). In the present paper we turn
 the tables and show that one can use quantifier elimination for
 elementary algebra and geometry to obtain a new version of the
 cylindrical algebraic decomposition algorithm. A key part of our
 result is a theorem, of interest in its own right, that relates the
 multiplicities of the roots of a polynomial to their continuity.",
 paper = "Arno82a.pdf"
+@book{Nede14,
+ author = "Nederpelt, Rob and Geuvers, Herman",
+ title = {{Type Theory and Formal Proof}},
+ year = "2014",
+ publisher = "Cambridge University Press",
+ isbn = "9781107036505"
}
\end{chunk}
\index{Collins, George E.}
+\index{Robinson, Alan}
+\index{Voronkov, Andrei}
\begin{chunk}{axiom.bib}
@article{Coll82a,
 author = "Collins, George E.",
 title = {{Factorization in Cylindrical Algebraic Decomposition  Abstract}},
 journal = "Lecture Notes in Computer Science",
 volume = "144",
 pages = "212214",
 year = "1982",
 paper = "Coll82a.pdf"
+@book{Robi01,
+ author = "Robinson, Alan and Voronkov, Andrei",
+ title = {{Handbook of Automated Reasoning (2 Volumes)}},
+ year = "2001",
+ publisher = "MIT Press",
+ isbn = "0262182238"
}
\end{chunk}
\index{Lazard, Daniel}
+\index{Barendregt, Henk}
+\index{Wiedijk, Freek}
\begin{chunk}{axiom.bib}
@article{Laza82a,
 author = "Lazard, Daniel",
 title = {{On Polynomial Factorization}},
 journal = "Lecture Notes in Computer Science",
 volume = "144",
 pages = "144157",
 year = "1982",
 abstract =
 "We present new algorithms for factoring univariate polynomials
 over finite fields. They are variants of the algorithms of Camion
 and CantorZassenhaus and differ from them essentially by
 computing the primitive idempotents of the algebra $K[X]/f$ before
 factoring $f$.

 These algorithms are probabilistic in the following sense. The
 time of computation depends on random choices, but the validity of
 the result does not depend on them. So, worst case complexity,
 being infinite, is meaningless and we compute average
 complexity. It appears that our and CantorZassenhaus algorithms
 have the same asymptotic complexity and they are the best
 algorithms actually known; with elementary multiplication and GCD
 computation, CantorZassenhaus algorithm is always better than
 ours; with fast multiplication and GCD, it seems that ours is
 better, but this fact is not yet proven.

 Finally, for factoring polynomials over the integers, it seems
 that the best strategy consists in choosing prime moduli as big as
 possible in the range where the time of the multiplication does
 not depend on the size of the factors (machine word size). An
 accurate computation of the involved constants would be needed for
 proving this estimation.",
 paper = "Laza82a.pdf"
+@article{Bare05,
+ author = "Barendregt, Henk and Wiedijk, Freek",
+ title = {{The Challenge of Computer Mathematics}},
+ journal = "Phil. Trans. Royal Society",
+ volume = "363",
+ pages = "23512375",
+ year = "2005",
+ abstract =
+ "Progress in the foundations of mathematics has made it possible to
+ formulate all thinkable mathematical concepts, algorithms and proofs
+ in one language and in an impeccable way. This is not in spite of, but
+ partially based on the famous results of Godel and Turing. In this way
+ statements are about mathematical objects and algorithms, proofs show
+ the correctness of statements and computations, and computations are
+ dealing with objects and proofs. Interactive computer systems for a
+ full integration of defining, computing and proving are based on
+ this. The human defines concepts, constructs algorithms and provides
+ proofs, while the machine checks that the definitions are well formed
+ and the proofs and computations are correct. Results formalized so far
+ demonstrate the feasibility of this ‘computer mathematics’. Also there
+ are very good applications. The challenge is to make the systems more
+ mathematicianfriendly, by building libraries and tools. The eventual
+ goal is to help humans to learn, develop, communicate, referee and
+ apply mathematics.",
+ paper = "Bare05.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Strachey, Christopher}
\index{Wadsworth, Christopher}
+\index{Klaeren, Herbert A.}
\begin{chunk}{axiom.bib}
@article{Stra00a,
 author = "Strachey, Christopher and Wadsworth, Christopher",
 title = {{Continuations: A Mathematical Semantics for Handling Full Jumps}},
 journal = "HigherOrder and Symbolic Computation",
 volume = "13",
 pages = "135152",
 year = "2000",
+@article{Klae84,
+ author = "Klaeren, Herbert A.",
+ title = {{A Constructive Method for Abstract Algebraic Software
+ Specification}},
+ journal = "Theoretical Computer Science",
+ volume = "30",
+ year = "1984",
+ pages = "139204",
abstract =
 "This paper describes a method of giving the mathematical
 semantics of programming languages which include the most general
 form of jumps.",
 paper = "Stra00a.pdf",
+ "A constructive method for abstract algebraic software
+ specification is presented, where the operations are not
+ implicitly specified by equations but by an explicit recursion on
+ the generating operations of an algebra characterizing the
+ underlying data structure. The underlying algebra itself may be
+ equationally specified since we cannot assume that all data
+ structures will correspond to free algebras. This implies that we
+ distinguish between generating and defined operations and that the
+ underlying algebra has a mechanism of wellfounded decomposition
+ w.r.t. the generating operations. We show that the explicit
+ specification of operations using socalled structural recursive
+ schemata offers advantages over purely equational specifications,
+ especially concerning the safeness of enrichments, the ease of
+ semantics description and the separation between the underlying
+ data structure and the operations defined on it.",
keywords = "printed"
}
\end{chunk}
\index{Kaes, Stefan}
+\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@article{Kaes88,
 author = "Kaes, Stefan",
 title = {{Parametric Overloading in Polymorphic Programming Languages}},
 journal = "LNCS",
 volume = "300",
 pages = "131144",
 year = "1988",
 abstract =
 "The introduction of unrestricted overloading in languages with type
 systems based on implicit parametric polymorphism generally destroys
 the principal type property: namely that the type of every expression
 can uniformly be represented by a single type expression over some set
 of type variables. As a consequence, type inference in the presence
 of unrestricted overloading can become a NPcomplete problem. In
 this paper we define the concept of parametric overloading as a
 restricted form of overloading which is easily combined with
 parametric polymorphism. Parametric overloading preserves the
 principal type property, thereby allowing the design of efficient type
 inference algorithms. We present sound type deduction systems, both
 for predefined and programmer defined overloading. Finally we state
 that parametric overloading can be resolved either statically, at
 compile time, or dynamically, during program execution.",
 paper = "Kaes88.pdf",
+@misc{Pfen04,
+ author = "Pfenning, Frank",
+ title = {{Automated Theorem Proving}},
+ year = "2004",
+ comment = "Draft of Spring 2004",
keywords = "printed"
}
\end{chunk}
\index{Kaes, Stefan}
+\index{Coquand, Thierry}
+\index{Huet, Gerard}
\begin{chunk}{axiom.bib}
@article{Kaes92,
 author = "Kaes, Stefan",
 title = {{Type Inference in the Presence of Overloading, Subtyping and
 Recursive Types}},
 journal = "ACM Lisp Pointers",
 volume = "5",
 number = "1",
 year = "1992",
 pages = "193204",
 abstract =
 "We present a unified approach to type inference in the presence of
 overloading and coercions based on the concept of {\sl constrained
 types}. We define a generic inference system, show that subtyping and
 overloading can be treated as a special instance of this system and
 develop a simple algorithm to compute principal types. We prove the
 decidability of type inference for the class of {\sl decomposable
 predicates} and develop a canonical representation for principal types
 based on {\sl most accurate simplifications} of constraint
 sets. Finally, we investigate the extension of our techniques to
 {\sl recursive types}.",
 paper = "Kaes92.pdf",
+@incollection{Coqu88,
+ author = "Coquand, Thierry and Huet, Gerard",
+ title = {{The Calculus of Constructions}},
+ booktitle = "Information and Computation, Volume 76",
+ year = "1988",
+ publisher = "Academic Press",
+ paper = "Coqu88.pdf",
keywords = "printed"
}
\end{chunk}
\index{Hall, Cordelia V.}
\index{Hammond, Kevin}
\index{Jones, Simon L. Peyton}
\index{Wadler, Philip L.}
+\index{Okasaki, Chris}
\begin{chunk}{axiom.bib}
@article{Hall96,
 author = "Hall, Cordelia V. and Hammond, Kevin and Jones, Simon L. Peyton
 and Wadler, Philip L.",
 title = {{Type Classes in Haskell}},
 journal = "Trans. on Programming Languages and Systems",
 volume = "18",
 number = "2",
 pages = "109138",
+@phdthesis{Okas96,
+ author = "Okasaki, Chris",
+ title = {{Purely Functional Data Structures}},
+ school = "Carnegie Mellon University",
year = "1996",
+ link = "\url{http://www.cs.cmu.edu/~rwh/theses/okasaki.pdf}",
+ comment = "CMUCS96177",
+ isbn = "9780521663502",
abstract =
 "This article defines a set of type inference rules for resolving
 overloading introduced by type classes, as used in the functional
 programming language Haskell. Programs including type classes are
 transformed into ones which may be typed by standard HindleyMilner
 inference rules. In contrast to other work on type classes, the rules
 presented here relate directly to Haskell programs. An innovative
 aspect of this work is the use of secondorder lambda calculus to
 record type information in the transformed program.",
 paper = "Hall96.pdf",
 keywords = "printed"
}
+ "When a C programmer needs an efficient data structure for a
+ particular problem, he or she can often simply look one up in any of
+ a number of good textbooks or handbooks. Unfortunately, programmers
+ in functional languages such as Standard ML or Haskell do not have
+ this luxury. Although some data structures designed for imperative
+ languages such as C can be quite easily adapted to a functional
+ setting, most cannot, usually because they depend in crucial ways on
+ assignments, which are disallowed, or at least discouraged, in
+ functional languages. To address this imbalance, we describe several
+ techniques for designing functional data structures, and numerous
+ original data structures based on these techniques, including multiple
+ variations of lists, queues, doubleended queues, and heaps, many
+ supporting more exotic features such as random access or efficient
+ catenation.
+
+ In addition, we expose the fundamental role of lazy evaluation in
+ amortized functional data structures. Traditional methods of
+ amortization break down when old versions of a data structure, not
+ just the most recent, are available for further processing. This
+ property is known as persistence , and is taken for granted in
+ functional languages. On the surface, persistence and amortization
+ appear to be incompatible, but we show how lazy evaluation can be used
+ to resolve this conflict, yielding amortized data structures that are
+ efficient even when used persistently. Turning this relationship
+ between lazy evaluation and amortization around, the notion of
+ amortization also provides the first practical techniques for
+ analyzing the time requirements of nontrivial lazy programs.
+
+ Finally, our data structures offer numerous hints to programming
+ language designers, illustrating the utility of combining strict and
+ lazy evaluation in a single language, and providing nontrivial
+ examples using polymorphic recursion and higherorder, recursive
+ modules.",
+ paper = "Okas96.pdf"
+}
\end{chunk}
\index{Dreyer, Derek}
\index{Harper, Robert}
\index{Chakravarty, Manuel M.T.}
\index{Keller, Gabriele}
+\index{Letouzey, Pierre}
\begin{chunk}{axiom.bib}
@inproceedings{Drey07,
 author = "Dreyer, Derek and Harper, Robert and Chakravarty, Manuel M.T.
 and Keller, Gabriele",
 title = {{Modular Type Classes}},
 booktitle = "Proc. POPL'07",
 pages = "6370",
 year = "2007",
+@inproceedings{Leto04,
+ author = "Letouzey, Pierre",
+ title = {{A New Extraction for Coq}},
+ booktitle = "Types for Proofs and Programs. TYPES 2002",
+ publisher = "Springer",
+ pages = "617635",
+ year = "2004",
abstract =
 "ML modules and Haskell type classes have proven to be highly
 effective tools for program structuring. Modules emphasize explicit
 configuration of program components and the use of data abstraction.
 Type classes emphasize implicit program construction and ad hoc
 polymorphism. In this paper , we show how the implicitlytyped
 style of type class programming may be supported within the framework
 of an explicitlytyped module language by viewing type classes as a
 particular mode of use of modules. This view offers a harmonious
 integration of modules and type classes, where type class features,
 such as class hierarchies and associated types, arise naturally as
 uses of existing modulelanguage constructs, such as module
 hierarchies and type components. In addition, programmers have
 explicit control over which type class instances are available for
 use by type inference in a given scope. We formalize our approach as
 a HarperStonestyle elaboration relation, and provide a sound type
 inference algorithm as a guide to implementation.",
 paper = "Drey07.pdf",
+ "We present here a new extraction mechanism for the Coq proof
+ assistant. By extraction, we mean automatic generation of
+ functional code from Coq proofs, in order to produce certified
+ programs. In former versions of Coq, the extraction mechanism
+ suffered several limitations and in particular worked only with a
+ subset of the language. We first discuss difficulties encountered
+ and solutions proposed to remove these limitations. Then we give a
+ proof of correctness for a theoretical model of the new
+ extraction. Finally we describe the actual implementation.",
+ paper = "Leto04.pdf",
keywords = "printed"
}
\end{chunk}
\index{Wehr, Stefan}
\index{Chakravarty, Manuel M.T.}
+\index{Abrams, Marshall D.}
+\index{Zelkowitz, Marvin V.}
\begin{chunk}{axiom.bib}
@article{Wehr08,
 author = "Wehr, Stefan and Chakravarty, Manuel M.T.",
 title = {{ML Modules and Haskell Type Classes: A Constructive
 Comparison}},
 journal = "LNCS",
 volume = "5356",
 pages = "188204",
 year = "2008",
 abstract =
 "Researchers repeatedly observed that the module system of ML and the
 type class mechanism of Haskell are related. So far, this relationship
 has received little formal investigation. The work at hand fills this
 gap: It introduces typepreserving translations from modules to type
 classes and vice versa, which enable a thorough comparison of the two
 concepts.",
 paper = "Wehr08.pdf",
 keywords = "printed"
}
+@article{Abra95,
+ author = "Abrams, Marshall D. and Zelkowitz, Marvin V.",
+ title = {{Striving for Correctness}},
+ journal = "Computers and Security",
+ volume = "14",
+ year = "1995",
+ pages = "719738",
+ paper = "Abra95.pdf"
+}
\end{chunk}
\index{Stuckey, Peter J.}
\index{Sulzmann, Martin}
+\index{Parnas, David L.}
\begin{chunk}{axiom.bib}
@article{Stuc05,
 author = "Stuckey, Peter J. and Sulzmann, Martin",
 title = {{A Theory of Overloading}},
 journal = "ACM Trans. on Programming Languages and Systems",
 volume = "27",
 number = "6",
 pages = "154",
 year = "2005",
+@article{Parn72,
+ author = "Parnas, David L.",
+ title = {{A Technique for Software Module Specification with
+ Examples}},
+ journal = "CACM",
+ volume = "15",
+ number = "5",
+ year = "1972",
+ pages = "330336",
abstract =
 "We present a novel approach to allow for overloading of
 identifiers in the spirit of type classes. Our approach relies on
 the combination of the HM(X) type system framework with Constraint
 Handling Rules (CHRs). CHRs are a declarative language for writing
 incremental constraint solvers, that provide our scheme with a
 form of programmable type language. CHRs allow us to precisely
 describe the relationships among overloaded identifiers. Under
 some sufficient conditions on the CHRs we achieve decidable type
 inference and the semantic meaning of programs is unambiguous. Our
 approach provides a common formal basis for many type class
 extensions such as multiparameter type classes and functional
 dependencies.",
 paper = "Stuc05.pdf",
+ "This paper presents an approach to writing specifications for
+ parts of software systems. The main goal is to provide
+ specifications sufficiently precise and complete that other pieces
+ of software can be written to interact with the piece specified
+ without additional information. The secondary goal is to include
+ in the specification no more information than necessary to meet
+ the first goal. The technique is illustrated by means of a variety
+ of examples from a tutorial system.",
+ paper = "Parn72.pdf",
keywords = "printed"
}
\end{chunk}
\index{Reynolds, J.C.}
+\index{Johnson, C.W.}
\begin{chunk}{axiom.bib}
@article{Reyn85,
 author = "Reynolds, J.C.",
 title = {{Three Approaches to Type Structure}},
 journal = "LNCS",
 volume = "185",
 year = "1985",
+@article{John96,
+ author = "Johnson, C.W.",
+ title = {{Literate Specifications}},
+ journal = "Software Engineering Journal",
+ volume = "11",
+ number = "4",
+ year = "1996",
+ pages = "225237",
+ paper = "John96.pdf",
abstract =
 "We examine three disparate views of the type structure of
 programming languages: Milner's type deduction system and polymorphic
 let construct, the theory of subtypes and generic operators, and
 the polymorphic or secondorder typed lambda calculus. These
 approaches are illustrated with a functional language including
 product, sum and list constructors. The syntactic behavior of types
 is formalized with type inference rules, but their semantics is
 treated intuitively.",
 paper = "Reyn85.pdf",
+ "The increasing scale and complexity of software is imposing serious
+ burdens on many industries. Formal notations, such as Z, VDM and
+ temporal logic, have been developed to address these problems. There
+ are, however, a number of limitations that restrict the use of
+ mathematical specifications for large-scale software development. Many
+ projects take months or years to complete. This creates difficulties
+ because abstract mathematical requirements cannot easily be used by
+ new members of a development team to understand past design
+ decisions. Formal specifications describe what software must do, they
+ do not explain why it must do it. In order to avoid these limitations,
+ a literate approach to software engineering is proposed. This
+ technique integrates a formal specification language and a semiformal
+ design rationale. The Z schema calculus is used to represent what a
+ system must do. In contrast, the Questions, Options and Criteria
+ notation is used to represent the justifications that lie behind
+ development decisions. Empirical evidence is presented that suggests
+ the integration of these techniques provides significant benefits over
+ previous approaches to mathematical analysis and design techniques. A
+ range of tools is described that have been developed to support our
+ literate approach to the specification of large-scale software systems.",
keywords = "printed"
}
\end{chunk}
\index{Balsters, Herman}
\index{Fokkinga, Maarten M.}
+\index{Finney, Kate}
\begin{chunk}{axiom.bib}
@article{Bals91,
 author = "Balsters, Herman and Fokkinga, Maarten M.",
 title = {{Subtyping can have a simple semantics}},
 journal = "Theoretical Computer Science",
 volume = "87",
 pages = "8196",
 year = "1991",
+@article{Finn96,
+ author = "Finney, Kate",
+ title = {{Mathematical Notation in Formal Specifications: Too
+ Difficult for the Masses?}},
+ journal = "IEEE Trans. on Software Engineering",
+ volume = "22",
+ number = "2",
+ year = "1996",
+ pages = "158159",
abstract =
 "Consider a first order typed language, with semantics
 $\llbracket~\rrbracket$ for expressions and types. Adding
 subtyping means that a partial order $\le$ on types is defined
 and that the typing rules are extended to the effect that
 expression $e$ has type $\tau$ whenever $e$ has type $\sigma$ and
 $\sigma \le \tau$. We show how to adapt the semantics
 $\llbracket~\rrbracket$ in a {\sl simple set theoretic way},
 obtaining a semantics $\llbracket~\rrbracket$ that satisfies, in
 addition to some obvious requirements, also the property
 $\llbracket\sigma\rrbracket \subseteq $\llbracket\tau\rrbracket$,
 whenever $\sigma \le \tau$.",
 paper = "Bals91.pdf",
+ "The phrase 'not much mathematics required' can imply a
+ variety of skill levels. When this phrase is applied to computer
+ scientists, software engineers, and clients in the area of formal
+ specification, the word 'much' can be widely misinterpreted with
+ disastrous consequences. A small experiment in reading
+ specifications revealed that students already trained in discrete
+ mathematics and the specification notation performed very poorly;
+ much worse than could reasonably be expected if formal methods
+ proponents are to be believed.",
+ paper = "Finn96.pdf",
keywords = "printed"
}
\end{chunk}
\index{Cardelli, Luca}
+\index{Moore, Andrew P.}
+\index{Payne Jr., Charles N.}
\begin{chunk}{axiom.bib}
@article{Card84,
 author = "Cardelli, Luca",
 title = {{A Semantics of Multiple Inheritance}},
 journal = "LNCS",
 volume = "173",
 year = "1984",
 paper = "Card84.pdf",
+@inproceedings{Moor96,
+ author = "Moore, Andrew P. and Payne Jr., Charles N.",
+ title = {{Increasing Assurance with Literate Programming Techniques}},
+ booktitle = "Conf. on Computer Assurance COMPASS'96",
+ publisher = "National Institute of Standards and Technology",
+ year = "1996",
+ pages = "187198",
+ abstract =
+ "The assurance argument that a trusted system satisfies its
+ information security requirements must be convincing, because the
+ argument supports the accreditation decision to allow the computer to
+ process classified information in an operational
+ environment. Assurance is achieved through understanding, but some
+ evidence that supports the assurance argument can be difficult to
+ understand. This paper describes a novel application of a technique,
+ called literate programming [11], that significantly improves the
+ readability of the assurance argument while maintaining its
+ consistency with formal specifications that are input to specification
+ and verification systems. We describe an application of this
+ technique to a simple example and discuss the lessons learned from
+ this effort.",
+ paper = "Moor96.pdf",
keywords = "printed"
}
\end{chunk}
\index{Mosses, Peter}
+\index{Nissanke, Nimal}
\begin{chunk}{axiom.bib}
@article{Moss84,
 author = "Mosses, Peter",
 title = {{A Basic Abstract Semantics Algebra}},
 journal = "LNCS",
 volume = "173",
 year = "1984",
 abstract =
 "It seems that there are some pragmatic advantages in using Abstract
 Semantic Algebras (ASAs) instead of $\lambda$-notation in denotational
 semantics. The values of ASAs correspond to 'actions' (or
 'processes'), and the operators correspond to primitive ways of
 combining actions. There are simple ASAs for the various independent
 'facets' of actions: a functional ASA for dataflow, an imperative ASA
 for assignments, a declarative ASA for bindings, etc. The aim is to
 obtain general ASAs by systematic combination of these simple ASAs.

 Here we specify a basic ASA that captures the common features of the
 functional, imperative and declarative ASAs  and highlights their
 differences. We discuss the correctness of ASA specifications, and
 sketch the proof of the consistency and (limiting) completeness of the
 functional ASA, relative to a simple model.

 Some familiarity with denotational semantics and algebraic
 specifications is assumed.",
 paper = "Moss84.pdf"
+@book{Niss99,
+ author = "Nissanke, Nimal",
+ title = {{Formal Specification}},
+ publisher = "Springer",
+ year = "1999",
+ isbn = "9781852330026",
+ paper = "Niss99.pdf"
}
+\end{chunk}
+
+\index{Vienneau, Robert L.}
+\begin{chunk}{axiom.bib}
+@misc{Vien93,
+ author = "Vienneau, Robert L.",
+ title = {{A Review of Formal Methods}},
+ year = "1993",
+ paper = "Vien93.pdf",
+ keywords = "printed"
+}
\end{chunk}
\index{Gross, Jason}
\index{Chlipala, Adam}
+\index{Murthy, S.G.K}
+\index{Sekharam, K. Raja}
\begin{chunk}{axiom.bib}
@misc{Gros15,
 author = "Gross, Jason and Chlipala, Adam",
 title = {{Parsing Parses}},
 link = "\url{https://people.csail.mit.edu/jgross/personalwebsite/papers/2015parsingparsetrees.pdf}",
 year = "2015",
 abstract =
 "We present a functional parser for arbitrary contextfree grammars,
 together with soundness and completeness proofs, all inside Coq. By
 exposing the parser in the right way with parametric polymorphism and
 dependent types, we are able to use the parser to prove its own
 soundness, and, with a little help from relational parametricity,
 prove its own completeness, too. Of particular interest is one strange
 instantiation of the type and value parameters: by parsing parse trees
 instead of strings, we convince the parser to generate its own
 completeness proof. We conclude with highlights of our experiences
 iterating through several versions of the Coq development, and some
 general lessons about dependently typed programming.",
 paper = "Gros15.pdf",
+@article{Murt09,
+ author = "Murthy, S.G.K and Sekharam, K. Raja",
+ title = {{Software Reliability through Theorem Proving}},
+ journal = "Defence Science Journal",
+ volume = "59",
+ number = "3",
+ year = "2009",
+ pages = "314317",
+ abstract =
+ "Improving software reliability of missioncritical systems is
+ widely recognised as one of the major challenges. Early detection
+ of errors in software requirements, designs and implementation,
+ need rigorous verification and validation techniques. Several
+ techniques comprising static and dynamic testing approaches are
+ used to improve reliability of mission critical software; however
+ it is hard to balance development time and budget with software
+ reliability. Particularly using dynamic testing techniques, it is
+ hard to ensure software reliability, as exhaustive testing is not
+ possible. On the other hand, formal verification techniques
+ utilise mathematical logic to prove correctness of the software
+ based on given specifications, which in turn improves the
+ reliability of the software. Theorem proving is a powerful formal
+ verification technique that enhances the software reliability for
+ mission critical aerospace applications. This paper discusses the
+ issues related to software reliability and theorem proving used to
+ enhance software reliability through formal verification
+ technique, based on the experiences with STeP tool, using the
+ conventional and internationally accepted methodologies, models,
+ theorem proving techniques available in the tool without proposing
+ a new model.",
+ paper = "Murt09.pdf",
keywords = "printed"
}
\end{chunk}
\index{Wehr, Stefan}
+\index{Paulson, Lawrence C.}
\begin{chunk}{axiom.bib}
@phdthesis{Wehr05,
 author = "Wehr, Stefan",
 title = {{ML Modules and Haskell Type Classes: A Constructive
 Comparison}},
 school = "Albert Ludwigs Universitat",
 year = "2005",
 abstract =
 "Researchers repeatedly observed that the module system of ML and the
 type class mechanism of Haskell are related. So far, this relationship
 has received little formal investigation. The work at hand fills this
 gap: It introduces typepreserving translations from modules to type
 classes and vice versa, which enable a thorough comparison of the two
 concepts.

 The source language of the first translation is a subset of
 Standard ML. The target language is Haskell with common extensions
 and one new feature, which was developed as part of this work. The
 second translation maps a subset of Haskell 98 to ML, with
 wellestablished extensions. I prove that the translations
 preserve type correctness and provide implementations for both.

 Building on the insights obtained from the translations, I present
 a thorough comparison between ML modules and Haskell type
 classes. Moreover, I evaluate to what extent the techniques used
 in the translations can be exploited for modular programming in
 Haskell and for programming with adhoc polymorphism in ML.",
 paper = "Wehr05.pdf"
+@incollection{Paul90a,
+ author = "Paulson, Lawrence C.",
+ title = {{Isabelle: The Next 700 Theorem Provers}},
+ booktitle = "Logic and Computer Science",
+ publisher = "Academic Press",
+ pages = "361386",
+ year = "1990",
+ paper = "Paul90a.pdf"
}
\end{chunk}
\index{Dreyer, Derek}
\index{Crary, Karl}
\index{Harper, Robert}
+\index{Beeson, M.}
\begin{chunk}{axiom.bib}
@article{Drey03,
 author = "Dreyer, Derek and Crary, Karl and Harper, Robert",
 title = {{A Type System for HigherOrder Modules}},
 journal = "ACM SIGPLAN Notices",
 volume = "38",
+@article{Bees16,
+ author = "Beeson, M.",
+ title = {{Mixing Computations and Proofs}},
+ journal = "J. of Formalized Reasoning",
+ volume = "9",
number = "1",
 year = "2003",
 link = "\url{https://www.cs.cmu.edu/~crary/papers/2003/thoms/thoms.pdf}",
+ pages = "7199",
+ year = "2016",
abstract =
 "We present a type theory for higherorder modules that accounts for
 many central issues in module system design, including translucency,
 applicativity , generativity , and modules as firstclass values.
 Our type system harmonizes design elements from previous work,
 resulting in a simple, economical account of modular programming. The
 main unifying principle is the treatment of abstraction mechanisms
 as computational effects. Our language is the first to provide a
 complete and practical formalization of all of these critical issues
 in module system design.",
 paper = "Drey03.pdf",
+ "We examine the relationship between proof and computation in
+ mathematics, especially in formalized mathematics. We compare the
+ various approaches to proofs with a significant computational
+ component, including (i) verifying the algorithms, (ii) verifying the
+ results of the unverified algorithms, and (iii) trusting an external
+ computation.",
+ paper = "Bees16.pdf",
keywords = "printed"
}
\end{chunk}
\index{Crary, Karl}
\index{Harper, Robert}
\index{Puri, Sidd}
+\index{Beeson, Michael}
\begin{chunk}{axiom.bib}
@inproceedings{Crar99,
 author = "Crary, Karl and Harper, Robert and Puri, Sidd",
 title = {{What is a Recursive Module}},
 booktitle = "Conf. on Programming Language Design and Implementation",
 year = "1999",
 link = "\url{https://www.cs.cmu.edu/~crary/papers/1999/recmod/recmod.dvi}",
 abstract =
 "A hierarchical module system is an effective tool for structuring
 large programs. Strictly hierarchical module systems impose an
 acyclic ordering on import dependencies among program units. This
 can impede modular programming by forcing mutuallydependent
 components to be consolidated into a single module. Recently there
 have been several proposals for module systems that admit cyclic
 dependencies, but it is not clear how these proposals relate to
 one another, nor how one might integrate them into an expressive
 module system such as that of ML.

 To address this question we provide a typetheoretic analysis of
 the notion of a recursive module in the context of a
 ``phasedistinction'' formalism for higherorder module
 systems. We extend this calculus with a recursive module mechanism
 and a new form of signature, called a {\sl recursively dependent
 signature}, to support the definition of recursive modules. These
 extensions are justified by an interpretation in terms of more
 primitive language constructs. This interpretation may also serve
 as a guide for implementation.",
 paper = "Crar99.pdf",
 keywords = "printed"
+@misc{Bees14,
+ author = "Beeson, M.",
+ title = {{Mixing Computations and Proofs}},
+ comment = "slides",
+ year = "2014",
+ link = "\url{http://www.cs.ru.nl/qed20/slides/beesonqed20.pdf}",
+ paper = "Bees14.pdf"
}
\end{chunk}
\index{Dreyer, Derek}
\index{Crary, Karl}
\index{Harper, Robert}
+\index{Gunter, Elsa L.}
\begin{chunk}{axiom.bib}
@techreport{Drey02,
 author = "Dreyer, Derek and Crary, Karl and Harper, Robert",
 title = {{A Type System for HigherOrder Modules (Expanded Version)}},
 type = "technical report",
 institution = "Carnegie Mellon University",
 number = "CMUCS02122R",
 year = "2002",
 link = "\url{https://www.cs.cmu.edu/~crary/papers/2003/thoms/thomstr.pdf}",
+@inproceedings{Gunt89a,
+ author = "Gunter, Elsa L.",
+ title = {{Extensions to Logic Programming Motivated by the
+ Construction of a Generic Theorem Prover}},
+ booktitle = "Int. Workshop on Extensions of Logic Programming",
+ publisher = "Springer",
+ year = "1989",
+ pages = "223244",
abstract =
 "We present a type theory for higherorder modules that accounts for
 many central issues in module system design, including translucency,
 applicativity , generativity , and modules as firstclass values.
 Our type system harmonizes design elements from previous work,
 resulting in a simple, economical account of modular programming. The
 main unifying principle is the treatment of abstraction mechanisms
 as computational effects. Our language is the first to provide a
 complete and practical formalization of all of these critical issues
 in module system design.",
 paper = "Drey02.pdf"
+ "In this article, we discuss several possible extensions to
+ traditional logic programming languages. The specific extensions
+ proposed here fall into two categories: logical extensions and the
+ addition of constructs to allow for increased control. There is a
+ unifying theme to the proposed logical extensions, and that is the
+ scoped introduction of extensions to a programming context. More
+ specifically, these extensions are the ability to introduce variables
+ whose scope is limited to the term in which they occur (i.e. lambda-bound
+ variables within lambda-terms), the ability to introduce into a goal a
+ fresh constant whose scope is limited to the derivation of that goal,
+ and the ability to introduce into a goal a program clause whose scope
+ is limited to the derivation of that goal. The purpose of the
+ additions for increased control is to facilitate the raising and
+ handling of failures.
+
+ To motivate these various extensions, we have repeatedly appealed to
+ examples related to the construction of a generic theorem prover. It
+ is our thesis that this problem domain is specific enough to lend
+ focus when one is considering various language constructs, and yet
+ complex enough to encompass many of the general difficulties found in
+ other areas of symbolic computation.",
+ paper = "Gunt89a.pdf"
}
\end{chunk}
\index{Crary, Karl}
+\index{Paulson, Lawrence C.}
+\index{Smith, Andrew W.}
\begin{chunk}{axiom.bib}
@techreport{Crar02,
 author = "Crary, Karl",
 title = {{Toward a Foundational Typed Assembly Language}},
 institution = "Carnegie Mellon University",
 number = "CMUCS02196",
 year = "2002",
 link = "\url{https://www.cs.cmu.edu/~crary/papers/2003/talt/talttr.pdf}",
+@inproceedings{Paul89,
+ author = "Paulson, Lawrence C. and Smith, Andrew W.",
+ title = {{Logic Programming, Functional Programming, and
+ Inductive Definitions}},
+ booktitle = "Int. Workshop on Extensions of Logic Programming",
+ publisher = "Springer",
+ year = "1989",
+ pages = "283309",
abstract =
 "We present the design of a typed assembly language called TALT that
 supports heterogeneous tuples, disjoint sums, arrays, and a general
 account of addressing modes. TALT also implements the von Neumann
 model in which programs are stored in memory, and supports relative
 addressing. Type safety for execution and for garbage collection are
 shown by machinecheckable proofs. TALT is the first formalized typed
 assembly language to provide any of these features.",
 paper = "Crar02.pdf"
+ "The unification of logic and functional programming, like the Holy
+ Grail, is sought by countless people [6, 14]. In reporting our
+ attempt, we first discuss the motivation. We argue that logic
+ programming is still immature, compared with functional programming,
+ because few logic programs are both useful and pure. Functions can
+ help to purify logic programming, for they can eliminate certain uses
+ of the cut and can express certain negations positively.
+
+ More generally, we suggest that the traditional paradigm  logic
+ programming as firstorder logic  is seriously out of step with
+ practice. We offer an alternative paradigm. We view the logic program
+ as an inductive definition of sets and relations. This view explains
+ certain uses of Negation as Failure, and explains why most attempts to
+ extend PROLOG to larger fragments of firstorder logic have not been
+ successful. It suggests a logic language with functions,
+ incorporating equational unification.
+
+ We have implemented a prototype of this language. It is very slow,
+ but complete, and appear to be faster than some earlier
+ implementations of narrowing. Our experiments illustrate the
+ programming style and shed light on the further development of such
+ languages.",
+ paper = "Paul89.pdf"
}
\end{chunk}
\index{Mili, Ali}
\index{Aharon, Shir}
\index{Nadkarni, Chaitanya}
+\index{Weirich, Jim}
\begin{chunk}{axiom.bib}
@article{Mili09,
 author = "Mili, Ali and Aharon, Shir and Nadkarni, Chaitanya",
 title = {{Mathematics for Reasoning about Loop Functions}},
 journal = "Science of Computer Programming",
 volume = "79",
 year = "2009",
 pages = "9891020",
+@misc{Weir12,
+ author = "Weirich, Jim",
+ title = {{Y Not?  Adventures in Functional Programming}},
+ year = "2012",
+ link = "\url{https://www.infoq.com/presentations/YCombinator}",
+ abstract = "Explains the YCombinator"
+}
+
+\end{chunk}
+
+\index{Beeson, Michael}
+\begin{chunk}{axiom.bib}
+@article{Bees06,
+ author = "Beeson, Michael",
+ title = {{Mathematical Induction in OtterLambda}},
+ journal = "J. Automated Reasoning",
+ volume = "36",
+ number = "4",
+ pages = "311--344",
abstract =
 "The criticality of modern software applications, the pervasiveness of
 malicious code concerns, the emergence of thirdparty software
 development, and the preponderance of program inspection as a quality
 assurance method all place a great premium on the ability to analyze
 programs and derive their function in all circumstances of use and all
 its functional detail. For Clike programming languages, one of the
 most challenging tasks in this endeavor is the derivation of loop
 functions. In this paper, we outline the premises of our approach to
 this problem, present some mathematical results, and discuss how these
 results can be used as a basis for building an automated tool that
 derives the function of while loops under some conditions.",
 paper = "Mili09.pdf",
+ "Otterlambda is Otter modified by adding code to implement an
+ algorithm for lambda unification. Otter is a resolutionbased,
+ clauselanguage firstorder prover that accumulates deduced clauses
+ and uses strategies to control the deduction and retention of
+ clauses. This is the first time that such a firstorder prover has
+ been combined in one program with a unification algorithm capable of
+ instantiating variables to lambda terms to assist in the
+ deductions. The resulting prover has all the advantages of the
+ proofsearch algorithm of Otter (speed, variety of inference rules,
+ excellent handling of equality) and also the power of lambda
+ unification. We illustrate how these capabilities work well together
+ by using Otterlambda to find proofs by mathematical induction. Lambda
+ unification instantiates the induction schema to find a useful
+ instance of induction, and then Otter's firstorder reasoning can be
+ used to carry out the base case and induction step. If necessary,
+ induction can be used for those, too. We present and discuss a variety
+ of examples of inductive proofs found by Otterlambda: some in pure
+ Peano arithmetic, some in Peano arithmetic with defined predicates,
+ some in theories combining algebra and the natural numbers, some
+ involving algebraic simplification (used in the induction step) by
+ simplification code from MathXpert, and some involving list induction
+ instead of numerical induction. These examples demonstrate the
+ feasibility and usefulness of adding lambda unification to a
+ firstorder prover.",
+ paper = "Bees06.pdf",
keywords = "printed"
}
\end{chunk}
\index{Piroi, Florina}
\index{Buchberger, Bruno}
\index{Rosenkranz, Camelia}
+\index{Barendregt, Henk}
+\index{Barendsen, Erik}
\begin{chunk}{axiom.bib}
@misc{Piro08,
 author = "Piroi, Florina and Buchberger, Bruno and Rosenkranz, Camelia",
 title = {{Mathematical Journals as Reasoning Agents: Literature Review}},
 year = "2008",
 link = "\url{http://www.risc.jku.at/publications/download/risc_3442/MathAgentsforSFB2008031012h00.pdf}",
+@article{Bare02,
+ author = "Barendregt, Henk and Barendsen, Erik",
+ title = {{Autarkic Computations in Formal Proofs}},
+ journal = "J. Automated Reasoning",
+ volume = "28",
+ pages = "321336",
+ year = "2002",
abstract =
 "This report reviews the literature relevant for the research project
 'Math−Agents: Mathematical Journals as Reasoning Agents' proposed by
 Bruno Buchberger as a technology transfer project based on the results
 of the SFB Project 'Scientific Computing', in particular the project
 SFB 1302, 'Theorema'. The project aims at computer−supporting the
 refereeing process of mathematical journals by tools that are mainly
 based on automated reasoning and also at building up the knowledge
 archived in mathematical journals in such a way that they can act as
 interactive and active reasoning agents later on. In this report,
 we review current mathematical software systems with a focus on the
 availability of tools that can contribute to the goals of the Math−
 Agents project.",
 paper = "Piro08.pdf",
+ "Formal proofs in mathematics and computer science are being studied
+ because these objects can be verified by a very simple computer
+ program. An important open problem is whether these formal proofs can
+ be generated with an effort not much greater than writing a
+ mathematical paper in, say, LATEX. Modern systems for proof
+ development make the formalization of reasoning relatively
+ easy. However, formalizing computations in such a manner that the
+ results can be used in formal proofs is not immediate. In this paper
+ we show how to obtain formal proofs of statements such as Prime(61) in
+ the context of Peano arithmetic or $(x + 1)(x + 1) = x^2 + 2x + 1$ in
+ the context of rings. We hope that the method will help bridge the gap
+ between the efficient systems of computer algebra and the reliable
+ systems of proof development.",
+ paper = "Bare02.pdf",
keywords = "printed"
}
\end{chunk}
\index{Sokolowski, Stefan}
+\index{Beeson, Michael}
\begin{chunk}{axiom.bib}
@article{Soko87,
 author = "Sokolowski, Stefan",
 title = {{Soundness of Hoare's Logic: An Automated Proof Using LCF}},
 journal = "Trans. on Programming Languages and Systems",
 volume = "9",
 number = "1",
 pages = "100120",
 year = "1987",
+@article{Bees95,
+ author = "Beeson, Michael",
+ title = {{Using Nonstandard Analysis to Ensure the Correctness of
+ Symbolic Computations}},
+ journal = "Int. J. of Foundations of Computer Science",
+ volume = "6",
+ number = "3",
+ pages = "299338",
+ year = "1995",
abstract =
 "This paper presents a natural deduction proof of Hoare's logic
 carried out by the Edinburgh LCF theorem prover. The emphasis is
 on the way Hoare's theory is presented to the LCF, which looks
 very much like an exposition of syntax and semantics to human
 readers; and on the programmable heuristics (tactics). We also
 discuss some problems and possible improvements to the LCF.",
 paper = "Soko87.pdf",
+ "Nonstandard analysis has been put to use in a theoremprover,
+ where it assists in the analysis of formulae involving limits. The
+ theoremprover in question is used in the computer program
+ Mathpert to ensure the correctness of calculations in
+ calculus. Although nonstandard analysis is widely viewed as
+ nonconstructive, it can alternately be viewed as a method of
+ reducing logical manipulation (of formulae with quantifiers) to
+ computation (with rewrite rules). We give a logical theory of
+ nonstandard analysis which is implemented in Mathpert. We describe
+ a procedure for 'elimination of infinitesimals' (also implemented
+ in Mathpert) and prove its correctness.",
+ paper = "Bees95.pdf",
keywords = "printed"
}
\end{chunk}
\index{Wadler, Philip}
+\index{Beeson, Michael}
+\index{Wiedijk, Freek}
\begin{chunk}{axiom.bib}
@article{Wadl95,
 author = "Wadler, Philip",
 title = {{Monads for Functional Programming}},
+@article{Bees02,
+ author = "Beeson, Michael and Wiedijk, Freek",
+ title = {{The Meaning of Infinity in Calculus and Computer Algebra
+ Systems}},
journal = "LNCS",
 volume = "925",
+ volume = "2385",
+ year = "2002",
abstract =
 "The use of monads to structure functional programs is
 described. Monads provide a convenient framework for simulating
 effects found in other languages, such as global state, exception
 handling, output, or nondeterminism. Three case studies are
 looked at in detail: how monads ease the modification of a simple
 evaluator; how monads act as the basis of a datatype of arrays
 subject to inplace update; and how monads can be used to build
 parsers.",
 paper = "Wadl95.pdf",
+ "We use filters of open sets to provide a semantics justifying the
+ use of infinity in informal limit calculations in calculus, and in
+ the same kind of calculations in computer algebra. We compare the
+ behavior of these filters to the way Mathematica behaves when
+ calculating with infinity.
+
+ We stress the need to have a proper semantics for computer algebra
+ expressions, especially if one wants to use results and methods
+ from computer algebra in theorem provers. The computer algebra
+ method under discussion in this paper is the use of rewrite rules
+ to evaluate limits involving infinity.",
+ paper = "Bees02.pdf",
keywords = "printed"
}
\end{chunk}
\index{Freeman, Tim}
\index{Pfenning, Frank}
+\index{Zimmer, Jurgen}
+\index{Dennis, Louise A.}
\begin{chunk}{axiom.bib}
@inproceedings{Free91,
 author = "Freeman, Tim and Pfenning, Frank",
 title = {{Refinement Types for ML}},
 booktitle = "ACM SIGPLAN PLDI'91",
 year = "1991",
 link = "\url{http://www.cs.cmu.edu/~fp/papers/pldi91.pdf}",
+@article{Zimm02,
+ author = "Zimmer, Jurgen and Dennis, Louise A.",
+ title = {{Inductive Theorem Proving and Computer Algebra in the
+ MathWeb Software Bus}},
+ journal = "LNCS",
+ volume = "2385",
+ year = "2002",
abstract =
 "We describe a refinement of ML's type system allowing the
 specification of recursively defined subtypes of userdefined
 datatypes. The resulting system of {\sl refinement types}
 preserves desirable properties of ML such as decidability of type
 inference, while at the same time allowing more errors to be
 detected at compiletime. The type system combines abstract
 interpretation with ideas from the intersection type discipline,
 but remains closely tied to ML in that refinement types are given
 only to programs which are already welltyped in ML.",
 paper = "Free91.pdf",
+ "Reasoning systems have reached a high degree of maturity in the last
+ decade. However, even the most successful systems are usually not
+ general purpose problem solvers but are typically specialised on
+ problems in a certain domain. The MathWeb Software Bus (MathWebSB) is a
+ system for combining reasoning specialists via a common software bus.
+ We describe the integration of the $\lambda$Clam system, a reasoning
+ specialist for proofs by induction, into the MathWebSB. Due to this
+ integration, $\lambda$Clam now offers its theorem proving expertise
+ to other systems in the MathWebSB. On the other hand,
+ $\lambda$Clam can use the
+ services of any reasoning specialist already integrated. We focus on
+ the latter and describe first experiments on proving theorems by
+ induction using the computational power of the Maple system within
+ $\lambda$Clam.",
+ paper = "Zimm02.pdf",
keywords = "printed"
}
\end{chunk}
\index{Zeilberger, Noam}
+\index{Campbell, J.A.}
\begin{chunk}{axiom.bib}
@misc{Zeil16,
 author = "Zeilberger, Noam",
 title = {{Towards a Mathematical Science of Programming}},
 year = "2016"
+@article{Camp02,
+ author = "Campbell, J.A.",
+ title = {{Indefinite Integration as a Testbed for Developments in
+ Multiagent Systems}},
+ journal = "LNCS",
+ volume = "2385",
+ year = "2002",
+ abstract =
+ "Coordination of multiple autonomous agents to solve problems that
+ require each of them to contribute their limited expertise in the
+ construction of a solution is often ensured by the use of numerical
+ methods such as votecounting, payoff functions, game theory and
+ economic criteria. In areas where there are no obvious numerical methods
+ for agents to use in assessing other agents’ contributions, many
+ questions still remain open for research. The paper reports a study of
+ one such area: heuristic indefinite integration in terms of agents with
+ different single heuristic abilities which must cooperate in finding
+ indefinite integrals. It examines the reasons for successes and lack
+ of success in performance, and draws some general conclusions about
+ the usefulness of indefinite integration as a field for realistic
+ tests of methods for multiagent systems where the usefulness of
+ 'economic' criteria is limited. In this connection, the role of
+ numerical taxonomy is emphasised.",
+ paper = "Camp02.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Zeilberger, Noam}
+\index{Wiedijk, Freek}
\begin{chunk}{axiom.bib}
@inproceedings{Zeil16a,
 author = "Zeilberger, Noam",
 title = {{Principles of Type Refinement}},
 booktitle = "OPLSS 2106",
 link = "\url{http://noamz.org/oplss16/refinementsnotes.pdf}",
 year = "2016",
 paper = "Zeil16a.pdf"
+@misc{Wied18,
+ author = "Wiedijk, Freek",
+ title = {{Formalizing 100 Theorems}},
+ link = "\url{http://www.cs.ru.nl/~freek/100/}",
+ year = "2018"
}
\end{chunk}
\index{McCarthy, John}
+\index{Wiedijk, Freek}
\begin{chunk}{axiom.bib}
@incollection{Mcca63,
 author = "McCarthy, John",
 title = {{A Basis for a Mathematical Theory of Computation}},
 booktitle = "Computer Programming and Formal Systems",
 year = "1963",
 paper = "Mcca63.pdf"
+@misc{Wied08,
+ author = "Wiedijk, Freek",
+ title = {{Formal Proof --- Getting Started}},
+ link = "\url{https://www.ams.org/notices/200811/tx081101408p.pdf}",
+ year = "2008",
+ paper = "Wied08.pdf"
}
\end{chunk}
\index{Scott, Dana S.}
\index{Strachey, Christopher}
+\index{Kaliszyk, Cezary}
\begin{chunk}{axiom.bib}
@article{Scot71,
 author = "Scott, Dana S. and Strachey, C.",
 title = {{Towards a Mathematical Semantics for Computer Languages}},
 journal = "Proc. Symp. on Computers and Automata",
 volume = "21",
 year = "1971",
+@article{Kali08,
+ author = "Kaliszyk, Cezary",
+ title = {{Automating Side Conditions in Formalized Partial Functions}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
abstract =
 "Compilers for highlevel languages are generally constructed to
 give a complete translation of the programs into machine
 lanugage. As machines merely juggle bit patterns, the concepts of
 the original language may be lost or at least obscured during this
 passage. The purpose of a mathematical semantics is to give a
 correct and meaningful correspondence between programs and
 mathematical entities in a way that is entirely independent of an
 implementation. This plan is illustrated in a very elementary
 method with the usual idea of state transformations. The next
 section shows why the mathematics of functions has to be modified
 to accommodate recursive commands. Section 3 explains the
 modification. Section 4 introduces the environments for handling
 variables and identifiers and shows how the semantical equations
 define equivalence of programs. Section 5 gives an exposition of
 the new type of mathematical function spaces that are required for
 the semantics of procedures when these are allowed in assignment
 statements. The conclusion traces some of the background of the
 project and points the way to future work.",
 paper = "Scot71.pdf"
+ "Assumptions about the domains of partial functions are necessary in
+ stateoftheart proof assistants. On the other hand when
+ mathematicians write about partial functions they tend not to
+ explicitly write the side conditions. We present an approach to
+ formalizing partiality in real and complex analysis in total
+ frameworks that allows keeping the side conditions hidden from the
+ user as long as they can be computed and simplified
+ automatically. This framework simplifies defining and operating on
+ partial functions in formalized real analysis in HOL Light. Our
+ framework allows simplifying expressions under partiality conditions
+ in a proof assistant in a manner that resembles computer algebra
+ systems.",
+ paper = "Kali08.pdf"
}
\end{chunk}
\index{Mellies, PaulAndre}
\index{Zeilberger, Noam}
+
+\index{Kaliszyk, Cezary}
+\index{Wiedijk, Freek}
\begin{chunk}{axiom.bib}
@inproceedings{Mell15,
 author = "Mellies, PaulAndre and Zeilberger, Noam",
 title = {{Functors are Type Refinement Systems}},
 booktitle = "POPL'15",
 publisher = "ACM",
 year = "2015",
+@article{Kali08a,
+ author = "Kaliszyk, Cezary and Wiedijk, Freek",
+ title = {{Merging Procedural and Declarative Proof}},
+ journal = "LNCS",
+ volume = "5497",
+ year = "2008",
abstract =
 "The standard reading of type theory through the lens of category
 theory is based on the idea of viewing a type system as a category
 of welltyped terms. We propose a basic revision of this reading;
 rather than interpreting type systems as categories, we describe
 them as {\sl functors} from a category of typing derivations to a
 category of underlying terms. Then, turning this around, we
 explain how in fact {\sl any} functor gives rise to a generalized
 type system, with an abstract notion of type judgment, typing
 derivations and typing rules. This leads to a purely categorical
 reformulation of various natural classes of type systems as
 natural classes of functors.

 The main purpose of this paper is to describe the general
 framework (which can also be seen as providing a categorical
 analysis of {\sl refinement types}, and to present a few
 applications. As a larger case study, we revisit Reynold's paper
 on ``The Meaning of Types'' (2000), showing how the paper's main
 results may be reconstructed along these lines.",
 paper = "Mell15.pdf",
+ "There are two different styles for writing natural deduction proofs:
+ the 'Gentzen' style in which a proof is a tree with the conclusion at
+ the root and the assumptions at the leaves, and the 'Fitch' style
+ (also called ‘flag’ style) in which a proof consists of lines that are
+ grouped together in nested boxes.
+
+ In the world of proof assistants these two kinds of natural deduction
+ correspond to procedural proofs (tactic scripts that work on one or
+ more subgoals, like those of the Coq, HOL and PVS systems), and
+ declarative proofs (like those of the Mizar and Isabelle/Isar
+ languages).
+
+ In this paper we give an algorithm for converting tree style proofs to
+ flag style proofs. We then present a rewrite system that simplifies
+ the results.
+
+ This algorithm can be used to convert arbitrary procedural proofs to
+ declarative proofs. It does not work on the level of the proof terms
+ (the basic inferences of the system), but on the level of the
+ statements that the user sees in the goals when constructing the
+ proof.
+
+ The algorithm from this paper has been implemented in the ProofWeb
+ interface to Coq. In ProofWeb a proof that is given as a Coq proof
+ script (even with arbitrary Coq tactics) can be displayed both as a
+ tree style and as a flag style proof.",
+ paper = "Kali08a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Shields, Mark}
\index{Jones, Simon Peyton}
+\index{Akbarpour, Behzad}
+\index{Paulson, Lawrence C.}
\begin{chunk}{axiom.bib}
@inproceedings{Shie02,
 author = "Shields, Mark and Jones, Simon Peyton",
 title = {{FirstClass Modules for Haskell}},
 booktitle = "9th Int. Conf. on Foundations of ObjectOriented Languages",
 pages = "2840",
 year = "2002",
 link = "\url{http://www.microsoft.com/enus/research/wpcontent/uploads/2016/02/first_class_modules.pdf}",
+@article{Akba08a,
+ author = "Akbarpour, Behzad and Paulson, Lawrence C.",
+ title = {{MetiTarski: An Automatic Prover for the Elementary Functions}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
abstract =
 "Though Haskell's module language is quite weak, its core language
 is highly expressive. Indeed, it is tantalisingly close to being
 able to express much of the structure traditionally delegated to a
 separate module language. However, the encoding are awkward, and
 some situations can't be encoded at all.

 In this paper we refine Haskell's core language to support
 {\sl firstclass modules} with many of the features of MLstyle
 modules. Our proposal cleanly encodes signatures, structures and
 functors with the appropriate type abstraction and type sharing,
 and supports recursive modules. All of these features work across
 compilation units, and interact harmoniously with Haskell's class
 system. Coupled with support for staged computation, we believe
 our proposal would be an elegant approach to runtime dynamic
 linking of structured code.

 Our work builds directly upon Jones' work on parameterised
 signatures, Odersky and L\"aufer's system of higherranked type
 annotations, Russo's semantics of ML modules using ordinary
 existential and universal quantifications, and Odersky and
 Zenger's work on nested types. We motivate the system by examples,
 and include a more formal presentation in the appendix.",
 paper = "Shie02.pdf",
 keywords = "printed"
+ "Many inequalities involving the functions ln, exp, sin, cos, etc.,
+ can be proved automatically by MetiTarski: a resolution theorem prover
+ (Metis) modified to call a decision procedure (QEPCAD) for the theory
+ of real closed fields. The decision procedure simplifies clauses by
+ deleting literals that are inconsistent with other algebraic facts,
+ while deleting as redundant clauses that follow algebraically from
+ other clauses. MetiTarski includes special code to simplify
+ arithmetic expressions.",
+ paper = "Akba08a.pdf"
}
\end{chunk}
\index{Dijkstra, E.W.}
+\index{Cramer, Marcos}
\begin{chunk}{axiom.bib}
@misc{Dijk71,
 author = "Dijkstra, E.W.",
 title = {{A Short Introduction to the Art of Programming}},
 comment = "EWD316",
 year = "1971",
 paper = "Dijk71.pdf"
+@misc{Cram14,
+ author = "Cramer, Marcos",
+ title = {{Modelling the usage of partial functions and undefined
+ terms using presupposition theory}},
+ year = "2014",
+ isbn = "9781848901308",
+ publisher = "College Publications",
+ pages = "7188",
+ abstract =
+ "We describe how the linguistic theory of presuppositions can be used
+ to analyse and model the usage of partial functions and undefined
+ terms in mathematical texts. We compare our account to other accounts
+ of partial functions and undefined terms, showing how our account
+ models the actual usage of partial functions and undefined terms more
+ faithfully than existing accounts. The model described in this paper
+ has been developed for the Naproche system, a computer system for
+ proofchecking mathematical texts written in controlled natural
+ language, and has largely been implemented in this system.",
+ paper = "Cram14.pdf",
+ keywords = "printed"
}
\end{chunk}
\index{Steele, Guy Lewis}
\index{Sussman, Gerald Jay}
+\index{Freundt, Sebastian}
+\index{Horn, Peter}
+\index{Konovalov, Alexander}
+\index{Linton, Steve}
+\index{Roozemond, Dan}
\begin{chunk}{axiom.bib}
@techreport{Stee78a,
 author = "Steele, Guy Lewis and Sussman, Gerald Jay",
 title = {{The Art of the Interpreter or, The Modularity Complex (Parts
 Zero, One, and Two}},
 type = "technical report",
 institution = "MIT AI Lab",
 number = "AIM453",
 year = "1978",
+@article{Freu08,
+ author = "Freundt, Sebastian and Horn, Peter and Konovalov, Alexander
+ and Linton, Steve and Roozemond, Dan",
+ title = {{Symbolic Computation Software Composability}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
abstract =
 "We examine the effects of various language design decisions on
 the programming styles available to a user of the language, with
 particular emphasis on the ability to incrementally construct
 modular systems. At each step we exhibit an interactive
 metacircular interpreter for the language under consideration.

 We explore the consequences of various variable binding
 disciplines and the introduction of side effects. We find that
 dynamic scoping is unsuitable for constructing procedural
 abstractions, but has another role as an agent of modularity,
 being a structured form of side effect. More general side effects
 are also found to be necessary to promote modular style. We find
 that the notion of side effect and the notion of equality (object
 identity) are mutually constraining: to define one is to define
 the other.

 The interpreters we exhibit are all written in a simple dialect of
 LISP, and all implement LISPlike languages. A subset of these
 interpreters constitute a partial historical reconstruction of the
 actual evolution of LISP.",
 paper = "Stee78a.pdf",
+ "We present three examples of the composition of Computer Algebra
+ Systems to illustrate the progress on a composability infrastructure
+ as part of the SCIEnce (Symbolic Computation Infrastructure for
+ Europe) project. One of the major results of the project so far is
+ an OpenMath based protocol called SCSCP (Symbolic Computation Software
+ Composability Protocol). SCSCP enables the various software packages
+ for example to exchange mathematical objects, request calculations,
+ and store and retrieve remote objects, either locally or across the
+ internet. The three examples show the current state of the GAP, KANT,
+ and MuPAD software packages, and give a demonstration of exposing
+ Macaulay using a newly developed framework.",
+ paper = "Freu08.pdf",
keywords = "printed"
}
\end{chunk}
\index{Wadler, Philip}
\index{Findler, Robert Bruce}
+\index{Kozen, Dexter}
+\index{Palsberg, Jens}
+\index{Schwartzbach, Michael I.}
\begin{chunk}{axiom.bib}
@inproceedings{Wadl07a,
 author = "Wadler, Philip and Findler, Robert Bruce",
 title = {{WellTyped Programs Can't Be Blamed}},
 booktitle = "Workshop on Scheme and Functional Programming",
 year = "2007",,
 pages = "1526",
 abstract =
 "We show how {\sl contracts} with blame fit naturally with recent
 work on {\sl hybrid types} and {\sl gradual types}. Unlike hybrid
 types or gradual types, we require casts in the source code, in
 order to indicate where type errors may occur. Two (perhaps
 surprising) aspects of our approach are that refined types can
 provide useful static guarantees even in the absence of a theorem
 prover, and that type {\sl dynamic} should not be regarded as a
 supertype of all other types. We factor the wellknown notion of
 subtyping into new notions of positive and negative subtyping, and
 use these to characterise where positive and negative blame may
 arise. Our approach sharpens and clarifies some recent results in
 the literature.",
 paper = "Wadl07a.pdf",
+@article{Koze93,
+ author = "Kozen, Dexter and Palsberg, Jens and Schwartzbach, Michael I.",
+ title = {{Efficient Recursive Subtyping}},
+ journal = "Mathematical Structures in Computer Science",
+ volume = "5",
+ number = "1",
+ pages = "113125",
+ year = "1995",
+ abstract =
+ "Subtyping in the presence of recursive types for the
+ $\lambda$calculus was studied by Amadio and Cardelli in 1991. In that
+ paper they showed that the problem of deciding whether one recursive
+ type is a subtype of another is decidable in exponential time. In
+ this paper we give an $O(n^2)$ algorithm. Our algorithm is based on a
+ simplification of the definition of the subtype relation, which allows
+ us to reduce the problem to the emptiness problem for a certain finite
+ automaton with quadratically many states. It is known that equality
+ of recursive types and the covariant B{\"o}hm order can be decided
+ efficiently by means of finite automata, since they are just language
+ equality and language inclusion, respectively. Our results extend the
+ automatatheoretic approach to handle orderings based on
+ contravariance.",
+ paper = "Koze93.pdf"
+}
+
+\end{chunk}
+
+\index{Martelli, Alberto}
+\index{Montanari, Ugo}
+\begin{chunk}{axiom.bib}
+@article{Mart82,
+ author = "Martelli, Alberto and Montanari, Ugo",
+ title = {{An Efficient Unification Algorithm}},
+ journal = "ACM TOPLAS",
+ volume = "4",
+ number = "2",
+ pages = "258282",
+ year = "1982",
+ abstract =
+ "The unification problem in firstorder predicate calculus is
+ described in general terms as the solution of a system of equations,
+ and a nondeterministic algorithm is given. A new unification
+ algorithm, characterized by having the acyclicity test efficiently
+ embedded into it, is derived from the nondeterministic one, and a
+ PASCAL implementation is given. A comparison with other wellknown
+ unification algorithms shows that the algorithm described here
+ performs well in all cases.",
+ paper = "Mart82.pdf"
+}
+
+\end{chunk}
+
+\index{Pottier, Francois}
+\begin{chunk}{axiom.bib}
+@phdthesis{Pott98,
+ author = "Pottier, Francois",
+ title = {{Type Inference in the Presence of Subtyping: From Theory
+ to Practice}},
+ school = "Universite Paris 7",
+ year = "1998",
+ comment = "INRIA Research Report RR3483",
+ abstract =
+ "From a purely theoretical point of view, type inference for a
+ functional language with parametric polymorphism and subtyping
+ poses little difficulty. Indeed, it suffices to generalize the
+ inference algorithm used in the ML language, so as to deal with
+ type inequalities, rather than equalities. However, the number of
+ such inequalities is linear in the program size  whence, from a
+ practical point of view, a serious efficiency and readability
+ problem.
+
+ To solve this problem, one must simplify the inferred
+ constraints. So, after studying the logical properties of
+ subtyping constraints, this work proposes several simplification
+ algorithms. They combine seamlessly, yielding a homogeneous, fully
+ formal framework, which directly leads to an efficient
+ implementation. Although this theoretical study is performed in a
+ simplified setting, numerous extensions are possible. Thus, this
+ framework is realistic and should allow a practical appearance of
+ subtyping in languages with type inference.",
+ paper = "Pott98.pdf"
+}
+
+\end{chunk}
+
+\index{McCarthy, John}
+\begin{chunk}{axiom.bib}
+@article{Mcca60,
+ author = "McCarthy, John",
+ title = {{Recursive Functions of Symbolic Expressions and Their
+ Computation by Machine, Part I}},
+ journal = "CACM",
+ volume = "3",
+ pages = "184195",
+ year = "1960",
+ paper = "Mcca60.pdf",
keywords = "printed"
}
+\end{chunk}
+
+\index{Barendregt, Henk}
+\begin{chunk}{axiom.bib}
+@incollection{Bare92,
+ author = "Barendregt, Henk",
+ title = {{Lambda Calculi with Types}},
+ booktitle = "Handbook of Logic in Computer Science (vol 2)",
+ publisher = "Oxford University Press",
+ year = "1992",
+ paper = "Bare92.pdf"
+}
+
\end{chunk}
\index{Michaelson, Greg}
+\index{Church, Alonzo}
\begin{chunk}{axiom.bib}
@book{Mich11,
 author = "Michaelson, Greg",
 title = {{Functional Programming Through Lambda Calculus}},
 year = "2011",
 publisher = "Dover",
 isbn = "9780486478838"
+@article{Chur28,
+ author = "Church, Alonzo",
+ title = {{On the Law of the Excluded Middle}},
+ journal = "Bull. of the American Mathematical Society",
+ volume = "34",
+ number = "1",
+ pages = "7578",
+ year = "1928",
+ paper = "Chur28.pdf"
}
\end{chunk}
+
+\index{Mitchell, John}
+\begin{chunk}{axiom.bib}
+@inproceedings{Mitc84,
+ author = "Mitchell, John",
+ title = {{Type Inference and Type Containment}},
+ booktitle = "Proc. Int. Symp. on Semantics of Data Types",
+ publisher = "Springer",
+ isbn = "3540133461",
+ year = "1984",
+ pages = "257277",
+ abstract =
+ "Type inference, the process of assigning types to untyped
+ expressions, may be motivated by the design of a typed language or
+ semantical considerations on the meanings of types and expressions.
+ A typed language GR with polymorphic functions leads to the GR
+ inference rules. With the addition of an 'oracle' rule for equations
+ between expressions, the GR rules become complete for a general class
+ of semantic models of type inference. These inference models are
+ models of untyped lambda calculus with extra structure similar to
+ models of the typed language GR. A more specialized set of type
+ inference rules, the GRS rules, characterize semantic typing when the
+ functional type $\sigma \rightarrow \tau$ is interpreted as all
+ elements of the model that map $\sigma$ to $\tau$ and the polymorphic
+ type $\forall t.\sigma$ is interpreted as the
+ intersection of all $\sigma(\tau)$. Both inference systems may be reformulated
+ using rules for deducing containments between types. The advantage of
+ the type inference rules based on containments is that proofs
+ correspond more closely to the structure of terms.",
+ paper = "Mitc84.pdf"
+}
+
+\end{chunk}
+
+\index{Griesmer, James}
+\begin{chunk}{axiom.bib}
+@article{Grie11,
+ author = "Griesmer, James",
+ title = {{James Griesmer 19292011}},
+ journal = "ACM Communications in Computer Algebra",
+ volume = "46",
+ number = "1/2",
+ year = "2012",
+ pages = "1011",
+ comment = "Obituary",
+ paper = "Grie11.pdf"
+}
+
+\end{chunk}
+
+\index{Daly, Timothy}
+\index{Kastner, John}
+\index{Mays, Eric}
+\begin{chunk}{axiom.bib}
+@inproceedings{Daly88,
+ author = "Daly, Timothy and Kastner, John and Mays, Eric",
+ title = {{Integrating Rules and Inheritance Networks in a Knowledge-based
+ Financial Marketing Consultation System}},
+ booktitle = "Proc. HICSS 21",
+ volume = "3",
+ number = "58",
+ pages = "496500",
+ year = "1988",
+ comment = "KROPS",
+ abstract =
+ "This paper describes the integrated use of rule-based inference and
+ an object-centered knowledge representation (inheritance network) in a
+ financial marketing consultation system. The rules provide a highly
+ flexible pattern match capability and inference cycle for control. The
+ inheritance network provides a convenient way to represent the
+ conceptual structure of the domain. By merging the two techniques, our
+ financial computation can be shared at the most general level, and
+ rule inference is carried out at any appropriate level of
+ generalization. Since domain knowledge is represented independently
+ from control knowledge, knowledge about a particular problem solving
+ technique is decoupled from the conditions for its invocation. A large
+ financial marketing system has been built and examples are given to
+ show our combined use of rules and inheritance networks.",
+ paper = "Daly88.pdf"
+}
+
+\end{chunk}
diff git a/src/axiomwebsite/patches.html b/src/axiomwebsite/patches.html
index bea9cc7..3569308 100644
 a/src/axiomwebsite/patches.html
+++ b/src/axiomwebsite/patches.html
@@ 5930,6 +5930,8 @@ books/bookvolbib add references
books/breakurl.sty latex package to break long URLs
20180516.01.tpd.patch
books/bookvolbib add references
+20180606.01.tpd.patch
+books/bookvolbib add references

1.9.1