From 1181ee79485fdd2c64c86d73650b72f99a9825d4 Mon Sep 17 00:00:00 2001
From: Tim Daly
Date: Sat, 5 May 2018 07:03:15 -0400
Subject: [PATCH] books/breakurl.sty latex package to break long URLs
Goal: Literate Programming

 books/breakurl.sty            |  314 +++++++++++++
 changelog                     |    2 +
 patch                         | 1003 +-
 src/axiomwebsite/patches.html |    2 +
 4 files changed, 320 insertions(+), 1001 deletions(-)
create mode 100644 books/breakurl.sty
diff --git a/books/breakurl.sty b/books/breakurl.sty
new file mode 100644
index 0000000..ea9181b
--- /dev/null
+++ b/books/breakurl.sty
@@ -0,0 +1,314 @@
+%%
+%% This is file `breakurl.sty',
+%% generated with the docstrip utility.
+%%
+%% The original source files were:
+%%
+%% breakurl.dtx (with options: `package')
+%%
+%% This is a generated file.
+%%
+%% Copyright (C) 2005 by Vilar Camara Neto.
+%%
+%% This file may be distributed and/or modified under the
+%% conditions of the LaTeX Project Public License, either
+%% version 1.2 of this license or (at your option) any later
+%% version. The latest version of this license is in:
+%%
+%% http://www.latex-project.org/lppl.txt
+%%
+%% and version 1.2 or later is part of all distributions of
+%% LaTeX version 1999/12/01 or later.
+%%
+%% Currently this work has the LPPL maintenance status "maintained".
+%%
+%% The Current Maintainer of this work is Vilar Camara Neto.
+%%
+%% This work consists of the files breakurl.dtx and
+%% breakurl.ins and the derived file breakurl.sty.
+%%
+\NeedsTeXFormat{LaTeX2e}[1999/12/01]
+\ProvidesPackage{breakurl}
+ [2013/04/10 v1.40 Breakable hyperref URLs]
+
+
+\RequirePackage{xkeyval}
+\RequirePackage{ifpdf}
+
+\ifpdf
+ % Dummy package options
+ \DeclareOptionX{preserveurlmacro}{}
+ \DeclareOptionX{hyphenbreaks}{}
+ \DeclareOptionX{anythingbreaks}{}
+ \DeclareOptionX{vertfit}{}
+ \ProcessOptionsX\relax
+
+ \PackageWarning{breakurl}{%
+ You are using breakurl while processing via pdflatex.\MessageBreak
+ \string\burl\space will be just a synonym of \string\url.\MessageBreak}
+ \DeclareRobustCommand{\burl}{\url}
+ \DeclareRobustCommand*{\burlalt}{\hyper@normalise\burl@alt}
+ \def\burl@alt#1#2{\hyper@linkurl{\Hurl{#1}}{#2}}
+ \expandafter\endinput
+\fi
+
+\@ifpackageloaded{hyperref}{}{%
+ \PackageError{breakurl}{The breakurl depends on hyperref package}%
+ {I can't do anything. Please type X , edit the source file%
+ \MessageBreak
+ and add \string\usepackage\string{hyperref\string} before
+ \string\usepackage\string{breakurl\string}.}
+ \endinput
+}
+
+\newif\if@preserveurlmacro\@preserveurlmacrofalse
+\newif\if@burl@fitstrut\@burl@fitstrutfalse
+\newif\if@burl@fitglobal\@burl@fitglobalfalse
+\newif\if@burl@anythingbreaks\@burl@anythingbreaksfalse
+
+\newtoks\burl@toks
+
+\let\burl@charlistbefore\empty
+\let\burl@charlistafter\empty
+
+\def\burl@addtocharlistbefore{\g@addto@macro\burl@charlistbefore}
+\def\burl@addtocharlistafter{\g@addto@macro\burl@charlistafter}
+
+\bgroup
+ \catcode`\&=12\relax
+ \hyper@normalise\burl@addtocharlistbefore{%}
+ \hyper@normalise\burl@addtocharlistafter{:/.?#&_,;!}
+\egroup
+
+\def\burl@growmif#1#2{%
+ \g@addto@macro\burl@mif{\def\burl@ttt{#1}\ifx\burl@ttt\@nextchar#2\else}%
+}
+\def\burl@growmfi{%
+ \g@addto@macro\burl@mfi{\fi}%
+}
+\def\burl@defifstructure{%
+ \let\burl@mif\empty
+ \let\burl@mfi\empty
+ \expandafter\@tfor\expandafter\@nextchar\expandafter:\expandafter=%
+ \burl@charlistbefore\do{%
+ \expandafter\burl@growmif\@nextchar\@burl@breakbeforetrue
+ \burl@growmfi
+ }%
+ \expandafter\@tfor\expandafter\@nextchar\expandafter:\expandafter=%
+ \burl@charlistafter\do{%
+ \expandafter\burl@growmif\@nextchar\@burl@breakaftertrue
+ \burl@growmfi
+ }%
+}
+
+\AtEndOfPackage{\burl@defifstructure}
+
+\def\burl@setvertfit#1{%
+ \lowercase{\def\burl@temp{#1}}%
+ \def\burl@opt{local}\ifx\burl@temp\burl@opt
+ \@burl@fitstrutfalse\@burl@fitglobalfalse
+ \else\def\burl@opt{strut}\ifx\burl@temp\burl@opt
+ \@burl@fitstruttrue\@burl@fitglobalfalse
+ \else\def\burl@opt{global}\ifx\burl@temp\burl@opt
+ \@burl@fitstrutfalse\@burl@fitglobaltrue
+ \else
+ \PackageWarning{breakurl}{Unrecognized vertfit option `\burl@temp'.%
+ \MessageBreak
+ Adopting default `local'}
+ \@burl@fitstrutfalse\@burl@fitglobalfalse
+ \fi\fi\fi
+}
+
+\DeclareOptionX{preserveurlmacro}{\@preserveurlmacrotrue}
+\DeclareOptionX{hyphenbreaks}{%
+ \bgroup
+ \catcode`\&=12\relax
+ \hyper@normalise\burl@addtocharlistafter{-}%
+ \egroup
+}
+\DeclareOptionX{anythingbreaks}{%
+ \@burl@anythingbreakstrue
+}
+\DeclareOptionX{vertfit}[local]{\burl@setvertfit{#1}}
+
+\ProcessOptionsX\relax
+
+\def\burl@hyper@linkurl#1#2{%
+ \begingroup
+ \hyper@chars
+ \burl@condpdflink{#1}%
+ \endgroup
+}
+
+\def\burl@condpdflink#1{%
+ \literalps@out{
+ /burl@bordercolor {\@urlbordercolor} def
+ /burl@border {\@pdfborder} def
+ }%
+ \if@burl@fitstrut
+ \sbox\pdf@box{#1\strut}%
+ \else\if@burl@fitglobal
+ \sbox\pdf@box{\burl@url}%
+ \else
+ \sbox\pdf@box{#1}%
+ \fi\fi
+ \dimen@\ht\pdf@box\dimen@ii\dp\pdf@box
+ \sbox\pdf@box{#1}%
+ \ifdim\dimen@ii=\z@
+ \literalps@out{BU.SS}%
+ \else
+ \lower\dimen@ii\hbox{\literalps@out{BU.SS}}%
+ \fi
+ \ifHy@breaklinks\unhbox\else\box\fi\pdf@box
+ \ifdim\dimen@=\z@
+ \literalps@out{BU.SE}%
+ \else
+ \raise\dimen@\hbox{\literalps@out{BU.SE}}%
+ \fi
+ \pdf@addtoksx{H.B}%
+}
+
+\DeclareRobustCommand*{\burl}{%
+ \leavevmode
+ \begingroup
+ \let\hyper@linkurl=\burl@hyper@linkurl
+ \catcode`\&=12\relax
+ \hyper@normalise\burl@
+}
+
+\DeclareRobustCommand*{\burlalt}{%
+ \begingroup
+ \let\hyper@linkurl=\burl@hyper@linkurl
+ \catcode`\&=12\relax
+ \hyper@normalise\burl@alt
+}
+
+\newif\if@burl@breakbefore
+\newif\if@burl@breakafter
+\newif\if@burl@prevbreakafter
+
+\bgroup
+\catcode`\&=12\relax
+\gdef\burl@#1{%
+ \def\burl@url{#1}%
+ \def\burl@urltext{#1}%
+ \burl@doit
+}
+
+\gdef\burl@alt#1{%
+ \def\burl@url{#1}%
+ \hyper@normalise\burl@@alt
+}
+\gdef\burl@@alt#1{%
+ \def\burl@urltext{#1}%
+ \burl@doit
+}
+
+\gdef\burl@doit{%
+ \burl@toks{}%
+ \let\burl@UrlRight\UrlRight
+ \let\UrlRight\empty
+ \@burl@prevbreakafterfalse
+ \@ifundefined{@urlcolor}{\Hy@colorlink\@linkcolor}{\Hy@colorlink\@urlcolor}%
+ \expandafter\@tfor\expandafter\@nextchar\expandafter:\expandafter=%
+ \burl@urltext\do{%
+ \if@burl@breakafter\@burl@prevbreakaftertrue
+ \else\@burl@prevbreakafterfalse\fi
+ \if@burl@anythingbreaks\@burl@breakbeforetrue\else\@burl@breakbeforefalse\fi
+ \@burl@breakafterfalse
+ \expandafter\burl@mif\burl@mfi
+ \if@burl@breakbefore
+ % Breakable if the current char is in the `can break before' list
+ \burl@flush\linebreak[0]%
+ \else
+ \if@burl@prevbreakafter
+ \if@burl@breakafter\else
+ % Breakable if the current char is not in any of the `can break'
+ % lists, but the previous is in the `can break after' list.
+ % This mechanism accounts for sequences of `break after' characters,
+ % where a break is allowed only after the last one
+ \burl@flush\linebreak[0]%
+ \fi
+ \fi
+ \fi
+ \expandafter\expandafter\expandafter\burl@toks
+ \expandafter\expandafter\expandafter{%
+ \expandafter\the\expandafter\burl@toks\@nextchar}%
+ }%
+ \let\UrlRight\burl@UrlRight
+ \burl@flush
+ \literalps@out{BU.E}%
+ \Hy@endcolorlink
+ \endgroup
+}
+\egroup
+
+\def\the@burl@toks{\the\burl@toks}
+
+\def\burl@flush{%
+ \expandafter\def\expandafter\burl@toks@def\expandafter{\the\burl@toks}%
+ \literalps@out{/BU.L (\burl@url) def}%
+ \hyper@linkurl{\expandafter\Hurl\expandafter{\burl@toks@def}}{\burl@url}%
+ \global\burl@toks{}%
+ \let\UrlLeft\empty
+}%
+
+\if@preserveurlmacro\else\let\url\burl\let\urlalt\burlalt\fi
+
+\AtBeginDvi{%
+ \headerps@out{%
+ /burl@stx null def
+ /BU.S {
+ /burl@stx null def
+ } def
+ /BU.SS {
+ currentpoint
+ /burl@lly exch def
+ /burl@llx exch def
+ burl@stx null ne {burl@endx burl@llx ne {BU.FL BU.S} if} if
+ burl@stx null eq {
+ burl@llx dup /burl@stx exch def /burl@endx exch def
+ burl@lly dup /burl@boty exch def /burl@topy exch def
+ } if
+ burl@lly burl@boty gt {/burl@boty burl@lly def} if
+ } def
+ /BU.SE {
+ currentpoint
+ /burl@ury exch def
+ dup /burl@urx exch def /burl@endx exch def
+ burl@ury burl@topy lt {/burl@topy burl@ury def} if
+ } def
+ /BU.E {
+ BU.FL
+ } def
+ /BU.FL {
+ burl@stx null ne {BU.DF} if
+ } def
+ /BU.DF {
+ BU.BB
+ [ /H /I /Border [burl@border] /Color [burl@bordercolor]
+ /Action << /Subtype /URI /URI BU.L >> /Subtype /Link BU.B /ANN pdfmark
+ /burl@stx null def
+ } def
+ /BU.BB {
+ burl@stx HyperBorder sub /burl@stx exch def
+ burl@endx HyperBorder add /burl@endx exch def
+ burl@boty HyperBorder add /burl@boty exch def
+ burl@topy HyperBorder sub /burl@topy exch def
+ } def
+ /BU.B {
+ /Rect[burl@stx burl@boty burl@endx burl@topy]
+ } def
+ /eop where {
+ begin
+ /@ldeopburl /eop load def
+ /eop { SDict begin BU.FL end @ldeopburl } def
+ end
+ } {
+ /eop { SDict begin BU.FL end } def
+ } ifelse
+ }%
+}
+\endinput
+%%
+%% End of file `breakurl.sty'.
diff --git a/changelog b/changelog
index 33edb40..4def1dd 100644
--- a/changelog
+++ b/changelog
@@ -1,3 +1,5 @@
+20180505 tpd src/axiomwebsite/patches.html 20180505.01.tpd.patch
+20180505 tpd books/breakurl.sty latex package to break long URLs
20180415 tpd src/axiomwebsite/patches.html 20180415.01.tpd.patch
20180415 tpd books/bookvolbib add references
20180401 tpd src/axiomwebsite/patches.html 20180401.01.tpd.patch
diff --git a/patch b/patch
index fda4020..64501ea 100644
--- a/patch
+++ b/patch
@@ -1,1003 +1,4 @@
books/bookvolbib add references
+books/breakurl.sty latex package to break long URLs
Goal: Proving Axiom Correct
+Goal: Literate Programming
\index{Francez, Nissim}
\begin{chunk}{axiom.bib}
@book{Fran92,
 author = "Francez, Nissim",
 title = {{Program Verification}},
 year = "1992",
 publisher = "Addison-Wesley",
 isbn = "0-201-41608-5"
}

\end{chunk}

\index{Appel, Andrew W.}
\begin{chunk}{axiom.bib}
@book{Appe92,
 author = "Appel, Andrew W.",
 title = {{Compiling with Continuations}},
 year = "1992",
 publisher = "Cambridge University Press",
 isbn = "052103311X"
}

\end{chunk}

\index{Appel, Andrew W.}
\begin{chunk}{axiom.bib}
@book{Appe98,
 author = "Appel, Andrew W.",
 title = {{Modern Compiler Implementation in ML}},
 year = "1998",
 publisher = "Cambridge University Press",
 isbn = "0521607647"
}

\end{chunk}

\index{Paulson, L.C.}
\begin{chunk}{axiom.bib}
@book{Paul96,
 author = "Paulson, L.C.",
 title = {{ML for the Working Programmer 2nd Edition}},
 year = "1996",
 publisher = "Cambridge University Press",
 isbn = "052156543X"
}

\end{chunk}

\index{Pollack, Robert}
\begin{chunk}{axiom.bib}
@phdthesis{Poll94,
 author = "Pollack, Robert",
 title = {{The Theory of LEGO -- A Proof Checker for the Extended Calculus
 of Constructions}},
 school = "University of Edinburgh",
 link =
 "\url{http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.38.2610}",
 year = "1994",
 abstract =
 "LEGO is a computer program for interactive typechecking in the
 Extended Calculus of Constructions and two of its subsystems. LEGO
 also supports the extension of these three systems with inductive
 types. These type systems can be viewed as logics, and as meta
 languages for expressing logics, and LEGO is intended to be used for
 interactively constructing proofs in mathematical theories presented
 in these logics. I have developed LEGO over six years, starting from
 an implementation of the Calculus of Constructions by Gerard
 Huet. LEGO has been used for problems at the limits of our abilities
 to do formal mathematics.

 In this thesis I explain some aspects of the metatheory of LEGO’s
 type systems leading to a machinechecked proof that typechecking is
 decidable for all three type theories supported by LEGO, and to a
 verified algorithm for deciding their typing judgements, assuming only
 that they are normalizing. In order to do this, the theory of Pure
 Type Systems (PTS) is extended and formalized in LEGO. This extended
 example of a formally developed body of mathematics is described, both
 for its main theorems, and as a case study in formal mathematics. In
 many examples, I compare formal definitions and theorems with their
 informal counterparts, and with various alternative approaches, to
 study the meaning and use of mathematical language, and suggest
 clarifications in the informal usage.

 Having outlined a formal development far too large to be surveyed in
 detail by a human reader, I close with some thoughts on how the human
 mathematician’s state of understanding and belief might be affected by
 posessing such a thing.",
 paper = "Poll94.pdf"
}

\end{chunk}

\index{Pollack, Robert}
\begin{chunk}{axiom.bib}
@article{Poll94a,
 author = "Pollack, Robert",
 title = {{On Extensibility of Proof Checkers}},
 journal = "LNCS",
 volume = "996",
 pages = "140--161",
 year = "1994",
 abstract =
 "My suggestion is little different from LCF, just replacing one
 computational meta language (ML) with another (ECC, FS0,...). The
 philosophical point is that it is then possible to accept non
 canonical proof notations as object level proofs, removing the need to
 actually normalize them. There are problems to be worked out in
 practice, such as extraction of programs from constructive proof, and
 efficient execution of pure, total programs. Although this approach
 doesn't address the difficulty of proving correctness of tactics in
 the meta level, it is immediatly useful for tactics with structural
 justification (e.g. weakening) which are not even representable in
 LCF, and are infeasible in the Nuprl variant of LCF. Since it can be
 used for any object system without adding new principles such as
 reflection, and is compatible with other approaches to extensibility
 (especially partial reflection), it should be considered as part of
 the answer to extensibility in proof checkers.",
 paper = "Poll94a.pdf"
}

\end{chunk}

\index{Pfenning, Frank}
\index{Elliott, Conal}
\begin{chunk}{axiom.bib}
@inproceedings{Pfen88a,
 author = "Pfenning, Frank and Elliott, Conal",
 title = {{HigherOrder Abstract Syntax}},
 booktitle = "Symp on Language Design and Implementation PLDI'88",
 publisher = "ACM",
 link = "\url{https://www.cs.cmu.edu/~fp/papers/pldi88.pdf}",
 year = "1988",
 abstract =
 "We describe motivation, design, use, and implementation of
 higherorder abstract syntax as a central representation for
 programs, formulas, rules, and other syntactic objects in program
 manipulation and other formal systems where matching and substitution
 or unification are central operations. Higherorder abstract syntax
 incorporates name binding information in a uniform and language
 generic way. Thus it acts as a powerful link integrating diverse
 tools in such formal environments. We have implemented higherorder
 abstract syntax, a supporting matching and unification algorithm, and
 some clients in Common Lisp in the framework of the Ergo project at
 Carnegie Mellon University.",
 paper = "Pfen88a.pdf",
 keywords = "printed"
}

\end{chunk}

\index{McDowell, Raymond}
\index{Miller, Dale}
\begin{chunk}{axiom.bib}
@article{Mcdo02,
 author = "McDowell, Raymond and Miller, Dale",
 title = {{Reasoning with HigherOrder Abstract Syntax in a Logical
 Framework}},
 journal = "ACM Trans. Computational Logic",
 volume = "3",
 year = "2002",
 pages = "80--136",
 link = "\url{http://www.lix.polytechnique.fr/~dale/papers/mcdowell01.pdf}",
 abstract =
 "Logical frameworks based on intuitionistic or linear logics with
 highertype quantification have been successfully used to give
 highlevel, modular, and formal specifications of many important
 judgments in the area of programming languages and inference systems.
 Given such specifications, it is natural to consider proving
 properties about the specified systems in the framework: for example,
 given the specification of evaluation for a functional programming
 language, prove that the language is deterministic or that evaluation
 preserves types. One challenge in developing a framework for such
 reasoning is that higherorder abstract syntax (HOAS), an elegant and
 declarative treatment of objectlevel abstraction and substitution, is
 difficult to treat in proofs involving induction. In this paper, we
 present a metalogic that can be used to reason about judgments coded
 using HOAS; this metalogic is an extension of a simple intuitionistic
 logic that admits higherorder quantification over simply typed
 $\lambda$terms (key ingredients for HOAS) as well as induction and a
 notion of definition. The latter concept of definition is a
 prooftheoretic device that allows certain theories to be treated as
 'closed' or as defining fixed points. We explore the difficulties of
 formal metatheoretic analysis of HOAS encodings by considering
 encodings of intuitionistic and linear logics, and formally derive the
 admissibility of cut for important subsets of these logics. We then
 propose an approach to avoid the apparent tradeoff between the
 benefits of higherorder abstract syntax and the ability to analyze
 the resulting encodings. We illustrate this approach through examples
 involving the simple functional and imperative programming languages
 $PCF$ and $PCF_{:=}$. We formally derive such properties as unicity of
 typing, subject reduction, determinacy of evaluation, and the
 equivalence of transition semantics and natural semantics
 presentations of evaluation.",
 paper = "Mcdo02.pdf"
}

\end{chunk}

\index{Barras, Bruno}
\begin{chunk}{axiom.bib}
@techreport{Barr96a,
 author = "Barras, Bruno",
 title = {{Coq en Coq}},
 institution = "INRIA",
 year = "1996",
 type = "Research Report",
 number = "inria-00073667",
 comment = "French",
 abstract =
 "The essential step of the formal verification of a proofchecker such
 as Coq is the verification of its kernel: a typechecker for the
 {\sl Calculus of Inductive Constructions} (CIC) which is its underlying
 formalism. The present work is a first smallscale attempt on a
 significative fragment of CIC: the Calculus of Constructions (CC). We
 formalize the definition and the metatheory of (CC) in Coq. In
 particular, we prove strong normalization and decidability of type
 inference. From the latter proof, we extract a certified {\sl Caml
 Light} program, which performs type inference (or typechecking) for
 an arbitrary typing judgement in CC. Integrating this program in a
 larger system, including a parser and prettyprinter, we obtain a
 standalone proofchecker, called {\sl CoC}, the {\sl Calculus of
 Constructions}. As an example, the formal proof of Newman's lemma,
 build with Coq, can be reverified by {\sl CoC} with reasonable
 performance.",
 paper = "Barr96a.pdf"
}

\end{chunk}

\index{Barras, Bruno}
\index{Werner, Benjamin}
\begin{chunk}{axiom.bib}
@misc{Barr18,
 author = "Barras, Bruno and Werner, Benjamin",
 title = {{Coq in Coq}},
 link =
"\url{http://www.lix.polytechnique.fr/Labo/Bruno.Barras/publi/coqincoq.pdf}",
 comment = "https://github.com/coq-contribs/coq-in-coq",
 abstract =
 "We formalize the definition and the metatheory of the Calculus of
 Constructions (CC) using the proof assistant Coq. In particular,
 we prove strong normalization and decidability of type
 inference. From the latter proof, we extract a certified Objective
 Caml program which performs type inference in CC and use this code
 to build a smallscale certified proofchecker.",
 paper = "Barr18.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Huet, G\'erard P.}
\begin{chunk}{axiom.bib}
@article{Huet75,
 author = "Huet, Gerard P.",
 title = {{A Unification Algorithm for typed $\lambda$Calculus}},
 journal = "Theoretical Computer Science",
 volume = "1",
 number = "1",
 pages = "25--57",
 year = "1975",
 abstract =
 "A semidecision algorithm is presented, to search for unification of
 formulas in typed $\omega$order $\lambda$calculus, and its
 correctness is proved.

 It is shown that the search space is significantly smaller than the
 one for finding the most general unifiers. In particluar, our search
 is not redundant. This allows our algorithm to have good
 directionality and convergence properties.",
 paper = "Huet75.pdf"
}

\end{chunk}

\index{Huet, G\'erard P.}
\index{Lang, Bernard}
\begin{chunk}{axiom.bib}
@article{Huet78,
 author = "Huet, Gerard P. and Lang, Bernard",
 title = {{Proving and Applying Program Transformations Expressed
 with SecondOrder Patterns}},
 journal = "Acta Informatica",
 volume = "11",
 number = "1",
 pages = "31--55",
 year = "1978",
 abstract =
 "We propose a program transformation method based on rewritingrules
 composed of secondorder schemas. A complete secondorder matching
 algorithm is presented that allows effective use of these rules. We
 show how to formally prove the correctness of the rules using a
 denotational semantics for the programming language. We establish the
 correctness of the transformation method itself, and give techniques
 pertaining to its actual implementation. The paper is illustrated with
 recursion removal examples."
}

\end{chunk}

\index{Hamming, Richard}
\begin{chunk}{axiom.bib}
@misc{Hamm95,
 author = "Hamming, Richard",
 title = {{Hamming, 'You and Your Research'}},
 link = "\url{https://www.youtube.com/watch?v=a1zDuOPkMSw}",
 year = "1995"
}

\end{chunk}

\index{Dolan, Stephen}
\begin{chunk}{axiom.bib}
@phdthesis{Dola16,
 author = "Dolan, Stephen",
 title = {{Algebraic Subtyping}},
 school = "University of Cambridge",
 year = "2016",
 link = "\url{https://www.cl.cam.ac.uk/~sd601/thesis.pdf}",
 abstract =
 "Type inference gives programmers the benefit of static,
 compiletime type checking without the cost of manually specifying
 types, and has long been a standard feature of functional programming
 languages. However, it has proven difficult to integrate type
 inference with subtyping, since the unification engine at the core of
 classical type inference accepts only equations, not subtyping
 constraints.

 This thesis presents a type system combining MLstyle parametric
 polymorphism and subtyping, with type inference, principal types,
 and decidable type subsumption. Type inference is based on {\sl
 biunification} , an analogue of unification that works with
 subtyping constraints.

 Making this possible are several contributions, beginning with the
 notion of an 'extensible' type system, in which an open world of
 types is assumed, so that no typeable program becomes untypeable
 by the addition of new types to the language. While previous
 formulations of subtyping fail to be extensible, this thesis
 shows that adopting a more algebraic approach can remedy this.
 Using such an approach, this thesis develops the theory of
 biunification, shows how it is used to infer types, and shows how
 it can be efficiently implemented, exploiting deep connections
 between the algebra of regular languages and polymorphic subtyping.",
 paper = "Dola16.pdf"
}

\end{chunk}

\index{Downen, Paul}
\index{Maurer, Luke}
\index{Ariola, Zena M.}
\begin{chunk}{axiom.bib}
@inproceedings{Down16,
 author = "Downen, Paul and Maurer, Luke and Ariola, Zena M.",
 title = {{Sequent Calculus as a Compiler Intermediate Language}},
 booktitle = "ICFP'16",
 year = "2016",
 pages = "74--88",
 publisher = "ACM",
 isbn = "9781450342193",
 abstract =
 "The $\lambda$calculus is popular as an intermediate language for
 practical compilers. But in the world of logic it has a
 lesserknown twin, born at the same time, called the {\sl sequent
 calculus}. Perhaps that would make for a good intermediate
 language, too? To explore this question we designed Sequent Core,
 a practicallyoriented core calculus based on the sequent
 calculus, and used it to reimplement a substantial chunk of the
 Glasgow Haskell Compiler.",
 paper = "Down16.pdf"
}

\end{chunk}

\index{Zeilberger, Noam}
\begin{chunk}{axiom.bib}
@phdthesis{Zeil09,
 author = "Zeilberger, Noam",
 title = {{The Logical Basis of Evaluation Order and PatternMatching}},
 school = "Carnegie Mellon University",
 year = "2009",
 link = "\url{https://www.cs.cmu.edu/~rwh/theses/zeilberger.pdf}",
 abstract =
 "An old and celebrated analogy says that writing programs is like
 proving theorems. This analogy has been productive in both
 directions, but in particular has demonstrated remarkable utility in
 driving progress in programming languages, for example leading towards
 a better understanding of concepts such as abstract data types and
 polymorphism. One of the best known instances of the analogy actually
 rises to the level of an isomorphism: between Gentzen’s natural
 deduction and Church’s lambda calculus. However, as has been
 recognized for a while, lambda calculus fails to capture some of the
 important features of modern programming languages. Notably, it does
 not have an inherent notion of evaluation order, needed to make sense
 of programs with side effects. Instead, the historical descendents of
 lambda calculus (languages like Lisp, ML, Haskell, etc.) impose
 evaluation order in an ad hoc way.

 This thesis aims to give a fresh take on the proofsasprograms
 analogy—one which better accounts for features of modern programming
 languages—by starting from a different logical foundation. Inspired
 by Andreoli’s focusing proofs for linear logic, we explain how to
 axiomatize certain canonical forms of logical reasoning through a
 notion of pattern. Propositions come with an intrinsic polarity,
 based on whether they are defined by patterns of proof, or by patterns
 of refutation. Applying the analogy, we then obtain a programming
 language with builtin support for patternmatching, in which
 evaluation order is explicitly reflected at the level of types—and
 hence can be controlled locally, rather than being an ad hoc, global
 policy decision. As we show, different forms of continuationpassing
 style (one of the historical tools for analyzing evaluation order)
 can be described in terms of different polarizations. This language
 provides an elegant, uniform account of both untyped and
 intrinsicallytyped computation (incorporating ideas from infinitary
 proof theory), and additionally, can be provided an extrinsic type
 system to express and statically enforce more refined properties of
 programs. We conclude by using this framework to explore the theory of
 typing and subtyping for intersection and union types in the presence
 of effects, giving a simplified explanation of some of the unusual
 artifacts of existing systems.",
 paper = "Zeil09.pdf"
}

\end{chunk}

\index{Girard, JeanYves}
\begin{chunk}{axiom.bib}
@misc{Gira95,
 author = "Girard, JeanYves",
 title = {{Linear Logic: Its Syntax and Semantics}},
 year = "1995",
 paper = "Gira95.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Dolan, Stephen}
\index{Mycroft, Alan}
\begin{chunk}{axiom.bib}
@article{Dola17,
 author = "Dolan, Stephen and Mycroft, Alan",
 title = {{Polymorphism, Subtyping, and Type Inference in MLsub}},
 journal = "ACM SIGPLAN Notices",
 volume = "52",
 number = "1",
 year = "2017",
 pages = "60--72",
 link = "\url{https://www.cl.cam.ac.uk/~sd601/papers/mlsub-preprint.pdf}",
 abstract =
 "We present a type system combining subtyping and MLstyle parametric
 polymorphism. Unlike previous work, our system supports type inference
 and has compact principal types. We demonstrate this system in the
 minimal language MLsub, which types a strict superset of core ML
 programs.

 This is made possible by keeping a strict separation between the types
 used to describe inputs and those used to describe outputs, and
 extending the classical unification algorithm to handle subtyping
 constraints between these input and output types. Principal types are
 kept compact by type simplification, which exploits deep connections
 between subtyping and the algebra of regular languages. An
 implementation is available online.",
 paper = "Dola17.pdf"
}

\end{chunk}

\index{BonnaireSergeant, Ambrose}
\begin{chunk}{axiom.bib}
@misc{Bonn18,
 author = "BonnaireSergeant, Ambrose",
 title = {{Are unsound type systems wrong?}},
 link =
"\url{http://frenchy64.github.io/2018/04/07/unsoundnessinuntypedtypes.html}",
}

\end{chunk}

\index{Tuhola, Henri}
\begin{chunk}{axiom.bib}
@misc{Tuho18,
 author = "Tuhola, Henri",
 title = {{An another MLsub study}},
 link = "\url{http://boxbase.org/entries/2018/mar/12/mlsubstudy/}",
 year = "2018",
 comment = "Subtyping does not solve the problem of coercion"
}

\end{chunk}

\index{Tuhola, Henri}
\begin{chunk}{axiom.bib}
@misc{Tuho18a,
 author = "Tuhola, Henri",
 title = {{Boxbase  Index}},
 link = "\url{http://boxbase.org/}",
 year = "2018",
 keywords = "axiomref"
}

\end{chunk}

\index{Tarditi, David}
\index{Acharya, Anurag}
\index{Lee, Peter}
\begin{chunk}{axiom.bib}
@techreport{Tard90,
 author = "Tarditi, David and Acharya, Anurag and Lee, Peter",
 title = {{No Assembly Required: Compiling Standard ML to C}},
 type = "technical report",
 institution = "Carnegie Mellon University",
 number = "CMU-CS-90-187",
 year = "1990",
 abstract =
 "C has been proposed as a portable target language for
 implementing higherorder languages. Previous efforts at compiling
 such languages to C have produced efficient code, but have had to
 compromise on either the portability or the preservation of the
 tailrecursive properties of the languages. We assert that neither
 of these compromises is necessary for the generation of efficient
 code. We offer a Standard ML to C compiler, which does not make
 either of these compromises, as an existence proof. The generated
 code achieves an execution speed that is just a factor of two
 slower than the best native code compiler. In this paper, we
 describe the design, implementation and the performance of this
 compiler.",
 paper = "Tard90.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Appel, Andrew W.}
\index{Jim, Trevor}
\begin{chunk}{axiom.bib}
@techreport{Appe88,
 author = "Appel, Andrew W. and Jim, Trevor",
 title = {{ContinuationPassing, ClosurePassing Style}},
 institution = "Princeton University",
 year = "1988",
 number = "CS-TR-183-88",
 abstract =
 "We implemented a continuationpassing style (CPS) code generator for
 ML. Our CPS language is represented as an ML datatype in which all
 functions are named and most kinds of illformed expressions are
 impossible. We separate the code generation into phases that rewrite
 this representation into eversimpler forms. Closures are represented
 explicitly as records, so that closure strategies can be communicated
 from one phase to another. No stack is used. Our benchmark data shows
 that the new method is an improvement over our previous,
 abstractmachine based code generator.",
 paper = "Appe88.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Appel, Andrew W.}
\index{MacQueen, David B.}
\begin{chunk}{axiom.bib}
@article{Appe87,
 author = "Appel, Andrew W. and MacQueen, David B.",
 title = {{A Standard ML Compiler}},
 journal = "LNCS",
 volume = "274",
 pages = "301--324",
 year = "1987",
 abstract =
 "Standard ML is a major revision of earlier dialects of the functional
 language ML. We describe the first compiler written for Standard ML in
 Standard ML. The compiler incorporates a number of novel features and
 techniques, and is probably the largest system written to date in
 Standard ML.

 Great attention was paid to modularity in the construction of the
 compiler, leading to a successful largescale test of the modular
 capabilities of Standard ML. The front end is useful for purposes
 other than compilation, and the back end is easily retargetable (we
 have code generators for the VAX and MC68020). The module facilities
 of Standard ML were taken into account early in the design of the
 compiler, and they particularly influenced the environment management
 component of the front end. For example, the symbol table structure is
 designed for fast access to opened structures.

 The front end of the compiler is a single phase that integrates
 parsing, environment management, and type checking. The middle end
 uses a sophisticated decision tree scheme to produce efficient
 pattern matching code for functions and case expressions. The abstract
 syntax produced by the front end is translated into a simple
 lambdacalculusbased intermediate representation that lends itself to
 easy case analysis and optimization in the code generator. Special
 care was taken in designing the runtime data structures for fast
 allocation and garbage collection.

 We describe the overall organization of the compiler and present some
 of the data representations and algorithms used in its various
 phases. We conclude with some lessons learned about the ML language
 itself and about compilers for modem functional languages.",
 paper = "Appe87.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Steele, Guy Lewis}
\begin{chunk}{axiom.bib}
@techreport{Stee78,
 author = "Steele, Guy Lewis",
 title = {{RABBIT: A Compiler for SCHEME}},
 type = "technical report",
 number = "AITR-474",
 year = "1978",
 abstract =
 "We have developed a compiler for the lexically-scoped dialect of LISP
 known as SCHEME. The compiler knows relatively little about specific
 data manipulation primitives such as arithmetic operators, but
 concentrates on general issues of environment and control. Rather than
 having specialized knowledge about a large variety of control and
 environment constructs, the compiler handles only a small basis set
 which reflects the semantics of lambda-calculus. All of the
 traditional imperative constructs, such as sequencing, assignment,
 looping, GOTO, as well as many standard LISP constructs such as AND,
 OR, and COND, are expressed in macros in terms of the applicative
 basis set. A small number of optimization techniques, coupled with the
 treatment of function calls as GOTO statements, serve to produce code
 as good as that produced by more traditional compilers. The macro
 approach enables speedy implementation of new constructs as desired
 without sacrificing efficiency in the generated code.

 A fair amount of analysis is devoted to determining whether
 environments may be stack-allocated or must be heap-allocated.
 Heap-allocated environments are necessary in general because SCHEME
 (unlike Algol 60 and Algol 68, for example) allows procedures with
 free lexically scoped variables to be returned as the values of other
 procedures; the Algol stack-allocation environment strategy does not
 suffice. The methods used here indicate that a heap-allocating
 generalization of the 'display' technique leads to an efficient
 implementation of such 'upward funargs'. Moreover, compile-time
 optimization and analysis can eliminate many 'funargs' entirely, and
 so far fewer environment structures need be allocated at run time than
 might be expected.

 A subset of SCHEME (rather than triples, for example) serves as the
 representation intermediate between the optimized SCHEME code and the
 final output code; code is expressed in this subset in the so-called
 continuation-passing style. As a subset of SCHEME, it enjoys the same
 theoretical properties; one could even apply the same optimizer used
 on the input code to the intermediate code. However, the subset is so
 chosen that all temporary quantities are made manifest as variables,
 and no control stack is needed to evaluate it. As a result, this
 apparently applicative representation admits an imperative
 interpretation which permits easy transcription to final imperative
 machine code. These qualities suggest that an applicative language
 like SCHEME is a better candidate for an UNCOL than the more
 imperative candidates proposed to date.",
 paper = "Stee78.pdf"
}

\end{chunk}

\index{Peyton-Jones, Simon}
\begin{chunk}{axiom.bib}
@misc{Peyt17,
 author = "Peyton-Jones, Simon",
 title = {{Escape from the ivory tower: the Haskell journey}},
 link = "\url{https://www.youtube.com/watch?v=re96UgMk6GQ}",
 year = "2017"
}

\end{chunk}

\index{Wadler, Philip}
\begin{chunk}{axiom.bib}
@article{Wadl92,
 author = "Wadler, Philip",
 title = {{Comprehending Monads}},
 journal = "Mathematical Structures in Computer Science",
 volume = "2",
 pages = "461--493",
 year = "1992",
 abstract =
 "Category theorists invented {\sl monads} in the 1960s to
 concisely express certain aspects of universal algebra. Functional
 programmers invented {\sl list comprehensions} in the 1970s to
 concisely express certain programs involving lists. This paper
 shows how list comprehensions may be generalised to an arbitrary
 monad, and how the resulting programming feature can concisely
 express in a pure functional language some programs that
 manipulate state, handle exceptions, parse text, or invoke
 continuations. A new solution to the old problem of destructive
 array update is also presented. No knowledge of category theory is
 assumed.",
 paper = "Wadl92.pdf"
}

\end{chunk}

\index{Peyton-Jones, Simon}
\index{Wadler, Philip}
\begin{chunk}{axiom.bib}
@inproceedings{Peyt93,
 author = "Peyton-Jones, Simon and Wadler, Philip",
 title = {{Imperative Functional Programming}},
 booktitle = "Principles of Programming Languages POPL'93",
 publisher = "ACM",
 year = "1993",
 pages = "71--84",
 abstract =
 "We present a new model, based on monads, for performing
 input/output in a non-strict, purely functional language. It is
 composable, extensible, efficient, requires no extensions to the
 type system, and extends smoothly to incorporate mixed-language
 working and in-place array updates.",
 paper = "Peyt93.pdf"
}

\end{chunk}

\index{Hughes, John}
\begin{chunk}{axiom.bib}
@inproceedings{Hugh90,
 author = "Hughes, John",
 title = {{Why Functional Programming Matters}},
 booktitle = "Research Topics in Functional Programming",
 publisher = "Addison-Wesley",
 year = "1990",
 pages = "17--42",
 abstract =
 "As software becomes more and more complex, it is more and more
 important to structure it well. Well-structured software is easy to
 write and to debug, and provides a collection of modules that can be
 reused to reduce future programming costs. In this paper we show that
 two features of functional languages in particular, higher-order
 functions and lazy evaluation, can contribute significantly to
 modularity. As examples, we manipulate lists and trees, program
 several numerical algorithms, and implement the alpha-beta heuristic
 (an algorithm from Artificial Intelligence used in game-playing
 programs). We conclude that since modularity is the key to successful
 programming, functional programming offers important advantages for
 software development.",
 paper = "Hugh90.pdf",
 keywords = "printed"
}

\end{chunk}

\index{R\'emy, Didier}
\begin{chunk}{axiom.bib}
@misc{Remy17,
 author = "Remy, Didier",
 title = {{Type Systems for Programming Languages}},
 year = "2017",
 link = "\url{http://gallium.inria.fr/~remy/mpri/cours1.pdf}",
 link = "\url{http://gallium.inria.fr/~remy/mpri/cours2.pdf}",
 link = "\url{http://gallium.inria.fr/~remy/mpri/cours3.pdf}",
 link = "\url{http://gallium.inria.fr/~remy/mpri/cours4.pdf}",
 link = "\url{http://gallium.inria.fr/~remy/mpri/cours5.pdf}",
 paper = "Remy17.tgz"
}

\end{chunk}

\index{Wadler, Philip}
\begin{chunk}{axiom.bib}
@article{Wadl07,
 author = "Wadler, Philip",
 title = {{The GirardReynolds isomorphism (second edition)}},
 journal = "Theoretical Computer Science",
 volume = "375",
 pages = "201--226",
 year = "2007",
 abstract =
 "Jean-Yves Girard and John Reynolds independently discovered the
 second-order polymorphic lambda calculus, F2. Girard additionally
 proved a Representation Theorem : every function on natural numbers
 that can be proved total in second-order intuitionistic predicate
 logic, P2, can be represented in F2. Reynolds additionally proved an
 Abstraction Theorem : every term in F2 satisfies a suitable notion of
 logical relation; and formulated a notion of parametricity satisfied
 by well-behaved models.

 We observe that the essence of Girard’s result is a projection from P2
 into F2, and that the essence of Reynolds’s result is an embedding of
 F2 into P2, and that the Reynolds embedding followed by the Girard
 projection is the identity. We show that the inductive naturals are
 exactly those values of type natural that satisfy Reynolds’s notion of
 parametricity, and as a consequence characterize situations in which
 the Girard projection followed by the Reynolds embedding is also the
 identity.

 An earlier version of this paper used a logic over untyped terms. This
 version uses a logic over typed term, similar to ones considered by
 Abadi and Plotkin and by Takeuti, which better clarifies the
 relationship between F2 and P2.",
 paper = "Wadl07.pdf"
}

\end{chunk}

\index{Morrisett, Greg}
\index{Walker, David}
\index{Crary, Karl}
\index{Glew, Neil}
\begin{chunk}{axiom.bib}
@article{Morr99,
 author = "Morrisett, Greg and Walker, David and Crary, Karl and Glew, Neil",
 title = {{From System F to Typed Assembly Language}},
 journal = "Trans. on Programming Languages and Systems TOPLAS",
 volume = "21",
 number = "3",
 year = "1999",
 pages = "527--568",
 abstract =
 "We motivate the design of typed assembly language (TAL) and present a
 type-preserving translation from System F to TAL. The typed assembly
 language we present is based on a conventional RISC assembly
 language, but its static type system provides support for enforcing
 high-level language abstractions, such as closures, tuples, and
 user-defined abstract data types. The type system ensures that
 well-typed programs cannot violate these abstractions. In addition,
 the typing constructs admit many low-level compiler optimizations. Our
 translation to TAL is specified as a sequence of type-preserving
 transformations, including CPS and closure conversion phases;
 type-correct source programs are mapped to type-correct assembly
 language. A key contribution is an approach to polymorphic closure
 conversion that is considerably simpler than previous work. The
 compiler and typed assembly language provide a fully automatic way to
 produce certified code, suitable for use in systems where untrusted
 and potentially malicious code must be checked for safety before
 execution.",
 paper = "Morr99.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Kranz, David}
\index{Kelsey, Richard}
\index{Rees, Jonathan}
\index{Hudak, Paul}
\index{Philbin, James}
\index{Adams, Norman}
\begin{chunk}{axiom.bib}
@inproceedings{Kran86,
 author = "Kranz, David and Kelsey, Richard and Rees, Jonathan and
 Hudak, Paul and Philbin, James and Adams, Norman",
 title = {{ORBIT: An Optimizing Compiler for Scheme}},
 booktitle = "SIGPLAN '86",
 publisher = "ACM",
 pages = "219--233",
 year = "1986",
 abstract =
 "In this paper we describe an optimizing compiler for Scheme
 called {\sl Orbit} that incorporates our experience with an
 earlier Scheme compiler called TC, together with some ideas from
 Steele's Rabbit compiler. The three main design goals have been
 correctness, generating very efficient compiled code, and
 portability.",
 paper = "Kran86.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Flanagan, Cormac}
\index{Sabry, Amr}
\index{Duba, Bruce F.}
\index{Felleisen, Matthias}
\begin{chunk}{axiom.bib}
@misc{Flan03,
 author = "Flanagan, Cormac and Sabry, Amr and Duba, Bruce F. and
 Felleisen, Matthias",
 title = {{The Essence of Compiling with Continuations}},
 link =
 "\url{https://www.cs.rice.edu/~javaplt/411/17spring/Readings/essenceretro.pdf}",
 paper = "Flan03.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Harper, Robert}
\index{Lillibridge, Mark}
\begin{chunk}{axiom.bib}
@inproceedings{Harp93b,
 author = "Harper, Robert and Lillibridge, Mark",
 title = {{Explicit Polymorphism and CPS Conversion}},
 booktitle = "Symp. of Principles of Programming Languages",
 publisher = "ACM Press",
 year = "1993",
 pages = "206--219",
 abstract =
 "We study the typing properties of CPS conversion for an extension
 of F$\omega$ with control operators. Two classes of evaluation
 strategies are considered, each with call-by-name and call-by-value
 variants. Under the 'standard' strategies, constructor abstractions
 are values, and constructor applications can lead to non-trivial
 control effects. In contrast, the 'ML-like' strategies evaluate
 beneath constructor abstractions, reflecting the usual interpretation
 of programs in languages based on implicit polymorphism. Three
 continuation passing style sublanguages are considered, one on which
 the standard strategies coincide, one on which the ML-like
 strategies coincide, and one on which all the strategies coincide.
 Compositional, type-preserving CPS transformation algorithms are
 given for the standard strategies, resulting in terms on which all
 evaluation strategies coincide. This has as a corollary the
 soundness and termination of well-typed programs under the standard
 evaluation strategies. A similar result is obtained for the ML-like
 call-by-name strategy. In contrast, such results are obtained for
 the call-by-value ML-like strategy only for a restricted
 sublanguage in which constructor abstractions are limited to
 values.",
 paper = "Harp93b.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Sabry, Amr}
\index{Felleisen, Matthias}
\begin{chunk}{axiom.bib}
@techreport{Sabr92,
 author = "Sabry, Amr and Felleisen, Matthias",
 title = {{Reasoning About Programs in ContinuationPassing Style}},
 type = "technical report",
 number = "TR 92-180",
 institution = "Rice University",
 abstract =
 "Plotkin's $\lambda$value calculus is sound but incomplete for
 reasoning about $\beta\nu$-transformations on programs in continuation
 passing style (CPS). To find a complete extension, we define a new,
 compactifying CPS transformation and an 'inverse' mapping,
 $un$CPS, both of which are interesting in their own right. Using the
 new CPS transformation, we can determine the precise language of CPS
 terms closed under $\beta\nu$ transformations. Using the $un$CPS
 transformation, we can derive a set of axioms such that every equation
 between source programs is provable if and only if $\beta\nu$ can
 prove the corresponding equation between CPS programs. The extended
 calculus is equivalent to an untyped variant of Moggi's computational
 $\lambda$calculus.",
 paper = "Sabr92.pdf",
 keywords = "printed"
}

\end{chunk}

\index{Phelps, Tom}
\begin{chunk}{axiom.bib}
@misc{Phel92,
 author = "Phelps, Tom",
 title = {{A Common Lisp CGOL}},
 year = "1992",
 link = "\url{https://people.eecs.berkeley.edu/~fateman/cgol/cgol.1/cgol.ps}",
 abstract =
 "CGOL is an Algol-like notation for Lisp. The original version,
 written by Vaughan Pratt at M.I.T. in the early 70s, was written in
 Maclisp. This new version is based on Common Lisp. CGOL was translated
 to Common Lisp in the following four-stage process:

 (1) cgol.tok, the tokenizer has been almost completely rewritten; (2)
 cgoll.l, the main translation loop with library of translation schemas
 has been converted from Maclisp to Common Lisp; (3) the code that
 cgol1.l produces has been converted to Common Lisp; (4) selected
 examples of CGOL programs themselves were rewritten, since certain
 aspects of the semantics of Maclisp would otherwise not be modelled in
 the expected fashion. Maclisp differs from Common Lisp in a variety of
 respects, and some of them are apparent from CGOL including direct
 escapes to Lisp, variable scoping, function definitions and numerous
 other aspects.

 In contrast to the programming described above, the major contribution
 of this paper is annotation of selected code from the CGOL
 translator.",
 paper = "Phel92.pdf"
}

\end{chunk}

\index{Duran, Antonio J.}
\index{Perez, Mario}
\index{Varona, Juan L.}
\begin{chunk}{axiom.bib}
@article{Dura14,
 author = "Duran, Antonio J. and Perez, Mario and Varona, Juan L.",
 title = {{The Misfortunes of a Trio of Mathematicians Using Computer
 Algebra Systems. Can We Trust in Them?}},
 journal = "Notices of the AMS",
 volume = "61",
 number = "10",
 year = "2014",
 link = "\url{http://www.ams.org/notices/201410/rnoti-p1249.pdf}",
 pages = "1249--1252",
 paper = "Dura14.pdf",
 keywords = "printed"
}

\end{chunk}
diff git a/src/axiomwebsite/patches.html b/src/axiomwebsite/patches.html
index a19b687..c0699f3 100644
 a/src/axiomwebsite/patches.html
+++ b/src/axiomwebsite/patches.html
@@ 5926,6 +5926,8 @@ books/bookvol13 add chapter Here is the Problem
books/bookvolbib add references
20180415.01.tpd.patch
books/bookvolbib add references
+20180505.01.tpd.patch
+books/breakurl.sty latex package to break long URLs

1.9.1