From 7e9a5b176d81afedec04e15cbde374a89236358b Mon Sep 17 00:00:00 2001
From: Tim Daly
Date: Fri, 31 May 2019 23:50:09 -0400
Subject: [PATCH] books/bookvolbib added references
Goal: Proving Axiom Sane
\index{Abadi, Martin}
\index{Cardelli, Luca}
\index{Pierce, Benjamin}
\index{Plotkin, Gordon}
\begin{chunk}{axiom.bib}
@inproceedings{Abad89,
author = "Abadi, Martin and Cardelli, Luca and Pierce, Benjamin
and Plotkin, Gordon",
title = {{Dynamic Typing in a Statically Typed Language}},
booktitle = "16th Principles of Programming Languages",
publisher = "ACM",
pages = "213--227",
year = "1989",
abstract =
"Statically typed programming languages allow earlier error
checking, better enforcement of disciplined programming styles,
and generation of more efficient object code than languages where
all type consistency checks are performed at run time. However,
even in statically typed languages, there is often the need to
deal with data whose type cannot be determined at compile time. To
handle such situations safely, we propose to add a type Dynamic
whose values are pairs of a value $v$ and a type tag T where $v$
has the type denoted by T. Instances of Dynamic are built with an
explicit tagging construct and inspected with a type safe typecase
construct.
This paper explores the syntax, operational semantics, and
denotational semantics of a simple language including the type
Dynamic. We give examples of how dynamically typed values can be
used in programming. Then we discuss an operational semantics for
our language and obtain a soundness theorem. We present two
formulations of the denotational semantics of this language and
relate them to the operational semantics. Finally, we consider the
implications of polymorphism and some implementation issues.",
paper = "Abad89.pdf",
keywords = "printed"
}
\end{chunk}
\index{A{\"\i}t-Kaci, Hassan}
\begin{chunk}{axiom.bib}
@book{Aitk99,
author = "A{\"\i}t-Kaci, Hassan",
title = {{Warren's Abstract Machine: A Tutorial Reconstruction}},
publisher = "MIT Press",
isbn = "0262510588",
year = "1999",
link = "\url{http://wambook.sourceforge.net/wambook.pdf}",
paper = "Aitk99.pdf",
keywords = "printed"
}
\end{chunk}
\index{Bagnara, Roberto}
\index{Bagnara, Abramo}
\index{Biselli, Fabio}
\index{Chiari, Michele}
\index{Gori, Roberta}
\begin{chunk}{axiom.bib}
@misc{Bagn19,
author = "Bagnara, Roberto and Bagnara, Abramo and Biselli, Fabio
and Chiari, Michele and Gori, Roberta",
title = {{Correct Approximation of IEEE 754 Floating-Point
Arithmetic for Program Verification}},
year = "2019",
link = "\url{https://arxiv.org/abs/1903.06119}",
abstract =
"Verification of programs using floating-point arithmetic is
challenging on several accounts. One of the difficulties of
reasoning about such programs is due to the peculiarities of
floating-point arithmetic: rounding errors, infinities,
non-numeric objects (NaNs), signed zeros, denormal numbers,
different rounding modes... One possibility to reason about
floating-point arithmetic is to model a program computation path
by means of a set of ternary constraints of the form $z=x op y$
and use constraint propagation techniques to infer new information
on the variables' possible values. In this setting, we define and
prove the correctness of algorithms to precisely bound the value
of one of the variables $x$, $y$, or $z$, starting from the bounds
known for the other two. We do this for each of the operations and
for each rounding mode defined by the IEEE 754 binary
floating-point standard, even in the case the rounding mode in
effect is only partially known. This is the first time that such
so-called filtering algorithms are defined and their correctness
is formally proved. This is an important slab for paving the way
to formal verification of programs that use floating-point
arithmetics.",
paper = "Bagn19.pdf",
keywords = "printed"
}
\end{chunk}
\index{Bakel, Steffan van}
\begin{chunk}{axiom.bib}
@article{Bake93,
author = "Bakel, Steffan van",
title = {{Principal Type Schemes for the Strict Type Assignment System}},
journal = "J. Logic and Computation",
volume = "3",
number = "6",
pages = "643--670",
year = "1993",
abstract =
"We study the strict type assignment system, a restriction on the
intersection type discipline and prove that it has the principal
type property. We define, for a term $M$, the principal pair (of
basis and type). We specify three operations on pairs, and prove
that all pairs deducible for $M$ can be obtained from the
principal one by these operations, and that these map deducible
pairs to deducible pairs.",
paper = "Bake93.pdf",
keywords = "printed"
}
\end{chunk}
\index{Boyer, Robert S.}
\index{Moore, J Strother}
\begin{chunk}{axiom.bib}
@inbook{Boye72,
author = "Boyer, Robert S. and Moore, J Strother",
title = {{The Sharing of Structure in Theorem Proving Programs}},
booktitle = "Machine Intelligence 7",
pages = "110--116",
year = "1972",
abstract =
"We describe how clauses in resolution programs can be represented
and used without applying substitutions or consing lists of
literals. The amount of space required by our representation of a
clause is independent of the number of literals in the clause and
the depth of function nesting. We introduce the concept of the
value of an expression in a binding environment which we use to
standardize clauses apart and share the structure of parents in
representing the resolvent. We present unification and resolution
algorithms for our representation. Some data comparing our
representation to more conventional ones is given.",
paper = "Boye72.pdf",
keywords = "printed"
}
\end{chunk}
\index{Clark, K.L.}
\index{T{\"a}rnlund, S.A.}
\begin{chunk}{axiom.bib}
@book{Clar82,
author = "Clark, K.L. and T{\"a}rnlund, S.A.",
title = {{Logic Programming}},
publisher = "Academic Press",
year = "1982",
isbn = "0-12-175520-7"
}
\end{chunk}
\index{Daly, Timothy}
\begin{chunk}{axiom.bib}
@misc{Daly18a,
author = "Daly, Timothy",
title = {{Proving Axiom Sane Talk}},
comment = "International Conference on Mathematical Software",
journal = "LNCS",
volume = "10931",
year = "2018",
paper = "Daly18a.pdf"
}
\end{chunk}
\index{Dzamonja, Mirna}
\begin{chunk}{axiom.bib}
@misc{Dzam18,
author = "Dzamonja, Mirna",
title =
{{A New Foundational Crisis in Mathematics, Is it really happening?}},
link = "\url{https://arxiv.org/pdf/1802.06221.pdf}",
year = "2018",
abstract =
"The article reconsiders the position of the foundations of
mathematics after the discovery of HoTT. Discussion that this
discovery has generated in the community of mathematicians,
philosophers and computer scientists might indicate a new crisis
in the foundation of mathematics. By examining the mathematical
facts behind HoTT and their relation with the existing
foundations, we conclude that the present crisis is not one. We
reiterate a pluralist vision of the foundations of mathematics.
The article contains a short survey of the mathematical and
historical background needed to understand the main tenets of the
foundational issues.",
paper = "Dzam18.pdf",
keywords = "printed"
}
\end{chunk}
\index{Giannini, Paola}
\begin{chunk}{axiom.bib}
@techreport{Gian85,
author = "Giannini, Paola",
title = {{Type Checking and Type Deduction Techniques for
Polymorphic Programming Languages}},
type = "technical report",
institution = "Carnegie Mellon University",
number = "CMU-CS-85-187",
year = "1985",
abstract =
"In this paper we present some of the syntactic issues that arise
in polymorphic programming languages. In particular we examine
type checking and deduction in two different polymorphic type
structures: the parametric lambda-calculus (with let construct)
and the polymorphic or second-order lambda-calculus. In both
approaches the behavior of types is formalized with type inference
rules. Examples of programming languages following those
approaches are presented and some of their specific problems
studied.",
paper = "Gian85.pdf",
keywords = "printed"
}
\end{chunk}
\index{Harper, Robert}
\begin{chunk}{axiom.bib}
@article{Harp92,
author = "Harper, Robert",
title = {{Constructing Type Systems over an Operational Semantics}},
journal = "J. Symbolic Computation",
volume = "14",
pages = "71--84",
year = "1992",
abstract =
"Type theories in the sense of Martin-L{\"o}f and the NuPRL system are
based on taking as primitive a type-free programming language
given by an operational semantics, and defining types as partial
equivalence relations on the set of closed terms. The construction
of a type system is based on a general form of inductive
definition that may either be taken as acceptable in its own
right, or further explicated in terms of other patterns of
induction. One such account, based on a general theory of
inductively defined relations, was given by Allen. An alternative
account, based on an essentially set theoretic argument, is
presented.",
paper = "Harp92.pdf",
keywords = "printed"
}
\end{chunk}
\begin{chunk}{axiom.bib}
@misc{lion137,
author = {Unknown},
title = {{Thoughts in Free Time}},
year = {2019},
link = {\url{https://lion137.blogspot.com/2019/02/fundamentalalgorithmspolyomialgcd.html}},
comment = {\url{https://github.com/lion137/Fundamental_Algorithsms}}
}
\end{chunk}
\index{Norrish, Michael}
\index{Slind, Konrad}
\begin{chunk}{axiom.bib}
@article{Norr02,
author = "Norrish, Michael and Slind, Konrad",
title = {{A Thread of HOL Development}},
journal = "Computer Journal",
volume = "45",
number = "1",
pages = "37--45",
year = "2002",
abstract =
"The HOL system is a mechanized proof assistant for higher order
logic that has been under continuous development since the
mid-1980s, by an ever-changing group of developers and external
contributors. We give a brief overview of various implementations
of the HOL logic before focusing on the evolution of certain
important features available in a recent implementation. We also
illustrate how the module system of Standard ML provided security
and modularity in the construction of the HOL kernel, as well as
serving in a separate capacity as a useful representation medium
for persistent, hierarchical logical theories.",
paper = "Norr02.pdf",
keywords = "printed"
}
\end{chunk}
\index{Paulson, Lawrence C.}
\begin{chunk}{axiom.bib}
@inbook{Paul90b,
author = "Paulson, Lawrence C.",
title = {{Designing a Theorem Prover}},
booktitle = "Handbook of Logic in Computer Science, Volume 2",
publisher = "Oxford University Press",
pages = "415--475",
year = "1992",
paper = "Paul90b.pdf",
keywords = "printed"
}
\end{chunk}
\index{Robinson, J.A.}
\index{Sibert, E.E.}
\begin{chunk}{axiom.bib}
@techreport{Robi80,
author = "Robinson, J.A. and Sibert, E.E.",
title = {{Loglisp: An Alternative to Prolog}},
type = "technical report",
institution = "Syracuse University",
number = "807",
year = "1980",
paper = "Robi80.pdf",
keywords = "printed"
}
\end{chunk}
\index{Demers, Alan}
\index{Donahue, James}
\begin{chunk}{axiom.bib}
@inproceedings{Deme80,
author = "Demers, Alan and Donahue, James",
title = {{Type Completeness as a Language Principle}},
booktitle = "POPL 80",
publisher = "ACM",
pages = "234--244",
year = "1980",
abstract =
"The problem of Von Neumann languages is that their changeable
parts have so little expressive power --- John Backus",
paper = "Deme80.pdf",
keywords = "printed"
}
\end{chunk}
\index{Jammer, Max}
\begin{chunk}{axiom.bib}
@book{Jamm66,
author = "Jammer, Max",
title = {{The Conceptual Development of Quantum Mechanics}},
year = "1966",
publisher = "McGraw-Hill"
}
\end{chunk}
\index{Mackie, Ian}
\index{Pinto, Jorge Sousa}
\begin{chunk}{axiom.bib}
@article{Mack02,
author = "Mackie, Ian and Pinto, Jorge Sousa",
title = {{Encoding Linear Logic with Interaction Combinators}},
journal = "Information and Computation",
volume = "176",
pages = "153--186",
year = "2002",
abstract =
"The purpose of this paper is to demonstrate how Lafont's
interaction combinators, a system of three symbols and six
interaction rules, can be used to encode linear
logic. Specifically, we give a translation of the multiplicative,
exponential, and additive fragments of linear logic together with
a strategy for cut-elimination which can be faithfully
simulated. Finally, we show briefly how this encoding can be used
for evaluating $\lambda$-terms. In addition to offering a very
simple, perhaps the simplest, system of rewriting for linear logic
and the $\lambda$-calculus, the interaction net implementation
that we present has been shown by experimental testing to offer a
good level of sharing in terms of the number of cut-elimination
steps (resp. $\beta$-reduction steps). In particular it performs
better than all extant finite systems of interaction nets.",
paper = "Mack02.pdf",
keywords = "printed"
}
\end{chunk}
\begin{chunk}{axiom.bib}
@inproceedings{Oisd18,
author = "Anonymous",
title = {{Solving Rings in Agda}},
booktitle = "Proc. ACM Program. Lang.",
publisher = "ACM",
year = "2018",
abstract =
"We present a new library which automates the construction of
equivalence proofs between polynomials over commutative rings and
semirings in the programming language Agda [Norell and Chapman
2008]. It is significantly faster than Agda's existing solver. We
use reflection to provide a simple interface to the solver, and
demonstrate how to use the constructed proofs to provide
step-by-step solutions.",
paper = "Oisd18.pdf",
keywords = "printed"
}
\end{chunk}
\index{Harvey, David}
\index{van der Hoeven, Joris}
\begin{chunk}{axiom.bib}
@misc{Harv19,
author = "Harvey, David and van der Hoeven, Joris",
title = {{Integer Multiplication in Time O(n log n)}},
link = "\url{https://hal.archives-ouvertes.fr/hal-02070778/document}",
year = "2019",
abstract =
"We present an algorithm that computes the product of two
n-bit integers in O(n log n) bit operations",
paper = "Harv19.pdf"
}
\end{chunk}
\index{Dunfield, Joshua}
\index{Krishnaswami, Neelakantan R.}
\begin{chunk}{axiom.bib}
@misc{Dunf13,
author = "Dunfield, Joshua and Krishnaswami, Neelakantan R.",
title = {{Complete and Easy Bidirectional Typechecking for Higher-Rank
Polymorphism}},
link = "\url{https://arxiv.org/pdf/1306.6032.pdf}",
year = "2013",
abstract =
"Bidirectional typechecking, in which terms either synthesize a
type or are checked against a known type, has become popular for
its scalability (unlike Damas-Milner type inference, bidirectional
typing remains decidable even for very expressive type systems),
its error reporting, and its relative ease of
implementation. Following design principles from proof theory,
bidirectional typing can be applied to many type constructs. The
principles underlying a bidirectional approach to polymorphism,
however, are less obvious. We give a declarative, bidirectional
account of higher-rank polymorphism, grounded in proof theory;
this calculus enjoys many properties such as $\eta$-reduction and
predictability of annotations. We give an algorithm for
implementing the declarative system; our algorithm is remarkably
simple and well-behaved, despite being both sound and complete.",
paper = "Dunf13.pdf",
keywords = "printed"
}
\end{chunk}
\index{Parisse, Bernard}
\begin{chunk}{axiom.bib}
@misc{Pari19,
author = "Parisse, Bernard",
title = {{Computing Huge Groebner Basis like Cyclic10 over
$\mathbb{Q}$ with Giac}},
link = "\url{https://hal.archives-ouvertes.fr/hal-02081648}",
year = "2019",
abstract =
"We present a short description on how to fine-tune the
modular algorithm implemented in the Giac computer algebra system
to reconstruct large Groebner basis over $\mathbb{Q}$. The
classical cyclic10 benchmark will serve as example.",
paper = "Pari19.pdf"
}
\end{chunk}
\index{Denes, Maxime}
\index{Mortberg, Anders}
\index{Siles, Vincent}
\begin{chunk}{axiom.bib}
@misc{Dene19,
author = "Denes, Maxime and Mortberg, Anders and Siles, Vincent",
title = {{A Refinement-Based Approach to Computational Algebra in Coq}},
year = "2019",
link = "\url{www.cse.chalmers.se/~mortberg/papers/coqeal.pdf}",
abstract =
"We describe a step-by-step approach to the implementation and
formal verification of efficient algebraic algorithms. Formal
specifications are expressed on rich data types which are suitable
for deriving essential theoretical properties. These
specifications are then refined to concrete implementations on
more efficient data structures and linked to their abstract
counterparts. We illustrate this methodology on key applications:
matrix rank computation, Winograd's fast matrix product,
Karatsuba's polynomial multiplication, and the gcd of multivariate
polynomials.",
paper = "Dene19.pdf",
keywords = "printed"
}
\end{chunk}
\index{Ly, Kim Quyen}
\begin{chunk}{axiom.bib}
@misc{Lyxx15,
author = {Ly, Kim Quyen},
title = {{Formalization in Coq of Polynomial Interpretations on
Rationals}},
year = {2015},
link = {\url{https://www.di.ens.fr/~quyen/publication/ly10.pdf}},
paper = {Lyxx15.pdf},
keywords = {printed}
}
\end{chunk}
\index{Kovacs, Laura}
\index{Voronkov, Andrei}
\begin{chunk}{axiom.bib}
@misc{Kova13,
author = "Kovacs, Laura and Voronkov, Andrei",
title = {{First-Order Theorem Proving and Vampire}},
year = "2013",
link = "\url{http://www.cse.chalmers.se/~laurako/pub/CAV13_Kovacs.pdf}",
abstract =
"In this paper we give a short introduction in first-order theorem
proving and the use of the theorem prover Vampire. We discuss the
superposition calculus and explain the key concepts of saturation
and redundancy elimination, present saturation algorithms and
preprocessing, and demonstrate how these concepts are implemented
in Vampire. Further, we also cover more recent topics and features
of Vampire designed for advanced applications, including
satisfiability checking, theory reasoning, interpolation,
consequence elimination, and program analysis.",
paper = "Kova13.pdf",
keywords = "printed"
}
\end{chunk}
\index{Kotelnikov, Evgenii}
\index{Kovacs, Laura}
\index{Reger, Giles}
\index{Voronkov, Andrei}
\begin{chunk}{axiom.bib}
@inproceedings{Kote16,
author = "Kotelnikov, Evgenii and Kovacs, Laura and Reger, Giles and
Voronkov, Andrei",
title = {{The Vampire and the FOOL}},
booktitle = "SIGPLAN Conf. on Certified Programs and Proofs",
year = "2016",
publisher = "ACM",
pages = "37--48",
abstract =
"This paper presents new features recently implemented in the
theorem prover Vampire, namely support for first-order logic with
a first class boolean sort (FOOL) and polymorphic arrays. In
addition to having a first class boolean sort, FOOL also contains
if-then-else and let-in expressions. We argue that presented
extensions facilitate reasoning-based program analysis, both by
increasing the expressivity of first-order reasoners and by gains
in efficiency.",
paper = "Kote16.pdf",
keywords = "printed"
}
\end{chunk}
\index{Storjohann, Arne}
\begin{chunk}{axiom.bib}
@inproceedings{Stor97,
author = "Storjohann, Arne",
title = {{A Solution to the Extended GCD Problem with Applications}},
booktitle = "ISSAC '97",
publisher = "ACM",
year = "1997",
paper = "Stor97.pdf",
keywords = "printed"
}
\end{chunk}
\index{Corless, Robert}
\index{Postma, Erik}
\index{Stoutemyer, David}
\begin{chunk}{axiom.bib}
@inproceedings{Corl11,
author = "Corless, Robert and Postma, Erik and Stoutemyer, David",
title = {{GCD of Multivariate Approximate Polynomials using
Beautification with the Subtractive Algorithm}},
booktitle = "Int. Workshop on Symbolic-Numeric Computation",
publisher = "ACM",
year = "2011",
paper = "Corl11.pdf",
keywords = "printed"
}
\end{chunk}
\index{Cheng, Howard}
\index{Labahn, George}
\index{Zhou, Wei}
\begin{chunk}{axiom.bib}
@article{Chen08,
author = "Cheng, Howard and Labahn, George and Zhou, Wei",
title = {{Computing Polynomial LCM and GCD in Lagrange Basis}},
journal = "Communications in Computer Algebra",
volume = "42",
number = "3",
pages = "129--130",
year = "2008",
comment = "NOTE(review): abstract below appears to describe a different
paper (on verifying triangular decompositions); verify against source",
abstract =
"We discuss the verification of mathematical software solving
polynomial systems symbolically by way of triangular
decomposition. Standard verification techniques are highly
resource consuming and apply only to polynomial systems which are
easy to solve. We exhibit a new approach which manipulates
constructible sets represented by regular systems. We provide
comparative benchmarks of different verification procedures
applied to four solvers on a large set of well-known polynomial
systems. Our experimental results illustrate the high efficiency
of our new approach. In particular, we are able to verify
triangular decomposition of polynomial systems which are not easy
to solve.",
paper = "Chen08.pdf",
keywords = "printed"
}
\end{chunk}
\index{Cheng, Howard}
\index{Labahn, George}
\begin{chunk}{axiom.bib}
@inproceedings{Chen06,
author = "Cheng, Howard and Labahn, George",
title = {{On Computing Polynomial GCDs in Alternate Bases}},
booktitle = "ISSAC '06",
publisher = "ACM",
year = "2006",
pages = "47--54",
abstract =
"In this paper, we examine the problem of computing the greatest
common divisor (GCD) of univariate polynomials represented in
different bases. When the polynomials are represented in Newton
basis or a basis of orthogonal polynomials, we show that the
well-known Sylvester matrix can be generalized. We give
fraction-free and modular algorithms to directly compute the GCD
in the alternate basis. These algorithms are suitable for
computation in domains where growth of coefficients in
intermediate computations are a central concern. In the cases of
Newton basis and bases using certain orthogonal polynomials, we
also show that the standard subresultant algorithm can be applied
easily. If the degrees of the input polynomials is at most $n$ and
the degree of the GCD is at least $n/2$, our algorithms outperform
the corresponding algorithms using the standard power basis.",
paper = "Chen06.pdf",
keywords = "printed"
}
\end{chunk}
\index{Chen, Changbo}
\index{Maza, Marc Moreno}
\begin{chunk}{axiom.bib}
@article{Chen15,
author = "Chen, Changbo and Maza, Marc Moreno",
title = {{Simplification of Cylindrical Algebraic Formulas}},
journal = "LNCS",
volume = "9301",
pages = "119--134",
year = "2015",
comment = "Int. Workshop on Computer Algebra in Scientific Computing",
paper = "Chen15.pdf"
}
\end{chunk}
\index{Chen, Changbo}
\index{Covanov, Svyatoslav}
\index{Mansouri, Farnam}
\index{Maza, Marc Moreno}
\index{Xie, Ning}
\index{Xie, Yuzhen}
\begin{chunk}{axiom.bib}
@article{Chen14,
author = "Chen, Changbo and Covanov, Svyatoslav and Mansouri, Farnam
and Maza, Marc Moreno and Xie, Ning and Xie, Yuzhen",
title = {{Basic Polynomial Algebra Subprograms}},
journal = "Communications in Computer Algebra",
volume = "48",
number = "3/4",
pages = "197--201",
year = "2014",
paper = "Chen14.pdf"
}
\end{chunk}
\index{Gleich, David}
\begin{chunk}{axiom.bib}
@misc{Glei05,
author = "Gleich, David",
title = {{Finite Calculus: A Tutorial for Solving Nasty Sums}},
comment = "NOTE(review): link field was empty; locate and restore the URL",
year = "2005",
abstract =
"In this tutorial, I will first explain the need for finite
calculus using an example sum I think is difficult to solve. Next,
I will show where this sum actually occurs and why it is
important. Following that, I will present all the mathematics
behind finite calculus and a series of theorems to make it helpful
before concluding with a set of examples to show that it really is
useful.",
paper = "Glei05.pdf",
keywords = "printed"
}
\end{chunk}
\index{Majewski, Bohdan}
\index{Havas, George}
\begin{chunk}{axiom.bib}
@article{Maje94,
author = "Majewski, Bohdan and Havas, George",
title = {{The Complexity of Greatest Common Divisor Computations}},
journal = "LNCS",
volume = "877",
pages = "184--193",
year = "1994",
abstract =
"We study the complexity of expressing the greatest common divisor
of $n$ positive numbers as a linear combination of the
numbers. We prove the NP-completeness of finding an optimal set of
multipliers with respect to either of $L_0$ metric or the
$L_\infty$ norm. We present and analyze a new method for
expressing the gcd of $n$ numbers as their linear combination and
give an upper bound on the size of the largest multiplier
produced by this method, which is optimal.",
paper = "Maje94.pdf",
keywords = "printed"
}
\end{chunk}
\index{Shallit, Jeffrey}
\index{Sorenson, Jonathan}
\begin{chunk}{axiom.bib}
@article{Shal94,
author = "Shallit, Jeffrey and Sorenson, Jonathan",
title = {{Analysis of a Left-Shift Binary GCD Algorithm}},
journal = "Journal of Symbolic Computation",
volume = "17",
number = "6",
pages = "473--486",
year = "1994",
abstract =
"We introduce a new left-shift binary algorithm, LSBGCD, for
computing the greatest common divisor of two integers, and we
provide an analysis of the worst-case behavior of the
algorithm. The analysis depends on a theorem of Ramharter about
the extremal behavior of certain continuants.",
paper = "Shal94.pdf",
keywords = "printed"
}
\end{chunk}
\index{Kozen, Dexter}
\index{Landau, Susan}
\index{Zippel, Richard}
\begin{chunk}{axiom.bib}
@article{Koze94,
author = "Kozen, Dexter and Landau, Susan and Zippel, Richard",
title = {{Decomposition of Algebraic Functions}},
journal = "LNCS",
volume = "877",
pages = "80--92",
year = "1994",
abstract =
"Functional decomposition --- whether a function $f(x)$ can be
written as a composition of functions $g(h(x))$ in a nontrivial
way --- is an important primitive in symbolic computation
systems. The problem of univariate polynomial decomposition was
shown to have an efficient solution by Kozen and Landau.
Dickerson and von zur Gathen gave algorithms for certain
multivariate cases. Zippel showed how to decompose rational
functions. In this paper, we address the issue of decomposition of
algebraic functions. We show that the problem is related to
univariate resultants in algebraic function fields, and in fact
can be reformulated as a problem of resultant decomposition. We
characterize all decompositions of a given algebraic function up
to isomorphism, and give an exponential time algorithm for finding
a nontrivial one if it exists. The algorithm involves genus
calculations and constructing transcendental generators of fields
of genus zero.",
paper = "Koze94.pdf"
}
\end{chunk}
\index{Lipton, Richard J.}
\begin{chunk}{axiom.bib}
@article{Lipt94,
author = "Lipton, Richard J.",
title = {{Straight-Line Complexity and Integer Factorization}},
journal = "LNCS",
volume = "877",
pages = "71--79",
year = "1994",
abstract =
"We show that if polynomials with many rational roots have
polynomial length straight-line complexity, then integer
factorization is 'easy'",
paper = "Lipt94.pdf"
}
\end{chunk}
\index{Chen, Changbo}
\index{Maza, Marc Moreno}
\begin{chunk}{axiom.bib}
@article{Chen16,
author = "Chen, Changbo and Maza, Marc Moreno",
title = {{Quantifier Elimination by Cylindrical Algebraic
Decomposition based on Regular Chains}},
journal = "Journal of Symbolic Computation",
volume = "75",
pages = "74--93",
year = "2016",
abstract =
"A quantifier elimination algorithm by cylindrical algebraic
decomposition based on regular chains is presented. The main idea
is to refine a complex cylindrical tree until the signs of
polynomials appearing in the tree are sufficient to distinguish
the true and false cells. We report an implementation of our
algorithm in the RegularChains library in MAPLE and illustrate its
effectiveness by examples.",
paper = "Chen16.pdf"
}
\end{chunk}
\index{Kotelnikov, Evgenii}
\begin{chunk}{axiom.bib}
@phdthesis{Kote18,
author = "Kotelnikov, Evgenii",
title = {{Automated Theorem Proving with Extensions of First-Order Logic}},
school = "Chalmers",
year = "2018",
abstract =
"Automated theorem provers are computer programs that check
whether a logical conjecture follows from a set of logical
statements. The conjecture and the statements are expressed in the
language of some formal logic, such as first-order logic. Theorem
provers for first-order logic have been used for automation in
proof assistants, verification of programs, static analysis of
networks, and other purposes. However, the efficient usage of
these provers remains challenging. One of the challenges is the
complexity of translating domain problems to first-order
logic. Not only can such translation be cumbersome due to semantic
differences between the domain and the logic, but it might
inadvertently result in problems that provers cannot easily handle.
The work presented in the thesis addresses this challenge by
developing an extension of first-order logic named FOOL. FOOL
contains syntactical features of programming languages and more
expressive logics, is friendly for translation of problems from
various domains, and can be efficiently supported by existing
theorem provers. We describe the syntax and semantics of FOOL and
present a simple translation from FOOL to plain first-order
logic. We describe an efficient clausal normal form transformation
algorithm for FOOL and based on it implement a support for FOOL in
the Vampire theorem prover. We illustrate the efficient use of
FOOL for program verification by describing a concise encoding of
next state relations of imperative programs in FOOL. We show a
usage of features of FOOL in problems of static analysis of
networks. We demonstrate the efficiency of automated theorem
proving in FOOL with an extensive set of experiments. In these
experiments we compare the performance of Vampire on a large
collection of problems from various sources translated to FOOL and
ordinary first-order logic. Finally, we fix the syntax for FOOL in
TPTP, the standard language of first-order theorem provers.",
paper = "Kote18.pdf",
keywords = "printed"
}
\end{chunk}
\index{Dewar, Michael}
\begin{chunk}{axiom.bib}
@inproceedings{Dewa92,
author = "Dewar, Michael",
title = {{Using Computer Algebra to Select Numerical Algorithms}},
booktitle = "ISSAC '92",
publisher = "ACM",
year = "1992",
pages = "1--8",
isbn = "0-89791-489-9",
abstract =
"Many real-life problems require a combination of both symbolic
and numerical methods for their solution. This has led to the
development of integrated, interactive symbolic / numeric packages
which use a computer algebra system for the former and a standard
subroutine library for the later. These systems may also be viewed
as simplified front-ends to the numerical library. To use these
packages, however, a user must be able to select which of the many
available routines is the most appropriate for his or her problem,
which contrasts with the 'black-box' style interfaces available in
computer algebra systems. This paper describes how a computer
algebra system can be used to make this decision, thus providing a
much-simplified and orthogonal interface.",
paper = "Dewa92.pdf"
}
\end{chunk}
\index{Dupee, Brian J.}
\index{Davenport, James H.}
\begin{chunk}{axiom.bib}
@article{Dupe96,
author = "Dupee, Brian J. and Davenport, James H.",
title = {{An Intelligent Interface to Numerical Routines}},
journal = "LNCS",
volume = "1128",
pages = "252--262",
year = "1996",
abstract =
"Links from Computer Algebra Systems to Numerical Libraries have
been increasingly made available. However, they remain, like the
numerical routines which comprise the libraries, difficult to use
by a novice and there is little help in choosing the appropriate
routine for any given problem, should there be a choice.
Computer Algebra Systems use generic names for each problem
area. For example, 'integrate' (or 'int') is used for integration
of a function, whatever method the code may use. Numeric
interfaces still use different names for each method together with
a variety of extra parameters, some of which may be
optional. Ideally, we should extend the generic name structure to
cover numerical routines. This would then, necessarily, require
algorithms for making an assessment of the efficacy of different
methods where such a choice exists.
This paper considers the link to the NAG Fortran Library from
version 2.0 of Axiom and shows how we can build on this to extend
and simplify the interface using an expert system for choosing and
using the numerical routines.",
paper = "Dupe96.pdf",
keywords = "printed"
}
\end{chunk}
\index{Davenport, James H.}
\index{Dewar, Michael C.}
\index{Richardson, Michael G.}
\begin{chunk}{axiom.bib}
@inbook{Dave92f,
author = "Davenport, James H. and Dewar, Michael C. and
Richardson, Michael G.",
title = {{Symbolic and Numeric Computation: the Example of IRENA}},
booktitle = "Symbolic and Numerical Computation for Artificial Intelligence",
pages = "347--362",
year = "1992",
publisher = "Academic Press",
abstract =
"Historically symbolic and numeric computation have pursued
different lines of evolution, have been written in different
languages and generally seen to be competitive rather than
complementary techniques. Even when both were used to solve a
problem ad hoc methods were used to transfer the data between
them.
We first discuss the reasons for this dichotomy, and then present
IRENA, a system being developed by the authors to present an
integrated environment with all the facilities of Reduce combined
with the functionality of the NAG FORTRAN library.
Not only does IRENA allow the Reduce user to make calls to the NAG
Library interactively, it also converts a natural input
representation to the required unnatural FORTRAN one and
vice-versa on output, which results in a much more intuitive
interface. Many parameters have default values and so need not be
supplied by the user.",
paper = "Dave92f.pdf"
}
\end{chunk}
\index{Dewar, Mike}
\index{Carlisle, David}
\begin{chunk}{axiom.bib}
@inproceedings{Dewa01,
author = "Dewar, Mike and Carlisle, David",
title = {{Mathematical Software: The Next Generation?}},
booktitle = "Int. Workshop on Mathematical Knowledge Management",
link = "\url{https://www.emis.de/proceedings/MKM2001/printed/dewar.pdf}",
publisher = "RISC",
year = "2001",
paper = "Dewa01.pdf",
keywords = "axiomref"
}
\end{chunk}
\index{Bodnar, Gabor}
\index{Kaltenbacher, Barbara}
\index{Pau, Petru}
\index{Schicho, Josef}
\begin{chunk}{axiom.bib}
@article{Bodn01,
author = "Bodnar, Gabor and Kaltenbacher, Barbara and Pau, Petru and
Schicho, Josef",
title = {{Exact Real Computation in Computer Algebra}},
journal = "LNCS",
volume = "2630",
pages = "279--292",
year = "2001",
abstract =
"Exact real computation allows many of the advantages of numerical
computation (e.g. high performance) to be accessed also in
symbolic computation, providing validated results. In this paper
we present our approach to build a transparent and easy to use
connection between the two worlds, using this paradigm. The main
discussed topics are representation of exact real objects,
operations on exact real matrices, polynomial greatest common
divisor and root computation. Some of these problems are
ill-posed; we use regularization methods to solve them.",
paper = "Bodn01.pdf"
}
\end{chunk}
\index{Barthe, G.}
\index{Elbers, H.}
\begin{chunk}{axiom.bib}
@misc{Bart96,
author = "Barthe, G. and Elbers, H.",
title = {{Towards Lean Proof Checking}},
year = "1996",
abstract =
"Logical formal systems are inefficient at computations. In order
to increase their efficiency, we aim to extend these systems with
computational power. In this paper, we suggest a general, powerful
syntax, called oracle types, to extend type theories with
computational power; the resulting systems, which combine the
logical abilities of logical formal systems and the computational
power of term rewriting systems, provide a suitable environment
for theorem proving. As a practical application, we present an
extension of the theorem prover Lego with oracle types and
illustrate the use of this new system in performing algebraic
computations. Our implementation of oracle types is very flexible
and allows rewriting to be performed either inside Lego or by
Reduce, an efficient symbolic computation system. In our view, the
main novelty of our approach is to combine a sound theoretical
foundation with an efficient implementation. Besides, our work
provides the first attempt to combine symbolic computation systems
with theorem provers such as Coq and Lego, which are based on
intensional type theories.",
paper = "Bart96.pdf",
keywords = "printed"
}
\end{chunk}
\index{Bostan, Alin}
\index{Schost, Eric}
\begin{chunk}{axiom.bib}
@article{Bost13,
author = "Bostan, Alin and Schost, Eric",
title = {{A Simple and Fast Algorithm for Computing Exponentials
of Power Series}},
journal = "Information Processing Letters",
volume = "13",
pages = "754--756",
year = "2013",
abstract =
"As was initially shown by Brent, exponentials of truncated power
series can be computed using a constant number of polynomial
multiplications. This note gives a relatively simple algorithm
with a low constant factor",
paper = "Bost13.pdf"
}
\end{chunk}
\index{Piskac, Ruzica}
\begin{chunk}{axiom.bib}
@inproceedings{Pisk15,
author = "Piskac, Ruzica",
title = {{From Decision Procedures to Synthesis Procedures}},
booktitle = "Symp. on Symbolic and Numeric Algorithms for
Scientific Computing",
publisher = "ACM",
year = "2015",
abstract =
"Software synthesis is a technique for automatically generating
code from a given specification. The goal of software synthesis is
to make software development easier while increasing both the
productivity of the programmer and the correctness of the produced
code. In this paper we present an approach to synthesis that
relies on the use of automated reasoning and decision
procedures. First we describe how to generalize decision
procedures into predictable and complete synthesis
procedures. Here completeness means that the procedure is
guaranteed to find code that satisfies the given specification. We
illustrate the process of turning a decision procedure into a
synthesis procedure using linear integer arithmetic as an example.
However, writing a complete specification can be a tedious task,
sometimes even harder than writing the code itself. To overcome
this problem, ideally the user could provide a few input-output
examples, and then the code should be automatically derived. We
outline how to broaden usability and applications of current
software synthesis techniques. We conclude with an outlook on
possible future research directions and applications of synthesis
procedures.",
paper = "Pisk15.pdf"
}
\end{chunk}
\index{Havas, George}
\index{Majewski, Bohdan}
\index{Matthews, K.R.}
\begin{chunk}{axiom.bib}
@techreport{Hava95,
author = "Havas, George and Majewski, Bohdan and Matthews, K.R.",
title = {{Extended GCD Algorithms}},
type = "technical report",
institution = "University of Queensland",
number = "TR0302",
year = "1995",
abstract =
"Extended gcd calculation has a long history and plays an
important role in computational number theory and linear
algebra. Recent results have shown that finding optimal
multipliers in extended gcd calculations is difficult. We study
algorithms for finding good multipliers and present new algorithms
with improved performance. We present a well-performing algorithm
which is based on lattice basis reduction methods and may be
formally analyzed. We also give a relatively fast algorithm with
moderate performance.",
paper = "Hava95.pdf",
keywords = "printed"
}
\end{chunk}
\index{Ritt, J.F.}
\begin{chunk}{axiom.bib}
@article{Ritt25,
author = "Ritt, J.F.",
title = {{Elementary Functions and their Inverses}},
journal = "Transactions of the American Mathematical Society",
volume = "27",
pages = "68--90",
year = "1925",
paper = "Ritt25.pdf"
}
\end{chunk}
\index{Risch, Robert H.}
\begin{chunk}{axiom.bib}
@article{Risc76,
author = "Risch, Robert H.",
title = {{Implicitly Elementary Integrals}},
journal = "Proceedings of the American Mathematical Society",
volume = "57",
number = "1",
pages = "1--7",
year = "1976",
paper = "Risc76.pdf",
keywords = "printed"
}
\end{chunk}
\index{Risch, Robert H.}
\begin{chunk}{axiom.bib}
@article{Risc79,
author = "Risch, Robert H.",
title = {{Algebraic Properties of the Elementary Functions of Analysis}},
journal = "American Journal of Mathematics",
volume = "101",
number = "4",
pages = "743--759",
year = "1979",
abstract =
"The elementary functions of a complex variable $z$ are those
functions built up from the rational functions of $z$ by
exponentiation, taking logarithms, and algebraic operations. The
purpose of this paper is first, to prove a 'structure theorem'
which shows that if an algebraic relation holds among a set of
elementary functions, then they must satisfy an algebraic relation
of a special kind. Then we make four applications of this theorem,
obtaining both new and old results which are described here
briefly (and imprecisely).
\begin{enumerate}
\item An algorithm is given for telling when two elementary
expressions define the same function.
\item A characterization is derived of those ordinary differential
equations having elementary solutions
\item The four basic functions of elementary calculus -- exp, log,
tan, tan$^{-1}$ -- are shown to be 'irredundant'
\item A characterization is given of elementary functions
possessing elementary inverses.
\end{enumerate}",
paper = "Risc79.pdf",
keywords = "printed"
}
\end{chunk}
\index{Moses, Joel}
\begin{chunk}{axiom.bib}
@article{Mose72,
author = "Moses, Joel",
title = {{Toward a General Theory of Special Functions}},
journal = "Communications of the ACM",
volume = "15",
number = "7",
pages = "550--554",
year = "1972",
abstract =
"A list of a number of natural developments for the field of
algebraic manipulation is given. Then the prospects for a general
theory of functions defined by ordinary differential equations are
discussed. The claim is made that recent developments in
mathematics indicate that it should be possible to algorithmically
generate many properties of solutions to differential
equations. Such a theory is preferable to a less general effort to
make algebraic manipulation systems knowledgeable about the usual
special functions (e.g. exponential, hypergeometric).",
paper = "Mose72.pdf",
keywords = "printed"
}
\end{chunk}
\index{Muller, JeanMichel}
\begin{chunk}{axiom.bib}
@book{Mull16,
author = "Muller, JeanMichel",
title = {{Elementary Functions: Algorithms and Implementation}},
isbn = "9781489979810",
publisher = "Birkhauser",
year = "2016",
paper = "Mull16.pdf"
}
\end{chunk}
\index{Fitt, A.D.}
\index{Hoare, G.T.Q}
\begin{chunk}{axiom.bib}
@article{Fitt93,
author = "Fitt, A.D. and Hoare, G.T.Q",
title = {{The ClosedForm Integration of Arbitrary Functions}},
journal = "The Mathematical Gazette",
volume = "77",
number = "479",
pages = "227--236",
year = "1993",
paper = "Fitt93.pdf",
keywords = "printed"
}
\end{chunk}
\index{Schorre, D.V.}
\begin{chunk}{axiom.bib}
@inproceedings{Scho64,
author = "Schorre, D.V.",
title = {{META II: A SyntaxOriented Compiler Writing Language}},
booktitle = "19th National Conference of the ACM",
publisher = "ACM",
year = "1964",
abstract =
"META II is a compiler writing language which consists of syntax
equations resembling Backus normal form and into which
instructions to output assembly language commands are
inserted. Compilers have been written in this language for VALGOL
I and VALGOL II. The former is a simple algebraic language
designed for the purpose of illustrating META II. The latter
contains a fairly large subset of ALGOL 60.
The method of writing compilers which is given in detail in the
paper may be explained briefly as follows. Each syntax equation is
translated into a recursive subroutine which tests the input
string for a particular phrase structure, and deletes it if
found. Backup is avoided by the extensive use of factoring in the
syntax equations. For each source language, an interpreter is
written and programs are compiled into that interpretive language.
META II is not intended as a standard language which everyone will
use to write compilers. Rather, it is an example of a simple
working language which can give one a good start in designing a
compilerwriting compiler suited to his own needs. Indeed, the
META II compiler is written in its own language, thus lending
itself to modification.",
paper = "Scho64.pdf",
keywords = "printed"
}
\end{chunk}
\index{Jenks, Richard D.}
\begin{chunk}{axiom.bib}
@techreport{Jenk70,
author = "Jenks, Richard D.",
title = {{META/LISP: An interactive translator writing system}},
type = "research report",
number = "RC2968",
year = "1970",
institution = "IBM Research",
abstract =
"META/LISP is a general purpose translator writing system for IBM
System/360 currently running on TSS, CP/CMS, and OS/360. The input
to the system is a source program which simultaneously describes
1) the syntax of some input data to be translated and
2) algorithms which operate on the input data and a pushdown stack
to accomplish the desired translation; the output of the system is
a compiled program for translating that input data. In particular
when the input data are statements of a higherlevel language to
be translated into assembly language, META/LISP serves as a
compilercompiler. META/LISP uses the topdown syntaxdirected
approach which makes the system extremely attractive for the
design and implementation of experimental languages; using
META/LISP such compilers are easy to write, easy to check out, and
-- most importantly -- easy to modify interactively. The appendices
which follow contain a rather complete description of the system including
a self-description of the META/LISP compiler.",
paper = "Jenk70.pdf",
keywords = "axiomref, printed"
}
\end{chunk}
\index{Havas, George}
\index{Majewski, Bohdan S.}
\index{Matthews, Keith R.}
\begin{chunk}{axiom.bib}
@article{Hava98,
author = "Havas, George and Majewski, Bohdan S. and Matthews, Keith R.",
title = {{Extended GCD and Hermite Normal Form Algorithms via
Lattice Basis Reduction}},
journal = "Experimental Mathematics",
volume = "7",
number = "2",
pages = "125--136",
year = "1998",
abstract =
"Extended gcd calculation has a long history and plays an
important role in computational number theory and linear
algebra. Recent results have shown that finding optimal
multipliers in extended gcd calculations is difficult. We present
an algorithm which uses lattice basis reduction to produce small
integer multipliers $x_1,\ldots,x_m$ for the equation
$s = \gcd(s_1,\ldots,s_m) = x_1s_1+\ldots+x_ms_m$ where
$s_1,\ldots,s_m$ are given integers. The method generalises to
produce small unimodular transformation matrices for computing the
hermite normal form of an integer matrix.",
paper = "Hava98.pdf"
}
\end{chunk}
\index{Havas, George}
\index{Majewski, Bohdan}
\begin{chunk}{axiom.bib}
@techreport{Hava97,
author = "Havas, George and Majewski, Bohdan",
title = {{Extended GCD Algorithms}},
type = "technical report",
institution = "University of Queensland",
number = "TR0325",
year = "1997",
abstract =
"Given an integer vector of $n$ positive numbers
$a=\vert a_i \vert^n_{i=1}$ the extended gcd problem asks for an
integer vector $x$ of length $n$ such that
\[xa^T=\sum_{i=1}^n x_ia_i = gcd(a_1,a_2,\ldots,a_n)\]
For many applications it is vital that some measure of $x$,
$\norm{x}$ is small. We have proved, however, that if we choose
either the max norm or the zero metric the question of finding
$x$ such that $\norm{x}$ is smaller than some positive constant
$K$ is NP-complete. We conjecture that the question remains
NP-complete for other norms.
In the light of these results we have proposed two approximation
algorithms. Their respective complexities are
$O(n^2 log(max_i\{a_i\}))$ and $O(n^4 log(max_i\{a_i\}))$.
Theoretical analysis of the algorithms leads
to unsatisfactory bounds on the quality of the solution. Thus here
we undertake a practical study of the methods, where their
performance is matched against optimal solutions.",
paper = "Hava97.pdf"
}
\end{chunk}
\index{Cheng, Eugenia}
\begin{chunk}{axiom.bib}
@misc{Chen04,
author = "Cheng, Eugenia",
title = {{How to write proofs: A quick guide}},
link = "\url{http://cheng.staff.shef.ac.uk/proofguide/proofguide.pdf}",
year = "2004",
paper = "Chen04.pdf",
keywords = "printed,DONE"
}
\end{chunk}
\index{Warren, David H.D.}
\index{Pereira, Luis M.}
\begin{chunk}{axiom.bib}
@misc{Warr77,
author = "Warren, David H.D. and Pereira, Luis M.",
title = {{Prolog  The Language and its Implementation Compared
with Lisp}},
year = "1977",
link =
"\url{http://www.public.imtbstsp.eu/~gibson/Teaching/TeachingReadingMaterial/WarrenPereiraPereira77.pdf}",
abstract =
"Prolog is a simple but powerful programming language founded on
symbolic logic. The basic computational mechanism is a pattern
matching process (``unification'') operating on general record
structures (``terms of logic''). We briefly review the language
and compare it especially with pure Lisp. The remainder of the
paper discusses techniques for implementing Prolog efficiently; in
particular we describe how to compile the patterns involved in the
matching process. These techniques are as incorporated in our
DECsystem-10 Prolog compiler (written in Prolog). The code it
generates is comparable in speed with that produced by existing
DEC-10 Lisp compilers. We argue that pattern matching is a better
method for expressing operations on structured data than
conventional selectors and constructors -- both for the user and
for the implementor.",
paper = "Warr77.pdf"
}
\end{chunk}
\index{Day, Martin V.}
\begin{chunk}{axiom.bib}
@book{Dayx16,
author = "Day, Martin V.",
title = {{An Introduction to Proofs and the Mathematical Vernacular}},
year = "2016",
publisher = "Virginia Tech",
link = "\url{www.math.vt.edu/people/day/ProofsBook/IPaMV.pdf}",
paper = "Dayx16.pdf"
}
\end{chunk}
\index{Fourer, Robert}
\index{Gay, David M.}
\index{Kernighan, Brian W.}
\begin{chunk}{axiom.bib}
@misc{Four03,
author = "Fourer, Robert and Gay, David M. and Kernighan, Brian W.",
title = {{AMPL Reference}},
link = "\url{https://ampl.com/BOOK/CHAPTERS/24refman.pdf}",
year = "2003",
paper = "Four03.pdf"
}
\end{chunk}
\index{Hammack, Richard}
\begin{chunk}{axiom.bib}
@book{Hamm18,
author = "Hammack, Richard",
title = {{Book of Proof}},
publisher = "Hammack, Richard",
year = "2018",
paper = "Hamm18.pdf"
}
\end{chunk}
\index{Armstrong, J.L.}
\index{Virding, S.R.}
\index{Williams, M.C.}
\begin{chunk}{axiom.bib}
@inbook{Arms92,
author = "Armstrong, J.L. and Virding, S.R. and Williams, M.C.",
title = {{Use of Prolog for Developing a New Programming Language}},
booktitle = "The Practical Application of Prolog",
year = "1992",
publisher = "Institute of Electrical Engineers, London",
abstract =
"This paper describes how Prolog was used for the development of a
new concurrent realtime symbolic programming language called
Erlang.
Erlang was developed by first building a prototype in Prolog --
the prototype was used by a user group to test their reactions to
the language. As time passed many features were added (and
removed) from the interpreter and eventually the language reached
a level of maturity where it was decided to try it out on a
significant problem.
About 3 years and some 20,000 lines of Erlang later, performance
became an issue -- we wrote Prolog cross compilers from Erlang to
various concurrent logic programming languages followed by a
direct implementation of Erlang itself. The direct implementation
of Erlang was loosely based on the WAM and made by writing a
Prolog compiler from Erlang to a new abstractmachine and an
emulator for the abstract machine in 'C'. The instruction set for
the abstract machine was first prototyped in Prolog -- finally the
compiler was rewritten in Erlang, thus totally removing any
dependency on Prolog.
This paper describes some of the key events which lay between the
simple prototype and the current version of the language.",
paper = "Arms92.pdf",
keywords = "printed"
}
\end{chunk}
\index{Pirog, Maciej}
\index{Gibbons, Jeremy}
\begin{chunk}{axiom.bib}
@misc{Piroxx,
author = "Pirog, Maciej and Gibbons, Jeremy",
title = {{Extended Abstract: A Functional Derivation of the Warren
Abstract Machine}},
link = "\url{http://www.cs.ox.ac.uk/jeremy.gibbons/publications/wam.pdf}",
year = "unknown",
abstract =
"Based on Danvy et al.'s functional correspondence, we give a
further example of gradual refinement of an interpreter into a
known, lowlevel abstract machine underlying realworld compilers,
by deriving an abstract model of the Warren Abstract Machine from
a simple resolutionbased Prolog interpreter. We show that other
wellknown functional programming techniques (namely, explicit
laziness and semipersistent data structures) can help to develop
abstract machines without detailed examination of the semantics
realised by the interpreter.",
paper = "Piroxx.pdf",
keywords = "printed"
}
\end{chunk}
\index{Russinoff, David M.}
\begin{chunk}{axiom.bib}
@article{Russ92,
author = "Russinoff, David M.",
title = {{A Verified Prolog Compiler for the Warren Abstract Machine}},
journal = "Journal of Logic Programming",
volume = "13",
number = "4",
pages = "367412",
year = "1992",
abstract =
"We extend the theory of Prolog to provide a framework for the
study of Prolog compilation technology. For this purpose, we first
demonstrate the semantic equivalence of two Prolog interpreters: a
conventional SLDrefutation procedure and one that employs
Warren's ``last call'' optimization. Next, we formally define the
Warren Abstract Machine (WAM) and its instruction set and present
a Prolog compiler for the WAM. Finally, we prove that the WAM
execution of a compiled Prolog program produces the same result as
the interpretation of its source.",
paper = "Russ92.pdf",
keywords = "printed"
}
\end{chunk}
\index{Bohrer, Brandon}
\index{Crary, Karl}
\begin{chunk}{axiom.bib}
@misc{Bohr16,
author = "Bohrer, Brandon and Crary, Karl",
title = {{A ProofProducing Verified Prolog Compiler}},
year = "2016",
link = "\url{www.cs.cmu.edu/~bbohrer/pub/twamiclp2016long.pdf}",
abstract =
"We have designed and implemented a verified compiler for a
dialect of Prolog. Our compiler is verified using proof-producing
compilation: every compiled program is accompanied with a formal
proof that it is equivalent to a particular source program. Our
formal proofs take the form of type information for our new
verifying abstract machine which we call the TWAM, whose type
system natively understands logic programs specified in the
logical framework LF. We present a soundness metatheorem for the
TWAM showing that well-typed TWAM programs are sound proof-search
procedures. In doing so, we reduce our trusted computing base from
the entire compiler to the TWAM typechecker.",
paper = "Bohr16.pdf",
keywords = "printed"
}
\end{chunk}
\index{Tarau, Paul}
\begin{chunk}{axiom.bib}
@misc{Tara16,
author = "Tarau, Paul",
title = {{A Hitchhiker's Guide to Reinventing a Prolog Machine}},
year = "2016",
abstract =
"We take a fresh, ``cleanroom'' look at implementing Prolog by
deriving its translation to an executable representation and its
execution algorithm from a simple Horn Clause metainterpreter.
The resulting design has some interesting properties:
\begin{itemize}
\item the heap representation of terms and the abstract machine
instruction encodings are the same.
\item no dedicated code area is used as the code is placed
directly on the heap.
\item unification and indexing operations are orthogonal
\item filtering of matching clauses happens without building new
structures on the heap
\item variables in function and predicate symbol positions are
handled with no performance penalty
\item a simple Englishlike syntax is used as an intermediate
representation for clauses and goals
\item the same Englishlike syntax can be used by programmers
directly as an alternative to classic Prolog syntax
\item solutions of (multiple) logic engines are exposed as answer
streams that can be combined through typical functional
programming patterns
\item performance of a basic interpreter implementing our design is
within a factor of 2 of a highly optimized WAMbased system
\end{itemize}
To help placing our design on the fairly rich map of Prolog
systems, we discuss similarities to existing Prolog abstract
machines, with emphasis on separating necessary commonalities from
arbitrary implementation choices.",
paper = "Tara16.pdf"
}
\end{chunk}
\index{Felleisen, Matthias}
\begin{chunk}{axiom.bib}
@techreport{Fell85,
author = "Felleisen, Matthias",
title = {{Transliterating Prolog into Scheme}},
type = "technical report",
number = "182",
institution = "University of Indiana",
year = "1985",
paper = "Fell85.pdf",
keywords = "printed"
}
\end{chunk}
\index{Wang, Ke}
\begin{chunk}{axiom.bib}
@misc{Wang19,
author = "Wang, Ke",
title = {{Learning Scalable and Precise Representation of Program
Semantics}},
year = "2019",
abstract =
"Neural program embedding has shown potential in aiding the
analysis of largescale, complicated software. Newly proposed deep
neural architectures pride themselves on learning program
semantics rather than superficial syntactic features. However, by
considering the source code only, the vast majority of neural
networks do not capture a deep, precise representation of program
semantics. In this paper, we present DYPRO, a novel deep neural
network that learns from program execution traces. Compared to the
prior dynamic models, not only is DYPRO capable of generalizing
across multiple executions for learning a program's dynamic
semantics in its entirety, but DYPRO is also more efficient when
dealing with programs yielding long execution traces. For
evaluation, we task DYPRO with semantic classification
(i.e. categorizing programs based on their semantics) and compared
it against two prominent static models: Gated Graph Neural Network
and TreeLSTM. We find that DYPRO achieves the highest prediction
accuracy among all models. To further reveal the capacity of all
aforementioned deep neural architectures, we examine if the models
can learn to detect deeper semantic properties of a program. In
particular, given a task of recognizing loop invariants, we show
DYPRO beats all static models by a wide margin.",
paper = "Wang19.pdf",
keywords = "printed"
}
\end{chunk}
\index{Muller, Dennis}
\index{Kohlhase, Michael}
\index{Rabe, Florian}
\begin{chunk}{axiom.bib}
@article{Mull18,
author = "Muller, Dennis and Kohlhase, Michael and Rabe, Florian",
title = {{Automatically Finding Theory Morphisms for Knowledge
Management}},
journal = "LNCS",
volume = "11006",
year = "2018",
abstract =
"We present a method for finding morphisms between formal
theories, both within as well as across libraries based on
different logical foundations. As they induce new theorems in the
target theory for any of the source theory, theory morphisms are
high-value elements of a modular formal library. Usually, theory
morphisms are manually encoded, but this practice requires authors
who are familiar with source and target theories at the same time,
which limits the scalability of the manual approach.
To remedy this problem, we have developed a morphism finder
algorithm that automates theory morphism discovery. In this paper we
present an implementation in the MMT system and show specific use
cases. We focus on an application of theory discovery, where a user
can check whether a (part of a) formal theory already exists in
some library, potentially avoiding duplication of work or
suggesting an opportunity for refactoring.",
paper = "Mull18.pdf"
}
\end{chunk}
\index{Cohl, Howard S.}
\index{GreinerPetter, Andre}
\index{Schubotz, Moritz}
\begin{chunk}{axiom.bib}
@article{Cohl18,
author = "Cohl, Howard S. and GreinerPetter, Andre and Schubotz, Moritz",
title = {{Automated Symbolic and Numerical Testing of DLMF Formulae
Using Computer Algebra Systems}},
journal = "LNCS",
volume = "11006",
year = "2018",
abstract =
"We have developed an automated procedure for symbolic and
numerical testing of formulae extracted from the National
Institute of Standards and Technology (NIST) Digital Library of
Mathematical Functions (DLMF). For the NIST Digital Repository of
Mathematical Formulae, we have developed conversion tools from
semantic Latex to Computer Algebra System (CAS) MAPLE which relies
on Youssef's partofmath tagger. We convert a test data subset of
4,078 semantics Latex DLMF formulae extracted from the DLMF to the
native CAS representation and then apply an automated scheme for
symbolic and numerical testing and verification. Our framework is
implemented using Java and MAPLE. We describe in detail the
conversion process which is required so that the CAS is able to
correctly interpret the mathematical representation of the
formulae. We describe the improvement of the effectiveness of our
automated scheme through incremental enhancements (making more
precise) of the mathematical semantics markup of the formulae.",
paper = "Cohl18.pdf"
}
\end{chunk}
\index{Carette, Jacques}
\index{Farmer, William M.}
\index{Sharoda, Yasmine}
\begin{chunk}{axiom.bib}
@article{Care18,
author = "Carette, Jacques and Farmer, William M. and Sharoda, Yasmine",
title = {{Biform Theories: Project Description}},
journal = "LNCS",
volume = "11006",
year = "2018",
abstract =
"A biform theory is a combination of an axiomatic theory and an
algorithmic theory that supports the integration of reasoning and
computation. These are ideal for specifying and reasoning about
algorithms that manipulate mathematical expressions. However,
formalizing biform theories is challenging as it requires the
means to express statements about the interplay of what these
algorithms do and what their actions mean mathematically. This
paper describes a project to develop a methodology for expressing,
manipulating, managing, and generating mathematical knowledge as a
network of biform theories. It is a subproject of MathScheme, a
longterm project at McMaster University to produce a framework
for integrating formal deduction and symbolic computation.",
paper = "Care18.pdf",
keywords = "printed"
}
\end{chunk}
\index{Carette, Jacques}
\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@article{Care17,
author = "Carette, Jacques and Farmer, William M.",
title = {{Formalizing Mathematical Knowledge as a Biform Theory
Graph: A Case Study}},
journal = "LNCS",
volume = "10383",
year = "2017",
abstract =
"A biform theory is a combination of an axiomatic theory and an
algorithmic theory that supports the integration of reasoning and
computation. These are ideal for formalizing algorithms that
manipulate mathematical expressions. A theory graph is a network
of theories connected by meaningpreserving theory morphisms that
map the formulae of one theory to the formulas of another
theory. Theory graphs are in turn well suited for formalizing
mathematical knowledge at the most convenient level of abstraction
using the most convenient vocabulary. We are interested in the
problem of whether a body of mathematical knowledge can be
effectively formalized as a theory graph of biform theories. As a
test case, we look at the graph of theories encoding natural
number arithmetic. We used two different formalisms to do this,
which we describe and compare. The first is realized in
CTT$_{uqe}$, a version of Church's type theory with quotation and
evaluation, and the second is realized in Agda, a dependently
typed programming language.",
paper = "Care17.pdf"
}
\end{chunk}
\index{Chojecki, Przemyslaw}
\begin{chunk}{axiom.bib}
@article{Choj17,
author = "Chojecki, Przemyslaw",
title = {{DeepAlgebra -- An Outline of a Program}},
journal = "LNCS",
volume = "10383",
year = "2017",
abstract =
"We outline a program in the area of formalization of mathematics
to automate theorem proving in algebra and algebraic geometry. We
propose a construction of a dictionary between automated theorem
provers and (La)TeX exploiting syntactic parsers. We describe its
application to a repository of humanwritten facts and definitions
in algebraic geometry (The Stacks Project). We use deep learning
techniques.",
paper = "Choj17.pdf"
}
\end{chunk}
\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@article{Farm17,
author = "Farmer, William M.",
title = {{Theory Morphisms in Church's Type Theory with Quotation
and Evaluation}},
journal = "LNCS",
volume = "10383",
year = "2017",
abstract =
"CTT$_{qe}$ is a version of Church's type theory with global
quotation and evaluation operators that is engineered to reason
about the interplay of syntax and semantics and to formalize
syntaxlevel mathematical algorithms. CTT$_{uqe}$ is a variant of
CTT$_{qe}$ that admits undefined expressions, partial functions,
and multiple base types of individuals. It is better suited than
CTT$_{qe}$ as a logic for building networks of theories connected
by theory morphisms. This paper presents the syntax and semantics
of CTT$_{uqe}$, defines a notion of a theory morphism from one
CTT$_{uqe}$ theory to another, and gives two simple examples
involving monoids that illustrate the use of theory morphisms in
CTT$_{qe}$.",
paper = "Farm17.pdf"
}
\end{chunk}
\index{Muller, Dennis}
\index{Gauthier, Thibault}
\index{Kaliszyk, Cezary}
\index{Kohlhase, Michael}
\index{Rabe, Florian}
\begin{chunk}{axiom.bib}
@article{Mull17,
author = "Muller, Dennis and Gauthier, Thibault and Kaliszyk, Cezary
and Kohlhase, Michael and Rabe, Florian",
title = {{Classification of Alignments Between Concepts of Formal
Mathematical Systems}},
journal = "LNCS",
volume = "10383",
year = "2017",
abstract =
"Mathematical knowledge is publicly available in dozens of
different formats and languages, ranging from informal
(e.g. Wikipedia) to formal corpora (e.g. Mizar). Despite an
enormous amount of overlap between these corpora, only few
machine-actionable connections exist. We speak of alignment if the
same concept occurs in different libraries, possibly with slightly
different names, notations, or formal definitions. Leveraging
these alignments creates a huge potential for knowledge sharing
and transfer, e.g. integrating theorem provers or reusing
services across systems. Notably, even imperfect alignments,
i.e. concepts that are very similar rather than identical, can
often play very important roles. Specifically, in machine learning
techniques for theorem proving and in automation techniques that
use these, they allow learning-reasoning based automation for
theorem provers to take inspiration from proofs from different
formal proof libraries or semiformal libraries even if the latter
is based on a different mathematical foundation. We present a
classification of alignments and design a simple format for
describing alignments, as well as an infrastructure for sharing
them. We propose these as a centralized standard for the
community. Finally, we present an initial collection of
approximately 12000 alignments from the different kinds of
mathematical corpora, including proof assistant libraries and
semiformal corpora as a public resource.",
paper = "Mull17.pdf"
}
\end{chunk}
\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@article{Farm16,
author = "Farmer, William M.",
title = {{Incorporating Quotation and Evaluation into Church's Type
Theory: Syntax and Semantics}},
journal = "LNCS",
volume = "9791",
year = "2016",
abstract =
"CTT$_{qe}$ is a version of Church's type theory that includes
quotation and evaluation operators that are similar to quote and
eval in the Lisp programming language. With quotation and
evaluation it is possible to reason in CTT$_{qe}$ about the
interplay of the syntax and semantics of expressions and, as a
result, to formalize syntaxbased mathematical algorithms. We
present the syntax and semantics of CTT$_{qe}$ and give several
examples that illustrate the usefulness of having quotation and
evaluation in CTT$_{qe}$. We do not give a proof system for
CTT$_{qe}$ but we do sketch what a proof system could look like.",
paper = "Farm16.pdf"
}
\end{chunk}
\index{Blanchette, Jasmin Christian}
\index{Haslbeck, Maximilian}
\index{Matichuk, Daniel}
\index{Nipkow, Tobias}
\begin{chunk}{axiom.bib}
@article{Blan15,
author = "Blanchette, Jasmin Christian and Haslbeck, Maximilian and
Matichuk, Daniel and Nipkow, Tobias",
title = {{Mining the Archive of Formal Proofs}},
journal = "LNCS",
volume = "9150",
year = "2015",
abstract =
"The Archive of Formal Proofs is a vast collection of
computerchecked proofs developed using the proof assistant
Isabelle. We perform an indepth analysis of the archive, looking
at various properties of the proof developments, including size,
dependencies, and proof style. This gives some insights into the
nature of formal proofs.",
paper = "Blan15.pdf"
}
\end{chunk}
\index{Horozal, Fulya}
\index{Rabe, Florian}
\begin{chunk}{axiom.bib}
@article{Horo15,
author = "Horozal, Fulya and Rabe, Florian",
title = {{Formal Logic Definitions for Interchange Languages}},
journal = "LNCS",
volume = "9150",
year = "2015",
abstract =
"System integration often requires standardized interchange
languages, via which systems can exchange mathematical
knowledge. Major examples are the MathMLbased markup languages
and TPTP. However, these languages standardize only the syntax of
the exchanged knowledge, which is insufficient when the involved
logics are complex or numerous. Logical frameworks, on the other
hand, allow representing the logics themselves (and are thus aware
of the semantics), but they abstract from the concrete syntax.
Maybe surprisingly, until recently, stateoftheart logical
frameworks were not quite able to adequately represent logics
commonly used in formal systems. Using a recent extension of the
logical framework LF, we show how to give concise formal
definitions of the logics used in TPTP. We can also formally
define translations and combinations between the various TPTP
logics. This allows us to build semantics-aware tool support such
as typechecking TPTP content.
While our presentation focuses on the current TPTP logics, our
approach can be easily extended to other logics and interchange
languages. In particular, our logic representations can be used
with both TPTP and MathML. Thus, a single definition of the
semantics can be used with either interchange syntax.",
paper = "Horo15.pdf"
}
\end{chunk}
\index{Rabe, Florian}
\begin{chunk}{axiom.bib}
@article{Rabe15,
author = "Rabe, Florian",
title = {{Generic Literals}},
journal = "LNCS",
volume = "9150",
year = "2015",
abstract =
"MMT is a formal framework that combines the flexibility of
knowledge representation languages like OPENMATH with the formal
rigor of logical frameworks like LF. It systematically abstracts
from theoretical and practical aspects of individual formal
languages and tries to develop as many solutions as possible
generically.
In this work, we allow MMT theories to declare user-defined
literals, which makes literals as userextensible as operators,
axioms, and notations. This is particularly important for
framework languages, which must be able to represent any choice of
literals. Theoretically, our literals are introduced by importing
a model that defines the denotations of some types and function
symbols. Practically, MMT is coupled with a programming language,
in which these models are defined.
Our results are implemented in the MMT system. In particular,
literals and computation on them are integrated with the parser
and type checker.",
paper = "Rabe15.pdf"
}
\end{chunk}
\index{Carette, Jacques}
\index{Farmer, William M.}
\index{Kohlhase, Michael}
\begin{chunk}{axiom.bib}
@article{Care14,
author = "Carette, Jacques and Farmer, William M. and Kohlhase, Michael",
title = {{Realms: A Structure for Consolidating Knowledge about
Mathematical Theories}},
journal = "LNCS",
volume = "8543",
year = "2014",
abstract =
"Since there are different ways of axiomatizing and developing a
mathematical theory, knowledge about such a theory may reside in
many places and in many forms within a library of formalized
mathematics. We introduce the notion of a realm as a structure for
consolidating knowledge about a mathematical theory. A realm
contains several axiomatizations of a theory that are separately
developed. Views interconnect these developments and establish
that the axiomatizations are equivalent in the sense of being
mutually interpretable. A realm also contains an external interface
that is convenient for users of the library who want to apply the
concepts and facts of the theory without delving into the details
of how the concepts and facts were developed. We illustrate the
utility of realms through a series of examples. We also give an
outline of the mechanisms that are needed to create and maintain
realms.",
paper = "Care14.pdf"
}
\end{chunk}
\index{Bradford, Russell}
\index{Davenport, James H.}
\index{England, Matthew}
\index{Wilson, David}
\begin{chunk}{axiom.bib}
@article{Brad13a,
author = "Bradford, Russell and Davenport, James H. and England, Matthew
and Wilson, David",
title = {{Optimising Problem Formulation for Cylindrical Algebraic
Decomposition}},
journal = "LNCS",
volume = "7961",
year = "2013",
abstract =
"Cylindrical Algebraic Decomposition (CAD) is an important tool
for the study of real algebraic geometry with many applications
both within mathematics and elsewhere. It is known to have doubly
exponential complexity in the number of variables in the worst
case, but the actual computation time can vary greatly. It is
possible to offer different formulations for a given problem
leading to great differences in tractability. In this paper we
suggest a new measure for CAD complexity which takes into account
the real geometry of the problem. This leads to new heuristics for
choosing: the variable ordering for a CAD problem, a designated
equational constraint, and formulations for truth-table invariant
CADs (TTICADs). We then consider the possibility of using Groebner
bases to precondition TTICAD and when such formulations constitute
the creation of a new problem.",
paper = "Brad13a.pdf"
}
\end{chunk}
\index{Farmer, William M.}
\begin{chunk}{axiom.bib}
@article{Farm13a,
author = "Farmer, William M.",
title = {{The Formalization of SyntaxBased Mathematical Algorithms
Using Quotation and Evaluation}},
journal = "LNCS",
volume = "7961",
year = "2013",
abstract =
"Algorithms like those for differentiating functional expressions
manipulate the syntactic structure of mathematical expressions in
a mathematically meaningful way. A formalization of such an
algorithm should include a specification of its computational
behavior, a specification of its mathematical meaning, and a
mechanism for applying the algorithm to actual
expressions. Achieving these goals requires the ability to
integrate reasoning about the syntax of the expressions with
reasoning about what the expressions mean. A syntax framework is a
mathematical structure that is an abstract model for a syntax
reasoning system. It contains a mapping of expressions to
syntactic values that represent the syntactic structures of the
expressions; a language for reasoning about syntactic values; a
quotation mechanism to refer to the syntactic value of an
expression; and an evaluation mechanism to refer to the value of
the expression represented by a syntactic value. We present and
compare two approaches, based on instances of a syntax framework,
to formalize a syntaxbased mathematical algorithm in a formal
theory $T$. In the first approach the syntactic values for the
expressions manipulated by the algorithm are members of an
inductive type in $T$, but quotation and evaluation are functions
defined in the metatheory of $T$. In the second approach every
expression in $T$ is represented by a syntactic value, and
quotation and evaluation are operators in $T$ itself.",
paper = "Farm13a.pdf"
}
\end{chunk}
\index{Heras, Jonathan}
\index{Komendantskaya, Ekaterina}
\begin{chunk}{axiom.bib}
@article{Hera13,
author = "Heras, Jonathan and Komendantskaya, Ekaterina",
title = {{ML4PG in Computer Algebra Verification}},
journal = "LNCS",
volume = "7961",
year = "2013",
abstract =
"ML4PG is a machine-learning extension that provides statistical
proof hints during the process of Coq/SSReflect proof
development. In this paper, we use ML4PG to find proof patterns in
the CoqEAL library  a library that was devised to verify the
correctness of Computer Algebra algorithms. In particular, we use
ML4PG to help us in the formalisation of an efficient algorithm to
compute the inverse of triangular matrices.",
paper = "Hera13.pdf"
}
\end{chunk}
\index{Kohlhase, Michael}
\index{Mance, Felix}
\index{Rabe, Florian}
\begin{chunk}{axiom.bib}
@article{Kohl13,
author = "Kohlhase, Michael and Mance, Felix and Rabe, Florian",
title = {{A Universal Machine for Biform Theory Graphs}},
journal = "LNCS",
volume = "7961",
year = "2013",
abstract =
"Broadly speaking, there are two kinds of semanticsaware
assistant systems for mathematics: proof assistants express the
semantics in logic and emphasize deduction, and computer algebra
systems express the semantics in programming languages and
emphasize computation. Combining the complementary strengths of
both approaches while mending their complementary weaknesses has
been an important goal of the mechanized mathematics community for
some time.
We pick up on the idea of biform theories and interpret it in the
MMT/OMDOC framework which introduced the foundationsastheories
approach, and can thus represent both logics and programming
languages as theories. This yields a formal, modular framework of
biform theory graphs which mixes specifications and implementations
sharing the module system and typing information.
We present automated knowledge management work flows that
interface to existing specification/programming tools and enable
an OPENMATH Machine, that operationalizes biform theories,
evaluating expressions by exhaustively applying the
implementations of the respective operators. We evaluate the new
biform framework by adding implementations to the OPENMATH
standard content dictionaries.",
paper = "Kohl13.pdf"
}
\end{chunk}
\index{Mahboubi, Assia}
\begin{chunk}{axiom.bib}
@article{Mahb13,
author = "Mahboubi, Assia",
title = {{The Rooster and the Butterflies}},
journal = "LNCS",
volume = "7961",
year = "2013",
abstract =
"This paper describes a machine-checked proof of the Jordan-H{\"o}lder
theorem for finite groups. The purpose of this description is to
discuss the representation of the elementary concepts of finite
group theory inside type theory. The design choices underlying
these representations were crucial to the successful formalization
of a complete proof of the Odd Order Theorem in the Coq system.",
paper = "Mahb13.pdf"
}
\end{chunk}
\index{Tankink, Carst}
\index{Kaliszyk, Cezary}
\index{Urban, Josef}
\index{Geuvers, Herman}
\begin{chunk}{axiom.bib}
@article{Tank13,
author = "Tankink, Carst and Kaliszyk, Cezary and Urban, Josef and
Geuvers, Herman",
title = {{Formal Mathematics on Display: A Wiki for Flyspeck}},
journal = "LNCS",
volume = "7961",
year = "2013",
abstract =
"The AGORA system is a prototype ``Wiki for Formal Mathematics'',
with an aim to support developing and documenting large
formalizations of mathematics in a proof assistant. The functions
implemented in AGORA include in-browser editing, strong AI/ATP
proof advice, verification, and HTML rendering. The HTML rendering
contains hyperlinks and provides ondemand explanation of the
proof state for each proof step. In the present paper we show the
prototype Flyspeck Wiki as an instance of AGORA for HOL Light
formalizations. The wiki can be used for formalizations of
mathematics and for writing informal wiki pages about
mathematics. Such informal pages may contain islands of formal
text, which is used here for providing an initial crosslinking
between Hales's informal Flyspeck book, and the formal Flyspeck
development.
The AGORA platform intends to address distributed wiki-style
collaboration on large formalization projects, in particular both
the aspect of immediate editing, verification and rendering of
formal code, and the aspect of gradual and mutual refactoring and
correspondence of the initial informal text and its
formalization. Here, we highlight these features with the Flyspeck
Wiki.",
paper = "Tank13.pdf"
}
\end{chunk}
\index{Asperti, Andrea}
\index{Ricciotti, Wilmer}
\begin{chunk}{axiom.bib}
@article{Aspe12a,
author = "Asperti, Andrea and Ricciotti, Wilmer",
title = {{A Web Interface for Matita}},
journal = "LNCS",
volume = "7362",
year = "2012",
paper = "Aspe12a.pdf"
}
\end{chunk}
\index{Alama, Jesse}
\index{Mamane, Lionel}
\index{Urban, Josef}
\begin{chunk}{axiom.bib}
@article{Alam12,
author = "Alama, Jesse and Mamane, Lionel and Urban, Josef",
title = {{Dependencies in Formal Mathematics: Applications and
Extraction for Coq and Mizar}},
journal = "LNCS",
volume = "7362",
year = "2012",
abstract =
"Two methods for extracting detailed formal dependencies from the
Coq and Mizar system are presented and compared. The methods are
used for dependency extraction from two large mathematical
repositories: the Coq Repository at Nijmegen and the Mizar
Mathematical Library. Several applications of the detailed
dependency analysis are described and proposed. Motivated by the
different applications, we discuss the various kinds of
dependencies that we are interested in, and the suitability of
various dependency extraction methods.",
paper = "Alam12.pdf"
}
\end{chunk}
\index{Hetzl, Stefan}
\begin{chunk}{axiom.bib}
@article{Hetz12,
author = "Hetzl, Stefan",
title = {{Project Presentation: Algorithmic Structuring and
Compression of Proofs (ASCOP)}},
journal = "LNCS",
volume = "7362",
year = "2012",
abstract =
"Computergenerated proofs are typically analytic, i.e. they
essentially consist only of formulas which are present in the
theorem that is shown. In contrast, mathematical proofs written by
humans almost never are: they are highly structured due to the use
of lemmas.
The ASCOP project aims at developing algorithms and software which
structure and abbreviate analytic proofs by computing useful
lemmas. These algorithms will be based on recent groundbreaking
results establishing a new connection between proof theory and
formal language theory. This connection allows the application of
efficient algorithms based on formal grammars to structure and
compress proofs.",
paper = "Hetz12.pdf"
}
\end{chunk}
\begin{chunk}{axiom.bib}
@misc{Gapt19,
author = "Unknown",
title = {{GAPT: General Architecture for Proof Theory}},
year = "2019",
link = "\url{https://www.logic.at/gapt/downloads/gaptusermanual.pdf}",
paper = "Gapt19.pdf"
}
\end{chunk}
\index{Horozal, Fulya}
\index{Kohlhase, Michael}
\index{Rabe, Florian}
\begin{chunk}{axiom.bib}
@article{Horo12,
author = "Horozal, Fulya and Kohlhase, Michael and Rabe, Florian",
title = {{Extending MKM Formats at the Statement Level}},
journal = "LNCS",
volume = "7362",
year = "2012",
abstract =
"Successful representation and markup languages find a good
balance between giving the user freedom of expression, enforcing
the fundamental semantic invariants of the modeling framework, and
allowing machine support for the underlying semantic
structures. MKM formats maintain strong invariants while trying to
be foundationally unconstrained, which makes the induced design
problem particularly challenging.
In this situation, it is standard practice to define a minimal
core language together with a scripting/macro facility for
syntactic extensions that map into the core language. In practice,
such extension facilities are either fully unconstrained (making
invariants and machine support difficult) or limited to the object
level (keeping the statement and theory levels fixed).
In this paper we develop a general methodology for extending MKM
representation formats at the statement level. We show the utility
(and indeed necessity) of statementlevel extensions by
redesigning the OMDoc format into a minimal, regular core language
(strict OMDoc) and an extension (pragmatic OMDoc) that maps into
strict OMDoc.",
paper = "Horo12.pdf"
}
\end{chunk}
\index{Iancu, Mihnea}
\index{Rabe, Florian}
\begin{chunk}{axiom.bib}
@article{Ianc12,
author = "Iancu, Mihnea and Rabe, Florian",
title = {{Management of Change in Declarative Languages}},
journal = "LNCS",
volume = "7362",
year = "2012",
abstract =
"Due to the high degree of interconnectedness of formal
mathematical statements and theories, human authors often have
difficulties anticipating and tracking the effects of a change in
large bodies of symbolic mathematical knowledge. Therefore, the
automation of change management is desirable. But while computers
can in principle detect and propagate changes automatically, this
process must take the semantics of the underlying mathematical
formalism into account. Therefore, concrete management of change
solutions are difficult to realize.
The MMT language was designed as a generic declarative language
that captures universal structural features while avoiding a
commitment to a particular formalism. Therefore, it provides a
promising framework for the systematic study of changes in
declarative languages. We leverage this framework by providing a
generic change management solution at the MMT level, which can be
instantiated for arbitrary specific languages.",
paper = "Ianc12.pdf"
}
\end{chunk}
\index{Wilson, David J.}
\index{Bradford, Russell J.}
\index{Davenport, James H.}
\begin{chunk}{axiom.bib}
@article{Wils12,
author = "Wilson, David J. and Bradford, Russell J. and
Davenport, James H.",
title = {{Speeding Up Cylindrical Algebraic Decomposition by
Groebner Bases}},
journal = "LNCS",
volume = "7362",
year = "2012",
abstract =
"Groebner Bases and Cylindrical Algebraic Decomposition are
generally thought of as two, rather different, methods of looking
at systems of equations and, in the case of Cylindrical Algebraic
Decomposition, inequalities. However, even for a mixed system of
equalities and inequalities, it is possible to apply Groebner
bases to the (conjoined) equalities before invoking CAD. We see
that this is, quite often but not always, a beneficial
preconditioning of the CAD problem.
It is also possible to precondition the (conjoined) inequalities
with respect to the equalities, and this can also be useful in
many cases.",
paper = "Wils12.pdf"
}
\end{chunk}
\index{Horozal, Fulya}
\index{Iacob, Alin}
\index{Jucovschi, Constantin}
\index{Kohlhase, Michael}
\index{Rabe, Florian}
\begin{chunk}{axiom.bib}
@article{Horo11,
author = "Horozal, Fulya and Iacob, Alin and Jucovschi, Constantin
and Kohlhase, Michael and Rabe, Florian",
title = {{Combining Source, Content, Presentation, Narration, and
Relational Representation}},
journal = "LNCS",
volume = "6824",
year = "2011",
abstract =
"In this paper, we try to bridge the gap between different
dimensions / incarnations of mathematical knowledge: MKM
representation formats (content), their humanoriented languages
(source, presentation), their narrative linearizations
(narration), and relational presentations used in the semantic
web. The central idea is to transport solutions from software
engineering to MKM regarding the parallel interlinked maintenance
of the different incarnations. We show how the integration of
these incarnations can be utilized to enrich the authoring and
viewing processes, and we evaluate our infrastructure on the LATIN
Logic Atlas, a modular library of logic formalizations, and a set
of computer science lecture notes written in STEX  a modular,
semantic variant of LATEX.",
paper = "Horo11.pdf"
}
\end{chunk}
\index{Krebbers, Robbert}
\index{Spitters, Bas}
\begin{chunk}{axiom.bib}
@article{Kreb11,
author = "Krebbers, Robbert and Spitters, Bas",
title = {{Computer Certified Efficient Exact Reals in Coq}},
journal = "LNCS",
volume = "6824",
year = "2011",
abstract =
"Floating point operations are fast, but require continuous effort
on the part of the user in order to ensure that the results are
correct. This burden can be shifted away from the user by
providing a library of exact analysis in which the computer
handles the error estimates. We provide an implementation of the
exact real numbers in the Coq proof assistant. This improves on
the earlier Coq implementation by O'Connor in two ways: we use
dyadic rationals built from the machine integers and we optimize
computation of power series by using approximate
division. Moreover, we use type classes for clean mathematical
interfaces. This appears to be the first time that type classes
are used in heavy computation. We obtain over a 100 times speed up
of the basic operations and indications for improving the Coq system.",
paper = "Kreb11.pdf"
}
\end{chunk}
\index{Rabe, Florian}
\index{Kohlhase, Michael}
\index{Coen, Claudio Sacerdoti}
\begin{chunk}{axiom.bib}
@article{Rabe11,
author = "Rabe, Florian and Kohlhase, Michael and Coen, Claudio Sacerdoti",
title = {{A Foundational View on Integration Problems}},
journal = "LNCS",
volume = "6824",
year = "2011",
abstract =
"The integration of reasoning and computation services across
system and language boundaries is a challenging problem of
computer science. In this paper, we use integration for the
scenario where we have two systems that we integrate by moving
problems and solutions between them. While this scenario is often
approached from an engineering perspective, we take a foundational
view. Based on the generic declarative language MMT, we develop a
theoretical framework for system integration using theories and
partial theory morphisms. Because MMT permits representations of
the metalogical foundations themselves, this includes integration
across logics. We discuss safe and unsafe integration schemes and
devise a general form of safe integration.",
paper = "Rabe11.pdf"
}
\end{chunk}
\index{Asperti, Andrea}
\index{Coen, Claudio Sacerdoti}
\begin{chunk}{axiom.bib}
@article{Aspe10a,
author = "Asperti, Andrea and Coen, Claudio Sacerdoti",
title = {{Some Considerations on the Usability of Interactive Provers}},
journal = "LNCS",
volume = "6167",
year = "2010",
abstract =
"In spite of the remarkable achievements recently obtained in the
field of mechanization of formal reasoning, the overall usability
of interactive provers does not seem to be sensibly improved since
the advent of the ``second generation'' of systems, in the mid of
the eighties. We try to analyze the reasons of such a slow
progress, pointing out the main problems and suggesting some
possible research directions.",
paper = "Aspe10a.pdf",
keywords = "DONE"
}
\end{chunk}
\index{Calmet, Jacques}
\index{Campbell, John A.}
\begin{chunk}{axiom.bib}
@article{Calm10,
author = "Calmet, Jacques and Campbell, John A.",
title = {{A Revisited Perspective on Symbolic Mathematical Computing
and Artificial Intelligence}},
journal = "LNCS",
volume = "6167",
year = "2010",
abstract =
"We provide a perspective on the current state and possible future
of links between symbolic mathematical computing and artificial
intelligence, on the occasion of the 10th biennial conference
(AISMC, later AISC) devoted to those connections. It follows a
similar perspective expressed for the first such conference in 1992
and then revised and expanded 5 years later. Issues related to the
computational management of mathematical knowledge are
highlighted.",
paper = "Calm10.pdf"
}
\end{chunk}
\index{Carette, Jacques}
\index{Sexton, Alan P.}
\index{Sorge, Volker}
\index{Watt, Stephen M.}
\begin{chunk}{axiom.bib}
@article{Care10a,
author = "Carette, Jacques and Sexton, Alan P. and Sorge, Volker and
Watt, Stephen M.",
title = {{Symbolic Domain Decomposition}},
journal = "LNCS",
volume = "6167",
year = "2010",
abstract =
"Decomposing the domain of a function into parts has many uses in
mathematics. A domain may naturally be a union of pieces, a
function may be defined by cases, or different boundary conditions
may hold on different regions. For any particular problem the
domain can be given explicitly, but when dealing with a family of
problems given in terms of symbolic parameters, matters become
more difficult. This article shows how hybrid sets, that is
multisets allowing negative multiplicity, may be used to express
symbolic domain decompositions in an efficient, elegant and
uniform way, simplifying both computation and reasoning. We apply
this theory to the arithmetic of piecewise functions and symbolic
matrices and show how certain operations may be reduced from
exponential to linear complexity.",
paper = "Care10a.pdf"
}
\end{chunk}
\index{Cohen, Cyril}
\index{Mahboubi, Assia}
\begin{chunk}{axiom.bib}
@article{Cohe10,
author = "Cohen, Cyril and Mahboubi, Assia",
title = {{A Formal Quantifier Elimination for Algebraically Closed Fields}},
journal = "LNCS",
volume = "6167",
year = "2010",
abstract =
"We prove formally that the first order theory of algebraically
closed fields enjoys quantifier elimination, and hence is
decidable. This proof is organized in two modular parts. We first
reify the first order theory of rings and prove that quantifier
elimination leads to decidability. Then we implement an algorithm
which constructs a quantifier free formula from any first order
formula in the theory of ring. If the underlying ring is in fact
an algebraically closed field, we prove that the two formulas have
the same semantic. The algorithm producing the quantifier free
formula is programmed in continuation passing style, which leads
to both a concise program and an elegant proof of semantics
correctness.",
paper = "Cohe10.pdf"
}
\end{chunk}
\index{Dominguez, Cesar}
\index{Rubio, Julio}
\begin{chunk}{axiom.bib}
@article{Domi10,
author = "Dominguez, Cesar and Rubio, Julio",
title = {{Computing in Coq with Infinite Algebraic Data Structures}},
journal = "LNCS",
volume = "6167",
year = "2010",
abstract =
"Computational content encoded into constructive type theory
proofs can be used to make computing experiments over concrete
data structures. In this paper, we explore this possibility when
working in Coq with chain complexes of infinite type (that is to
say, generated by infinite sets) as a part of the formalization of
a hierarchy of homological algebra structures.",
paper = "Domi10.pdf"
}
\end{chunk}
\index{Kerber, Manfred}
\begin{chunk}{axiom.bib}
@article{Kerb10,
author = "Kerber, Manfred",
title = {{Proofs, Proofs, Proofs, and Proofs}},
journal = "LNCS",
volume = "6167",
year = "2010",
abstract =
"In logic there is a clear concept of what constitutes a proof and
what not. A proof is essentially defined as a finite sequence of
formulae which are either axioms or derived by proof rules from
formulae earlier in the sequence. Sociologically, however, it is
more difficult to say what should constitute a proof and what
not. In this paper we will look at different forms of proofs and
try to clarify the concept of proof in the wider meaning of the
term. This has implications on how proofs should be represented
formally.",
paper = "Kerb10.pdf",
keywords = "printed"
}
\end{chunk}
\index{Nederpelt, R.}
\index{Kamareddine, F.}
\begin{chunk}{axiom.bib}
@inproceedings{Nede01,
author = "Nederpelt, R. and Kamareddine, F.",
title = {{An Abstract Syntax for a Formal Language of Mathematics}},
booktitle = "4th int. Tbilisi Symp. on Language, Logic, and Computation",
publisher = "unknown",
year = "2001",
abstract =
"This paper provides an abstract syntax for a formal language of
mathematics. We call our language Weak Type Theory (abbreviated
WTT). WTT will be as faithful as possible to the mathematician's
language yet will be formal and will not allow ambiguities. WTT
can be used as an intermediary between the natural language of
the mathematician and the formal language of the logician. As far
as we know, this is the first extensive formalization of an
abstract syntax of a formal language of mathematics.",
paper = "Nede01.pdf"
}
\end{chunk}
\index{Barbeau, Edward J.}
\begin{chunk}{axiom.bib}
@book{Barb00,
author = "Barbeau, Edward J.",
title = {{Mathematical Fallacies, Flaws, and Flimflam}},
publisher = "American Mathematical Society",
year = "2000",
paper = "Barb00.pdf"
}
\end{chunk}
\index{Baker, Josef B.}
\index{Sexton, Alan P.}
\index{Sorge, Volker}
\begin{chunk}{axiom.bib}
@article{Bake09,
author = "Baker, Josef B. and Sexton, Alan P. and Sorge, Volker",
title = {{A Linear Grammar Approach to Mathematical Formula
Recognition from PDF}},
journal = "LNCS",
volume = "5625",
year = "2009",
abstract =
"Many approaches have been proposed over the years for the
recognition of mathematical formulae from scanned documents. More
recently a need has arisen to recognise formulae from PDF
documents. Here we can avoid ambiguities introduced by traditional
OCR approaches and instead extract perfect knowledge of the
characters used in formulae directly from the document. This can
be exploited by formula recognition techniques to achieve correct
results and high performance.
In this paper we revisit an old grammatical approach to formula
recognition, that of Anderson from 1968, and assess its
applicability with respect to data extracted from PDF
documents. We identify some problems of the original method when
applied to common mathematical expressions and show how they can
be overcome. The simplicity of the original method leads to a very
efficient recognition technique that not only is very simple to
implement but also yields results of high accuracy for the
recognition of mathematical formulae from PDF documents.",
paper = "Bake09.pdf"
}
\end{chunk}
\index{Biha, Sidi Ould}
\begin{chunk}{axiom.bib}
@article{Biha09,
author = "Biha, Sidi Ould",
title = {{Finite Group Representation Theory with Coq}},
journal = "LNCS",
volume = "5625",
year = "2009",
abstract =
"Representation theory is a branch of algebra that allows the
study of groups through linear applications, i.e. matrices. Thus
problems in abstract groups can be reduced to problems on
matrices. Representation theory is the basis for character
theory. In this paper we present a formalization of finite groups
representation theory in the Coq system that includes a
formalization of Maschke's theorem on reducible finite group algebra.",
paper = "Biha09.pdf"
}
\end{chunk}
\index{Calmet, Jacques}
\begin{chunk}{axiom.bib}
@article{Calm09,
author = "Calmet, Jacques",
title = {{Abstraction-Based Information Technology: A Framework for
Open Mechanized Reasoning}},
journal = "LNCS",
volume = "5625",
year = "2009",
abstract =
"OMRS (Open Mechanized Reasoning Systems) was designed for
Automated Theorem Proving and then extended to Computer
Algebra. These are the two domains at the heart of the Calculemus
approach. An obvious question is to assess whether such an
approach can be extended to new domains either within AI or
outside of AI. There have been several attempts to turn the world
into a computational system. This talk stays away from such
general attempts and introduces a framework that is fully set
within AI. It extends the basic concepts of OMRS to diverse fields
ranging from information technology to sociology through law as
illustrated by examples. The main motivation is to claim that
whatever the selected approach, Artificial Intelligence is gaining
enough strength and power to reach new frontiers and to turn
challenges that are not a priori of a purely computational nature
into AI domains.",
paper = "Calm09.pdf"
}
\end{chunk}
\index{Calude, Cristian S.}
\index{Muller, Christine}
\begin{chunk}{axiom.bib}
@article{Calu09,
author = "Calude, Cristian S. and Muller, Christine",
title = {{Formal Proof: Reconciling Correctness and Understanding}},
journal = "LNCS",
volume = "5625",
year = "2009",
abstract =
"Hilbert's concept of formal proof is an ideal of rigour for
mathematics which has important applications in mathematical
logic, but seems irrelevant for the practice of mathematics. The
advent, in the last twenty years, of proof assistants was followed
by an impressive record of deep mathematical theorems formally
proved. Formal proof is practically achievable. With formal proof,
correctness reaches a standard that no pen-and-paper proof can
match, but an essential component of mathematics -- the insight
and understanding -- seems to be in short supply. So, what makes a
proof understandable? To answer this question we first suggest a
list of symptoms of understanding. We then propose a vision of an
environment in which users can write and check formal proofs as
well as query them with reference to the symptoms of
understanding. In this way, the environment reconciles the main
features of proof: correctness and understanding.",
paper = "Calu09.pdf"
}
\end{chunk}
\index{Davenport, James H.}
\index{Kohlhase, Michael}
\begin{chunk}{axiom.bib}
@article{Dave09,
author = "Davenport, James H. and Kohlhase, Michael",
title = {{Unifying Math Ontologies: A Tale of Two Standards}},
journal = "LNCS",
volume = "5625",
year = "2009",
abstract =
"One of the fundamental and seemingly simple aims of mathematical
knowledge management (MKM) is to develop and standardize formats
that allow to ``represent the meaning of the objects of
mathematics''. The open formats OpenMath and MathML address this,
but differ subtly in syntax, rigor, and structural viewpoints
(notably over calculus). To avoid fragmentation and smooth out
interoperability obstacles, effort is under way to align them into
a joint format OpenMath/MathML 3. We illustrate the issues that
come up in such an alignment by looking at three main areas: bound
variables and conditions, calculus (which relates to the previous)
and ``lifted'' n-ary operators.",
paper = "Dave09.pdf"
}
\end{chunk}
\index{Heras, Jonathan}
\index{Pascual, Vico}
\index{Rubio, Julio}
\begin{chunk}{axiom.bib}
@article{Hera09,
author = "Heras, Jonathan and Pascual, Vico and Rubio, Julio",
title = {{Using Open Mathematical Documents to Interface Computer
Algebra and Proof Assistant Systems}},
journal = "LNCS",
volume = "5625",
year = "2009",
abstract =
"Mathematical Knowledge can be encoded by means of Open
Mathematical Documents (OMDoc) to interface both Computer Algebra
and Proof Assistant systems. In this paper, we show how a unique
OMDoc structure can be used to dynamically generate, both a
Graphical User Interface for a Computer Algebra system and a
script for a Proof Assistant. This generic approach has been made
concrete through a first prototype interfacing the Kenzo Computer
Algebra system with the ACL2 Theorem Prover, both based on the
Common Lisp programming language. An OMDoc repository has been
developed allowing the user to customize the application in an
easy way.",
paper = "Hera09.pdf"
}
\end{chunk}
\index{Rich, Albert D.}
\index{Jeffrey, David J.}
\begin{chunk}{axiom.bib}
@article{Rich09,
author = "Rich, Albert D. and Jeffrey, David J.",
title = {{A Knowledge Repository for Indefinite Integration
Based on Transformation Rules}},
journal = "LNCS",
volume = "5625",
year = "2009",
abstract = "
Taking the specific problem domain of indefinite integration, we
describe the ongoing development of a repository of mathematical
knowledge based on transformation rules. It is important that the
repository be not confused with a lookup table. The database of
transformation rules is at present encoded in Mathematica, but this is
only one convenient form of the repository, and it could be readily
translated into other formats. The principles upon which the set of
rules is compiled is described. One important principle is
minimality. The benefits of the approach are illustrated with
examples, and with the results of comparisons with other approaches.",
paper = "Rich09.pdf"
}
\end{chunk}
\index{Watt, Stephen M.}
\begin{chunk}{axiom.bib}
@article{Watt09,
author = "Watt, Stephen M.",
title = {{Algorithms for the Functional Decomposition of Laurent
Polynomials}},
journal = "LNCS",
volume = "5625",
year = "2009",
abstract =
"Recent work has detailed the conditions under which univariate
Laurent polynomials have functional decompositions. This paper
presents algorithms to compute such univariate Laurent polynomial
decompositions efficiently and gives their multivariate
generalizations.
One application of functional decomposition of Laurent polynomials
is the functional decomposition of so-called ``symbolic
polynomials''. These are polynomial-like objects whose exponents
are themselves integer-valued polynomials rather than
integers. The algebraic independence of $X$, $X^n$, $X^{n^2/2}$,
etc., and some elementary results on integer-valued polynomials
allow problems with symbolic polynomials to be reduced to problems
with multivariate Laurent polynomials. Hence we are interested in
the functional decomposition of these objects.",
paper = "Watt09.pdf"
}
\end{chunk}
\index{Aspinall, David}
\index{Denney, Ewen}
\index{Luth, Christoph}
\begin{chunk}{axiom.bib}
@article{Aspi08,
author = "Aspinall, David and Denney, Ewen and Luth, Christoph",
title = {{A Tactic Language for Hiproofs}},
journal = "LNCS",
volume = "5144",
year = "2008",
abstract =
"We introduce and study a tactic language, Hitac, for constructing
hierarchical proofs, known as hiproofs. The idea of hiproofs is to
superimpose a labelled hierarchical nesting on an ordinary proof
tree. The labels and nesting are used to describe the organisation
of the proof, typically relating to its construction process. This
can be useful for understanding and navigating the proof. Tactics
in our language construct hiproof structure together with an
underlying proof tree. We provide both a big-step and a small-step
operational semantics for evaluating tactic expressions. The
big-step semantics captures the intended meaning, whereas the
small-step semantics hints at possible implementations and
provides a unified notion of proof state. We prove that these
notions are equivalent and construct valid proofs.",
paper = "Aspi08.pdf"
}
\end{chunk}
\index{Backeljauw, Franky}
\index{Becuwe, Stefan}
\index{Cuyt, Annie}
\begin{chunk}{axiom.bib}
@article{Back08,
author = "Backeljauw, Franky and Becuwe, Stefan and Cuyt, Annie",
title = {{Validated Evaluation of Special Mathematical Functions}},
journal = "LNCS",
volume = "5144",
year = "2008",
abstract =
"Because of the importance of special functions, several books and
a large collection of papers have been devoted to the numerical
computation of these functions, the most well-known being the
Abramowitz and Stegun handbook. But up to this date, no
environment offers routines for the provable correct evaluation of
these special functions.
We point out how series and limit-periodic continued fraction
representation of the functions can be helpful in this
respect. Our scalable precision technique is mainly based on the
use of sharpened a priori truncation and roundoff error upper
bounds, in case of real arguments. The implementation is validated
in the sense that it returns a sharp interval enclosure for the
requested function evaluation, at the same cost as the evaluation.",
paper = "Back08.pdf"
}
\end{chunk}
\index{Bouche, Thierry}
\begin{chunk}{axiom.bib}
@article{Bouc08,
author = "Bouche, Thierry",
title = {{Digital Mathematics Libraries: The Good, the Bad, the Ugly}},
journal = "LNCS",
volume = "5144",
year = "2008",
abstract =
"The mathematicians' Digital mathematics library (DML), which is
not to be confused with libraries of mathematical objects
represented in some digital format, is the generous idea that all
mathematics ever published should end up in digital form so that
it would be more easily referenced, accessible, usable. This
concept was formulated at the very beginning of this century, and
yielded a lot of international activity that culminated around
years 2002--2005. While it is estimated that a substantial part of
the existing math literature is already available in some digital
format, nothing looking like one digital mathematics library has
emerged, but a multiplicity of competing electronic offers, with
unique standards, features, business models, access policies,
etc. -- even though the contents themselves overlap somewhat,
while leaving wide areas untouched. The millennium's appealing
idea has become a new Tower of Babel.
It is not obvious how much of the traditional library functions we
should give up while going digital. The point of view shared by
many mathematicians is that we should be able to find a reasonable
archiving policy fitting all stakeholders, allowing to translate
the essential features of the past library system -- which is the
central infrastructure of all math departments worldwide -- in the
digital paradigm, while enhancing overall performances thanks to
dedicated information technology.
The vision of this library is rather straightforward: a third
party to the academic publishing system, preserving, indexing, and
keeping current its digital collections through a distributed
network of partners curating the physical holdings, and a
centralized access facility making use of innovative mining and
interlinking techniques for easy navigation and discovery.
However, the fragmentation level is so high that the hope of a
unique portal providing seamless access to everything relevant to
mathematical research seems now completely out of reach.
Nevertheless, we have lessons to learn from each one of the
already numerous projects running. One of them is that there are
too many items to deal with, and too many different initial choices
over metadata sets and formats: it won't be possible to find a
non-trivial greatest common divisor coping with everything already
available, and manual upgrading is highly improbable.
This is where future management techniques for loosely formalised
mathematical knowledge could provide a new impetus by at last
enabling a minimum set of features across projects borders through
automated procedures. We can imagine e.g. math-aware OCR on
scanned pages, concurrently with interpreters of electronic
sources of born-digital texts, both producing searchable full
texts in a compatible semi-structured format. The challenge is
ultimately to take advantage of the high formalisation of
mathematical texts rather than merely ignoring it!
With these considerations in mind, the talk will focus on
achievements, limitations, and failures of existing digital
mathematics libraries, taking the NUMDAM and CEDRAM programs as
principal examples, hence the speaker himself is the target.",
paper = "Bouc08.pdf"
}
\end{chunk}
\index{Dominguez, Cesar}
\begin{chunk}{axiom.bib}
@article{Domi08,
author = "Dominguez, Cesar",
title = {{Formalizing in Coq Hidden Algebras to Specify Symbolic
Computation Systems}},
journal = "LNCS",
volume = "5144",
year = "2008",
abstract =
"This work is an attempt to formalize, using the Coq proof
assistant, the algebraic specification of the data structures
appearing in two symbolic computation systems for algebraic
topology called EAT and Kenzo. The specification of these
structures have been obtained through an operation, called imp
operation, between different specification frameworks as standard
algebraic specifications and hidden specifications. Reusing
previous Coq implementations of universal algebra and category
theory we have proposed a Coq formalization of the imp operation,
extending the representation to the particular hidden algebras
which take part in this operation.",
paper = "Domi08.pdf"
}
\end{chunk}
\index{Kohlhase, Michael}
\index{Muller, Christine}
\index{Rabe, Florian}
\begin{chunk}{axiom.bib}
@article{Kohl08a,
author = "Kohlhase, Michael and Muller, Christine and Rabe, Florian",
title = {{Notations for Living Mathematical Documents}},
journal = "LNCS",
volume = "5144",
year = "2008",
abstract =
"Notations are central for understanding mathematical
discourse. Readers would like to read notations that transport the
meaning well and prefer notations that are familiar to
them. Therefore, authors optimize the choice of notations with
respect to these two criteria, while at the same time trying to
remain consistent over the document and their own prior
publications. In print media where notations are fixed at
publication time, this is an overconstrained problem. In living
documents notations can be adapted at reading time, taking reader
preferences into account.
We present a representational infrastructure for notations in
living mathematical documents. Mathematical notations can be
defined declaratively. Author and reader can extensionally define
the set of available notation definitions at arbitrary document
levels, and they can guide the notation selection function via
intensional annotations.
We give an abstract specification of notation definitions and the
flexible rendering algorithms and show their coverage on
paradigmatic examples. We show how to use this framework to render
OpenMath and Content-MathML to Presentation-MathML, but the
approach extends to arbitrary content and presentation formats. We
discuss prototypical implementations of all aspects of the
rendering pipeline.",
paper = "Kohl08a.pdf"
}
\end{chunk}
\index{Stratford, Jonathan}
\index{Davenport, James H.}
\begin{chunk}{axiom.bib}
@article{Stra08,
author = "Stratford, Jonathan and Davenport, James H.",
title = {{Unit Knowledge Management}},
journal = "LNCS",
volume = "5144",
year = "2008",
abstract =
"In 9, various observations on the handling of (physical) units in
OpenMath were made. In this paper, we update those observations,
and make some comments based on a working unit converter that,
because of its OpenMath-based design, is modular, extensible, and
reflective. We also note that some of the issues in an effective
converter, such as the rules governing abbreviations, being more
linguistic than mathematical, do not lend themselves to easy
expression in OpenMath.",
paper = "Stra08.pdf"
}
\end{chunk}
\index{Andres, Mirian}
\index{Lamban, Laureano}
\index{Rubio, Julio}
\begin{chunk}{axiom.bib}
@article{Andr07,
author = "Andres, Mirian and Lamban, Laureano and Rubio, Julio",
title = {{Executing in Common Lisp, Proving in ACL2}},
journal = "LNCS",
volume = "4573",
year = "2007",
abstract =
"In this paper, an approach to integrate an already-written Common
Lisp program for algebraic manipulation with ACL2 proofs of
properties of that program is presented. We report on a particular
property called ``cancellation theorem'', which has been proved in
ACL2, and could be applied to several problems in the field of
Computational Algebraic Topology.",
paper = "Andr07.pdf"
}
\end{chunk}
\index{Davenport, James H.}
\begin{chunk}{axiom.bib}
@article{Dave07a,
author = "Davenport, James H.",
title = {{What Might ``Understand a Function'' Mean?}},
journal = "LNCS",
volume = "4573",
year = "2007",
abstract =
"Many functions in classical mathematics are largely defined in
terms of their derivatives, so Bessel's function is ``the''
solution of Bessel's equation, etc. For definiteness, we need to
add other properties, such as initial values, branch cuts,
etc. What actually makes up ``the definition'' of a function in
computer algebra? The answer turns out to be a combination of
arithmetic and analytic properties.",
paper = "Dave07a.pdf"
}
\end{chunk}
\index{Asperti, Andrea}
\index{Geuvers, Herman}
\index{Loeb, Iris}
\index{Mamane, Lionel Elie}
\index{Coen, Claudio Sacerdoti}
\begin{chunk}{axiom.bib}
@article{Aspe06,
author = "Asperti, Andrea and Geuvers, Herman and Loeb, Iris and
Mamane, Lionel Elie and Coen, Claudio Sacerdoti",
title = {{An Interactive Algebra Course with Formalised Proofs and
Definitions}},
journal = "LNCS",
volume = "4108",
year = "2006",
abstract =
"We describe a case-study of the application of web technology to
create web-based didactic material out of a repository of formal
mathematics, using the structure of an existing course. The paper
discusses the difficulties related to associating notation to a
formula, the embedding of formal notions into a document (the
``view''), and the rendering of proofs.",
paper = "Aspe06.pdf"
}
\end{chunk}
\index{Cairns, Paul}
\index{Gow, Jeremy}
\begin{chunk}{axiom.bib}
@article{Cair06,
author = "Cairns, Paul and Gow, Jeremy",
title = {{Literate Proving: Presenting and Documenting Formal Proofs}},
journal = "LNCS",
volume = "4108",
year = "2006",
abstract =
"Literate proving is the analogue for literate programming in the
mathematical realm. That is, the goal of literate proving is for
humans to produce clear expositions of formal mathematics that
could even be enjoyable for people to read whilst remaining
faithful representations of the actual proofs. This paper
describes maze, a generic literate proving system. Authors markup
formal proof files, such as Mizar files, with arbitrary XML and
use maze to obtain the selected extracts and transform them for
presentation, e.g. as Latex. To aid its use, maze has built in
transformations that include pretty printing and proof sketching
for inclusion in latex documents. These transformations challenge
the concept of faithfulness in literate proving but it is argued
that this should be a distinguishing feature of literate proving
from literate programming.",
paper = "Cair06.pdf"
}
\end{chunk}
\index{Raja, Amar}
\index{Rayner, Matthew}
\index{Sexton, Alan}
\index{Sorge, Volker}
\begin{chunk}{axiom.bib}
@article{Raja06,
author = "Raja, Amar and Rayner, Matthew and Sexton, Alan and
Sorge, Volker",
title = {{Towards a Parser for Mathematical Formula Recognition}},
journal = "LNCS",
volume = "4108",
year = "2006",
abstract =
"For the transfer of mathematical knowledge from paper to
electronic form, the reliable automatic analysis and understanding
of mathematical texts is crucial. A robust system for this task
needs to combine low level character recognition with higher level
structural analysis of mathematical formulas. We present progress
towards this goal by extending a database-driven optical character
recognition system for mathematics with two high level analysis
features. One extends and enhances the traditional approach of
projection profile cutting. The second aims at integrating the
recognition process with graph grammar rewriting by giving support
to the interactive construction and validation of grammar
rules. Both approaches can be successfully employed to enhance the
capabilities of our system to recognise and reconstruct compound
mathematical expressions.",
paper = "Raja06.pdf"
}
\end{chunk}
\index{Adams, Andrew A.}
\index{Davenport, James H.}
\begin{chunk}{axiom.bib}
@article{Adam04,
author = "Adams, Andrew A. and Davenport, James H.",
title = {{Copyright Issues for MKM}},
journal = "LNCS",
volume = "3119",
year = "2004",
abstract =
"We present an overview of the current situation and recent and
expected future developments in areas of copyright law and
economics relevant to Mathematical Knowledge Management.",
paper = "Adam04.pdf"
}
\end{chunk}
\index{Carlisle, David}
\index{Dewar, Mike}
\begin{chunk}{axiom.bib}
@article{Carl03,
author = "Carlisle, David and Dewar, Mike",
title = {{NAG Library Documentation}},
journal = "LNCS",
volume = "2594",
year = "2003",
abstract =
"This paper describes the management and evolution of a large
collection of 1200 documents detailing the functionality in NAG
Library products.
This provides a case study addressing many of the issues which
concern the ``MKM'' project, involving conversion of legacy
formats (SGML and Latex) to XML, and inferring semantic content
from mainly presentational mathematical expressions.",
paper = "Carl03.pdf"
}
\end{chunk}
\index{Davenport, James H.}
\begin{chunk}{axiom.bib}
@article{Dave03,
author = "Davenport, James H.",
title = {{MKM from Book to Computer: A Case Study}},
journal = "LNCS",
volume = "2594",
year = "2003",
abstract =
"[2] is one of the great mathematical knowledge
repositories. Nevertheless, it was written for a different era,
and for human readership. In this paper, we describe the sorts of
knowledge in one chapter (elementary transcendental functions) and
the difficulties in making this sort of knowledge formal. This
makes us ask questions about the nature of a Mathematical
Knowledge Repository, and whether a database is enough, or whether
more ``intelligence'' is required.",
paper = "Dave03.pdf"
}
\end{chunk}
\index{Wiedijk, Freek}
\begin{chunk}{axiom.bib}
@article{Wied03b,
author = "Wiedijk, Freek",
title = {{Comparing Mathematical Provers}},
journal = "LNCS",
volume = "2594",
year = "2003",
abstract =
"We compare fifteen systems for the formalization of mathematics
with a computer. We present several tables that list various
properties of these programs. The three main dimensions on which
we compare these systems are: the size of their library, the
strength of their logic and their level of automation.",
paper = "Wied03b.pdf"
}
\end{chunk}
\index{Deplagne, Eric}
\index{Kirchner, Claude}
\begin{chunk}{axiom.bib}
@article{Depl02,
author = "Deplagne, Eric and Kirchner, Claude",
title = {{Deduction versus Computation: The Case of Induction}},
journal = "LNCS",
volume = "2385",
year = "2002",
abstract =
"The fundamental difference and the essential complementarity
between computation and deduction are central in computer algebra,
automated deduction, proof assistants and in frameworks making
them cooperating. In this work we show that the fundamental proof
method of induction can be understood and implemented as either
computation or deduction.
Inductive proofs can be built either explicitly by making use of
an induction principle or implicitly by using the so-called
induction by rewriting and inductionless induction methods. When
mechanizing proof construction, explicit induction is used in
proof assistants and implicit induction is used in rewrite based
automated theorem provers. The two approaches are clearly
complementary but up to now there was no framework able to
encompass and to understand uniformly the two methods. In this
work, we propose such an approach based on the general notion of
deduction modulo. We extend slightly the original version of the
deduction modulo framework and we provide modularity properties
for it. We show how this applies to a uniform understanding of the
so-called induction by rewriting method and how this relates
directly to the general use of the induction principle.",
paper = "Depl02.pdf"
}
\end{chunk}
\index{Meier, Andreas}
\index{Sorge, Volker}
\index{Colton, Simon}
\begin{chunk}{axiom.bib}
@article{Meie02,
author = "Meier, Andreas and Sorge, Volker and Colton, Simon",
title = {{Employing Theory Formation to Guide Proof Planning}},
journal = "LNCS",
volume = "2385",
year = "2002",
abstract =
"The invention of suitable concepts to characterise mathematical
structures is one of the most challenging tasks for both human
mathematicians and automated theorem provers alike. We present an
approach where automatic concept formation is used to guide
non-isomorphism proofs in the residue class domain. The main idea
behind the proof is to automatically identify discriminants for
two given structures to show that they are not
isomorphic. Suitable discriminants are generated by a theory
formation system; the overall proof is constructed by a proof
planner with the additional support of traditional automated
theorem provers and a computer algebra system.",
paper = "Meie02.pdf"
}
\end{chunk}
\index{Sturm, Thomas}
\begin{chunk}{axiom.bib}
@article{Stur02,
author = "Sturm, Thomas",
title = {{Integration of Quantifier Elimination with Constraint
Logic Programming}},
journal = "LNCS",
volume = "2385",
year = "2002",
abstract =
"We examine the potential of an extension of constraint logic
programming, where the admissible constraints are arbitrary
first-order formulas over some domain. Constraint solving is
realized by effective quantifier elimination. The arithmetic is
always exact. We describe the conceptual advantages of our approach
and the capabilities of the current implementation
CLP(RL). Supported domains are $\mathbb{R}$, $\mathbb{C}$, and
$\mathbb{Q}_p$. For our discussion here we restrict to $\mathbb{R}$.",
paper = "Stur02.pdf"
}
\end{chunk}
\index{Rabe, Florian}
\begin{chunk}{axiom.bib}
@article{Rabe12,
author = "Rabe, Florian",
title = {{A Query Language for Formal Mathematical Libraries}},
journal = "LNCS",
volume = "7362",
year = "2012",
abstract =
"One of the most promising applications of mathematical knowledge
management is search: Even if we restrict attention to the tiny
fragment of mathematics that has been formalized, the amount
exceeds the comprehension of an individual human.
Based on the generic representation language MMT, we introduce the
mathematical query language QMT: It combines simplicity,
expressivity, and scalability while avoiding a commitment to a
particular logical formalism. QMT can integrate various search
paradigms such as unification, semantic web, or XQuery style
queries, and QMT queries can span different mathematical
libraries.
We have implemented QMT as a part of the MMT API. This combination
provides a scalable indexing and query engine that can be readily
applied to any library of mathematical knowledge. While our focus
here is on libraries that are available in a content markup
language, QMT naturally extends to presentation and narration
markup languages.",
paper = "Rabe12.pdf"
}
\end{chunk}
\index{Amin, Nada}
\index{Rompf, Tiark}
\begin{chunk}{axiom.bib}
@inproceedings{Amin18,
author = "Amin, Nada and Rompf, Tiark",
title = {{Collapsing Towers of Interpreters}},
booktitle = "Principles of Programming Languages",
year = "2018",
publisher = "ACM",
abstract =
"Given a tower of interpreters, i.e., a sequence of multiple
interpreters interpreting one another as input programs, we aim to
collapse this tower into a compiler that removes all interpretive
overhead and runs in a single pass. In the real world, a use case
might be Python code executed by an x86 runtime, on a CPU emulated
in a JavaScript VM, running on an ARM CPU. Collapsing such a tower
can not only exponentially improve runtime performance, but also
enable the use of base language tools for interpreted programs,
e.g. for analysis and verification. In this paper, we lay the
foundations in an idealized but realistic setting.
We present a multi-level lambda calculus that features staging
constructs and stage polymorphism: based on runtime parameters, an
evaluator either executes source code (thereby acting as an
interpreter) or generates code (thereby acting as a compiler). We
identify stage polymorphism, a programming model from the domain
of high-performance program generators, as the key mechanism to
make such interpreters compose in a collapsible way.
We present Pink, a meta-circular Lisp-like evaluator on top of
this calculus, and demonstrate that we can collapse arbitrarily
many levels of self-interpretation, including levels with
semantics modifications. We discuss several examples: compiling
regular expressions through an interpreter to base code, building
program transformers from modified interpreters, and others. We
develop these ideas further to include reflection and reification,
culminating in Purple, a reflective language inspired by Brown,
Blond, and Black, which realizes a conceptually infinite tower,
where every aspect of the semantics can change
dynamically. Addressing an open challenge, we show how user
programs can be compiled and recompiled under user-modified
semantics.",
paper = "Amin18.pdf",
keywords = "printed"
}
\end{chunk}
\index{Ritt, Joseph Fels}
\begin{chunk}{axiom.bib}
@book{Ritt48,
author = "Ritt, Joseph Fels",
title = {{Integration in Finite Terms}},
publisher = "Columbia University Press, New York",
year = "1948"
}
\end{chunk}
\index{Conrad, Brian}
\begin{chunk}{axiom.bib}
@misc{Conr05,
author = "Conrad, Brian",
title = {{Impossibility Theorems for Elementary Integration}},
year = "2005",
link =
"\url{http://www2.maths.ox.ac.uk/cmi/library/academy/LectureNotes05/Conrad.pdf}",
abstract =
"Liouville proved that certain integrals, most famously
$\int{e^{-x^2}}~dx$, cannot be expressed in elementary terms. We
explain how to give precise meaning to the notion of integration
``in elementary terms'', and we formulate Liouville's theorem
that characterizes the possible form of elementary
antiderivatives. Using this theorem, we deduce a practical
criterion for proving such impossibility results in special cases.
This criterion is illustrated for the Gaussian integral
$\int{e^{-x^2}}~dx$ from probability theory, the logarithmic
integral $\int{dt/\log(t)}$ from the study of primes, and
elliptic integrals. Our exposition is aimed at students who are
familiar with calculus and elementary abstract algebra (at the
level of polynomial rings $F(t)$ over a field $F$).",
paper = "Conr05.pdf",
keywords = "printed"
}
\end{chunk}
\index{Kiss, Csongor}
\index{Eisenbach, Susan}
\index{Field, Tony}
\index{Jones, Simon Peyton}
\begin{chunk}{axiom.bib}
@inproceedings{Kiss18,
author = "Kiss, Csongor and Eisenbach, Susan and Field, Tony and
Jones, Simon Peyton",
title = {{Higher-order Type-level Programming in Haskell}},
booktitle = "Proc. ACM Programming Languages",
year = "2018",
publisher = "ACM",
abstract =
"Type family applications in Haskell must be fully saturated. This
means that all type-level functions have to be first-order,
leading to code that is both messy and long-winded. In this paper
we detail an extension to GHC that removes this restriction. We
augment Haskell's existing type arrow, $\rightarrow$, with an
unmatchable arrow $\twoheadrightarrow$, that supports partial
application of type families without compromising soundness. A
soundness proof is provided. We show how the techniques described
can lead to substantial code-size reduction (circa 80\%) in the
type-level logic of commonly-used type-level libraries whilst
simultaneously improving code quality and readability.",
paper = "Kiss18.pdf"
}
\end{chunk}
\index{Ostebee, Arnold}
\index{Zorn, Paul}
\begin{chunk}{axiom.bib}
@article{Oste93,
author = "Ostebee, Arnold and Zorn, Paul",
title = {{Telegraphic Reviews}},
journal = "The American Mathematical Monthly",
volume = "100",
number = "8",
pages = "812--817",
year = "1993",
paper = "Oste93.pdf",
keywords = "axiomref"
}
\end{chunk}
\index{Davenport, James}
\begin{chunk}{axiom.bib}
@misc{Davexxa,
  author   = {Davenport, James},
  title    = {{Integration in Finite Terms}},
  year     = {unknown},
  paper    = {Davexxa.pdf},
  keywords = {printed}
}
\end{chunk}
\index{Jenks, Richard D.}
\begin{chunk}{axiom.bib}
@article{Jenk75,
author = "Jenks, Richard D.",
title = {{Course Outline: Yale University, New Haven}},
journal = "SIGSAM Bulletin",
volume = "9",
number = "3",
pages = "9--10",
publisher = "ACM",
year = "1975",
paper = "Jenk75.pdf",
keywords = "axiomref, printed, DONE"
}
\end{chunk}
\index{Jenks, Richard D.}
\begin{chunk}{axiom.bib}
@inproceedings{Jenk71a,
author = "Jenks, Richard D.",
title = {{META/LISP and META/PLUS: Tools for Rapidly Implementing
extendable language translators}},
booktitle = "Proc. 2nd ACM Symposium on Symbolic and Algebraic
Manipulation",
publisher = "ACM",
pages = "281",
year = "1971",
abstract =
"A unique feature of the SCRATCHPAD system for symbolic
manipulation is its powerful translation facilities. The essential
components are META/LISP, a translator writing system, and
META/PLUS, a facility for immediately extending the syntax of any
translator produced through META/LISP. This talk will illustrate
how these facilities may be used to produce a conversational
higher-level LISP system. The language chosen is called ALPL
because of its similarity to APL and LPL, a language resident in
the SCRATCHPAD system. The essential characteristics of ALPL are
described by eight syntax rules together with brief examples of
corresponding ALPL and LISP programs. It is shown how a META/LISP
program may be easily written to produce a conversational ALPL
system. The ALPL language is then incrementally extended by
introducing new notations defined in terms of existing ALPL
constructs through calls to META/PLUS.",
keywords = "axiomref"
}
\end{chunk}
\index{Davenport, James}
\index{Padget, Julian}
\begin{chunk}{axiom.bib}
@article{Dave85c,
  author    = {Davenport, James and Padget, Julian},
  title     = {{HEUGCD: How Elementary Upperbounds Generate Cheaper Data}},
  journal   = {LNCS},
  volume    = {204},
  year      = {1985},
  booktitle = {EUROCAL '85 European Conference on Computer Algebra},
  publisher = {Springer},
  abstract  =
{The work presented in this paper is a direct consequence of the
ideas set forth by Char et al (1984b) describing a new technique
for computing the greatest common divisor of polynomials.},
  paper     = {Dave85c.pdf}
}
\end{chunk}
\index{Char, Bruce W.}
\index{Geddes, Keith O.}
\index{Gonnet, Gaston H.}
\begin{chunk}{axiom.bib}
@article{Char84,
author = "Char, Bruce W. and Geddes, Keith O. and Gonnet, Gaston H.",
title = {{GCDHEU: Heuristic polynomial GCD algorithm based on Integer
GCD computation}},
journal = "LNCS",
volume = "174",
pages = "285--296",
year = "1984",
abstract =
"The design of algorithms for polynomial GCD computation has been
a continuing area of research since the beginning of the
development of symbolic computation systems. The earliest efforts
were mainly directed at PRS (Polynomial Remainder Sequence)
algorithms which are a direct generalization of Euclid's
algorithm. The main algorithms of this type are the Reduced PRS
algorithm and the Subresultant PRS algorithm. Hearn discusses the
use of trial divisions to further improve the performance of PRS
algorithms. The first fundamentally different polynomial GCD
algorithm was the modular algorithm. To make the modular algorithm
competitive for sparse multivariate polynomials, Zippel developed
the sparse modular algorithm. Another modular-type algorithm was
the Hensel-based EZ GCD algorithm which was later improved as the
EEZ GCD algorithm.
The present paper discusses a new heuristic algorithm, GCDHEU,
which is found to be very efficient for problems in a small number
of variables. The heuristic algorithm can be viewed as
a modular-type algorithm in that it uses evaluation and
interpolation, but only a single evaluation per variable is
used. The heuristic algorithm can be incorporated into a
reorganized form of the EEZ GCD algorithm such that the base of
the EEZ GCD algorithm, rather than a univariate GCD algorithm, is
GCDHEU which is often successful for problems in up to four variables.",
paper = "Char84.pdf",
keywords = "printed"
}
\end{chunk}
\index{Gianni, Patrizia}
\index{Trager, Barry}
\begin{chunk}{axiom.bib}
@article{Gian85a,
author = "Gianni, Patrizia and Trager, Barry",
title = {{GCD's and Factoring Multivariate Polynomials using
Grobner Bases}},
journal = "LNCS",
volume = "204",
year = "1985",
booktitle = "EUROCAL '85 European Conference on Computer Algebra",
publisher = "Springer",
abstract =
"This paper shows how Grobner basis computations can be used to
compute multivariate gcds, perform Hensel lifting, and reduce
multivariate factorization to univariate. The essential idea is to
produce an ideal containing the desired polynomial as an element
of least degree. The construction is somewhat analogous to the
recent lattice algorithms for polynomial factorization. A major
difference is that we don't need to perform the Hensel lifting as
a separate step; one Grobner basis computation is sufficient to
find the desired multivariate factor or gcd. We produce algorithms
which are very simple and may be of use on small systems where
code size is critical. We feel that these results demonstrate the
fundamental importance of the Grobner basis in computer algebra.",
paper = "Gian85a.pdf"
}
\end{chunk}
\index{Griesmer, J.H.}
\index{Jenks, R.D.}
\index{Yun, D.Y.Y.}
\begin{chunk}{axiom.bib}
@article{Grie78a,
author = "Griesmer, J.H. and Jenks, R.D. and Yun, D.Y.Y.",
title = {{A Taxonomy for Algebraic Computation}},
journal = "ACM SIGSAM Bulletin",
volume = "12",
number = "3",
pages = "25--28",
year = "1978",
abstract =
"Recently the authors responded to a request from Professor
Anthony Ralston of the State University of New York at Buffalo to
participate in the review of a proposed Taxonomy of Computer
Science and Engineering.",
paper = "Grie78a.pdf"
}
\end{chunk}
\index{Davenport, J.H.}
\begin{chunk}{axiom.bib}
@article{Dave79,
author = "Davenport, J.H.",
title = {{The Computerisation of Algebraic Geometry}},
journal = "LNCS",
volume = "72",
pages = "119--133",
year = "1979",
abstract =
"This paper is concerned with the problems of performing computer
algebra when the variables involved are related by some algebraic
dependencies. It is shown that heuristic or ad hoc treatment of
such cases leads rapidly to problems, and the proper mathematical
foundations for the treatment of algebraic functions is
presented. The formalism leads directly to the requirement for
algorithms to find the genus of an algebraic curve, and to
discover what function, if any, is associated with a given
divisor. These algorithms and the relevant computational
techniques are briefly described. In a concluding section the
areas where these techniques are required in an integration scheme
for algebraic functions are explained.",
paper = "Dave79.pdf"
}
\end{chunk}
\index{Trager, Barry}
\begin{chunk}{axiom.bib}
@article{Trag79,
author = "Trager, Barry",
title = {{Integration of Simple Radical Extensions}},
journal = "LNCS",
volume = "72",
pages = "408--414",
year = "1979",
abstract =
"Risch's landmark paper presented the first decision procedure for
the integration of elementary functions. In that paper he required
that the functions appearing in the integrand be algebraically
independent. Shortly afterwards in [Risalg] and [Ris70] he relaxed
that restriction and outlined a complete decision procedure for
the integration of elementary functions in finite
terms. Unfortunately his algorithms for dealing with algebraic
functions required considerably more complex machinery than his
earlier ones for purely transcendental functions. Moses'
implementation of the earlier approach in MACSYMA demonstrated its
practicality, whereas the same has yet to be done for Risch's more
recent approach.
This paper will show how Risch's earlier techniques can be
generalized to deal with unnested radicals. While this may seem a
severe restriction, perusing an integral table such as [Bois61] will
show that fewer than 1\% of the problems are excluded.",
paper = "Trag79.pdf"
}
\end{chunk}
\index{Moses, Joel}
\index{Zippel, Richard}
\begin{chunk}{axiom.bib}
@article{Mose79,
author = "Moses, Joel and Zippel, Richard",
title = {{Algorithms for the Integration of Algebraic Functions}},
journal = "LNCS",
volume = "72",
pages = "426--430",
year = "1979",
paper = "Mose79.pdf"
}
\end{chunk}
\index{Avigad, Jeremy}
\begin{chunk}{axiom.bib}
@misc{Avig19,
author = "Avigad, Jeremy",
title = {{The Mechanization of Mathematics}},
year = "2019",
comment = "The Big Proof Workshop",
paper = "Avig19.pdf",
keywords = "DONE"
}
\end{chunk}
\index{Mosses, Peter}
\begin{chunk}{axiom.bib}
@article{Moss80,
author = "Mosses, Peter",
title = {{A Constructive Approach to Compiler Correctness}},
journal = "LNCS",
volume = "85",
year = "1980",
booktitle = "Automata, Languages and Programming",
publisher = "Springer",
abstract =
"It is suggested that denotational semantics definitions of
programming languages should be based on a small number of
abstract data types, each embodying a fundamental concept of
computation. Once these fundamental abstract data types have been
implemented in a particular target language (e.g. stack-machine
code), it is a simple matter to construct a correct compiler for
any source language from its denotational semantic definition. The
approach is illustrated by constructing a compiler similar to the
one which was proved correct by Thatcher, Wagner \& Wright
(1979). Some familiarity with many-sorted algebras is presumed.",
paper = "Moss80.pdf",
keywords = "printed"
}
\end{chunk}
\index{Ehrig, Hartmut}
\index{Kreowski, Hans-Jorg}
\index{Thatcher, James}
\index{Wagner, Eric}
\index{Wright, Jesse}
\begin{chunk}{axiom.bib}
@article{Ehri80a,
author = "Ehrig, Hartmut and Kreowski, Hans-Jorg and Thatcher, James
and Wagner, Eric and Wright, Jesse",
title = {{Parameterized Data Types in Algebraic Specification Languages}},
journal = "LNCS",
volume = "85",
year = "1980",
booktitle = "Automata, Languages and Programming",
publisher = "Springer",
paper = "Ehri80a.pdf",
keywords = "printed"
}
\end{chunk}
\index{Liskov, Barbara}
\index{Zilles, Stephen}
\begin{chunk}{axiom.bib}
@article{Lisk77a,
author = "Liskov, Barbara and Zilles, Stephen",
title = {{Programming with Abstract Data Types}},
journal = "SIGPLAN Notices",
volume = "9",
number = "4",
pages = "50--59",
year = "1977",
abstract =
"The motivation behind the work in very-high-level languages is to
ease the programming task by providing the programmer with a
language containing primitives or abstractions suitable to his
problem area. The programmer is then able to spend his effort in
the right place; he concentrates on solving his problem, and the
resulting program will be more reliable as a result. Clearly, this
is a worthwhile goal.
Unfortunately, it is very difficult for a designer to select in
advance all the abstractions which the users of his language might
need. If a language is to be used at all, it is likely to be used
to solve problems which its designer did not envision, and for
which the abstractions embedded in the language are not sufficient.
This paper presents an approach which allows the set of built-in
abstractions to be augmented when the need for a new data
abstraction is discovered. This approach to the handling of
abstraction is an outgrowth of work on designing a language for
structured programming. Relevant aspects of this language are
described, and examples of the use and definitions of abstraction
are given.",
paper = "Lisk77a.pdf"
}
\end{chunk}
\index{Black, A.P.}
\begin{chunk}{axiom.bib}
@techreport{Blac80,
  author      = {Black, A.P.},
  title       = {{Exception Handling and Data Abstraction}},
  type        = {Research Report},
  institution = {IBM Research},
  number      = {RC8059},
  year        = {1980}
}
\end{chunk}
\index{Goguen, J.A.}
\index{Thatcher, J.W.}
\index{Wagner, E.G.}
\index{Wright, J.B.}
\begin{chunk}{axiom.bib}
@techreport{Gogu76,
  author      = {Goguen, J.A. and Thatcher, J.W. and Wagner, E.G. and
                 Wright, J.B.},
  title       = {{An Initial Algebra Approach to the Specification,
                  Correctness and Implementation of Abstract Data Types}},
  type        = {Research Report},
  institution = {IBM Research},
  number      = {RC6487},
  year        = {1976}
}
\end{chunk}
\index{Donahue, J.}
\begin{chunk}{axiom.bib}
@misc{Dona77,
author = "Donahue, J.",
title = {{On the semantics of ``Data Type''}},
comment = "Cornell University",
year = "1977"
}
\end{chunk}
\index{Ershov, A.P.}
\begin{chunk}{axiom.bib}
@misc{Ersh77,
  author  = {Ershov, A.P.},
  title   = {{On the Essence of Compilation}},
  comment = {Proc. IFIP Working Conf. on Formal Description of
             Programming Concepts, Vol. 1},
  year    = {1977}
}
\end{chunk}
\index{Hearn, Anthony C.}
\begin{chunk}{axiom.bib}
@book{Hear73,
author = "Hearn, Anthony C.",
title = {{REDUCE 2 Users Manual}},
comment = "Computing Physics Group",
publisher = "University of Utah",
year = "1973"
}
\end{chunk}
\index{Hearn, Anthony C.}
\begin{chunk}{axiom.bib}
@article{Hear71,
author = "Hearn, Anthony C.",
title = {{Applications of Symbol Manipulation in Theoretical Physics}},
journal = "Communications of the ACM",
volume = "14",
number = "8",
pages = "511--516",
year = "1971",
paper = "Hear71.pdf"
}
\end{chunk}
\index{Hearn, Anthony C.}
\begin{chunk}{axiom.bib}
@article{Hear72,
author = "Hearn, Anthony C.",
title = {{An Improved Non-Modular Polynomial GCD Algorithm}},
journal = "ACM SIGSAM Bulletin",
volume = "23",
pages = "10--15",
year = "1972",
abstract =
"An improved non-modular algorithm for the calculation of the
greatest common divisor of two multivariate polynomials is
presented.",
paper = "Hear72.pdf"
}
\end{chunk}
\index{Campbell, J.A.}
\index{Hearn, Anthony C.}
\begin{chunk}{axiom.bib}
@article{Hear70,
author = "Campbell, J.A. and Hearn, Anthony C.",
title = {{Symbolic Analysis of Feynman Diagrams by Computer}},
journal = "J. of Computational Physics",
volume = "5",
number = "2",
pages = "280--327",
year = "1970",
abstract =
"We describe a system of programs in the language LISP 1.5 which
handles all stages of calculation from the specification of an
elementary-particle process in terms of a Hamiltonian of
interaction or Feynman diagrams to the derivation of an absolute
square of the matrix element for the process. Examples of
significant parts of the program are presented in the text, while
a detailed listing of this material is contained in two Appendices
which are available on request from the authors.",
paper = "Camp70.pdf"
}
\end{chunk}
\index{Campbell, J.A.}
\index{Hearn, Anthony C.}
\begin{chunk}{axiom.bib}
@article{Camp70,
author = "Campbell, J.A. and Hearn, Anthony C.",
title = {{Symbolic Analysis of Feynman Diagrams by Computer}},
journal = "J. of Computational Physics",
volume = "5",
number = "2",
pages = "280--327",
year = "1970",
abstract =
"We describe a system of programs in the language LISP 1.5 which
handles all stages of calculation from the specification of an
elementary-particle process in terms of a Hamiltonian of
interaction or Feynman diagrams to the derivation of an absolute
square of the matrix element for the process. Examples of
significant parts of the program are presented in the text, while
a detailed listing of this material is contained in two Appendices
which are available on request from the authors.",
paper = "Camp70.pdf"
}
\end{chunk}
\index{Henderson, Peter}
\index{Morris Jr., James H.}
\begin{chunk}{axiom.bib}
@inproceedings{Hend76,
author = "Henderson, Peter and Morris Jr., James H.",
title = {{A Lazy Evaluator}},
booktitle = "3rd Symp. on Principles of Programming Languages",
publisher = "ACM",
pages = "95--103",
year = "1976",
abstract =
"A different way to execute pure LISP programs is presented. It
delays the evaluation of parameters and list structures without
ever having to perform more evaluation steps than the usual
method. Although the central idea can be found in earlier work
this paper is of interest since it treats a rather well-known
language and works out an algorithm which avoids full
substitution. A partial correctness proof using Scott-Strachey
semantics is sketched in a later section.",
paper = "Hend76.pdf",
keywords = "printed"
}
\end{chunk}
\index{Jensen, Kathleen}
\index{Wirth, Niklaus}
\begin{chunk}{axiom.bib}
@book{Jens75,
author = "Jensen, Kathleen and Wirth, Niklaus",
title = {{PASCAL User Manual and Report}},
publisher = "Springer-Verlag",
year = "1975",
isbn = "0-387-90144-2",
keywords = "owned"
}
\end{chunk}
\begin{chunk}{axiom.bib}
@book{IBMx78,
author = "IBM",
title = {{LISP/370 Program Description / Operations Manual}},
publisher = "IBM Research",
year = "1978",
comment = "SH20-2076-0"
}
\end{chunk}
\index{Morris Jr., J.H.}
\begin{chunk}{axiom.bib}
@inproceedings{Morr73,
author = "Morris Jr., J.H.",
title = {{Types are not Sets}},
booktitle = "Symp. on the Principles of Programming Languages",
publisher = "ACM",
pages = "120--124",
year = "1973"
}
\end{chunk}

books/bookvol4.pamphlet  892 ++++++++
books/bookvolbib.pamphlet  4520 ++++++++++++++++++++++++++++++++++++++
changelog  3 +
patch  559 +++++
src/axiomwebsite/patches.html  2 +
5 files changed, 5859 insertions(+), 117 deletions()
diff git a/books/bookvol4.pamphlet b/books/bookvol4.pamphlet
index b4a1fe3..2c5ad61 100644
 a/books/bookvol4.pamphlet
+++ b/books/bookvol4.pamphlet
@@ 14,6 +14,898 @@ Confronting every new programmer learning a new language are
 Daniel Higginbotham in Clojure for the Brave and True
\end{quote}
+\section{MODLISP by James Davenport}
+This is based on a paper by Davenport and Jenks \cite{Dave80a}.
+
+It provides historical details of the original SCRATCHPAD II design.
+Some of these details have changed over time but it is important to
+know what motivates some of the design choices. The verbatim text
+of the paper follows.
+
+This paper discusses the design and implementation of MODLISP, a
+LISPlike language enhanced with the idea of MODes. This extension
+permits, but does not require, the uesr to declare the types of
+various variables, and to compile functions with the arguments
+declared to be of a particular type. It is possible to declare several
+functions of the same name, with arguments of different type
+(e.g. PLUS could be declared for Integer arguments, or Rational, or
+Real, or even Polynomial arguments) and the system will apply the
+correct function for the types of the arguments.
+
+The MODLISP language differs from other abstract data type languages
+such as CLU\cite{Lisk77,Lisk77a,Lisk79} and Russell
+\cite{Dona77} in that it allows dynamic construction of new
+parameterised data types and possesses a unified semantics covering
+interpreted and compiled code, which can call one another at will. In
+short, it is LISPlike.
+
+\subsection{Introduction}
+
+MODLISP is a system developed from LISP with the primary aim of
+supporting research in computer algebra. It has been realised for some
+years (\cite{Loos74,Jenk77}) that progress in computer algebra
+required the development of systems which were able to deal with
+formal ``modes'' such as ``polynomial'' or ``matrix''.
+
+Just as LISP has been the vehicle of choice for implementing computer
+algebra systems in the past, it was felt that implementing such a
+system was best done in a typed variant of LISP, and since none such
+was available at the time the project started, MODLISP was born.
+However, MODLISP is more than just an implementation vehicle. It
+is an interesting language in its own right, with powerful features
+for the manipulation of abstract and parameterised data types.
+
+\subsection{Interpretive Structure}
+
+The evaluator is the key to any LISP system, and so it is to MODLISP.
+The quintessential difference between MODLISP and LISP 1.5 can be
+discerned in the evaluator: whereas a conventional LISP evaluator has
+two arguments (the expression to be evaluated and the environment in
+which it is to be evaluated), the MODLISP evaluator has three (the
+expression to be evaluated, the desired mode of the answer, and the
+environment in which it is to be evaluated). We present in Appendix A
+a somewhat simplified description of the MODLISP evaluator. Here we
+make some brief remarks on the evaluation strategy.
+
+The major difference between MODLISP and LISP 1.5 lies in the area of
+function application. A conventional LISP system's strategy, when
+faced with an operatoroperand form to evaluate, is to evaluate the
+CAR (i.e. the operator part) and then (assuming it is not some sort of
+macro) to evaluate the arguments recursively in a leftright manner,
+and then to APPLY the operator part to the list of arguments in some
+manner (evaluating in an environment enriched by the bindings of the
+arguments, in the case that the CAR was a $\lambda$ expression).
+
+Regrettably, such a simple approach is impossible in the case of
+MODLISP. The easiest way of exemplifying this is to see that we
+cannot evaluate the operator until we know what the modes\footnote{ We
+will later define precisely what we mean by the term 'mode'. For the
+time being, it may help the reader to regard it as the equivalent of
+`type' in conventional typed languages.} of the operands are:
+e.g. the TIMES function for integers is different from that for
+polynomials or for matrices. Furthermore, there is the additional
+complication of heterogeneous argument lists; e.g. multiplying a
+polynomial by an integer.
+
+Fortunately things are not as bleak as all that  we can still
+evaluate as far as the name of the operator (assuming that evaluating
+the operator will yield a name) without knowing the types of (or
+indeed anything else about) the operands. The difficulty comes in the
+step that corresponds to a conventional LISP 1.5 system accessing the
+EXPR (or whatever) property of the operator name. The MODLISP system
+needs to apply one of a variety of functions at this point, depending
+on the modes of the operands, rather than one specific function.
+
+Rather than associate an EXPR property with an atom, therefore,
+MODLISP associates a MODEMAP property with it, whose corresponding
+value is a {\sl modemap list}\footnote{We imply here that a modemap
+list is a list of (map, function) pairs. The actual structure is not
+significantly more complicated (as mentioned in the Appendix A) and is
+a list of (map, condpart) pairs, where a condpart is a list of
+(predicate, function) pairs. The function is only used if the
+predicate evaluates to T. While this additional level is not required
+in the system as described so far, several of the features of MODLISP
+described later in the paper require it. It is mentioned here to avoid
+giving a false impression.} These modemap lists are to MODLISP what
+{\sl subrs} (i.e. BPIs) are to conventional LISPs: they are the
+applicable objects, as defined by the evaluator. The evaluator looks
+through the modemap list, which is a list of maps and associated
+functions, until it finds a map which matches the arguments. This
+causes somewhat of a ``chicken and egg'' situation, since the map
+cannot be matched until we know what the modes of the arguments are,
+and we cannot evaluate the arguments until we know what mode to
+evaluate them in, which the map will tell us.
+
+Hence the evaluator (in fact function EVFORM, as described in Appendix
+A) runs down the list of arguments, evaluating them in the mode given
+by the corresponding entry of the map. An example of a case where this
+complexity is needed is given by the (admittedly artificial, but
+realistic examples are more complex) construct:
+\begin{verbatim}
+ (TIMES (ZERO) (RATIONAL 1 2))
+\end{verbatim}
+where RATIONAL is a function which takes two Integer arguments and
+returns a Rational result. ZERO is a function which returns the zero
+element of whatever type is required (and, of course, the Integer zero
+may well be different from the Rational zero or the Matrix zero).
+TIMES might have a (simplified) modemaplist such as:
+\begin{verbatim}
+ ( ((Integer Integer Integer) IntegerTimes)
+ ((Rational Rational Rational) RationalTimes) )
+\end{verbatim}
+Then (assuming for the sake of simplicity that there is no particular
+mode for the result) the evaluator would first try to apply the first
+modemap (which states that TIMES can yield an Integer result from two
+Integer arguments) and would evaluate (ZERO) in mode Integer, which
+would work, and then would evaluate (RATIONAL 1 2) in mode Integer,
+which would not (i.e. eval returns NIL). Hence we infer that that map
+is not applicable, and we attempt to apply the second modemap. This
+causes (ZERO) and (RATIONAL 1 2) to be evaluated in mode Rational,
+which works, and hence the function (in the untyped sense  i.e. that
+definition which was asserted to be valid for the case of multiplying
+two Rationals) RationalTimes is applied.
+
+The modemap for TIMES quoted above looks as if it is the solution to
+the problem of {\sl polymorphic operators}, i.e. those which
+correspond to different functions for different data types, but in a
+computer algebra setting it is not a complete answer. One way of
+seeing this is to ask ``How many definitions of TIMES are there?'',
+and the answer is that there are an unlimited number of them. There
+are definitions of TIMES for polynomials over the integers, matrices
+over the rational numbers, polynomials over matrices over the
+integers, polynomials over the Gaussian integers, polynomials over the
+field of fractions of the polynomials over the integers modulo 7
+etc. Searching such a modemap would be extremely inefficient, but that
+is by no means the worst of the problem  how are we to know that we
+have placed enough definitions on the modemap. It should now be
+obvious that a modemap which lists simple domains and the associated
+functions is insufficient for our purposes  and that is why we need
+the concept of a {\sl mode}.
+
+\subsection{What is a MODE?}
+
+In order to discuss MODLISP, we need to define what is meant by the
+term 'mode'. The terminology in this area is sufficiently confused,
+with different authors using the same word to mean many different
+things, that we hope the reader will bear with us while we define many
+terms in the way we intend to use them.
+
+The first term we wish to define is 'domain', (short for 'domain of
+computation') whose meaning approximates that of 'data type' in
+traditional programming languages. The traditional view of such 'data
+types' is best summarised by Jensen \& Wirth \cite[p.~12]{Jens75}
+ ``A data type defines the set of values a variable may assume''.
+More recent work has lead to the viewpoint [Morris, 1973] that a type
+is a set of values complete with a list of operations that can be
+performed on those values.
+
+The simplest example of such a domain is that of a basic type,
+e.g. Integer or Boolean. The basic types built into MODLISP are
+Integer, Boolean, Identifier and String, though the user can (and
+probably will) define others.
+
+The next level of complexity is provided by the structured types, of
+which MODLISP provides two: Union and Struct. Familiar examples of
+these are Sexpr, defined as Union(Pair,Atom) and Pair, defined as
+Struct(Car:Sexpr, Cdr:Sexpr).
+
+We can move up from this to {\sl parameterised data types} or just
+{\sl types}, also known as 'type generators', as defined by ADJ
+\cite{Ehri80a}.
+The basic MODLISP system provides two parameterised data types for
+efficiency (List and Vector), but there are facilities for defining
+one's own, and it is expected that a large amount of MODLISP
+programming will be done via these userdefined types. General
+examples of parameterised data types include Stack and Alist, while
+computer algebra is full of such types: Polynomial, Matrix, Algebraic
+Extension, Rational Function etc.
+
+ADJ only considered parameterised data types parameterised by one or
+more domains (in their terminology 'data types'), whereas we may also
+wish to parameterise them by actual values, or a mixture of the
+two. An example of this is ``3-dimensional vectors over the rational
+numbers'', which is parameterised both by the number 3 and by the type
+``rational number''.
+
+The previous definitions allow us to define a {\sl domain}
+(recursively) as being either a basic type, or the result of supplying
+actual values and domains for all the parameters of a (parameterised
+data) type. Hence ``Vectors of Integers, of length 3'', is a domain,
+since ``Integers'' is a basic type, 3 is an actual value for the
+length of the vectors, and ``Vector'' is a type parameterised by a
+positive integer and a type. Conversely, ``Vectors of Integers'' is
+not a domain, for we have not specified the length.
+
+We can now answer the question posed by the heading of this section:
+``What is a mode?''. The answer is deceptively simple: a mode is
+{\sl any} class of domains. While the MODLISP system places no
+limitation on the definition of this class, some classes are more
+useful than others. We shall see below (under the heading 'Compilation
+for Arbitrary Domains') that it is possible to define a mode
+consisting of all the domains on which certain operations (satisfying
+certain axioms) are defined  this kind of mode will be called a
+'category'.
+
+Some particularly useful modes are those constructed by substituting
+actual values for some parameters of a type, hence we can speak about
+the mode of ``Vectors of length 3'' or the mode of ``Stacks''. These
+modes appear naturally in the modemap for many operations, as we see
+in the next paragraph. One mode that crops up particularly in
+interactive computer algebra, but is useful elsewhere, is NilMode,
+denoting the class of all domains.
+
+With parameterised data types, one wants only one routine, and one
+entry in the modemap for a particular operator, for all the various
+parameterisations of one type, e.g. one entry for Vector, and not one
+for Vector(3,Rational), another for Vector(2,Complex) etc. This is
+made possible by the use of patternmatching variables (denoted by
+*1, *2, ...) in modemaps. For example, the map for 'scalar product of
+vectors' might look like\footnote{The internal representation is
+actually somewhat different, since we distinguish between parameters
+which are types, and those which are expressions (e.g. between
+``Rational'' and ``3''), and there is also space to record whether or
+not that part of the map is obligatory or optional (see below under
+``Partial Declarations'').}
+\begin{verbatim}
+ (*2 (Vector *1 *2) (Vector *1 *2))
+\end{verbatim}
+which could be applied with *1=3 and *2=Rational, or with *1=2 and
+*2=Complex. If pattern matching alone is unable to express the precise
+nature of an operation, then the conditional part of the modemap
+(mentioned in the prior footnote) can be used to add extra
+restrictions to the applicability of a particular function
+definition. For example we could have a map of (*2 *3 *3) and a
+conditional expression of the form
+\begin{verbatim}
+ (EQUAL *3 '(VECTOR *1 *2))
+\end{verbatim}
+which would have the same overall effects as the previous map.
+
+\subsection{Compilation}
+
+While such a system may appear laudable, it will not make progress in
+computer algebra unless efficient compilation is available. Many of
+the problems of computer algebra strain the resources of even the
+largest computers, and any extra overhead due to interpretation cannot
+be afforded (especially as MODLISP interpretation is inherently more
+expensive than LISP interpretation due to the necessity to analyse
+modemaps). This gives rise to the obvious question: ``How do we
+compile modemap selection?''.
+
+It is reasonably clear that, in the case where all the arguments of a
+$\lambda$ expression are of known definite type (e.g. Integer), then
+all the analysis can indeed be done, and we are in a situation similar
+to standard LISP compilation. The only difficulty might be that both
+the compiler and the evaluator (i.e. interpreter) would perform
+modemap analysis, and it would be possible for these two to get out of
+step, thus giving compiled code a different semantics from interpreted
+code. This difficulty is met by adopting the technique of {\sl partial
+compilation} \cite{Ersh77}, in which the evaluator also acts as the
+compiler, producing values where it can, and compiled code where it
+cannot\footnote{Note that we are not considering delayed (or lazy)
+evaluation \cite{Hend76} here. The 'rule' feature of
+MODLISP \cite{Jenk79} is based on such a concept, but, as it is
+independent of the ideas considered in this paper, we shall not
+discuss it further.} As an example of this, let us consider the
+compilation of (PLUS X Y) in three cases:
+\begin{enumerate}
+\item X is 2 of mode Integer, Y is 3 of mode Integer. Then the modemap
+for PLUS tells us to apply the function IntegerPlus, and the evaluator
+does so, returning the answer 5.
+\item X and Y are known to be of mode Integer, but their values are
+not known. For example, they might be the formal parameters of a
+function. In this case, we can still select a function from the
+modemap for PLUS, since we do know the modes of X and Y. We cannot
+apply the function IntegerPlus that is produced, but we can compile
+code to produce it, so the compiled code\footnote{The current MODLISP
+implementation compiles into LISP370\cite{IBMx78}, which is compiled
+into machine code by the LISP370 compiler.} is (IntegerPlus X Y).
+\item Nothing is known about X and Y. In this case, we have no idea
+how to apply the modemap for PLUS, and we must compile a call to eval.
+\end{enumerate}
+
+These additions to the evaluator, to allow it to function as a
+compiler as well, while requiring significant changes to the treatment
+of some of the special forms, do not require a major rewrite of the
+actual evaluator. Having compilation performed by the partial
+compilation feature of the MODLISP evaluator has an additional
+advantage -- any expressions which can be evaluated at compile time
+are, and so, for example, in (PLUS X (TIMES 1 2)) the multiplication
+would be replaced by 2. While one could argue that such code should
+not be written, one case where it is hard not to do so is
+conversions. If X is Rational, then in (TIMES X 2), the number 2 must
+be converted from Integer to Rational for the multiplication to be
+performed. This conversion would automatically be done at compile time
+rather than run time.
+
+\subsection{Compilation for Arbitrary Domains}
+
+A significant problem is how to compile code (not just calls to the
+evaluator) for parameterised data types, so that, for example, the
+code for Polynomial can be compiled, and can then be used for
+Polynomials of Integers, Polynomials of Matrices, Polynomials of
+Polynomials etc. or so that we can write (and compile)
+{\sl polymorphic functions}, such as a sort function that will sort
+any set on which a comparison predicate has been defined. Note that
+this is a problem not addressed by, say, the Mode Reduce system
+\cite{Hear74}, where a separate version of the module is compiled for
+each underlying domain. Some of the mechanism required for this exists
+already -- as we saw above we can declare a modemap for the
+scalar product of two vectors which is independent of the dimension of
+the vectors, or of the ground field. Hence calls to this routine can
+be compiled even when these parameters of Vector are not known.
+
+The answer adopted is to declare some properties of the parameterising
+types, so that, just as the first argument to Vector, as discussed
+above, is declared to be an integer, so the second is declared to be a
+Field, i.e. a type on which certain operators are valid.
+
+We require a theoretical basis for ``declaring some properties of the
+parameterising types'', and this is provided in the work of ADJ.
+\cite{Gogu76, Ehri80a}
+We do not intend to discuss the whole of their approach
+here, and the reader is referred to the papers quoted for further
+details, but it is sufficient for present purposes to say that they
+consider a {\sl data type} (which corresponds to our {\sl domain}) to
+be a (many-sorted) {\sl algebra} and a list of operations between the
+carriers. This agrees with Morris\cite{Morr73} in interpreting a type as a
+set (or sets) with operations. These operations must not only exist
+but are also required to satisfy certain {\sl axioms}.
+
+We might wish to define a stack of integers this way (this example is
+based on a rigorous discussion by ADJ \cite[p.~11]{Ehri80a})
+by saying that there
+are two carriers I and S (informally the integers themselves, and
+stacks of them)\footnote{Actually the mathematics demands a third
+carrier, that of the Booleans, since the function Empty takes values
+in the Booleans (i.e. True or False). Computationally, however, the
+Booleans are a constant, and we do not need to declare them specially.},
+and operations:
+\[\begin{array}{ll}
+{\tt Create}: & () \rightarrow S ({\tt no arguments}),\\
+{\tt Push}: & (I,S) \rightarrow S,\\
+{\tt Pop}: & (S) \rightarrow S\footnote{Here we avoid detailed discussions
+of problems such as ``What is Top of an empty stack?''. The literature
+mentions at least two basic techniques for dealing with such
+'exceptional states' -- the simplistic 'hard stop' and the return of a
+Union mode \cite{Blac80}. Both solutions are possible in MODLISP, and
+indeed both are used in the algebra system being implemented in MODLISP.}\\
+{\tt Top}: & (S) \rightarrow I,\\
+{\tt Empty}: & (S) \rightarrow Boolean
+\end{array}\]
+Of course, what we want to do is to define stack as an abstract idea,
+implemented by one program, rather than just to define
+stack-of-integers, and later define stack-of-rationals, etc. by
+separate programs.
+
+We now define a {\sl category} of algebras (i.e. types) to be the
+class of all algebras with carriers indexed by the same sets, and with
+equivalent operators which satisfy a certain list of axioms (this
+definition is deliberately vague and intuitive -- precise definitions
+are given by ADJ\cite{Ehri80a}). As an example we can define the category of
+all stacks to be the class of all algebras with two carriers (indexed
+by I and S)\footnote{As remarked in a previous footnote, there is a
+technical requirement for a third carrier Boolean. Since this is
+intended to be a constant, we shall ignore this requirement. We can
+view what we write as a shorthand for a formal language in which these
+constant carriers were always fully defined, and with suitable extra
+axioms to ensure that all instances of the constant Boolean were
+isomorphic.} with operators
+\[\begin{array}{ll}
+{\tt Create}: & () \rightarrow C_S ({\tt no arguments}),\\
+{\tt Push}: & (C_I,C_S) \rightarrow C_S,\\
+{\tt Pop}: & (C_S) \rightarrow C_S\\
+{\tt Top}: & (C_S) \rightarrow C_I,\\
+{\tt Empty}: & (C_S) \rightarrow Boolean
+\end{array}\]
+Here $C_S$ is the carrier indexed by S and $C_I$ is the carrier
+indexed by I. These operations must satisfy certain axioms, such as
+\begin{verbatim}
+ Top(Push(x,s)) = x;
+ Pop(Push(x,s)) = s;
+ Empty(Create()) = True;
+ Empty(Push(x,s)) = False;
+\end{verbatim}
+
+We can also abstractly define other structures, corresponding not to
+one concrete implementation (such as Stack), but to whole
+families. One example is Collection, which consists of two carriers I
+and S (or, more precisely, each element of the category Collection
+consists of two carriers indexed by I and S) with the following
+operations:
+\begin{verbatim}
+ New: () -> S;
+ Add: (I,S) -> S;
+ Empty: (S) -> Boolean;
+ Member: (I,S) -> Boolean
+\end{verbatim}
+and the following axioms:
+\begin{verbatim}
+ Empty(New()) = True;
+ Empty(Add(x,s)) = False;
+ Member(x,New()) = False;
+ Member(x,Add(y,s)) = if x=y then True else Member(x,s)
+\end{verbatim}
+This represents any collection to which we can add things, and can
+test whether or not an item is in the collection. The concrete
+representation of this abstract category includes lists, lists without
+duplicates, hash tables, balanced trees etc.
+
+The general problem of compilation is then typified by the question:
+``How do we compile something to run over any of these domains,
+without any modemap resolution at run time?''.
+
+We discussed above, when describing modemaps for parameterised
+routines, the possibility of using pattern matching variables in
+modemaps. Using these, we can describe the modemap for a routine (such
+as BelongsList, which, given a list of members of I, and a collection,
+returns a list of those members of the list that lie in the
+collection) by a map like ((List *1) (List *1) *2) with a conditional
+part expressing the fact that *2 has to be a collection of *1.
+
+The remaining problem is to place something in the modemap which the
+compiler can access and place into the compiled code, even though it
+does not know what, in this case, either I or S actually is -- I could
+be the Integers and S a hash table, or I could be a set of polynomials
+and S a linked list. However we do know, from our formalism that the
+only functions that can be accessed are New, Add, Empty and Member, so
+that if we know of a vector which will contain the four functions, in
+a fixed order, at run time, then the compiler can just compile
+accesses into this vector. For example, New might be element 5 of the
+vector\footnote{In the system, ``user'' functions start at the fifth
+element -- elements 0, 1 and 2 are reserved for bookkeeping, 3
+contains the equality-testing function, and 4 contains the printing
+function.} and then a call to New would compile into code to select
+the fifth element and apply it.
+
+This is in fact the scheme adopted -- each description of a category
+(such as Collection above, or the many categories Ring, Group, Field
+etc. are needed in computer algebra) defines the layout of a vector
+which is to represent the operations defined on any domain lying in
+that category. The modemaps for these operations are updated to
+include appropriate entries, for example PLUS has an entry of
+\begin{verbatim}
+ ((*1 *1 *1) ( (OF *1 Monoid) (Elt *1 5)))
+\end{verbatim}
+meaning that PLUS is a function valid in all Monoids (the clause
+starting OF is the conditional part of the modemap) and that its
+definition is to be found as element 5 of the vector representing the
+operations on the monoid.
+
+Hence the only extra overhead at runtime due to the existence of
+parameterised types (i.e. not knowing at compile time what the types
+of the operands were) is the cost of the element extraction --
+typically two machine instructions.
+
+\subsection{Partial Declaration}
+
+We have seen in the previous section that, if we can declare, for
+example, that X and Y belong to some unspecified domain G which is a
+Group (for example) then we will be able to compile code which will
+operate over all groups. This is not difficult to do (though the
+algebra system being implemented in MODLISP has a high-level language
+preprocessor to make it easier), and gives rise to a very powerful system.
+
+In fact we can do more -- this system opens up the possibility of
+``partial declaration'', which can be performed in either interpreted
+or compiled code. We can say, for example, that F is to be a
+polynomial in X, but we do not care over what the polynomial is taken
+(one example of this would be in integration, where we wish to express
+the function to be integrated in terms of the variable of integration,
+and all other variables etc. are secondary). Code to manipulate F can
+still be compiled, because we know that polynomials always lie in
+rings, and the precise nature of the underlying coefficients is
+irrelevant since all the necessary information (functions to
+manipulate them, etc.) will be contained in the vector implementing
+the particular polynomial domain.
+
+\subsection{Status of the Project}
+
+The MODLISP system is currently implemented above LISP/370\cite{IBMx78},
+and is being used to develop an experimental computer algebra system
+which is based on abstract data types, in
+the sense that it is possible to define ``Polynomial'' as an operation
+which takes a ring, and yields another one (i.e. a vector of
+operations which can be accessed by a MODLISP program compiled knowing
+that its arguments came from a ring).
+
+\subsection{Acknowledgments}
+
+We are grateful to J.W. Thatcher and E.G. Wagner for many useful
+discussions about abstract parameterised data types, and to
+D.R. Barton, J.D. Cohen and D.Y.Y. Yun for many fruitful discussions
+on MODLISP. Earlier drafts of this paper were read by F.W. Blair,
+R.W. Ryniker II and J.W. Thatcher, and we are grateful to them for
+many helpful comments and criticisms.
+
+\subsection{Appendix A: Description of MODLISP Evaluator}
+
+This appendix first gives a formal description of a simplified MODLISP
+evaluator, one which does not provide for partial compilation or
+partial declarations. The overall interpretive structure is more
+easily seen from this simpler description. We then discuss how this
+evaluator can be extended to the complete MODLISP evaluator, which
+provides both partial compilation and partial declaration, and for the
+interaction of the two.
+
+The following summarizes the dialect of LISP used below to describe
+the evaluator. (QUOTE X) is abbreviated by "X. (LET X .) is used to
+introduce a local variable X. The notation \textless X M E\textgreater
+is used for an
+atomic datatype called {\sl triple} consisting of an expression X, a
+mode M, and an environment E. An argument of the form (: A B) to a
+$\lambda$ expression means ``A has the form of B'', and has the
+side-effect of binding symbols appearing in B to the indicated parts
+of the structure. The (REPEAT (..) X) repeats X as given by the
+iterators (..) and has the value NIL. (COLLECT (..) X) does the same
+but its value is a list of the successive values of X (thus, (COLLECT
+((IN U X)) (F U)) is similar to (MAPCAR X (FUNCTION F))). COLLECT and
+REPEAT do not generate PROGs, so that (RETURN X) always causes an exit
+from the function containing it with value X.
+
+\begin{verbatim}
+(EVAL (LAMBDA ((: T <X M E>)) (COND
+ ((ATOM X) (PROGN
+ (LET TP (COND
+ ((TRIPLEP X) ((GETEVALFUN M E) X))
+ ((SYMBOLP X) (EVSYMBOL X E))
+ ("T <X (MODE X) E>) ))
+ (EVCONVERT TP M) ))
+ ((ATOM (LET OP (CAR X))) (PROGN
+ ((LET FN (GET OP "SPECIAL E)) (FN T))
+ ((REPEAT ((IN MAP (GET OP "MODEMAP E)))
+ (COND ((LET Z (EVFORM T MAP))
+ (RETURN Z)))))
+ ("T NIL) ))
+ ((EQ (CAR OP) "LAMBDA)
+ (EVLAPPLY (CADR OP) (CADDR OP) (CDR X) M E))
+ ("T (EVAL <(S:EXPR (EVAL ))
+ M
+ E>)) )))
+\end{verbatim}
+
+\begin{verbatim}
+(EVSYMBOL (LAMBDA (S E) (COND
+ ((LET V (GET S "VALUE E)) V)
+ ((GET S "MODE E) NIL)
+ ("T <S "SYMBOL E>) )))
+\end{verbatim}
+
+\begin{verbatim}
+(EVCONVERT (LAMBDA (T M) (COERCE T M)))
+\end{verbatim}
+
+\begin{verbatim}
+(EVFORM (LAMBDA ((: T <(OP . ARGL) TM E>)
+ (: MAP (SIG . CEXPR))) (PROGN
+ (LET MAP (SUBLIS (OR (MATCH TM (CAR SIG))
+ (RETURN NIL)) MAP))
+ (LET L (COLLECT ((IN A ARGL) (IN M (CADR MAP)))
+ (S:EXPR (LET T (OR (EVAL <A M E>)
+ (RETURN NIL)))) ))
+ (REPEAT ((IN X L))
+ (COND ((EQUAL "True
+ (S:EXPR (EVAL <(CAR X) "Boolean E>)))
+ (RETURN <(LISPAPPLY (CADR X) L) MD E>))))
+ (RETURN NIL) )))
+\end{verbatim}
+
+\begin{verbatim}
+(EVLAPPLY (LAMBDA (VL B ARGL M E) (PROGN
+ (LET XE (EXTEND E))
+ (REPEAT ((IN A ARGL) (IN X VL))
+ (LET XE (BIND (CAR X) XE (LIST
+ (CONS "MODE (CDR X))
+ (CONS "VALUE (EVAL <A (CDR X) E>))))))
+ (EVAL <B M XE>) )))
+\end{verbatim}
+
+\subsection{Explanation}
+
+EVAL takes a triple T consisting of an expression X, a ``target'' mode
+M, and an environment E, and returns either a triple or else NIL
+indicating failure to produce such a triple. If X is an atom, a triple
+TP is created, then EVCONVERTed to M. There are three ways that
+triples are produced from atomic X. If X is already a triple
+(remember, triples are atomic), then an evaluation function associated
+with M is applied to evaluate X in the new environment E (for example,
+if M is SEXPR then that evaluation function is EVAL). If X is a
+symbol, then TP is produced by EVSYMBOL. Otherwise, the atom is a
+basic object for which the function MODE can produce the triple directly.
+
+If X is not an atom but its first element OP is, then there are three
+cases. First, symbol OP may be a special MODLISP operator such as
+EXIT, IF, LAMBDA, QUOTE, RETURN, SETQ, in which case the name FN
+stored under property SPECIAL is applied to the list of arguments of
+X. If OP has a MODEMAP property, then the value stored under that
+property is a list of modemaps. Each modemap is applied in turn by
+EVFORM attempting to evaluate form X successfully to produce a triple
+Z. If both of the above two cases fail, EVAL returns NIL.
+
+If neither X nor its CAR is atomic, then two cases remain. If the CAR
+of OP is "LAMBDA, then EVLAPPLY is called. Otherwise, the OP is
+evaluated with target mode SEXPR to produce a new OP which is consed
+onto the list of arguments and passed recursively to EVAL.
+
+EVSYMBOL takes a symbol S and an environment E, and returns either a
+triple or NIL. If S has a VALUE property\footnote{As set by an
+assignment (the SETQ special form) or by a binding (function
+EVLAPPLY).} then a triple stored under that property is returned. If S
+has a MODE property but not a VALUE, then EVSYMBOL fails. Otherwise, a
+triple with mode SYMBOL is created directly for S.
+
+EVCONVERT takes a triple T and a mode MP, and produces either another
+triple or NIL. In a system without partial declarations (for example,
+all conventional ``typed'' languages), EVCONVERT is equivalent to the
+conversion function COERCE described below.
+
+EVFORM takes two arguments: a triple T=\textless X TM E\textgreater,
+where X consists of a
+(generic) OPerator consed onto a list ARGL of arguments, and, a
+Modemap MAP consisting of a ``signature pattern'' SIG consed onto a
+conditional expression CEXPR. The SIG is a list of patterns whose CAR
+gives a ``mode-pattern'' of the result and whose CDR gives the
+mode-patterns for the arguments in left-to-right order. First, an
+attempt is made to MATCH SIG's result mode to the mode TM of the
+triple. If this match fails, then EVFORM returns NIL. The substitution
+list resulting from a successful match is SUBLISted into the signature
+pattern to produce a signature MAP containing no free variables. Next,
+each argument in ARGL is evaluated in turn with the corresponding mode
+from the new signature; if all evaluations are successful, a list L of
+evaluated arguments is produced. The CDR of the new MAP is a
+conditional expression list, each element of which is a list (Boolean
+expression, function descriptor). If the Boolean expression can be
+successfully EVALed with mode Boolean to yield "true, then the
+function descriptor is consed onto L and LISPEVALed.
+
+EVLAPPLY takes a triple T consisting of a LAMBDA-expression with
+symbol list VL and a body B consed onto a list of arguments ARGL as
+the expression part, a mode, and an environment. Here, VL is a list of
+pairs each consisting of a formal parameter consed with a mode. First,
+XE is created by extending E with a new (empty) local
+environment. Next, XE is incrementally extended by adding a binding
+for each successive member of VL. Each binding in MODLISP is a
+property list consisting of a MODE m together with a VALUE consisting
+of a triple created by evaling the corresponding member of ARGL using
+m as target mode, and the original E as the environment. Having
+extended XE to include bindings for all the formal parameters of VL, a
+triple is formed by evaluating the body of the LAMBDA-expression in
+that extended environment.
+
+The following auxiliary functions are used:
+
+\vskip 2mm
+\noindent
+(BIND S E PL) adds a property list PL as a binding for symbol S in the
+current local environment of E and returns the updated E.
+
+\vskip 2mm
+\noindent
+(COERCE T M) converts triple T to one with mode M, or else returns NIL
+if that conversion is not possible.
+
+\vskip 2mm
+\noindent
+(EXTEND E) extends E to contain a new empty local environment.
+
+\vskip 2mm
+\noindent
+(GET S P E) returns value stored under property P for symbol S in
+environment E.
+
+\vskip 2mm
+\noindent
+(GETEVALFUN M) returns the evaluator function for mode M.
+
+\vskip 2mm
+\noindent
+(MATCH E P) returns a list of substitution pairs ((A . B) ..) if
+pattern P (containing ``free variables'' A,..) matches expression E
+(that is, P=E when all A's in P are replaced by corresponding B's)
+
+\vskip 2mm
+\noindent
+(LISPAPPLY FN L) applies MODLISP function FN to a list L of evaluated
+arguments by a LISP evaluation of (CONS (CAR FN) (APPEND L (LIST (CDR
+FN)))). In MODLISP, all functions are represented as pairs whose CAR
+is a code pointer and whose CDR is a runtime environment (as defined
+by the modemap) which will be supplied to FN as its last argument when
+FN is called.
+
+\vskip 2mm
+\noindent
+(MODE X) returns the mode of X, assumed to be a builtin type
+
+\vskip 2mm
+\noindent
+(S:EXPR T) returns the expression part of triple T.
+
+\vskip 2mm
+\noindent
+(S:MODE T) returns the mode part of triple T.
+
+\vskip 2mm
+\noindent
+(S:ENV T) returns the environment part of triple T.
+
+\vskip 2mm
+\noindent
+(SYMBOLP X) tests that X is a symbol.
+
+\vskip 2mm
+\noindent
+(TRIPLEP X) tests that X is a triple.
+
+\vskip 2mm
+\noindent
+(COMPILE FN ARGS) compiles a call to the function FN, with arguments ARGS.
+
+\vskip 2mm
+\noindent
+(COMPILEDP X) tests if X represents a piece of compiled code, rather
+than a value.
+
+\subsection{On Extensions for Partial Compilation}
+
+In order to cope with partial compilation, several modifications to
+the evaluator must be made. The first, and easiest, is that the fourth
+line of EVSYMBOL should be changed to return a compiled reference to X
+rather than returning NIL. The other changes are to EVFORM. The loop
+in lines 5--8 should be modified to set the flag EVALFG if some (S:MODE
+T) is "NilMode, and COMPFG if some (S:EXPR T) represents compiled
+code. In addition, the call to LISPAPPLY on the last line should be
+replaced by a conditional expression (COND (EVALFG "CALLEVAL) (COMPFG
+"COMPILE) ("T "LISPAPPLY)). These changes to EVFORM discriminate (in
+reverse order) between the 3 cases described in the section
+"Compilation" in the body of the paper. To recap, if any of the
+arguments has an unknown mode (indicated by a return mode of "NilMode),
+then all we can do is call the evaluator at runtime, when the modes
+are known. If the modes are known, but some of the values are not
+(i.e. the expression part of at least one triple represents a piece of
+compiled code, as determined by COMPILEDP), then we can compile a call
+to the function determined from the modemap. Finally, if all the
+values are known, then we can proceed by calling LISPEVAL, just as in
+the case of the basic evaluator without the additions for partial
+compilation.
+
+\subsection{On Extensions for Partial Declarations}
+
+In the general case where partial declarations are allowed \cite{Cohe80},
+the definition of EVCONVERT must be replaced by:
+\begin{verbatim}
+(EVCONVERT (LAMBDA ((: T <X M E>) MP)
+ (COERCE T (OR (RESOLVE M MP) (RETURN NIL))) ))
+\end{verbatim}
+Here, the mode M of T is RESOLVEd with MP to produce a third mode M'
+(if no such mode can be found, EVCONVERT returns NIL immediately). The
+expression X of T is then COERCEd to M' producing a final triple, or
+NIL if that is not possible.
+
+In addition, the description of EVFORM becomes somewhat more
+complicated. First of all, it is not necessary for the result mode of
+SIG to match successfully the target mode TM of the triple. As a
+result SIG will generally contain free variables throughout the
+process of left-to-right argument evaluation. This process has the
+following alternative code replacing the loop in lines 5--8 of EVFORM:
+\begin{verbatim}
+(LET LT (COLLECT ((IN A ARGL) (IN P (CDR SIG)))
+ (PROGN
+ (LET MD (PATTERN_TO_MODE (SUBLIS LS P)))
+ (LET T (OR (EVAL <A MD E>) (RETURN NIL)))
+ (LET E (S:ENV T))
+ (LET LS (NCONC (MATCH (S:MODE T) P) LS))
+ T)))
+\end{verbatim}
+\begin{verbatim}
+(LET L (COLLECT ((IN T TL)
+ (IN MD (SUBLIS LS (CDR SIG))))
+ (S:EXPR (OR (COERCE T MD) (RETURN NIL)))))
+\end{verbatim}
+
+This is described as follows. First, a list LT of triples is
+produced. The first argument A of ARGL is evaluated in the initial
+environment with a target mode obtained by SUBLISting LS (initially
+NIL\footnote{This initial setting determines a {\sl bottom-up}
+evaluation strategy, where the mode of a result is determined purely
+by the modes of its arguments. This clearly creates difficulties for
+functions without arguments. The strategy actually used in MODLISP is
+first to try a {\sl top-down} evaluation, and then to use a bottom-up
+strategy if the top-down one fails. The top-down strategy is
+implemented by setting the initial value of LS to be the result of
+MATCHing the mode-pattern of the result (CAR SIG) with the target mode
+TM.}) into P and converting the result to a mode by
+PATTERN\_TO\_MODE. If evaluation successfully produces a triple T, the
+initial environment is replaced by the environment of T, the
+substitution list LS is updated to contain new bindings which result
+from MATCHing P to the mode of T. A second argument is then evaluated
+with a target mode determined from the evaluation of the first
+argument, etc. until a triple is either produced for each argument or
+else NIL is returned along the way.
+
+The list LT now consists of triples containing values whose modes are
+specific domains. All relevant free variables in the final LS are
+paired with domains, with later pairings (those which appear leftmost
+in LS) taking precedence. A final signature is determined therefore by
+SUBLISting LS into (CDAR MAP). A pass over TL is then made to COERCE
+all expressions to the appropriate domain.
+
+\subsection{Appendix B: Sample Compilation by the MODLISP Evaluator}
+
+The following illustrates a sample source language function definition
+and corresponding compilation by the current MODLISP evaluator into
+LISP code. The example describes a function to raise an expression to
+a power using a repeated squaring method. The domain of the
+expression, r, is assumed to be in the category Ring, the power
+assumed to be a NonNegativeInteger. The example is taken from the
+context of the definition of the category Ring in our experimental
+computer algebra system.
+\begin{verbatim}
+Source language:
+ given
+ x: r
+ n: NonNegativeInteger
+ define
+ x ** n ->
+ if n = 0 then 1
+ else if n = 1 then x
+ else if oddp(n) then x*((x*x)**((n-1)/2))
+ else (x*x)**(n/2).
+\end{verbatim}
+Compilation into LISP:
+\begin{verbatim}
+(LAMBDA (x n r) ((LAMBDA (G4 G1 G2 G0 G3)
+ (COND
+ ((EQUAL n 0) (ELT r 10))
+ ((EQUAL n 1) x)
+ ((oddp n)
+ ((CAR (SETQ G2 (ELT r 9)))
+ x
+ ((CAR (SETQ G1 (ELT r 11)))
+ ((CAR (SETQ G0 (ELT r 9))) x x (QCDR G0))
+ (QUOTIENT (DIFFERENCE n 1) 2) (QCDR G1))
+ (QCDR G2)) )
+ ("T
+ ((CAR (SETQ G4 (ELT r 11)))
+ ((CAR (SETQ G3 (ELT r 9))) x x (QCDR G3))
+ (QUOTIENT n 2)
+ (QCDR G4))) ))
+ NIL NIL NIL NIL NIL ))
+\end{verbatim}
+Explanation. A representation of the ring r is passed as a third
+argument to the function. Rings are represented by vectors of length
+11 (or more, if they are of a more restrictive category). Elements 5
+through 11 are allocated as follows:
+\begin{verbatim}
+ (5) the zero element of the ring
+ (6) dyadic +
+ (7) monadic -
+ (8) dyadic -
+ (9) dyadic *
+ (10) the multiplicative identity (1)
+ (11) dyadic **
+\end{verbatim}
+Each of the slots 6--9 and 11 are pairs consisting of a code pointer in
+its CAR and a pointer to the vector representing r in its CDR. In
+particular, the CAR of the contents of slot 11 in the vector is a
+pointer to the compiled code of the above function!
+
+The function QCDR is a version of CDR that does not check to see if
+its argument is a pair first, and hence it can compile into 1 machine
+instruction. Note that this function is only used in circumstances
+where the CAR has already been taken, so we know that the argument
+really is a pair. (ELT v n) extracts the nth element (counting from
+0) of the vector v. The symbols G0--G4 represent ``gensyms'' generated
+by MODLISP.
+
+We note that there are several optimizations which could be performed
+on the code to improve performance and to reduce the size of the
+compiled code, such as replacing EQUAL by EQ, G2 and G3 by G0, G4 by
+G1, the inner (CAR (SETQ G0..)) by (QCAR (SETQ G0..)), all (ELT r 9)
+by (QELT r 9). These optimisations which are planned, are global in
+nature. The evaluator already performs several local optimisations,
+such as the use of QCDR.
+
\section{Tedious Maintainer Tasks}
\subsection{Maintaining the credits list}
diff git a/books/bookvolbib.pamphlet b/books/bookvolbib.pamphlet
index b286cbf..1b0730d 100644
 a/books/bookvolbib.pamphlet
+++ b/books/bookvolbib.pamphlet
@@ 1049,7 +1049,8 @@ paragraph for those unfamiliar with the terms.
journal = "LNCS",
volume = "523",
pages = "45--57",
 paper = "Card86.pdf"
+ paper = "Card86.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 1064,7 +1065,8 @@ paragraph for those unfamiliar with the terms.
number = "23",
year = "1988",
pages = "138--164",
 paper = "Card88.pdf"
+ paper = "Card88.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 1337,6 +1339,33 @@ paragraph for those unfamiliar with the terms.
\subsection{E} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Ehrig, H.}
+\index{Kreowski, H.-J.}
+\index{Padawitz, P.}
+\begin{chunk}{axiom.bib}
+@article{Ehri80,
+ author = "Ehrig, H. and Kreowski, H.-J. and Padawitz, P.",
+ title = {{Algebraic Implementation of Abstract Data Types: Concepts,
+ Syntax, Semantics and Correctness}},
+ journal = "LNCS",
+ volume = "85",
+ year = "1980",
+ booktitle = "Automata, Languages and Programming",
+ publisher = "Springer",
+ abstract =
+ "A new concept for the implementation of abstract data types is
+ proposed: Given algebraic specifications SPEC0 and SPEC1 of
+ abstract data types ADT0 and ADT1 an implementation of ADT0 by ADT1
+ is defined separately on the syntactical level of specifications
+ and on the semantical level of algebras. This concept is shown to
+ satisfy a number of conceptual requirements for the implementation
+ of abstract data types. Several correctness criteria are given and
+ illustrating examples are provided.",
+ paper = "Ehri80.pdf"
+}
+
+\end{chunk}
+
\index{Ehrig, Hartmut}
\index{Mahr, Bernd}
\begin{chunk}{axiom.bib}
@@ 9855,7 +9884,7 @@ when shown in factored form.
\index{Tassi, Enrico}
\index{Zacchiroli, Stefano}
\begin{chunk}{axiom.bib}
@inproceedings{Aspe06,
+@inproceedings{Aspe06a,
author = "Asperti, Andrea and Coen, Claudio Sacerdoti and
Tassi, Enrico and Zacchiroli, Stefano",
title = {{Crafting a Proof Assistant}},
@@ 9874,7 +9903,8 @@ when shown in factored form.
highlight the common functionalities, not only in view of
reusability but also to encourage a more systematic comparison of
different softwares and architectural solutions.",
 paper = "Aspe06.pdf"
+ paper = "Aspe06a.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 12616,6 +12646,105 @@ when shown in factored form.
\subsection{A} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Abadi, Martin}
+\index{Cardelli, Luca}
+\index{Pierce, Benjamin}
+\index{Plotkin, Gordon}
+\begin{chunk}{axiom.bib}
+@inproceedings{Abad89,
+ author = "Abadi, Martin and Cardelli, Luca and Pierce, Benjamin
+ and Plotkin, Gordon",
+ title = {{Dynamic Typing in a Statically Typed Language}},
+ booktitle = "16th Principles of Programming Languages",
+ publisher = "ACM",
+ pages = "213--227",
+ year = "1989",
+ abstract =
+ "Statically typed programming languages allow earlier error
+ checking, better enforcement of disciplined programming styles,
+ and generation of more efficient object code than languages where
+ all type consistency checks are performed at run time. However,
+ even in statically typed languages, there is often the need to
+ deal with data whose type cannot be determined at compile time. To
+ handle such situations safely, we propose to add a type Dynamic
+ whose values are pairs of a value $v$ and a type tag T where $v$
+ has the type denoted by T. Instances of Dynamic are built with an
+ explicit tagging construct and inspected with a type safe typecase
+ construct.
+
+ This paper explores the syntax, operational semantics, and
+ denotational semantics of a simple language including the type
+ Dynamic. We give examples of how dynamically typed values can be
+ used in programming. Then we discuss an operational semantics for
+ our language and obtain a soundness theorem. We present two
+ formulations of the denotational semantics of this language and
+ relate them to the operational semantics. Finally, we consider the
+ implications of polymorphism and some implementation issues.",
+ paper = "Abad89.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Adams, Andrew A.}
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@article{Adam04,
+ author = "Adams, Andrew A. and Davenport, James H.",
+ title = {{Copyright Issues for MKM}},
+ journal = "LNCS",
+ volume = "3119",
+ year = "2004",
+ abstract =
+ "We present an overview of the current situation and recent and
+ expected future developments in areas of copyright law and
+ economics relevant to Mathematical Knowledge Management.",
+ paper = "Adam04.pdf"
+}
+
+\end{chunk}
+
+\index{Ait-Kaci, Hassan}
+\begin{chunk}{axiom.bib}
+@book{Aitk99,
+ author = "Ait-Kaci, Hassan",
+ title = {{Warren's Abstract Machine: A Tutorial Reconstruction}},
+ publisher = "MIT Press",
+ isbn = "0262510588",
+ year = "1999",
+ link = "\url{http://wambook.sourceforge.net/wambook.pdf}",
+ paper = "Aitk99.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Alama, Jesse}
+\index{Mamane, Lionel}
+\index{Urban, Josef}
+\begin{chunk}{axiom.bib}
+@article{Alam12,
+ author = "Alama, Jesse and Mamane, Lionel and Urban, Josef",
+ title = {{Dependencies in Formal Mathematics: Applications and
+ Extraction for Coq and Mizar}},
+ journal = "LNCS",
+ volume = "7362",
+ year = "2012",
+ abstract =
+ "Two methods for extracting detailed formal dependencies from the
+ Coq and Mizar system are presented and compared. The methods are
+ used for dependency extraction from two large mathematical
+ repositories: the Coq Repository at Nijmegen and the Mizar
+ Mathematical Library. Several applications of the detailed
+ dependency analysis are described and proposed. Motivated by the
+ different applications, we discuss the various kinds of
+ dependencies that we are interested in, and the suitability of
+ various dependency extraction methods.",
+ paper = "Alam12.pdf"
+}
+
+\end{chunk}
+
\index{Altenkirch, Thorsten}
\begin{chunk}{axiom.bib}
@misc{Alte18,
@@ 12627,6 +12756,76 @@ when shown in factored form.
\end{chunk}
+\index{Amin, Nada}
+\index{Rompf, Tiark}
+\begin{chunk}{axiom.bib}
+@inproceedings{Amin18,
+ author = "Amin, Nada and Rompf, Tiark",
+ title = {{Collapsing Towers of Interpreters}},
+ booktitle = "Principles of Programming Languages",
+ year = "2018",
+ publisher = "ACM",
+ abstract =
+ "Given a tower of interpreters, i.e., a sequence of multiple
+ interpreters interpreting one another as input programs, we aim to
+ collapse this tower into a compiler that removes all interpretive
+ overhead and runs in a single pass. In the real world, a use case
+ might be Python code executed by an x86 runtime, on a CPU emulated
+ in a JavaScript VM, running on an ARM CPU. Collapsing such a tower
+ can not only exponentially improve runtime performance, but also
+ enable the use of base language tools for interpreted programs,
+ e.g. for analysis and verification. In this paper, we lay the
+ foundations in an idealized but realistic setting.
+
+ We present a multilevel lambda calculus that features staging
+ constructs and stage polymorphism: based on runtime parameters, an
+ evaluator either executes source code (thereby acting as an
+ interpreter) or generates code (thereby acting as a compiler). We
+ identify stage polymorphism, a programming model from the domain
+ of highperformance program generators, as the key mechanism to
+ make such interpreters compose in a collapsible way.
+
+ We present Pink, a metacircular Lisplike evaluator on top of
+ this calculus, and demonstrate that we can collapse arbitrarily
+ many levels of selfinterpretation, including levels with
+ semantics modifications. We discuss several examples: compiling
+ regular expressions through an interpreter to base code, building
+ program transformers from modified interpreters, and others. We
+ develop these ideas further to include reflection and reification,
+ culminating in Purple, a reflective language inspired by Brown,
+ Blond, and Black, which realizes a conceptually infinite tower,
+ where every aspect of the semantics can change
+ dynamically. Addressing an open challenge, we show how user
+ programs can be compiled and recompiled under usermodified
+ semantics.",
+ paper = "Amin18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Andres, Mirian}
+\index{Lamban, Laureano}
+\index{Rubio, Julio}
+\begin{chunk}{axiom.bib}
+@article{Andr07,
+ author = "Andres, Mirian and Lamban, Laureano and Rubio, Julio",
+ title = {{Executing in Common Lisp, Proving in ACL2}},
+ journal = "LNCS",
+ volume = "4573",
+ year = "2007",
+ abstract =
+ "In this paper, an approach to integrate an alreadywritten Common
+ Lisp program for algebraic manipulation with ACL2 proofs of
+ properties of that program is presented. We report on a particular
+ property called ``cancellation theorem'', which has been proved in
+ ACL2, and could be applied to several problems in the field of
+ Computational Algebraic Topology.",
+ paper = "Andr07.pdf"
+}
+
+\end{chunk}
+
\index{Antoy, Sergio}
\index{Peters, Arthur}
\begin{chunk}{axiom.bib}
@@ 12757,6 +12956,113 @@ when shown in factored form.
\end{chunk}
+\index{Armstrong, J.L.}
+\index{Virding, S.R.}
+\index{Williams, M.C.}
+\begin{chunk}{axiom.bib}
+@inbook{Arms92,
+ author = "Armstrong, J.L. and Virding, S.R. and Williams, M.C.",
+ title = {{Use of Prolog for Developing a New Programming Language}},
+ booktitle = "The Practical Application of Prolog",
+ year = "1992",
+ chapter = "unknown",
+ pages = "unknown",
+ publisher = "Institute of Electrical Engineers, London",
+ abstract =
+ "This paper describes how Prolog was used for the development of a
+ new concurrent realtime symbolic programming language called
+ Erlang.
+
+ Erlang was developed by first building a prototype in Prolog 
+ the prototype was used by a user group to test their reactions to
+ the language. As time passed many features were added (and
+ removed) from the interpreter and eventually the language reached
+ a level of maturity where it was decided to try it out on a
+ significant problem.
+
+ About 3 years and some 20,000 lines of Erlang later, performance
+ became an issue  we wrote Prolog cross compilers from Erlang to
+ various concurrent logic programming languages followed by a
+ direct implementation of Erlang itself. The direct implementation
+ of Erlang was loosely based on the WAM and made by writing a
+ Prolog compiler from Erlang to a new abstractmachine and an
+ emulator for the abstract machine in 'C'. The instruction set for
+ the abstract machine was first prototyped in Prolog  finally the
+ compiler was rewritten in Erlang, thus totally removing any
+ dependency on Prolog.
+
+ This paper describes some of the key events which lay between the
+ simple prototype and the current version of the language.",
+ paper = "Arms92.pdf",
+ keywords = "printed,DONE"
+}
+
+\end{chunk}
+
+\index{Asperti, Andrea}
+\index{Geuvers, Herman}
+\index{Loeb, Iris}
+\index{Mamane, Lionel Elie}
+\index{Coen, Claudio Sacerdoti}
+\begin{chunk}{axiom.bib}
+@article{Aspe06,
+ author = "Asperti, Andrea and Geuvers, Herman and Loeb, Iris and
+ Mamane, Lionel Elie and Coen, Claudio Sacerdoti",
+ title = {{An Interactive Algebra Course with Formalised Proofs and
+ Definitions}},
+ journal = "LNCS",
+ volume = "4108",
+ year = "2006",
+ abstract =
+ "We describe a casestudy of the application of web technology to
+ create webbased didactic material out of a repository of formal
+ mathematics, using the structure of an existing course. The paper
+ discusses the difficulties related to associating notation to a
+ formula, the embedding of formal notions into a document (the
+ ``view''), and the rendering of proofs.",
+ paper = "Aspe06.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Asperti, Andrea}
+\index{Coen, Claudio Sacerdoti}
+\begin{chunk}{axiom.bib}
+@article{Aspe10a,
+ author = "Asperti, Andrea and Coen, Claudio Sacerdoti",
+ title = {{Some Considerations on the Usability of Interactive Provers}},
+ journal = "LNCS",
+ volume = "6167",
+ year = "2010",
+ abstract =
+ "In spite of the remarkable achievements recently obtained in the
+ field of mechanization of formal reasoning, the overall usability
+ of interactive provers does not seem to be sensibly improved since
+ the advent of the ``second generation'' of systems, in the mid of
+ the eighties. We try to analyze the reasons of such a slow
+ progress, pointing out the main problems and suggesting some
+ possible research directions.",
+ paper = "Aspe10a.pdf",
+ keywords = "DONE"
+}
+
+\end{chunk}
+
+\index{Asperti, Andrea}
+\index{Ricciotti, Wilmer}
+\begin{chunk}{axiom.bib}
+@article{Aspe12a,
+ author = "Asperti, Andrea and Ricciotti, Wilmer",
+ title = {{A Web Interface for Matita}},
+ journal = "LNCS",
+ volume = "7362",
+ year = "2012",
+ paper = "Aspe12a.pdf"
+}
+
+\end{chunk}
+
\index{Aspinall, David}
\index{Compagnoni, Adriana}
\begin{chunk}{axiom.bib}
@@ 12787,6 +13093,35 @@ when shown in factored form.
\end{chunk}
+\index{Aspinall, David}
+\index{Denney, Ewen}
+\index{Luth, Christoph}
+\begin{chunk}{axiom.bib}
+@article{Aspi08,
+ author = "Aspinall, David and Denney, Ewen and Luth, Christoph",
+ title = {{A Tactic Language for Hiproofs}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "We introduce and study a tactic language, Hitac, for constructing
+ hierarchical proofs, known as hiproofs. The idea of hiproofs is to
+ superimpose a labelled hierarchical nesting on an ordinary proof
+ tree. The labels and nesting are used to describe the organisation
+ of the proof, typically relating to its construction process. This
+ can be useful for understanding and navigating the proof. Tactics
+ in our language construct hiproof structure together with an
+ underlying proof tree. We provide both a bigstep and a smallstep
+ operational semantics for evaluating tactic expressions. The
+ bigstep semantics captures the intended meaning, whereas the
+ smallstep semantics hints at possible implementations and
+ provides a unified notion of proof state. We prove that these
+ notions are equivalent and construct valid proofs.",
+ paper = "Aspi08.pdf"
+}
+
+\end{chunk}
+
\index{Atkey, Robert}
\begin{chunk}{axiom.bib}
@inproceedings{Atke18,
@@ 12812,16 +13147,147 @@ when shown in factored form.
\end{chunk}
+\index{Avigad, Jeremy}
+\begin{chunk}{axiom.bib}
+@misc{Avig19,
+ author = "Avigad, Jeremy",
+ title = {{The Mechanization of Mathematics}},
+ year = "2019",
+ comment = "slides from The Big Proof Workshop",
+ paper = "Avig19.pdf",
+ keywords = "DONE"
+}
+
+\end{chunk}
+
\subsection{B} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Bauer, Andrej}
+\index{Backeljauw, Franky}
+\index{Becuwe, Stefan}
+\index{Cuyt, Annie}
\begin{chunk}{axiom.bib}
@misc{Baue19,
 author = "Bauer, Andrej",
 title = {{How to Implement Type Theory in an Hour}},
+@article{Back08,
+ author = "Backeljauw, Franky and Becuwe, Stefan and Cuyt, Annie",
+ title = {{Validated Evaluation of Special Mathematical Functions}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "Because of the importance of special functions, several books and
+ a large collection of papers have been devoted to the numerical
+ computation of these functions, the most wellknown being the
+ Abramowitz and Stegun handbook. But up to this date, no
+ environment offers routines for the provable correct evaluation of
+ these special functions.
+
+ We point out how series and limitperiodic continued fraction
+ representation of the functions can be helpful in this
+ respect. Our scalable precision technique is mainly based on the
+ use of sharpened a priori truncation and roundoff error upper
+ bounds, in case of real arguments. The implementation is validated
+ in the sense that it returns a sharp interval enclosure for the
+ requested function evaluation, at the same cost as the evaluation.",
+ paper = "Back08.pdf"
+}
+
+\end{chunk}
+
+\index{Bagnara, Roberto}
+\index{Bagnara, Abramo}
+\index{Biselli, Fabio}
+\index{Chiari, Michele}
+\index{Gori, Roberta}
+\begin{chunk}{axiom.bib}
+@misc{Bagn19,
+ author = "Bagnara, Roberto and Bagnara, Abramo and Biselli, Fabio
+ and Chiari, Michele and Gori, Roberta",
+ title = {{Correct Approximation of IEEE 754 FloatingPoint
+ Arithmetic for Program Verification}},
year = "2019",
 link = "\url{https://vimeo.com/286652934}",
 comment = "\url{https://github.com/andrejbauer/spartantypetheory}"
+ link = "\url{https://arxiv.org/abs/1903.06119}",
+ abstract =
+ "Verification of programs using floatingpoint arithmetic is
+ challenging on several accounts. One of the difficulties of
+ reasoning about such programs is due to the peculiarities of
+ floatingpoint arithmetic: rounding errors, infinities,
+ nonnumeric objects (NaNs), signed zeros, denormal numbers,
+ different rounding modes... One possibility to reason about
+ floatingpoint arithmetic is to model a program computation path
+ by means of a set of ternary constraints of the form $z=x op y$
+ and use constraint propagation techniques to infer new information
+ on the variables' possible values. In this setting, we define and
+ prove the correctness of algorithms to precisely bound the value
+ of one of the variables $x$, $y$, or $z$, starting from the bounds
+ known for the other two. We do this for each of the operations and
+ for each rounding mode defined by the IEEE 754 binary
+ floatingpoint standard, even in the case the rounding mode in
+ effect is only partially known. This is the first time that such
+ socalled filtering algorithms are defined and their correctness
+ is formally proved. This is an important slab for paving the way
+ to formal verification of programs that use floatingpoint
+ arithmetics.",
+ paper = "Bagn19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Baker, Josef B.}
+\index{Sexton, Alan P.}
+\index{Sorge, Volker}
+\begin{chunk}{axiom.bib}
+@article{Bake09,
+ author = "Baker, Josef B. and Sexton, Alan P. and Sorge, Volker",
+ title = {{A Linear Grammar Approach to Mathematical Formula
+ Recognition from PDF}},
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
+ abstract =
+ "Many approaches have been proposed over the years for the
+ recognition of mathematical formulae from scanned documents. More
+ recently a need has arisen to recognise formulae from PDF
+ documents. Here we can avoid ambiguities introduced by traditional
+ OCR approaches and instead extract perfect knowledge of the
+ characters used in formulae directly from the document. This can
+ be exploited by formula recognition techniques to achieve correct
+ results and high performance.
+
+ In this paper we revisit an old grammatical approach to formula
+ recognition, that of Anderson from 1968, and assess its
+ applicability with respect to data extracted from PDF
+ documents. We identify some problems of the original method when
+ applied to common mathematical expressions and show how they can
+ be overcome. The simplicity of the original method leads to a very
+ efficient recognition technique that not only is very simple to
+ implement but also yields results of high accuracy for the
+ recognition of mathematical formulae from PDF documents.",
+ paper = "Bake09.pdf",
+ keywords = "DONE"
+}
+
+\end{chunk}
+
+\index{Bakel, Steffan van}
+\begin{chunk}{axiom.bib}
+@article{Bake93,
+ author = "Bakel, Steffan van",
+ title = {{Principal Type Schemes for the Strict Type Assignment System}},
+ journal = "J. Logic and Computation",
+ volume = "3",
+ number = "6",
+ pages = "643--670",
+ year = "1993",
+ abstract =
+ "We study the strict type assignment system, a restriction on the
+ intersection type discipline and prove that it has the principal
+ type property. We define, for a term $M$, the principal pair (of
+ basis and type). We specify three operations on pairs, and prove
+ that all pairs deducible for $M$ can be obtained from the
+ principal one by these operations, and that these map deducible
+ pairs to deducible pairs.",
+ paper = "Bake93.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 12950,6 +13416,18 @@ when shown in factored form.
\end{chunk}
+\index{Barbeau, Edward J.}
+\begin{chunk}{axiom.bib}
+@book{Barb00,
+ author = "Barbeau, Edward J.",
+ title = {{Mathematical Fallacies, Flaws, and Flimflam}},
+ publisher = "American Mathematical Society",
+ year = "2000",
+ paper = "Barb00.pdf"
+}
+
+\end{chunk}
+
\index{Barendregt, Hendrik Pieter}
\begin{chunk}{axiom.bib}
@article{Bare91,
@@ 12980,6 +13458,50 @@ when shown in factored form.
\end{chunk}
+\index{Barendregt, Henk}
+\begin{chunk}{axiom.bib}
+@misc{Bare97,
+ author = "Barendregt, Henk",
+ title = {{The Impact of the Lambda Calculus}},
+ link = "\url{http://www-users.mat.umk.pl/~adwid/materialy/doc/church.pdf}",
+ year = "1997",
+ paper = "Bare97.pdf"
+}
+
+\end{chunk}
+
+\index{Barthe, G.}
+\index{Elbers, H.}
+\begin{chunk}{axiom.bib}
+@misc{Bart96,
+ author = "Barthe, G. and Elbers, H.",
+ title = {{Towards Lean Proof Checking}},
+ year = "1996",
+ abstract =
+ "Logical formal systems are inefficient at computations. In order
+ to increase their efficiency, we aim to extend these systems with
+ computational power. In this paper, we suggest a general, powerful
+ syntax, called oracle types, to extend type theories with
+ computational power; the resulting systems, which combine the
+ logical abilities of logical formal systems and the computational
+ power of term rewriting systems, provide a suitable environment
+ for theorem proving. As a practical application, we present an
+ extension of the theorem prover Lego with oracle types and
+ illustrate the use of this new system in performing algebraic
+ computations. Our implementation of oracle types is very flexible
+ and allows rewriting to be performed either inside Lego or by
+ Reduce, an efficient symbolic computation system. In our view, the
+ main novelty of our approach is to combine a sound theoretical
+ foundation with an efficient implementation. Besides, our work
+ provides the first attempt to combine symbolic computation systems
+ with theorem provers such as Coq and Lego, which are based on
+ intensional type theories.",
+ paper = "Bart96.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Barwise, Jon}
\index{Moss, Lawrence}
\begin{chunk}{axiom.bib}
@@ 12993,6 +13515,18 @@ when shown in factored form.
\end{chunk}
+\index{Bauer, Andrej}
+\begin{chunk}{axiom.bib}
+@misc{Baue19,
+ author = "Bauer, Andrej",
+ title = {{How to Implement Type Theory in an Hour}},
+ year = "2019",
+ link = "\url{https://vimeo.com/286652934}",
+ comment = "\url{https://github.com/andrejbauer/spartan-type-theory}"
+}
+
+\end{chunk}
+
\index{Beer, Randall D.}
\begin{chunk}{axiom.bib}
@article{Beer87,
@@ 13010,6 +13544,64 @@ when shown in factored form.
\end{chunk}
+\index{Biha, Sidi Ould}
+\begin{chunk}{axiom.bib}
+@article{Biha09,
+ author = "Biha, Sidi Ould",
+ title = {{Finite Group Representation Theory with Coq}},
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
+ abstract =
+ "Representation theory is a branch of algebra that allows the
+ study of groups through linear applications, i.e. matrices. Thus
+ problems in abstract groups can be reduced to problems on
+ matrices. Representation theory is the basis for character
+ theory. In this paper we present a formalization of finite groups
+ representation theory in the Coq system that includes a
+ formalization of Maschke's theorem on reducible finite group algebra.",
+ paper = "Biha09.pdf"
+}
+
+\end{chunk}
+
+\index{Black, A.P.}
+\begin{chunk}{axiom.bib}
+@techreport{Blac80,
+ author = "Black, A.P.",
+ title = {{Exception Handling and Data Abstraction}},
+ type = "Research Report",
+ institution = "IBM Research",
+ number = "RC8059",
+ year = "1980"
+}
+
+\end{chunk}
+
+\index{Blanchette, Jasmin Christian}
+\index{Haslbeck, Maximilian}
+\index{Matichuk, Daniel}
+\index{Nipkow, Tobias}
+\begin{chunk}{axiom.bib}
+@article{Blan15,
+ author = "Blanchette, Jasmin Christian and Haslbeck, Maximilian and
+ Matichuk, Daniel and Nipkow, Tobias",
+ title = {{Mining the Archive of Formal Proofs}},
+ journal = "LNCS",
+ volume = "9150",
+ year = "2015",
+ abstract =
+ "The Archive of Formal Proofs is a vast collection of
+ computerchecked proofs developed using the proof assistant
+ Isabelle. We perform an indepth analysis of the archive, looking
+ at various properties of the proof developments, including size,
+ dependencies, and proof style. This gives some insights into the
+ nature of formal proofs",
+ paper = "Blan15.pdf"
+}
+
+\end{chunk}
+
\index{Boas, Peter van Emde}
\begin{chunk}{axiom.bib}
@article{Boas12,
@@ 13037,6 +13629,60 @@ when shown in factored form.
\end{chunk}
+\index{Bodnar, Gabor}
+\index{Kaltenbacher, Barbara}
+\index{Pau, Petru}
+\index{Schicho, Josef}
+\begin{chunk}{axiom.bib}
+@article{Bodn01,
+ author = "Bodnar, Gabor and Kaltenbacher, Barbara and Pau, Petru and
+ Schicho, Josef",
+ title = {{Exact Real Computation in Computer Algebra}},
+ journal = "LNCS",
+ volume = "2630",
+ pages = "279--292",
+ year = "2001",
+ abstract =
+ "Exact real computation allows many of the advantages of numerical
+ computation (e.g. high performance) to be accessed also in
+ symbolic computation, providing validated results. In this paper
+ we present our approach to build a transparent and easy to use
+ connection between the two worlds, using this paradigm. The main
+ discussed topics are representation of exact real objects,
+ operations on exact real matrices, polynomial greatest common
+ divisor and root computation. Some of these problems are
+ illposed; we use regularization methods to solve them.",
+ paper = "Bodn01.pdf"
+}
+
+\end{chunk}
+
+\index{Bohrer, Brandon}
+\index{Crary, Karl}
+\begin{chunk}{axiom.bib}
+@misc{Bohr16,
+ author = "Bohrer, Brandon and Crary, Karl",
+ title = {{A ProofProducing Verified Prolog Compiler}},
+ year = "2016",
+ link = "\url{www.cs.cmu.edu/~bbohrer/pub/twamiclp2016long.pdf}",
+ abstract =
+ "We have designed and implemented a verified compiler for a
+ dialect of Prolog. Our compiler is verified using proofproducing
+ compilation: every compiled program is accompanied with a formal
+ proof that it is equivalent to a particular source program. Our
+ formal proofs take the form of type information for our new
+ verifying abstract machine which we call the TWAM, whose type
+ system natively understands logic programs specified in the
+ logical framework LF. We present a soundness metatheorem for the
+ TWAM showing that welltyped TWAM programs are sound proofsearch
+ procedures. In doing so, we reduce our trusted computing base from
+ the entire compiler to the TWAM typechecker.",
+ paper = "Bohr16.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Bohrer, Brandon}
\index{Crary, Karl}
\begin{chunk}{axiom.bib}
@@ 13081,6 +13727,27 @@ when shown in factored form.
\end{chunk}
+\index{Bostan, Alin}
+\index{Schost, Eric}
+\begin{chunk}{axiom.bib}
+@article{Bost13,
+ author = "Bostan, Alin and Schost, Eric",
+ title = {{A Simple and Fast Algorithm for Computing Exponentials
+ of Power Series}},
+ journal = "Information Processing Letters",
+ volume = "13",
+ pages = "754--756",
+ year = "2013",
+ abstract =
+ "As was initially shown by Brent, exponentials of truncated power
+ series can be computed using a constant number of polynomial
+ multiplications. This note gives a relatively simple algorithm
+ with a low constant factor",
+ paper = "Bost13.pdf"
+}
+
+\end{chunk}
+
\index{Bostock, David}
\begin{chunk}{axiom.bib}
@book{Bost97,
@@ 13094,6 +13761,104 @@ when shown in factored form.
\end{chunk}
+\index{Bouche, Thierry}
+\begin{chunk}{axiom.bib}
+@article{Bouc08,
+ author = "Bouche, Thierry",
+ title = {{Digital Mathematics Libraries: The Good, the Bad, the Ugly}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "The mathematicians' Digital mathematics library (DML), which is
+ not to be confused with libraries of mathematical objects
+ represented in some digital format, is the generous idea that all
+ mathematics ever published should end up in digital form so that
+ it would be more easily referenced, accessible, usable. This
+ concept was formulated at the very beginning of this century, and
+ yielded a lot of international activity that culminated around
+ years 20022005. While it is estimated that a substantial part of
+ the existing math literature is already available in some digital
+ format, nothing looking like one digital mathematics library has
+ emerged, but a multiplicity of competing electronic offers, with
+ unique standards, features, business models, access policies,
+ etc.  even though the contents themselves overlap somewhat,
+ while leaving wide areas untouched. The millennium's appealing
+ idea has become a new Tower of Babel.
+
+ It is not obvious how much of the traditional library functions we
+ should give up while going digital. The point of view shared by
+ many mathematicians is that we should be able to find a reasonable
+ archiving policy fitting all stakeholders, allowing to translate
+ the essential features of the past library system  which is the
+ central infrastructure of all math departments worldwide  in the
+ digital paradigm, while enhancing overall performances thanks to
+ dedicated information technology.
+
+ The vision of this library is rather straightforward: a third
+ party to the academic publishing system, preserving, indexing, and
+ keeping current its digital collections through a distributed
+ network of partners curating the physical holdings, and a
+ centralized access facility making use of innovative mining and
+ interlinking techniques for easy navigation and discovery.
+
+ However, the fragmentation level is so high that the hope of a
+ unique portal providing seamless access to everything relevant to
+ mathematical research seems now completely out of reach.
+ Nevertheless, we have lessons to learn from each one of the
+ already numerous projects running. One of them is that there are
+ too many items to deal with, and too many different initial choices
+ over metadata sets and formats: it won't be possible to find a
+ nontrivial greatest common divisor coping with everything already
+ available, and manual upgrading is highly improbable.
+
+ This is where future management techniques for loosely formalised
+ mathematical knowledge could provide a new impetus by at last
+ enabling a minimum set of features across projects borders through
+ automated procedures. We can imagine e.g. mathaware OCR on
+ scanned pages, concurrently with interpreters of electronic
+ sources of born digital texts, both producing searchable full
+ texts in a compatible semistructured format. The challenge is
+ ultimately to take advantage of the high formalisation of
+ mathematical texts rather than merely ignoring it!
+
+ With these considerations in mind, the talk will focus on
+ achievements, limitations, and failures of existing digital
+ mathematics libraries, taking the NUMDAM and CEDRAM programs as
+ principal examples, hence the speaker himself is the target.",
+ paper = "Bouc08.pdf",
+ keywords = "DONE"
+}
+
+\end{chunk}
+
+\index{Boyer, Robert S.}
+\index{Moore, J Strother}
+\begin{chunk}{axiom.bib}
+@inbook{Boye72,
+ author = "Boyer, Robert S. and Moore, J Strother",
+ title = {{The Sharing of Structure in Theorem Proving Programs}},
+ booktitle = "Machine Intelligence 7",
+ publisher = "Edinburgh University",
+ pages = "110116",
+ year = "1972",
+ abstract =
+ "We describe how clauses in resolution programs can be represented
+ and used without applying substitutions or consing lists of
+ literals. The amount of space required by our representation of a
+ clause is independent of the number of literals in the clause and
+ the depth of function nesting. We introduce the concept of the
+ value of an expression in a binding environment which we use to
+ standardize clauses apart and share the structure of parents in
+ representing the resolvent. We present unification and resolution
+ algorithms for our representation. Some data comparing our
+ representation to more conventional ones is given.",
+ paper = "Boye72.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Boyer, Robert S.}
\index{Moore, J Strother}
\begin{chunk}{axiom.bib}
@@ 13156,6 +13921,39 @@ when shown in factored form.
\end{chunk}
+\index{Bradford, Russell}
+\index{Davenport, James H.}
+\index{England, Matthew}
+\index{Wilson, David}
+\begin{chunk}{axiom.bib}
+@article{Brad13a,
+ author = "Bradford, Russell and Davenport, James H. and England, Matthew
+ and Wilson, David",
+ title = {{Optimising Problem Formulation for Cylindrical Algebraic
+ Decomposition}},
+ journal = "LNCS",
+ volume = "7961",
+ year = "2013",
+ abstract =
+ "Cylindrical Algebraic Decomposition (CAD) is an important tool
+ for the study of real algebraic geometry with many applications
+ both within mathematics and elsewhere. It is known to have doubly
+ exponential complexity in the number of variables in the worst
+ case, but the actual computation time can vary greatly. It is
+ possible to offer different formulations for a given problem
+ leading to great differences in tractability. In this paper we
+ suggest a new measure for CAD complexity which takes into account
+ the real geometry of the problem. This leads to new heuristics for
+ choosing: the variable ordering for a CAD problem, a designated
+ equational constraint, and formulations for truthtable invariant
+ CADs (TTICASs). We then consider the possibility of using Groebner
+ bases to precondition TTICAD and when such formulations constitute
+ the creation of a new problem.",
+ paper = "Brad13a.pdf"
+}
+
+\end{chunk}
+
\index{Brady, Edwin}
\begin{chunk}{axiom.bib}
@misc{Brad17,
@@ 13338,6 +14136,35 @@ when shown in factored form.
\subsection{C} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Cairns, Paul}
+\index{Gow, Jeremy}
+\begin{chunk}{axiom.bib}
+@article{Cair06,
+ author = "Cairns, Paul and Gow, Jeremy",
+ title = {{Literate Proving: Presenting and Documenting Formal Proofs}},
+ journal = "LNCS",
+ volume = "4108",
+ year = "2006",
+ abstract =
+ "Literate proving is the analogue for literate programming in the
+ mathematical realm. That is, the goal of literate proving is for
+ humans to produce clear expositions of formal mathematics that
+ could even be enjoyable for people to read whilst remaining
+ faithful representations of the actual proofs. This paper
+ describes maze, a generic literate proving system. Authors markup
+ formal proof files, such as Mizar files, with arbitrary XML and
+ use maze to obtain the selected extracts and transform them for
+ presentation, e.g. as Latex. To aid its use, maze has built in
+ transformations that include pretty printing and proof sketching
+ for inclusion in latex documents. These transformations challenge
+ the concept of faithfulness in literate proving but it is argued
+ that this should be a distinguishing feature of literate proving
+ from literate programming.",
+ paper = "Cair06.pdf"
+}
+
+\end{chunk}
+
\index{Callaghan, Paul}
\begin{chunk}{axiom.bib}
@article{Call08,
@@ 13378,6 +14205,115 @@ when shown in factored form.
\end{chunk}
+\index{Calmet, Jacques}
+\begin{chunk}{axiom.bib}
+@article{Calm09,
+ author = "Calmet, Jacques",
+ title = {{AbstractionBased Information Technology: A Framework for
+ Open Mechanized Reasoning}},
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
+ abstract =
+ "OMRS (Open Mechanized Reasoning Systems) was designed for
+ Automated Theorem Proving and then extended to Computer
+ Algebra. These are the two domains at the heart of the Calculemus
+ approach. An obvious question is to assess whether such an
+ approach can be extended to new domains either within AI or
+ outside of AI. There have been several attempts to turn the world
+ into a computational system. This talk stays away from such
+ general attempts and introduces a framework that is fully set
+ within AI. It extends the basic concepts of OMRS to diverse fields
+ ranging from information technology to sociology through law as
+ illustrated by examples. The main motivation is to claim that
+ whatever the selected approach, Artificial Intelligence is gaining
+ enough strength and power to reach new frontiers and to turn
+ challenges that are not a priori of a purely computational nature
+ into AI domains.",
+ paper = "Calm09.pdf"
+}
+
+\end{chunk}
+
+\index{Calmet, Jacques}
+\index{Campbell, John A.}
+\begin{chunk}{axiom.bib}
+@article{Calm10,
+ author = "Calmet, Jacques and Campbell, John A.",
+ title = {{A Revisited Perspective on Symbolic Mathematical Computing
+ and Artificial Intelligence}},
+ journal = "LNCS",
+ volume = "6167",
+ year = "2010",
+ abstract =
+ "We provide a perspective on the current state and possible future
+ of links between symbolic mathematical computing and artificial
+ intelligence, on the occasion of the 10th biennial conference
+ (AISMC, later AISC) devoted to those connections. It follows a
+ similar perspective expressed for the first such conference in 1992
+ and then revised and expanded 5 years later. Issues related to the
+ computational management of mathematical knowledge are
+ highlighted.",
+ paper = "Calm10.pdf"
+}
+
+\end{chunk}
+
+\index{Calude, Cristian S.}
+\index{Muller, Christine}
+\begin{chunk}{axiom.bib}
+@article{Calu09,
+ author = "Calude, Cristian S. and Muller, Christine",
+ title = {{Formal Proof: Reconciling Correctness and Understanding}},
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
+ abstract =
+ "Hilbert's concept of formal proof is an ideal of rigour for
+ mathematics which has important applications in mathematical
+ logic, but seems irrelevant for the practice of mathematics. The
+ advent, in the last twenty years, of proof assistants was followed
+ by an impressive record of deep mathematical theorems formally
+ proved. Formal proof is practically achievable. With formal proof,
+ correctness reaches a standard that no penandpaper proof can
+ match, but an essential component of mathematics  the insight
+ and understanding  seems to be in short supply. So, what makes a
+ proof understandable? To answer this question we first suggest a
+ list of symptoms of understanding. We then propose a vision of an
+ environment in which users can write and check formal proofs as
+ well as query them with reference to the symptoms of
+ understanding. In this way, the environment reconciles the main
+ features of proof: correctness and understanding.",
+ paper = "Calu09.pdf"
+}
+
+\end{chunk}
+
+\index{Campbell, J.A.}
+\index{Hearn, Anthony C.}
+\begin{chunk}{axiom.bib}
+@article{Camp70,
+ author = "Campbell, J.A. and Hearn, Anthony C.",
+ title = {{Symbolic Analysis of Feynman Diagrams by Computer}},
+ journal = "J. of Computational Physics",
+ volume = "5",
+ number = "2",
+ pages = "280--327",
+ year = "1970",
+ abstract =
+ "We describe a system of programs in the language LISP 1.5 which
+ handles all stages of calculation from the specification of an
+ elementaryparticle process in terms of a Hamiltonian of
+ interaction or Feynman diagrams to the derivation of an absolute
+ square of the matrix element for the process. Examples of
+ significant parts of the program are presented in the text, while
+ a detailed listing of this material is contained in two Appendices
+ which are available on request from the authors.",
+ paper = "Camp70.pdf"
+}
+
+\end{chunk}
+
\index{Cardelli, Luca}
\begin{chunk}{axiom.bib}
@techreport{Card93,
@@ 13421,6 +14357,123 @@ when shown in factored form.
\end{chunk}
+\index{Carette, Jacques}
+\index{Farmer, William M.}
+\index{Kohlhase, Michael}
+\begin{chunk}{axiom.bib}
+@article{Care14,
+ author = "Carette, Jacques and Farmer, William M. and Kohlhase, Michael",
+ title = {{Realms: A Structure for Consolidating Knowledge about
+ Mathematical Theories}},
+ journal = "LNCS",
+ volume = "8543",
+ year = "2014",
+ abstract =
+ "Since there are different ways of axiomatizing and developing a
+ mathematical theory, knowledge about such a theory may reside in
+ many places and in many forms within a library of formalized
+ mathematics. We introduce the notion of a realm as a structure for
+ consolidating knowledge about a mathematical theory. A realm
+ contains several axiomatizations of a theory that are separately
+ developed. Views interconnect these developments and establish
+ that the axiomatizations are equivalent in the sense of being
+ mutually interpretable. A realm also contains an external interface
+ that is convenient for users of the library who want to apply the
+ concepts and facts of the theory without delving into the details
+ of how the concepts and facts were developed. We illustrate the
+ utility of realms through a series of examples. We also give an
+ outline of the mechanisms that are needed to create and maintain
+ realms.",
+ paper = "Care14.pdf"
+}
+
+\end{chunk}
+
+\index{Carette, Jacques}
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@article{Care17,
+ author = "Carette, Jacques and Farmer, William M.",
+ title = {{Formalizing Mathematical Knowledge as a Biform Theory
+ Graph: A Case Study}},
+ journal = "LNCS",
+ volume = "10383",
+ year = "2017",
+ abstract =
+ "A biform theory is a combination of an axiomatic theory and an
+ algorithmic theory that supports the integration of reasoning and
+ computation. These are ideal for formalizing algorithms that
+ manipulate mathematical expressions. A theory graph is a network
+ of theories connected by meaningpreserving theory morphisms that
+ map the formulae of one theory to the formulas of another
+ theory. Theory graphs are in turn well suited for formalizing
+ mathematical knowledge at the most convenient level of abstraction
+ using the most convenient vocabulary. We are interested in the
+ problem of whether a body of mathematical knowledge can be
+ effectively formalized as a theory graph of biform theories. As a
+ test case, we look at the graph of theories encoding natural
+ number arithmetic. We used two different formalisms to do this,
+ which we describe and compare. The first is realized in
+ CTT$_{uqe}$, a version of Church's type theory with quotation and
+ evaluation, and the second is realized in Agda, a dependently
+ typed programming language.",
+ paper = "Care17.pdf"
+}
+
+\end{chunk}
+
+\index{Carette, Jacques}
+\index{Farmer, William M.}
+\index{Sharoda, Yasmine}
+\begin{chunk}{axiom.bib}
+@article{Care18,
+ author = "Carette, Jacques and Farmer, William M. and Sharoda, Yasmine",
+ title = {{Biform Theories: Project Description}},
+ journal = "LNCS",
+ volume = "11006",
+ year = "2018",
+ abstract =
+ "A biform theory is a combination of an axiomatic theory and an
+ algorithmic theory that supports the integration of reasoning and
+ computation. These are ideal for specifying and reasoning about
+ algorithms that manipulate mathematical expressions. However,
+ formalizing biform theories is challenging as it requires the
+ means to express statements about the interplay of what these
+ algorithms do and what their actions mean mathematically. This
+ paper describes a project to develop a methodology for expressing,
+ manipulating, managing, and generating mathematical knowledge as a
+ network of biform theories. It is a subproject of MathScheme, a
+ longterm project at McMaster University to produce a framework
+ for integrating formal deduction and symbolic computation.",
+ paper = "Care18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Carlisle, David}
+\index{Dewar, Mike}
+\begin{chunk}{axiom.bib}
+@article{Carl03,
+ author = "Carlisle, David and Dewar, Mike",
+ title = {{NAG Library Documentation}},
+ journal = "LNCS",
+ volume = "2594",
+ year = "2003",
+ abstract =
+ "This paper describes the management and evolution of a large
+ collection of 1200 documents detailing the functionality in NAG
+ Library products.
+
+ This provides a case study addressing many of the issues which
+ concern the ``MKM'' project, involving conversion of legacy
+ formats (SGML and Latex) to XML, and inferring semantic content
+ from mainly presentational mathematical expressions.",
+ paper = "Carl03.pdf"
+}
+
+\end{chunk}
+
\index{Castagna, Giuseppe}
\index{Lanvin, Victor}
\index{Petrucciani, Tommaso}
@@ 13509,6 +14562,225 @@ when shown in factored form.
\end{chunk}
+\index{Char, Bruce W.}
+\index{Geddes, Keith O.}
+\index{Gonnet, Gaston H.}
+\begin{chunk}{axiom.bib}
+@article{Char84,
+ author = "Char, Bruce W. and Geddes, Keith O. and Gonnet, Gaston H.",
+ title = {{GCDHEU: Heuristic polynomial GCD algorithm based on Integer
+ GCD computation}},
+ journal = "LNCS",
+ volume = "174",
+ pages = "285--296",
+ year = "1984",
+ abstract =
+ "The design of algorithms for polynomial GCD computation has been
+ a continuing area of research since the beginning of the
+ development of symbolic computation systems. The earliest efforts
+ were mainly directed at PRS (Polynomial Remainder Sequence)
+ algorithms which are a direct generalization of Euclid's
+ algorithm. The main algorithms of this type are the Reduced PRS
+ algorithm and the Subresultant PRS algorithm. Hearn discusses the
+ use of trial divisions to further improve the performance of PRS
+ algorithms. The first fundamentally different polynomial GCD
+ algorithm was the modular algorithm. To make the modular algorithm
+ competitive for sparse multivariate polynomials, Zippel developed
+ the sparse modular algorithm. Another modulartype algorithm was
+ the Henselbased EZ GCD algorithm which was later improved as the
+ EEZ GCD algorithm.
+
+ The present paper discusses a new heuristic algorithm, GCDHEU,
+ which is found to be very efficient for problems in a small number
+ of variables. The heuristic algorithm can be viewed as
+ a modular-type algorithm in that it uses evaluation and
+ interpolation, but only a single evaluation per variable is
+ used. The heuristic algorithm can be incorporated into a
+ reorganized form of the EEZ GCD algorithm such that the base of
+ the EEZ GCD algorithm, rather than a univariate GCD algorithm, is
+ GCDHEU which is often successful for problems in up to four variables.",
+ paper = "Char84.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Cheng, Howard}
+\index{Labahn, George}
+\begin{chunk}{axiom.bib}
+@inproceedings{Chen06,
+ author = "Cheng, Howard and Labahn, George",
+ title = {{On Computing Polynomial GCDs in Alternate Bases}},
+ booktitle = "ISSAC '06",
+ publisher = "ACM",
+ year = "2006",
+ pages = "47--54",
+ abstract =
+ "In this paper, we examine the problem of computing the greatest
+ common divisor (GCD) of univariate polynomials represented in
+ different bases. When the polynomials are represented in Newton
+ basis or a basis of orthogonal polynomials, we show that the
+ wellknown Sylvester matrix can be generalized. We give
+ fractionfree and modular algorithms to directly compute the GCD
+ in the alternate basis. These algorithms are suitable for
+ computation in domains where growth of coefficients in
+ intermediate computations are a central concern. In the cases of
+ Newton basis and bases using certain orthogonal polynomials, we
+ also show that the standard subresultant algorithm can be applied
+ easily. If the degrees of the input polynomials is at most $n$ and
+ the degree of the GCD is at least $n/2$, our algorithms outperform
+ the corresponding algorithms using the standard power basis.",
+ paper = "Chen06.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Cheng, Howard}
+\index{Labahn, George}
+\index{Zhou, Wei}
+\begin{chunk}{axiom.bib}
+@article{Chen08,
+ author = "Cheng, Howard and Labahn, George and Zhou, Wei",
+ title = {{Computing Polynomial LCM and GCD in Lagrange Basis}},
+ journal = "Communications in Computer Algebra",
+ volume = "42",
+ number = "3",
+ pages = "129--130",
+ year = "2008",
+ paper = "Chen08.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Chen, Changbo}
+\index{Maza, Marc Moreno}
+\index{Pan, Wei}
+\index{Xie, Yuzhen}
+\begin{chunk}{axiom.bib}
+@article{Chen08a,
+ author = "Chen, Changbo and Maza, Marc Moreno and Pan, Wei and Xie, Yuzhen",
+ title = {{On the Verification of Polynomial System Solvers}},
+ journal = "Frontiers of Computer Science in China",
+ volume = "2",
+ number = "1",
+ pages = "55--66",
+ year = "2008",
+ abstract =
+ "We discuss the verification of mathematical software solving
+ polynomial systems symbolically by way of triangular
+ decomposition. Standard verification techniques are highly
+ resource consuming and apply only to polynomial systems which are
+ easy to solve. We exhibit a new approach which manipulates
+ constructible sets represented by regular systems. We provide
+ comparative benchmarks of different verification procedures
+ applied to four solvers on a large set of wellknown polynomial
+ systems. Our experimental results illustrate the high efficiency
+ of our new approach. In particular, we are able to verify
+ triangular decomposition of polynomial systems which are not easy
+ to solve.",
+ paper = "Chen08a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Chen, Changbo}
+\index{Covanov, Svyatoslav}
+\index{Mansouri, Farnam}
+\index{Maza, Marc Moreno}
+\index{Xie, Ning}
+\index{Xie, Yuzhen}
+\begin{chunk}{axiom.bib}
+@article{Chen14,
+ author = "Chen, Changbo and Covanov, Svyatoslav and Mansouri, Farnam
+ and Maza, Marc Moreno and Xie, Ning and Xie, Yuzhen",
+ title = {{Basic Polynomial Algebra Subprograms}},
+ journal = "Communications in Computer Algebra",
+ volume = "48",
+ number = "3/4",
+ pages = "197--201",
+ year = "2014",
+ paper = "Chen14.pdf"
+}
+
+\end{chunk}
+
+\index{Chen, Changbo}
+\index{Maza, Marc Moreno}
+\begin{chunk}{axiom.bib}
+@article{Chen15,
+ author = "Chen, Changbo and Maza, Marc Moreno",
+ title = {{Simplification of Cylindrical Algebraic Formulas}},
+ journal = "LNCS",
+ volume = "9301",
+ pages = "119--134",
+ year = "2015",
+ comment = "Int. Workshop on Computer Algebra in Scientific Computing",
+ paper = "Chen15.pdf"
+}
+
+\end{chunk}
+
+\index{Chen, Changbo}
+\index{Maza, Marc Moreno}
+\begin{chunk}{axiom.bib}
+@article{Chen16,
+ author = "Chen, Changbo and Maza, Marc Moreno",
+ title = {{Quantifier Elimination by Cylindrical Algebraic
+ Decomposition based on Regular Chains}},
+ journal = "Journal of Symbolic Computation",
+ volume = "75",
+ pages = "74--93",
+ year = "2016",
+ abstract =
+ "A quantifier elimination algorithm by cylindrical algebraic
+ decomposition based on regular chains is presented. The main idea
+ is to refine a complex cylindrical tree until the signs of
+ polynomials appearing in the tree are sufficient to distinguish
+ the true and false cells. We report an implementation of our
+ algorithm in the RegularChains library in MAPLE and illustrate its
+ effectiveness by examples.",
+ paper = "Chen16.pdf"
+}
+
+\end{chunk}
+
+\index{Cheng, Eugenia}
+\begin{chunk}{axiom.bib}
+@misc{Chen04,
+ author = "Cheng, Eugenia",
+ title = {{How to write proofs: A quick guide}},
+ link = "\url{http://cheng.staff.shef.ac.uk/proofguide/proofguide.pdf}",
+ year = "2004",
+ paper = "Chen04.pdf",
+ keywords = "printed,DONE"
+}
+
+\end{chunk}
+
+\index{Chojecki, Przemyslaw}
+\begin{chunk}{axiom.bib}
+@article{Choj17,
+ author = "Chojecki, Przemyslaw",
+ title = {{DeepAlgebra  An Outline of a Program}},
+ journal = "LNCS",
+ volume = "10383",
+ year = "2017",
+ abstract =
+ "We outline a program in the area of formalization of mathematics
+ to automate theorem proving in algebra and algebraic geometry. We
+ propose a construction of a dictionary between automated theorem
+ provers and (La)Tex exploiting syntactic parsers. We describe its
+ application to a repository of humanwritten facts and definitions
+ in algebraic geometry (The Stacks Project). We use deep learning
+ techniques.",
+ paper = "Choj17.pdf"
+}
+
+\end{chunk}
+
\index{Chiswell, Ian}
\index{Hodges, Wilfrid}
\begin{chunk}{axiom.bib}
@@ 13523,6 +14795,35 @@ when shown in factored form.
\end{chunk}
+\index{Conrad, Brian}
+\begin{chunk}{axiom.bib}
+@misc{Conr05,
+ author = "Conrad, Brian",
+ title = {{Impossibility Theorems for Elementary Integration}},
+ year = "2005",
+ link =
+ "\url{http://www2.maths.ox.ac.uk/cmi/library/academy/LectureNotes05/Conrad.pdf}",
+ abstract =
+ "Liouville proved that certain integrals, most famously
+ $\int{e^{-x^2}}~dx$, cannot be expressed in elementary terms. We
+ explain how to give precise meaning to the notion of integration
+ ``in elementary terms'', and we formulate Liouville's theorem
+ that characterizes the possible form of elementary
+ antiderivatives. Using this theorem, we deduce a practical
+ criterion for proving such impossibility results in special cases.
+
+ This criterion is illustrated for the Gaussian integral
+ $\int{e^{-x^2}}~dx$ from probability theory, the logarithmic
+ integral $\int{}~dt/\log(t)$ from the study of primes, and
+ elliptic integrals. Our exposition is aimed at students who are
+ familiar with calculus and elementary abstract algebra (at the
+ level of polynomial rings $F(t)$ over a field $F$).",
+ paper = "Conr05.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Christiansen, David Thrane}
\begin{chunk}{axiom.bib}
@misc{Chri18,
@@ 13583,6 +14884,19 @@ when shown in factored form.
\end{chunk}
+\index{Clark, K.L.}
+\index{Tarnlund, S.A.}
+\begin{chunk}{axiom.bib}
+@book{Clar82,
+ author = "Clark, K.L. and Tarnlund, S.A.",
+ title = {{Logic Programming}},
+ publisher = "Academic Press",
+ year = "1982",
+ isbn = "0121755207"
+}
+
+\end{chunk}
+
\index{Clochard, Martin}
\index{Gondelman, Leon}
\index{Pereira, Mario}
@@ 13657,6 +14971,66 @@ when shown in factored form.
\end{chunk}
+\index{Cohen, Cyril}
+\index{Mahboubi, Assia}
+\begin{chunk}{axiom.bib}
+@article{Cohe10,
+ author = "Cohen, Cyril and Mahboubi, Assia",
+ title = {{A Formal Quantifier Elimination for Algebraically Closed Fields}},
+ journal = "LNCS",
+ volume = "6167",
+ year = "2010",
+ abstract =
+ "We prove formally that the first order theory of algebraically
+ closed fields enjoys quantifier elimination, and hence is
+ decidable. This proof is organized in two modular parts. We first
+ reify the first order theory of rings and prove that quantifier
+ elimination leads to decidability. Then we implement an algorithm
+ which constructs a quantifier free formula from any first order
+ formula in the theory of rings. If the underlying ring is in fact
+ an algebraically closed field, we prove that the two formulas have
+ the same semantic. The algorithm producing the quantifier free
+ formula is programmed in continuation passing style, which leads
+ to both a concise program and an elegant proof of semantics
+ correctness.",
+ paper = "Cohe10.pdf"
+}
+
+\end{chunk}
+
+\index{Cohl, Howard S.}
+\index{GreinerPetter, Andre}
+\index{Schubotz, Moritz}
+\begin{chunk}{axiom.bib}
+@article{Cohl18,
+ author = "Cohl, Howard S. and GreinerPetter, Andre and Schubotz, Moritz",
+ title = {{Automated Symbolic and Numerical Testing of DLMF Formulae
+ Using Computer Algebra Systems}},
+ journal = "LNCS",
+ volume = "11006",
+ year = "2018",
+ abstract =
+ "We have developed an automated procedure for symbolic and
+ numerical testing of formulae extracted from the National
+ Institute of Standards and Technology (NIST) Digital Library of
+ Mathematical Functions (DLMF). For the NIST Digital Repository of
+ Mathematical Formulae, we have developed conversion tools from
+ semantic Latex to Computer Algebra System (CAS) MAPLE which relies
+ on Youssef's partofmath tagger. We convert a test data subset of
+ 4,078 semantic Latex DLMF formulae extracted from the DLMF to the
+ native CAS representation and then apply an automated scheme for
+ symbolic and numerical testing and verification. Our framework is
+ implemented using Java and MAPLE. We describe in detail the
+ conversion process which is required so that the CAS is able to
+ correctly interpret the mathematical representation of the
+ formulae. We describe the improvement of the effectiveness of our
+ automated scheme through incremental enhancements (making more
+ precise) of the mathematical semantics markup of the formulae.",
+ paper = "Cohl18.pdf"
+}
+
+\end{chunk}
+
\index{Collofello, James S.}
\begin{chunk}{axiom.bib}
@techreport{Coll88,
@@ 13697,7 +15071,23 @@ when shown in factored form.
}
\end{chunk}

+
+\index{Corless, Robert}
+\index{Postma, Erik}
+\index{Stoutemyer, David}
+\begin{chunk}{axiom.bib}
+@inproceedings{Corl11,
+ author = "Corless, Robert and Postma, Erik and Stoutemyer, David",
+ title = {{GCD of Multivariate Approximate Polynomials using
+ Beautification with the Subtractive Algorithm}},
+ booktitle = "Int. Workshop on SymbolicNumeric Computation",
+ publisher = "ACM",
+ year = "2011",
+ paper = "Corl11.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
\index{Copeland, B. Jack}
\begin{chunk}{axiom.bib}
@@ 13765,7 +15155,7 @@ when shown in factored form.
keywords = "printed"
}
\end{chunk
+\end{chunk}
\index{Cutland, Nigel}
\begin{chunk}{axiom.bib}
@@ 13805,7 +15195,20 @@ when shown in factored form.
year = "2008",
isbn = "9783540208792",
paper = "Dale08.pdf"
+}
+
+\end{chunk}
+\index{Daly, Timothy}
+\begin{chunk}{axiom.bib}
+@misc{Daly18a,
+ author = "Daly, Timothy",
+ title = {{Proving Axiom Sane Talk}},
+ comment = "International Conference on Mathematical Software",
+ journal = "LNCS",
+ volume = "10931",
+ year = "2018",
+ paper = "Daly18a.pdf"
}
\end{chunk}
@@ 13825,6 +15228,234 @@ when shown in factored form.
\end{chunk}
+\index{Davenport, James}
+\begin{chunk}{axiom.bib}
+@misc{Davexxa,
+ author = "Davenport, James",
+ title = {{Integration in Finite Terms}},
+ year = "unknown",
+ paper = "Davexxa.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Davenport, James}
+\index{Padget, Julian}
+\begin{chunk}{axiom.bib}
+@article{Dave85c,
+ author = "Davenport, James and Padget, Julian",
+ title = {{HEUGCD: How Elementary Upperbounds Generate Cheaper Data}},
+ journal = "LNCS",
+ volume = "204",
+ year = "1985",
+ booktitle = "EUROCAL '85 European Conference on Computer Algebra",
+ publisher = "Springer",
+ abstract =
+ "The work presented in this paper is a direct consequence of the
+ ideas set forth by Char et al (1984b) describing a new technique
+ for computing the greatest common divisor of polynomials.",
+ paper = "Dave85c.pdf"
+}
+
+\end{chunk}
+
+\index{Davenport, James H.}
+\index{Dewar, Michael C.}
+\index{Richardson, Michael G.}
+\begin{chunk}{axiom.bib}
+@inbook{Dave92f,
+ author = "Davenport, James H. and Dewar, Michael C. and
+ Richardson, Michael G.",
+ title = {{Symbolic and Numeric Computation: the Example of IRENA}},
+ booktitle = "Symbolic and Numerical Computation for Artificial Intelligence",
+ pages = "347--362",
+ year = "1992",
+ publisher = "Academic Press",
+
+ abstract =
+ "Historically symbolic and numeric computation have pursued
+ different lines of evolution, have been written in different
+ languages and generally seen to be competitive rather than
+ complementary techniques. Even when both were used to solve a
+ problem ad hoc methods were used to transfer the data between
+ them.
+
+ We first discuss the reasons for this dichotomy, and then present
+ IRENA, a system being developed by the authors to present an
+ integrated environment with all the facilities of Reduce combined
+ with the functionality of the NAG FORTRAN library.
+
+ Not only does IRENA allow the Reduce user to make calls to the NAG
+ Library interactively, it also converts a natural input
+ representation to the required unnatural FORTRAN one and
+ viceversa on output, which results in a much more intuitive
+ interface. Many parameters have default values and so need not be
+ supplied by the user.",
+ paper = "Dave92f.pdf"
+}
+
+\end{chunk}
+
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Dave01,
+ author = "Davenport, James H.",
+ title = {{Mathematical Knowledge Management}},
+ booktitle = "Int. Workshop on Mathematical Knowledge Management",
+ link = "\url{https://www.emis.de/proceedings/MKM2001/davenport.pdf}",
+ publisher = "RISC",
+ year = "2001",
+ paper = "Dave01.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@article{Dave03,
+ author = "Davenport, James H.",
+ title = {{MKM from Book to Computer: A Case Study}},
+ journal = "LNCS",
+ volume = "2594",
+ year = "2003",
+ abstract =
+ "[2] is one of the great mathematical knowledge
+ repositories. Nevertheless, it was written for a different era,
+ and for human readership. In this paper, we describe the sorts of
+ knowledge in one chapter (elementary transcendental functions) and
+ the difficulties in making this sort of knowledge formal. This
+ makes us ask questions about the nature of a Mathematical
+ Knowledge Repository, and whether a database is enough, or whether
+ more ``intelligence'' is required.",
+ paper = "Dave03.pdf"
+}
+
+\end{chunk}
+
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@article{Dave07a,
+ author = "Davenport, James H.",
+ title = {{What Might ``Understand a Function'' Mean?}},
+ journal = "LNCS",
+ volume = "4573",
+ year = "2007",
+ abstract =
+ "Many functions in classical mathematics are largely defined in
+ terms of their derivatives, so Bessel's function is ``the''
+ solution of Bessel's equation, etc. For definiteness, we need to
+ add other properties, such as initial values, branch cuts,
+ etc. What actually makes up ``the definition'' of a function in
+ computer algebra? The answer turns out to be a combination of
+ arithmetic and analytic properties.",
+ paper = "Dave07a.pdf"
+}
+
+\end{chunk}
+
+\index{Davenport, James H.}
+\index{Kohlhase, Michael}
+\begin{chunk}{axiom.bib}
+@article{Dave09,
+ author = "Davenport, James H. and Kohlhase, Michael",
+ title = {{Unifying Math Ontologies: A Tale of Two Standards}},
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
+ abstract =
+ "One of the fundamental and seemingly simple aims of mathematical
+ knowledge management (MKM) is to develop and standardize formats
+ that allow to ``represent the meaning of the objects of
+ mathematics''. The open formats OpenMath and MathML address this,
+ but differ subtly in syntax, rigor, and structural viewpoints
+ (notably over calculus). To avoid fragmentation and smooth out
+ interoperability obstacles, effort is under way to align them into
+ a joint format OpenMath/MathML 3. We illustrate the issues that
+ come up in such an alignment by looking at three main areas: bound
+ variables and conditions, calculus (which relates to the previous)
+ and ``lifted'' nary operators.",
+ paper = "Dave09.pdf"
+}
+
+\end{chunk}
+
+\index{Day, Martin V.}
+\begin{chunk}{axiom.bib}
+@book{Dayx16,
+ author = "Day, Martin V.",
+ title = {{An Introduction to Proofs and the Mathematical Vernacular}},
+ year = "2016",
+ publisher = "Virginia Tech",
+ link = "\url{www.math.vt.edu/people/day/ProofsBook/IPaMV.pdf}",
+ paper = "Dayx16.pdf"
+}
+
+\end{chunk}
+
+\index{Denes, Maxime}
+\index{Mortberg, Anders}
+\index{Siles, Vincent}
+\begin{chunk}{axiom.bib}
+@misc{Dene19,
+ author = "Denes, Maxime and Mortberg, Anders and Siles, Vincent",
+ title = {{A Refinementbased Approach to Computational Algebra in COQ}},
+ year = "2019",
+ link = "\url{www.cse.chalmers.se/~mortberg/papers/coqeal.pdf}",
+ abstract =
+ "We describe a stepbystep approach to the implementation and
+ formal verification of efficient algebraic algorithms. Formal
+ specifications are expressed on rich data types which are suitable
+ for deriving essential theoretical properties. These
+ specifications are then refined to concrete implementations on
+ more efficient data structures and linked to their abstract
+ counterparts. We illustrate this methodology on key applications:
+ matrix rank computation, Winograd's fast matrix product,
+ Karatsuba's polynomial multiplication, and the gcd of multivariate
+ polynomials.",
+ paper = "Dene19.pdf",
+ keywords = "printed",
+}
+
+\end{chunk}
+
+\index{Deplagne, Eric}
+\index{Kirchner, Claude}
+\begin{chunk}{axiom.bib}
+@article{Depl02,
+ author = "Deplagne, Eric and Kirchner, Claude",
+ title = {{Deduction versus Computation: The Case of Induction}},
+ journal = "LNCS",
+ volume = "2385",
+ year = "2002",
+ abstract =
+ "The fundamental difference and the essential complementarity
+ between computation and deduction are central in computer algebra,
+ automated deduction, proof assistants and in frameworks making
+ them cooperating. In this work we show that the fundamental proof
+ method of induction can be understood and implemented as either
+ computation or deduction.
+
+ Inductive proofs can be built either explicitly by making use of
+ an induction principle or implicitly by using the socalled
+ induction by rewriting and inductionless induction methods. When
+ mechanizing proof construction, explicit induction is used in
+ proof assistants and implicit induction is used in rewrite based
+ automated theorem provers. The two approaches are clearly
+ complementary but up to now there was no framework able to
+ encompass and to understand uniformly the two methods. In this
+ work, we propose such an approach based on the general notion of
+ deduction modulo. We extend slightly the original version of the
+ deduction modulo framework and we provide modularity properties
+ for it. We show how this applies to a uniform understanding of the
+ so called induction by rewriting method and how this relates
+ directly to the general use of the induction principle.",
+ paper = "Depl02.pdf"
+}
+
+\end{chunk}
+
\index{Dershowitz, Nachum}
\index{Gurevich, Yuri}
\begin{chunk}{axiom.bib}
@@ 13856,6 +15487,22 @@ when shown in factored form.
\end{chunk}
+\index{Dewar, Mike}
+\index{Carlisle, David}
+\begin{chunk}{axiom.bib}
+@inproceedings{Dewa01,
+ author = "Dewar, Mike and Carlisle, David",
+ title = {{Mathematical Software: The Next Generation?}},
+ booktitle = "Int. Workshop on Mathematical Knowledge Management",
+ link = "\url{https://www.emis.de/proceedings/MKM2001/printed/dewar.pdf}",
+ publisher = "RISC",
+ year = "2001",
+ paper = "Dewa01.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\index{Dijkstra, Edsger W.}
\index{Feijen, W.H.J}
\begin{chunk}{axiom.bib}
@@ 13911,21 +15558,160 @@ when shown in factored form.
\end{chunk}
\subsection{E} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Dominguez, Cesar}
+\begin{chunk}{axiom.bib}
+@article{Domi08,
+ author = "Dominguez, Cesar",
+ title = {{Formalizing in Coq Hidden Algebras to Specify Symbolic
+ Computation Systems}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "This work is an attempt to formalize, using the Coq proof
+ assistant, the algebraic specification of the data structures
+ appearing in two symbolic computation systems for algebraic
+ topology called EAT and Kenzo. The specification of these
+ structures have been obtained through an operation, called imp
+ operation, between different specification frameworks as standard
+ algebraic specifications and hidden specifications. Reusing
+ previous Coq implementations of universal algebra and category
+ theory we have proposed a Coq formalization of the imp operation,
+ extending the representation to the particular hidden algebras
+ which take part in this operation.",
+ paper = "Domi08.pdf"
+}
\index{Enderton, Herbert B.}
+\end{chunk}
+
+\index{Dominguez, Cesar}
+\index{Rubio, Julio}
\begin{chunk}{axiom.bib}
@book{Ende01,
 author = "Enderton, Herbert B.",
 title = {{A Mathematical Introduction to Logic}},
 publisher = "Harcourt Academic Press",
 year = "2001",
 isbn = "0122384520",
 paper = "Ende01.pdf"
+@article{Domi10,
+ author = "Dominguez, Cesar and Rubio, Julio",
+ title = {{Computing in Coq with Infinite Algebraic Data Structures}},
+ journal = "LNCS",
+ volume = "6167",
+ year = "2010",
+ abstract =
+ "Computational content encoded into constructive type theory
+ proofs can be used to make computing experiments over concrete
+ data structures. In this paper, we explore this possibility when
+ working in Coq with chain complexes of infinite type (that is to
+ say, generated by infinite sets) as a part of the formalization of
+ a hierarchy of homological algebra structures.",
+ paper = "Domi10.pdf"
+}
+
+\end{chunk}
+
+\index{Donahue, J.}
+\begin{chunk}{axiom.bib}
+@misc{Dona77,
+ author = "Donahue, J.",
+ title = {{On the semantics of ``Data Type''}},
+ comment = "Cornell University",
+ year = "1977"
}
\end{chunk}
+\index{Dunfield, Joshua}
+\index{Krishnaswami, Neelakantan R.}
+\begin{chunk}{axiom.bib}
+@misc{Dunf13,
+ author = "Dunfield, Joshua and Krishnaswami, Neelakantan R.",
+ title = {{Complete and Easy Bidirectional Typechecking for Higher-Rank
+ Polymorphism}},
+ link = "\url{https://arxiv.org/pdf/1306.6032.pdf}",
+ year = "2013",
+ abstract =
+ "Bidirectional typechecking, in which terms either synthesize a
+ type or are checked against a known type, has become popular for
+ its scalability (unlike Damas-Milner type inference, bidirectional
+ typing remains decidable even for very expressive type systems),
+ its error reporting, and its relative ease of
+ implementation. Following design principles from proof theory,
+ bidirectional typing can be applied to many type constructs. The
+ principles underlying a bidirectional approach to polymorphism,
+ however, are less obvious. We give a declarative, bidirectional
+ account of higher-rank polymorphism, grounded in proof theory;
+ this calculus enjoys many properties such as $\eta$-reduction and
+ predictability of annotations. We give an algorithm for
+ implementing the declarative system; our algorithm is remarkably
+ simple and wellbehaved, despite being both sound and complete.",
+ paper = "Dunf13.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Dupee, Brian J.}
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@article{Dupe96,
+ author = "Dupee, Brian J. and Davenport, James H.",
+ title = {{An Intelligent Interface to Numerical Routines}},
+ journal = "LNCS",
+ volume = "1128",
+ pages = "252--262",
+ year = "1996",
+ abstract =
+ "Links from Computer Algebra Systems to Numerical Libraries have
+ been increasingly made available. However, they remain, like the
+ numerical routines which comprise the libraries, difficult to use
+ by a novice and there is little help in choosing the appropriate
+ routine for any given problem, should there be a choice.
+
+ Computer Algebra Systems use generic names for each problem
+ area. For example, 'integrate' (or 'int') is used for integration
+ of a function, whatever method the code may use. Numeric
+ interfaces still use different names for each method together with
+ a variety of extra parameters, some of which may be
+ optional. Ideally, we should extend the generic name structure to
+ cover numerical routines. This would then, necessarily, require
+ algorithms for making an assessment of the efficacy of different
+ methods where such a choice exists.
+
+ This paper considers the link to the NAG Fortran Library from
+ version 2.0 of Axiom and shows how we can build on this to extend
+ and simplify the interface using an expert system for choosing and
+ using the numerical routines.",
+ paper = "Dupe96.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Dzamonja, Mirna}
+\begin{chunk}{axiom.bib}
+@misc{Dzam18,
+ author = "Dzamonja, Mirna",
+ title =
+ {{A New Foundational Crisis in Mathematics, Is it really happening?}},
+ link = "\url{https://arxiv.org/pdf/1802.06221.pdf}",
+ year = "2018",
+ abstract =
+ "The article reconsiders the position of the foundations of
+ mathematics after the discovery of HoTT. Discussion that this
+ discovery has generated in the community of mathematicians,
+ philosophers and computer scientists might indicate a new crisis
+ in the foundation of mathematics. By examining the mathematical
+ facts behind HoTT and their relation with the existing
+ foundations, we conclude that the present crisis is not one. We
+ reiterate a pluralist vision of the foundations of mathematics.
+
+ The article contains a short survey of the mathematical and
+ historical background needed to understand the main tenets of the
+ fundational issues.",
+ paper = "Dzam18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\subsection{E} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
\index{Ellis, Ferris}
\begin{chunk}{axiom.bib}
@misc{Elli18,
@@ 13952,8 +15738,190 @@ when shown in factored form.
\end{chunk}
+\index{Ehrig, Hartmut}
+\index{Kreowski, Hans-Jorg}
+\index{Thatcher, James}
+\index{Wagner, Eric}
+\index{Wright, Jesse}
+\begin{chunk}{axiom.bib}
+@article{Ehri80a,
+ author = "Ehrig, Hartmut and Kreowski, Hans-Jorg and Thatcher, James
+ and Wagner, Eric and Wright, Jesse",
+ title = {{Parameterized Data Types in Algebraic Specification Languages}},
+ journal = "LNCS",
+ volume = "85",
+ year = "1980",
+ booktitle = "Automata, Languages and Programming",
+ publisher = "Springer",
+ paper = "Ehri80a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Enderton, Herbert B.}
+\begin{chunk}{axiom.bib}
+@book{Ende01,
+ author = "Enderton, Herbert B.",
+ title = {{A Mathematical Introduction to Logic}},
+ publisher = "Harcourt Academic Press",
+ year = "2001",
+ isbn = "0122384520",
+ paper = "Ende01.pdf"
+}
+
+\end{chunk}
+
+\index{Ershov, A.P.}
+\begin{chunk}{axiom.bib}
+@misc{Ersh77,
+ author = "Ershov, A.P.",
+ title = {{On the Essence of Compilation}},
+ comment = "Proc. IFIP Working Conf. on Formal Description of
+ Programming Concepts, Vol. 1",
+ year = "1977"
+}
+
+\end{chunk}
+
\subsection{F} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Farmer, William M.}
+\index{Mohrenschildt, Martin v.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Farm01,
+ author = "Farmer, William M. and Mohrenschildt, Martin v.",
+ title = {{A Formal Framework for Managing Mathematics}},
+ booktitle = "Int. Workshop on Mathematical Knowledge Management",
+ link = "\url{https://www.emis.de/proceedings/MKM2001/farmer.pdf}",
+ publisher = "RISC",
+ year = "2001",
+ abstract =
+ "Mathematics is a process of creating, exploring, and connecting
+ mathematical models. This paper presents a formal framework for
+ managing the mathematics process as well as the mathematical
+ knowledge produced by the process. The central idea of the
+ framework is the notion of a biform theory which is simultaneously
+ an axiomatic theory and an algorithmic theory. Representing a
+ collection of mathematical models, a biform theory provides a
+ formal context for both deduction and computation, constructing
+ sound deductive and computational rules, and developing networks
+ of biform theories linked by interpretations. The framework is not
+ tied to a specific underlying logic; it can be used with many
+ popular logics such as first order logic, simple type theory, and
+ set theory. Many of the ideas and mechanisms used in the framework
+ are inspired by the IMPS Interactive Mathematical Proof System.",
+ paper = "Farm01.pdf"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@article{Farm13a,
+ author = "Farmer, William M.",
+ title = {{The Formalization of Syntax-Based Mathematical Algorithms
+ Using Quotation and Evaluation}},
+ journal = "LNCS",
+ volume = "7961",
+ year = "2013",
+ abstract =
+ "Algorithms like those for differentiating functional expressions
+ manipulate the syntactic structure of mathematical expressions in
+ a mathematically meaningful way. A formalization of such an
+ algorithm should include a specification of its computational
+ behavior, a specification of its mathematical meaning, and a
+ mechanism for applying the algorithm to actual
+ expressions. Achieving these goals requires the ability to
+ integrate reasoning about the syntax of the expressions with
+ reasoning about what the expressions mean. A syntax framework is a
+ mathematical structure that is an abstract model for a syntax
+ reasoning system. It contains a mapping of expressions to
+ syntactic values that represent the syntactic structures of the
+ expressions; a language for reasoning about syntactic values; a
+ quotation mechanism to refer to the syntactic value of an
+ expression; and an evaluation mechanism to refer to the value of
+ the expression represented by a syntactic value. We present and
+ compare two approaches, based on instances of a syntax framework,
+ to formalize a syntax-based mathematical algorithm in a formal
+ theory $T$. In the first approach the syntactic values for the
+ expressions manipulated by the algorithm are members of an
+ inductive type in $T$, but quotation and evaluation are functions
+ defined in the metatheory of $T$. In the second approach every
+ expression in $T$ is represented by a syntactic value, and
+ quotation and evaluation are operators in $T$ itself.",
+ paper = "Farm13a.pdf"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@article{Farm16,
+ author = "Farmer, William M.",
+ title = {{Incorporating Quotation and Evaluation into Church's Type
+ Theory: Syntax and Semantics}},
+ journal = "LNCS",
+ volume = "9791",
+ year = "2016",
+ abstract =
+ "CTT$_{qe}$ is a version of Church's type theory that includes
+ quotation and evaluation operators that are similar to quote and
+ eval in the Lisp programming language. With quotation and
+ evaluation it is possible to reason in CTT$_{qe}$ about the
+ interplay of the syntax and semantics of expressions and, as a
+ result, to formalize syntax-based mathematical algorithms. We
+ present the syntax and semantics of CTT$_{qe}$ and give several
+ examples that illustrate the usefulness of having quotation and
+ evaluation in CTT$_{qe}$. We do not give a proof system for
+ CTT$_{qe}$ but we do sketch what a proof system could look like.",
+ paper = "Farm16.pdf"
+}
+
+\end{chunk}
+
+\index{Farmer, William M.}
+\begin{chunk}{axiom.bib}
+@article{Farm17,
+ author = "Farmer, William M.",
+ title = {{Theory Morphisms in Church's Type Theory with Quotation
+ and Evaluation}},
+ journal = "LNCS",
+ volume = "10383",
+ year = "2017",
+ abstract =
+ "CTT$_{qe}$ is a version of Church's type theory with global
+ quotation and evaluation operators that is engineered to reason
+ about the interplay of syntax and semantics and to formalize
+ syntax-level mathematical algorithms. CTT$_{uqe}$ is a variant of
+ CTT$_{qe}$ that admits undefined expressions, partial functions,
+ and multiple base types of individuals. It is better suited than
+ CTT$_{qe}$ as a logic for building networks of theories connected
+ by theory morphisms. This paper presents the syntax and semantics
+ of CTT$_{uqe}$, defines a notion of a theory morphism from one
+ CTT$_{uqe}$ theory to another, and gives two simple examples
+ involving monoids that illustrate the use of theory morphisms in
+ CTT$_{qe}$.",
+ paper = "Farm17.pdf"
+}
+
+\end{chunk}
+
+\index{Felleisen, Matthias}
+\begin{chunk}{axiom.bib}
+@techreport{Fell85,
+ author = "Felleisen, Matthias",
+ title = {{Transliterating Prolog into Scheme}},
+ type = "technical report",
+ number = "182",
+ institution = "University of Indiana",
+ year = "1985",
+ paper = "Fell85.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Filliatre, JeanChristophe}
\begin{chunk}{axiom.bib}
@misc{Fill13a,
@@ 13965,6 +15933,23 @@ when shown in factored form.
\end{chunk}
+\index{Fitt, A.D.}
+\index{Hoare, G.T.Q}
+\begin{chunk}{axiom.bib}
+@article{Fitt93,
+ author = "Fitt, A.D. and Hoare, G.T.Q",
+ title = {{The Closed-Form Integration of Arbitrary Functions}},
+ journal = "The Mathematical Gazette",
+ volume = "77",
+ number = "479",
+ pages = "227--236",
+ year = "1993",
+ paper = "Fitt93.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Fitting, Melvin}
\begin{chunk}{axiom.bib}
@book{Fitt69,
@@ 13977,6 +15962,20 @@ when shown in factored form.
\end{chunk}
+\index{Fourer, Robert}
+\index{Gay, David M.}
+\index{Kernighan, Brian W.}
+\begin{chunk}{axiom.bib}
+@misc{Four03,
+ author = "Fourer, Robert and Gay, David M. and Kernighan, Brian W.",
+ title = {{AMPL Reference}},
+ link = "\url{https://ampl.com/BOOK/CHAPTERS/24refman.pdf}",
+ year = "2003",
+ paper = "Four03.pdf"
+}
+
+\end{chunk}
+
\index{Fredrikson, Matt}
\begin{chunk}{axiom.bib}
@misc{Fred16,
@@ 14002,6 +16001,17 @@ when shown in factored form.
\end{chunk}
+\begin{chunk}{axiom.bib}
+@misc{Gapt19,
+ author = "Unknown",
+ title = {{GAPT: General Architecture for Proof Theory}},
+ year = "2019",
+ link = "\url{https://www.logic.at/gapt/downloads/gapt-user-manual.pdf}",
+ paper = "Gapt19.pdf"
+}
+
+\end{chunk}
+
\index{Geuvers, Herman}
\begin{chunk}{axiom.bib}
@article{Geuv00,
@@ 14035,6 +16045,62 @@ when shown in factored form.
\end{chunk}
+\index{Gianni, Patrizia}
+\index{Trager, Barry}
+\begin{chunk}{axiom.bib}
+@article{Gian85a,
+ author = "Gianni, Patrizia and Trager, Barry",
+ title = {{GCD's and Factoring Multivariate Polynomials using
+ Grobner Bases}},
+ journal = "LNCS",
+ volume = "204",
+ year = "1985",
+ booktitle = "EUROCAL '85 European Conference on Computer Algebra",
+ publisher = "Springer",
+ abstract =
+ "This paper shows how Grobner basis computations can be used to
+ compute multivariate gcds, perform Hensel lifting, and reduce
+ multivariate factorization to univariate. The essential idea is to
+ produce an ideal containing the desired polynomial as an element
+ of least degree. The construction is somewhat analogous to the
+ recent lattice algorithms for polynomial factorization. A major
+ difference is that we don't need to perform the hensel lifting as
+ a separate step; one Grobner basis computation is sufficient to
+ find the desired multivariate factor or gcd. We produce algorithms
+ which are very simple and may be of use on small systems where
+ code size is critical. We feel that these results demonstrate the
+ fundamental importance of the Grobner basis in computer algebra.",
+ paper = "Gian85a.pdf"
+}
+
+\end{chunk}
+
+\index{Giannini, Paola}
+\begin{chunk}{axiom.bib}
+@techreport{Gian85,
+ author = "Giannini, Paola",
+ title = {{Type Checking and Type Deduction Techniques for
+ Polymorphic Programming Languages}},
+ type = "technical report",
+ institution = "Carnegie Mellon University",
+ number = "CMU-CS-85-187",
+ year = "1985",
+ abstract =
+ "In this paper we present some of the syntactic issues that arise
+ in polymorphic programming languages. In particular we examine
+ type checking and deduction in two different polymorphic type
+ structures: the parametric lambda-calculus (with let construct)
+ and the polymorphic or second-order lambda-calculus. In both
+ approaches the behavior of types is formalized with type inference
+ rules. Examples of programming languages following those
+ approaches are presented and some of their specific problems
+ studied.",
+ paper = "Gian85.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Girard, JeanYves}
\begin{chunk}{axiom.bib}
@article{Gira87,
@@ 14072,6 +16138,44 @@ when shown in factored form.
\end{chunk}
+\index{Gleich, David}
+\begin{chunk}{axiom.bib}
+@misc{Glei05,
+ author = "Gleich, David",
+ title = {{Finite Calculus: A Tutorial for Solving Nasty Sums}},
+ year = "2005",
+ abstract =
+ "In this tutorial, I will first explain the need for finite
+ calculus using an example sum I think is difficult to solve. Next,
+ I will show where this sum actually occurs and why it is
+ important. Following that, I will present all the mathematics
+ behind finite calculus and a series of theorems to make it helpful
+ before concluding with a set of examples to show that it really is
+ useful.",
+ paper = "Glei05.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Goguen, J.A.}
+\index{Thatcher, J.W.}
+\index{Wagner, E.G.}
+\index{Wright, J.B.}
+\begin{chunk}{axiom.bib}
+@techreport{Gogu76,
+ author = "Goguen, J.A. and Thatcher, J.W. and Wagner, E.G. and
+ Wright, J.B.",
+ title = {{An Initial Algebra Approach to the Specification,
+ Correctness and Implementation of Abstract Data Types}},
+ type = "Research Report",
+ institution = "IBM Research",
+ number = "RC6487",
+ year = "1976"
+}
+
+\end{chunk}
+
\index{Goldblatt, Robert}
\begin{chunk}{axiom.bib}
@book{Gold84,
@@ 14091,7 +16195,7 @@ when shown in factored form.
@incollection{Gord79a,
author = "Gordon, Michael J. and Milner, Arthur J. and
Wadsworth, Christopher P.",
 title = "Front Matter",
+ title = {{Front Matter}},
booktitle = "Edinburgh LCF",
publisher = "SpringerVerlag",
year = "1979",
@@ 14109,7 +16213,7 @@ when shown in factored form.
@incollection{Gord79b,
author = "Gordon, Michael J. and Milner, Arthur J. and
Wadsworth, Christopher P.",
 title = "Introduction",
+ title = {{Introduction}},
booktitle = "Edinburgh LCF",
publisher = "SpringerVerlag",
year = "1979",
@@ 14127,7 +16231,7 @@ when shown in factored form.
@incollection{Gord79c,
author = "Gordon, Michael J. and Milner, Arthur J. and
Wadsworth, Christopher P.",
 title = "ML",
+ title = {{ML}},
booktitle = "Edinburgh LCF",
publisher = "SpringerVerlag",
year = "1979",
@@ 14145,7 +16249,7 @@ when shown in factored form.
@incollection{Gord79d,
author = "Gordon, Michael J. and Milner, Arthur J. and
Wadsworth, Christopher P.",
 title = "PPLAMBDA",
+ title = {{PPLAMBDA}},
booktitle = "Edinburgh LCF",
publisher = "SpringerVerlag",
year = "1979",
@@ 14163,7 +16267,7 @@ when shown in factored form.
@incollection{Gord79e,
author = "Gordon, Michael J. and Milner, Arthur J. and
Wadsworth, Christopher P.",
 title = "APPENDIX",
+ title = {{APPENDIX}},
booktitle = "Edinburgh LCF",
publisher = "SpringerVerlag",
year = "1979",
@@ 14200,7 +16304,6 @@ when shown in factored form.
drawing relationships between mathematical structures that
provides insight for a wide audience. The most in dept map is
available online for public use.",
 comment = "\url{http://www.mit.edu/~kepner/GravelMathMap.pdf}",
paper = "Grav18.pdf"
}
@@ 14225,6 +16328,28 @@ when shown in factored form.
\end{chunk}
+\index{Griesmer, J.H.}
+\index{Jenks, R.D.}
+\index{Yun, D.Y.Y}
+\begin{chunk}{axiom.bib}
+@article{Grie78a,
+ author = "Griesmer, J.H. and Jenks, R.D. and Yun, D.Y.Y",
+ title = {{A Taxonomy for Algebraic Computation}},
+ journal = "ACM SIGSAM Bulletin",
+ volume = "12",
+ number = "3",
+ pages = "25--28",
+ year = "1978",
+ abstract =
+ "Recently the authors responded to a request from Professor
+ Anthony Ralston of the State University of New York at Buffalo to
+ participate in the review of a proposed Taxonomy of Computer
+ Science and Engineering.",
+ paper = "Grie78a.pdf"
+}
+
+\end{chunk}
+
\index{Gurevich, Yuri}
\begin{chunk}{axiom.bib}
@article{Gure12,
@@ 14309,6 +16434,18 @@ when shown in factored form.
\end{chunk}
+\index{Hammack, Richard}
+\begin{chunk}{axiom.bib}
+@book{Hamm18,
+ author = "Hammack, Richard",
+ title = {{Book of Proof}},
+ publisher = "Hammack, Richard",
+ year = "2018",
+ paper = "Hamm18.pdf"
+}
+
+\end{chunk}
+
\index{Hantler, Sidney L.}
\index{King, James C.}
\begin{chunk}{axiom.bib}
@@ 14344,7 +16481,7 @@ when shown in factored form.
than explicitly. This leads naturally to the use of additional
assertions which are called ``inductive assertions.''",
paper = "Hant76.pdf",
 keywords = "printed",
+ keywords = "printed"
}
\end{chunk}
@@ 14361,6 +16498,33 @@ when shown in factored form.
\end{chunk}
\index{Harper, Robert}
+\begin{chunk}{axiom.bib}
+@article{Harp92,
+ author = "Harper, Robert",
+ title = {{Constructing Type Systems over an Operational Semantics}},
+ journal = "J. Symbolic Computation",
+ volume = "14",
+ pages = "71--84",
+ year = "1992",
+ abstract =
+ "Type theories in the sense of Martin-Lof and the NuPRL system are
+ based on taking as primitive a typefree programming language
+ given by an operational semantics, and defining types as partial
+ equivalence relations on the set of closed terms. The construction
+ of a type system is based on a general form of inductive
+ definition that may either be taken as acceptable in its own
+ right, or further explicated in terms of other patterns of
+ induction. One such account, based on a general theory of
+ inductively defined relations, was given by Allen. An alternative
+ account, based on an essentially set theoretic argument, is
+ presented.",
+ paper = "Harp92.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Harper, Robert}
\index{Stone, Christopher}
\begin{chunk}{axiom.bib}
@techreport{Harp97,
@@ 14419,6 +16583,113 @@ when shown in factored form.
\end{chunk}
+\index{Harvey, David}
+\index{van der Hoeven, Joris}
+\begin{chunk}{axiom.bib}
+@misc{Harv19,
+ author = "Harvey, David and van der Hoeven, Joris",
+ title = {{Integer Multiplication in Time O(n log n)}},
+ link = "\url{https://hal.archives-ouvertes.fr/hal-02070778/document}",
+ year = "2019",
+ abstract =
+ "We present an algorithm that computes the product of two
+ n-bit integers in O(n log n) bit operations",
+ paper = "Harv19.pdf"
+}
+
+\end{chunk}
+
+\index{Havas, George}
+\index{Majewski, Bohdan}
+\index{Matthews, K.R.}
+\begin{chunk}{axiom.bib}
+@techreport{Hava95,
+ author = "Havas, George and Majewski, Bohdan and Matthews, K.R.",
+ title = {{Extended GCD Algorithms}},
+ type = "technical report",
+ institution = "University of Queensland",
+ number = "TR0302",
+ year = "1995",
+ abstract =
+ "Extended gcd calculation has a long history and plays an
+ important role in computational number theory and linear
+ algebra. Recent results have shown that finding optimal
+ multipliers in extended gcd calculations is difficult. We study
+ algorithms for finding good multipliers and present new algorithms
+ with improved performance. We present a well-performing algorithm
+ which is based on lattice basis reduction methods and may be
+ formally analyzed. We also give a relatively fast algorithm with
+ moderate performance.",
+ paper = "Hava95.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Havas, George}
+\index{Majewski, Bohdan}
+\begin{chunk}{axiom.bib}
+@techreport{Hava97,
+ author = "Havas, George and Majewski, Bohdan",
+ title = {{Extended GCD Algorithms}},
+ type = "technical report",
+ institution = "University of Queensland",
+ number = "TR0325",
+ year = "1997",
+ abstract =
+ "Given an integer vector of $n$ positive numbers
+ $a=(a_i)_{i=1}^n$ the extended gcd problem asks for an
+ integer vector $x$ of length $n$ such that
+ \[xa^T=\sum_{i=1}^n x_ia_i = gcd(a_1,a_2,\ldots,a_n)\]
+
+ For many applications it is vital that some measure of $x$,
+ $\vert{x}\vert$ is small. We have proved, however, that if we choose
+ either the max norm or the zero metric the question of finding
+ $x$ such that $\vert{x}\vert$ is smaller than some positive constant
+ $K$ is NP-complete. We conjecture that the question remains
+ NP-complete for other norms.
+
+ In the light of these results we have proposed two approximation
+ algorithms. Their respective complexities are
+ $O(n^2 log(max_i\{a_i\}))$ and $O(n^4 log(max_i\{a_i\}))$.
+ Theoretical analysis of the algorithms leads
+ to unsatisfactory bounds on the quality of the solution. Thus here
+ we undertake a practical study of the methods, where their
+ performance is matched against optimal solutions.",
+ paper = "Hava97.pdf"
+}
+
+\end{chunk}
+
+\index{Havas, George}
+\index{Majewski, Bohdan S.}
+\index{Matthews, Keith R.}
+\begin{chunk}{axiom.bib}
+@article{Hava98,
+ author = "Havas, George and Majewski, Bohdan S. and Matthews, Keith R.",
+ title = {{Extended GCD and Hermite Normal Form Algorithms via
+ Lattice Basis Reduction}},
+ journal = "Experimental Mathematics",
+ volume = "7",
+ number = "2",
+ pages = "125--136",
+ year = "1998",
+ abstract =
+ "Extended gcd calculation has a long history and plays an
+ important role in computational number theory and linear
+ algebra. Recent results have shown that finding optimal
+ multipliers in extended gcd calculations is difficult. We present
+ an algorithm which uses lattice basis reduction to produce small
+ integer multipliers $x_1,\ldots,x_m$ for the equation
+ $s=gcd(s_1,\ldots,s_m)=x_1s_1+\ldots+x_ms_m$ where
+ $s_1,\ldots,s_m$ are given integers. The method generalises to
+ produce small unimodular transformation matrices for computing the
+ hermite normal form of an integer matrix.",
+ paper = "Hava98.pdf"
+}
+
+\end{chunk}
+
\index{Haynes, Christopher T.}
\index{Friedman, Daniel P.}
\begin{chunk}{axiom.bib}
@@ 14498,6 +16769,31 @@ when shown in factored form.
\end{chunk}
+\index{Henderson, Peter}
+\index{Morris Jr., James H.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Hend76,
+ author = "Henderson, Peter and Morris Jr., James H.",
+ title = {{A Lazy Evaluator}},
+ booktitle = "3rd Symp. on Principles of Programming Languages",
+ publisher = "ACM",
+ pages = "95--103",
+ year = "1976",
+ abstract =
+ "A different way to execute pure LISP programs is presented. It
+ delays the evaluation of parameters and list structures without
+ ever having to perform more evaluation steps than the usual
+ method. Although the central idea can be found in earlier work
+ this paper is of interest since it treats a rather well-known
+ language and works out an algorithm which avoids full
+ substitution. A partial correctness proof using Scott-Strachey
+ semantics is sketched in a later section.",
+ paper = "Hend76.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
\index{Henglein, Friedrich}
\begin{chunk}{axiom.bib}
@phdthesis{Heng89,
@@ 14541,7 +16837,131 @@ when shown in factored form.
Finally, we offer an explanation for the apparent practicality of
polymorphic type inference in the face of theoretical
intractability results.",
 paper = "Heng89.pdf",
+ paper = "Heng89.pdf"
+}
+
+\end{chunk}
+
+\index{Hearn, Anthony C.}
+\begin{chunk}{axiom.bib}
+@article{Hear71,
+ author = "Hearn, Anthony C.",
+ title = {{Applications of Symbol Manipulation in Theoretical Physics}},
+ journal = "Communications of the ACM",
+ volume = "14",
+ number = "8",
+ pages = "511--516",
+ year = "1971",
+ paper = "Hear71.pdf"
+}
+
+\end{chunk}
+
+\index{Hearn, Anthony C.}
+\begin{chunk}{axiom.bib}
+@article{Hear72,
+ author = "Hearn, Anthony C.",
+ title = {{An Improved Non-Modular Polynomial GCD Algorithm}},
+ journal = "ACM SIGSAM Bulletin",
+ volume = "23",
+ pages = "10--15",
+ year = "1972",
+ abstract =
+ "An improved nonmodular algorithm for the calculation of the
+ greatest common divisor of two multivariate polynomials is
+ presented.",
+ paper = "Hear72.pdf"
+}
+
+\end{chunk}
+
+\index{Hearn, Anthony C.}
+\begin{chunk}{axiom.bib}
+@book{Hear73,
+ author = "Hearn, Anthony C.",
+ title = {{REDUCE2 Users Manual}},
+ comment = "Computing Physics Group",
+ publisher = "University of Utah",
+ year = "1973"
+}
+
+\end{chunk}
+
+\index{Hearn, Anthony C.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Hear74,
+ author = "Hearn, Anthony C.",
+ title = {{A Mode Analysing Algebraic Manipulation Program}},
+ booktitle = "Proc. 1974 annual ACM Conference. Vol 2",
+ publisher = "ACM",
+ year = "1974"
+}
+
+\end{chunk}
+
+\index{Hearn, Anthony C.}
+\begin{chunk}{axiom.bib}
+@article{Hear79,
+ author = "Hearn, Anthony C.",
+ title = {{Nonmodular computation of polynomial GCDs using Trial Division}},
+ journal = "LNCS",
+ volume = "72",
+ pages = "227--239",
+ year = "1979",
+ abstract =
+ "This paper describes a new algorithm for the determination of the
+ GCD of two multivariate polynomials by nonmodular means.",
+ paper = "Hear79.pdf"
+}
+
+\end{chunk}
+
+\index{Heras, Jonathan}
+\index{Pascual, Vico}
+\index{Rubio, Julio}
+\begin{chunk}{axiom.bib}
+@article{Hera09,
+ author = "Heras, Jonathan and Pascual, Vico and Rubio, Julio",
+ title = {{Using Open Mathematical Documents to Interface Computer
+ Algebra and Proof Assistant Systems}},
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
+ abstract =
+ "Mathematical Knowledge can be encoded by means of Open
+ Mathematical Documents (OMDoc) to interface both Computer Algebra
+ and Proof Assistant systems. In this paper, we show how a unique
+ OMDoc structure can be used to dynamically generate, both a
+ Graphical User Interface for a Computer Algebra system and a
+ script for a Proof Assistant. This generic approach has been made
+ concrete through a first prototype interfacing the Kenzo Computer
+ Algebra system with the ACL2 Theorem Prover, both based on the
+ Common Lisp programming language. An OMDoc repository has been
+ developed allowing the user to customize the application in an
+ easy way.",
+ paper = "Hera09.pdf"
+}
+
+\end{chunk}
+
+\index{Heras, Jonathan}
+\index{Komendantskaya, Ekaterina}
+\begin{chunk}{axiom.bib}
+@article{Hera13,
+ author = "Heras, Jonathan and Komendantskaya, Ekaterina",
+ title = {{ML4PG in Computer Algebra Verification}},
+ journal = "LNCS",
+ volume = "7961",
+ year = "2013",
+ abstract =
+ "ML4PG is a machine-learning extension that provides statistical
+ proof hints during the process of Coq/SSReflect proof
+ development. In this paper, we use ML4PG to find proof patterns in
+ the CoqEAL library  a library that was devised to verify the
+ correctness of Computer Algebra algorithms. In particular, we use
+ ML4PG to help us in the formalisation of an efficient algorithm to
+ compute the inverse of triangular matrices.",
+ paper = "Hera13.pdf"
}
\end{chunk}
@@ 14581,6 +17001,34 @@ when shown in factored form.
\end{chunk}
+\index{Hetzl, Stefan}
+\begin{chunk}{axiom.bib}
+@article{Hetz12,
+ author = "Hetzl, Stefan",
+ title = {{Project Presentation: Algorithmic Structuring and
+ Compression of Proofs (ASCOP)}},
+ journal = "LNCS",
+ volume = "7362",
+ year = "2012",
+ abstract =
+ "Computer-generated proofs are typically analytic, i.e. they
+ essentially consist only of formulas which are present in the
+ theorem that is shown. In contrast, mathematical proofs written by
+ humans almost never are: they are highly structured due to the use
+ of lemmas.
+
+ The ASCOP project aims at developing algorithms and software which
+ structure and abbreviate analytic proofs by computing useful
+ lemmas. These algorithms will be based on recent groundbreaking
+ results establishing a new connection between proof theory and
+ formal language theory. This connection allows the application of
+ efficient algorithms based on formal grammars to structure and
+ compress proofs.",
+ paper = "Hetz12.pdf"
+}
+
+\end{chunk}
+
\index{Heyting, A.}
\begin{chunk}{axiom.bib}
@book{Heyt56,
@@ 14629,6 +17077,114 @@ when shown in factored form.
\end{chunk}
+\index{Horozal, Fulya}
+\index{Iacob, Alin}
+\index{Jucovschi, Constantin}
+\index{Kohlhase, Michael}
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Horo11,
+ author = "Horozal, Fulya and Iacob, Alin and Jucovschi, Constantin
+ and Kohlhase, Michael and Rabe, Florian",
+ title = {{Combining Source, Content, Presentation, Narration, and
+ Relational Representation}},
+ journal = "LNCS",
+ volume = "6824",
+ year = "2011",
+ abstract =
+ "In this paper, we try to bridge the gap between different
+ dimensions / incarnations of mathematical knowledge: MKM
+ representation formats (content), their humanoriented languages
+ (source, presentation), their narrative linearizations
+ (narration), and relational presentations used in the semantic
+ web. The central idea is to transport solutions from software
+ engineering to MKM regarding the parallel interlinked maintenance
+ of the different incarnations. We show how the integration of
+ these incarnations can be utilized to enrich the authoring and
+ viewing processes, and we evaluate our infrastructure on the LATIN
+ Logic Atlas, a modular library of logic formalizations, and a set
+ of computer science lecture notes written in STEX -- a modular,
+ semantic variant of LATEX.",
+ paper = "Horo11.pdf"
+}
+
+\end{chunk}
+
+\index{Horozal, Fulya}
+\index{Kohlhase, Michael}
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Horo12,
+ author = "Horozal, Fulya and Kohlhase, Michael and Rabe, Florian",
+ title = {{Extending MKM Formats at the Statement Level}},
+ journal = "LNCS",
+ volume = "7362",
+ year = "2012",
+ abstract =
+ "Successful representation and markup languages find a good
+ balance between giving the user freedom of expression, enforcing
+ the fundamental semantic invariants of the modeling framework, and
+ allowing machine support for the underlying semantic
+ structures. MKM formats maintain strong invariants while trying to
+ be foundationally unconstrained, which makes the induced design
+ problem particularly challenging.
+
+ In this situation, it is standard practice to define a minimal
+ core language together with a scripting/macro facility for
+ syntactic extensions that map into the core language. In practice,
+ such extension facilities are either fully unconstrained (making
+ invariants and machine support difficult) or limited to the object
+ level (keeping the statement and theory levels fixed).
+
+ In this paper we develop a general methodology for extending MKM
+ representation formats at the statement level. We show the utility
+ (and indeed necessity) of statementlevel extensions by
+ redesigning the OMDoc format into a minimal, regular core language
+ (strict OMDoc) and an extension (pragmatic OMDoc) that maps into
+ strict OMDoc.",
+ paper = "Horo12.pdf"
+}
+
+\end{chunk}
+
+\index{Horozal, Fulya}
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Horo15,
+ author = "Horozal, Fulya and Rabe, Florian",
+ title = {{Formal Logic Definitions for Interchange Languages}},
+ journal = "LNCS",
+ volume = "9150",
+ year = "2015",
+ abstract =
+ "System integration often requires standardized interchange
+ languages, via which systems can exchange mathematical
+ knowledge. Major examples are the MathMLbased markup languages
+ and TPTP. However, these languages standardize only the syntax of
+ the exchanged knowledge, which is insufficient when the involved
+ logics are complex or numerous. Logical frameworks, on the other
+ hand, allow representing the logics themselves (and are thus aware
+ of the semantics), but they abstract from the concrete syntax.
+
+ Maybe surprisingly, until recently, stateoftheart logical
+ frameworks were not quite able to adequately represent logics
+ commonly used in formal systems. Using a recent extension of the
+ logical framework LF, we show how to give concise formal
+ definitions of the logics used in TPTP. We can also formally
+ define translations and combinations between the various TPTP
+ logics. This allows us to build semantics-aware tool support such
+ as typechecking TPTP content.
+
+ While our presentation focuses on the current TPTP logics, our
+ approach can be easily extended to other logics and interchange
+ languages. In particular, our logic representations can be used
+ with both TPTP and MathML. Thus, a single definition of the
+ semantics can be used with either interchange syntax.",
+ paper = "Horo15.pdf"
+}
+
+\end{chunk}
+
\index{Hrbacek, Karel}
\index{Jech, Thomas}
\begin{chunk}{axiom.bib}
@@ 14644,8 +17200,87 @@ when shown in factored form.
\end{chunk}
\subsection{I} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+\begin{chunk}{axiom.bib}
+@book{IBMx78,
+ author = "IBM",
+ title = {{LISP/370 Program Description / Operations Manual}},
+ publisher = "IBM Research",
+ year = "1978",
+ comment = "SH20-2076-0"
+}
+
+\end{chunk}
+
+\index{Iancu, Mihnea}
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Ianc12,
+ author = "Iancu, Mihnea and Rabe, Florian",
+ title = {{Management of Change in Declarative Languages}},
+ journal = "LNCS",
+ volume = "7362",
+ year = "2012",
+ abstract =
+ "Due to the high degree of interconnectedness of formal
+ mathematical statements and theories, human authors often have
+ difficulties anticipating and tracking the effects of a change in
+ large bodies of symbolic mathematical knowledge. Therefore, the
+ automation of change management is desirable. But while computers
+ can in principle detect and propagate changes automatically, this
+ process must take the semantics of the underlying mathematical
+ formalism into account. Therefore, concrete management of change
+ solutions are difficult to realize.
+
+ The MMT language was designed as a generic declarative language
+ that captures universal structural features while avoiding a
+ commitment to a particular formalism. Therefore, it provides a
+ promising framework for the systematic study of changes in
+ declarative languages. We leverage this framework by providing a
+ generic change management solution at the MMT level, which can be
+ instantiated for arbitrary specific languages.",
+ paper = "Ianc12.pdf"
+}
+
+\end{chunk}
+
\subsection{J} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Jammer, Max}
+\begin{chunk}{axiom.bib}
+@book{Jamm66,
+ author = "Jammer, Max",
+ title = {{The Conceptual Development of Quantum Mechanics}},
+ year = "1966",
+ publisher = "McGrawHill"
+}
+
+\end{chunk}
+
+\index{Jeffrey, David J.}
+\index{Rich, Albert D.}
+\begin{chunk}{axiom.bib}
+@article{Jeff10,
+ author = "Jeffrey, David J. and Rich, Albert D.",
+ title = {{Reducing Expression Size Using RuleBased Integration}},
+ journal = "LNCS",
+ volume = "6167",
+ year = "2010",
+ abstract =
+ "This paper describes continuing progress on the development of a
+ repository of transformation rules relevant to indefinite
+ integration. The methodology, however, is not restricted to
+ integration. Several optimization goals are being pursued,
+ including achieving the best form for the output, reducing the
+ size of the repository while retaining its scope, and minimizing
+ the number of steps required for the evaluation process. New
+ optimizations for expression size are presented.",
+ paper = "Jeff10.pdf",
+ keywords = "DONE"
+}
+
+\end{chunk}
+
\index{Jeffrey, Richard}
\begin{chunk}{axiom.bib}
@book{Jeff81,
@@ 14658,6 +17293,37 @@ when shown in factored form.
\end{chunk}
+\index{Jenks, Richard D.}
+\begin{chunk}{axiom.bib}
+@article{Jenk75,
+ author = "Jenks, Richard D.",
+ title = {{Course Outline: Yale University, New Haven}},
+ journal = "SIGSAM Bulletin",
+ volume = "9",
+ number = "3",
+ pages = "9--10",
+ publisher = "ACM",
+ year = "1975",
+ paper = "Jenk75.pdf",
+ keywords = "axiomref, printed, DONE"
+}
+
+\end{chunk}
+
+\index{Jensen, Kathleen}
+\index{Wirth, Niklaus}
+\begin{chunk}{axiom.bib}
+@book{Jens75,
+ author = "Jensen, Kathleen and Wirth, Niklaus",
+ title = {{PASCAL User Manual and Report}},
+ publisher = "SpringerVerlag",
+ year = "1975",
+ isbn = "0-387-90144-2",
+ keywords = "owned"
+}
+
+\end{chunk}
+
\subsection{K} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Kahrs, Stefan}
@@ 14753,7 +17419,6 @@ when shown in factored form.
year = "2012",
link = "\url{https://fas.org/irp/agency/dod/dsb/autonomy.pdf}",
comment = "verification, validation, and trust"

}
\end{chunk}
@@ 14767,7 +17432,7 @@ when shown in factored form.
journal = "J. ACM",
volume = "27",
number = "1",
 pages = "128.145",
+ pages = "128--145",
year = "1980",
abstract =
"In this paper an algorithm for the determination of runtime
@@ 14784,6 +17449,30 @@ when shown in factored form.
\end{chunk}
+\index{Kerber, Manfred}
+\begin{chunk}{axiom.bib}
+@article{Kerb10,
+ author = "Kerber, Manfred",
+ title = {{Proofs, Proofs, Proofs, and Proofs}},
+ journal = "LNCS",
+ volume = "6167",
+ year = "2010",
+ abstract =
+ "In logic there is a clear concept of what constitutes a proof and
+ what not. A proof is essentially defined as a finite sequence of
+ formulae which are either axioms or derived by proof rules from
+ formulae earlier in the sequence. Sociologically, however, it is
+ more difficult to say what should constitute a proof and what
+ not. In this paper we will look at different forms of proofs and
+ try to clarify the concept of proof in the wider meaning of the
+ term. This has implications on how proofs should be represented
+ formally.",
+ paper = "Kerb10.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
\index{Kiczales, Gregor}
\index{des Rivieres, Jim}
\index{Bobrow, Daniel G.}
@@ 14798,6 +17487,35 @@ when shown in factored form.
\end{chunk}
+\index{Kiss, Csongor}
+\index{Eisenbach, Susan}
+\index{Field, Tony}
+\index{Jones, Simon Peyton}
+\begin{chunk}{axiom.bib}
+@inproceedings{Kiss18,
+ author = "Kiss, Csongor and Eisenbach, Susan and Field, Tony and
+ Jones, Simon Peyton",
+ title = {{Higher-order Type-level Programming in Haskell}},
+ booktitle = "Proc. ACM Programming Languages",
+ year = "2018",
+ publisher = "ACM",
+ abstract =
+ "Type family applications in Haskell must be fully saturated. This
+ means that all typelevel functions have to be firstorder,
+ leading to code that is both messy and long-winded. In this paper
+ we detail an extension to GHC that removes this restriction. We
+ augment Haskell's existing type arrow, $\rightarrow$, with an
+ unmatchable arrow $\twoheadrightarrow$, that supports partial
+ application of type families without compromising soundness. A
+ soundness proof is provided. We show how the techniques described
+ can lead to substantial codesize reduction (circa 80\%) in the
+ typelevel logic of commonlyused typelevel libraries whilst
+ simultaneously improving code quality and readability.",
+ paper = "Kiss18.pdf"
+}
+
+\end{chunk}
+
\index{Kleene, S.C.}
\index{Vesley, R.E.}
\begin{chunk}{axiom.bib}
@@ 14844,6 +17562,246 @@ when shown in factored form.
\end{chunk}
+\index{Kohlhase, Michael}
+\index{Muller, Christine}
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Kohl08a,
+ author = "Kohlhase, Michael and Muller, Christine and Rabe, Florian",
+ title = {{Notations for Living Mathematical Documents}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "Notations are central for understanding mathematical
+ discourse. Readers would like to read notations that transport the
+ meaning well and prefer notations that are familiar to
+ them. Therefore, authors optimize the choice of notations with
+ respect to these two criteria, while at the same time trying to
+ remain consistent over the document and their own prior
+ publications. In print media where notations are fixed at
+ publication time, this is an overconstrained problem. In living
+ documents notations can be adapted at reading time, taking reader
+ preferences into account.
+
+ We present a representational infrastructure for notations in
+ living mathematical documents. Mathematical notations can be
+ defined declaratively. Author and reader can extensionally define
+ the set of available notation definitions at arbitrary document
+ levels, and they can guide the notation selection function via
+ intensional annotations.
+
+ We give an abstract specification of notation definitions and the
+ flexible rendering algorithms and show their coverage on
+ paradigmatic examples. We show how to use this framework to render
+ OPENMATH and ContentMathML to PresentationMathML, but the
+ approach extends to arbitrary content and presentation formats. We
+ discuss prototypical implementations of all aspects of the
+ rendering pipeline.",
+ paper = "Kohl08a.pdf"
+}
+
+\end{chunk}
+
+\index{Kohlhase, Michael}
+\index{Mance, Felix}
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Kohl13,
+ author = "Kohlhase, Michael and Mance, Felix and Rabe, Florian",
+ title = {{A Universal Machine for Biform Theory Graphs}},
+ journal = "LNCS",
+ volume = "7961",
+ year = "2013",
+ abstract =
+ "Broadly speaking, there are two kinds of semanticsaware
+ assistant systems for mathematics: proof assistants express the
+ semantics in logic and emphasize deduction, and computer algebra
+ systems express the semantics in programming languages and
+ emphasize computation. Combining the complementary strengths of
+ both approaches while mending their complementary weaknesses has
+ been an important goal of the mechanized mathematics community for
+ some time.
+
+ We pick up on the idea of biform theories and interpret it in the
+ MMT/OMDOC framework which introduced the foundationsastheories
+ approach, and can thus represent both logics and programming
+ languages as theories. This yields a formal, modular framework of
+ biform theory graphs which mixes specifications and implementations
+ sharing the module system and typing information.
+
+ We present automated knowledge management work flows that
+ interface to existing specification/programming tools and enable
+ an OPENMATH Machine, that operationalizes biform theories,
+ evaluating expressions by exhaustively applying the
+ implementations of the respective operators. We evaluate the new
+ biform framework by adding implementations to the OPENMATH
+ standard content dictionaries.",
+ paper = "Kohl13.pdf"
+}
+
+\end{chunk}
+
+\index{Kotelnikov, Evgenii}
+\index{Kovacs, Laura}
+\index{Reger, Giles}
+\index{Voronkov, Andrei}
+\begin{chunk}{axiom.bib}
+@inproceedings{Kote16,
+ author = "Kotelnikov, Evgenii and Kovacs, Laura and Reger, Giles and
+ Voronkov, Andrei",
+ title = {{The Vampire and the FOOL}},
+ booktitle = "SIGPLAN Conf. on Certified Programs and Proofs",
+ year = "2016",
+ publisher = "ACM",
+ pages = "37--48",
+ abstract =
+ "This paper presents new features recently implemented in the
+ theorem prover Vampire, namely support for firstorder logic with
+ a first class boolean sort (FOOL) and polymorphic arrays. In
+ addition to having a first class boolean sort, FOOL also contains
+ if-then-else and let-in expressions. We argue that the presented
+ extensions facilitate reasoning-based program analysis, both by
+ increasing the expressivity of firstorder reasoners and by gains
+ in efficiency.",
+ paper = "Kote16.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kotelnikov, Evgenii}
+\begin{chunk}{axiom.bib}
+@phdthesis{Kote18,
+ author = "Kotelnikov, Evgenii",
+ title = {{Automated Theorem Proving with Extensions of FirstOrder Logic}},
+ school = "Chalmers",
+ year = "2018",
+ abstract =
+ "Automated theorem provers are computer programs that check
+ whether a logical conjecture follows from a set of logical
+ statements. The conjecture and the statements are expressed in the
+ language of some formal logic, such as firstorder logic. Theorem
+ provers for firstorder logic have been used for automation in
+ proof assistants, verification of programs, static analysis of
+ networks, and other purposes. However, the efficient usage of
+ these provers remains challenging. One of the challenges is the
+ complexity of translating domain problems to firstorder
+ logic. Not only can such translation be cumbersome due to semantic
+ differences between the domain and the logic, but it might
+ inadvertently result in problems that provers cannot easily handle.
+
+ The work presented in the thesis addresses this challenge by
+ developing an extension of firstorder logic named FOOL. FOOL
+ contains syntactical features of programming languages and more
+ expressive logics, is friendly for translation of problems from
+ various domains, and can be efficiently supported by existing
+ theorem provers. We describe the syntax and semantics of FOOL and
+ present a simple translation from FOOL to plain firstorder
+ logic. We describe an efficient clausal normal form transformation
+ algorithm for FOOL and based on it implement a support for FOOL in
+ the Vampire theorem prover. We illustrate the efficient use of
+ FOOL for program verification by describing a concise encoding of
+ next state relations of imperative programs in FOOL. We show a
+ usage of features of FOOL in problems of static analysis of
+ networks. We demonstrate the efficiency of automated theorem
+ proving in FOOL with an extensive set of experiments. In these
+ experiments we compare the performance of Vampire on a large
+ collection of problems from various sources translated to FOOL and
+ ordinary firstorder logic. Finally, we fix the syntax for FOOL in
+ TPTP, the standard language of firstorder theorem provers.",
+ paper = "Kote18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kovacs, Laura}
+\index{Voronkov, Andrei}
+\begin{chunk}{axiom.bib}
+@misc{Kova13,
+ author = "Kovacs, Laura and Voronkov, Andrei",
+ title = {{FirstOrder Theorem Proving and Vampire}},
+ year = "2013",
+ link = "\url{http://www.cse.chalmers.se/~laurako/pub/CAV13_Kovacs.pdf}",
+ abstract =
+ "In this paper we give a short introduction in firstorder theorem
+ proving and the use of the theorem prover Vampire. We discuss the
+ superposition calculus and explain the key concepts of saturation
+ and redundancy elimination, present saturation algorithms and
+ preprocessing, and demonstrate how these concepts are implemented
+ in Vampire. Further, we also cover more recent topics and features
+ of Vampire designed for advanced applications, including
+ satisfiability checking, theory reasoning, interpolation,
+ consequence elimination, and program analysis.",
+ paper = "Kova13.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Kozen, Dexter}
+\index{Landau, Susan}
+\index{Zippel, Richard}
+\begin{chunk}{axiom.bib}
+@article{Koze94,
+ author = "Kozen, Dexter and Landau, Susan and Zippel, Richard",
+ title = {{Decomposition of Algebraic Functions}},
+ journal = "LNCS",
+ volume = "877",
+ pages = "80--92",
+ year = "1994",
+ abstract =
+ "Functional decomposition --- whether a function $f(x)$ can be
+ written as a composition of functions $g(h(x))$ in a nontrivial
+ way --- is an important primitive in symbolic computation
+ systems. The problem of univariate polynomial decomposition was
+ shown to have an efficient solution by Kozen and Landau.
+ Dickerson and von zur Gathen gave algorithms for certain
+ multivariate cases. Zippel showed how to decompose rational
+ functions. In this paper, we address the issue of decomposition of
+ algebraic functions. We show that the problem is related to
+ univariate resultants in algebraic function fields, and in fact
+ can be reformulated as a problem of resultant decomposition. We
+ characterize all decompositions of a given algebraic function up
+ to isomorphism, and give an exponential time algorithm for finding
+ a nontrivial one if it exists. The algorithm involves genus
+ calculations and constructing transcendental generators of fields
+ of genus zero.",
+ paper = "Koze94.pdf"
+}
+
+\end{chunk}
+
+\index{Krebbers, Robbert}
+\index{Spitters, Bas}
+\begin{chunk}{axiom.bib}
+@article{Kreb11,
+ author = "Krebbers, Robbert and Spitters, Bas",
+ title = {{Computer Certified Efficient Exact Reals in Coq}},
+ journal = "LNCS",
+ volume = "6824",
+ year = "2011",
+ abstract =
+ "Floating point operations are fast, but require continuous effort
+ on the part of the user in order to ensure that the results are
+ correct. This burden can be shifted away from the user by
+ providing a library of exact analysis in which the computer
+ handles the error estimates. We provide an implementation of the
+ exact real numbers in the Coq proof assistant. This improves on
+ the earlier Coqimplementation by O'Connor in two ways: we use
+ dyadic rationals built from the machine integers and we optimize
+ computation of power series by using approximate
+ division. Moreover, we use type classes for clean mathematical
+ interfaces. This appears to be the first time that type classes
+ are used in heavy computation. We obtain over a 100 times speed up
+ of the basic operations and indications for improving the Coq system.",
+ paper = "Kreb11.pdf"
+}
+
+\end{chunk}
+
+
\index{Kreitz, Christoph}
\index{Rahli, Vincent}
\begin{chunk}{axiom.bib}
@@ 14918,7 +17876,6 @@ when shown in factored form.
year = "2015",
isbn = "9781942341321",
paper = "Lear15.pdf"

}
\end{chunk}
@@ 14952,8 +17909,120 @@ when shown in factored form.
\end{chunk}
+\begin{chunk}{axiom.bib}
+@misc{lion137,
+ author = "Unknown",
+ title = {{Thoughts in Free Time}},
+ link = "\url{https://lion137.blogspot.com/2019/02/fundamentalalgorithmspolynomialgcd.html}",
+ comment = "\url{https://github.com/lion137/Fundamental_Algorithsms}",
+ year = "2019"
+}
+
+\end{chunk}
+
+\index{Lipton, Richard J.}
+\begin{chunk}{axiom.bib}
+@article{Lipt94,
+ author = "Lipton, Richard J.",
+ title = {{StraightLine Complexity and Integer Factorization}},
+ journal = "LNCS",
+ volume = "877",
+ pages = "71--79",
+ year = "1994",
+ abstract =
+ "We show that if polynomials with many rational roots have
+ polynomial length straightline complexity, then integer
+ factorization is 'easy'",
+ paper = "Lipt94.pdf"
+}
+
+\end{chunk}
+
+\index{Liskov, Barbara}
+\index{Zilles, Stephen}
+\begin{chunk}{axiom.bib}
+@article{Lisk77a,
+ author = "Liskov, Barbara and Zilles, Stephen",
+ title = {{Programming with Abstract Data Types}},
+ journal = "SIGPLAN Notices",
+ volume = "9",
+ number = "4",
+ pages = "50--59",
+ year = "1977",
+ abstract =
+ "The motivation behind the work in veryhighlevel languages is to
+ ease the programming task by providing the programmer with a
+ language containing primitives or abstractions suitable to his
+ problem area. The programmer is then able to spend his effort in
+ the right place; he concentrates on solving his problem, and the
+ resulting program will be more reliable as a result. Clearly, this
+ is a worthwhile goal.
+
+ Unfortunately, it is very difficult for a designer to select in
+ advance all the abstractions which the users of his language might
+ need. If a language is to be used at all, it is likely to be used
+ to solve problems which its designer did not envision, and for
+ which the abstractions embedded in the language are not sufficient.
+
+ This paper presents an approach which allows the set of builtin
+ abstractions to be augmented when the need for a new data
+ abstraction is discovered. This approach to the handling of
+ abstraction is an outgrowth of work on designing a language for
+ structured programming. Relevant aspects of this language are
+ described, and examples of the use and definitions of abstraction
+ are given.",
+ paper = "Lisk77a.pdf"
+}
+
+\end{chunk}
+
+\index{Ly, Kim Quyen}
+\begin{chunk}{axiom.bib}
+@misc{Lyxx15,
+ author = "Ly, Kim Quyen",
+ title = {{Formalization in Coq of Polynomial Interpretations on
+ Rationals}},
+ year = "2015",
+ link = "\url{https://www.di.ens.fr/~quyen/publication/ly10.pdf}",
+ paper = "Lyxx15.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{M} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Mackie, Ian}
+\index{Pinto, Jorge Sousa}
+\begin{chunk}{axiom.bib}
+@article{Mack02,
+ author = "Mackie, Ian and Pinto, Jorge Sousa",
+ title = {{Encoding Linear Logic with Interaction Combinators}},
+ journal = "Information and Computation",
+ volume = "176",
+ pages = "153--186",
+ year = "2002",
+ abstract =
+ "The purpose of this paper is to demonstrate how Lafont's
+ interaction combinators, a system of three symbols and six
+ interaction rules, can be used to encode linear
+ logic. Specifically, we give a translation of the multiplicative,
+ exponential, and additive fragments of linear logic together with
+ a strategy for cutelimination which can be faithfully
+ simulated. Finally, we show briefly how this encoding can be used
+ for evaluating $\lambda$terms. In addition to offering a very
+ simple, perhaps the simplest, system of rewriting for linear logic
+ and the $\lambda$calculus, the interaction net implementation
+ that we present has been shown by experimental testing to offer a
+ good level of sharing in terms of the number of cutelimination
+ steps (resp. $\beta$reduction steps). In particular it performs
+ better than all extant finite systems of interaction nets.",
+ paper = "Mack02.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Ma, KwanLiu}
\index{Kessler, Robert R.}
\begin{chunk}{axiom.bib}
@@ 15009,6 +18078,51 @@ when shown in factored form.
\end{chunk}
+\index{Mahboubi, Assia}
+\begin{chunk}{axiom.bib}
+@article{Mahb13,
+ author = "Mahboubi, Assia",
+ title = {{The Rooster and the Butterflies}},
+ journal = "LNCS",
+ volume = "7961",
+ year = "2013",
+ abstract =
+ "This paper describes a machine-checked proof of the Jordan-H{\"o}lder
+ theorem for finite groups. The purpose of this description is to
+ discuss the representation of the elementary concepts of finite
+ group theory inside type theory. The design choices underlying
+ these representations were crucial to the successful formalization
+ of a complete proof of the Odd Order Theorem in the Coq system.",
+ paper = "Mahb13.pdf"
+}
+
+\end{chunk}
+
+\index{Majewski, Bohdan}
+\index{Havas, George}
+\begin{chunk}{axiom.bib}
+@article{Maje94,
+ author = "Majewski, Bohdan and Havas, George",
+ title = {{The Complexity of Greatest Common Divisor Computations}},
+ journal = "LNCS",
+ volume = "877",
+ pages = "184--193",
+ year = "1994",
+ abstract =
+ "We study the complexity of expressing the greatest common divisor
+ of $n$ positive numbers as a linear combination of the
+ numbers. We prove the NPcompleteness of finding an optimal set of
+ multipliers with respect to either of $L_0$ metric or the
+ $L_\infty$ norm. We present and analyze a new method for
+ expressing the gcd of $n$ numbers as their linear combination and
+ give an upper bound on the size of the largest multiplier
+ produced by this method, which is optimal.",
+ paper = "Maje94.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{MartinL\"of, P.}
\begin{chunk}{axiom.bib}
@article{Mart84,
@@ 15098,6 +18212,33 @@ when shown in factored form.
\end{chunk}
+\index{Meier, Andreas}
+\index{Sorge, Volker}
+\index{Colton, Simon}
+\begin{chunk}{axiom.bib}
+@article{Meie02,
+ author = "Meier, Andreas and Sorge, Volker and Colton, Simon",
+ title = {{Employing Theory Formation to Guide Proof Planning}},
+ journal = "LNCS",
+ volume = "2385",
+ year = "2002",
+ abstract =
+ "The invention of suitable concepts to characterise mathematical
+ structures is one of the most challenging tasks for both human
+ mathematicians and automated theorem provers alike. We present an
+ approach where automatic concept formation is used to guide
+ non-isomorphism proofs in the residue class domain. The main idea
+ behind the proof is to automatically identify discriminants for
+ two given structures to show that they are not
+ isomorphic. Suitable discriminants are generated by a theory
+ formation system; the overall proof is constructed by a proof
+ planner with the additional support of traditional automated
+ theorem provers and a computer algebra system.",
+ paper = "Meie02.pdf"
+}
+
+\end{chunk}
+
\index{Meili, Mario}
\begin{chunk}{axiom.bib}
@misc{Meilxx,
@@ 15131,7 +18272,7 @@ when shown in factored form.
year = "2009",
link = "\url{https://www.win.tue.nl/~hzantema/semssm.pdf}",
abstract =
 "n this paper we are going to describe the Wandâ€™s type inference
+ "In this paper we are going to describe Wand's type inference
algorithm and weâ€™ll try to extend this algorithm with the notion of
polymorphic let. By means of a type system, which weâ€™re going to
extend with some constraint language, we are able to extend the
@@ 15149,6 +18290,19 @@ when shown in factored form.
\end{chunk}
+\index{Morris Jr., J.H.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Morr73,
+ author = "Morris Jr., J.H.",
+ title = {{Types are not Sets}},
+ booktitle = "Symp. on the Principles of Programming Languages",
+ publisher = "ACM",
+ pages = "120--124",
+ year = "1973"
+}
+
+\end{chunk}
+
\index{Moschovakis, Y.N.}
\begin{chunk}{axiom.bib}
@article{Mosc84,
@@ 15169,6 +18323,74 @@ when shown in factored form.
\end{chunk}
+\index{Moses, Joel}
+\index{Zippel, Richard}
+\begin{chunk}{axiom.bib}
+@article{Mose79,
+ author = "Moses, Joel and Zippel, Richard",
+ title = {{Algorithms for the Integration of Algebraic Functions}},
+ journal = "LNCS",
+ volume = "72",
+ pages = "426--430",
+ year = "1979",
+ paper = "Mose79.pdf"
+}
+
+\end{chunk}
+
+\index{Moses, Joel}
+\begin{chunk}{axiom.bib}
+@article{Mose72,
+ author = "Moses, Joel",
+ title = {{Toward a General Theory of Special Functions}},
+ journal = "Communications of the ACM",
+ volume = "15",
+ number = "7",
+ pages = "550--554",
+ year = "1972",
+ abstract =
+ "A list of a number of natural developments for the field of
+ algebraic manipulation is given. Then the prospects for a general
+ theory of functions defined by ordinary differential equations are
+ discussed. The claim is made that recent developments in
+ mathematics indicate that it should be possible to algorithmically
+ generate many properties of solutions to differential
+ equations. Such a theory is preferable to a less general effort to
+ make algebraic manipulation systems knowledgeable about the usual
+ special functions (e.g. exponential, hypergeometric).",
+ paper = "Mose72.pdf",
+ keywords = "printed, DONE"
+}
+
+\end{chunk}
+
+\index{Mosses, Peter}
+\begin{chunk}{axiom.bib}
+@article{Moss80,
+ author = "Mosses, Peter",
+ title = {{A Constructive Approach to Compiler Correctness}},
+ journal = "LNCS",
+ volume = "85",
+ year = "1980",
+ booktitle = "Automata, Languages and Programming",
+ publisher = "Springer",
+ abstract =
+ "It is suggested that denotational semantics definitions of
+ programming languages should be based on a small number of
+ abstract data types, each embodying a fundamental concept of
+ computation. Once these fundamental abstract data types have been
+ implemented in a particular target language (e.g. stackmachine
+ code), it is a simple matter to construct a correct compiler for
+ any source language from its denotational semantic definition. The
+ approach is illustrated by constructing a compiler similar to the
+ one which was proved correct by Thatcher, Wagner \& Wright
+ (1979). Some familiarity with manysorted algebras is presumed.",
+ paper = "Moss80.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Moy, Yannick}
\index{Wallenburg, Angela}
\begin{chunk}{axiom.bib}
@@ 15215,6 +18437,96 @@ when shown in factored form.
\end{chunk}
+\index{Muller, Dennis}
+\index{Gauthier, Thibault}
+\index{Kaliszyk, Cezary}
+\index{Kohlhase, Michael}
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Mull17,
+ author = "Muller, Dennis and Gauthier, Thibault and Kaliszyk, Cezary
+ and Kohlhase, Michael and Rabe, Florian",
+ title = {{Classification of Alignments Between Concepts of Formal
+ Mathematical Systems}},
+ journal = "LNCS",
+ volume = "10383",
+ year = "2017",
+ abstract =
+ "Mathematical knowledge is publicly available in dozens of
+ different formats and languages, ranging from informal
+ (e.g. Wikipedia) to formal corpora (e.g. Mizar). Despite an
+ enormous amount of overlap between these corpora, only few
+  machine-actionable connections exist. We speak of alignment if the
+ same concept occurs in different libraries, possibly with slightly
+ different names, notations, or formal definitions. Leveraging
+ these alignments creates a huge potential for knowledge sharing
+  and transfer, e.g. integrating theorem provers or reusing
+ services across systems. Notably, even imperfect alignments,
+ i.e. concepts that are very similar rather than identical, can
+ often play very important roles. Specifically, in machine learning
+ techniques for theorem proving and in automation techniques that
+ use these, they allow learningreasoning base automation for
+ theorem provers to take inspiration from proofs from different
+ formal proof libraries or semiformal libraries even if the latter
+ is based on a different mathematical foundation. We present a
+ classification of alignments and design a simple format for
+ describing alignments, as well as an infrastructure for sharing
+ them. We propose these as a centralized standard for the
+ community. Finally, we present an initial collection of
+ approximately 12000 alignments from the different kinds of
+ mathematical corpora, including proof assistant libraries and
+ semiformal corpora as a public resource.",
+ paper = "Mull17.pdf"
+}
+
+\end{chunk}
+
+\index{Muller, Dennis}
+\index{Kohlhase, Michael}
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Mull18,
+ author = "Muller, Dennis and Kohlhase, Michael and Rabe, Florian",
+ title = {{Automatically Finding Theory Morphisms for Knowledge
+ Management}},
+ journal = "LNCS",
+ volume = "11006",
+ year = "2018",
+ abstract =
+ "We present a method for finding morphisms between formal
+ theories, both within as well as across libraries based on
+ different logical foundations. As they induce new theorems in the
+ target theory for any of the source theory, theory morphisms are
+  high-value elements of a modular formal library. Usually, theory
+ morphisms are manually encoded, but this practice requires authors
+ who are familiar with source and target theories at the same time,
+ which limits the scalability of the manual approach.
+
+ To remedy this problem, we have developed a morphism finder
+ algorithm that automates theory morphism discovery. In this paper we
+ present an implementation in the MMT system and show specific use
+  cases. We focus on an application of theory discovery, where a user
+ can check whether a (part of a) formal theory already exists in
+ some library, potentially avoiding duplication of work or
+ suggesting an opportunity for refactoring.",
+ paper = "Mull18.pdf"
+}
+
+\end{chunk}
+
+\index{Muller, Jean-Michel}
+\begin{chunk}{axiom.bib}
+@book{Mull16,
+  author = "Muller, Jean-Michel",
+ title = {{Elementary Functions: Algorithms and Implementation}},
+ isbn = "9781489979810",
+ publisher = "Birkhauser",
+ year = "2016",
+ paper = "Mull16.pdf"
+}
+
+\end{chunk}
+
\index{Murphy, Robin R.}
\begin{chunk}{axiom.bib}
@book{Murp18,
@@ 15226,7 +18538,6 @@ when shown in factored form.
"\url{https://mitpress.mit.edu/books/roboticsthroughsciencefiction}",
isbn = "9780262536264",
comment = "verification, validation, and trust"

}
\end{chunk}
@@ 15264,6 +18575,29 @@ when shown in factored form.
\subsection{N} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Nederpelt, R.}
+\index{Kamareddine, F.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Nede01,
+ author = "Nederpelt, R. and Kamareddine, F.",
+ title = {{An Abstract Syntax for a Formal Language of Mathematics}},
+ booktitle = "4th int. Tbilisi Symp. on Language, Logic, and Computation",
+ publisher = "unknown",
+ year = "2001",
+ abstract =
+ "This paper provides an abstract syntax for a formal language of
+ mathematics. We call our language Weak Type Theory (abbreviated
+ WTT). WTT will be as faithful as possible to the mathematician's
+ language yet will be formal and will not allow ambiguities. WTT
+  can be used as an intermediary between the natural language of
+ the mathematician and the formal language of the logician. As far
+ as we know, this is the first extensive formalization of an
+ abstract syntax of a formal language of mathematics.",
+ paper = "Nede01.pdf"
+}
+
+\end{chunk}
+
\index{Nimmer, Jeremy W.}
\index{Ernst, Michael D.}
\begin{chunk}{axiom.bib}
@@ 15322,6 +18656,34 @@ when shown in factored form.
\end{chunk}
+\index{Norrish, Michael}
+\index{Slind, Konrad}
+\begin{chunk}{axiom.bib}
+@article{Norr02,
+ author = "Norrish, Michael and Slind, Konrad",
+ title = {{A Thread of HOL Development}},
+ journal = "Computer Journal",
+ volume = "45",
+ number = "1",
+  pages = "37--45",
+ year = "2002",
+ abstract =
+ "The HOL system is a mechanized proof assistant for higher order
+ logic that has been under continuous development since the
+  mid-1980s, by an ever-changing group of developers and external
+  contributors. We give a brief overview of various implementations
+ of the HOL logic before focusing on the evolution of certain
+ important features available in a recent implementation. We also
+ illustrate how the module system of Standard ML provided security
+ and modularity in the construction of the HOL kernel, as well as
+ serving in a separate capacity as a useful representation medium
+ for persistent, hierarchical logical theories.",
+ paper = "Norr02.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{O} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Oberhoff, Sebastian}
@@ 15344,6 +18706,27 @@ when shown in factored form.
\end{chunk}
+\begin{chunk}{axiom.bib}
+@inproceedings{Oisd18,
+ author = "Anonymous",
+ title = {{Solving Rings in Agda}},
+ booktitle = "Proc. ACM Program. Lang.",
+ publisher = "ACM",
+ year = "2018",
+ abstract =
+ "We present a new library which automates the construction of
+ equivalence proofs between polynomials over commutative rings and
+ semirings in the programming language Agda [Norell and Chapman
+ 2008]. It is significantly faster than Agda's existing solver. We
+ use reflection to provide a simple interface to the solver, and
+ demonstrate how to use the constructed proofs to provide
+  step-by-step solutions.",
+ paper = "Oisd18.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Olsson, Ola}
\index{Wallenburg, Angela}
\begin{chunk}{axiom.bib}
@@ 15423,8 +18806,58 @@ when shown in factored form.
\end{chunk}
+\index{Ostebee, Arnold}
+\index{Zorn, Paul}
+\begin{chunk}{axiom.bib}
+@article{Oste93,
+ author = "Ostebee, Arnold and Zorn, Paul",
+ title = {{Telegraphic Reviews}},
+ journal = "The American Mathematical Monthly",
+ volume = "100",
+ number = "8",
+  pages = "812--817",
+ year = "1993",
+ paper = "Oste93.pdf",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
\subsection{P} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Parisse, Bernard}
+\begin{chunk}{axiom.bib}
+@misc{Pari19,
+ author = "Parisse, Bernard",
+ title = {{Computing Huge Groebner Basis like Cyclic10 over
+ $\mathbb{Q}$ with Giac}},
+  link = "\url{https://hal.archives-ouvertes.fr/hal-02081648}",
+ year = "2019",
+ abstract =
+ "We present a short description on how to finetune the
+ modular algorithm implemented in the Giac computer algebra system
+ to reconstruct large Groebner basis over $\mathbb{Q}$. The
+ classical cyclic10 benchmark will serve as example.",
+ paper = "Pari19.pdf"
+}
+
+\end{chunk}
+
+\index{Paulson, Lawrence C.}
+\begin{chunk}{axiom.bib}
+@inbook{Paul90b,
+ author = "Paulson, Lawrence C.",
+ title = {{Designing a Theorem Prover}},
+ booktitle = "Handbook of Logic in Computer Science, Volume 2",
+ publisher = "Oxford University Press",
+ pages = "415475",
+ year = "1992",
+ paper = "Paul90b.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Pfenning, Frank}
\begin{chunk}{axiom.bib}
@misc{Pfen06,
@@ 15450,6 +18883,31 @@ when shown in factored form.
\end{chunk}
+\index{Pirog, Maciej}
+\index{Gibbons, Jeremy}
+\begin{chunk}{axiom.bib}
+@misc{Piroxx,
+ author = "Pirog, Maciej and Gibbons, Jeremy",
+ title = {{Extended Abstract: A Functional Derivation of the Warren
+ Abstract Machine}},
+ link = "\url{http://www.cs.ox.ac.uk/jeremy.gibbons/publications/wam.pdf}",
+ year = "unknown",
+ abstract =
+ "Based on Danvy et al.'s functional correspondence, we give a
+ further example of gradual refinement of an interpreter into a
+ known, lowlevel abstract machine underlying realworld compilers,
+ by deriving an abstract model of the Warren Abstract Machine from
+ a simple resolutionbased Prolog interpreter. We show that other
+ wellknown functional programming techniques (namely, explicit
+ laziness and semipersistent data structures) can help to develop
+ abstract machines without detailed examination of the semantics
+ realised by the interpreter.",
+ paper = "Piroxx.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Pittman, Dan}
\begin{chunk}{axiom.bib}
@misc{Pitt18,
@@ 15462,6 +18920,42 @@ when shown in factored form.
\end{chunk}
+\index{Piskac, Ruzica}
+\begin{chunk}{axiom.bib}
+@inproceedings{Pisk15,
+ author = "Piskac, Ruzica",
+ title = {{From Decision Procedures to Synthesis Procedures}},
+ booktitle = "Symp. on Symbolic and Numeric Algorithms for
+ Scientific Computing",
+ publisher = "ACM",
+ year = "2015",
+ abstract =
+ "Software synthesis is a technique for automatically generating
+ code from a given specification. The goal of software synthesis is
+ to make software development easier while increasing both the
+ productivity of the programmer and the correctness of the produced
+ code. In this paper we present an approach to synthesis that
+ relies on the use of automated reasoning and decision
+ procedures. First we describe how to generalize decision
+ procedures into predictable and complete synthesis
+ procedures. Here completeness means that the procedure is
+ guaranteed to find code that satisfies the given specification. We
+ illustrate the process of turning a decision procedure into a
+ synthesis procedure using linear integer arithmetic as an example.
+
+ However, writing a complete specification can be a tedious task,
+ sometimes even harder than writing the code itself. To overcome
+ this problem, ideally the user could provide a few inputoutput
+ examples, and then the code should be automatically derived. We
+ outline how to broaden usability and applications of current
+ software synthesis techniques. We conclude with an outlook on
+ possible future research directions and applications of synthesis
+ procedures.",
+ paper = "Pisk15.pdf"
+}
+
+\end{chunk}
+
\index{Platzer, Andre}
\begin{chunk}{axiom.bib}
@book{Plat18,
@@ 15587,6 +19081,132 @@ when shown in factored form.
\subsection{R} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+\index{Rabe, Florian}
+\index{Kohlhase, Michael}
+\index{Coen, Claudio Sacerdoti}
+\begin{chunk}{axiom.bib}
+@article{Rabe11,
+ author = "Rabe, Florian and Kohlhase, Michael and Coen, Claudio Sacerdoti",
+ title = {{A Foundational View on Integration Problems}},
+ journal = "LNCS",
+ volume = "6824",
+ year = "2011",
+ abstract =
+ "The integration of reasoning and computation services across
+ system and language boundaries is a challenging problem of
+ computer science. In this paper, we use integration for the
+ scenario where we have two systems that we integrate by moving
+ problems and solutions between them. While this scenario is often
+ approached from an engineering perspective, we take a foundational
+ view. Based on the generic declarative language MMT, we develop a
+ theoretical framework for system integration using theories and
+ partial theory morphisms. Because MMT permits representations of
+ the metalogical foundations themselves, this includes integration
+  across logics. We discuss safe and unsafe integration schemes and
+ devise a general form of safe integration.",
+ paper = "Rabe11.pdf"
+}
+
+\end{chunk}
+
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Rabe12,
+ author = "Rabe, Florian",
+ title = {{A Query Language for Formal Mathematical Libraries}},
+ journal = "LNCS",
+ volume = "7362",
+ year = "2012",
+ abstract =
+ "One of the most promising applications of mathematical knowledge
+ management is search: Even if we restrict attention to the tiny
+ fragment of mathematics that has been formalized, the amount
+ exceeds the comprehension of an individual human.
+
+ Based on the generic representation language MMT, we introduce the
+  mathematical query language QMT: It combines simplicity,
+ expressivity, and scalability while avoiding a commitment to a
+ particular logical formalism. QMT can integrate various search
+ paradigms such as unification, semantic web, or XQuery style
+ queries, and QMT queries can span different mathematical
+ libraries.
+
+ We have implemented QMT as a part of the MMT API. This combination
+ provides a scalable indexing and query engine that can be readily
+ applied to any library of mathematical knowledge. While our focus
+ here is on libraries that are available in a content markup
+ language, QMT naturally extends to presentation and narration
+ markup languages.",
+ paper = "Rabe12.pdf"
+}
+
+\end{chunk}
+
+\index{Rabe, Florian}
+\begin{chunk}{axiom.bib}
+@article{Rabe15,
+ author = "Rabe, Florian",
+ title = {{Generic Literals}},
+ journal = "LNCS",
+ volume = "9150",
+ year = "2015",
+ abstract =
+ "MMT is a formal framework that combines the flexibility of
+ knowledge representation languages like OPENMATH with the formal
+ rigor of logical frameworks like LF. It systematically abstracts
+ from theoretical and practical aspects of individual formal
+ languages and tries to develop as many solutions as possible
+ generically.
+
+  In this work, we allow MMT theories to declare user-defined
+ literals, which makes literals as userextensible as operators,
+ axioms, and notations. This is particularly important for
+ framework languages, which must be able to represent any choice of
+ literals. Theoretically, our literals are introduced by importing
+ a model that defines the denotations of some types and function
+ symbols. Practically, MMT is coupled with a programming language,
+ in which these models are defined.
+
+ Our results are implemented in the MMT system. In particular,
+ literals and computation on them are integrated with the parser
+ and type checker.",
+ paper = "Rabe15.pdf"
+}
+
+\end{chunk}
+
+\index{Raja, Amar}
+\index{Rayner, Matthew}
+\index{Sexton, Alan}
+\index{Sorge, Volker}
+\begin{chunk}{axiom.bib}
+@article{Raja06,
+ author = "Raja, Amar and Rayner, Matthew and Sexton, Alan and
+ Sorge, Volker",
+ title = {{Towards a Parser for Mathematical Formula Recognition}},
+ journal = "LNCS",
+ volume = "4108",
+ year = "2006",
+ abstract =
+ "For the transfer of mathematical knowledge from paper to
+ electronic form, the reliable automatic analysis and understanding
+ of mathematical texts is crucial. A robust system for this task
+ needs to combine low level character recognition with higher level
+ structural analysis of mathematical formulas. We present progress
+  towards this goal by extending a database-driven optical character
+ recognition system for mathematics with two high level analysis
+ features. One extends and enhances the traditional approach of
+ projection profile cutting. The second aims at integrating the
+ recognition process with graph grammar rewriting by giving support
+ to the interactive construction and validation of grammar
+  rules. Both approaches can be successfully employed to enhance the
+ capabilities of our system to recognise and reconstruct compound
+ mathematical expressions.",
+ paper = "Raja06.pdf"
+}
+
+\end{chunk}
+
\index{Rees, Jonathan}
\index{Adams, Norman I.}
\begin{chunk}{axiom.bib}
@@ 15679,6 +19299,89 @@ when shown in factored form.
\end{chunk}
+\index{Risch, Robert H.}
+\begin{chunk}{axiom.bib}
+@article{Risc76,
+ author = "Risch, Robert H.",
+ title = {{Implicitly Elementary Integrals}},
+ journal = "Proc. Amer. Math.",
+ volume = "57",
+ number = "1",
+  pages = "1--7",
+ year = "1976",
+ paper = "Risc76.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Risch, Robert H.}
+\begin{chunk}{axiom.bib}
+@article{Risc79,
+ author = "Risch, Robert H.",
+ title = {{Algebraic Properties of the Elementary Functions of Analysis}},
+ journal = "American Journal of Mathematics",
+ volume = "101",
+ number = "4",
+  pages = "743--759",
+ year = "1979",
+ abstract =
+ "The elementary functions of a complex variable $z$ are those
+ functions built up from the rational functions of $z$ by
+ exponentiation, taking logarithms, and algebraic operations. The
+ purpose of this paper is first, to prove a 'structure theorem'
+ which shows that if an algebraic relation holds among a set of
+ elementary functions, then they must satisfy an algebraic relation
+ of a special kind. Then we make four applications of this theorem,
+ obtaining both new and old results which are described here
+ briefly (and imprecisely).
+ \begin{enumerate}
+ \item An algorithm is given for telling when two elementary
+ expressions define the same function.
+ \item A characterization is derived of those ordinary differential
+ equations having elementary solutions
+  \item The four basic functions of elementary calculus -- exp, log,
+  tan, tan$^{-1}$ -- are shown to be 'irredundant'
+ \item A characterization is given of elementary functions
+ possessing elementary inverses.
+ \end{enumerate}",
+ paper = "Risc79.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Ritt, J.F.}
+\begin{chunk}{axiom.bib}
+@article{Ritt25,
+ author = "Ritt, J.F.",
+ title = {{Elementary Functions and their Inverses}},
+ journal = "Transactions of the American Mathematical Society",
+ volume = "27",
+  pages = "68--90",
+ year = "1925",
+ paper = "Ritt25.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Robinson, J.A.}
+\index{Sibert, E.E.}
+\begin{chunk}{axiom.bib}
+@techreport{Robi80,
+ author = "Robinson, J.A. and Sibert, E.E.",
+ title = {{Loglisp: An Alternative to Prolog}},
+ type = "technical report",
+ institution = "University of Syracuse",
+ number = "807",
+ year = "1980",
+ paper = "Robi80.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Roessle, Ian}
\index{Verbeek, Freek}
\index{Ravindran, Binoy}
@@ 15739,6 +19442,32 @@ when shown in factored form.
\end{chunk}
+\index{Russinoff, David M.}
+\begin{chunk}{axiom.bib}
+@article{Russ92,
+ author = "Russinoff, David M.",
+ title = {{A Verified Prolog Compiler for the Warren Abstract Machine}},
+ journal = "Journal of Logic Programming",
+ volume = "13",
+ number = "4",
+  pages = "367--412",
+ year = "1992",
+ abstract =
+ "We extend the theory of Prolog to provide a framework for the
+ study of Prolog compilation technology. For this purpose, we first
+ demonstrate the semantic equivalence of two Prolog interpreters: a
+  conventional SLD-refutation procedure and one that employs
+ Warren's ``last call'' optimization. Next, we formally define the
+ Warren Abstract Machine (WAM) and its instruction set and present
+ a Prolog compiler for the WAM. Finally, we prove that the WAM
+ execution of a compiled Prolog program produces the same result as
+ the interpretation of its source.",
+ paper = "Russ92.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\subsection{S} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Sannella, Donald}
@@ 15852,12 +19581,72 @@ when shown in factored form.
\end{chunk}
+\index{Shallit, Jeffrey}
+\index{Sorenson, Jonathan}
+\begin{chunk}{axiom.bib}
+@article{Shal94,
+ author = "Shallit, Jeffrey and Sorenson, Jonathan",
+  title = {{Analysis of a Left-Shift Binary GCD Algorithm}},
+ journal = "Journal of Symbolic Computation",
+ volume = "17",
+ number = "6",
+  pages = "473--486",
+ year = "1994",
+ abstract =
+  "We introduce a new left-shift binary algorithm, LSBGCD, for
+ computing the greatest common divisor of two integers, and we
+ provide an analysis of the worstcase behavior of the
+ algorithm. The analysis depends on a theorem of Ramharter about
+ the extremal behavior of certain continuants.",
+ paper = "Shal94.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Schorre, D.V.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Scho64,
+ author = "Schorre, D.V.",
+  title = {{META II: A Syntax-Oriented Compiler Writing Language}},
+ booktitle = "19th National Conference of the ACM",
+ publisher = "ACM",
+ year = "1964",
+ abstract =
+ "META II is a compiler writing language which consists of syntax
+ equations resembling Backus normal form and into which
+ instructions to output assembly language commands are
+ inserted. Compilers have been written in this language for VALGOL
+ I and VALGOL II. The former is a simple algebraic language
+  designed for the purpose of illustrating META II. The latter
+  contains a fairly large subset of ALGOL 60.
+
+ The method of writing compilers which is given in detail in the
+  paper may be explained briefly as follows. Each syntax equation is
+ translated into a recursive subroutine which tests the input
+ string for a particular phrase structure, and deletes it if
+ found. Backup is avoided by the extensive use of factoring in the
+ syntax equations. For each source language, an interpreter is
+ written and programs are compiled into that interpretive language.
+
+ META II is not intended as a standard language which everyone will
+ use to write compilers. Rather, it is an example of a simple
+ working language which can give one a good start in designing a
+ compilerwriting compiler suited to his own needs. Indeed, the
+ META II compiler is written in its own language, thus lending
+ itself to modification.",
+ paper = "Scho64.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Shivers, Olin}
\begin{chunk}{axiom.bib}
@techreport{Shiv90,
author = "Shivers, Olin",
 title = {{DataFlow Analysis and Type Recovery in Scheme}}
, year = "1990",
+ title = {{DataFlow Analysis and Type Recovery in Scheme}},
+ year = "1990",
type = "technical report",
institution = "Carnegie Mellon University",
number = "CMUCS90115",
@@ 16079,6 +19868,29 @@ when shown in factored form.
\end{chunk}
+\index{Stratford, Jonathan}
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@article{Stra08,
+ author = "Stratford, Jonathan and Davenport, James H.",
+ title = {{Unit Knowledge Management}},
+ journal = "LNCS",
+ volume = "5144",
+ year = "2008",
+ abstract =
+ "In 9, various observations on the handling of (physical) units in
+ OpenMath were made. In this paper, we update those observations,
+ and make some comments based on a working unit converter that,
+ because of its OpenMathbased design, is modular, extensible, and
+ reflective. We also note that some of the issues in an effective
+ converter, such as the rules governing abbreviations, being more
+ linguistic than mathematical, do not lend themselves to easy
+ expression in OpenMath.",
+ paper = "Stra08.pdf"
+}
+
+\end{chunk}
+
\index{Steenkiste, Peter}
\index{Hennessy, John}
\begin{chunk}{axiom.bib}
@@ 16117,6 +19929,20 @@ when shown in factored form.
\end{chunk}
+\index{Storjohann, Arne}
+\begin{chunk}{axiom.bib}
+@inproceedings{Stor97,
+ author = "Storjohann, Arne",
+ title = {{A Solution to the extended GCD problem with applications}},
+ booktitle = "ISSAC '97",
+ publisher = "ACM",
+ year = "1997",
+ paper = "Stor97.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Stump, Aaron}
\begin{chunk}{axiom.bib}
@misc{Stum18,
@@ 16130,6 +19956,29 @@ when shown in factored form.
\end{chunk}
+\index{Sturm, Thomas}
+\begin{chunk}{axiom.bib}
+@article{Stur02,
+ author = "Sturm, Thomas",
+ title = {{Integration of Quantifier Elimination with Constraint
+ Logic Programming}},
+ journal = "LNCS",
+ volume = "2385",
+ year = "2002",
+ abstract =
+ "We examine the potential of an extension of constraint logic
+ programming, where the admissible constraints are arbitrary
+ firstorder formulas over some domain. Constraint solving is
+ realized by effective quantifier elimination. The arithmetic is
+  always exact. We describe the conceptual advantages of our approach
+ and the capabilities of the current implementation
+ CLP(RL). Supported domains are $\mathbb{R}$, $\mathbb{C}$, and
+ $\mathbb{Q}_p$. For our discussion here we restrict to $\mathbb{R}$.",
+ paper = "Stur02.pdf"
+}
+
+\end{chunk}
+
\subsection{T} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\index{Tan, Yong Kiam}
@@ 16172,6 +20021,46 @@ when shown in factored form.
\end{chunk}
+\index{Tankink, Carst}
+\index{Kaliszyk, Cezary}
+\index{Urban, Josef}
+\index{Geuvers, Herman}
+\begin{chunk}{axiom.bib}
+@article{Tank13,
+ author = "Tankink, Carst and Kaliszyk, Cezary and Urban, Josef and
+ Geuvers, Herman",
+ title = {{Formal Mathematics on Display: A Wiki for Flyspeck}},
+ journal = "LNCS",
+ volume = "7961",
+ year = "2013",
+ abstract =
+ "The AGORA system is a prototype ``Wiki for Formal Mathematics'',
+ with an aim to support developing and documenting large
+ formalizations of mathematics in a proof assistant. The functions
+  implemented in AGORA include in-browser editing, strong AI/ATP
+ proof advice, verification, and HTML rendering. The HTML rendering
+ contains hyperlinks and provides ondemand explanation of the
+ proof state for each proof step. In the present paper we show the
+ prototype Flyspeck Wiki as an instance of AGORA for HOL Light
+ formalizations. The wiki can be used for formalizations of
+ mathematics and for writing informal wiki pages about
+ mathematics. Such informal pages may contain islands of formal
+ text, which is used here for providing an initial crosslinking
+ between Hales's informal Flyspeck book, and the formal Flyspeck
+ development.
+
+ The AGORA platform intends to address distributed wikistyle
+ collaboration on large formalization projects, in particular both
+ the aspect of immediate editing, verification and rendering of
+ formal code, and the aspect of gradual and mutual refactoring and
+ correspondence of the initial informal text and its
+ formalization. Here, we highlight these features with the Flyspeck
+ Wiki.",
+ paper = "Tank13.pdf"
+}
+
+\end{chunk}
+
+\index{Breazu-Tannen, Val}
\index{Coquand, Thierry}
\index{Gunter, Carl A.}
@@ 16216,6 +20105,47 @@ when shown in factored form.
\end{chunk}
+\index{Tarau, Paul}
+\begin{chunk}{axiom.bib}
+@misc{Tara16,
+ author = "Tarau, Paul",
+ title = {{A Hitchhiker's Guide to Reinventing a Prolog Machine}},
+ year = "2016",
+ abstract =
+ "We take a fresh, ``cleanroom'' look at implementing Prolog by
+ deriving its translation to an executable representation and its
+ execution algorithm from a simple Horn Clause metainterpreter.
+ The resulting design has some interesting properties:
+ \begin{itemize}
+ \item the heap representation of terms and the abstract machine
+ instruction encodings are the same.
+ \item no dedicated code area is used as the code is placed
+ directly on the heap.
+ \item unification and indexing operations are orthogonal
+ \item filtering of matching clauses happens without building new
+ structures on the heap
+ \item variables in function and predicate symbol positions are
+ handled with no performance penalty
+ \item a simple Englishlike syntax is used as an intermediate
+ representation for clauses and goals
+ \item the same Englishlike syntax can be used by programmers
+ directly as an alternative to classic Prolog syntax
+ \item solutions of (multiple) logic engines are exposed as answer
+ streams that can be combined through typical functional
+ programming patterns
+  \item performance of a basic interpreter implementing our design is
+ within a factor of 2 of a highly optimized WAMbased system
+ \end{itemize}
+
+ To help placing our design on the fairly rich map of Prolog
+ systems, we discuss similarities to existing Prolog abstract
+ machines, with emphasis on separating necessary commonalities from
+ arbitrary implementation choices.",
+ paper = "Tara16.pdf"
+}
+
+\end{chunk}
+
\index{Tarditi, David}
\index{Morrisett, Greg}
\index{Cheng, Perry}
@@ 16281,6 +20211,38 @@ when shown in factored form.
\end{chunk}
+\index{Trager, Barry}
+\begin{chunk}{axiom.bib}
+@article{Trag79,
+ author = "Trager, Barry",
+ title = {{Integration of Simple Radical Extensions}},
+ journal = "LNCS",
+ volume = "72",
+  pages = "408--414",
+ year = "1979",
+ abstract =
+ "Risch's landmark paper presented the first decision procedure for
+ the integration of elementary functions. In that paper he required
+ that the functions appearing in the integrand be algebraically
+ independent. Shortly afterwards in [Risalg] and [Ris70] he relaxed
+ that restriction and outlined a complete decision procedure for
+ the integration of elementary functions in finite
+ terms. Unfortunately his algorithms for dealing with algebraic
+ functions required considerably more complex machinery than his
+ earlier ones for purely transcendental functions. Moses'
+ implementation of the earlier approach in MACSYMA demonstrated its
+ practicality, whereas the same has yet to be done for Risch's more
+ recent approach.
+
+ This paper will show how Risch's earlier techniques can be
+ generalized to deal with unnested radicals. While this may seem a
+ severe restriction, perusing an integral table such as [Bois61] will
+ show that fewer than 1\% of the problems are excluded.",
+ paper = "Trag79.pdf"
+}
+
+\end{chunk}
+
\subsection{U} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\subsection{V} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ 16439,6 +20401,116 @@ when shown in factored form.
\end{chunk}
+\index{Wang, Ke}
+\begin{chunk}{axiom.bib}
+@misc{Wang19,
+ author = "Wang, Ke",
+ title = {{Learning Scalable and Precise Representation of Program
+ Semantics}},
+ year = "2019",
+ abstract =
+ "Neural program embedding has shown potential in aiding the
+ analysis of largescale, complicated software. Newly proposed deep
+ neural architectures pride themselves on learning program
+ semantics rather than superficial syntactic features. However, by
+ considering the source code only, the vast majority of neural
+ networks do not capture a deep, precise representation of program
+ semantics. In this paper, we present DYPRO, a novel deep neural
+ network that learns from program execution traces. Compared to the
+ prior dynamic models, not only is DYPRO capable of generalizing
+ across multiple executions for learning a program's dynamic
+ semantics in its entirety, but DYPRO is also more efficient when
+ dealing with programs yielding long execution traces. For
+ evaluation, we task DYPRO with semantic classification
+ (i.e. categorizing programs based on their semantics) and compared
+ it against two prominent static models: Gated Graph Neural Network
+ and TreeLSTM. We find that DYPRO achieves the highest prediction
+ accuracy among all models. To further reveal the capacity of all
+ aforementioned deep neural architectures, we examine if the models
+ can learn to detect deeper semantic properties of a program. In
+ particular given a task of recognizing loop invariants, we show
+ DYPRO beats all static models by a wide margin.",
+ paper = "Wang19.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Warren, David H.D.}
+\index{Pereira, Luis M.}
+\begin{chunk}{axiom.bib}
+@misc{Warr77,
+ author = "Warren, David H.D. and Pereira, Luis M.",
+ title = {{Prolog  The Language and its Implementation Compared
+ with Lisp}},
+ year = "1977",
+ link =
+ "\url{http://www.public.imtbstsp.eu/~gibson/Teaching/TeachingReadingMaterial/WarrenPereiraPereira77.pdf}",
+ abstract =
+ "Prolog is a simple but powerful programming language founded on
+ symbolic logic. The basic computational mechanism is a pattern
+ matching process (``unification'') operating on general record
+ structures (``terms of logic''). We briefly review the language
+ and compare it especially with pure Lisp. The remainder of the
+ paper discusses techniques for implementing Prolog efficiently; in
+ particular we describe how to compile the patterns involved in the
+ matching process. These techniques are as incorporated in our
+ DECsystem10 Prolog compiler (written in Prolog). The code it
+ generates is comparable in speed with that produced by existing
+ DEC10 Lisp compilers. We argue that pattern matching is a better
+ method for expressing operations on structured data than
+ conventional selectors and constructors  both for the user and
+ for the implementor.",
+ paper = "Warr77.pdf"
+}
+
+\end{chunk}
+
+\index{Warren, David H.D.}
+\begin{chunk}{axiom.bib}
+@techreport{Warr83,
+ author = "Warren, David H.D.",
+ title = {{An Abstract Prolog Instruction Set}},
+ type = "technical note",
+ number = "309",
+ institution = "SRI International",
+ year = "1983",
+ paper = "Warr83.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Watt, Stephen M.}
+\begin{chunk}{axiom.bib}
+@article{Watt09,
+ author = "Watt, Stephen M.",
+ title = {{Algorithms for the Functional Decomposition of Laurent
+ Polynomials}},
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
+ abstract =
+ "Recent work has detailed the conditions under which univariate
+ Laurent polynomials have functional decompositions. This paper
+ presents algorithms to compute such univariate Laurent polynomial
+ decompositions efficiently and gives their multivariate
+ generalizations.
+
+ One application of functional decomposition of Laurent polynomials
+ is the functional decomposition of socalled ``symbolic
+ polynomials''. These are polynomiallike objects whose exponents
+ are themselves integervalued polynomials rather than
+ integers. The algebraic independence of $X$, $X^n$, $X^{n^2/2}$,
+ etc., and some elementary results on integervalued polynomials
+ allow problems with symbolic polynomials to be reduced to problems
+ with multivariate Laurent polynomials. Hence we are interested in
+ the functional decomposition of these objects.",
+ paper = "Watt09.pdf"
+}
+
+\end{chunk}
+
\index{Weirich, Stephanie}
\begin{chunk}{axiom.bib}
@misc{Weir18,
@@ 16461,6 +20533,55 @@ when shown in factored form.
\end{chunk}
+\index{Wiedijk, Freek}
+\begin{chunk}{axiom.bib}
+@article{Wied03b,
+ author = "Wiedijk, Freek",
+ title = {{Comparing Mathematical Provers}},
+ journal = "LNCS",
+ volume = "2594",
+ year = "2003",
+ abstract =
+ "We compare fifteen systems for the formalization of mathematics
+ with a computer. We present several tables that list various
+ properties of these programs. The three main dimensions on which
+ we compare these systems are: the size of their library, the
+ strength of their logic and their level of automation.",
+ paper = "Wied03b.pdf"
+}
+
+\end{chunk}
+
+\index{Wilson, David J.}
+\index{Bradford, Russell J.}
+\index{Davenport, James H.}
+\begin{chunk}{axiom.bib}
+@article{Wils12,
+ author = "Wilson, David J. and Bradford, Russell J. and
+ Davenport, James H.",
+ title = {{Speeding Up Cylindrical Algebraic Decomposition by
+ Groebner Bases}},
+ journal = "LNCS",
+ volume = "7362",
+ year = "2012",
+ abstract =
+ "Groebner Bases and Cylindrical Algebraic Decomposition are
+ generally thought of as two, rather different, methods of looking
+ at systems of equations and, in the case of Cylindrical Algebraic
+ Decomposition, inequalities. However, even for a mixed system of
+ equalities and inequalities, it is possible to apply Groebner
+ bases to the (conjoined) equalities before invoking CAD. We see
+ that this is, quite often but not always, a beneficial
+ preconditioning of the CAD problem.
+
+ It is also possible to precondition the (conjoined) inequalities
+ with respect to the equalities, and this can also be useful in
+ many cases.",
+ paper = "Wils12.pdf"
+}
+
+\end{chunk}
+
\index{Winkler, Franz}
\begin{chunk}{axiom.bib}
@book{Wink84,
@@ 17099,7 +21220,7 @@ when shown in factored form.
year = "2018",
pages = "681690",
paper = "Avig18a.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 17890,7 +22011,7 @@ when shown in factored form.
\index{Paule, Peter}
\index{Saad, Husam L.}
\begin{chunk}{axiom.bib}
@article{Chen08,
+@article{Chen08a,
author = "Chen, William Y.C. and Paule, Peter and Saad, Husam L.",
title = {{Converging to Gosper's Algorithm}},
journal = "Advances in Applied Mathematics",
@@ 17906,7 +22027,7 @@ when shown in factored form.
for finding rational solutions to linear difference equations with
polynomial coefficients. Our approach easily extends to the
qanalogues.",
 paper = "Chen08.pdf",
+ paper = "Chen08a.pdf",
keywords = "printed"
}
@@ 19166,7 +23287,7 @@ when shown in factored form.
keywords = "printed"
}
\end{chumk}
+\end{chunk}
\index{Felty, Amy}
\index{Miller, Dale}
@@ 19594,7 +23715,7 @@ when shown in factored form.
Groups (the famous 'monster theorem' whose proof spans 10,000 pages in
400 articles).",
paper = "Gont09a.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 22890,7 +27011,8 @@ when shown in factored form.
the semantics of procedures when these are allowed in assignment
statements. The conclusion traces some of the background of the
project and points the way to future work.",
 paper = "Scot71.pdf"
+ paper = "Scot71.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 23594,6 +27716,8 @@ when shown in factored form.
keywords = "axiomref"
}
+\end{chunk}
+
\subsection{U} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\subsection{V} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ 24172,7 +28296,8 @@ when shown in factored form.
booktitle = "OPLSS 2106",
link = "\url{http://noamz.org/oplss16/refinementsnotes.pdf}",
year = "2016",
 paper = "Zeil16a.pdf"
+ paper = "Zeil16a.pdf",
+ keywords = "printed"
}
\end{chunk}
@@ 26630,6 +30755,37 @@ when shown in factored form.
\end{chunk}
\index{Carette, Jacques}
+\index{Sexton, Alan P.}
+\index{Sorge, Volker}
+\index{Watt, Stephen M.}
+\begin{chunk}{axiom.bib}
+@article{Care10a,
+ author = "Carette, Jacques and Sexton, Alan P. and Sorge, Volker and
+ Watt, Stephen M.",
+ title = {{Symbolic Domain Decomposition}},
+ journal = "LNCS",
+ volume = "6167",
+ year = "2010",
+ abstract =
+ "Decomposing the domain of a function into parts has many uses in
+ mathematics. A domain may naturally be a union of pieces, a
+ function may be defined by cases, or different boundary conditions
+ may hold on different regions. For any particular problem the
+ domain can be given explicitly, but when dealing with a family of
+ problems given in terms of symbolic parameters, matters become
+ more difficult. This article shows how hybrid sets, that is
+ multisets allowing negative multiplicity, may be used to express
+ symbolic domain decompositions in an efficient, elegant and
+ uniform way, simplifying both computation and reasoning. We apply
+ this theory to the arithmetic of piecewise functions and symbolic
+ matrices and show how certain operations may be reduced from
+ exponential to linear complexity.",
+ paper = "Care10a.pdf"
+}
+
+\end{chunk}
+
+\index{Carette, Jacques}
\index{Farmer, William M.}
\index{Jeremic, Filip}
\index{Maccio, Vincent}
@@ 29533,6 +33689,8 @@ when shown in factored form.
keywords = "printed"
}
+\end{chunk}
+
\index{Lamport, Leslie}
\begin{chunk}{axiom.bib}
@misc{Lamp14,
@@ 33876,6 +38034,33 @@ SIAM J. Computing Vol 18 pp 893905 (1989)
\end{chunk}
+\index{Davenport, J.H.}
+\begin{chunk}{axiom.bib}
+@article{Dave79,
+ author = "Davenport, J.H.",
+ title = {{The Computerisation of Algebraic Geometry}},
+ journal = "LNCS",
+ volume = "72",
+ pages = "119133",
+ year = "1979",
+ abstract =
+ "This paper is concerned with the problems of performing computer
+ algebra when the variables involved are related by some algebraic
+ dependencies. It is shown that heuristic or ad hoc treatment of
+ such cases leads rapidly to problems, and the proper mathematical
+ foundations for the treatment of algebraic functions is
+ presented. The formalism leads directly to the requirement for
+ algorithms to find the genus of an algebraic curve, and to
+ discover what function, if any, is associated with a given
+ divisor. These algorithms and the relevant computational
+ techniques are briefly described. In a concluding section the
+ areas where these techniques are required in an integration scheme
+ for algebraic functions are explained.",
+ paper = "Dave79.pdf"
+}
+
+\end{chunk}
+
\index{Davenport, James H.}
\begin{chunk}{axiom.bib}
@article{Dave79c,
@@ 34640,9 +38825,14 @@ ACM Proc. 1976 annual conference pp425428
\index{Norman, Arthur C.}
\index{Davenport, James H.}
\begin{chunk}{ignore}
\bibitem[Norman 79]{Nor79} Norman, A.C.; Davenport, J.H.
+\begin{chunk}{axiom.bib}
+@article{Norm79,
+ author = "Norman, Arthur C. and Davenport, James H.",
title = {{Symbolic Integration  The Dust Settles?}},
+ journal = "LNCS",
+ volume = "72",
+ pages = "398407",
+ year = "1979",
abstract = "
By the end of the 1960s it had been shown that a computer could find
indefinite integrals with a competence exceeding that of typical
@@ 34654,7 +38844,8 @@ ACM Proc. 1976 annual conference pp425428
been breached. In this paper we survey the work that has grown out of
the abovementioned early results, showing where the development has
been smooth and where it has spurred work in seemingly unrelated fields.",
 paper = "Nor79.pdf"
+ paper = "Norm79.pdf"
+}
\end{chunk}
@@ 34860,11 +39051,14 @@ Comm. Math. Helv., Vol 18 pp 283308, (1946)
\index{Rich, Albert D.}
\index{Jeffrey, David J.}
\begin{chunk}{ignore}
\bibitem[Rich 09]{Rich09} Rich, A.D.; Jeffrey, D.J.
+\begin{chunk}{axiom.bib}
+@article{Rich09,
+ author = "Rich, Albert D. and Jeffrey, David J.",
title = {{A Knowledge Repository for Indefinite Integration
Based on Transformation Rules}},
 link = "\url{http://www.apmaths.uwo.ca/~arich/A%2520Rulebased%2520Knowedge%2520Repository.pdf}",
+ journal = "LNCS",
+ volume = "5625",
+ year = "2009",
abstract = "
Taking the specific problem domain of indefinite integration, we
describe the ongoing development of a repository of mathematical
@@ 34877,6 +39071,7 @@ Comm. Math. Helv., Vol 18 pp 283308, (1946)
minimality. The benefits of the approach are illustrated with
examples, and with the results of comparisons with other approaches.",
paper = "Rich09.pdf"
+}
\end{chunk}
@@ 34889,7 +39084,18 @@ Comm. Math. Helv., Vol 18 pp 283308, (1946)
type = "Research Report",
number = "SP2801/002/00",
institution = "System Development Corporation, Santa Monica, CA, USA",
 year = "1968"
+ year = "1968",
+ abstract =
+ "This paper advances the study of the problem of integration of
+ elementary functions in finite terms to within one step of a
+ complete solution. A previous paper gave an algorithm for
+ integrating those elementary functions which are built up using
+ rational operations, exponentials and logarithms, under the
+ condition that the exponentials and logarithms could not be
+ replaced by adjoining constants and performing algebraic
+ operations. Now it is shown that with algebraic operations allowed,
+ the problem reduces to a problem in the theory of algebraic
+ functions which is believed to be decidable."
}
\end{chunk}
@@ 34952,25 +39158,14 @@ Comm. Math. Helv., Vol 18 pp 283308, (1946)
\end{chunk}
\index{Risch, Robert}
\begin{chunk}{axiom.bib}
@article{Risc79,
 author = "Risch, Robert",
 title = {{Algebraic properties of the elementary functions of analysis}},
 journal = "American Journal of Mathematics",
 volume = "101",
 pages = "743759",
 year = "1979"
}

\end{chunk}

\index{Ritt, Joseph Fels}
\begin{chunk}{ignore}
\bibitem[Ritt 48]{Ritt48} Ritt, J.F.
+\begin{chunk}{axiom.bib}
+@book{Ritt48,
+ author = "Ritt, Joseph Fels",
title = {{Integration in Finite Terms}},
Columbia University Press, New York 1948
% REF:00046
+ publisher = "Columbia University Press, New York",
+ year = "1948"
+}
\end{chunk}
@@ 38491,7 +42686,7 @@ Proc ISSAC 97 pp172175 (1997)
lists. Finally, we compare the execution time of this parsing method
to the builtin methods of Common Lisp.",
paper = "Bake91.pdf",
 keywords = "printed"
+ keywords = "printed, DONE"
}
\end{chunk}
@@ 39358,11 +43553,13 @@ Proc ISSAC 97 pp172175 (1997)
\index{Maza, Marc Moreno}
\index{Wilson, David}
\begin{chunk}{axiom.bib}
@misc{Engl14b,
+@article{Engl14b,
author = "England, Matthew and Bradford, Russell and Chen, Changbo and
Davenport, James H. and Maza, Marc Moreno",
title = {{Problem formulation for truthtable invariant cylindrical
algebraic decomposition by incremental triangular decomposition}},
+ journal = "LNCS",
+ volume = "8543",
link = "\url{https://arxiv.org/pdf/1404.6371.pdf}",
year = "2014",
abstract =
@@ 42970,7 +47167,8 @@ Proc ISSAC 97 pp172175 (1997)
author = "Dewar, Michael C.",
title = {{Interfacing algebraic and numeric computation}},
year = "1991",
 school = "University of Bath, UK, England"
+ school = "University of Bath, UK, England",
+ paper = "Dewa91.pdf"
}
\end{chunk}
@@ 43394,7 +47592,17 @@ Proc ISSAC 97 pp172175 (1997)
booktitle = "EUROCAL 85 European Conf. Comput. Algebra Proc. Vol. 2",
pages = "417",
year = "1985",
 link = "\url{http://www.math.ncsu.edu/~kaltofen/bibliography/85/Ka85_eurocal.pdf}",
+ link =
+ "\url{http://www.math.ncsu.edu/~kaltofen/bibliography/85/Ka85_eurocal.pdf}",
+ abstract =
+ "A new algorithm is introduced which computes the multivariate
+ leading coefficients of polynomial factors from their univariate
+ images. This algorithm is incorporated into a sparse Hensel
+ lifting scheme and only requires the factorization of a single
+ univariate image. The algorithm also provides the content of the
+ input polynomial in the main variable as a byproduct. We show how
+ we can take advantage of this property when coputing the GCD of
+ multivariate polynomials by sparse Hensel lifting.",
paper = "Kalt85d.pdf"
}
@@ 44264,7 +48472,7 @@ Proc ISSAC 97 pp172175 (1997)
\index{Constable, Robert L.}
\index{Underwood, Judith L.}
\begin{chunk}{axiom.bib}
@article{Aitk99,
+@article{Aitk99a,
author = "Aitken, William E. and Constable, Robert L. and
Underwood, Judith L.",
title = {{Metalogical frameworks. II: Developing a reflected decision
@@ 44297,7 +48505,7 @@ Proc ISSAC 97 pp172175 (1997)
that using a rich underlying logic permits an abstract account of the
approach so that the results carry over to different implementations
and other logics.",
 paper = "Aitk99.pdf",
+ paper = "Aitk99a.pdf",
keywords = "axiomref"
}
@@ 45054,28 +49262,6 @@ American Mathematical Society (1994)
\end{chunk}
\index{Blair, Fred W.}
\index{Griesmer, James H.}
\index{Jenks, Richard D.}
\begin{chunk}{axiom.bib}
@inproceedings{Blai70,
 author = "Blair, Fred W. and Griesmer, James H. and Jenks, Richard D.",
 title = {{An interactive facility for symbolic mathematics}},
 booktitle = "Proc. International Computing Symposium, Bonn, Germany",
 year = "1970",
 pages = "394419",
 abstract =
 "The SCRATCHPAD/1 system is designed to provide an interactive symbolic
 coputational facility for the mathematician user. The system features
 a user language designed to capture the style and succinctness of
 mathematical notation, together with a facility for conveniently
 introducing new notations into the language. A comprehensive system
 library incorporates symbolic capabilities provided by such systems as
 SIN, MATHLAB, and REDUCE.",
 keywords = "axiomref"
}

\end{chunk}
\index{Blair, Fred W.}
\index{Griesmer, James H.}
@@ 45132,12 +49318,15 @@ American Mathematical Society (1994)
\index{Blair, Fred W.}
\index{Jenks, Richard D.}
\begin{chunk}{ignore}
\bibitem[Blair 70a]{BJ70}
+@techreport{Blai70,
author = "Blair, Fred W. and Jenks, Richard D.",
title = {{LPL: LISP programming language}},
+ type = "technical report",
+ institution = "IBM",
+ number = "RC3062",
year = "1970",
IBM Research Report, RC3062 Sept ,
keywords = "axiomref"
+}
\end{chunk}
@@ 45240,7 +49429,7 @@ IBM Research Report, RC3062 Sept ,
differences amenable to formal treatment, by asking ``under which
congruence is the pupil's answer equal to the teacher's?''.",
paper = "Brad09.pdf",
 keywords = "axiomref"
+ keywords = "axiomref, DONE"
}
\end{chunk}
@@ 47461,6 +51650,7 @@ Coding Theory and Applications Proceedings. SpringerVerlag, Berlin, Germany
features, the author sketches how it could be applied to symbolic
computation. A comparison with Scratchpad II is attempted. XFun seems
to exhibit more flexibility simplicity and uniformity.",
+ paper = "Dalm92.pdf",
keywords = "axiomref",
beebe = "Dalmas:1992:PFL"
}
@@ 47792,7 +51982,8 @@ VM/370 SPAD.SCRIPTS August 24, 1979 SPAD.SCRIPT
year = "1980",
number = "RC 8537 (\#37198)",
comment = "http://www.computerhistory.org/collections/catalog/102719109",
 keywords = "axiomref"
+ paper = "Dave80a.pdf",
+ keywords = "axiomref, printed, DONE"
}
\end{chunk}
@@ 51904,7 +56095,7 @@ SpringerVerlag, Berlin, Germany / Heildelberg, Germany / London, UK / etc.,
\index{Griesmer, James H.}
\index{Jenks, Richard D.}
\begin{chunk}{axiom.bib}
@InProceedings{Grie71,
+@InProceedings{Grie70,
author = "Griesmer, James H. and Jenks, Richard D.",
title = {{SCRATCHPAD/1  an interactive facility for symbolic mathematics}},
booktitle = "Proc. second ACM Symposium on Symbolic and Algebraic
@@ 51913,7 +56104,8 @@ SpringerVerlag, Berlin, Germany / Heildelberg, Germany / London, UK / etc.,
year = "1971",
pages = "4258",
doi = "http://dx.doi.org/10.1145806266",
 link = "\url{http://delivery.acm.org/10.1145/810000/806266/p42griesmer.pdf}",
+ link =
+ "\url{http://delivery.acm.org/10.1145/810000/806266/p42griesmer.pdf}",
abstract = "
The SCRATCHPAD/1 system is designed to provide an interactive symbolic
computational facility for the mathematician user. The system features
@@ 51922,8 +56114,8 @@ SpringerVerlag, Berlin, Germany / Heildelberg, Germany / London, UK / etc.,
introducing new notations into the language. A comprehensive system
library incorporates symbolic capabilities provided by such systems as
SIN, MATHLAB, and REDUCE.",
 paper = "Grie71.pdf",
 keywords = "axiomref",
+ paper = "Grie70.pdf",
+ keywords = "axiomref, printed",
beebe = "Griesmer:1971:SIF"
}
@@ 52005,7 +56197,8 @@ SpringerVerlag, Berlin, Germany / Heildelberg, Germany / London, UK / etc.,
its language and its capabilities. This is followed by an example
which illustrates its use in an application involving the solution of
an integral equation.",
 keywords = "axiomref",
+ paper = "Grie72.pdf",
+ keywords = "axiomref, printed, DONE",
beebe = "Griesmer:1972:SCV"
}
@@ 52053,6 +56246,7 @@ SpringerVerlag, Berlin, Germany / Heildelberg, Germany / London, UK / etc.,
Transform computation represents a slight modification of one written
by Dr. David Barton, when he was a summer visitor during 1972 at the
Watson Research Center.",
+ paper = "Grie74.pdf",
keywords = "axiomref"
}
@@ 52085,7 +56279,8 @@ SpringerVerlag, Berlin, Germany / Heildelberg, Germany / London, UK / etc.,
volume = "9",
number = "3",
pages = "1317",
 year = "1975"
+ year = "1975",
+ paper = "Grie75a.pdf"
}
\end{chunk}
@@ 52107,6 +56302,7 @@ SpringerVerlag, Berlin, Germany / Heildelberg, Germany / London, UK / etc.,
processing, mathematics, notably logic and number theory, and
applications largely in physics. The lectures will deal with all of these
to a varying extent.",
+ paper = "Grie75b.pdf",
keywords = "axiomref"
}
@@ 53276,12 +57472,66 @@ SpringerVerlag, Berlin, Germany / Heildelberg, Germany / London, UK / etc.,
\end{chunk}
\index{Jenks, Richard D.}
\begin{chunk}{ignore}
\bibitem[Jenks 69]{Jen69} Jenks, R. D.
+\begin{chunk}{axiom.bib}
+@techreport{Jenk70,
+ author = "Jenks, Richard D.",
title = {{META/LISP: An interactive translator writing system}},
Research Report International Business Machines, Inc., Thomas J.
Watson Research Center, Yorktown Heights, NY, USA, 1969 RC2968 July 1970
+ type = "research report",
+ number = "RC2968",
+ year = "1970",
+ institution = "IBM Research",
+ abstract =
+ "META/LISP is a general purpose translator writing system for IBM
+ System/360 currently running on TSS, CP/CMS, and OS/360. The input
+ to the system is a source program which simultaneously describes
+ 1) the syntax of some input data to be translated and
+ 2) algorithms which operate on the input data and a pushdown stack
+ to accomplish the desired translation; the output of the system is
+ a compiled program for translating that input data. In particular
+ when the input data are statements of a higherlevel language to
+ be translated into assembly language, META/LISP serves as a
+ compilercompiler. META/LISP uses the topdown syntaxdirected
+ approach which makes the system extremely attractive for the
+ design and implementation of experimental languages; using
+ META/LISP such compilers are easy to write, easy to check out, and
+  most importantly  easy to modify interactively. The appendices
+ which follow a rather complete description of the system including
+ a selfdescription of the META/LISP compiler.",
+ paper = "Jenk70.pdf",
+ keywords = "axiomref, printed, DONE"
+}
+
+\end{chunk}
+
+\index{Jenks, Richard D.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Jenk71a,
+ author = "Jenks, Richard D.",
+ title = {{META LISP and META PLUS:: Tools for Rapidly Implementing
+ extendable language translators}},
+ booktitle = "Proc. 2nd ACM Symposium on Symbolic and Algebraic
+ Manipulation",
+ publisher = "ACM",
+ pages = "281",
+ year = "1971",
+ abstract =
+ "A unique feature of the SCRATCHPAD system for symbolic
+ manipulation is its powerful translation facilities. The essential
+ components are META/LISP, a translator writing system, and
+ META/PLUS, a facility for immediately extending the syntax of any
+ translator produced through META/LISP. This talk will illustrate
+ how these facilities may be used to produce a conversational
+ higherlevel LISP system. The language chosen is called ALPL
+ because of its similarity to APL and LPL, a language resident in
+ the SCRATCHPAD system. The essential characteristics of ALPL are
+ described by eight syntax rules together with brief examples of
+ corresponding ALPL and LISP programs. It is shown how a META/LISP
+ program may be easily written to produce a conversational ALPL
+ system. The ALPL language is then incrementally extended by
+ introducing new notations defined in terms of existing ALPL
+ constructs through calls to META/PLUS.",
keywords = "axiomref"
+}
\end{chunk}
@@ 53480,7 +57730,7 @@ Watson Research Center, Yorktown Heights, NY, USA, 1969 RC2968 July 1970
\begin{chunk}{axiom.bib}
@article{Jenk77,
author = "Jenks, Richard D.",
 title = {{On the design of a modebased symbolic system}},
+ title = {{On the Design of a ModeBased Symbolic System}},
journal = "SIGGAM Bulletin",
volume = "11",
number = "1",
@@ 53492,7 +57742,8 @@ Watson Research Center, Yorktown Heights, NY, USA, 1969 RC2968 July 1970
programming with rewrite rules and LET and IS patternmatch constructs.
An important feature of this design is the provision for modevalued
variables which allow algebraic domains to be runtime parameters.",
 keywords = "axiomref"
+ paper = "Jenk77.pdf",
+ keywords = "axiomref, printed"
}
\end{chunk}
@@ 53509,7 +57760,8 @@ Watson Research Center, Yorktown Heights, NY, USA, 1969 RC2968 July 1970
publisher = "SpringerVerlag",
isbn = "3540095195",
comment = "IBM Research Report RC 8073 Jan 1980",
 keywords = "axiomref"
+ paper = "Jenk79.pdf",
+ keywords = "axiomref, printed"
}
\end{chunk}
@@ 58968,6 +63220,7 @@ J. of Symbolic Computation 36 pp 513533 (2003)
power of our apprach and its application in constructing algebraic
concepts. The full calculus has been implemented and tested with our
LA compiler which generated executable files.",
+ paper = "Sant05.pdf",
keywords = "axiomref"
}
@@ 60077,7 +64330,7 @@ Kognitive Systeme, Universit\"t Karlsruhe 1992
system, and is not confined to functions which compute with basic
data types, such as floating point numbers.",
paper = "Smit07.pdf",
 keywords = "axiomref",
+ keywords = "axiomref, printed",
beebe = "Smith:2007:ADA"
}
@@ 60504,6 +64757,7 @@ Kognitive Systeme, Universit\"t Karlsruhe 1992
pages = "3233",
year = "1985",
isbn = "0387159835 (vol. 1),0387159843 (vol. 2)",
+ paper = "Suto85.pdf",
keywords = "axiomref",
beebe = "Sutor:1985:SIC"
}
@@ 64764,6 +69018,18 @@ Rocky Mountain J. Math. 14 119139. (1984)
\end{chunk}
+\index{Cohen, J.D.}
+\index{Jenks, R.D.}
+\begin{chunk}{axiom.bib}
+@misc{Cohe80,
+ author = "Cohen, J.D. and Jenks, R.D.",
+ title = {{On Resolution and Coercion in MODLISP}},
+ comment = "in preparation",
+ year = "1980"
+}
+
+\end{chunk}
+
\index{Cohn, Paul Moritz}
\begin{chunk}{axiom.bib}
@book{Cohn65,
@@ 65199,11 +69465,11 @@ Signum Newsletter. 13 (2) 1218. (1978)
\end{chunk}
\index{Demers, A.}
\index{Donahue, J.}
+\index{Demers, Alan}
+\index{Donahue, James}
\begin{chunk}{axiom.bib}
@techreport{Deme79,
 author = "Demers, A. and Donahue, J.",
+ author = "Demers, Alan and Donahue, James",
title = {{Revised Report on RUSSELL}},
year = "1979",
type = "technical report",
@@ 65213,6 +69479,25 @@ Signum Newsletter. 13 (2) 1218. (1978)
\end{chunk}
+\index{Demers, Alan}
+\index{Donahue, James}
+\begin{chunk}{axiom.bib}
+@inproceedings{Deme80,
+ author = "Demers, Alan and Donahue, James",
+ title = {{Type Completeness as a Language Principle}},
+ booktitle = "POPL 80",
+ publisher = "ACM",
+ pages = "234244",
+ year = "1980",
+ abstract =
+ "The problem of Von Neumann languages is that their changeable
+ parts have so little expressive power  John Backus",
+ paper = "Deme80.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
\index{Demmel, J. W.}
\begin{chunk}{ignore}
\bibitem[Demmel 89]{Dem89} Demmel J W
@@ 70281,7 +74566,8 @@ ACM Transactions on Mathematical Software, 32(2):180194, June 2006.
applied to any language represented in Mmt. A plugin interface permits
injecting syntactic and semantic idiosyncrasies of individual formal
languages.",
 paper = "Rabe13a.pdf, printed"
+ paper = "Rabe13a.pdf",
+ keywords = "printed"
}
\end{chunk}
diff git a/changelog b/changelog
index f118734..06db22c 100644
 a/changelog
+++ b/changelog
@@ 1,3 +1,6 @@
+20190531 tpd src/axiomwebsite/patches.html 20190531.01.tpd.patch
+20190531 tpd books/bookvolbib add MODLISP Davenport references
+20190531 tpd books/bookvol4 add MODLISP Davenport chapter
20190527 tpd src/axiomwebsite/patches.html 20190527.01.tpd.patch
20190527 tpd books/multind.sty multiple index files in a book
20190526 tpd src/axiomwebsite/patches.html 20190526.01.tpd.patch
diff git a/patch b/patch
index 8f4e9d3..80e4dc6 100644
 a/patch
+++ b/patch
@@ 3609,3 +3609,562 @@ Goal: Proving Axiom Sane
\end{chunk}
+\index{Davenport, James}
+\begin{chunk}{axiom.bib}
+@misc{Davexxa,
+ author = "Davenport, James",
+ title = {{Integration in Finite Terms}},
+ year = "unknown",
+ paper = "Davexxa.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Jenks, Richard D.}
+\begin{chunk}{axiom.bib}
+@article{Jenk75,
+ author = "Jenks, Richard D.",
+ title = {{Course Outline: Yale University, New Haven}},
+ journal = "SIGSAM Bulletin",
+ volume = "9",
+ number = "3",
+ pages = "910",
+ publisher = "ACM",
+ year = "1975",
+ paper = "Jenk75.pdf",
+ keywords = "axiomref, printed, DONE"
+}
+
+\end{chunk}
+
+\index{Jenks, Richard D.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Jenk71a,
+ author = "Jenks, Richard D.",
+ title = {{META/LISP and META/PLUS: Tools for Rapidly Implementing
+ extendable language translators}},
+ booktitle = "Proc. 2nd ACM Symposium on Symbolic and Algebraic
+ Manipulation",
+ publisher = "ACM",
+ pages = "281",
+ year = "1971",
+ abstract =
+ "A unique feature of the SCRATCHPAD system for symbolic
+ manipulation is its powerful translation facilities. The essential
+ components are META/LISP, a translator writing system, and
+ META/PLUS, a facility for immediately extending the syntax of any
+ translator produced through META/LISP. This talk will illustrate
+ how these facilities may be used to produce a conversational
+ higherlevel LISP system. The language chosen is called ALPL
+ because of its similarity to APL and LPL, a language resident in
+ the SCRATCHPAD system. The essential characteristics of ALPL are
+ described by eight syntax rules together with brief examples of
+ corresponding ALPL and LISP programs. It is shown how a META/LISP
+ program may be easily written to produce a conversational ALPL
+ system. The ALPL language is then incrementally extended by
+ introducing new notations defined in terms of existing ALPL
+ constructs through calls to META/PLUS.",
+ keywords = "axiomref"
+}
+
+\end{chunk}
+
+\index{Davenport, James}
+\index{Padget, Julian}
+\begin{chunk}{axiom.bib}
+@article{Dave85c,
+ author = "Davenport, James and Padget, Julian",
+ title = {{HEUGCD: How Elementary Upperbounds Generate Cheaper Data}},
+ journal = "LNCS",
+ volume = "204",
+ year = "1985",
+ booktitle = "EUROCAL '85 European Conference on Computer Algebra",
+ publisher = "Springer",
+ abstract =
+ "The work presented in this paper is a direct consequence of the
+ ideas set forth by Char et al (1984b) describing a new technique
+ for computing the greatest common divisor of polynomials.",
+ paper = "Dave85c.pdf"
+}
+
+\end{chunk}
+
+\index{Char, Bruce W.}
+\index{Geddes, Keith O.}
+\index{Gonnet, Gaston H.}
+\begin{chunk}{axiom.bib}
+@article{Char84,
+ author = "Char, Bruce W. and Geddes, Keith O. and Gonnet, Gaston H.",
+ title = {{GCDHEU: Heuristic polynomial GCD algorithm based on Integer
+ GCD computation}},
+ journal = "LNCS",
+ volume = "174",
+ pages = "285296",
+ year = "1984",
+ abstract =
+ "The design of algorithms for polynomial GCD computation has been
+ a continuing area of research since the beginning of the
+ development of symbolic computation systems. The earliest efforts
+ were mainly directed at PRS (Polynomial Remainder Sequence)
+ algorithms which are a direct generalization of Euclid's
+ algorithm. The main algorithms of this type are the Reduced PRS
+ algorithm and the Subresultant PRS algorithm. Hearn discusses the
+ use of trial divisions to further improve the performance of PRS
+ algorithms. The first fundamentally different polynomial GCD
+ algorithm was the modular algorithm. To make the modular algorithm
+ competitive for sparse multivariate polynomials, Zippel developed
+ the sparse modular algorithm. Another modulartype algorithm was
+ the Henselbased EZ GCD algorithm which was later improved as the
+ EEZ GCD algorithm.
+
+ The present paper discusses a new heuristic algorithm, GCDHEU,
+ which is found to be very efficient for problems in a small number
+ of variables. The heuristic algorithm can be viewed as
+ a modular-type algorithm in that it uses evaluation and
+ interpolation, but only a single evaluation per variable is
+ used. The heuristic algorithm can be incorporated into a
+ reorganized form of the EEZ GCD algorithm such that the base of
+ the EEZ GCD algorithm, rather than a univariate GCD algorithm, is
+ GCDHEU which is often successful for problems in up to four variables.",
+ paper = "Char84.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Gianni, Patrizia}
+\index{Trager, Barry}
+\begin{chunk}{axiom.bib}
+@article{Gian85a,
+ author = "Gianni, Patrizia and Trager, Barry",
+ title = {{GCD's and Factoring Multivariate Polynomials using
+ Grobner Bases}},
+ journal = "LNCS",
+ volume = "204",
+ year = "1985",
+ booktitle = "EUROCAL '85 European Conference on Computer Algebra",
+ publisher = "Springer",
+ abstract =
+ "This paper shows how Grobner basis computations can be used to
+ compute multivariate gcds, perform Hensel lifting, and reduce
+ multivariate factorization to univariate. The essential idea is to
+ produce an ideal containing the desired polynomial as an element
+ of least degree. The construction is somewhat analogous to the
+ recent lattice algorithms for polynomial factorization. A major
+ difference is that we don't need to perform the hensel lifting as
+ a separate step; one Grobner basis computation is sufficient to
+ find the desired multivariate factor or gcd. We produce algorithms
+ which are very simple and may be of use on small systems where
+ code size is critical. We feel that these results demonstrate the
+ fundamental importance of the Grobner basis in computer algebra.",
+ paper = "Gian85a.pdf"
+}
+
+\end{chunk}
+
+\index{Griesmer, J.H.}
+\index{Jenks, R.D.}
+\index{Yun, D.Y.Y.}
+\begin{chunk}{axiom.bib}
+@article{Grie78a,
+ author = "Griesmer, J.H. and Jenks, R.D. and Yun, D.Y.Y.",
+ title = {{A Taxonomy for Algebraic Computation}},
+ journal = "ACM SIGSAM Bulletin",
+ volume = "12",
+ number = "3",
+ pages = "2528",
+ year = "1978",
+ abstract =
+ "Recently the authors responded to a request from Professor
+ Anthony Ralston of the State University of New York at Buffalo to
+ participate in the review of a proposed Taxonomy of Computer
+ Science and Engineering.",
+ paper = "Grie78a.pdf"
+}
+
+\end{chunk}
+
+\index{Davenport, J.H.}
+\begin{chunk}{axiom.bib}
+@article{Dave79,
+ author = "Davenport, J.H.",
+ title = {{The Computerisation of Algebraic Geometry}},
+ journal = "LNCS",
+ volume = "72",
+ pages = "119133",
+ year = "1979",
+ abstract =
+ "This paper is concerned with the problems of performing computer
+ algebra when the variables involved are related by some algebraic
+ dependencies. It is shown that heuristic or ad hoc treatment of
+ such cases leads rapidly to problems, and the proper mathematical
+ foundations for the treatment of algebraic functions is
+ presented. The formalism leads directly to the requirement for
+ algorithms to find the genus of an algebraic curve, and to
+ discover what function, if any, is associated with a given
+ divisor. These algorithms and the relevant computational
+ techniques are briefly described. In a concluding section the
+ areas where these techniques are required in an integration scheme
+ for algebraic functions are explained.",
+ paper = "Dave79.pdf"
+}
+
+\end{chunk}
+
+\index{Trager, Barry}
+\begin{chunk}{axiom.bib}
+@article{Trag79,
+ author = "Trager, Barry",
+ title = {{Integration of Simple Radical Extensions}},
+ journal = "LNCS",
+ volume = "72",
+ pages = "408414",
+ year = "1979",
+ abstract =
+ "Risch's landmark paper presented the first decision procedure for
+ the integration of elementary functions. In that paper he required
+ that the functions appearing in the integrand be algebraically
+ independent. Shortly afterwards in [Risalg] and [Ris70] he relaxed
+ that restriction and outlined a complete decision procedure for
+ the integration of elementary functions in finite
+ terms. Unfortunately his algorithms for dealing with algebraic
+ functions required considerably more complex machinery than his
+ earlier ones for purely transcendental functions. Moses'
+ implementation of the earlier approach in MACSYMA demonstrated its
+ practicality, whereas the same has yet to be done for Risch's more
+ recent approach.
+
+ This paper will show how Risch's earlier techniques can be
+ generalized to deal with unnested radicals. While this may seem a
+ severe restriction, perusing an integral table such as [Bois61] will
+ show that fewer than 1\% of the problems are excluded.",
+ paper = "Trag79.pdf"
+}
+
+\end{chunk}
+
+\index{Moses, Joel}
+\index{Zippel, Richard}
+\begin{chunk}{axiom.bib}
+@article{Mose79,
+ author = "Moses, Joel and Zippel, Richard",
+ title = {{Algorithms for the Integration of Algebraic Functions}},
+ journal = "LNCS",
+ volume = "72",
+ pages = "426430",
+ year = "1979",
+ paper = "Mose79.pdf"
+}
+
+\end{chunk}
+
+\index{Avigad, Jeremy}
+\begin{chunk}{axiom.bib}
+@misc{Avig19,
+ author = "Avigad, Jeremy",
+ title = {{The Mechanization of Mathematics}},
+ year = "2019",
+ comment = "The Big Proof Workshop",
+ paper = "Avig19.pdf",
+ keywords = "DONE"
+}
+
+\end{chunk}
+
+\index{Mosses, Peter}
+\begin{chunk}{axiom.bib}
+@article{Moss80,
+ author = "Mosses, Peter",
+ title = {{A Constructive Approach to Compiler Correctness}},
+ journal = "LNCS",
+ volume = "85",
+ year = "1980",
+ booktitle = "Automata, Languages and Programming",
+ publisher = "Springer",
+ abstract =
+ "It is suggested that denotational semantics definitions of
+ programming languages should be based on a small number of
+ abstract data types, each embodying a fundamental concept of
+ computation. Once these fundamental abstract data types have been
+ implemented in a particular target language (e.g. stackmachine
+ code), it is a simple matter to construct a correct compiler for
+ any source language from its denotational semantic definition. The
+ approach is illustrated by constructing a compiler similar to the
+ one which was proved correct by Thatcher, Wagner \& Wright
+ (1979). Some familiarity with manysorted algebras is presumed.",
+ paper = "Moss80.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Ehrig, Hartmut}
+\index{Kreowski, HansJorg}
+\index{Thatcher, James}
+\index{Wagner, Eric}
+\index{Wright, Jesse}
+\begin{chunk}{axiom.bib}
+@article{Ehri80a,
+ author = "Ehrig, Hartmut and Kreowski, HansJorg and Thatcher, James
+ and Wagner, Eric and Wright, Jesse",
+ title = {{Parameterized Data Types in Algebraic Specification Languages}},
+ journal = "LNCS",
+ volume = "85",
+ year = "1980",
+ booktitle = "Automata, Languages and Programming",
+ publisher = "Springer",
+ paper = "Ehri80a.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Liskov, Barbara}
+\index{Zilles, Stephen}
+\begin{chunk}{axiom.bib}
+@article{Lisk77a,
+ author = "Liskov, Barbara and Zilles, Stephen",
+ title = {{Programming with Abstract Data Types}},
+ journal = "SIGPLAN Notices",
+ volume = "9",
+ number = "4",
+ pages = "5059",
+ year = "1977",
+ abstract =
+ "The motivation behind the work in veryhighlevel languages is to
+ ease the programming task by providing the programmer with a
+ language containing primitives or abstractions suitable to his
+ problem area. The programmer is then able to spend his effort in
+ the right place; he concentrates on solving his problem, and the
+ resulting program will be more reliable as a result. Clearly, this
+ is a worthwhile goal.
+
+ Unfortunately, it is very difficult for a designer to select in
+ advance all the abstractions which the users of his language might
+ need. If a language is to be used at all, it is likely to be used
+ to solve problems which its designer did not envision, and for
+ which the abstractions embedded in the language are not sufficient.
+
+ This paper presents an approach which allows the set of builtin
+ abstractions to be augmented when the need for a new data
+ abstraction is discovered. This approach to the handling of
+ abstraction is an outgrowth of work on designing a language for
+ structured programming. Relevant aspects of this language are
+ described, and examples of the use and definitions of abstraction
+ are given.",
+ paper = "Lisk77a.pdf"
+}
+
+\end{chunk}
+
+\index{Black, A.P.}
+\begin{chunk}{axiom.bib}
+@techreport{Blac80,
+ author = "Black, A.P.",
+ title = {{Exception Handling and Data Abstraction}},
+ type = "Research Report",
+ institution = "IBM Research",
+ number = "RC8059",
+ year = "1980"
+}
+
+\end{chunk}
+
+\index{Goguen, J.A.}
+\index{Thatcher, J.W.}
+\index{Wagner, E.G.}
+\index{Wright, J.B.}
+\begin{chunk}{axiom.bib}
+@techreport{Gogu76,
+ author = "Goguen, J.A. and Thatcher, J.W. and Wagner, E.G. and
+ Wright, J.B.",
+ title = {{An Initial Algebra Approach to the Specification,
+ Correctness and Implementation of Abstract Data Types}},
+ type = "Research Report",
+ institution = "IBM Research",
+ number = "RC6487",
+ year = "1976"
+}
+
+\end{chunk}
+
+\index{Donahue, J.}
+\begin{chunk}{axiom.bib}
+@misc{Dona77,
+ author = "Donahue, J.",
+ title = {{On the semantics of ``Data Type''}},
+ comment = "Cornell University",
+ year = "1977"
+}
+
+\end{chunk}
+
+\index{Ershov, A.P.}
+\begin{chunk}{axiom.bib}
+@misc{Ersh77,
+ author = "Ershov, A.P.",
+ title = {{On the Essence of Compilation}},
+ comment = "Proc. IFIP Working Conf. on Formal Description of
+ Programming Concepts, Vol. 1",
+ year = "1977"
+}
+
+\end{chunk}
+
+\index{Hearn, Anthony C.}
+\begin{chunk}{axiom.bib}
+@book{Hear73,
+ author = "Hearn, Anthony C.",
+ title = {{REDUCE2 Users Manual}},
+ comment = "Computing Physics Group",
+ publisher = "University of Utah",
+ year = "1973"
+}
+
+\end{chunk}
+
+\index{Hearn, Anthony C.}
+\begin{chunk}{axiom.bib}
+@article{Hear71,
+ author = "Hearn, Anthony C.",
+ title = {{Applications of Symbol Manipulation in Theoretical Physics}},
+ journal = "Communications of the ACM",
+ volume = "14",
+ number = "8",
+ pages = "511516",
+ year = "1971",
+ paper = "Hear71.pdf"
+}
+
+\end{chunk}
+
+\index{Hearn, Anthony C.}
+\begin{chunk}{axiom.bib}
+@article{Hear72,
+ author = "Hearn, Anthony C.",
+ title = {{An Improved NonModular Polynomial GCD Algorithm}},
+ journal = "ACM SIGSAM Bulletin",
+ volume = "23",
+ pages = "1015",
+ year = "1972",
+ abstract =
+ "An improved nonmodular algorithm for the calculation of the
+ greatest common divisor of two multivariate polynomials is
+ presented.",
+ paper = "Hear72.pdf"
+}
+
+\end{chunk}
+
+\index{Campbell, J.A.}
+\index{Hearn, Anthony C.}
+\begin{chunk}{axiom.bib}
+@article{Hear70,
+ author = "Campbell, J.A. and Hearn, Anthony C.",
+ title = {{Symbolic Analysis of Feynman Diagrams by Computer}},
+ journal = "J. of Computational Physics",
+ volume = "5",
+ number = "2",
+ pages = "280327",
+ year = "1970",
+ abstract =
+ "We describe a system of programs in the language LISP 1.5 which
+ handles all stages of calculation from the specification of an
+ elementaryparticle process in terms of a Hamiltonian of
+ interaction or Feynman diagrams to the derivation of an absolute
+ square of the matrix element for the process. Examples of
+ significant parts of the program are presented in the text, while
+ a detailed listing of this material is contained in two Appendices
+ which are available on request from the authors.",
+ paper = "Camp70.pdf"
+}
+
+\end{chunk}
+
+\index{Henderson, Peter}
+\index{Morris Jr., James H.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Hend76,
+ author = "Henderson, Peter and Morris Jr., James H.",
+ title = {{A Lazy Evaluator}},
+ booktitle = "3rd Symp. on Principles of Programming Languages",
+ publisher = "ACM",
+ pages = "95103",
+ year = "1976",
+ abstract =
+ "A different way to execute pure LISP programs is presented. It
+ delays the evaluation of parameters and list structures without
+ ever having to perform more evaluation steps than the usual
+ method. Although the central idea can be found in earlier work
+ this paper is of interest since it treats a rather wellknown
+ language and works out an algorithm which avoids full
+ substitution. A partial correctness proof using ScottStrachey
+ semantics is sketched in a later section.",
+ paper = "Hend76.pdf",
+ keywords = "printed"
+}
+
+\end{chunk}
+
+\index{Jensen, Kathleen}
+\index{Wirth, Niklaus}
+\begin{chunk}{axiom.bib}
+@book{Jens75,
+ author = "Jensen, Kathleen and Wirth, Niklaus",
+ title = {{PASCAL User Manual and Report}},
+ publisher = "SpringerVerlag",
+ year = "1975",
+ isbn = "0387901442",
+ keywords = "owned"
+}
+
+\end{chunk}
+
+\begin{chunk}{axiom.bib}
+@book{IBMx78,
+ author = "IBM",
+ title = {{LISP/370 Program Description / Operations Manual}},
+ publisher = "IBM Research",
+ year = "1978",
+ comment = "SH2020760"
+}
+
+\end{chunk}
+
+\index{Morris Jr., J.H.}
+\begin{chunk}{axiom.bib}
+@inproceedings{Morr73,
+ author = "Morris Jr., J.H.",
+ title = {{Types are not Sets}},
+ booktitle = "Symp. on the Principles of Programming Languages",
+ publisher = "ACM",
+ pages = "120124",
+ year = "1973"
+}
+
+\end{chunk}
diff git a/src/axiomwebsite/patches.html b/src/axiomwebsite/patches.html
index ba0e657..fc8b9c3 100644
 a/src/axiomwebsite/patches.html
+++ b/src/axiomwebsite/patches.html
@@ 5998,6 +5998,8 @@ books/Newsletter.September85.pdf updated
books/bookheader.tex add names to credit list
20190527.01.tpd.patch
books/multind.sty multiple index files in a book
+20190531.01.tpd.patch
+books/bookvol4 add MODLISP Davenport chapter

1.9.1