lp:axiom
- Get this branch:
- bzr branch lp:axiom
Branch merges
Branch information
Import details
This branch is an import of the HEAD branch of the Git repository at https://github.com/daly/axiom.git.
Last successful import was .
Recent revisions
- 2921. By daly
-
books/bookvolbib add references
Goal: Proving Axiom Sane
\index{Wirth, Niklaus}
\begin{chunk}{ axiom.bib}
@book{Wirt13,
author = "Wirth, Niklaus",
title = {{Project Oberon}},
publisher = "ACM Press",
isbn = "0-201-54428-8",
year = "2013"
}\end{chunk}
\index{Wirth, Niklaus}
\begin{chunk}{ axiom.bib}
@misc{Wirt14,
author = "Wirth, Niklaus",
title = {{Reviving a Computer System of 25 Years Ago}},
link = "\url{https://www.youtube.com/watch?v=EXY78gPMvl0}",
year = "2014"
}\end{chunk}
\index{Alpern, Bowen L.}
\begin{chunk}{ axiom.bib}
@phdthesis{Alpe86,
author = "Alpern, Bowen L.",
title = {{Proving Temporal Properties of Concurrent Programs: A
Non-Temporal Approach}},
school = "Cornell University",
year = "1986",
abstract =
"This thesis develops a new method for proving properties of
concurrent programs and gives formal definitions for safety and
liveness. A property is specified by a property recognizer -- a
finite-state machine that accepts the sequences of program states
in the property it specifies. A property recognizer can be
constructed for any temporal logic formula. To prove that a program satisfies a property specified by a
deterministic program recognizer, one must show that any history
of the program will be accepted by the recognizer. This is done by
demonstrating that proof obligations derived from the recognizer
are met. These obligations require the program prover to exhibit
certain invariant assertions and variant functions and to prove
the validity of certain predicates and Hoare triples. Thus, the
same techniques used to prove local correctness of a while loop
can be used to prove temporal properties of concurrent
programs. No temporal inference is required. The invariant assertions required by the proof obligations
establish a correspondence between the states of the program and
those of the recogniser. Such correspondences can be denoted by
property outlines, a generalization of proof outlines. Some non-deterministic property recognizers have no deterministic
equivalents. To prove that a program satisfies a non-deterministic
property, a deterministic sub-property that the program satisfies
must be found. This is shown possible, provided the program state
space is finite. Finally, safety properties are formalized as the closed sets of a
topological space and liveness properties as its dense sets. Every
property is shown to be the intersection of a safety property and
a liveness property. A technique for separating a property
specified by a deterministic property recognizer into its safety
and liveness aspects is also presented.",
keywords = "printed"
}\end{chunk}
\index{Hobbs, Jerry R.}
\index{Stickel, Mark}
\index{Appelt, Douglas}
\index{Martin, Paul}
\begin{chunk}{ axiom.bib}
@article{Hobb93,
author = "Hobbs, Jerry R. and Stickel, Mark and Appelt, Douglas
and Martin, Paul",
title = {{Interpretation as Abduction}},
journal = "Artificial Intelligence",
volume = "63",
number = "1-2",
pages = "69-142",
year = "1993",
abstract =
"Abduction is inference to the best explanation. In the TACITUS
project at SRI we have developed an approach to abductive
inference, called ``weighted abduction'', that has resulted in a
significant simplification of how the problem of interpreting
texts is conceptualized. The interpretation of a text is the
minimal explanation of why the text would be true. More precisely,
to interpret a text, one must prove the logical form of the text
from what is already mutually known, allowing for coercions,
merging redundancies where possible, and making assumptions where
necessary. It is shown how such ``local pragmatics'' problems as
reference resolution, the interpretation of compound nominals, the
resolution of syntactic ambiguity and metonymy, and schema
recognition can be solved in this manner. Moreover, this approach
of ``interpretation as abduction'' can be combined with the older
view of ``parsing as deduction'' to produce an elegant and thorough
integration of syntax, semantics, and pragmatics, one that spans
the range of linguistic phenomena from phonology to discourse
structure. Finally, we discuss means for making the abduction
process efficient, possibilities for extending the approach to
other pragmatics phenomena, and the semantics of the weights and
costs in the abduction scheme.",
keywords = "printed"
}\end{chunk}
\index{Stickel, Mark E.}
\begin{chunk}{ axiom.bib}
@techreport{Stic89,
author = "Stickel, Mark E.",
title = {{A Prolog Technology Theorem Prover: A New Exposition and
Implementation in Prolog}},
type = "technical note",
institution = "SRI International",
number = "464",
year = "1989",
link = "\url{www.ai.sri.com/~stickel/pttp.html}",
abstract =
"A Prolog technology theorem prover (PTTP) is an extension of
Prolog that is complete for the full first-order predicate
calculus. It differs from Prolog in its use of unification with
the occurs check for soundness, depth-first iterative-deepening
search instead of unbounded depth-first search to make the search
strategy complete, and the model elimination reduction rule that
is added to Prolog inferences to make the inference system
complete. This paper describes a new Prolog-based implementation
of PTTP. It uses three compile-time transformations to translate
formulas into Prolog clauses that directly execute, with the
support of a few run-time predicates, the model elimination
procedure with depth-first iterative-deepening search and
unification with the occurs check. Its high performance exceeds
that of Prolog-based PTTP interpreters, and it is more concise and
readable than the earlier Lisp-based compiler, which makes it
superior for expository purposes. Examples of inputs and outputs
of the compile-time transformations provide an easy and quite
precise way to explain how PTTP works. This Prolog-based version
makes it easier to incorporate PTTP theorem-proving ideas into
Prolog programs. Some suggestions are made on extensions to Prolog
that could be used to improve PTTP's performance.",
keywords = "printed"
}\end{chunk}
\index{Nehrkorn, Klaus}
\begin{chunk}{ axiom.bib}
@inproceedings{Nehr85,
author = "Nehrkorn, Klaus",
title = {{Symbolic Integration of Exponential Polynomials}},
booktitle = "European Conference on Computer Algebra",
publisher = "Springer",
pages = "599-600",
year = "1985",
comment = "LNCS 204",
paper = "Nehr85.pdf"
}\end{chunk}
\index{Singer, M.F.}
\index{Davenport, J.H.}
\begin{chunk}{ axiom.bib}
@inproceedings{Sing85a,
author = "Singer, M.F. and Davenport, J.H.",
title = {{Elementary and Liouvillian Solutions of Linear
Differential Equations}},
booktitle = "European Conference on Computer Algebra",
publisher = "Springer",
pages = "595-596",
year = "1985",
comment = "LNCS 204",
paper = "Sing85a.pdf"
}\end{chunk}
\index{Watt, Stephen M.}
\begin{chunk}{ axiom.bib}
@inproceedings{Watt85,
author = "Watt, Stephen M.",
title = {{A System for Parallel Computer Algebra Programs}},
booktitle = "European Conference on Computer Algebra",
publisher = "Springer",
pages = "537-538",
year = "1985",
comment = "LNCS 204",
paper = "Watt85.pdf"
}\end{chunk}
\index{Robbiano, Lorenzo}
\begin{chunk}{ axiom.bib}
@inproceedings{Robb85,
author = "Robbiano, Lorenzo",
title = {{Term Orderings on the Polynomial Ring}},
booktitle = "European Conference on Computer Algebra",
publisher = "Springer",
pages = "513-517",
year = "1985",
comment = "LNCS 204",
paper = "Robb85.pdf"
}\end{chunk}
\index{Hohlfeld, Bernhard}
\begin{chunk}{ axiom.bib}
@inproceedings{Hohl85,
author = "Hohlfeld, Bernhard",
title = {{Correctness Proofs of the Implementation of Abstract Data
Types}},
booktitle = "European Conference on Computer Algebra",
publisher = "Springer",
pages = "446-447",
year = "1985",
comment = "LNCS 204",
paper = "Hohl85.pdf"
}\end{chunk}
\index{Kounalis, Emmanuel}
\begin{chunk}{ axiom.bib}
@inproceedings{Emma85,
author = "Kounalis, Emmanuel",
title = {{Completeness in Data Type Specifications}},
booktitle = "European Conference on Computer Algebra",
publisher = "Springer",
pages = "348-362",
year = "1985",
comment = "LNCS 204",
paper = "Emma85.pdf"
}\end{chunk}
\index{Aspetsberger, K.}
\begin{chunk}{ axiom.bib}
@inproceedings{Aspe85,
author = "Aspetsberger, K.",
title = {{Substitution Expressions: Extracting Solutions of non-Horn
Clause Proofs}},
booktitle = "European Conference on Computer Algebra",
publisher = "Springer",
pages = "78-86",
year = "1985",
comment = "LNCS 204",
paper = "Aspe85.pdf"
}\end{chunk}
\index{Bini, Dario}
\index{Pan, Victor}
\begin{chunk}{ axiom.bib}
@inproceedings{Bini85,
author = "Bini, Dario and Pan, Victor",
title = {{Algorithms for Polynomial Division}},
booktitle = "European Conference on Computer Algebra",
publisher = "Springer",
pages = "1-3",
year = "1985",
comment = "LNCS 204",
paper = "Bini85.pdf"
}\end{chunk}
\index{Monagan, Michael}
\index{Pearce, Roman}
\begin{chunk}{ axiom.bib}
@article{Mona12,
author = "Monagan, Michael and Pearce, Roman",
title = {{POLY: A New Polynomial Data Structure for Maple 17}},
journal = "Communications in Computer Algebra",
publisher = "ACM",
volume = "46",
number = "4",
year = "2012",
abstract =
"We demonstrate how a new data structure for sparse distributed
polynomials in the Maple kernel significantly accelerates a large
subset of Maple library routines. The POLY data structure and its
associated kernel operations (degree, coeff, subs, has, diff,
eval,...) are programmed for high scalability, allowing
polynomials to have hundreds of millions of terms, and very low
overhead, increasing parallel speedup in existing routines and
improving the performance of high-level Maple library routines.",
paper = "Mona12.pdf"
}\end{chunk}
\index{Cherry, G.W.}
\index{Caviness, B.F.}
\begin{chunk}{ axiom.bib}
@inproceedings{Cher84,
author = "Cherry, G.W. and Caviness, B.F.",
title = {{Integration in Finite Terms With Special Functions:
A Progress Report}},
booktitle = "International Symposium on Symbolic and Algebraic
Manipulation" ,
pages = "351-358",
publisher = "Springer",
year = "1984",
comment = "LNCS 174",
abstract =
"Since R. Risch published an algorithm for calculating symbolic
integrals of elementary functions in 1969, there has been an
interest in extending his methods to include nonelementary
functions. We report here on the recent development of two
decision procedures for calculating integrals of transcendental
elementary functions in terms of logarithmic integrals and error
functions. Both of these algorithms are based on the Singer,
Saunders, Caviness extension of Liouville's theorem on integration
in finite terms. Parts of the logarithmic integral algorithm have
been implemented in Macsyma and a brief demonstration is given.",
paper = "Cher84.pdf"
}\end{chunk}
\index{Kaltofen, Erich}
\begin{chunk}{ axiom.bib}
@inproceedings{Kalt84,
author = "Kaltofen, E.",
title = {{A Note on the Risch Differential Equation}},
booktitle = "International Symposium on Symbolic and Algebraic
Manipulation" ,
pages = "359-366",
publisher = "Springer",
year = "1984",
comment = "LNCS 174",
abstract =
"This paper relates to the technique of integrating a function in
a purely transcendental regular elementary Liouville extension by
prescribing degree bounds for the transcendentals and then solving
linear systems over the constants. The problem of finding such
bounds explicitly remains yet to be solved due to the so-called
third possibilities in the estimates for the degrees given in
R. Risch's original algorithm. We prove that in the basis case in which we have only exponentials
of rational functions, the bounds arising from the third
possibilities are again degree bounds of the inputs. This result
provides an algorithm for solving the differential equation
$y^\prime + f^\prime y = g$ in $y$ where $f$, $g$, and $y$ are
rational functions over an arbitrary constant field. This new
algorithm can be regarded as a direct generalization of the
algorithm by E. Horowitz for computing the rational part of the
integral of a rational function (i.e. $f^\prime = 0$), though its
correctness proof is quite different.",
paper = "Kalt84.pdf"
}\end{chunk}
\index{Viry, G.}
\begin{chunk}{ axiom.bib}
@inproceedings{Viry84,
author = "Viry, G.",
title = {{Simplification of Polynomials in n Variables}},
booktitle = "International Symposium on Symbolic and Algebraic
Manipulation" ,
pages = "64-73",
publisher = "Springer",
year = "1984",
comment = "LNCS 174",
paper = "Viry84.pdf"
}\end{chunk}
\index{Wang, Paul S.}
\begin{chunk}{ axiom.bib}
@inproceedings{Wang84,
author = "Wang, Paul S.",
title = {{Implementation of a p-adic Package for Polynomial
Factorization and other Related Operations}},
booktitle = "International Symposium on Symbolic and Algebraic
Manipulation" ,
pages = "86-99",
publisher = "Springer",
year = "1984",
comment = "LNCS 174",
abstract =
"The design and implementation of a $p$-adic package, called
{\bf P}-pack, for polynomial factorization, gcd, squarefree
decomposition and univariate partial fraction expansion are
presented. {\bf P}-pack is written in FRANZ LISP, and can be
loaded into VAXIMA and run without modification. The physical
organization of the code modules and their logical relations are
described. Sharing of code among different modules and techniques
for improved speed are discussed.",
paper = "Wang84.pdf"
}\end{chunk}
\index{Najid-Zejli, H.}
\begin{chunk}{ axiom.bib}
@inproceedings{Naji84,
author = "Najid-Zejli, H.",
title = {{Computation in Radical Extensions}},
booktitle = "International Symposium on Symbolic and Algebraic
Manipulation" ,
pages = "115-122",
publisher = "Springer",
year = "1984",
comment = "LNCS 174",
paper = "Naji84.pdf"
}\end{chunk}
\index{Czapor, Stephen R.}
\index{Geddes, Keith O.}
\begin{chunk}{ axiom.bib}
@inproceedings{Czap84,
author = "Czapor, Stephen R. and Geddes, Keith O.",
title = {{A Comparison of Algorithms for the Symbolic Computation of
Pade Approximants}},
booktitle = "International Symposium on Symbolic and Algebraic
Manipulation" ,
pages = "248-259",
publisher = "Springer",
year = "1984",
comment = "LNCS 174",
abstract =
"This paper compares three algorithms for the symbolic computation
of Pad\'e approximants: an $O(n^3)$ algorithm based on the direct
solutions of the Hankel linear system exploiting only the property
of symmetry, and $O(n^2)$ algorithm based on the extended
Euclidean algorithm, and an $O(n~log^2~n)$ algorithm based on a
divide-and-conquer version of the extended Euclidean algorithm.
Implementations of these algorithms are presented and some timing
comparisons are given. It is found that the $O(n^2)$ algorithm is
often the fastest for practical sizes of problems and,
surprisingly, the $O(n^2)$ algorithm wins in the important case
where the power series being approximated has an exact rational
function representation.",
paper = "Czap84.pdf"
}\end{chunk}
\index{Lenstra, Arjen K.}
\begin{chunk}{ axiom.bib}
@inproceedings{Lens84,
author = "Lenstra, Arjen K.",
title = {{Polynomial Factorization by Root Approximation}},
booktitle = "International Symposium on Symbolic and Algebraic
Manipulation" ,
pages = "272-276",
publisher = "Springer",
year = "1984",
comment = "LNCS 174",
abstract =
"We show that a constructive version of the fundamental theorem of
algebra [3], combined with the basis reduction algorithm from [1],
yields a polynomial-time algorithm for factoring polynomials in
one variable with rational coefficients.",
paper = "Lens84.pdf"
}\end{chunk}
\index{Davenport, James H.}
\begin{chunk}{ axiom.bib}
@inproceedings{Dave84c,
author = "Davenport, James H.",
title = {{y'+fy=g}},
booktitle = "International Symposium on Symbolic and Algebraic
Manipulation" ,
pages = "341-350",
publisher = "Springer",
year = "1984",
comment = "LNCS 174",
abstract =
"In this paper, we look closely at the equation of the title,
originally consider by Risch, which arises in the integration of
exponentials. We present a minor improvement of Risch's original
presentation, a generalisation of that presentation to algebraic
functions $f$ and $g$, and a new algorithm for the solution of
this equation. Full details of the last two are to appear
elsewhere.",
paper = "Dave84c.pdf"
}\end{chunk}
\index{Lazard, Daniel}
\begin{chunk}{ axiom.bib}
@inproceedings{Laza79,
author = "Lazard, Daniel",
title = {{Systems of Algebraic Equations}},
booktitle = "International Symposium on Symbolic and Algebraic
Manipulation" ,
pages = "88-94",
publisher = "Springer",
year = "1979",
comment = "LNCS 72",
paper = "Laza79.pdf"
}\end{chunk}
\index{Tang, Min}
\index{Li, Bingyu}
\index{Zeng, Zhenbing}
\begin{chunk}{ axiom.bib}
@article{Tang18,
author = "Tang, Min and Li, Bingyu and Zeng, Zhenbing",
title = {{Computing Sparse GCD of Multivariate Polynomials via
Polynomial Interpolation}},
journal = "System Science and Complexity",
volume = "31",
pages = "552-568",
year = "2018",
abstract =
"The problem of computing the greatest common divisor (GCD) of
multivariate polynomials, as one of the most important tasks of
computer algebra and symbolic computation in more general scope,
has been studied extensively since the beginning of the
interdisciplinary of mathematics with computer science. For many
real applications such as digital image restoration and
enhancement, robust control theory of nonlinear systems,
$L_1$-norm convex optimizations in compressed sensing techniques,
as well as algebraic decoding of Reed-Solomon and BCH codes, the
concept of sparse GCD plays a core role where only the greatest
common divisors with much fewer terms than the original
polynomials are of interest due to the nature of problems or data
structures. This paper presents two methods via multivariate
polynomial interpolation which are based on the variation of
Zippel's method and Ben-Or/Tiwari algorithm, respectively. To
reduce computational complexity, probabilistic techniques and
randomization are employed to deal with univariate GCD computation
and univariate polynomial interpolation. The authors demonstrate
the practical performance of our algorithms on a significant body
of examples. The implemented experiment illustrates that our
algorithms are efficient for a quite wide range of input.",
paper = "Tang18.pdf"
}\end{chunk}
\index{Miola, Alfonso}
\index{Yun, David Y.Y.}
\begin{chunk}{ axiom.bib}
@article{Miol74,
author = "Miola, Alfonso and Yun, David Y.Y.",
title = {{Computational Aspects of Hensel-type Univariate Polynomial
Greatest Common Divisor Algorithms}},
journal = "ACM Sigplan Bulletin",
year = "1974",
abstract =
"Two Hensel-type univariate polynomial Greatest Common Divisor
(GCD) algorithms are presented and compared. The regular linear
Hensel construction is shown to be generally more efficient than
the Zassenhaus quadratic construction. The UNIGCD algorithm for
UNIvariate polynomial GCD computations, based on the regular
Hensel construction is then presented and compared with the
Modular algorithm based on the Chinese Remainder Algorithm. From
both an analytical and an experimental point of view, the UNIGCD
algorithm is shown to be preferable for many common univariate GCD
computations. This is true even for dense polynomials, which was
considered to be the most suitable case for the application of the
Modular algorithm.",
paper = "Miol74.pdf"
}\end{chunk}
\index{Brown, W.S.}
\begin{chunk}{ axiom.bib}
@article{Brow71,
author = "Brown, W.S.",
title = {{On Euclid's Algorithm and the Computation of Polynomial
Greatest Common Divisors}},
journal = "J. ACM",
volume = "18",
pages = "478-504",
year = "1971",
abstract =
"This paper examines the computation of polynomial greatest common
divisors by various generalizations of Euclid's algorithm. The
phenomenon of coefficient growth is described, and the history of
successful efforts first to control it and then to eliminate it is
related. The recently developed modular algorithm is presented in careful
detail, with special attention to the case of multivariate
polynomials. The computing times for the classical algorithm and for the
modular algorithm are analyzed, and it is shown that the modular
algorithm is markedly superior. In fact, in the multivariate case,
the maximum computing time for the modular algorithm is strictly
dominated by the maximum computing time for the first
pseudo-division in the classical algorithm.",
paper = "Brow71.pdf",
keywords = "printed"
}\end{chunk}
\index{Collins, George E.}
\begin{chunk}{ axiom.bib}
@article{Coll67,
author = "Collins, George E.",
title = {{Subresultants and Reduced Polynomial Remainder
Sequences}},
journal = "J. ACM",
volume = "14",
number = "1",
pages = "128-142",
year = "1967",
paper = "Coll67.pdf"
}\end{chunk}
\index{Brown, W.S.}
\index{Traub, J.F.}
\begin{chunk}{ axiom.bib}
@article{Brow71a,
author = "Brown, W.S. and Traub, J.F.",
title = {{On Euclid's Algorithm and the Theory of Subresultants}},
journal = "J. ACM",
volume = "18",
number = "4",
pages = "505-514",
year = "1971",
abstract =
"This papers presents an elementary treatment of the theory of
subresultants, and examines the relationship of the subresultants
of a given pair of polynomials to their polynomial remainder
sequence as determined by Euclid's algorithm. Two important
versions of Euclid's algorithm are discussed. The results are
essentially the same as those of Collins, but the presentation is
briefer, simpler, and somewhat more general.",
paper = "Brow71a.pdf",
keywords = "printed"
}\end{chunk}
\index{Moses, Joel}
\index{Zippel, Richard}
\begin{chunk}{ axiom.bib}
@article{Mose79a,
author = "Moses, Joel and Zippel, Richard",
title = {{An Extension of Liouville's Theorem}},
journal = "LNCS",
volume = "72",
pages = "426-430",
year = "1979",
paper = "Mose79a.pdf"
}\end{chunk}
\index{Gabriel, Richard}
\begin{chunk}{ axiom.bib}
@misc{Gabr91,
author = "Gabriel, Richard",
title = {{Lisp: Good News, Bad News, How to Win Big}},
year = "1991",
link = "\url{https://www.dreamsongs.com/WIB.html}"
}\end{chunk}
\index{Chatley, Robert}
\index{Donaldson, Alastair}
\index{Mycroft, Alan}
\begin{chunk}{ axiom.bib}
@article{Chat19,
author = "Chatley, Robert and Donaldson, Alastair and
Mycroft, Alan",
title = {{The Next 7000 Programming Languages}},
journal = "LNCS",
volume = "10000",
pages = "250-282",
year = "2019",
abstract =
"Landin's seminal paper ``The next 700 programming languages''
considered programming languages prior to 1966 and speculated on
the next 700. Half-a-century on, we cast programming languages in a
Darwinian 'tree of life' and explore languages, their features
(genes) and language evolution from a viewpoint of 'survival of the
fittest'. We investigate this thesis by exploring how various languages fared
in the past, and then consider the divergence between the languages
{\sl empirically used in 2017} and the language features one might
have expected if the languages of the 1960s had evolved optimally
to fill programming niches. This leads us to characterise three divergences, or 'elephants in
the room', where actual current language use, or feature provision,
differs from that which evolution might suggest. We conclude by
speculating on future language evolution.",
paper = "Chat19.pdf",
keywords = "DONE"
}\end{chunk}
\index{Pierce, Benjamin C.}
\begin{chunk}{ axiom.bib}
@misc{Pier08,
author = "Pierce, Benjamin C.",
title = {{Types Considered Harmful}},
comment = "Talk at Mathematical Foundations of Programming Languages",
year = "2008",
link = "\url{https://www.cis.upenn.edu/~bcpierce/papers/harmful-mfps.pdf}",
paper = "Pier08.pdf",
keywords = "DONE"
}\end{chunk}
\index{Eisenberg, Richard A.}
\index{Vytiniotis, Dimitrios}
\index{Jones, Simon Peyton}
\index{Weirich, Stephanie}
\begin{chunk}{ axiom.bib}
@inproceedings{Eise14,
author = "Eisenberg, Richard A. and Vytiniotis, Dimitrios and
Jones, Simon Peyton and Weirich, Stephanie",
title = {{Closed Type Families with Overlapping Equations}},
booktitle = "POPL 14",
year = "2014",
abstract =
"Open, type-level functions are a recent innovation in Haskell
that move Haskell towards the expressiveness of dependent types,
while retaining the look and feel of a practical programming
language. This paper shows how to increase expressiveness still
further, by adding closed type functions whose equations may
overlap, and may have non-linear patterns over an open type
universe. Although practically useful and simple to implement,
these features go {\sl beyond} conventional dependent type theory
in some respects, and have a subtle metatheory.",
paper = "Eise14.pdf"
}\end{chunk}
\index{Wand, Mitchell}
\index{Friedman, Dan}
\begin{chunk}{ axiom.bib}
@article{Wand78,
author = "Wand, Mitchell and Friedman, Dan",
title = {{Compiling Lambda-Expressions Using Continuations and
Factorizations}},
journal = "Computer Languages",
volume = "3",
pages = "241-263",
year = "1978",
abstract =
"We present a source-level transformation for recursion-removal
with several interesting characteristics:
\begin{itemize}
\item[(i)] the algorithm is simple and provably correct
\item[(ii)] the stack utilization regime is chosen by the compiler
rather than being fixed by the run-time environment
\item[(iii)] the stack is available at the source language level
so that further optimizations are possible
\item[(iv)] the algorithm arises from ideas in category theory
\end{itemize}
In addition to its implications for compilers, the transformation
algorithm is useful as an implementation technique for advanced
LISP-based systems, and one such application is described.",
paper = "Wand78.pdf",
keywords = "printed"
}\end{chunk}
\index{Rapoport, Marianna}
\index{Kabir, Ifaz}
\index{He, Paul}
\index{Lhotak, Ondrej}
\begin{chunk}{ axiom.bib}
@misc{Rapo17,
author = "Rapoport, Marianna and Kabir, Ifaz and He, Paul and
Lhotak, Ondrej",
title = {{A Simple Soundness Proof for Dependent Object Types}},
year = "2017",
link = "\url{https://arxiv.org/pdf/1706.03814v1.pdf}",
abstract =
"Dependent Object Types (DOT) is intended to be a core calculus
for modelling Scala. Its distinguishing feature is abstract type
members, fields in objects that hold types rather than
values. Proving soundness of DOT has been surprisingly
challenging, and existing proofs are complicated, and reason about
multiple concepts at the same time (e.g. types, values,
evaluation). To serve as a core calculus for Scala, DOT should be
easy to experiment with and extend, and therefore its soundness
proof needs to be easy to modify. This paper presents a simple and modular proof strategy for
reasoning in DOT. The strategy separates reasoning about types
from other concerns. It is centered around a theorem that connects
the full DOT type system to a restricted variant in which the
challenges and paradoxes caused by abstract type members are
eliminated. Almost all reasoning in the proof is done in the
intuitive worlds of this restricted type system. Once we have the
necessary results about types, we observe that the other aspects
of DOT are mostly standard and can be incorporated into a
soundness proof using familiar techniques known from other
calculi. Our paper comes with a machine-verified version of the proof in
Coq.",
paper = "Rapo17.pdf",
keywords = "printed"
}\end{chunk}
\index{Daumas, Marc}
\index{Lester, David}
\index{Munoz, Cesar}
\begin{chunk}{ axiom.bib}
@misc{Daum07,
author = "Daumas, Marc and Lester, David and Munoz, Cesar",
title = {{Verified Real Number Calculations: A Library for Interval
Arithmetic}},
year = "2007",
link = "\url{https://arxiv.org/pdf/0708.3721.pdf}",
abstract =
"Real number calculations on elementary functions are remarkably
difficult to handle in mechanical proofs. In this paper, we show
how these calculations can be performed within a theorem prover or
proof assistant in a convenient and highly automated as well as
interactive way. First, we formally establish upper and lower
bounds for elementary functions. Then, based on these bounds, we
develop a rational interval arithmetic where real number
calculations take place in an algebraic setting. In order to
reduce the dependency effect of interval arithmetic, we integrate
two techniques: interval splitting and Taylor series
expansions. This pragmatic approach has been developed, and
formally verified, in a theorem prover. The formal development
also includes a set of customizable strategies to automate proofs
involving explicit calculations over real numbers. Our ultimate
goal is to provide guaranteed proofs of numerical properties with
minimal human theorem-prover interaction.",
paper = "Daum07.pdf"
}\end{chunk}
\index{Appel, Andrew W.}
\index{Dockins, Robert}
\index{Hobor, Aquinas}
\index{Beringer, Lennart}
\index{Dodds, Josiah}
\index{Stewart, Gordon}
\index{Blazy, Sandrine}
\index{Leroy, Xavier}
\begin{chunk}{ axiom.bib}
@book{Appe14,
author = "Appel, Andrew W. and Dockins, Robert and Hobor, Aquinas
and Beringer, Lennart and Dodds, Josiah and
Stewart, Gordon and Blazy, Sandrine and Leroy, Xavier",
title = {{Program Logics for Certified Compilers}},
publisher = "Cambridge University Press",
isbn = "978-1-107-04801-0",
year = "2014",
paper = "Appe14.pdf"
}\end{chunk}
\index{Leroy, Xavier}
\begin{chunk}{ axiom.bib}
@misc{Lero08,
author = "Leroy, Xavier",
title = {{Formal Verification of a Realistic Compiler}},
year = "2008",
link = "\url{https://xavierleroy.org/publi/compcert-CACM.pdf}",
abstract =
"This paper reports on the development and formal verification
(proof of semantic preservation) of CompCert, a compiler from
Clight (a large subset of the C programming language) to PowerPC
assembly code, using the Coq proof assistant both for programming
the compiler and for proving its correctness. Such a verified
compiler is useful in the context of critical software and its
formal verification: the verification of the compiler guarantees
that the safety properties proved on the source code hold for the
executable compiled code as well.",
paper = "Lero08.pdf"
}\end{chunk}
\index{Bauer, Andrej}
\index{Haselwarter, Philipp G.}
\index{Lumsdaine, Peter LeFanu}
\begin{chunk}{ axiom.bib}
@misc{Baue20,
author = "Bauer, Andrej and Haselwarter, Philipp G. and
Lumsdaine, Peter LeFanu",
title = {{A General Definition of Dependent Type Theories}},
year = "2020",
link = "\url{https://arxiv.org/pdf/2009.05539.pdf}",
abstract =
"We define a general class of dependent type theories,
encompassing Martin-L\"of's intuitionistic type theories and
variants and extensions. The primary aim is pragmatic: to unify
and organise their study, allowing results and constructions to be
given in reasonable generality, rather than just for specific
theories. Compared with other approaches, our definition stays
closer to the direct or naive reading of syntax, yielding the
traditional presentation of specific theories as closely as
possible. Specifically, we give three main definitions: {\sl raw type
theories}, a minimal setup for discussing dependently typed
derivability; {\sl acceptable type theories}, including extra
conditions ensuring well-behavedness; and {\sl well-presented type
theories}, generalising how in traditional presentations, the
well-behavedness of a type theory is established step by step as
the type theory is built up. Following these, we show that various
fundamental fitness-for-purpose metatheorems hold in this
generality. Much of the present work has been formalised in the proof
assistant Coq.",
paper = "Baue20.pdf",
keywords = "printed"
}\end{chunk}
\index{Longley, John}
\begin{chunk}{ axiom.bib}
@article{Long99,
author = "Longley, John",
title = {{Matching Typed and Untyped Realizability}},
journal = "Theoretical Computer Science",
volume = "23",
number = "1",
year = "1999",
abstract =
"Realizability interpretations of logics are given by saying what
it means for computational objects of some kind to {\sl realize}
logical formulae. The computational object in question might be
drawn from an untyped universe of computation, such as a partial
combinatory algebra, or they might be typed objects such as terms
of a PCF-style programming language. In some instances, one can
show that a particular untyped realizability interpretation
matches a particular typed one, in the sense that they give the
same set of realizable formulae. In this case, we have a very good
fit indeed between the typed language and the untyped
realizability model -- we refer to this condition as
{\sl (constructive) logical full abstraction}.We give some examples of this situation for a variety of
extensions of PCF. Of particular interest are some models that are
logically fully abstract for typed languages including
{\sl non-functional} features. Our results establish connections
between what is computable in various programming languages and
what is true inside various realizable toposes. We consider some
examples of logical formulae to illustrate these ideas, in
particular their application to exact real-number compatibility.",
paper = "Long99.pdf",
keywords = "printed"
}\end{chunk}
\index{Bowman, William J.}
\begin{chunk}{ axiom.bib}
@misc{Bowm20,
author = "Bowman, William J.",
title = {{Cur: Designing a Less Devious Proof Assistant}},
year = "2020",
link = "\url{https://vimeo.com/432569820}",
abstract =
"Dijkstra said that our tools can have a profound and devious
influence on our thinking. I find this especially true of modern
proof assistants, with ``devious'' out-weighing ``profound''. Cur
is an experiment in design that aims to be less devious. The
design emphasizes language extension, syntax manipulation, and
DSL construction and integration. This enables the user to be in
charge of how they think, rather than requiring the user to
contort their thinking to that of the proof assistant. In this
talk, my goal is to convince you that you want similar
capabilities in a proof assistant, and explain and demonstrate
Cur's attempt at solving the problem.",
keywords = "DONE"
}\end{chunk}
\index{Croxford, Martin}
\index{Chapman, Roderick}
\begin{chunk}{ axiom.bib}
@misc{Crox05,
author = "Croxford, Martin and Chapman, Roderick",
title = {{Correctness by Construction: A Manifesto for
High-Integrity Software}},
year = "2005",
link = "\url{https://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.190.867}",
abstract =
"High-integrity software systems are often so large that
conventional development processes cannot get anywhere near
achieving tolerable defect rates. This article presents an
approach that has delivered software with very low defect rates
cost-effectively. We describe the technical details of the
approach and the results achieved, and discuss how to overcome
barriers to adopting such best practice approaches. We conclude by
observing that where such approaches are compatible and can be
deployed in combination, we have the opportunity to realize the
extremely low defect rates needed for high integrity software
composed of many million lines of code.",
paper = "Crox05.pdf",
keywords = "DONE"
}\end{chunk}
\index{Kowalski, R.}
\begin{chunk}{ axiom.bib}
@article{Kowa84,
author = "Kowalski, R.",
title = {{The Relation Between Logic Programming and Logic
Specification}},
journal = "Phil. Trans. R. Soc. Lond.",
volume = "312",
pages = "345-351",
year = "1984",
link = "\url{www.doc.ic.ac.uk/~rak/papers/logic%20programming%20and%20specification.pdf}",
abstract =
"Formal logic is widely accepted as a program specification
language in computing science. It is ideally suited to the
representation of knowledge and the description of problems
without regard to the choice of programming language. Its use as a
specification language is compatible not only with conventional
programming languages but also with programming languages based
entirely on logic itself. In this paper I shall investigate the
relation that holds when both programs and program specifications
are expressed in formal logic.In many cases, when a specification {\sl completely} defines the
relations to be computed, there is no syntactic distinction
between specification and program. Moreover the same mechanism
that is used to execute logic programs, namely automated
deduction, can also be used to execute logic specifications. Thus
all relations defined by complete specifications are
executable. The only difference between a complete specification
and a program is one of efficiency. A program is more efficient
than a specification.",
paper = "Kowa84.pdf",
keywords = "printed"
}\end{chunk}
\index{Cousot, Patrick}
\index{Cousot, Radhia}
\index{Fahndrich, Manuel}
\index{Logozzo, Francesco}
\begin{chunk}{ axiom.bib}
@misc{Cous13,
author = "Cousot, Patrick and Cousot, Radhia and Fahndrich, Manuel
and Logozzo, Francesco",
title = {{Automatic Inference of Necessary Preconditions}},
year = "2013",
abstract =
"We consider the problem of {\sl automatic} precondition
inference. We argue that the common notion of {\sl sufficient}
precondition inference (i.e., under which precondition is the
program correct?) imposes too large a burden on callers, and hence
it is unfit for automatic program analysis. Therefore, we define
the problem of {\sl necessary} precondition inference (i.e., under
which precondition, if violated, will the program {\sl always} be
incorrect?). We designed and implemented several new abstract
interpretation-based analyses to infer atomic, disjunctive,
universally and existentially quantified necessary preconditions.We experimentally validated the analyses on large scale industrial
code. For unannotated code, the inference algorithms find
necessary preconditions for almost 64\% of methods which contained
warnings. In 27\% of these cases the inferred preconditions were
also {\sl sufficient}, meaning all warnings within the method body
disappeared. For annotated code, the inference algorithms find
necessary preconditions for over 68\% of methods with warnings. In
almost 50\% of these cases the preconditions were also
sufficient. Overall, the precision improvement obtained by
precondition inference (counted as the additional number of
methods with no warnings) ranged between 9\% and 21\%.",
paper = "Cous13.pdf"
}\end{chunk}
\index{Calcagno, Cristiano}
\index{Distefano, Dino}
\index{Dubreil, Jeremy}
\index{Gabi, Dominik}
\index{Hooimeijer, Pieter}
\index{Luca, Martino}
\index{O'Hearn, Peter}
\index{Papakonstantinou, Irene}
\index{Purbrick, Jim}
\index{Rodriguez, Dulma}
\begin{chunk}{ axiom.bib}
@misc{Calc16,
author = "Calcagno, Cristiano and Distefano, Dino and
Dubreil, Jeremy and Gabi, Dominik and
Hooimeijer, Pieter and Luca, Martino and
O'Hearn, Peter and Papakonstantinou, Irene and
Purbrick, Jim and Rodriguez, Dulma",
title = {{Moving Fast with Software Verification}},
year = "2016",
abstract =
"For organisations like Facebook, high quality software is
important. However, the pace of change and increasing complexity of
modern code makes it difficult to produce error-free
software. Available tools are often lacking in helping programmers
develop more reliable and secure applications.Formal verification is a technique able to detect software errors
statically, before a product is actually shipped. Although this
aspect makes this technology very appealing in principle, in
practice there have been many difficulties that have hindered the
application of software verification in industrial
environments. In particular, in an organisation like Facebook
where the release cycle is fast compared to more traditional
industries, the deployment of formal techniques is highly
challenging.This paper describes our experience in integrating a verification
tool based on static analysis into the software development cycle
at Facebook.",
paper = "Calc16.pdf"
}\end{chunk}
\index{Ryder, Chris}
\index{Thompson, Simon}
\begin{chunk}{ axiom.bib}
@techreport{Ryde99,
author = "Ryder, Chris and Thompson, Simon",
title = {{Aldor meets Haskell}},
type = "technical report",
institution = "University of Kent",
number = "21762",
year = "1999",
abstract =
"The aim of this project was to attempt to output a Haskell
representation of the Aldor compiler's abstract syntax tree. The
purpose of this is to enable the representation to be executed and
to give an experimental platform in which to look at how to
circumvent some of the limitations of the Aldor compilers type
checker.",
paper = "Ryde99.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Corless, Robert}
\index{Watt, Stephen}
\index{Zhi, Lihong}
\begin{chunk}{ axiom.bib}
@article{Corl04,
author = "Corless, Robert and Watt, Stephen and Zhi, Lihong",
title = {{QR Factoring to Compute the GCD of Univariate Approximate
Polynomials}},
journal = "IEEE Trans. on Signal Processing",
volume = "52",
number = "12",
year = "2004",
abstract =
"We present a stable and practical algorithm that uses QR factors
of the Sylvester matrix to compute the greatest common divisor
(GCD) of univariate approximate polynomials over $\mathbb{R}[x]$
or $\mathbb{C}[x]$. An approximate polynomial is a polynomial with
coefficients that are not known with certainty. The algorithm of
this paper improves over previously published algorithms by
handling the case when common roots are near to or outside the
unit circle, by splitting and reversal if necessary. The algorithm
has been tested on thousands of examples, including pairs of
polynomials of up to degree 1000, and is now distributed as the
program QRGCD in the SNAP package of Maple 9.",
paper = "Corl04.pdf"
}\end{chunk}
\index{Watt, Stephen M.}
\begin{chunk}{ axiom.bib}
@phdthesis{Watt85,
author = "Watt, Stephen M.",
title = {{Bounded Parallelism in Computer Algebra}},
school = "University of Waterloo",
year = "1985",
link = "\url{http://www.csd.uwo.ca/~watt/pub/reprints/1985-smw-phd.pdf}",
abstract =
"This thesis examines the performance improvements that can be
made by exploiting parallel processing in symbolic mathematical
computation. The study focuses on the use of high-level
parallelism in the case where the number of processors is fixed
and independent of the problem size, as in existing
multiprocessors. Since seemingly small changes to the inputs can cause dramatic
changes in the execution times of many algorithms in computer
algebra, it is not generally useful to use static scheduling. We
find it is possible, however, to exploit the high-level parallelism
in many computer algebra problems using dynamic scheduling methods
in which subproblems are treated homogeneously.Our investigation considers the reduction of execution time in
both the case of AND-parallelism, where all of the subproblems
must be completed, and the less well studied case of
OR-parallelism, where completing any one of the subproblems is
sufficient. We examine the use of AND and OR-parallelism in terms
of the {\sl problem heap} and {\sl collusive} dynamic scheduling
schemes which allow a homogeneous treatment of subtasks. A useful
generalization is also investigated in which each of the subtasks
may either succeed or fail and execution completes when the first
success is obtained.We study certain classic problems in computer algebra within this
framework. A collusive method for integer factorization is
presented. This method attempts to find different factors in
parallel, taking the first one that is discovered. Problem heap
algorithms are given for the computation of multivariate
polynomial GCDs and the computation of Grobner bases. The GCD
algorithm is based on the sparse modular GCD and performs the
interpolations in parallel. The Grobner basis algorithm exploits
the independence of the reductions in basis completion to obtain a
parallel method.In order to make evaluations in concrete terms, we have
constructed a system for running computer algebra programs on a
multiprocessor. The system is a version of Maple able to
distribute processes over a local area network. The fact that the
multiprocessor is a local area network need not be considered by
the programmer.",
paper = "Watt85.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Watt, Stephen M.}
\begin{chunk}{ axiom.bib}
@article{Watt06,
author = "Watt, Stephen M.",
title = {{Making Computer Algebra More Symbolic}},
journal = "Proc. Transgressive Computing",
pages = "43-49",
year = "2006",
abstract =
"This paper is a step to bring closer together two views of
computing with mathematical objects: the view of ``symbolic
computation'' and the view of ``computer algebra''. Symbolic
computation may be seen as working with expression trees
representing mathematical formulae and applying various rules to
transform them. Computer algebra may be seen as developing
constructive algorithms to compute algebraic quantities in various
arithmetic domains, possibly involving indeterminates. Symbolic
computation allows a wider range of expression, while computer
algebra admits greater algorithmic precision. We examine the
problem of providing polynomials symbolic exponents. We present a
natural algebraic structure in which such polynomials may be
defined and a notion of factorization under which these
polynomials form a UFD.",
paper = "Watt06.pdf"
}\end{chunk}
\index{Watt, Stephen M.}
\begin{chunk}{ axiom.bib}
@inproceedings{Watt06a,
author = "Watt, Stephen M.",
title = {{Post Facto Type Extension for Mathematical Programming}},
booktitle = "Workshop on Domain Specific Aspect Language",
publisher = "ACM",
year = "2006",
abstract =
"We present the concept of {\sl post facto extensions}, which may
be used to enrich types after they have been defined. Adding
exported behaviours without altering data representation permits
existing types to be augmented without renaming. This allows large
libraries to be structured in a clean, layered fashion and allows
independently developed software components to be used
together. This form of type extension has been found to be
particularly useful in mathematical software, where often new
abstractions are applicable to existing objects. We describe an
implementation of post facto extension, as provided by Aldor, and
explain how it has been used to structure a large mathematical
library.",
paper = "Watt06a.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Bronstein, Manuel}
\index{Maza, Marc Moreno}
\index{Watt, Stephen M.}
\begin{chunk}{ axiom.bib}
@inproceedings{Bron07a,
author = "Bronstein, Manuel and Maza, Marc Moreno and
Watt, Stephen M.",
title = {{Generic Programming Techniques in Aldor}},
booktitle = "Proc. of AWFS",
publisher = "unknown",
year = "2007",
pages = "72-77",
abstract =
"Certain problems in mathematical computing present unusual
challenges in structuring software libraries. Although generics,
or templates as they are sometimes called, have been in wide use
now for many years, new ways to use them to improve library
architecture continue to be discovered. We find mathematical
software to be particularly useful in exploring these ideas
because the domain is extremely rich while being
well-specified. In this paper we present two techniques for using
generics in mathematical computation: The first allows efficient
formulation of generic algorithms for modular computations. The
second allow mathematical domains to be endowed with additional
algorithms after they are defined and to have these algorithms
used in generic libraries.",
paper = "Bron07a.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Watt, Stephen M.}
\begin{chunk}{ axiom.bib}
@inproceedings{Watt09a,
author = "Watt, Stephen M.",
title = {{On the Future of Computer Algebra Systems at the threshold
of 2010}},
booktitle = "Proc. ASCM-MACIS",
publisher = "unknown",
pages = "422-430",
abstract =
"This paper discusses ways in which software systems for computer
algebra could be improved if designed from scratch today rather
than evolving designs from the 1980s",
paper = "Watt09a.pdf"
}\end{chunk}
\index{Mao, Lei}
\begin{chunk}{ axiom.bib}
@misc{Maox20,
author = "Mao, Lei",
title = {{Euclidean Algorithm}},
link = "\url{https://leimao.github.io/blog/Euclidean-Algorithm}",
year = "2020"
}\end{chunk}
\index{Makkai, Michael}
\begin{chunk}{ axiom.bib}
@misc{Makk07,
author = "Makkai, Michael",
title = {{Formal Specification of Computer Programs}},
year = "2007",
link = "\url{https://www.math.mcgill.ca/makkai/MATH3182007/31807552.pdf}",
paper = "Makk07.pdf",
keywords = "printed"
}\end{chunk}
\index{Baker, Henry G.}
\begin{chunk}{ axiom.bib}
@article{Bake93b,
author = "Baker, Henry G.",
title = {{Equal Rights for Functional Objects or, The More Things
Change, the More They Are the Same}},
journal = "OOPS",
publisher = "ACM",
pages = "2-27",
year = "1993",
abstract =
"We argue that intensional {\sl object identity} in
object-oriented programming languages and databases is best
defined operationally by side-effect semantics. A corollary is
that ``functional'' objects have extensional semantics. This model
of object identity, which is analogous to the normal forms of
relational algebra, provides cleaner semantics for the
value-transmission operations and built-in primitive equality
predicate of a programming language, and eliminates the confusion
surrounding ``call-by-value'' and ``call-by-reference'' as well as
the confusion of multiple equality predicates.Implementation issues are discussed, and this model is shown to
have significant performance advantages in persistent, parallel,
distributed and multilingual processing environments. This model
also provides insight into the ``type equivalence'' problem of
Algol-68, Pascal and Ada.",
paper = "Bake93b.pdf",
keywords = "printed"
}\end{chunk}
\index{Dijkstra, Edsger W.}
\begin{chunk}{ axiom.bib}
@misc{Dijk12,
author = "Dijkstra, Edsger W.",
title = {{Some Meditations on Advanced Programming}},
link = "\url{https://www.cs.utexas.edu/users/EWD/transcriptions/EWD00xx/EWD32.html}",
year = "2012"
}\end{chunk}
\index{Lim, Jay P.}
\index{Aanjaneya, Mridul}
\index{Gustafson, John}
\index{Nagarakatte, Santosh}
\begin{chunk}{ axiom.bib}
@misc{Limx20,
author = "Lim, Jay P. and Aanjaneya, Mridul and Gustafson, John
and Nagarakatte, Santosh",
title = {{A Novel Approach to Generate Correctly Rounded Math
Libraries for New Floating Point Representations}},
link = "\url{https://arxiv.org/pdf/2007.05344.pdf}",
year = "2020",
abstract =
"Given the importance of floating-point (FP) performance in
numerous domains, several new variants of FP and its alternatives
have been proposed (e.g. Bfloat16, TensorFloat32, and
Posits). These representations do not have correctly rounded math
libraries. Further, the use of existing FP libraries for these new
representations can produce incorrect results. This paper proposes
a novel methodology for generating polynomial approximations that
can be used to implement correctly rounded math libraries. Existing
methods produce polynomials that approximate the real value of an
elementary function $f(x)$ and experience wrong results due to
errors in the approximation and due to rounding errors in the
implementation. In contrast, our approach generates polynomials
that approximate the correctly rounded value of $f(x)$ (i.e. the
value of $f(x)$ rounded to the target representation). This
methodology provides more margin to identify efficient polynomials
that produce correctly rounded results for all inputs. We frame
the problem of generating efficient polynomials that produce
correctly rounded results as a linear programming problem. Our
approach guarantees that we produce the correct result even with
range reduction techniques. Using our approach, we have developed
correctly rounded, yet faster, implementations of elementary
functions for multiple target representations. Our Bfloat16
library is 2.3x faster than the corresponding state-of-the-art
while producing correct results for all inputs.",
paper = "Limx20.pdf"
}\end{chunk}
\index{Lanczos, Cornelius}
\begin{chunk}{ axiom.bib}
@misc{Lanc13,
author = "Lanczos, Cornelius",
title = {{Cornelius Lanczos (1893-1974) about Mathematics}},
year = "2013",
link = "\url{https://www.youtube.com/watch?v=avSHHi9QCjA}"
}\end{chunk}
\index{Schultz, Daniel}
\begin{chunk}{ axiom.bib}
@misc{Schu16,
author = "Schultz, Daniel",
title = {{Trager's Algorithm for Integration of Algebraic Functions
Revisited}},
year = "2016",
link = "\url{https://sites.psu.edu/dpsmath/files/2016/12/IntegrationOnCurves-2hhuby8.pdf}",
abstract =
"Building on work of Risch in the 1980s and Liouville in the
1840s, Trager published an algorithm for deciding if a given
algebraic function has an elementary antiderivative. While this
algorithm is theoretically complete, it is incomplete in the sense
that assumptions are made about the function to be integrated in
relation to the defining equation for the algebraic
irrationality. These assumptions can be justified by a change of
variables in the defining equation, but this does not lead to the
most natural algorithm for integration. We fill in the 'gaps' in
Trager's algorithm and partially implement this algorithm in the
computer algebra system Mathematica. Various extensions to
Trager's algorithm are also discussed including a remedy to
several of the possible points of failure in the algorithm as well
as the problem of the presence of zero divisors in the algebraic
function to be integrated.",
paper = "Schu16.pdf"
}\end{chunk}
\index{Elbakyan, Alexandra}
\index{Francis, Robin}
\begin{chunk}{ axiom.bib}
@misc{Elba20,
author = "Elbakyan, Alexandra and Francis, Robin",
title = {{Should Knowledge Be Free?}},
year = "2020",
link = "\url{https://www.youtube.com/watch?v=PriwCi6SzLo}"
}\end{chunk}
\index{Ferguson, Craig}
\begin{chunk}{ axiom.bib}
@book{Ferg06,
author = "Ferguson, Craig",
title = {{Between the Bridge and the River}},
year = "2006",
publisher = "Chronicle Books LLC",
isbn = "978-0-8118-7303-1",
keywords = "DONE"
}\end{chunk}
\index{Axiom Authors}
\begin{chunk}{ axiom.bib}
@misc{list20,
author = "Axiom Authors",
title = {{axiom-developer Archives}},
year = "2020",
link = "\url{https://lists. nongnu. org/archive/ html/axiom- developer/}"
}\end{chunk}
\index{Bridges, Jeff}
\begin{chunk}{ axiom.bib}
@misc{Lebo98,
author = "Coen, Ethan and Coen, Joel",
title = {{The Big Lebowski}},
year = "1998",
link = "\url{https://www.imdb.com/title/tt0118715}"
}\end{chunk}
\index{Davant, James B.}
\begin{chunk}{ axiom.bib}
@article{Dava75,
author = "Davant, James B.",
title = {{Wittgenstein on Russell's Theory of Types}},
journal = "Notre Dame Journal of Formal Logic",
volume = "16",
number = "1",
year = "1975",
paper = "Dava75.pdf",
keywords = "DONE"
}\end{chunk}
\begin{chunk}{ axiom.bib}
@article{Cont81,
author = "Unknown",
title = {{Final Report on the National Commission of New
Technological Uses of Copyrighted Works}},
journal = "Computer/Law Journal",
volume = "3",
number = "1",
year = "1981",
comment = "CONTU Report",
paper = "Cont81.pdf"
}\end{chunk}
\index{Dijkstra, Edsger W.}
\begin{chunk}{ axiom.bib}
@article{Dijk74,
author = "Dijkstra, Edsger W.",
title = {{Programming as a Discipline of Mathematical Nature}},
year = "1974",
comment = "EWD361",
link = "\url{https://www.cs. utexas. edu/users/ EWD/ewd03xx/ EWD361. PDF}",
keywords = "DONE"
}\end{chunk}
\index{Rich, Albert}
\index{Scheibe, Patrick}
\index{Abbasi, Nasser}
\begin{chunk}{ axiom.bib}
@article{Rich18,
author = "Rich, Albert and Scheibe, Patrick and Abbasi, Nasser",
title = {{Rule-based Integration: An Extensive System of Symbolic
Integration Rules}},
journal = "Journal of Open Source Software",
volume = "3.32",
year = "2018",
abstract =
"Finding the antiderivative of expressions is often challenging
and requires advanced mathematical skills even for simple looking
problems. Computer algebra systems (CAS) like Mathematica (Wolfram
Research, Inc., Champaign, IL), Maple (Maplesoft, a division of
Waterloo Maple Inc., Waterloo, Ontario), and Maxima
(maxima.sourceforge. net) provide integrators to compute
antiderivatives symbolically. However, these systems give no
insight as to how an antiderivative is found or why it could not
be computed. Also, they use advanced methods incomprehensible to
humans that often result in huge antiderivatives unnecessarily
involving special functions.",
paper = "Rich18.pdf",
keywords = "axiomref, DONE"
}\end{chunk}
\index{Scheibe, Patrick}
\begin{chunk}{ axiom.bib}
@misc{Sche18,
author = "Scheibe, Patrick",
title = {{Rubi - The Rule-based Integrator for Mathematica}},
link = "\url{https://community.wolfram.com/groups/-/m/t/1421180}",
year = "2018",
keywords = "axiomref, DONE"
}\end{chunk}
\index{Hutton, Graham}
\index{Meijer, Erik}
\begin{chunk}{ axiom.bib}
@techreport{Hutt96,
author = "Hutton, Graham and Meijer, Erik",
title = {{Monadic Parser Combinators}},
type = "technical report",
institution = "University of Nottingham",
number = "NOTTCS-TR-96-4",
year = "1996",
link = "\url{http://raw.githubusercontent.com/drewc/smug/master/doc/monparsing.org}",
abstract =
"In functional programming, a popular approach to building
recursive descent parsers is to model parsers as functions, and to
define higher-order functions (or combinators) that implement
grammar constructions such as sequencing, choice, and
repetition. Such parsers form an instance of a monad, an algebraic
structure from mathematics that has proved useful for addressing a
number of computational problems. The purpose of this article is to
provide a step-by-step tutorial on the monadic approach to
building functional parsers, and to explain some of the benefits
that result from exploiting monads. No prior knowledge of parser
combinators or of monads is assumed. Indeed, this article can also
be viewed as a first introduction to the use of monads in
programming.",
paper = "Hutt96.txt"
}\end{chunk}
\index{Jahren, Hope}
\begin{chunk}{ axiom.bib}
@book{Jahr20,
author = "Jahren, Hope",
title = {{The Story of More: How We Got to Climate Change and Where
to Go from Here}},
publisher = "Vintage Books",
isbn = "978-0-525-56399-6",
year = "2020",
keywords = "DONE"
}\end{chunk}
\index{Maling, K.}
\begin{chunk}{ axiom.bib}
@techreport{Mali59,
author = "Maling, K.",
title = {{The Lisp Differentiation Demonstration Program}},
type = "AI Memo",
institution = "MIT Project MAC",
number = "AI Memo 10",
year = "1959",
link = "\url{ftp://publications.ai.mit.edu/ai-publications/pdf/AIM-10.pdf}",
paper = "Mali59.pdf"
}\end{chunk}
\index{Moses, Joel}
\begin{chunk}{ axiom.bib}
@techreport{Mose66,
author = "Moses, Joel",
title = {{Symbolic Integration}},
type = "AI Memo",
institution = "MIT Project MAC",
number = "AI Memo 97",
year = "1966",
link = "\url{ftp://publications.ai.mit.edu/ai-publications/pdf/AIM-097.pdf}",
abstract =
"A program has been written which is capable of integrating all
but two of the problems solved by Slagle's symbolic integration
program SAINT. In contrast to SAINT, it is a purely algorithmic
program and it has achieved running times two to three orders of
magnitude faster than SAINT. This program and some of the basic
routines which it uses are described. A heuristic for integration,
called the Edge heuristic, is presented. It is claimed that this
heuristic with the aid of a few algorithms is capable of solving
all the problems solved by the algorithmic program and many others
as well.",
paper = "Mose66.pdf"
}\end{chunk}
\index{Moses, Joel}
\begin{chunk}{ axiom.bib}
@techreport{Mose66a,
author = "Moses, Joel",
title = {{Symbolic Integration - II}},
type = "AI Memo",
institution = "MIT Project MAC",
number = "AI Memo 97A",
year = "1966",
link = "\url{ftp://publications.ai.mit.edu/ai-publications/pdf/AIM-097a.pdf}",
abstract =
"In this memo we describe the current state of the integration
program originally described in AI Memo 97 (MAC-M-310). Familiarity
with Memo 97 is assumed. Some of the algorithms described in that
memo have been extended. Certain new algorithms and a simple
integration by parts routine have been added. The current program
can integrate all of the problems which were solved by SAINT and
also the two problems attempted by it and not solved. Due to the
addition of a decision procedure the program is capable of
identifying certain integrands (such as $e^x$ or $e^x/x$) as not
integrable in closed form.",
paper = "Mose66a.pdf"
}\end{chunk}
\index{Constable, Robert}
\begin{chunk}{ axiom.bib}
@misc{Cons12,
author = "Constable, Robert",
title = {{Proofs as Processes}},
year = "2012",
link = "\url{https://www.cs.uoregon.edu/research/summerschool/summer12/curriculum.html}",
keywords = "DONE"}
\end{chunk}
\index{Mimram, Samuel}
\begin{chunk}{ axiom.bib}
@book{Mimr20,
author = "Mimram, Samuel",
title = {{Program = Proof}},
publisher = "Polytechnique.fr",
year = "2020",
link = "\url{www.lix.polytechnique.fr/Labo/Samuel.Mimram/teaching/IFN551/course.pdf}",
paper = "Mimr20.pdf"
}\end{chunk}
\index{Voevodsky, Vladimir}
\begin{chunk}{ axiom.bib}
@misc{Voev14,
author = "Voevodsky, Vladimir",
title = {{Univalent Foundations}},
link = "\url{http://www.math.ias.edu/~vladimir/Site3/Univalent_Foundations_files/2014_IAS.pdf}",
year = "2014",
paper = "Voev14.pdf",
keywords = "DONE"
}\end{chunk}
\index{Gratzer, Daniel}
\index{Kavvos, G.A.}
\index{Nuyts, Andreas}
\index{Birkedal, Lars}
\begin{chunk}{ axiom.bib}
@misc{Grat20,
author = "Gratzer, Daniel and Kavvos, G.A. and Nuyts, Andreas
and Birkedal, Lars",
title = {{Multimodal Dependent Type Theory}},
year = "2020",
link = "\url{https://arxiv.org/pdf/2011.15021.pdf}",
abstract =
"We introduce MTT, a dependent type theory which supports multiple
modalities. MTT is parameterized by a mode theory which specifies
a collection of modes, modalities, and transformations between
them. We show that different choices of mode theory allow us to
use the same type theory to compute and reason in many modal
situations, including guarded recursion, axiomatic cohesion, and
parametric quantification. We reproduce examples from prior work
in guarded recursion and axiomatic cohesion -- demonstrating that
MTT constitutes a simple and usable syntax whose instantiations
intuitively correspond to previous handcrafted modal type
theories. In some cases, instantiating MTT to a particular
situation unearths a previously unknown type theory that improves
upon prior systems. Finally, we investigate the metatheory of
MTT. We prove the consistency of MTT and establish canonicity
through an extension of recent type-theoretic gluing
techniques. These results hold irrespective of the choice of mode
theory, and thus apply to a wide variety of modal situations.",
paper = "Grat20.pdf"
}\end{chunk}
\index{Smullyan, Raymond M.}
\begin{chunk}{ axiom.bib}
@book{Smul95,
author = "Smullyan, Raymond M.",
title = {{First-Order Logic}},
publisher = "Dover",
year = "1995",
isbn = "978-0-486-68370-6",
keywords = "shelf"
}\end{chunk}
\index{Pfenning, Frank}
\begin{chunk}{ axiom.bib}
@misc{Pfen17,
author = "Pfenning, Frank",
title = {{Lecture Notes on Sequent Calculus}},
year = "2017",
link = "\url{http://www.cs.cmu.edu/~fp/courses/15317-f17/lectures/09-seqcalc.pdf}",
paper = "Pfen17.pdf",
keywords = "printed"
}\end{chunk}
\index{Stepanov, Alexander}
\index{McJones, Paul}
\begin{chunk}{ axiom.bib}
@book{Step09,
author = "Stepanov, Alexander and McJones, Paul",
title = {{Elements of Programming}},
publisher = "Semigroup Press",
year = "2009",
isbn = "978-0-578-22214-1",
keywords = "shelf"
}\end{chunk}
\index{Floyd, Robert W.}
\index{Knuth, Donald E.}
\begin{chunk}{ axiom.bib}
@article{Floy90,
author = "Floyd, Robert W. and Knuth, Donald E.",
title = {{Addition Machines}},
journal = "SIAM Journal on Computing",
volume = "19",
number = "2",
pages = "329-340",
year = "1990",
comment = "Stanford Report STAN-CS-89-1268",
link =
"\url{i.stanford. edu/pub/ cstr/reports/ cs/tr/89/ 1268/CS- TR-89-1268. pdf}",
abstract =
"It is possible to compute gcd(x,y) efficiently with only
$O(\log xy)$ additions and subtractions, when three arithmetic
registers are available but not when there are only two. Several
other functions, such as $x^y \bmod z$, are also efficiently
computable in a small number of registers, using only addition,
subtraction, and comparison.",
paper = "Floy90.pdf"
}\end{chunk}
\index{Almeida, Jose Bacelar}
\index{Frade, Maria Joao}
\index{Pinto, Jorge Sousa}
\index{de Sousa, Simao Melo}
\begin{chunk}{ axiom.bib}
@book{Alme11,
author = "Almeida, Jose Bacelar and Frade, Maria Joao and
Pinto, Jorge Sousa and de Sousa, Simao Melo",
title = {{Rigorous Software Development}},
year = "2011",
publisher = "Springer",
isbn = "978-0-85729-017-5",
keywords = "shelf"
}\end{chunk}
\index{Nielson, Flemming}
\index{Nielson, Hanne Riis}
\begin{chunk}{ axiom.bib}
@book{Niel19,
author = "Nielson, Flemming and Nielson, Hanne Riis",
title = {{Formal Methods}},
year = "2019",
publisher = "Springer",
keywords = "shelf"
}\end{chunk}
\index{Kaufmann, Matt}
\index{Moore, J Strother}
\begin{chunk}{ axiom.bib}
@misc{Kauf97,
author = "Kaufmann, Matt and Moore, J Strother",
title = {{An Industrial Strength Theorem Prover for a Logic Based on
Common Lisp}},
year = "1997",
link = "\url{web.eecs.umich. edu/~bchandra/ courses/ papers/ Kaufmann_ ACL2.pdf} ",
abstract =
"ACL2 is a re-implemented extended version of Boyer and Moore's
Nqthm and Kaufmann's Pc-Nqthm intended for large scale
verification projects. This paper deals primarily with how we
scaled up Nqthm's logic to an ``industrial strength'' programming
language -- namely, a large applicative subset of Common Lisp --
while preserving the use of total functions within a logic. This
makes it possible to run formal models efficiently while keeping
the logic simple. We enumerate many other important features of
ACL2 and we briefly summarize two industrial applications: a model
of the Motorola CAP digital signal processing chip and the proof
of the correctness of the kernel of the floating point division
algorithm on the AMD$5_k$86 microprocessor by Advanced Micro
Devices, Inc.",
paper = "Kauf97.pdf"
}\end{chunk}
\index{Dijkstra, Edsger W.}
\begin{chunk}{ axiom.bib}
@misc{Dijk88a,
author = "Dijkstra, Edsger W.",
title = {{On the Cruelty of Really Teaching Computing Science}},
year = "1988",
link = "\url{http://www.cs. utexas. edu/users/ EWD/ewd10xx/ EWD1036. PDF}",
paper = "Dijk88a.pdf"
}\end{chunk}
\index{Pfenning, Frank}
\begin{chunk}{ axiom.bib}
@misc{Pfen12,
author = "Pfenning, Frank",
title = {{Proof Theory Foundations}},
year = "2012",
link = "\url{https://www.cs. uoregon. edu/research/ summerschool/ summer12/ videos/ Pfenning1_ 0.mp4}",
keywords = "DONE"
}\end{chunk}
\index{Avigad, Jeremy}
\begin{chunk}{ axiom.bib}
@misc{Avig20,
author = "Avigad, Jeremy",
title = {{Mathematical Logic}},
year = "2020",
comment = "book preprint",
paper = "Avig20.pdf"
}\end{chunk}
\index{Pfenning, Frank}
\begin{chunk}{ axiom.bib}
@misc{Pfen16,
author = "Pfenning, Frank",
title = {{Lecture Notes on Focusing}},
year = "2016",
link = "\url{http://www.cs. cmu.edu/ ~fp/courses/ 15816-f16/ lectures/ 18-focusing. pdf}",
paper = "Pfen16.pdf",
keywords = "printed"
}\end{chunk}
\index{Dowek, Gilles}
\begin{chunk}{ axiom.bib}
@misc{Dowe15,
author = "Dowek, Gilles",
title = {{Deduction Modulo Theory}},
year = "2015",
link = "\url{http://arxiv.org/ pdf/1501. 06523.pdf}",
paper = "Dowe15.pdf",
keywords = "printed"
}\end{chunk}
\index{Andreoli, Jean-Marc}
\begin{chunk}{ axiom.bib}
@article{Andr01,
author = "Andreoli, Jean-Marc",
title = {{Focussing and Proof Construction}},
journal = "Annals of Pure and Applied Logic",
volume = "107",
pages = "131-163",
year = "2001",
abstract =
"This paper proposes a synthetic presentation of the proof
construction paradigm, which underlies most of the research and
development in the so-called ``logic programming'' area. Two
essential aspects of this paradigm are discussed here: true
non-determinism and partial information. A new formulation of
Focussing, the basic property used to deal with non-determinism in
proof construction, is presented. This formulation is then used to
introduce a general constraint-based technique capable of dealing
with partial information in proof construction. One of the
baselines of the paper is to avoid to rely on syntax to describe
the key mechanisms of the paradigm. In fact, the bipolar
decomposition of formulas captures their main structure, which can
then be directly mapped into a sequent system that uses only
atoms. This system thus completely ``dissolves'' the syntax of the
formulas and retains only their behavioural content as far as
proof construction is concerned. One step further is taken with
the so-called ``abstract'' proofs, which dissolves in a similar
way the specific tree-like syntax of the proofs themselves and
retains only what is relevant to proof construction.",
paper = "Andr01.pdf",
keywords = "printed"
}\end{chunk}
\index{Touretzky, David S.}
\begin{chunk}{ axiom.bib}
@book{Tour90,
author = "Touretzky, David S.",
title = {{Common Lisp: A Gentle Introduction to Symbolic Computation}},
year = "1990",
publisher = "Benjamin/Cummings Publishing Company",
paper = "Tour90.pdf"
}\end{chunk}
\index{Lawvere, F. William}
\begin{chunk}{ axiom.bib}
@article{Lawv69,
author = "Lawvere, F. William",
title = {{Adjointness in Foundations}},
journal = "Dialectica",
volume = "23",
number = "3/4",
year = "1969",
pages = "281-296",
paper = "Lawv69.pdf",
keywords = "printed"
}\end{chunk}
\index{Lawvere, F. William}
\begin{chunk}{ axiom.bib}
@article{Lawv06,
author = "Lawvere, F. William",
title = {{Adjointness in Foundations: Author's Commentary}},
journal = "Theory and Applications of Categories",
volume = "16",
pages = "1-16",
year = "2006",
paper = "Lawv06.pdf"
}\end{chunk}
\index{Claessen, Koen}
\index{Hughes, John}
\begin{chunk}{ axiom.bib}
@inproceedings{Clae00,
author = "Claessen, Koen and Hughes, John",
title = {{QuickCheck: A Lightweight Tool for Random Testing of
Haskell Programs}},
booktitle = "Proc. 5th ACM SIGPLAN Conf. on Functional Programming",
publisher = "ACM",
pages = "268-279",
year = "2000",
link = "\url{cs.tufts.edu/ ~nr/cs257/ archive/ john-hughes/ quick.pdf} ",
abstract =
"QuickCheck is a tool which aids the Haskell programmer in
formulating and testing properties of programs. Properties are
described as Haskell functions, and can be automatically tested on
random input, but it is also possible to define custom test data
generators. We present a number of case studies, in which the tool
was successfully used, and also point out some pitfalls to
avoid. Random testing is especially suitable for functional
programs because properties can be stated at a fine grain. When a
function is built from separately tested components, then random
testing suffices to obtain good coverage of the definition under
test.",
paper = "Clae00.pdf",
keywords = "printed"
}\end{chunk}
\index{Hughes, John}
\begin{chunk}{ axiom.bib}
@misc{Hugh12,
author = "Hughes, John",
title = {{Monads and all that}},
year = "2012",
link = "\url{https://www.cs. uoregon. edu/research/ summerschool/ summer12/ curriculum. html}",
keywords = "DONE"}
\end{chunk}
\index{Hales, Thomas C.}
\begin{chunk}{ axiom.bib}
@misc{Hale18,
author = "Hales, Thomas C.",
title = {{A Review of the Lean Theorem Prover}},
year = "2018",
link = "\url{https://jiggerwit.wordpress.com/2018/09/18/a-review-of-the-lean-theorem-prover}",
keywords = "printed, DONE"
}\end{chunk}
\index{Selsam, Daniel}
\begin{chunk}{ axiom.bib}
@misc{Sels16,
author = "Selsam, Daniel",
title = {{A Standalone Proof-checker for the Lean Theorem Prover}},
year = "2016",
link = "\url{www.scs.stanford. edu/16wi- cs240h/ projects/ selsam. pdf}",
comment = "\url{https://github. com/dselsam/ tc}",
paper = "Sels16.pdf",
keywords = "printed"
}\end{chunk}
\index{Backhouse, Roland}
\index{Chisholm, Paul}
\index{Malcolm, Grant}
\index{Saaman, Erik}
\begin{chunk}{ axiom.bib}
@misc{Back88,
author = "Backhouse, Roland and Chisholm, Paul and Malcolm, Grant
and Saaman, Erik",
title = {{Do-it-yourself Type Theory}},
year = "1988",
link = "\url{www.cs.nott. ac.uk/~ psarb2/ MPC/DOYTypeTheo ry.pdf} ",
paper = "Back88.pdf",
keywords = "printed"
}\end{chunk}
\index{van der Hoeven, Joris}
\begin{chunk}{ axiom.bib}
@misc{Hoev20,
author = "van der Hoeven, Joris",
title = {{Overview of the Mathemagix Type System}},
year = "2020",
link = "\url{https://www.texmacs. org/joris/ mmxtyping/ mmxtyping. html}",
abstract =
"The goal of the Mathematix project is to develop a new and free
software for computer algebra and computer analysis, based on a
strongly typed and compiled language. In this paper, we focus on
the underlying type system of this language, which allows for
heavy overloading, including parameterized overloading with
parameters in so called ``categories''. The exposition is informal
and aims at giving the reader an overview of the main concepts,
ideas and differences with existing languages. In a forthcoming
paper, we intend to describe the formal semantics of the type
system in more details.",
keywords = "axiomref"
}\end{chunk}
\index{Bittner, Calvin John}
\index{Grossman, Bertrand M.}
\index{Jenks, Richard Dimick}
\index{Watt, Stephen Michael}
\index{Williams, Richard Quimby}
\begin{chunk}{ axiom.bib}
@misc{Bitt01,
author = "Bittner, Calvin John and Grossman, Bertrand M. and
Jenks, Richard Dimick and Watt, Stephen Michael and
Williams, Richard Quimby",
title = {{Computer-Program Compilers Comprising a Program
Augmentation Capability}},
link = "\url{https://cs.uwaterloo. ca/~smwatt/ pub/reprints/ 2001-us- 6223341. pdf}",
comment = "U.S. Patent 6,223,341",
year = "2001",
paper = "Bitt01.pdf"
}\end{chunk}
\index{Rijke, Eghert}
\begin{chunk}{ axiom.bib}
@mastersthesis{Rijk12,
author = "Rijke, Eghert",
title = {{Homotopy Type Theory}},
school = "Utrecht University",
year = "2012",
paper = "Rijk12.pdf"
}\end{chunk}
\index{Thompson, Simon}
\index{Timochouk, Leonid}
\begin{chunk}{ axiom.bib}
@misc{Thomxx,
author = "Thompson, Simon and Timochouk, Leonid",
title = {{The Aldor\-\- language}},
year = "unknown",
link = "\url{fricas-wiki.math. uni.wroc. pl/public/ refs/thompson- aldor.pdf} ",
abstract =
"This paper introduces the Aldor\-\- language, which is a
functional programming language with dependent types and a
powerful, type-based, overloading mechanism. The language is built
on a subset of Aldor, the `library compiler' language for the
Axiom computer algebra system. Aldor\-\- is designed with the
intention of incorporating logical reasoning into computer algebra
computations.The paper contains a formal account of the semantics and type
system of Aldor\-\-; a general discussion of overloading and how
the overloading in Aldor\-\- fits into the general scheme;
examples of logic within Aldor\-\- and notes on the implementation
of the system.",
paper = "Thomxx.pdf",
keywords = "printed, axiomref"
}\end{chunk}
\index{Harvey, David}
\begin{chunk}{ axiom.bib}
@misc{Harv20,
author = "Harvey, David",
title = {{An Exponent One-Fifth Algorithm for Deterministic Integer
Factorisation}},
year = "2020",
link = "\url{https://arxiv. org/pdf/ 2010.05450. pdf}",
abstract =
"Hittmeir recently presented a deterministic algorithm that
provably computes the prime factorisation of a positive integer
$N$ in $N^{2/9+o(1)}$ bit operations. Prior to this breakthrough,
the best known complexity bound for this problem was
$N^{1/4+o(1)}$, a result going back to the 1970s. In this paper we
push Hittmeir's techniques further, obtaining a rigorous,
deterministic factoring algorithm with complexity $N^{1/5+o(1)}$.",
paper = "Harv20.pdf"
}\end{chunk}
\index{Fateman, Richard}
\index{James, Timothy}
\begin{chunk}{ axiom.bib}
@misc{Fate99c,
author = "Fateman, Richard and James, Timothy",
title = {{Analysis of a Web User Interface for Mathematics}},
year = "1999",
link = "\url{https://people. eecs.berkeley. edu/~fateman/ papers/ tjames. pdf}",
abstract =
"What can we learn from the range of over 7000 queries made to
TILU, a symbolic integration problem-solving server during the
course of more than two years? We have saved all queries during
this experiment, and based on our analysis, and experimented with
improvements in the computer-human interaction components of our
web server. We believe our experience will be useful in the design
of similar servers that require human input of mathematics.",
paper = "Fate99c.pdf"
}\end{chunk}
\index{Rabe, Florian}
\begin{chunk}{ axiom.bib}
@misc{Rabe20,
author = "Rabe, Florian",
title = {{MMT: A Foundation-Independent Logical System}},
year = "2020",
link = "\url{https://vimeo. com/421123419}",
keywords = "DONE"
}\end{chunk}
\index{McBride, Conor}
\begin{chunk}{ axiom.bib}
@misc{Mcbr20,
author = "McBride, Conor",
title = {{Epigram 2 - Autopsy, Obituary, Apology}},
year = "2020",
link = "\url{https://vimeo. com/428161108}",
keywords = "DONE"
}\end{chunk}
\index{Greenberg, Michael}
\begin{chunk}{ axiom.bib}
@inproceedings{Gree19,
author = "Greenberg, Michael",
title = {{The Dynamic Practice and Static Theory of Gradual
Typing}},
booktitle = "3rd Summit on Advances in Programming Languages",
publisher = "Dagstuhl Publishing",
pages = "6:1-6:20",
year = "2019",
abstract =
"We can tease apart the research on gradual types into two
'lineages': a pragmatic, implementation-oriented, dynamic-first
lineage and a formal, type-theoretic, static-first lineage. The
dynamic-first lineage's focus is on taming particular idioms --
'pre-existing conditions' in untyped programming languages. The
static-first lineage's focus is on interoperation and individual
type system features, rather than the collection of features found
in any particular language. Both appear in programming languages
research under the name ``gradual typing'', and they are in active
conversation with each other.What are these two lineages? What challenges and opportunities
await the static-first lineage? What progress has been made so
far?",
paper = "Gree19.pdf"
}\end{chunk}
\index{Garcia, Ronald}
\begin{chunk}{ axiom.bib}
@misc{Garc18,
author = "Garcia, Ronald",
title = {{Gradual Typing}},
year = "2018",
link = "\url{www.youtube. com/watch? v=fQRRxaWsuxI} ",
}\end{chunk}
\index{Henglein, Fritz}
\begin{chunk}{ axiom.bib}
@article{Heng94,
author = "Henglein, Fritz",
title = {{Dynamic Typing: Syntax and Proof Theory}},
journal = "Science of Computer Programming",
volume = "23",
pages = "197-230",
year = "1994",
abstract =
"We present the {\sl dynamically typed $\lambda$-calculus} , an
extension of the statically typed $\lambda$-calculus with a
special type Dyn and explicit {\sl dynamic type coercions}
corresponding to run-time type tagging and type check-and-untag
operations. Programs in run-time typed languages can be
interpreted in the dynamically typed $\lambda$-calculus via a
nondeterministic {\sl completion process} that inserts explicit
coercions and type declarations such that a well-typed term
results.We characterize when two different completions of the same
run-time typed programs are {\sl coherent} with an equational
theory that is independent of the underlying
$\lambda$-theory. This theory is refined by orienting some
equations to define {\sl safety} and {\sl minimality} of
completions. Intuitively, a safe completion is one that does not
produce an error at run-time which another completion would have
avoided, and a minimal completion is a safe completion that
executes fewest tagging and check-and-untag operations amongst all
safe completions.We show that every untyped $\lambda$-term has a safe completion at
any type and that it is unique modulo a suitable congruence
relation. Furthermore, we present a rewriting system for
generating minimal completions. Assuming strong normalization of
this rewriting system we show that every $\lambda I$-term has a
minimal completion at any type, which is furthermore unique modulo
equality in the dynamically typed $\lambda$-calculus. ",
paper = "Heng94.pdf"
}\end{chunk}
\index{Wright, Andrew K.}
\index{Felleisen, Mattias}
\begin{chunk}{ axiom.bib}
@techreport{Wrig92,
author = "Wright, Andrew K. and Felleisen, Mattias",
title = {{A Syntactic Approach to Type Soundness}},
type = "technical report",
institution = "Rice University",
number = "TR91-160",
year = "1992",
abstract =
"We present a new approach to proving type soundness for
Hindley/Milner- style polymorphic type systems. The keys to our
approach are (1) an adaptation of subject reduction theorems from
combinatory logic to programming languages, and (2) the use of
rewriting techniques for the specification of the language
semantics. The approach easily extends from polymorphic functional
languages to imperative languages that provide references,
exceptions, continuations, and similar features. We illustrate the
technique with a type soundness theorem for the core of Standard
ML, which includes the first type soundness proof for polymorphic
exceptions and continuations.",
paper = "Wrig92.pdf",
keywords = "printed"
}\end{chunk}
\index{Jones, Neil D.}
\index{Gomard, Carsten K.}
\index{Sestoft, Peter}
\begin{chunk}{ axiom.bib}
@book{Jone93,
author = "Jones, Neil D. and Gomard, Carsten K. and Sestoft, Peter",
title = {{Partial Evaluation and Automatic Program Generation}},
year = "1993",
publisher = "Prentice Hall",
link = "\url{www.itu.dk/~sestoft/pebook/jonesgomardsestoft-letter.pdf}",
paper = "Jone93.pdf"
}\end{chunk}
\index{Prawitz, Dag}
\begin{chunk}{ axiom.bib}
@book{Praw65,
author = "Prawitz, Dag",
title = {{Natural Deduction}},
publisher = "Dover",
isbn = "978-0-486-44655-4",
keywords = "shelf"
}\end{chunk}
\index{Negri, Sara}
\index{von Plato, Jan}
\begin{chunk}{ axiom.bib}
@book{Negr01,
author = "Negri, Sara and von Plato, Jan",
title = {{Structural Proof Theory}},
publisher = "Cambridge University Press",
year = "2001",
isbn = "978-0-521-06842-0",
keywords = "shelf"
}\end{chunk}
\index{Herda, Michal}
\begin{chunk}{ axiom.bib}
@book{Herd20,
author = "Herda, Michal",
title = {{The Common Lisp Condition System}},
publisher = "Apress",
year = "2020",
isbn = "978-1-4842-6133-0",
keywords = "shelf"
}\end{chunk}
\index{Troelstra, A.S.}
\index{Schwichtenberg, H.}
\begin{chunk}{ axiom.bib}
@book{Troe96,
author = "Troelstra, A.S. and Schwichtenberg, H.",
title = {{Basic Proof Theory}},
year = "1996",
publisher = "Cambridge",
isbn = "0-521-77911-1",
keywords = "shelf"
}\end{chunk}
\index{Reynolds, John C.}
\begin{chunk}{ axiom.bib}
@book{Reyn81,
author = "Reynolds, John C.",
title = {{The Craft of Programming}},
publisher = "Prentice-Hall",
year = "1981",
isbn = "0-13-188862-5",
keywords = "shelf"
}\end{chunk}
\index{Hindley, J. Roger}
\begin{chunk}{ axiom.bib}
@book{Hind97,
author = "Hindley, J. Roger",
title = {{Basic Simple Type Theory}},
publisher = "Cambridge",
year = "1997",
isbn = "0-521-05422-2",
keywords = "shelf"
}\end{chunk}
- 2920. By daly
-
books/bookvolbib add references
Goal: Proving Axiom Sane
\index{Driscoll, Kevin R.}
\begin{chunk}{ axiom.bib}
@misc{Dris20,
author = "Driscoll, Kevin R.",
title = {{Murphy Was An Optimist}},
year = "2020",
link = "\url{www.rvs.uni- bielefeld. de/publications /DriscollMurphy v19.pdf} ",
comment = "<email address hidden>",
paper = "Dris20.pdf"
}\end{chunk}
\index{Doctorow, Cory}
\begin{chunk}{ axiom.bib}
@misc{Doct11,
author = "Doctorow, Cory",
title = {{The Coming War on General Computation}},
year = "2011",
link = "\url{http://opentranscripts .org/transcript /coming- war-on- general- computation}",
keywords = "DONE"
}\end{chunk}
\index{Farvardin, Kavon}
\index{Reppy, John}
\begin{chunk}{ axiom.bib}
@inproceedings{Farv20,
author = "Farvardin, Kavon and Reppy, John",
title = {{From Folklore to Fact: Comparing Implementation of Stacks
and Continuations}},
booktitle = "Programming Language Design and Implementation",
publisher = "ACM SIGPLAN",
year = "2020",
abstract =
"The efficient implementation of function calls and non-local
control transfers is a critical part of modern language
implementations and is important in the implementation of
everything from recursion, higher-order functions, concurrency and
coroutines, to task-based parallelism. In a compiler, these
features can be supported by a variety of mechanisms, including
call stacks, segmented stacks, and heap-allocated continuation
closures.An implementor of a high-level language with advanced control
features might ask the question ``what is the best choice for my
implementation?'' Unfortunately, the current literature does not
provide much guidance, since previous studies suffer from various
flaws in methodology and are outdated for modern hardware. In the
absence of recent, well-normalized measurements and a holistic
overview of their implementation specifics, the path of least
resistance when choosing a strategy is to trust folklore, but the
folklore is also suspect.This paper attempts to remedy this situation by providing an
``apples-to-apples'' comparison of five different approaches to
implementing call stacks and continuations. This comparison uses
the same source language, compiler pipeline, LLVM-backend, and
runtime system, with the only differences being those required by
the differences in implementation strategy. We compare the
implementation challenges of the different approaches, their
sequential performance, and their suitability to support advanced
control mechanisms, including supporting heavily threaded code. In
addition to the comparison of implementation strategies, the
paper's contributions also include a number of useful
implementation techniques that we discover along the way.",
paper = "Farv20.pdf"
}\end{chunk}
\index{Srivastava, Saurabh}
\index{Gulwani, Sumit}
\index{Foster, Jeffrey S.}
\begin{chunk}{ axiom.bib}
@inproceedings{Sriv10,
author = "Srivastava, Saurabh and Gulwani, Sumit and Foster, Jeffrey S.",
title = {{From Program Verification to Program Synthesis}},
booktitle = "37th Symp. on Principles of Programming Languages",
publisher = "ACM",
pages = "313-326",
year = "2010",
abstract =
"This paper describes a novel technique for the synthesis of
imperative programs. Automated program synthesis has the potential
to make programming and the design of systems easier by allowing
programs to be specified at a higher-level than executable
code. In our approach, which we call proof-theoretic synthesis,
the user provides an input-output functional specification, a
description of the atomic operations in the progamming language,
and a specification of the synthesized program's looping
structure, allowed stack space, and bound on usage of certain
operations. Our technique synthesizes a program, if there exists
one, that meets the input-output specification and uses only the
given resources.The insight behind our approach is to interpret program synthesis
as generalized program verification, which allows us to bring
verification tools and techniques to program synthesis. Our
synthesis algorithm works by creating a program with unknown
statements, guards, inductive invariants, and ranking
functions. It then generates constraints that relate the unknowns
and enforces three kinds of requirements: partial correctness,
loop termination, and well-formedness conditions on program
guards. We formalize the requirements that program verification
tools must meet to solve these constraints and use tools from
prior work as our synthesizers.We demonstrate the feasibility of the proposed approach by
synthesizing programs in three different domains: arithmetic,
sorting, and dynamic programming. Using verification tools that we
previously built in the VS3 project we are able to synthesize
programs for complicated arithmetic algorithms including
Strassen's matrix multiplication and Bresenham's line drawing;
several sorting algorithms; and several dynamic programming
algorithms. For these programs, the median time for synthesis is
14 seconds, and the ratio of synthesis to verification time ranges
between 1x to 92x (with a median of 7x), illustrating the potential
of the approach.",
paper = "Sriv10.pdf"
}\end{chunk}
\index{Hoare, Tony}
\begin{chunk}{ axiom.bib}
@inproceedings{Hoar95,
author = "Hoare, Tony",
title = {{Unification of Theories: A Challenge for Computing Science}},
booktitle = "Lecture Notes in Computer Science",
publisher = "Springer",
volume = "1130",
year = "1995",
abstract =
"Unification of theories is the long-standing goal of the natural
sciences; and modern physics offers a spectacular paradigm of its
achievement. The structure of modern mathematics has also been
determined by its great unifying theories -- topology, algebra and
the like. The same ideals and goals are shared by researchers and
students of theoretical computing science.",
paper = "Hoar95.pdf"
}\end{chunk}
\index{Mylonakis, Nikos}
\begin{chunk}{ axiom.bib}
@inproceedings{Mylo95,
author = "Mylonakis, Nikos",
title = {{Behavioural Specifications in Type Theory}},
booktitle = "Lecture Notes in Computer Science",
publisher = "Springer",
volume = "1130",
year = "1995",
abstract =
"In this paper we give a new view of the type theory UTT (Uniform
theory of dependent types) [5] as a system to formally develop
programs from algebraic specifications, comparable to
e.g. EML([9]). We will focus our attention on behavioural
specifications since they have not been deeply studied in a type
theoretical setting, and we describe how to develop proofs about
behavioural satisfaction.",
paper = "Mylo95.pdf"
}\end{chunk}
\index{Shparlinski, Igor E.}
\begin{chunk}{ axiom.bib}
@book{Shpa99,
author = "Shparlinski, Igor E.",
title = {{Finite Fields: Theory and Computation}},
publisher = "Kluwer Academic",
year = "1999",
isbn = "0-7923-5662-4",
keywords = "axiomref"
}\end{chunk}
\index{Lescanne, Pierre}
\begin{chunk}{ axiom.bib}
@inproceedings{Lesc95,
author = "Lescanne, Pierre",
title = {{The Lambda Calculus as an Abstract Data Type}},
booktitle = "Lecture Notes in Computer Science",
publisher = "Springer",
volume = "1130",
year = "1995",
abstract =
"Calculi of explicit substitutions are theoretical tools to
describe lambda calculus as first-order rewrite systems. Therefore
a presentation as an abstract data type is possible. During an
invited lecture at the Workshop on Abstract Data Types, we
presented calculi of explicit substitutions, especially the
calculus $\lambda$v. The theoretical background of $\lambda$v can
be found in [1], therefore we are not entering in the details of
the theory as we did during the oral presentation. However, we
take the essence of the talk, namely the formalization of
$\lambda$v in a specification language based on ADT. We have
chosen LSL, the LARCH Shared Language [7] to illustrate the beauty
of abstract data types which lies in its precision and
concision. Moreover we take freedom with respect to this
language.",
paper = "Lesc95.pdf",
keywords = "printed"
}\end{chunk}
\index{Lamport, Leslie}
\begin{chunk}{ axiom.bib}
@misc{Lamp09,
author = "Lamport, Leslie",
title = {{The PlusCal Algorithm Language}},
year = "2009",
link = "\url{https://lamport. azurewebsites. net/pubs/ pluscal. pdf}",
abstract =
"Algorithms are different from programs and should not be
described with programming languages. The only simple alternative
to programming languages has been pseudo-code. PlusCal is an
algorithm language that can be used right now to replace
pseudo-code, for both sequential and concurrent algorithms. It is
based on the TLA+ specification language, and a PlusCal algorithm
is automatically translated to a TLA+ specification that can be
checked with the TLC model checker and reasoned about formally.",
paper = "Lamp09.pdf",
keywords = "printed"
}\end{chunk}
\index{Sedgewick, Robert}
\begin{chunk}{ axiom.bib}
@book{Sedg88,
author = "Sedgewick, Robert",
title = {{Algorithms}},
publisher = "Addison-Wesley",
year = "1988"
}\end{chunk}
\index{Armstrong, Joe}
\begin{chunk}{ axiom.bib}
@misc{Arms14,
author = "Armstrong, Joe",
title = {{The Mess We're In}},
link = "\url{https://www.youtube. com/watch? v=lKXe3HUG2l4}",
year = "2014",
keywords = "axiomref, DONE"
}\end{chunk}
\index{Turing, A. M.}
\begin{chunk}{ axiom.bib}
@article{Turi49,
author = "Turing, A. M.",
title = {{Checking a Large Routine}},
journal = "Annals of the History of Computing",
volume = "6",
number = "2",
year = "1949",
abstract =
"The standard references for work on program proofs attribute the
early statement of direction to John McCarthy (e.g. McCarthy
1963); the first workable methods to Peter Naur (1966) and Robert
Floyd (1967); and the provision of more formal systems to C.A.R
Hoare (1969) and Edsger Dijkstra (1976). The early papers of some
of the computing pioneers, however, show an awareness of the need
for proofs of program correctness and even present workable methods
(e.g. Goldstine and von Neumann 1947; Turing 1949).The 1949 paper by Alan M. Turing is remarkable in many
respects. The three (foolscap) pages of text contain an excellent
motivation by analogy, a proof of a program with two nested loops,
and an indication of a general proof method very like that of
Floyd. Unfortunately, the paper is made extremely difficult to
read by a large number of transcription errors. For example, all
instances of the factorial sign (Turing used $|n$) have been
omitted in the commentary, and ten other identifiers are written
incorrectly. It would appear to be worth correcting these errors
and commenting on the proof from the viewpoint of subsequent work
on program proofs.Turing delivered this paper in June 1949, at the inaugural
conference of the EDSAC, the computer at Cambridge University
built under the direction of Maurice V. Wilkes. Turing had been
writing programs for an electronic computer since the end of 1945
-- at first for the proposed ACE, the computer project at the
National Physical Laboratory, but since October 1948 for the
Manchester prototype computer, of which he was deputy
director. The references in his paper to $2^{40}$ are reflections
of the 40-bit ``lines'' of the Manchester machine storage system.The following is the text of Turing's 1949 paper, corrected to the
best of our ability to what we believe Turing intended. We have
made no changes in spelling, punctuation, or grammar.",
paper = "Turi49.pdf"
}\end{chunk}
\index{Hoare, Charles Antony Richard}
\begin{chunk}{ axiom.bib}
@article{Hoar81,
author = "Hoare, Charles Antony Richard",
title = {{The Emperor's Old Clothes}},
journal = "CACM",
volume = "24",
number = "2",
pages = "75-83",
year = "1981",
abstract =
"The author recounts his experiences in the implementation,
design, and standardization of computer programming languages, and
issues a warning for the future.",
paper = "Hoar81.pdf",
keywords = "DONE"
}\end{chunk}
\index{Wirth, Niklaus}
\begin{chunk}{ axiom.bib}
@article{Wirt95,
author = "Wirth, Niklaus",
title = {{A Plea for Lean Software}},
publisher = "IEEE",
journal = "Computer",
year = "1995",
pages = "64-68",
paper = "Wirt95.pdf",
keywords = "DONE"
}\end{chunk}
\index{Liskov, Barbara}
\begin{chunk}{ axiom.bib}
@misc{Lisk12,
author = "Liskov, Barbara",
title = {{Programming the Turing Machine}},
year = "2012",
link = "\url{https://www.youtube. com/watch? v=ibRar7sWulM}",
keywords = "DONE"
}\end{chunk}
\index{Di Franco, Anthony}
\index{Rubio-Gonzalez, Cindy}
\begin{chunk}{ axiom.bib}
@misc{Difr17,
author = "Di Franco, Anthony and Rubio-Gonzalez, Cindy",
title = {{A Comprehensive Study of Real-World Numerical Bug
Characteri stics}} ,
year = "2017",
link = "\url{https://web.cs. ucdavis. edu/~rubio/ includes/ ase17.pdf}",
abstract =
"Numerical software is used in a wide variety of applications
including safety-critical systems, which have stringent
correctness requirements, and whose failures have catastrophic
consequences that endanger human life. Numerical bugs are known to
be particularly difficult to diagnose and fix, largely due to the
use of approximate representations of numbers such as floating
point. Understanding the characteristics of numerical bugs is the
first step to combat them more effectively. In this paper, we
present the first comprehensive study of real-world numerical
bugs. Specifically, we identify and carefully examine 269
numerical bugs from five widely-used numerical software libraries:
NumPy, SciPy, LAPACK, GNU Scientific Library, and Elemental. We
propose a categorization of numerical bugs, and discuss their
frequency, symptoms and fixes. Our study opens new directions in
the areas of program analysis, testing, and automated program
repair of numerical software, and provides a collection of
real-world numerical bugs.",
paper = "Difr17.pdf"
}\end{chunk}
\index{Talcott, Carolyn}
\begin{chunk}{ axiom.bib}
@inproceedings{Talc90,
author = "Talcott, Carolyn",
title = {{A Theory for Program and Data Type Specifications}},
booktitle = "DISCO 1990",
year = "1990",
pages = "91-100",
paper = "Talc90.pdf"
}\end{chunk}
\index{Jouannaud, Jean-Pierre}
\index{Marche, Claude}
\begin{chunk}{ axiom.bib}
@inproceedings{Joua90,
author = "Jouannaud, Jean-Pierre and Marche, Claude",
title = {{Completion modulo Associativity, Commutativity and
Identity (ACI)}},
booktitle = "DISCO 1990",
year = "1990",
pages = "111-120",
abstract =
"Rewriting with associativity, commutativity and identity has been
an open problem for a long time. In a recent paper [PBW89], Baird,
Peterson and Wilkerson introduced the notion of constrained
rewriting, to avoid the problem of non-termination inherent to the
use of identities. We build up on this idea in two ways: by giving
a complete set of rules for completion modulo these axioms; by
showing how to build appropriate orderings for proving termination
of constrained rewriting modulo associativity, commutativity and
identity.",
paper = "Joua90.pdf",
keywords = "printed"
}\end{chunk}
\index{Reynaud, Jean-Claude}
\begin{chunk}{ axiom.bib}
@inproceedings{Reyn90,
author = "Reynaud, Jean-Claude",
title = {{Putting Algebraic Components Together: A Dependent Type
Approach}},
booktitle = "DISCO 1990",
year = "1990",
pages = "141-150",
abstract =
"We define a framework based on dependent types for putting
algebraic components together. It is defined with freely generated
categories. In order to preserve initial, loose and constrained
semantics of components, we introduce the notion of
SPEC-categories which look like specific finitely co-complete
categories. A constructive approach which includes
parameterization techniques is used to define new components from
basic predefined ones. The problem of the internal coding of
external signature symbols is introduced.",
paper = "Reyn90.pdf",
keywords = "printed"
}\end{chunk}
\index{Dewar, M.C.}
\index{Richardson, M.G.}
\begin{chunk}{ axiom.bib}
@inproceedings{Dewa90,
author = "Dewar, M.C. and Richardson, M.G.",
title = {{Reconciling Symbolic and Numeric Computation in a
Practical Setting}},
booktitle = "DISCO 1990",
year = "1990",
pages = "195-204",
paper = "Dewa90.pdf",
keywords = "printed"
}\end{chunk}
\index{Jebelean, Tudor}
\begin{chunk}{ axiom.bib}
@inproceedings{Jebe93,
author = "Jebelean, Tudor",
title = {{Improving the Multiprecision Euclidean Algorithm}},
booktitle = "DISCO 1993",
year = "1993",
pages = "45-58",
publisher = "Springer",
abstract =
"We improve the implementation of Lehmer-Euclid algorithm for
multiprecision integer GCD computations by partial consequence
computation on pairs of double digits, enhanced conditions for
exiting the partial consequence computation, and approximative GCD
computation. The combined effect of these improvements is an
experimentally measured speed-up by a factor of 2 over the
currently used implementation.",
paper = "Jebe93.pdf",
keywords = "printed"
}\end{chunk}
\index{Hennicker, Rolf}
\begin{chunk}{ axiom.bib}
@inproceedings{Henn90,
author = "Hennicker, Rolf",
title = {{Context Induction: A Proof Principle for Behavioural
Abstractions}},
booktitle = "DISCO 1990",
year = "1990",
pages = "101-110",
abstract =
"An induction principle, called context induction, is presented
which is appropriate for the verification of behavioural
properties of abstract data types. The usefulness of the proof
principle is documented by several applications: the verification
of behavioural theorems over a behavioural specification, the
verification of behavioural implementations and the verification
of ``forget-restrict-identify'' implementations.

In particular it is shown that behavioural implementations and
``forget-restrict-identify'' implementations (under certain
assumptions) can be characterized by the same context condition,
i.e. (under the given assumptions) both concepts are
equivalent. This leads to the suggestion to use context induction
as a uniform proof method for correctness proofs of formal
implementations.",
paper = "Henn90.pdf",
keywords = "printed"
}\end{chunk}
\index{Calmet, J.}
\index{Tjandra, I.A.}
\begin{chunk}{ axiom.bib}
@inproceedings{Calm93,
author = "Calmet, J. and Tjandra, I.A.",
title = {{A Unified-Algebra-based Specification Language for
Symbolic Computing}},
booktitle = "DISCO 1993",
year = "1993",
pages = "122-133",
publisher = "Springer",
abstract =
"A precise and perspicuous specification of mathematical domains
of computation and their inherently related type inference
mechanisms is a prerequisite for the design and systematic
development of a system for symbolic computing. This paper
describes FORMAL, a language for giving modular and
well-structured specifications of such domains and particularly of
``mathematical objects''. A novel framework for algebraic
specification involving so-called ``unified algebras'' has been
adopted, where sorts are treated as values. The adoption of this
framework aims also at being capable of specifying polymorphism,
unifying the notions of ``parametric'' and ``inclusion''
polymorphisms. Furthermore, the operational nature of the
specification formalisms allows a straightforward transformation
into an executable form.",
paper = "Calm93.pdf",
keywords = "printed, axiomref"
}\end{chunk}
\index{Fritzson, Peter}
\index{Engelson, Vadim}
\index{Viklund, Lars}
\begin{chunk}{ axiom.bib}
@inproceedings{Frit93,
author = "Fritzson, Peter and Engelson, Vadim and Viklund, Lars",
title = {{Variant Handling, Inheritance and Composition in the
ObjectMath Computer Algebra Environment}},
booktitle = "DISCO 1993",
year = "1993",
pages = "145-163",
publisher = "Springer",
abstract =
"ObjectMath is a high-level programming environment and modeling
language for scientific computing which supports variants and
graphical browsing in the environment and integrates
object-oriented constructs such as classes and single and multiple
inheritance within a computer algebra language. In addition,
composition of objects using the part-of relation and support for
solution of systems of equations is provided. This environment is
currently being used for industrial applications in scientific
computing. The ObjectMath environment is designed to handle
realistic problems. This is achieved by allowing the user to
specify transformations and simplifications of formulae in the
model, in order to arrive at a representation which is efficiently
solvable. When necessary, equations can be transformed to C++ code
for efficient numerical solution. The re-use of equations through
inheritance in general reduces models by a factor of two to three,
compared to a direct representation in the Mathematica computer
algebra language. Also, we found that multiple inheritance from
orthogonal classes facilitates re-use and maintenance of
application models.",
paper = "Frit93.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Grivas, Georgios}
\index{Maeder, Roman E.}
\begin{chunk}{ axiom.bib}
@inproceedings{Griv93,
author = "Grivas, Georgios and Maeder, Roman E.",
title = {{Matching and Unification for the Object-Oriented Symbolic
Computation System}},
booktitle = "DISCO 1993",
year = "1993",
pages = "164-176",
publisher = "Springer",
abstract =
"Term matching has become one of the most important primitive
operations for symbolic computation. This paper describes the
extension of the object-oriented symbolic computation system
AlgBench with pattern matching and unification facilities. The
various pattern objects are organized in subclasses of the class
of the composite expressions. This leads to a clear design and a
distributed implementation of the pattern matcher in the
subclasses. New pattern object classes can consequently be added
easily to the system. Huet's and our simple mark and retract
algorithm for standard unification as well as Stickel's algorithm
for associative commutative unification have been implemented in
an object-oriented style. Unifiers are selected at runtime. We
extend Mathematica's type-constrained pattern matching by taking
into account inheritance information from a user-defined hierarchy
of object types. The argument unification is basically instance
variable unification. The improvement of the pattern matching
operation of a rule- and object-based symbolic computation system
with unification in an object-oriented way seems to be very
appropriate.",
paper = "Griv93.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Santas, Philip S.}
\begin{chunk}{ axiom.bib}
@inproceedings{Sant93,
author = "Santas, Philip S.",
title = {{A Type System for Computer Algebra}},
booktitle = "DISCO 1993",
year = "1993",
pages = "177-191",
publisher = "Springer",
abstract =
"We examine type systems for support of subtypes and categories in
computer algebra systems. By modeling representation of instances
in terms of existential types instead of recursive types, we
obtain not only a simplified model, but we build a basis for
defining subtyping among algebraic domains. The introduction of
metaclasses, facilitates the task, by allowing the inference of
type classes. By means of type classes and existential types we
construct subtype relations without involving coercions.",
paper = "Sant93.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Zwillinger, Daniel}
\begin{chunk}{ axiom.bib}
@book{Zwil92,
author = "Zwillinger, Daniel",
title = {{Handbook of Integration}},
publisher = "Jones and Bartlett",
year = "1992",
isbn = "0-86720-293-9",
keywords = "axiomref"
}\end{chunk}
\index{Limongelli, C.}
\index{Temperini, M.}
\begin{chunk}{ axiom.bib}
@inproceedings{Limo93,
author = "Limongelli, C. and Temperini, M.",
title = {{On the Uniform Representation of Mathematical Data
Structures}},
booktitle = "DISCO 1993",
year = "1993",
pages = "319-330",
publisher = "Springer",
abstract =
"Topics about the integration of the numeric and symbolic
computation paradigms are discussed. Mainly an approach through a
uniform representation of numbers and symbols is presented, that
allows for the application of algebraic algorithms to numeric
problems. The $p$-adic constructions is the basis of the unifying
representation environment. An integrated version of the Hensel
algorithm is presented, which is able to perform symbolic and
numeric computations over instances of ground (concrete) and
parametric structures, and symbolic computations over instances of
abstract structures. Examples are provided to show how the
approach outlined and the proposed implementation can treat both
cases of symbolic and numeric computations. In the numeric case it
is shown that the proposed extension of the Hensel Algorithm can
allow for the exact manipulation of numbers. Moreover, such an
extension avoids the use of simplification algorithms, since the
computed results are already in simplified form.",
paper = "Limo93.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Walsh, Toby}
\begin{chunk}{ axiom.bib}
@inproceedings{Wals93,
author = "Walsh, Toby",
title = {{General Purpose Proof Plans}},
booktitle = "DISCO 1993",
year = "1993",
pages = "319-330",
publisher = "Springer",
abstract =
"One of the key problems in the design and implementation of
automated reasoning systems is the specification of heuristics to
control search. ``Proof planning'' has been developed as a
powerful technique for declaratively specifying high-level proof
strategies. This paper describes an extension to proof planning to
allow for the specification of more general purpose plans.",
paper = "Wals93.pdf"
}\end{chunk}
\index{Calmet, J.}
\index{Comon, H.}
\index{Lugiez, D.}
\begin{chunk}{ axiom.bib}
@article{Calm89,
author = "Calmet, J. and Comon, H. and Lugiez, D.",
title = {{Type Inference Using Unification in Computer Algebra}},
journal = "LNCS",
volume = "307",
publisher = "Springer",
paper = "Calm89.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Poll, Erik}
\begin{chunk}{ axiom.bib}
@misc{Pollxx,
author = "Poll, Erik",
title = {{The type system of Axiom}},
link = "\url{https://www.cs.ru.nl/E.Poll/talks/axiom.pdf}",
paper = "Pollxx.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Pottier, Francois}
\index{Regis-Gianas, Yann}
\begin{chunk}{ axiom.bib}
@inproceedings{Pott06,
author = "Pottier, Francois and Regis-Gianas, Yann",
title = {{Stratified Type Inference For Generalized Algebraic Data
Types}},
booktitle = "POPL'06",
year = "2006",
publisher = "ACM",
abstract =
"We offer a solution to the type inference problem for an
extension of Hindley and Milner's type system with generalized
algebraic data types. Our approach is in two {\sl strata}. The
bottom stratum is a core language that marries type
{\sl inference} in the style of Hindley and Milner with type
{\sl checking} for generalized algebraic data types. This results
in an extremely simple specification, where case constructs must
carry an explicit type annotation and type conversions must be
made explicit. The top stratum consists of (two variants of) an
independent {\sl shape inference} algorithm. This algorithm
accepts a source term that contains some explicit type
information, propagates this information in a local, predictable
way, and produces a new source term that carries more explicit
type information. It can be viewed as a preprocessor that helps
produce some of the type annotations required by the bottom
stratum. It is proven {\sl sound} in the sense that it never
inserts annotations that could contradict the type derivation that
the programmer has in mind.",
paper = "Pott06.pdf"
}\end{chunk}
\index{Calmet, Jacques}
\index{Lugiez, Denis}
\begin{chunk}{ axiom.bib}
@article{Calm87a,
author = "Calmet, Jacques and Lugiez, Denis",
title = {{A Knowledge-based System for Computer Algebra}},
journal = "ACM SIGSAM Bulletin",
volume = "21",
number = "1",
year = "1987",
abstract =
"This paper reports on a work in progress aiming at designing and
implementing a system for representing and manipulating
mathematical knowledge. Its kernel is a computer algebra system
but it shows several of the features of the so-called
knowledge-based systems. The main issues considered here are the
software engineering aspects of the project, the definition of a
new language to support the system and the use of AI techniques in
a field where algebraic algorithms are the building stones of
systems. This defines an environment which enables not only to
have data-bases of knowledge but also to implement an expert use
of this knowledge.",
paper = "Calm87a.pdf",
keywords = "axiomref"
}\end{chunk}
\begin{chunk}{ axiom.bib}
@misc{HOPLxx,
author = "unknown",
title = {{Scratchpad II}},
year = "2020",
link = "\url{https://hopl.info/showlanguage2.prx?exp=566}",
keywords = "axiomref"
}\end{chunk}
\index{Davenport, James H.}
\begin{chunk}{ axiom.bib}
@misc{Dave86a,
author = "Davenport, James H.",
title = {{SCRATCHPAD II Programming Language Reference}},
comment = "IBM T.J. Watson",
year = "1986"
}\end{chunk}
\index{Jenks, R.}
\index{Trager, B.}
\index{Watt, S.M.}
\index{Sutor, R.S.}
\begin{chunk}{ axiom.bib}
@misc{Jenk85,
author = "Jenks, R. and Trager, B. and Watt, S.M. and Sutor, R.S.",
title = {{Scratchpad II Programming Language Manual}},
year = "1985",
keywords = "axiomref"
}\end{chunk}
\index{Chudnovsky, David V.}
\index{Jenks, Richard D.}
\begin{chunk}{ axiom.bib}
@book{Chud89,
author = "Chudnovsky, David V. and Jenks, Richard D.",
title = {{Computer Algebra. Pure and Applied Mathematics}},
publisher = "Springer",
year = "1989"
}\end{chunk}
\index{Padget, J.A.}
\begin{chunk}{ axiom.bib}
@inproceedings{Padg85,
author = "Padget, J.A.",
title = {{Current Development in LISP}},
booktitle = "EUROCAL 85",
publisher = "Springer",
year = "1985",
abstract =
"This paper is divided into three sections. The first gives an
overview of the presently available and emerging dialects of LISP
and how design decisions within them affect symbolic algebra. The
second discusses recent developments, in particular, Common LISP
subsetting, portability, pure language research and mixed paradigm
systems. The third part is devoted to what is happening in
specialised LISP hardware in Japan, in the United States and in
Europe. The subject matter of each of these three sections is so
tightly interwoven however that the detailed discussion of some
material may be postponed until a later section although to some
readers it might seem appropriate for inclusion earlier. It should
also be mentioned that this is a survey, therefore the items have
been selected for mention on the grounds of interest rather than
completeness.",
paper = "Padg85.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Kreisel, G.}
\begin{chunk}{ axiom.bib}
@inproceedings{Krei85,
author = "Kreisel, G.",
title = {{Proof Theory and the Synthesis of Programs: Potential and
Limitations}},
booktitle = "EUROCAL 85",
publisher = "Springer",
paper = "Krei85.pdf"
}\end{chunk}
\index{Vazou, Niki}
\index{Breitner, Joachim}
\index{Kunkel, Rose}
\index{Horn, David Van}
\index{Hutton, Graham}
\begin{chunk}{ axiom.bib}
@inproceedings{Vazo18,
author = "Vazou, Niki and Breitner, Joachim and Kunkel, Rose and
Horn, David Van and Hutton, Graham",
title = {{Theorem Proving for All: Equational Reasoning in Liquid
Haskell}},
booktitle = "Haskell'18",
publisher = "ACM",
year = "2018",
isbn = "978-1-4503-5835-4",
link = "\url{http://www.cs.nott.ac.uk/~pszgmh/tpfa.pdf}",
abstract =
"Equational reasoning is one of the key features of pure
functional languages such as Haskell. To date, however, such
reasoning always took place externally to Haskell, either manually
on paper, or mechanised in a theorem prover. This article shows
how equational reasoning can be performed directly and seamlessly
within Haskell itself, and be checked using Liquid Haskell. In
particular, language learners -- to whom external theorem provers
are out of reach -- can benefit from having their proofs
mechanically checked. Concretely, we show how the equational
proofs and derivations from Hutton's textbook can be recast as
proofs in Haskell (spoiler: they look essentially the same).",
paper = "Vazo18.pdf"
}\end{chunk}
\index{Jaskelioff, Mauro}
\index{Ghani, Neil}
\index{Hutton, Graham}
\begin{chunk}{ axiom.bib}
@inproceedings{Jask08,
author = "Jaskelioff, Mauro and Ghani, Neil and Hutton, Graham",
title = {{Modularity and Implementation of Mathematical Operational
Semantics}},
publisher = "Elsevier",
year = "2008",
link = "\url{http://www.cs.nott.ac.uk/~pszgmh/modular.pdf}",
abstract =
"Structural operational semantics is a popular technique for
specifying the meaning of programs by means of inductive
clauses. One seeks syntactic restrictions on those clauses so that
the resulting operational semantics is well-behaved. This approach
is simple and concrete but it has some drawbacks. Turi pioneered a
more abstract categorical treatment based upon the idea that
operational semantics is essentially a distribution of syntax over
behaviour. In this article we take Turi's approach in two new
directions. Firstly, we show how to write operational semantics as
modular components and how to combine such components to specify
complete languages. Secondly, we show how the categorical nature
of Turi's operational semantics makes it ideal for implementation
in a functional programming language such as Haskell.",
paper = "Jask08.pdf"
}\end{chunk}
\index{Pickard, Mitchell}
\index{Hutton, Graham}
\begin{chunk}{ axiom.bib}
@inproceedings{Pick20,
author = "Pickard, Mitchell and Hutton, Graham",
title = {{Dependently-Typed Compilers Don't Go Wrong}},
booktitle = "Programming Languages",
publisher = "ACM",
year = "2020",
link = "\url{http://www.cs.nott.ac.uk/~pszgmh/well-typed.pdf}",
abstract =
"Compilers are difficult to write, and difficult to get
right. Bahr and Hutton recently developed a new technique for
calculating compilers directly from specifications of their
correctness, which ensures that the resulting compilers are
correct-by-construction. To date, however, this technique has only
been applicable to source languages that are untyped. In this
article, we show that moving to a dependently-typed setting allows
us to naturally support typed source languages, ensure that all
compilation components are type-safe, and make the resulting
calculations easier to mechanically check using a proof
assistant.",
paper = "Pick20.pdf"
}\end{chunk}
\index{Doorn, Floris van}
\index{Ebner, Gabriel}
\index{Lewis, Robert Y.}
\begin{chunk}{ axiom.bib}
@misc{Door20,
author = "Doorn, Floris van and Ebner, Gabriel and
Lewis, Robert Y.",
title = {{Maintaining a Library of Formal Mathematics}},
link = "\url{https://arxiv.org/pdf/2004.03673}",
year = "2020",
abstract =
"The Lean mathematical library mathlib is developed by a community
of users with very different backgrounds and levels of
experience. To lower the barrier of entry for contributors and to
lessen the burden of reviewing contributions, we have developed a
number of tools for the library which check proof developments for
subtle mistakes in the code and generate documentation suited for
our varied audience.",
paper = "Door20.pdf"
}\end{chunk}
\index{Rekdal, Ole Bjorn}
\begin{chunk}{ axiom.bib}
@article{Rekd14,
author = "Rekdal, Ole Bjorn",
title = {{Academic Urban Legends}},
journal = "Social Studies of Science",
volume = "44",
number = "4",
pages = "638-654",
year = "2014",
abstract =
"Many of the messages presented in respectable scientific
publications are, in fact, based on various forms of rumors. Some
of these rumors appear so frequently, and in such complex,
colorful, and entertaining ways that we can think of them as
academic urban legends. The explanation for this phenomenon is
usually that the authors have lazily, sloppily, or fraudulently
employed sources, and peer reviewers and editors have not
discovered these weaknesses in the manuscripts during
evaluation. To illustrate this phenomenon, I draw upon a
remarkable case in which a decimal point error appears to have
misled millions into believing that spinach is a good nutritional
source of iron. Through this example, I demonstrate how an
academic urban legend can be conceived and born, and can continue
to grow and reproduce within academia and beyond.",
paper = "Rekd14.pdf",
keywords = "DONE"
}\end{chunk}
\index{Hoare, C.A.R}
\begin{chunk}{ axiom.bib}
@article{Hoar72a,
author = "Hoare, C.A.R",
title = {{Proof of Correctness of Data Representations}},
journal = "Acta Informatica",
volume = "1",
pages = "271-281",
year = "1972",
abstract =
"A powerful method of simplifying the proofs of program
correctness is suggested; and some new light is shed on the
problem of functions with side-effects.",
paper = "Hoar72a.pdf",
keywords = "printed"
}\end{chunk}
\index{Streicher, Thomas}
\begin{chunk}{ axiom.bib}
@book{Stre91,
author = "Streicher, Thomas",
title = {{Semantics of Type Theory}},
year = "1991",
publisher = "Springer",
isbn = "978-1-4612-6757-7"
}\end{chunk}
\index{Dijkstra, E.W.}
\begin{chunk}{ axiom.bib}
@misc{Dijk67,
author = "Dijkstra, E.W.",
title = {{A Constructive Approach to the Problem of Program Correctness}},
year = "1967",
link = "\url{https://www.cs.utexas.edu/users/EWD/ewd02x/EWD209.PDF}",
abstract =
"As an alternative to methods by which the correctness of given
programs can be established a posteriori, this paper proposes to
control the process of program generation such as to produce a
priori correct programs. An example is treated to show the form
that such a control then might take. This example comes from the
field of parallel programming; the way in which it is treated is
representative for the way in which a whole multiprogramming
system has actually been constructed.",
paper = "Dijk67.pdf",
keywords = "printed"
}\end{chunk}
\index{Schoett, Oliver}
\begin{chunk}{ axiom.bib}
@article{Scho90,
author = "Schoett, Oliver",
title = {{Behavioural Correctness of Data Representations}},
journal = "Science of Computer Programming",
volume = "14",
year = "1990",
pages = "43-57",
abstract =
"Two methods for proving the correctness of data representation
are presented which employ a mathematical relation between the
data values in a representation and those in its abstract
model. One method reflects the behavioural equivalence relation
of abstract data type theory, and the other a new ``behavioural
inclusion'' notion that formalizes the idea of a ``partial
representation'' of a data type.

These correctness concepts and proof methods are strictly more
general than the conventional ones based on abstraction functions,
and they are no longer affected by ``implementation bias'' in
specifications.",
paper = "Scho90.pdf",
keywords = "printed"
}\end{chunk}
\index{Macor, Jackson}
\begin{chunk}{ axiom.bib}
@misc{Maco15,
author = "Macor, Jackson",
title = {{A Brief Introduction to Type Theory and the Univalence
Axiom}},
year = "2015",
comment = "U. Chicago REU",
link = "\url{https://math.uchicago.edu/~may/REU2015/REUPapers/Macor.pdf}",
abstract =
"In this paper, we will introduce the basic concepts and notation
of modern type theory in an informal manner. We will discuss
functions, type formation, the nature of proof, and proceed to
prove some basic results in sentential logic. These efforts will
culminate in a proof of the axiom of choice in type theory. We
will further introduce a few concepts in homotopy type theory, a
modern invention which seeks to provide a foundation of mathematics
without ZFC type theory. We will conclude with the univalence
axiom, an indispensable tool in homotopy type theory, and use it
to prove a stronger version of the axiom of choice.",
paper = "Maco15.pdf",
keywords = "printed, DONE"
}\end{chunk}
\index{Hebisch, Waldemar}
\begin{chunk}{ axiom.bib}
@misc{Hebi20,
author = "Hebisch, Waldemar",
title = {{History of FriCAS}},
year = "2020",
link = "\url{http://fricas.sourceforge.net/history.html}",
keywords = "axiomref, DONE"
}\end{chunk}
\index{Kaltofen, Erich}
\index{Rolletschek, Heinrich}
\begin{chunk}{ axiom.bib}
@inproceedings{Kalt85f,
author = "Kaltofen, Erich and Rolletschek, Heinrich",
title = {{Arithmetic in Quadratic Fields with Unique Factorization}},
booktitle = "Research Contributions from the Euro. Conf. on Comp. Alg.",
series = "Lecture Notes in Computer Science Volume 204",
volume = "2",
pages = "279-288",
year = "1985",
isbn = "0-387-15983-5 (vol. 1),0-387-15984-3 (vol. 2)",
paper = "Kalt85f.pdf"
}\end{chunk}
\index{Kaltofen, Erich}
\index{Rolletschek, Heinrich}
\begin{chunk}{ axiom.bib}
@inproceedings{Kalt85f,
author = "Kaltofen, Erich and Rolletschek, Heinrich",
title = {{Arithmetic in Quadratic Fields with Unique Factorization}},
booktitle = "Research Contributions from the Euro. Conf. on Comp. Alg.",
series = "Lecture Notes in Computer Science Volume 204",
volume = "2",
pages = "279-288",
year = "1985",
isbn = "0-387-15983-5 (vol. 1),0-387-15984-3 (vol. 2)",
abstract =
"In a quadratic field $\mathbb{Q}(\sqrt{D})$, $D$ a squarefree
integer, with class number 1 any algebraic integer can be
decomposed uniquely into primes but for only 21 domains Euclidean
algorithms are known. We prove that for $D \le -19$ even remainder
sequences with possibly non-decreasing norms cannot determine the
GCD of arbitrary inputs. We then show how to compute the greatest
common divisor of the algebraic integers in any fixed
$\mathbb{Q}(\sqrt{D})$ with class number 1 in $O(S^2)$ binary
steps where $S$ is the number of bits needed to encode the
inputs. We also prove that in any domain the computation of the
prime factorization of an algebraic integer can be reduced in
polynomial-time to factoring its norm into rational primes. Our
reduction is based on a constructive version of a theorem by
A. Thue. Finally we present another GCD algorithm for complex
quadratic fields based on a short lattice vector construction.",
paper = "Kalt85f.pdf"
}\end{chunk}
\index{Greif, J.M.}
\begin{chunk}{ axiom.bib}
@inproceedings{Grei85,
author = "Greif, J.M.",
title = {{The SMP Pattern Matcher}},
booktitle = "Research Contributions from the Euro. Conf. on Comp. Alg.",
series = "Lecture Notes in Computer Science Volume 204",
volume = "2",
pages = "303-314",
year = "1985",
isbn = "0-387-15983-5 (vol. 1),0-387-15984-3 (vol. 2)",
abstract =
"A new pattern matcher has been implemented for the symbolic
manipulation program SMP. The pattern matcher correctly treats
associative and commutative functions, functions with arbitrary
symmetries under permutations of their arguments, and patterns
subject to arbitrary logical predicates. A pattern is said to match
a candidate instance if the set of expressions it represents is a
superset of those represented by the instance (either or both may
contain generic symbols which match possibly infinite sets of
expressions). The matcher is primarily syntactic, but is able to
find matches for syntactically different expressions which become
literally equivalent upon replacing the generic symbols in the
pattern by their bindings to subexpressions of the instance and
simplifying the result. The bindings must be determined by literal
comparison with the instance, not by solving equations.

This paper discusses issues arising during the construction of
this pattern matcher, and gives examples of using pattern matching
for extending the power of built-in operations and adding new
mathematical knowledge to a symbol manipulation program.",
paper = "Grei85.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Marti, Jed}
\begin{chunk}{ axiom.bib}
@article{Mart83,
author = "Marti, Jed",
title = {{The Little META Translator Writing System}},
journal = "Software -- Practice and Experience",
volume = "13",
pages = "941-959",
year = "1983",
abstract =
"The Little META Translator Writing System is a tool for
implementing experimental compilers, interpreters, preprocessors
and higher level translator writing systems on a
microprocessor. This paper presents the salient features of the
system through the implementation of three translators. Some
knowledge of LISP is assumed.",
paper = "Mart83.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Fateman, Richard J.}
\begin{chunk}{ axiom.bib}
@misc{Fate96a,
author = "Fateman, Richard J.",
title = {{Why Computer Algebra Systems Can't Solve Simple
Equations}},
year = "1996",
link = "\url{https://people.eecs.berkeley.edu/~fateman/papers/y=z2w.pdf}",
paper = "Fate96a.pdf"
}\end{chunk}
\index{Paule, Peter}
\begin{chunk}{ axiom.bib}
@book{Paul13,
author = "Paule, Peter",
title = {{Mathematics, Computer Science and Logic -- A Never Ending
Story}},
publisher = "Springer",
year = "2013",
paper = "Paul13.pdf"
}\end{chunk}
\index{Buchberger, B.}
\index{Loos, R.}
\begin{chunk}{ axiom.bib}
@inbook{Buch82a,
author = "Buchberger, Bruno and Loos, Rudiger",
title = {{Algebraic Simplification}},
booktitle = {{Computer Algebra: Symbolic and Algebraic Computation}},
publisher = "Springer",
year = "1982",
isbn = "978-3-211-81684-4",
pages = "11-44",
abstract =
"Some basic techniques for the simplification of terms are
surveyed. In two introductory sections the problem of canonical
algebraic simplification is formally stated and some elementary
facts are derived that explain the fundamental role of
simplification in computer algebra. In the subsequent sections two
major groups of simplification techniques are presented: special
techniques for simplifying terms over numerical domains and
completion algorithms for simplification with respect to sets of
equations. Within the first group canonical simplification
algorithms for polynomials, rational expressions, radical
expressions and transcendental expressions are treated (Sections
3-7). As examples for completion algorithms the Knuth-Bendix
algorithm for rewrite rules and an algorithm for completing bases
of polynomial ideals are described (Sections 8-11).",
paper = "Buch82.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Lauer, M.}
\begin{chunk}{ axiom.bib}
@InCollection{Laue82,
author = "Lauer, M.",
title = {{Computing by Homomorphic Images}},
booktitle = "Computer Algebra: Symbolic and Algebraic Computation",
pages = "139-168",
year = "1982",
publisher = "Springer",
isbn = "978-3-211-81684-4",
abstract =
"After explaining the general technique of Computing by homomorphic
images, the Chinese remainder algorithm and the Hensel lifting
construction are treated extensively. Chinese remaindering is first
presented in an abstract setting. Then the specialization to Euclidean
domains, in particular $\mathbb{Z}$, $\mathbb{K}[y]$, and
$\mathbb{Z}[y_1, \ldots, y_n]$ is treated. The lifting construction
is first also presented in an abstract form from which Hensel's
Lemma derives by specialization. After introducing Zassenhaus'
quadratic lifting construction, again, the case of $\mathbb{Z}$
and $\mathbb{Z}[y_1, \ldots, y_r]$ is considered. For both techniques,
Chinese remaindering as well as the lifting algorithms, a complete
computational example is presented and the most frequent application
is discussed.",
paper = "Buch82.pdf"
}\end{chunk}
\index{Loos, R.}
\begin{chunk}{ axiom.bib}
@inbook{Loos82,
author = "Loos, R",
title = {{Generalized Polynomial Remainder Sequences}},
booktitle = {{Computer Algebra: Symbolic and Algebraic Computation}},
publisher = "Springer",
year = "1982",
isbn = "978-3-211-81684-4",
pages = "115-138",
abstract =
"Given two polynomials over an integral domain, the problem is to
compute their polynomial remainder sequence (p.r.s) over the same
domain. Following Habicht, we show how certain powers of leading
coefficients enter systematically all following remainders. The
key tool is the subresultant chain of two polynomials. We study
the primitive, the reduced and the improved subresultant p.r.s
algorithm of Brown and Collins as basis for computing polynomial
greatest common divisors, resultants or Sturm sequences. Habicht's
subresultant theorem allows new and simple proofs of many results
and algorithms found in different ways in computer algebra.",
paper = "Buch82.pdf"
}\end{chunk}
\index{Neubuser, J.}
\begin{chunk}{ axiom.bib}
@inbook{Neub82,
author = "Neubuser, J.",
title = {{Computing with Groups and Their Character Tables}},
booktitle = {{Computer Algebra: Symbolic and Algebraic Computation}},
publisher = "Springer",
year = "1982",
isbn = "978-3-211-81684-4",
pages = "45-56",
abstract =
"In this survey an attempt is made to give some impression of the
capabilities of currently available programs for computations with
finitely generated groups and their representations.",
paper = "Buch82.pdf"
}\end{chunk}
\index{Norman, A.C.}
\begin{chunk}{ axiom.bib}
@inbook{Norm82a,
author = "Norman, A.C.",
title = {{Integration in Finite Terms}},
booktitle = {{Computer Algebra: Symbolic and Algebraic Computation}},
publisher = "Springer",
year = "1982",
isbn = "978-3-211-81684-4",
pages = "57-70",
abstract =
"A survey on algorithms for integration in finite terms is
given. The emphasis is on indefinite integration. Systematic
methods for rational, algebraic and elementary transcendental
integrands are reviewed. Heuristic techniques for indefinite
integration, and techniques for definite integration and ordinary
differental equations are touched only briefly.",
paper = "Buch82.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Norman, A.C.}
\begin{chunk}{ axiom.bib}
@inbook{Norm82b,
author = "Norman, A.C.",
title = {{Computing in Transcendental Extensions}},
booktitle = {{Computer Algebra: Symbolic and Algebraic Computation}},
publisher = "Springer",
year = "1982",
isbn = "978-3-211-81684-4",
pages = "169-172",
abstract =
"Performing the rational operations in a field extended by a
transcendental element is equivalent to performing arithmetic in
the field of rational functions over the field. The computational
difficulty associated with such extensions is in verifying that
proposed extensions are transcendental. When the extensions being
considered are functions, and where a differentiation operator can
be defined for them, structure theorems can be used to determine
the character of the extension and to exhibit a relationship
between the adjoined element and existing quantities in case the
adjoined element is not transcendental.",
paper = "Buch82.pdf"
}\end{chunk}
\index{Lafon, J.C.}
\begin{chunk}{ axiom.bib}
@inbook{Lafo82,
author = "Lafon, J.C.",
title = {{Summation in Finite Terms}},
booktitle = {{Computer Algebra: Symbolic and Algebraic Computation}},
publisher = "Springer",
year = "1982",
isbn = "978-3-211-81684-4",
pages = "71-77",
abstract =
"A survey on algorithms for summation in finite terms is given. After a
precise definition of the problem the cases of polynomial and rational
summands are treated. The main concern of this paper is a description
of Gosper's algorithm, which is applicable for a wide class of
summands. Karr's theory of extension difference fields and some
heuristic techniques are touched on briefly.",
paper = "Buch82.pdf",
keywords = "axiomref, survey"
}\end{chunk}
\index{Collins, G.E.}
\index{Mignotte, M.}
\index{Winkler, F.}
\begin{chunk}{ axiom.bib}
@inbook{Coll82,
author = "Collins, G.E. and Mignotte, M. and Winkler, F.",
title = {{Arithmetic in Basic Algebraic Domains}},
booktitle = {{Computer Algebra: Symbolic and Algebraic Computation}},
publisher = "Springer",
isbn = "978-3-211-81684-4",
pages = "189-220",
year = "1982",
abstract =
"This chapter is devoted to the arithmetic operations, essentially
addition, multiplication, exponentiation, division, gcd calculations
and evaluation, on the basic algebraic domains. The algorithms for
these basic domains are those most frequently used in any computer
algebra system. Therefore the best known algorithms, from a
computational point of view, are presented. The basic domains
considered here are the rational integers, the rational numbers,
integers modulo $m$, Gaussian integers, polynomials, rational
functions, power series, finite fields and $p$-adic numbers. Bounds on
the maximum, minimum and average computing time ($t^{+},t^{-}, t^{*}$) for
the various algorithms are given.",
paper = "Buch82.pdf"
}\end{chunk}
\index{Collins, G.E.}
\begin{chunk}{ axiom.bib}
@inbook{Coll82b,
author = "Collins, G.E.",
title = {{Quantifier Elimination for Real Closed Fields: A Guide to
the Literature}},
booktitle = {{Computer Algebra: Symbolic and Algebraic Computation}},
publisher = "Springer",
year = "1982",
isbn = "978-3-211-81684-4",
pages = "79-82",
abstract =
"This article provides a brief summary of the most important
publications relating to quantifier elimination for the elementary
theory of real closed fields. Especially mentioned is the
cylindrical algebraic decomposition method and its relation to the
facilities of computer algebra facilities.",
paper = "Buch82.pdf"
}\end{chunk}
\index{Collins, G.E.}
\index{Loos, R.}
\begin{chunk}{ axiom.bib}
@inbook{Coll82c,
author = "Collins, G.E. and Loos, R.",
title = {{Real Zeros of Polynomials}},
booktitle = {{Computer Algebra: Symbolic and Algebraic Computation}},
publisher = "Springer",
year = "1982",
isbn = "978-3-211-81684-4",
pages = "83-94",
abstract =
"Let $A$ be a polynomial over $\mathbb{Z}$, $\mathbb{Q}$,
$\mathbb{Q}(\alpha) $ where $\alpha$ is a real algebraic
number. The problem is to compute a sequence of disjoint intervals
with rational endpoints, each containing exactly one real zero of
$A$ and together containing all real zeros of $A$. We describe an
algorithm due to Kronecker based on the minimum root separation,
Sturm's algorithm, an algorithm based on Rolle's theorem due to
Collins and Loos and the modified Uspensky algorithm due to
Collins and Aritas. For the last algorithm a recursive version
with correctness proof is given which appears in print for the
first time.",
paper = "Buch82.pdf"
}\end{chunk}
\index{Loos, R.}
\begin{chunk}{ axiom.bib}
@inbook{Loos82a,
author = "Loos, R",
title = {{Computing in Algebraic Extensions}},
booktitle = {{Computer Algebra: Symbolic and Algebraic Computation}},
publisher = "Springer",
year = "1982",
isbn = "978-3-211-81684-4",
pages = "173-188",
abstract =
"The aim of this chapter is an introduction to elementary
algorithms in algebraic extensions, mainly over $\mathbb{Q}$ and,
to some extent, over $GF(p)$. We will talk about arithmetic in
$\mathbb{Q}(\alpha) $ and $GF(p^n)$ in Section 1 and some
polynomial algorithms with coefficients from these domains in
Section 2. Then, we will consider the field $K$ of all algebraic
numbers over $\mathbb{Q}$ and show constructively that $K$ indeed
is a field that multiple extensions can be replaced by single ones
and the $K$ is algebraically closed, i.e. that zeros of algebraic
number polynomials will be elements of $K$ (Section 4-6). For this
purpose we develop a simple resultant calculus which reduces all
operations on algebraic numbers to polynomial arithmetic on long
integers together with some auxiliary arithmetic on rational
intervals (Section 3). Finally, we present some auxiliary algebraic
number algorithms used in other chapters of this volume (Section
7). This chapter does not include any special algorithms of
algebraic number theory. For an introduction and survey with an
extensive bibliography the reader is referred to Zimmer.",
paper = "Buch82.pdf"
}\end{chunk}
\index{Hulzen, J.A. van}
\index{Calmet, J.}
\begin{chunk}{ axiom.bib}
@inbook{Hulz82a,
author = "Hulzen, J.A. van and Calmet, J.",
title = {{Computer Algebra Systems}},
booktitle = {{Computer Algebra: Symbolic and Algebraic Computation}},
publisher = "Springer",
year = "1982",
isbn = "978-3-211-81684-4",
pages = "221-244",
abstract =
"A survey is given of computer algebra systems, with emphasis on
design and implementation aspects, by presenting a review of the
development of ideas and methods in a historical perspective, by
us considered as instrumental for a better understanding of the
rich diversity of now available facilities. We first indicate
which classes of mathematical expressions can be stated and
manipulated in different systems before we touch on different
general aspects of usage, design and implementation, such as
language design, encoding, dynamic storage allocation and a
symbolic-numeric interface. Then we discuss polynomial and
rational function systems, by describing ALTRAN and SAC-2. This is
followed by a comparison of some of the features of MATHLAB-68,
SYMBAL and FORMAC, which are pretended general purpose
systems. Before considering giants (MACSYMA and SCRATCHPAD) and
gnomes (muMATH-79), we give the main characteristics of TRIGMAN,
CAMAL and REDUCE, systems we tend to consider as grown out special
purpose facilities. Finally we mention some modern algebra systems
(CAYLEY and CAMAC-79) in relation to recent proposals for a
language for computational algebra. We conclude by stipulating the
importance of documentation. Throughout this discussion related
systems and facilities will be mentioned. Noticeable are ALKAHEST
II, ALPAK, ANALITIK, ASHMEDAI, NETFORM, PM, SAC-1, SCHOONSCHIP,
SHEEP, SML, SYCOPHANTE and TAYLOR.",
paper = "Buch82.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Calmet, J.}
\index{Hulzen, J.A. van}
\begin{chunk}{ axiom.bib}
@inbook{Calm82,
author = "Calmet, J. and Hulzen, J.A. van",
title = {{Computer Algebra Applications}},
booktitle = {{Computer Algebra: Symbolic and Algebraic Computation}},
publisher = "Springer",
year = "1982",
isbn = "978-3-211-81684-4",
pages = "245-258",
abstract =
"A survey of applications based either on fundamental algorithms
in computer algebra or on the use of a computer algebra system is
presented. Since many survey articles are previously published, we
did not attempt to be exhaustive. We discuss mainly recent work
in biology, chemistry, physics, mathematics and computer science,
thus again confirming that applications have both engineering and
scientific aspects, i.e. apart from delivering results they assist
in gaining insight as well.",
paper = "Buch82.pdf"
}\end{chunk}
\index{Mignotte, M.}
\begin{chunk}{ axiom.bib}
@inbook{Mign82,
author = "Mignotte, M.",
title = {{Some Useful Bounds}},
booktitle = {{Computer Algebra: Symbolic and Algebraic Computation}},
publisher = "Springer",
year = "1982",
isbn = "978-3-211-81684-4",
pages = "259-264",
abstract =
"Some fundamental inequalities for the following values are
listed: the determinant of a matrix, the absolute value of the
roots of a polynomial, the coefficients of divisors of
polynomials, and the minimal distance between the roots of a
polynomial. These inequalities are useful for the analysis of
algorithms in various areas of computer algebra.",
paper = "Buch82.pdf"
}\end{chunk}
\index{Moller, Anders}
\index{Schwartzbach, Michael I.}
\begin{chunk}{ axiom.bib}
@book{Moll19,
author = "Moller, Anders and Schwartzbach, Michael I.",
title = {{Static Program Analysis}},
publisher = "Aarhus University",
year = "2019"
}\end{chunk}
\index{Guillaume, Alexandre}
\begin{chunk}{ axiom.bib}
@phdthesis{Guil98,
author = "Guillaume, Alexandre",
title = {{De ALDOR a Zermelo}},
school = "University Paris VI",
year = "1998"
}\end{chunk}
- 2919. By daly
-
src/input/fuzz*
Goal: Regression testing
These are regression test files that Axiom correctly handles.
They are based on an implementation of Guillaume Lample and Francois Charton
``Deep Learning for Symbolic Computation'' \cite{Lamp19},
posted on Jan 2, 2020 by Waldek Hebisch - 2918. By daly
-
books/bookvolbib add references
Goal: Proving Axiom Sane
\index{Driscoll, Kevin R.}
\begin{chunk}{ axiom.bib}
@misc{Dris20,
author = "Driscoll, Kevin R.",
title = {{Murphy Was An Optimist}},
year = "2020",
link = "\url{www.rvs.uni-bielefeld.de/publications/DriscollMurphyv19.pdf}",
comment = "<email address hidden>",
paper = "Dris20.pdf"
}\end{chunk}
\index{Doctorow, Cory}
\begin{chunk}{ axiom.bib}
@misc{Doct11,
author = "Doctorow, Cory",
title = {{The Coming War on General Computation}},
year = "2011",
link = "\url{http://opentranscripts.org/transcript/coming-war-on-general-computation}",
keywords = "DONE"
}\end{chunk}
\index{Farvardin, Kavon}
\index{Reppy, John}
\begin{chunk}{ axiom.bib}
@inproceedings{Farv20,
author = "Farvardin, Kavon and Reppy, John",
title = {{From Folklore to Fact: Comparing Implementations of Stacks
and Continuations}},
booktitle = "Programming Language Design and Implementation",
publisher = "ACM SIGPLAN",
year = "2020",
abstract =
"The efficient implementation of function calls and non-local
control transfers is a critical part of modern language
implementations and is important in the implementation of
everything from recursion, higher-order functions, concurrency and
coroutines, to task-based parallelism. In a compiler, these
features can be supported by a variety of mechanisms, including
call stacks, segmented stacks, and heap-allocated continuation
closures. An implementor of a high-level language with advanced control
features might ask the question ``what is the best choice for my
implementation?'' Unfortunately, the current literature does not
provide much guidance, since previous studies suffer from various
flaws in methodology and are outdated for modern hardware. In the
absence of recent, well-normalized measurements and a holistic
overview of their implementation specifics, the path of least
resistance when choosing a strategy is to trust folklore, but the
folklore is also suspect. This paper attempts to remedy this situation by providing an
``apples-to-apples'' comparison of five different approaches to
implementing call stacks and continuations. This comparison uses
the same source language, compiler pipeline, LLVM-backend, and
runtime system, with the only differences being those required by
the differences in implementation strategy. We compare the
implementation challenges of the different approaches, their
sequential performance, and their suitability to support advanced
control mechanisms, including supporting heavily threaded code. In
addition to the comparison of implementation strategies, the
paper's contributions also include a number of useful
implementation techniques that we discover along the way.",
paper = "Farv20.pdf"
}\end{chunk}
\index{Srivastava, Saurabh}
\index{Gulwani, Sumit}
\index{Foster, Jeffrey S.}
\begin{chunk}{ axiom.bib}
@inproceedings{Sriv10,
author = "Srivastava, Saurabh and Gulwani, Sumit and Foster, Jeffrey S.",
title = {{From Program Verification to Program Synthesis}},
booktitle = "37th Symp. on Principles of Programming Languages",
publisher = "ACM",
pages = "313-326",
year = "2010",
abstract =
"This paper describes a novel technique for the synthesis of
imperative programs. Automated program synthesis has the potential
to make programming and the design of systems easier by allowing
programs to be specified at a higher-level than executable
code. In our approach, which we call proof-theoretic synthesis,
the user provides an input-output functional specification, a
description of the atomic operations in the programming language,
and a specification of the synthesized program's looping
structure, allowed stack space, and bound on usage of certain
operations. Our technique synthesizes a program, if there exists
one, that meets the input-output specification and uses only the
given resources. The insight behind our approach is to interpret program synthesis
as generalized program verification, which allows us to bring
verification tools and techniques to program synthesis. Our
synthesis algorithm works by creating a program with unknown
statements, guards, inductive invariants, and ranking
functions. It then generates constraints that relate the unknowns
and enforces three kinds of requirements: partial correctness,
loop termination, and well-formedness conditions on program
guards. We formalize the requirements that program verification
tools must meet to solve these constraints and use tools from
prior work as our synthesizers. We demonstrate the feasibility of the proposed approach by
synthesizing programs in three different domains: arithmetic,
sorting, and dynamic programming. Using verification tools that we
previously built in the VS3 project we are able to synthesize
programs for complicated arithmetic algorithms including
Strassen's matrix multiplication and Bresenham's line drawing;
several sorting algorithms; and several dynamic programming
algorithms. For these programs, the median time for synthesis is
14 seconds, and the ratio of synthesis to verification time ranges
between 1x to 92x (with a median of 7x), illustrating the potential
of the approach.",
paper = "Sriv10.pdf"
}\end{chunk}
\index{Hoare, Tony}
\begin{chunk}{ axiom.bib}
@article{Hoar95,
author = "Hoare, Tony",
title = {{Unification of Theories: A Challenge for Computing Science}},
booktitle = "Lecture Notes in Computer Science",
publisher = "Springer",
volume = "1130",
year = "1995",
abstract =
"Unification of theories is the long-standing goal of the natural
sciences; and modern physics offers a spectacular paradigm of its
achievement. The structure of modern mathematics has also been
determined by its great unifying theories -- topology, algebra and
the like. The same ideals and goals are shared by researchers and
students of theoretical computing science.",
paper = "Hoar95.pdf"
}\end{chunk}
\index{Mylonakis, Nikos}
\begin{chunk}{ axiom.bib}
@article{Mylo95,
author = "Mylonakis, Nikos",
title = {{Behavioural Specifications in Type Theory}},
booktitle = "Lecture Notes in Computer Science",
publisher = "Springer",
volume = "1130",
year = "1995",
abstract =
"In this paper we give a new view of the type theory UTT (Uniform
theory of dependent types) [5] as a system to formally develop
programs from algebraic specifications, comparable to
e.g. EML([9]). We will focus our attention on behavioural
specifications since they have not been deeply studied in a type
theoretical setting, and we describe how to develop proofs about
behavioural satisfaction.",
paper = "Mylo95.pdf"
}\end{chunk}
\index{Shparlinski, Igor E.}
\begin{chunk}{ axiom.bib}
@book{Shpa99,
author = "Shparlinski, Igor E.",
title = {{Finite Fields: Theory and Computation}},
publisher = "Kluwer Academic",
year = "1999",
isbn = "0-7923-5662-4",
keywords = "axiomref"
}\end{chunk}
\index{Lescanne, Pierre}
\begin{chunk}{ axiom.bib}
@article{Lesc95,
author = "Lescanne, Pierre",
title = {{The Lambda Calculus as an Abstract Data Type}},
booktitle = "Lecture Notes in Computer Science",
publisher = "Springer",
volume = "1130",
year = "1995",
abstract =
"Calculi of explicit substitutions are theoretical tools to
describe lambda calculus as first-order rewrite systems. Therefore
a presentation as an abstract data type is possible. During an
invited lecture at the Workshop on Abstract Data Types, we
presented calculi of explicit substitutions, especially the
calculus $\lambda$v. The theoretical background of $\lambda$v can
be found in [1], therefore we are not entering in the details of
the theory as we did during the oral presentation. However, we
take the essence of the talk, namely the formalization of
$\lambda$v in a specification language based on ADT. We have
chosen LSL, the LARCH Shared Language [7] to illustrate the beauty
of abstract data types which lies in its precision and
concision. Moreover we take freedom with respect to this
language.",
paper = "Lesc95.pdf",
keywords = "printed"
}\end{chunk}
\index{Lamport, Leslie}
\begin{chunk}{ axiom.bib}
@misc{Lamp09,
author = "Lamport, Leslie",
title = {{The PlusCal Algorithm Language}},
year = "2009",
link = "\url{https://lamport.azurewebsites.net/pubs/pluscal.pdf}",
abstract =
"Algorithms are different from programs and should not be
described with programming languages. The only simple alternative
to programming languages has been pseudo-code. PlusCal is an
algorithm language that can be used right now to replace
pseudo-code, for both sequential and concurrent algorithms. It is
based on the TLA+ specification language, and a PlusCal algorithm
is automatically translated to a TLA+ specification that can be
checked with the TLC model checker and reasoned about formally.",
paper = "Lamp09.pdf",
keywords = "printed"
}\end{chunk}
\index{Sedgewick, Robert}
\begin{chunk}{ axiom.bib}
@book{Sedg88,
author = "Sedgewick, Robert",
title = {{Algorithms}},
publisher = "Addison-Wesley",
year = "1988"
}\end{chunk}
\index{Armstrong, Joe}
\begin{chunk}{ axiom.bib}
@misc{Arms14,
author = "Armstrong, Joe",
title = {{The Mess We're In}},
link = "\url{https://www.youtube.com/watch?v=lKXe3HUG2l4}",
year = "2014",
keywords = "axiomref, DONE"
}\end{chunk}
\index{Turing, A. M.}
\begin{chunk}{ axiom.bib}
@article{Turi49,
author = "Turing, A. M.",
title = {{Checking a Large Routine}},
journal = "Annals of the History of Computing",
volume = "6",
number = "2",
year = "1949",
abstract =
"The standard references for work on program proofs attribute the
early statement of direction to John McCarthy (e.g. McCarthy
1963); the first workable methods to Peter Naur (1966) and Robert
Floyd (1967); and the provision of more formal systems to C.A.R
Hoare (1969) and Edsger Dijkstra (1976). The early papers of some
of the computing pioneers, however, show an awareness of the need
for proofs of program correctness and even present workable methods
(e.g. Goldstine and von Neumann 1947; Turing 1949). The 1949 paper by Alan M. Turing is remarkable in many
respects. The three (foolscap) pages of text contain an excellent
motivation by analogy, a proof of a program with two nested loops,
and an indication of a general proof method very like that of
Floyd. Unfortunately, the paper is made extremely difficult to
read by a large number of transcription errors. For example, all
instances of the factorial sign (Turing used $|n$) have been
omitted in the commentary, and ten other identifiers are written
incorrectly. It would appear to be worth correcting these errors
and commenting on the proof from the viewpoint of subsequent work
on program proofs. Turing delivered this paper in June 1949, at the inaugural
conference of the EDSAC, the computer at Cambridge University
built under the direction of Maurice V. Wilkes. Turing had been
writing programs for an electronic computer since the end of 1945
-- at first for the proposed ACE, the computer project at the
National Physical Laboratory, but since October 1948 for the
Manchester prototype computer, of which he was deputy
director. The references in his paper to $2^{40}$ are reflections
of the 40-bit ``lines'' of the Manchester machine storage system. The following is the text of Turing's 1949 paper, corrected to the
best of our ability to what we believe Turing intended. We have
made no changes in spelling, punctuation, or grammar.",
paper = "Turi49.pdf"
}\end{chunk}
\index{Hoare, Charles Antony Richard}
\begin{chunk}{ axiom.bib}
@article{Hoar81,
author = "Hoare, Charles Antony Richard",
title = {{The Emperor's Old Clothes}},
journal = "CACM",
volume = "24",
number = "2",
pages = "75-83",
year = "1981",
abstract =
"The author recounts his experiences in the implementation,
design, and standardization of computer programming languages, and
issues a warning for the future.",
paper = "Hoar18.pdf",
keywords = "DONE"
}\end{chunk}
\index{Wirth, Niklaus}
\begin{chunk}{ axiom.bib}
@article{Wirt95,
author = "Wirth, Niklaus",
title = {{A Plea for Lean Software}},
publisher = "IEEE",
journal = "Computer",
year = "1995",
pages = "64-68",
paper = "Wirt95.pdf",
keywords = "DONE"
}\end{chunk}
\index{Liskov, Barbara}
\begin{chunk}{ axiom.bib}
@misc{Lisk12,
author = "Liskov, Barbara",
title = {{Programming the Turing Machine}},
year = "2012",
link = "\url{https://www.youtube.com/watch?v=ibRar7sWulM}",
keywords = "DONE"
}\end{chunk}
\index{Di Franco, Anthony}
\index{Rubio-Gonzalez, Cindy}
\begin{chunk}{ axiom.bib}
@misc{Difr17,
author = "Di Franco, Anthony and Rubio-Gonzalez, Cindy",
title = {{A Comprehensive Study of Real-World Numerical Bug
Characteristics}},
year = "2017",
link = "\url{https://web.cs.ucdavis.edu/~rubio/includes/ase17.pdf}",
abstract =
"Numerical software is used in a wide variety of applications
including safety-critical systems, which have stringent
correctness requirements, and whose failures have catastrophic
consequences that endanger human life. Numerical bugs are known to
be particularly difficult to diagnose and fix, largely due to the
use of approximate representations of numbers such as floating
point. Understanding the characteristics of numerical bugs is the
first step to combat them more effectively. In this paper, we
present the first comprehensive study of real-world numerical
bugs. Specifically, we identify and carefully examine 269
numerical bugs from five widely-used numerical software libraries:
NumPy, SciPy, LAPACK, GNU Scientific Library, and Elemental. We
propose a categorization of numerical bugs, and discuss their
frequency, symptoms and fixes. Our study opens new directions in
the areas of program analysis, testing, and automated program
repair of numerical software, and provides a collection of
real-world numerical bugs.",
paper = "Difr17.pdf"
}\end{chunk}
\index{Talcott, Carolyn}
\begin{chunk}{ axiom.bib}
@inproceedings{Talc90,
author = "Talcott, Carolyn",
title = {{A Theory for Program and Data Type Specifications}},
booktitle = "DISCO 1990",
year = "1990",
pages = "91-100",
paper = "Talc90.pdf"
}\end{chunk}
\index{Jouannaud, Jean-Pierre}
\index{Marche, Claude}
\begin{chunk}{ axiom.bib}
@inproceedings{Joua90,
author = "Jouannaud, Jean-Pierre and Marche, Claude",
title = {{Completion modulo Associativity, Commutativity and
Identity (ACI)}},
booktitle = "DISCO 1990",
year = "1990",
pages = "111-120",
abstract =
"Rewriting with associativity, commutativity and identity has been
an open problem for a long time. In a recent paper [PBW89], Baird,
Peterson and Wilkerson introduced the notion of constrained
rewriting, to avoid the problem of non-termination inherent to the
use of identities. We build up on this idea in two ways: by giving
a complete set of rules for completion modulo these axioms; by
showing how to build appropriate orderings for proving termination
of constrained rewriting modulo associativity, commutativity and
identity.",
paper = "Joua90.pdf",
keywords = "printed"
}\end{chunk}
\index{Reynaud, Jean-Claude}
\begin{chunk}{ axiom.bib}
@inproceedings{Reyn90,
author = "Reynaud, Jean-Claude",
title = {{Putting Algebraic Components Together: A Dependent Type
Approach}},
booktitle = "DISCO 1990",
year = "1990",
pages = "141-150",
abstract =
"We define a framework based on dependent types for putting
algebraic components together. It is defined with freely generated
categories. In order to preserve initial, loose and constrained
semantics of components, we introduce the notion of
SPEC-categories which look like specific finitely co-complete
categories. A constructive approach which includes
parameterization techniques is used to define new components from
basic predefined ones. The problem of the internal coding of
external signature symbols is introduced.",
paper = "Reyn90.pdf",
keywords = "printed"
}\end{chunk}
\index{Dewar, M.C.}
\index{Richardson, M.G.}
\begin{chunk}{ axiom.bib}
@inproceedings{Dewa90,
author = "Dewar, M.C. and Richardson, M.G.",
title = {{Reconciling Symbolic and Numeric Computation in a
Practical Setting}},
booktitle = "DISCO 1990",
year = "1990",
pages = "195-204",
paper = "Dewa90.pdf",
keywords = "printed"
}\end{chunk}
\index{Jebelean, Tudor}
\begin{chunk}{ axiom.bib}
@inproceedings{Jebe93,
author = "Jebelean, Tudor",
title = {{Improving the Multiprecision Euclidean Algorithm}},
booktitle = "DISCO 1993",
year = "1993",
pages = "45-58",
publisher = "Springer",
abstract =
"We improve the implementation of Lehmer-Euclid algorithm for
multiprecision integer GCD computations by partial cosequence
computation on pairs of double digits, enhanced conditions for
exiting the partial cosequence computation, and approximative GCD
computation. The combined effect of these improvements is an
experimentally measured speed-up by a factor of 2 over the
currently used implementation.",
paper = "Jebe93.pdf",
keywords = "printed"
}\end{chunk}
\index{Hennicker, Rolf}
\begin{chunk}{ axiom.bib}
@inproceedings{Henn90,
author = "Hennicker, Rolf",
title = {{Context Induction: A Proof Principle for Behavioural
Abstractions}},
booktitle = "DISCO 1990",
year = "1990",
pages = "101-110",
abstract =
"An induction principle, called context induction, is presented
which is appropriate for the verification of behavioural
properties of abstract data types. The usefulness of the proof
principle is documented by several applications: the verification
of behavioural theorems over a behavioural specification, the
verification of behavioural implementations and the verification
of ``forget-restrict-identify'' implementations. In particular it is shown that behavioural implementations and
``forget-restrict-identify'' implementations (under certain
assumptions) can be characterized by the same context condition,
i.e. (under the given assumptions) both concepts are
equivalent. This leads to the suggestion to use context induction
as a uniform proof method for correctness proofs of formal
implementations.",
paper = "Henn90.pdf",
keywords = "printed"
}\end{chunk}
\index{Calmet, J.}
\index{Tjandra, I.A.}
\begin{chunk}{ axiom.bib}
@inproceedings{Calm93,
author = "Calmet, J. and Tjandra, I.A.",
title = {{A Unified-Algebra-based Specification Language for
Symbolic Computing}},
booktitle = "DISCO 1993",
year = "1993",
pages = "122-133",
publisher = "Springer",
abstract =
"A precise and perspicuous specification of mathematical domains
of computation and their inherently related type inference
mechanisms is a prerequisite for the design and systematic
development of a system for symbolic computing. This paper
describes FORMAL, a language for giving modular and
well-structured specifications of such domains and particularly of
``mathematical objects''. A novel framework for algebraic
specification involving so-called ``unified algebras'' has been
adopted, where sorts are treated as values. The adoption of this
framework aims also at being capable of specifying polymorphism,
unifying the notions of ``parametric'' and ``inclusion''
polymorphisms. Furthermore, the operational nature of the
specification formalisms allows a straightforward transformation
into an executable form.",
paper = "Calm93.pdf",
keywords = "printed, axiomref"
}\end{chunk}
\index{Fritzson, Peter}
\index{Engelson, Vadim}
\index{Viklund, Lars}
\begin{chunk}{ axiom.bib}
@inproceedings{Frit93,
author = "Fritzson, Peter and Engelson, Vadim and Viklund, Lars",
title = {{Variant Handling, Inheritance and Composition in the
ObjectMath Computer Algebra Environment}},
booktitle = "DISCO 1993",
year = "1993",
pages = "145-163",
publisher = "Springer",
abstract =
"ObjectMath is a high-level programming environment and modeling
language for scientific computing which supports variants and
graphical browsing in the environment and integrates
object-oriented constructs such as classes and single and multiple
inheritance within a computer algebra language. In addition,
composition of objects using the part-of relation and support for
solution of systems of equations is provided. This environment is
currently being used for industrial applications in scientific
computing. The ObjectMath environment is designed to handle
realistic problems. This is achieved by allowing the user to
specify transformations and simplifications of formulae in the
model, in order to arrive at a representation which is efficiently
solvable. When necessary, equations can be transformed to C++ code
for efficient numerical solution. The re-use of equations through
inheritance in general reduces models by a factor of two to three,
compared to a direct representation in the Mathematica computer
algebra language. Also, we found that multiple inheritance from
orthogonal classes facilitates re-use and maintenance of
application models.",
paper = "Frit93.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Grivas, Georgios}
\index{Maeder, Roman E.}
\begin{chunk}{ axiom.bib}
@inproceedings{Griv93,
author = "Grivas, Georgios and Maeder, Roman E.",
title = {{Matching and Unification for the Object-Oriented Symbolic
Computation System}},
booktitle = "DISCO 1993",
year = "1993",
pages = "164-176",
publisher = "Springer",
abstract =
"Term matching has become one of the most important primitive
operations for symbolic computation. This paper describes the
extension of the object-oriented symbolic computation system
AlgBench with pattern matching and unification facilities. The
various pattern objects are organized in subclasses of the class
of the composite expressions. This leads to a clear design and a
distributed implementation of the pattern matcher in the
subclasses. New pattern object classes can consequently be added
easily to the system. Huet's and our simple mark and retract
algorithm for standard unification as well as Stickel's algorithm
for associative commutative unification have been implemented in
an object-oriented style. Unifiers are selected at runtime. We
extend Mathematica's type-constrained pattern matching by taking
into account inheritance information from a user-defined hierarchy
of object types. The argument unification is basically instance
variable unification. The improvement of the pattern matching
operation of a rule- and object-based symbolic computation system
with unification in an object-oriented way seems to be very
appropriate.",
paper = "Griv93.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Santas, Philip S.}
\begin{chunk}{ axiom.bib}
@inproceedings{Sant93,
author = "Santas, Philip S.",
title = {{A Type System for Computer Algebra}},
booktitle = "DISCO 1993",
year = "1993",
pages = "177-191",
publisher = "Springer",
abstract =
"We examine type systems for support of subtypes and categories in
computer algebra systems. By modeling representation of instances
in terms of existential types instead of recursive types, we
obtain not only a simplified model, but we build a basis for
defining subtyping among algebraic domains. The introduction of
metaclasses, facilitates the task, by allowing the inference of
type classes. By means of type classes and existential types we
construct subtype relations without involving coercions.",
paper = "Sant93.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Zwillinger, Daniel}
\begin{chunk}{ axiom.bib}
@book{Zwil92,
author = "Zwillinger, Daniel",
title = {{Handbook of Integration}},
publisher = "Jones and Bartlett",
year = "1992",
isbn = "0-86720-293-9",
keywords = "axiomref"
}\end{chunk}
\index{Limongelli, C.}
\index{Temperini, M.}
\begin{chunk}{ axiom.bib}
@inproceedings{Limo93,
author = "Limongelli, C. and Temperini, M.",
title = {{On the Uniform Representation of Mathematical Data
Structures}},
booktitle = "DISCO 1993",
year = "1993",
pages = "319-330",
publisher = "Springer",
abstract =
"Topics about the integration of the numeric and symbolic
computation paradigms are discussed. Mainly an approach through a
uniform representation of numbers and symbols is presented, that
allows for the application of algebraic algorithms to numeric
problems. The $p$-adic constructions is the basis of the unifying
representation environment. An integrated version of the Hensel
algorithm is presented, which is able to perform symbolic and
numeric computations over instances of ground (concrete) and
parametric structures, and symbolic computations over instances of
abstract structures. Examples are provided to show how the
approach outlined and the proposed implementation can treat both
cases of symbolic and numeric computations. In the numeric case it
is shown that the proposed extension of the Hensel Algorithm can
allow for the exact manipulation of numbers. Moreover, such an
extension avoids the use of simplification algorithms, since the
computed results are already in simplified form.",
paper = "Limo93.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Walsh, Toby}
\begin{chunk}{ axiom.bib}
@inproceedings{Wals93,
author = "Walsh, Toby",
title = {{General Purpose Proof Plans}},
booktitle = "DISCO 1993",
year = "1993",
pages = "319-330",
publisher = "Springer",
abstract =
"One of the key problems in the design and implementation of
automated reasoning systems is the specification of heuristics to
control search. ``Proof planning'' has been developed as a
powerful technique for declaratively specifying high-level proof
strategies. This paper describes an extension to proof planning to
allow for the specification of more general purpose plans.",
paper = "Wals93.pdf"
}\end{chunk}
\index{Calmet, J.}
\index{Comon, H.}
\index{Lugiez, D.}
\begin{chunk}{ axiom.bib}
@article{Calm89,
author = "Calmet, J. and Comon, H. and Lugiez, D.",
title = {{Type Inference Using Unification in Computer Algebra}},
journal = "LNCS",
volume = "307",
publisher = "Springer",
paper = "Calm89.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Poll, Erik}
\begin{chunk}{ axiom.bib}
@misc{Pollxx,
author = "Poll, Erik",
title = {{The type system of Axiom}},
link = "\url{https://www.cs.ru.nl/E.Poll/talks/axiom.pdf}",
paper = "Pollxx.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Pottier, Francois}
\index{Regis-Gianas, Yann}
\begin{chunk}{ axiom.bib}
@inproceedings{Pott06,
author = "Pottier, Francois and Regis-Gianas, Yann",
title = {{Stratified Type Inference For Generalized Algebraic Data
Types}},
booktitle = "POPL'06",
year = "2006",
publisher = "ACM",
abstract =
"We offer a solution to the type inference problem for an
extension of Hindley and Milner's type system with generalized
algebraic data types. Our approach is in two {\sl strata}. The
bottom stratum is a core language that marries type
{\sl inference} in the style of Hindley and Milner with type
{\sl checking} for generalized algebraic data types. This results
in an extremely simple specification, where case constructs must
carry an explicit type annotation and type conversions must be
made explicit. The top stratum consists of (two variants of) an
independent {\sl shape inference} algorithm. This algorithm
accepts a source term that contains some explicit type
information, propagates this information in a local, predictable
way, and produces a new source term that carries more explicit
type information. It can be viewed as a preprocessor that helps
produce some of the type annotations required by the bottom
stratum. It is proven {\sl sound} in the sense that it never
inserts annotations that could contradict the type derivation that
the programmer has in mind.",
paper = "Pott06.pdf"
}\end{chunk}
\index{Calmet, Jacques}
\index{Lugiez, Denis}
\begin{chunk}{ axiom.bib}
@article{Calm87a,
author = "Calmet, Jacques and Lugiez, Denis",
title = {{A Knowledge-based System for Computer Algebra}},
journal = "ACM SIGSAM Bulletin",
volume = "21",
number = "1",
year = "1987",
abstract =
"This paper reports on a work in progress aiming at designing and
implementing a system for representing and manipulating
mathematical knowledge. Its kernel is a computer algebra system
but it shows several of the features of the so-called
knowledge-based systems. The main issues considered here are the
software engineering aspects of the project, the definition of a
new language to support the system and the use of AI techniques in
a field where algebraic algorithms are the building stones of
systems. This defines an environment which enables not only to
have data-bases of knowledge but also to implement an expert use
of this knowledge.",
paper = "Calm87a.pdf",
keywords = "axiomref"
}\end{chunk}
\begin{chunk}{ axiom.bib}
@misc{HOPLxx,
author = "unknown",
title = {{Scratchpad II}},
year = "2020",
link = "\url{https://hopl.info/showlanguage2.prx?exp=566}",
keywords = "axiomref"
}\end{chunk}
\index{Davenport, James H.}
\begin{chunk}{ axiom.bib}
@misc{Dave86a,
author = "Davenport, James H.",
title = {{SCRATCHPAD II Programming Language Reference}},
comment = "IBM T.J. Watson",
year = "1986"
}\end{chunk}
\index{Jenks, R.}
\index{Trager, B.}
\index{Watt, S.M.}
\index{Sutor, R.S.}
\begin{chunk}{ axiom.bib}
@misc{Jenk85,
author = "Jenks, R. and Trager, B. and Watt, S.M. and Sutor, R.S.",
title = {{Scratchpad II Programming Language Manual}},
year = "1985",
keywords = "axiomref"
}\end{chunk}
\index{Chudnovsky, David V.}
\index{Jenks, Richard D.}
\begin{chunk}{ axiom.bib}
@book{Chud89,
author = "Chudnovsky, David V. and Jenks, Richard D.",
title = {{Computer Algebra. Pure and Applied Mathematics}},
publisher = "Springer",
year = "1989"
}\end{chunk}
\index{Padget, J.A.}
\begin{chunk}{ axiom.bib}
@inproceedings{Padg85,
author = "Padget, J.A.",
title = {{Current Development in LISP}},
booktitle = "EUROCAL 85",
publisher = "Springer",
year = "1985",
abstract =
"This paper is divided into three sections. The first gives an
overview of the presently available and emerging dialects of LISP
and how design decisions within them affect symbolic algebra. The
second discusses recent developments, in particular, Common LISP
subsetting, portability, pure language research and mixed paradigm
systems. The third part is devoted to what is happening in
specialised LISP hardware in Japan, in the United States and in
Europe. The subject matter of each of these three sections is so
tightly interwoven however that the detailed discussion of some
material may be postponed until a later section although some
readers it might seem appropriate for inclusion earlier. It should
also be mentioned that this is a survey, therefore the items have
been selected for mention on the grounds of interest rather than
completeness.",
paper = "Padg85.pdf",
keywords = "axiomref"
}\end{chunk}
\index{Kreisel, G.}
\begin{chunk}{ axiom.bib}
@inproceedings{Krei85,
author = "Kreisel, G.",
title = {{Proof Theory and the Synthesis of Programs: Potential and
Limitations}},
booktitle = "EUROCAL 85",
publisher = "Springer",
paper = "Krei85.pdf"
}\end{chunk}
\index{Vazou, Niki}
\index{Breitner, Joachim}
\index{Kunkel, Rose}
\index{Horn, David Van}
\index{Hutton, Graham}
\begin{chunk}{ axiom.bib}
@inproceedings{Vazo18,
author = "Vazou, Niki and Breitner, Joachim and Kunkel, Rose and
Horn, David Van and Hutton, Graham",
title = {{Theorem Proving for All: Equational Reasoning in Liquid
Haskell}},
booktitle = "Haskell'18",
publisher = "ACM",
year = "2018",
isbn = "978-1-4503-5835-4",
link = "\url{http://www.cs.nott.ac.uk/~pszgmh/tpfa.pdf}",
abstract =
"Equational reasoning is one of the key features of pure
functional languages such as Haskell. To date, however, such
reasoning always took place externally to Haskell, either manually
on paper, or mechanised in a theorem prover. This article shows
how equational reasoning can be performed directly and seamlessly
within Haskell itself, and be checked using Liquid Haskell. In
particular, language learners -- to whom external theorem provers
are out of reach -- can benefit from having their proofs
mechanically checked. Concretely, we show how the equational
proofs and derivations from Hutton's textbook can be recast as
proofs in Haskell (spoiler: they look essentially the same).",
paper = "Vazo18.pdf"
}\end{chunk}
\index{Jaskelioff, Mauro}
\index{Ghani, Neil}
\index{Hutton, Graham}
\begin{chunk}{ axiom.bib}
@inproceedings{Jask08,
author = "Jaskelioff, Mauro and Ghani, Neil and Hutton, Graham",
title = {{Modularity and Implementation of Mathematical Operational
Semantics}},
publisher = "Elsevier",
year = "2008",
link = "\url{http://www.cs.nott.ac.uk/~pszgmh/modular.pdf}",
abstract =
"Structural operational semantics is a popular technique for
specifying the meaning of programs by means of inductive
clauses. One seeks syntactic restrictions on those clauses so that
the resulting operational semantics is well-behaved. This approach
is simple and concrete but it has some drawbacks. Turi pioneered a
more abstract categorical treatment based upon the idea that
operational semantics is essentially a distribution of syntax over
behaviour. In this article we take Turi's approach in two new
directions. Firstly, we show how to write operational semantics as
modular components and how to combine such components to specify
complete languages. Secondly, we show how the categorical nature
of Turi's operational semantics makes it ideal for implementation
in a functional programming language such as Haskell.",
paper = "Jask08.pdf"
}\end{chunk}
\index{Pickard, Mitchell}
\index{Hutton, Graham}
\begin{chunk}{ axiom.bib}
@inproceedings{Pick20,
author = "Pickard, Mitchell and Hutton, Graham",
title = {{Dependently-Typed Compilers Don't Go Wrong}},
booktitle = "Programming Languages",
publisher = "ACM",
year = "2020",
link = "\url{http://www.cs.nott.ac.uk/~pszgmh/well-typed.pdf}",
abstract =
"Compilers are difficult to write, and difficult to get
right. Bahr and Hutton recently developed a new technique for
calculating compilers directly from specifications of their
correctness, which ensures that the resulting compilers are
correct-by-construction. To date, however, this technique has only
been applicable to source languages that are untyped. In this
article, we show that moving to a dependently-typed setting allows
us to naturally support typed source languages, ensure that all
compilation components are type-safe, and make the resulting
calculations easier to mechanically check using a proof
assistant.",
paper = "Pick20.pdf"
}\end{chunk}
\index{Doorn, Floris van}
\index{Ebner, Gabriel}
\index{Lewis, Robert Y.}
\begin{chunk}{ axiom.bib}
@misc{Door20,
author = "Doorn, Floris van and Ebner, Gabriel and
Lewis, Robert Y.",
title = {{Maintaining a Library of Formal Mathematics}},
link = "\url{https://arxiv.org/pdf/2004.03673}",
year = "2020",
abstract =
"The Lean mathematical library mathlib is developed by a community
of users with very different backgrounds and levels of
experience. To lower the barrier of entry for contributors and to
lessen the burden of reviewing contributions, we have developed a
number of tools for the library which check proof developments for
subtle mistakes in the code and generate documentation suited for
our varied audience.",
paper = "Door20.pdf"
}\end{chunk}
- 2917. By daly
-
books/bookvolbib add references
Goal: Proving Axiom Sane
\index{Alturki, Musab A.}
\index{Moore, Brandon}
\begin{chunk}{ axiom.bib}
@misc{Altu19,
author = "Alturki, Musab A. and Moore, Brandon",
title = {{K vs Coq as Language Verification Frameworks}},
year = "2019",
link = "\url{https://runtimeverification.com/blog/k-vs-coq-as-language-verification-frameworks-part-1-of-3/}"
}\end{chunk}
\index{Rosu, Grigore}
\index{Lucanu, Dorel}
\index{Guth, Dwight}
\begin{chunk}{ axiom.bib}
@misc{Rosu20,
author = "Rosu, Grigore and Lucanu, Dorel and Guth, Dwight",
title = {{The K Framework}},
year = "2020",
link = "\url{http://www.kframework.org/index.php/Main_Page}"
}\end{chunk}
\index{Moore, Brandon}
\index{Pena, Lucas}
\index{Rosu, Grigore}
\begin{chunk}{ axiom.bib}
@inproceedings{Moor18,
author = "Moore, Brandon and Pena, Lucas and Rosu, Grigore",
title = {{Program Verification by Coinduction}},
booktitle = "Programming Languages and Systems",
publisher = "Springer",
year = "2018",
pages = "589-618",
link = "\url{https://link.springer.com/content/pdf/10.1007%2F978-3-319-89884-1.pdf}",
abstract =
"We present a novel program verification approach based on
coinduction, which takes as input an operational semantics. No
intermediates like program logics or verification condition
generators are needed. Specifications can be written using any
state predicates. We implement our approach in Coq, giving a
certifying language-independent verification framework. Our proof
system is implemented as a single module imported unchanged into
language-specific proofs. Automation is reached by instantiating a
generic heuristic with language-specific tactics. Manual
assistance is also smoothly allowed at points the automation
cannot handle. We demonstrate the power and versatility of our
approach by verifying algorithms as complicated as Schorr-Waite
graph marking and instantiating our framework for object languages
in several styles of semantics. Finally, we show that our
coinductive approach subsumes reachability logic, a recent
language-independent sound and (relatively) complete logic for
program verification that has been instantiated with operational
semantics of languages as complex as C, Java and Javascript.",
paper = "Moor18.pdf"
}\end{chunk}
\index{Carneiro, Mario}
\begin{chunk}{ axiom.bib}
@misc{Carn19a,
author = "Carneiro, Mario",
title = {{The Type Theory of Lean}},
year = "2019",
link = "\url{https://github.com/digama0/lean-type-theory/releases/v1.0}",
abstract =
"This thesis is a presentation of dependent type theory with
inductive types, a hierarchy of universes, with an impredicative
universe of propositions, proof irrelevance, and subsingleton
elimination, along with axioms for propositional extensionality,
quotient types, and the axiom of choice. This theory is notable
for being the axiomatic framework of the Lean theorem prover. The
axiom system is given here in complete detail, including
``optional'' features of the type system such as {\bf let} binders
and definitions. We provide a reduction of the theory to a
finitely axiomatized fragment utilizing a fixed set of inductive
types (the {\bf W}-type plus a few others), to ease the study of
this framework. The metatheory of this theory (which we will call
Lean) is studied. In
particular, we prove unique typing of the definitional equality,
and use this to construct the expected set-theoretic model, from
which we derive consistency of Lean relative to {\bf ZFC+} \{there
are $n$ inaccessible cardinals $\vert~n<\omega$\} (a relatively
weak large cardinal assumption). As Lean supports models of
{\bf ZFC} with $n$ inaccessible cardinals, this is optimal. We
also show a number of negative results, where the theory is
less nice than we would like. In particular, type checking is
undecidable, and the type checking as implemented by the Lean
theorem prover is a decidable non-transitive underapproximation
of the typing judgment. Non-transitivity also leads to lack of
subject reduction, and the reduction relation does not satisfy the
Church-Rosser property, so reduction to a normal form does not
produce a decision procedure for definitional equality. However, a
modified reduction relation allows us to restore the Church-Rosser
property at the expense of guaranteed termination, so that unique
typing is shown to hold.",
paper = "Carn19a.pdf",
keywords = "printed"
}\end{chunk}
\index{Carneiro, Mario}
\begin{chunk}{ axiom.bib}
@misc{Carn19b,
author = "Carneiro, Mario",
title = {{The Type Theory of Lean (slides)}},
year = "2019",
link = "\url{https://github.com/digama0/lean-type-theory/releases/v1.0}",
paper = "Carn19b.pdf",
keywords = "printed"
}\end{chunk}
\index{Milner, Robin}
\begin{chunk}{ axiom.bib}
@techreport{Miln73,
author = "Milner, Robin",
title = {{Models of LCF}},
type = "technical report",
institution = "Stanford Artificial Intelligence Laboratory",
number = "STAN-CS-73-332",
year = "1973",
link = "\url{http://i.stanford.edu/pub/cstr/reports/cs/tr/73/332/CS-TR-73-332.pdf}",
abstract =
"LCF is a deductive system for computable functions proposed by
D. Scott in 1969 in an unpublished memorandum. The purpose of the
present paper is to demonstrate the soundness of the system with
respect to certain models, which are partially ordered domains of
continuous functions. This demonstration was supplied by Scott in
his memorandum; the present paper is merely intended to make this
work more accessible.",
paper = "Miln73.pdf",
keywords = "printed"
}\end{chunk}
\index{Jenkins, Kris}
\begin{chunk}{ axiom.bib}
@misc{Jenk18,
author = "Jenkins, Kris",
title = {{Communicating with Types}},
year = "2018",
link = "\url{https://vimeo.com/302682323}"
}\end{chunk}
\index{Taylor, Sophie}
\begin{chunk}{ axiom.bib}
@misc{Tayl16,
author = "Taylor, Sophie",
title = {{A working (class) Introduction to Homotopy Type Theory}},
year = "2016",
link = "\url{https://www.youtube.com/watch?v=xv6SwPn1dlc}",
keywords = "DONE"
}\end{chunk}
\index{Voevodsky, Vladimir}
\begin{chunk}{ axiom.bib}
@misc{Voev16,
author = "Voevodsky, Vladimir",
title = {{Univalent Foundations of Mathematics}},
year = "2016",
link = "\url{https://www.youtube.com/watch?v=9f4sS9s-X2A}"
}\end{chunk}
\index{Zach, Richard}
\begin{chunk}{ axiom.bib}
@book{Zach20,
author = "Zach, Richard",
title = {{The Open Logic Text}},
year = "2020",
publisher = "The Open Logic Project",
link = "\url{http://builds.openlogicproject.org/open-logic-complete.pdf}",
paper = "Zach20.pdf"
}\end{chunk}
\index{Bezhanishvili, Nick}
\index{de Jongh, Dick}
\begin{chunk}{ axiom.bib}
@misc{Bezh05,
author = "Bezhanishvili, Nick and de Jongh, Dick",
title = {{Intuitionistic Logic}},
year = "2005",
link = "\url{https://www.cs.le.ac.uk/people/nb118/Publications/ESSLLI'05.pdf}",
paper = "Bezh05.pdf"
}\end{chunk}
\index{Milne, Peter}
\begin{chunk}{ axiom.bib}
@article{Miln94,
author = "Milne, Peter",
title = {{Classical Harmony: Rules of Inference and the Meaning of
the Logical Constants}},
journal = "Synthese",
volume = "100",
number = "1",
pages = "49-94",
year = "1994",
abstract =
"The thesis that, in a system of natural deduction, the meaning of
a logical constant is given by some or all of its introduction and
elimination rules has been developed recently by Dummett, Prawitz,
Tennant, and others, by the addition of harmony constraints.
Introduction and elimination rules for a logical constant must be
in harmony. By deploying harmony constraints, these authors have
arrived at logics no stronger than intuitionistic propositional
logic. Classical logic, they maintain, cannot be justified from
this proof-theoretic perspective. This paper argues that, while
classical logic can be formulated so as to satisfy a number of
harmony constraints, the meanings of the standard logical
constants cannot all be given by their introduction and/or
elimination rules; negation, in particular, comes under close
scrutiny.",
paper = "Miln94.pdf"
}\end{chunk}
\index{Tennant, Neil}
\begin{chunk}{ axiom.bib}
@article{Tenn79,
author = "Tennant, Neil",
title = {{Entailment and Proofs}},
journal = "Proc. of the Aristotelian Society",
volume = "79",
pages = "167-189",
year = "1979",
paper = "Tenn79.pdf"
}\end{chunk}
\index{Montanaro, Ashley}
\begin{chunk}{ axiom.bib}
@misc{Mont12,
author = "Montanaro, Ashley",
title = {{Computational Complexity Lecture Notes}},
year = "2012",
link = "\url{https://people.maths.bris.ac.uk/~csxam/teaching/cc-lecturenotes.pdf}",
paper = "Mont12.pdf"
}\end{chunk}
\index{Grayson, Daniel R.}
\begin{chunk}{ axiom.bib}
@misc{Gray18a,
author = "Grayson, Daniel R.",
title = {{The Mathematical Work of Vladimir Voevodsky}},
year = "2018",
link = "\url{https://www.youtube.com/watch?v=BanMgvdKP8E}",
keywords = "DONE"
}\end{chunk}
\index{Gentzen, G.}
\begin{chunk}{ axiom.bib}
@article{Gent35,
author = "Gentzen, G.",
title = {{Untersuchungen uber das logische Schliessen I and II}},
journal = "Mathematische Zeitschrift",
year = "1935",
publisher = "Springer",
paper = "Gent35.pdf"
}\end{chunk}
\index{Vapnik, Vladimir}
\begin{chunk}{ axiom.bib}
@misc{Vapn20,
author = "Vapnik, Vladimir",
title = {{Predicates, Invariants, and the Essence of Intelligence}},
year = "2020",
link = "\url{https://lexfridman.com/vladimir-vapnik-2}",
keywords = "DONE"
}\end{chunk}
\index{Baruch, Robert}
\begin{chunk}{ axiom.bib}
@misc{Baru19,
author = "Baruch, Robert",
title = {{Very Basic Introduction to Formal Verification}},
year = "2019",
link = "\url{https://www.youtube.com/watch?v=9e7F1XhjhKw}",
keywords = "DONE"
}\end{chunk}
\index{Carneiro, Mario}
\begin{chunk}{ axiom.bib}
@misc{Carn19c,
author = "Carneiro, Mario",
title = {{Specifying Verified x86 Software from Scratch}},
year = "2019",
link = "\url{https://arxiv.org/pdf/1907.01283.pdf}",
paper = "Carn19c.pdf",
keywords = "printed"
}\end{chunk}
\index{Bendersky, Eli}
\begin{chunk}{ axiom.bib}
@misc{Bend10,
author = "Bendersky, Eli",
title = {{Top Down Operator Precedence}},
year = "2010",
link = "\url{https://eli.thegreenplace.net/2010/01/02/top-down-operator-precedence-parsing}",
keywords = "DONE"
}\end{chunk}
\index{Baruch, Robert}
\begin{chunk}{ axiom.bib}
@misc{Baru19a,
author = "Baruch, Robert",
title = {{Cmod A7 Reference Manual}},
year = "2019",
link = "\url{https://reference.digilentinc.com/_media/reference/programmable-logic/cmod-a7/cmod_a7_rm.pdf}",
paper = "Baru19a.pdf",
keywords = "printed"
}\end{chunk}
\index{Baruch, Robert}
\begin{chunk}{ axiom.bib}
@misc{Baru19b,
author = "Baruch, Robert",
title = {{Cmod A7 Schematic}},
year = "2019",
link = "\url{https://reference.digilentinc.com/_media/cmod-a7/cmod_a7_sch.pdf}",
paper = "Baru19b.pdf",
keywords = "printed"
}\end{chunk}
\index{Reis, Leonardo Vieira dos Santos}
\index{Dilorio, Oliveira}
\index{Bigonha, Roberto S.}
\begin{chunk}{ axiom.bib}
@inproceedings{Reis14,
author = "Reis, Leonardo Vieira dos Santos and Dilorio, Oliveira and
Bigonha, Roberto S.",
title = {{A Mixed Approach for Building Extensible Parsers}},
booktitle = "Programming Language",
publisher = "Springer",
volume = "LNCS 8771",
pages = "1-15",
year = "2014",
abstract =
"For languages whose syntax is fixed, parsers are usually built
with a static structure. The implementation of features like macro
mechanisms or extensible languages requires the use of parsers
that may be dynamically extended. In this work, we discuss a mixed
approach for building efficient top-down dynamically extensible
parsers. Our view is based on the fact that a large part of the
parser code can be statically compiled and only the parts that are
dynamic should be interpreted for a more efficient processing. We
propose the generation of code for the base parser, in which hooks
are included to allow efficient extension of the underlying
grammar and activation of a grammar interpreter whenever it is
necessary to use an extended syntax. As a proof of concept, we
present a prototype implementation of a parser generator using
Adaptable Parsing Expression Grammars (APEG) as the underlying
method for syntax definition. We show that APEG has features which
allow an efficient implementation using the proposed mixed
approach.",
paper = "Reis14.pdf"
}\end{chunk}
\index{Ford, Bryan}
\begin{chunk}{ axiom.bib}
@article{Ford04,
author = "Ford, Bryan",
title = {{Parsing Expression Grammars: A Recognition-Based Syntactic
Foundation}},
journal = "SIGPLAN Notices",
volume = "39",
number = "1",
pages = "111-122",
year = "2004",
abstract =
"For decades we have been using Chomsky's generative system of
grammars, particularly context-free grammars (CFGs) and regular
expressions (REs), to express the syntax of programming languages
and protocols. The power of generative grammars to express
ambiguity is crucial to their original purpose of modelling
natural languages, but this very power makes it unnecessarily
difficult both to express and to parse machine-oriented languages
using CFGs. Parsing Expression Grammars (PEGs) provide an
alternative, recognition-based formal foundation for describing
machine-oriented syntax, which solves the ambiguity problem by not
introducing ambiguity in the first place. Where CFGs express
nondeterministic choice between alternatives, PEGs instead use
{\sl prioritized choice}. PEGs address frequently felt
expressiveness limitations of CFGs and REs, simplifying syntax
definitions and making it unnecessary to separate their lexical
and hierarchical components. A linear-time parser can be built for
any PEG, avoiding both the complexity and fickleness of LR parsers
and the inefficiency of generalized CFG parsing. While PEGs
provide a rich set of operators for constructing grammars, they
are reducible to two minimal recognition schemas developed around
1970, TS/TDPL and gTS/GTDPL, which are here proven equivalent in
effective recognition power.",
paper = "Ford04.pdf",
keywords = "printed"
}\end{chunk}
\index{Gueneau, Armael}
\begin{chunk}{ axiom.bib}
@phdthesis{Guen19,
author = "Gueneau, Armael",
title = {{Mechanized Verification of the Correctness and Asymptotic
Complexity of Programs}},
school = "University of Paris",
year = "2019",
link = "\url{http://gallium. inria.fr/ ~agueneau/ phd/manuscript. pdf}",
abstract =
"This dissertation is concerned with the question of formally
verifying that the implementation of an algorithm is not only
functionally correct (it always returns the right result), but
also has the right asymptotic complexity (it reliably computes the
result in the expected amount of time).In the algorithms literature, it is standard practice to
characterize the performance of an algorithm by indicating its
asymptotic time complexity, typically using Landau's ``big-O''
notation. We first argue informally that asymptotic complexity
bounds are equally useful as formal specifications, because they
enable modular reasoning: a $O$ bound abstracts over the concrete
cost expression of a program, and therefore abstracts over the
specifics of its implementation. We illustrate -- with the help of
small illustrative examples -- a number of challenges with the use
of the $O$ notation, in particular in the multivariate case, that
might be overlooked when reasoning informally.We put these considerations into practice by formalizing the $O$
notation in the Coq proof assistant, and by extending an existing
program verification framework, CFML, with support for a
methodology enabling robust and modular proofs of asymptotic
complexity bounds. We extend the existing framework of Separation
Logic with Time Credits, which allows to reason at the same time
about correctness and time complexity, and introduce negative time
credits. Negative time credits increase the expressiveness of the
logic, and enable convenient reasoning principles as well as
elegant specifications. At the level of specifications, we show
how asymptotic complexity specifications using $O$ can be
integrated and composed within Separation Logic with Time
Credits. Then, in order to establish such specifications, we
develop a methodology that allows proofs of complexity in
Separation Logic to be robust and carried out at a relatively high
level of abstraction, by relying on two key elements: a mechanism
for collecting and deferring constraints during the proof, and a
mechanism for semi-automatically synthesizing cost expressions
without loss of generality.We demonstrate the usefulness and practicality of our approach on
a number of increasingly challenging case studies. These include
algorithms whose complexity analysis is relatively simple (such as
binary search, which is nonetheless out of the scope of many
automated complexity analysis tools) and data structures (such as
Okasaki's binary random access lists). In our most challenging
case study, we establish the correctness and amortized complexity
of a state-of-the-art incremental cycle detection algorithm: our
methodology scales up to highly non-trivial algorithms whose
complexity analysis intimately depends on subtle functional
invariants, and furthermore makes it possible to formally verify
OCaml code which can then actually be used as part of real world
programs.",
paper = "Guen19.pdf"
}\end{chunk}
\begin{chunk}{ axiom.bib}
@misc{Fade17,
author = "Unknown",
title = {{Strongly Connected and Completely Specified Moore
Equivalent of a Mealy Machine}},
year = "2017",
link = "\url{https://cs.stackexchan ge.com/ questions/ 81127/strongly- connected- and-completely- specified- moore-equivalen t-of-a- mealy-machine}",
keywords = "DONE"
}\end{chunk}
\index{Hoare, C.A.R}
\begin{chunk}{ axiom.bib}
@misc{Hoar04,
author = "Hoare, C.A.R",
title = {{Unified Theories of Programming}},
year = "2004",
link = "\url{https://fi.ort. edu.uy/ innovaportal/ file/20124/ 1/04-hoard_ unified_ theories. pdf}",
abstract =
"Professional practice in a mature engineering discipline is based
on relevant scientific theories, usually expressed in the language
of mathematics. A mathematical theory of programming aims to
provide a similar basis for specification, design and
implementation of computer programs. The theory can be presented
in a variety of styles, including
\begin{enumerate}
\item Denotational, relating a program to a specification of its
observable properties and behaviour
\item Algebraic, providing equations and inequalities for
comparison, transformation and optimisation of designs and
programs.
\item Operational, describing individual steps of a possible
mechanical implementation
\end{enumerate}This paper presents simple theories of sequential
non-deterministic programming in each of these three styles; by
deriving each presentation from its predecessor in a cyclic
fashion, mutual consistency is assured.",
paper = "Hoar04.pdf",
keywords = "printed"
}\end{chunk}
\index{Davis, Ernest}
\begin{chunk}{ axiom.bib}
@misc{Davi19,
author = "Davis, Ernest",
title = {{The Use of Deep Learning for Symbolic Integration}},
year = "2019",
link = "\url{https://arxiv. org/pdf/ 1912.05752. pdf}",
abstract =
"Lample and Charton (2019) describe a system that uses deep
learning technology to compute symbolic, indefinite integrals, and
to find symbolic solutions to first- and second-order ordinary
differential equations, when the solutions are elementary
functions. They found that, over a particular test set, the system
could find solutions more successfully than sophisticated packages
for symbolic mathematics such as Mathematica run with a long
time-out. This is an impressive accomplishment, as far as it
goes. However, the system can handle only a quite limited subset
of the problems that Mathematica deals with, and the test set has
significant built-in biases. Therefore the claim that this
outperforms Mathematica on symbolic integration needs to be very
much qualified.",
paper = "Davi19.pdf",
keywords = "printed, DONE"
}\end{chunk}
\index{Pierce, Benjamin}
\index{Appel, Andrew W.}
\index{Weirich, Stephanie}
\index{Zdancewic, Steve}
\index{Shao, Zhong}
\index{Chlipala, Adam}
\begin{chunk}{ axiom.bib}
@misc{Pier16,
author = "Pierce, Benjamin and Appel, Andrew W. and Weirich, Stephanie
and Zdancewic, Steve and Shao, Zhong and Chlipala, Adam",
title = {{The Science of Deep Specification}},
year = "2016",
link = "\url{https://www.cis. upenn.edu/ ~bcpierce/ papers/ deepspec- hcss2016- slides. pdf}",
paper = "Pier16.pdf"
}\end{chunk}
\index{Pierce, Benjamin}
\begin{chunk}{ axiom.bib}
@misc{Pier18,
author = "Pierce, Benjamin",
title = {{The Science of Deep Specification}},
year = "2018",
link = "\url{https://www.cis. upenn.edu/ ~bcpierce/ papers/ pierce- etaps2018. pdf}",
paper = "Pier18.pdf"
}\end{chunk}
\index{Chlipala, Adam}
\index{Arvind}
\index{Sherman, Benjamin}
\index{Choi, Joonwon}
\index{Vijayaraghavan, Murali}
\begin{chunk}{ axiom.bib}
@misc{Chli17b,
author = "Chlipala, Adam and Arvind and Sherman, Benjamin and
Choi, Joonwon and Vijayaraghavan, Murali",
title = {{Kami: A Platform for High-Level Parametric Hardware
Specification and its Modular Verification}},
year = "2017",
abstract =
"It has become fairly standard in the programming languages
research world to verify functional programs in proof assistants
using induction, algebraic simplification, and rewriting. In this
paper, we introduce Kami, a Coq library that uses labelled
transition systems to enable similar expressive and modular
reasoning for hardware designs expressed in the style of the
Bluespec language. We can specify, implement, and verify realistic
designs entirely within Coq, ending with automatic extraction into
a pipeline that bottoms out in FPGAs. Our methodology has been
evaluated in a case study verifying an infinite family of
multicore systems, with cache-coherent shared memory and pipelined
cores implementing (the base integer subset of) the RISC-V
instruction set.",
paper = "Chli17b.pdf",
keywords = "printed"
}\end{chunk}
\index{Appel, Andrew W.}
\index{Beringer, Lennart}
\index{Chlipala, Adam}
\index{Pierce, Benjamin C.}
\index{Shao, Zhong}
\index{Weirich, Stephanie}
\index{Zdancewic, Steve}
\begin{chunk}{ axiom.bib}
@article{Appe17a,
author = "Appel, Andrew W. and Beringer, Lennart and Chlipala, Adam
and Pierce, Benjamin C. and Shao, Zhong and Weirich, Stephanie
and Zdancewic, Steve",
title = {{Position Paper: The Science of Deep Specification}},
journal = "Philosophical Transactions of the Royal Society",
volume = "375",
year = "2017",
link = "\url{https://royalsocietypu blishing. org/doi/ pdf/10. 1098/rsta. 2016.0331}",
abstract =
"We introduce our efforts within the project ``The Science of Deep
Specification'' to work out the key formal underpinnings of
industrial-scale formal specifications of software and hardware
components, anticipating a world where large verified systems are
routinely built out of smaller verified components that are also
used by many other projects. We identify an important class of
specification that has already been used in a few experiments that
connect strong component-correctness theorems across the work of
different teams. To help popularize the unique advantages of that
style, we dub it {\sl deep specification}, and we say that it
encompasses specifications that are {\sl rich}, {\sl two-sided},
{\sl formal}, and {\sl live} (terms that we define in the
article). Our core team is developing a proof-of-concept system
(based on the Coq proof assistant) whose specification and
verification work is divided across largely decoupled subteams at
our four institutions, encompassing hardware microarchitecture,
compilers, operating systems and applications, along with
cross-cutting principles and tools for effective specification. We
also aim to catalyse interest in the approach, not just by basic
researchers but also by users in industry.This article is part of the themed issue ``Verified trustworthy
software systems''",
paper = "Appe17a.pdf",
keywords = "printed"
}\end{chunk}
\index{Braibant, Thomas}
\index{Chlipala, Adam}
\begin{chunk}{ axiom.bib}
@inproceedings{Brai13,
author = "Braibant, Thomas and Chlipala, Adam",
title = {{Formal Verification of Hardware Synthesis}},
booktitle = "Computer Aided Verification (CAV'13)",
publisher = "unknown",
year = "2013",
abstract =
"We report on the implementation of a certified compiler for a
high-level hardware description language (HDL) called Fe-Si
(FEatherweight Synthesis). Fe-Si is a simplified version of
Bluespec, an HDL based on a notion of guarded atomic
actions. Fe-Si is defined as a dependently typed deep embedding in
Coq. The target language of the compiler corresponds to a
synthesisable subset of Verilog or VHDL. A key aspect of our
approach is that input programs to the compiler can be defined and
proved correct inside Coq. Then, we use extraction and a Verilog
back-end (written in OCaml) to get a certified version of a
hardware design.",
paper = "Brai13.pdf",
keywords = "printed"
}\end{chunk}
\index{Timany, Amin}
\index{Sozeau, Mattieu}
\begin{chunk}{ axiom.bib}
@inproceedings{Tima18,
author = "Timany, Amin and Sozeau, Mattieu",
title = {{Cumulative Inductive Types in Coq}},
booktitle = "3rd Conf. on Formal Structures for Computation and
Deduction",
publisher = "HAL",
link = "\url{https://hal.inria. fr/hal- 01952037/ document}",
abstract =
"In order to avoid well-known paradoxes associated with
self-referential definitions, high-order dependent type theories
stratify the theory using a countably infinite hierarchy of
universes (also known as sorts), $Type_0 : Type_1,\ldots$ Such
type systems are called cumulative if for any type $A$ we have
that $A:Type_i$ implies $A:Type_{i+1}$. The Predicative Calculus
of Inductive Constructions (pCIC) which forms the basis of the Coq
proof assistant, is one such system. In this paper we present the
Predicative Calculus of Cumulative Inductive Constructions
(pCUIC) which extends the cumulativity relation to inductive
types. We discuss cumulative inductive types as present in Coq 8.7
and their application to formalization and definitional
translations.",
paper = "Tima18.pdf"
}\end{chunk}
\index{Gilbert, Gaetan}
\begin{chunk}{ axiom.bib}
@phdthesis{Gilb19,
author = "Gilbert, Gaetan",
title = {{A Type Theory with Definitional Proof-Irrelevance}},
school = "Ecole Nationale Superieure Mines-Telecom Atlantique",
year = "2019",
link = "\url{https://gitlab.com/SkySkimmer/thesis/~/jobs/artifacts/master/download?job=build}",
paper = "Gilb19.zip"
}\end{chunk}
\index{Awodey, Steve}
\begin{chunk}{ axiom.bib}
@misc{Awod16,
author = "Awodey, Steve",
title = {{Univalence as a Principle of Logic}},
year = "2016",
link = "\url{https://www.andrew. cmu.edu/ user/awodey/ preprints/ ualp.pdf}",
abstract =
"It is sometimes convenient or useful in mathematics to treat
isomorphic structures as the same. The recently proposed
Univalence Axiom for the foundations of mathematics elevates this
idea to a foundational principle in the setting of Homotopy Type
Theory. It states, roughly, that isomorphic structures can be
identified. We explore the motivations and consequences, both
mathematical and philosophical, of making such a new logical
postulate.",
paper = "Awod16.pdf",
keywords = "printed"
}\end{chunk}
\index{McCorduck, Pamela}
\begin{chunk}{ axiom.bib}
@book{Mcco79,
author = "McCorduck, Pamela",
title = {{Machines Who Think}},
year = "1979",
publisher = "Freeman",
keywords = "shelf, DONE"
}\end{chunk}
\index{Gabriel, Richard}
\begin{chunk}{ axiom.bib}
@book{Gabr96,
author = "Gabriel, Richard",
title = {{Patterns of Software}},
link = "\url{https://www.dreamsongs .com/Files/ PatternsOfSoftw are.pdf}",
year = "1996",
publisher = "Oxford University Press",
paper = "Gabr96.pdf"}
\end{chunk}
\index{Gabriel, Richard}
\begin{chunk}{ axiom.bib}
@book{Gabr85,
author = "Gabriel, Richard",
title = {{Performance and Evaluation of Lisp Systems}},
link = "\url{https://www.dreamsongs .com/Files/ Timrep. pdf}",
year = "1985",
publisher = "MIT Press",
paper = "Gabr85.pdf"}
\end{chunk}
\index{Lucas, J.R.}
\begin{chunk}{ axiom.bib}
@misc{Luca03,
author = "Lucas, J.R.",
title = {{Minds, Machines and Godel}},
year = "2003",
link =
"\url{https://pdfs.semantics cholar. org/bde3/ b731bf73ef6052e 34c4465e57718c0 3b13f8. pdf}",
paper = "Luca03.pdf"
}\end{chunk}
\index{Lem, Stanislaw}
\begin{chunk}{ axiom.bib}
@book{Lemx68,
author = "Lem, Stanislaw",
title = {{His Master's Voice}},
publisher = "MIT Press",
year = "1968"
}\end{chunk}
- 2916. By daly
-
books/bookvolbib add references
Goal: Proving Axiom Sane
\begin{chunk}{ axiom.bib}
@misc{Wiki19,
author = "Wikipedia",
title = {{Algebraic data type}},
year = "2019",
link = "\url{https://en.wikipedia. org/wiki/ Algebraic_ data_type}"
}\end{chunk}
\index{MacKenzie, Donald}
\begin{chunk}{ axiom.bib}
@book{Mack01,
author = "MacKenzie, Donald",
title = {{Mechanizing Proof: Computing, Risk, and Trust}},
year = "2001",
publisher = "MIT Press",
isbn = "0-262-13393-8",
keywords = "shelf"
}\end{chunk}
\index{Selsam, Daniel}
\index{Ullrich, Sebastian}
\index{de Moura, Leonardo}
\begin{chunk}{ axiom.bib}
@misc{Sels20,
author = "Selsam, Daniel and Ullrich, Sebastian and
de Moura, Leonardo",
title = {{Tabled Typeclass Resolution}},
year = "2020",
link = "\url{https://www.dropbox. com/s/5nxklkxvd h7xna9/ typeclass. pdf}",
comment = "preprint",
abstract =
"Typeclasses have proven an elegant and effective way of managing
ad-hoc polymorphism in both programming languages and interactive
proof assistants. However, the increasingly sophisticated uses of
typeclasses within proof assistants has exposed two critical
problems with existing typeclass resolution procedures: the
{\sl diamond problem} which causes exponential running times in
both theory and practice, and the {\sl cycle problem}, which
causes loops in the presence of cycles and so thwarts many desired
uses of typeclasses. We present a new typeclass resolution
procedure, called {\sl tabled typeclass resolution}, that solves
these problems. We have implemented our procedure for the upcoming
version (v4) of the Lean Theorem Prover, and confirm empirically
that it provides exponential speedups compared to all existing
procedures in the presence of diamonds. Our procedure is
sufficiently lightweight that it could easily be implemented in
other systems. We hope our new procedure facilitates even more
sophisticated uses of typeclasses in both software development and
interactive theorem proving.",
paper = "Sels20.pdf"
}\end{chunk}
\index{Hoare, Tony}
\begin{chunk}{ axiom.bib}
@article{Hoar03,
author = "Hoare, Tony",
title = {{The Verifying Compiler: A Grand Challenge for Computing
Research}},
journal = "Journal of the ACM",
volume = "50",
number = "1",
pages = "63-69",
year = "2003",
abstract =
"This contribution proposes a set of criteria that distinguish a
grand challenge in science or engineering from the many other
kinds of short-term or long-term research problems that engage the
interest of scientists and engineers. As an example drawn from
Computer Science, it revives an old challenge: the construction and
application of a verifying compiler that guarantees correctness of
a program before running it.",
paper = "Hoar03.pdf",
keywords = "printed"
}\end{chunk}
\index{Pfenning, Frank}
\begin{chunk}{ axiom.bib}
@misc{Pfen04,
author = "Pfenning, Frank",
title = {{Automated Theorem Proving}},
year = "2004",
link = "\url{https://www.cs. cmu.edu/ ~fp/courses/ atp/handouts/ atp.pdf}",
paper = "Pfen04.pdf"
}\end{chunk}
\index{Horgan, John}
\begin{chunk}{ axiom.bib}
@misc{Horg93,
author = "Horgan, John",
title = {{The Death of Proof}},
year = "1993",
publisher = "Scientific American",
paper = "Horg93.pdf"
}\end{chunk}
\index{Licata, Dan}
\begin{chunk}{ axiom.bib}
@misc{Lica16,
author = "Licata, Dan",
title = {{A Functional Programmer's Guide to Homotopy Type Theory}},
year = "2016",
comment = "ICFP conference",
link = "\url{https://www.youtube. com/watch? v=caSOTjr1z18}"
}\end{chunk}
\index{Licata, Dan}
\begin{chunk}{ axiom.bib}
@misc{Lica16a,
author = "Licata, Dan",
title = {{Computing with Univalence}},
year = "2016",
comment = "Institute for Advanced Study",
link = "\url{https://www.youtube. com/watch? v=YDdDuq2AGw4}"
}\end{chunk}
\index{Pittman, Dan}
\begin{chunk}{ axiom.bib}
@misc{Pitt18,
author = "Pittman, Dan",
title = {{Proof Theory Impressionism: Blurring the Curry-Howard Line}},
year = "2018",
comment = "StrangeLoop 2018",
link = "\url{https://www.youtube. com/watch? v=jrVPB- Ad5Gc}"
}\end{chunk}
\index{Baker, Henry G.}
\begin{chunk}{ axiom.bib}
@misc{Bake93a,
author = "Baker, Henry G.",
title = {{Linear Logic and Permutation Stacks -- The Forth Shall Be First}},
year = "1993",
link = "\url{http://home.pipeline.com/~hbaker1/ForthStack.html}",
abstract =
"Girard's linear logic can be used to model programming languages
in which each bound variable name has exactly one ``occurence'' --
i.e., no variable can have implicit ``fan-out'', multiple uses
require explicit duplication. Among other nice properties,
``linear'' languages need no garbage collector, yet have no
dangling reference problems. We show a natural equivalence between
a ``linear'' programming language and a stack machine in which the
top items can undergo arbitrary permutations. Such permutation
stack machines can be considered combinator abstractions of
Moore's {\sl Forth} programming language.",
keywords = "DONE"
}\end{chunk}
\index{Dijkstra, Edsger W.}
\begin{chunk}{ axiom.bib}
@misc{Dijk78,
author = "Dijkstra, Edsger W.",
title = {{A Political Pamphlet From The Middle Ages}},
year = "1978",
comment = "EWD638",
link = "\url{https://www.cs. utexas. edu/users/ EWD/transcripti ons/EWD06xx/ EWD638. html}"
}\end{chunk}
\index{Bridges, Douglas}
\index{Reeves, Steve}
\begin{chunk}{ axiom.bib}
@article{Brid99,
author = "Bridges, Douglas and Reeves, Steve",
title = {{Constructive Mathematics in Theory and Programming
Practice}},
journal = "Philosophia Mathematica",
volume = "7",
number = "3",
year = "1999",
pages = "65-104",
paper = "Brid99.pdf",
keywords = "printed"
}\end{chunk}
\index{Pearce, David J.}
\index{Groves, Lindsay}
\begin{chunk}{ axiom.bib}
@misc{Pear15,
author = "Pearce, David J. and Groves, Lindsay",
title = {{Designing a Verifying Compiler: Lessons Learned from
developing Whiley}},
year = "2015",
link = "\url{https://homepages. ecs.vuw. ac.nz/~ djp/files/ SCP15-preprint. pdf}",
abstract =
"An ongoing challenge for computer science is the development of a
tool which automatically verifies programs meet their
specifications, and are free from runtime errors such as
divide-by-zero, array out-of-bounds and null dereferences. Several
impressive systems have been developed to this end, such as
ESC/Java and Spec\#, which build on existing programming languages
(e.g., Java, C\#). We have been developing a programming language
from scratch to simplify verification, called Whiley, and an
accompanying verifying compiler. In this paper, we present a
technical overview of the verifying compiler and document the
numerous design decisions made. Indeed, many of our designs
reflect those of similar tools. However, they have often been
ignored in the literature and/or spread thinly throughout. In
doing this, we hope to provide a useful resource for those
building verifying compilers.",
paper = "Pear15.pdf",
keywords = "printed"
}\end{chunk}
\index{Rucker, Rudy}
\begin{chunk}{ axiom.bib}
@book{Ruck87,
author = "Rucker, Rudy",
title = {{Mathenauts: Tales of Mathematical Wonder}},
year = "1987",
comment = "Norman Kagan: Four Brands of Impossible",
publisher = "Arbor House Publishing"
}\end{chunk}
\index{Woodcock, Jim}
\index{Davies, Jim}
\begin{chunk}{ axiom.bib}
@book{Wood20,
author = "Woodcock, Jim and Davies, Jim",
title = {{Using Z: Specification, Refinement, and Proof}},
year = "2020",
link = "\url{http://www.cs.cmu.edu/~15819/zedbook.pdf}",
publisher = "CMU PDF",
paper = "Wood20.pdf"
\index{Chang, Stephen}
\index{Ballantyne, Michael}
\index{Turner, Milo}
\index{Bowman, William J.}
\begin{chunk}{ axiom.bib}
@article{Chan20,
author = "Chang, Stephen and Ballantyne, Michael and Turner, Milo
and Bowman, William J.",
title = {{Dependent Type Systems as Macros}},
journal = "Proc. ACM Program Lang.",
volume = "4",
year = "2020",
abstract =
"We present {\tt TURNSTYLE+}, a high-level, macros-based metaDSL
for building dependently typed languages. With it, programmers may
rapidly prototype and iterate on the design of new dependently
typed features and extensions. Or they may create entirely new
DSLs whose dependent type ``power'' is tailored to a specific
domain. Our framework's support of language-oriented programming
also makes it suitable for experimenting with systems of
interacting components, e.g. a proof assistant and its companion
DSLs. This paper explains the implementation details of
{\tt TURNSTYLE+}, as well as how it may be used to create a wide
variety of dependently typed languages, from a lightweight one
with indexed types, to a full spectrum proof assistant, complete
with a tactic system and extensions for features like sized types
and SMT interaction.",
paper = "Chan20.pdf",
keywords = "printed"
}\end{chunk}
\index{Warne, Henrik}
\begin{chunk}{ axiom.bib}
@misc{Warn19,
author = "Warne, Henrik",
title = {{More Good Programming Quotes, Part 3}},
year = "2019",
link = "\url{https://henrikwarne. com/2019/ 04/03/more- good-programmin g-quotes- part-3}"
}\end{chunk}
\index{Warne, Henrik}
\begin{chunk}{ axiom.bib}
@misc{Warn16,
author = "Warne, Henrik",
title = {{More Good Programming Quotes}},
year = "2016",
link = "\url{https://henrikwarne. com/2016/ 04/17/more- good-programmin g-quotes}"
}\end{chunk}
\index{Baccala, Brent W.}
\begin{chunk}{ axiom.bib}
@misc{Bacc,
author = "Baccala, Brent W.",
title = {{The Facebook Integral}},
year = "2020",
link = "\url{https://ec2.freesoft. org/blogs/ soapbox/ the-facebook- integral}"
}\end{chunk}
\index{Baccala, Brent W.}
\begin{chunk}{ axiom.bib}
@misc{Bacca,
author = "Baccala, Brent W.",
title = {{The Facebook Integral (solved with Axiom and Sage)}},
year = "2020",
link = "\url{https://youtu. be/tz1LwfJlMuo}"
}\end{chunk}
\index{Baccala, Brent W.}
\begin{chunk}{ axiom.bib}
@misc{Baccb,
author = "Baccala, Brent W.",
title = {{The Risch Theorem}},
year = "2020",
link = "\url{https://youtu. be/BGnLwhXb204}"
}\end{chunk}
\index{Goel, Shilpi}
\index{Slobodova, Anna}
\index{Sumners, Rob}
\index{Swords, Sol}
\begin{chunk}{ axiom.bib}
@misc{Goel19,
author = "Goel, Shilpi and Slobodova, Anna and Sumners, Rob and
Swords, Sol",
title = {{Verifying X86 Instruction Implementations}},
year = "2019",
link = "\url{https://arxiv. org/abs/ 1912.10285}",
paper = "Goel19.pdf"
}\end{chunk}
\index{Boizumault, Patrice}
\index{Djamboulian, Ara M.}
\index{Fattouh, Jamal}
\begin{chunk}{ axiom.bib}
@book{Boiz93,
author = "Boizumault, Patrice and Djamboulian, Ara M. and Fattouh, Jamal",
title = {{The Implementation of Prolog}},
year = "1993",
publisher = "Princeton Legacy Library",
keywords = "shelf"
}\end{chunk}
\index{Clark, K.L.}
\index{McCabe, F.G.}
\begin{chunk}{ axiom.bib}
@book{Clar84,
author = "Clark, K.L. and McCabe, F.G.",
title = {{micro-PROLOG: Programming in Logic}},
year = "1984",
publisher = "Prentice-Hall",
isbn = "0-13-581264-X",
keywords = "shelf"
}\end{chunk}
\index{Abelson, Harold}
\index{Sussman, Gerald Jay}
\index{Sussman, Julie}
\begin{chunk}{ axiom.bib}
@book{Abel86,
author = "Abelson, Harold and Sussman, Gerald Jay and Sussman, Julie",
title = {{Structure and Interpretation of Computer Programs}},
year = "1986",
publisher = "The MIT Press",
isbn = "0-262-01077-1",
keywords = "shelf"
}\end{chunk}
\index{Chandy, K. Mani}
\index{Misra, Jayadev}
\begin{chunk}{ axiom.bib}
@book{Chan89,
author = "Chandy, K. Mani and Misra, Jayadev",
title = {{Parallel Program Design}},
year = "1989",
publisher = "Addison-Wesley Publishing",
isbn = "0-201-05866-9",
keywords = "shelf"
}\end{chunk}
\index{Bundy, Alan}
\begin{chunk}{ axiom.bib}
@book{Bund83,
author = "Bundy, Alan",
title = {{The Computer Modelling of Mathematical Reasoning}},
year = "1983",
publisher = "Academic Press",
isbn = "0-12-141250-4",
keywords = "shelf"
}\end{chunk}
\index{Hamilton, A.G.}
\begin{chunk}{ axiom.bib}
@book{Hami78,
author = "Hamilton, A.G.",
title = {{Logic for Mathematicians}},
year = "1978",
publisher = "Cambridge University Press",
isbn = "0-521-29291-3",
keywords = "shelf"
}\end{chunk}
\index{Michaelson, S.}
\index{Milner, R.}
\begin{chunk}{ axiom.bib}
@book{Mich76,
author = "Michaelson, S. and Milner, R.",
title = {{Automata Languages and Programming}},
year = "1976",
publisher = "Edinburgh University Press",
isbn = "0-85224-308-1",
keywords = "shelf"
}\end{chunk}
\index{Campbell, J.A.}
\begin{chunk}{ axiom.bib}
@book{Camp84,
author = "Campbell, J.A.",
title = {{Implementations of PROLOG}},
year = "1984",
publisher = "Ellis Horwood Limited",
isbn = "0-85312-675-5",
keywords = "shelf"
}\end{chunk}
\index{Reichenbach, Hans}
\begin{chunk}{ axiom.bib}
@book{Reic47,
author = "Reichenbach, Hans",
title = {{Elements of Symbolic Logic}},
year = "1947",
publisher = "The MacMillan Company",
keywords = "shelf"
}\end{chunk}
\index{Oesterle, John A.}
\begin{chunk}{ axiom.bib}
@book{Oest53,
author = "Oesterle, John A.",
title = {{Logic: The Art of Defining and Reasoning}},
year = "1953",
publisher = "Prentice-Hall",
keywords = "shelf"
}\end{chunk}
\index{Lewis, Robert Y.}
\index{Wu, Minchao}
\begin{chunk}{ axiom.bib}
@misc{Lewi17,
author = "Lewis, Robert Y. and Wu, Minchao",
title = {{A Bi-directional extensible ad hoc interface between Lean
and Mathematica}},
year = "2017",
link = "\url{https://robertylewis. com/leanmm/ lean_mm. pdf}",
abstract =
"We implement a user-extensible ad hoc connection between the Lean
proof assistant and the computer algebra system Mathematica. By
reflecting the syntax of each system in the other and providing a
flexible interface for extending translation, our connection
allows for the exchange of arbitrary information between the two
systems. We show how to make use of the Lean metaprogramming
framework to verify certain Mathematica computations, so that the
rigor of the proof assistant is not compromised. We also establish
a connection in the opposite direction, using Mathematica to
import and process proof terms from Lean.",
paper = "Lewi17.pdf",
keywords = "printed"
}\end{chunk}
\begin{chunk}{ axiom.bib}
@misc{Unkn20,
author = "Unknown",
title = {{Programming is Terrible -- Lessons Learned from a life
wasted} },
year = "2020",
link = "\url{https://programmingist errible. com/post/ 65781074112/ devils- dictionary- of-programming}"
}\end{chunk}
\begin{chunk}{ axiom.bib}
@misc{Comm16,
author = "Unknown",
title = {{A very comprehensive and precise spec}},
year = "2016",
link =
"\url{http://www.commitstrip .com/en/ 2016/08/ 25/a-very- comprehensive- and-precise- spec}"
}\end{chunk}
\index{Pfenning, Frank}
\index{Schurmann, Carsten}
\begin{chunk}{ axiom.bib}
@misc{Pfen99,
author = "Pfenning, Frank and Schurmann, Carsten",
title = {{System Description: Twelf -- A Meta-Logical Framework for
Deductive Systems}},
year = "1999",
link = "\url{https://www.cs. cmu.edu/ ~fp/papers/ cade99. pdf}",
abstract =
"Twelf is a meta-logical framework for the specification,
implementation, and meta-theory of deductive systems from the
theory of programming languages and logics. It relies on the LF
type theory and the judgements-as-types methodology for
specifications [HHP93], a constraint logic programming interpreter
for implementation [Pfe91], and the meta-logic $M_2$ for reasoning
about object languages encoded in LF [SP98]. It is a significant
extension and complete reimplementation of the Elf system [Pfe94].Twelf is written in Standard ML and runs under SML of New Jersey
and MLWorks on Unix and Window platforms. The current version
(1.2) is distributed with a complete manual, example suites, a
tutorial in the form of on-line lecture notes [Pfe], and an Emacs
interface. Source and binary distributions are accessible via the
Twelf home page \url{http://www.cs. cmu.edu/ ~twelf}.",
paper = "Pfen99.pdf",
keywords = "printed"
}\end{chunk}
\index{Pfenning, Frank}
\begin{chunk}{ axiom.bib}
@misc{Pfen94,
author = "Pfenning, Frank",
title = {{Elf: A Meta-Language for Deductive Systems}},
year = "1994",
paper = "Pfen94.pdf",
keywords = "printed, DONE"
}\end{chunk}
\index{Thornton, Jacob}
\begin{chunk}{ axiom.bib}
@misc{Thor12,
author = "Thornton, Jacob",
title = {{What is Open Source and Why Do I Feel So Guilty}},
year = "2012",
link = "\url{https://www.youtube. com/watch? v=UIDb6VBO9os}"
}\end{chunk}
\index{Bazerman, Gershom}
\begin{chunk}{ axiom.bib}
@misc{Baze14,
author = "Bazerman, Gershom",
title = {{Homotopy Type Theory: What's the Big Idea}},
year = "2014",
comment = "Lambda Jam 2014",
link = "\url{https://www.youtube. com/watch? v=OupcXmLER7I}"
}\end{chunk}
\index{Voevodsky, Vladimir}
\begin{chunk}{ axiom.bib}
@misc{Voev10,
author = "Voevodsky, Vladimir",
title = {{The Equivalence Axiom and Univalent Models of Type
Theory} },
year = "2010",
link = "\url{https://www.math. ias.edu/ ~vladimir/ Site3/Univalent _Foundations_ files/CMU_ talk.pdf}",
abstract =
"I will show how to define, in any type system with dependent
sums, products and Martin-Lof identity types, the notion of a
homotopy equivalence between two types and how to formulate the
Equivalence Axiom which provides a natural way to assert that
``two homotopy equivalent types are equal''. I will then sketch a
construction of a model of one of the standard Martin-Lof type
theories which satisfies the equivalence axiom and the excluded
middle thus proving that M.L. type theory with excluded middle and
equivalence axiom is at least as consistent as ZFC theory.Models which satisfy the equivalence axiom are called
univalent. This is a totally new class of models and I will argue
that the semantics which they provide leads to the first
satisfactory approach to type-theoretic formalization of
mathematics.",
paper = "Voev10.pdf",
keywords = "printed"
}\end{chunk}
- 2915. By daly
-
books/bookvolbib add references
Goal: Proving Axiom Sane
\index{Kell, Stephen}
\begin{chunk}{ axiom.bib}
@inproceedings{Kell14,
author = "Kell, Stephen",
title = "In Search of Types",
booktitle = "Int. Symp. on New Ideas, New Paradigms, and Reflections
on Programming and Software (Onward!)",
publisher = "ACM",
year = "2014",
abstract =
"The concept of ``type'' has been used without a consistent,
precise definition in discussions about programming languages for
60 years. In this essay I explore various concepts lurking behind
distinct uses of this word, highlighting two traditions in which
the word came into use largely independently: engineering
traditions on the one hand, and those of symbolic logic on the
other. These traditions are founded on differing attitudes to the
nature and purpose of abstraction, but their distinct uses of
``type'' have never been explicitly unified. One result is that
discourse {\sl across} these traditions often finds itself at
cross purposes, such as {\sl overapplying} one sense of ``type''
where another is appropriate, and occasionally proceeding to draw
wrong conclusions. I illustrate this with examples from well-known
and justly well-regarded literature, and argue that ongoing
developments in both the theory and practice of programming make
now a good time to resolve these problems.",
paper = "Kell14.pdf"
}\end{chunk}
\index{Dijkstra, Edsger W.}
\begin{chunk}{ axiom.bib}
@misc{Dijk00,
author = "Dijkstra, Edsger W.",
title = {{EWD1305 Archive: Answers to questions from students of
Software Engineering}},
link = "\url{https://www.cs.utexas.edu/users/EWD/transcriptions/EWD13xx/EWD1305.html}",
year = "2000"
}\end{chunk}
\index{Jaffe, Arthur}
\index{Quinn, Frank}
\begin{chunk}{ axiom.bib}
@article{Jaff93,
author = "Jaffe, Arthur and Quinn, Frank",
title = {{``Theoretical Mathematics'': Towards a Cultural Synthesis
of Mathematics and Theoretical Physics}},
journal = "Bull. of the American Mathematical Society",
volume = "29",
number = "1",
pages = "1-13",
year = "1993",
abstract =
"Is speculative mathematics dangerous? Recent interactions
between physics and mathematics pose the question with some force:
traditional mathematical norms discourage speculation, but it is
the fabric of theoretical physics. In practice there can be
benefits, but there can also be unpleasant and destructive
consequences. Serious caution is required, and the issue should be
considered before, rather than after, obvious damage occurs. With
the hazards carefully in mind, we propose a framework that should
allow a healthy and positive role for speculation.",
paper = "Jaff93.pdf"
}\end{chunk}
\begin{chunk}{ axiom.bib}
@misc{Lean19,
author = "Lean",
title = {{Lean Chat Room}},
link = "\url{https://leanprover.zulipchat.com}",
year = "2019"
}\end{chunk}
\index{Buzzard, Kevin}
\begin{chunk}{ axiom.bib}
@misc{Xena19,
author = "Buzzard, Kevin",
title = {{Xena}},
link = "\url{https://xenaproject.wordpress.com}",
year = "2019"
}\end{chunk}
\index{Hudson, Andrew Dana}
\begin{chunk}{ axiom.bib}
@misc{Huds19,
author = "Hudson, Andrew Dana",
title = {{A Priest, a Rabbi, and a Robot Walk Into a Bar}},
year = "2019",
link =
"\url{https://slate.com/technology/future-tense-fiction-priest-rabbi-robot-bar-andrew-hudson.html}",
abstract =
"A new short story looks at how artificial intelligence
could support, and distort, faith."
}\end{chunk}
\index{Russell, Bertrand}
\begin{chunk}{ axiom.bib}
@book{Russ1920,
author = "Russell, Bertrand",
title = {{Introduction to Mathematical Philosophy}},
publisher = "George Allen and Unwin, Ltd",
year = "1920"
}\end{chunk}
\index{Bauer, Andrej}
\begin{chunk}{ axiom.bib}
@misc{Baue19a,
author = "Bauer, Andrej",
title = {{Derivations as computations}},
year = "2019",
link = "\url{https://youtube.com/watch?v=YZqOVsuyQyQ}"
}\end{chunk}
\index{Bottu, Gert-Jan}
\index{Xie, Ningning}
\index{Mardirosian, Klara}
\index{Schrijvers, Tom}
\begin{chunk}{ axiom.bib}
@misc{Bott19,
author = "Bottu, Gert-Jan and Xie, Ningning and Mardirosian, Klara
and Schrijvers, Tom",
title = {{Coherence of Type Class Resolution}},
year = "2019",
link = "\url{http://youtube.com/watch?v=bmHd0MoCliM}",
abstract =
"Elaboration-based type class resolution, as found in languages
like Haskell, Mercury and PureScript, is generally
{\sl nondeterministic}: there can be multiple ways to satisfy a
wanted constraint in terms of global instances and locally given
constraints. Coherence is the key property that keeps this sane;
it guarantees that, despite the nondeterminism, programs will
behave predictably. Even though elaboration-based resolution is
generally assumed coherent, as far as we know, there is no formal
proof of this property in the presence of sources of
nondeterminism, like superclasses and flexible contexts. This paper
provides a formal proof to remedy the situation. The
proof is non-trivial because the semantics elaborates resolution
into a target language where different elaborations can be
distinguished by contexts that do not have a source language
counterpart. Inspired by the notion of full abstraction, we
present a two-step strategy that first elaborates
nondeterministically into an intermediate language that preserves
contextual equivalence, and then deterministically elaborates from
there into the target language. We use an approach based on
logical relations to establish contextual equivalence and thus
coherence for the first step of elaboration, while the second
step's determinism straightforwardly preserves this coherence
property.",
paper = "Bott19.pdf"
}\end{chunk}
\index{Lample, Guillaume}
\index{Charton, Francois}
\begin{chunk}{ axiom.bib}
@misc{Lamp19,
author = "Lample, Guillaume and Charton, Francois",
title = {{Deep Learning for Symbolic Mathematics}},
year = "2019",
link = "\url{https://arxiv.org/pdf/1912.01412.pdf}",
abstract =
"Neural networks have a reputation for being better at solving
statistical or approximate problems than at performing
calculations or working with symbolic data. In this paper, we show
that they can be surprisingly good at more elaborated tasks in
mathematics, such as symbolic integration and solving differential
equations. We propose a syntax for representing mathematical
problems, and methods for generating large datasets that can be
used to train sequence-to-sequence models. We achieve results that
outperform commercial Computer Algebra Systems such as Matlab or
Mathematica.",
paper = "Lamp19.pdf"
}\end{chunk}
\index{Loh, Po-Shen}
\begin{chunk}{ axiom.bib}
@misc{Lohx19,
author = "Loh, Po-Shen",
title = {{A Simple Proof of the Quadratic Formula}},
year = "2019",
link = "\url{https://arxiv.org/pdf/1910.06709.pdf}",
abstract =
"This article provides a simple proof of the quadratic formula,
which also produces an efficient and natural method for solving
general quadratic equations. The derivation is computationally
light and conceptually natural, and has the potential to demystify
quadratic equations for students worldwide.",
paper = "Lohx19.pdf"
}\end{chunk}
\index{Kay, Alan}
\begin{chunk}{ axiom.bib}
@misc{Kayx19,
author = "Kay, Alan",
title = {{Alan Kay Speaks at ATLAS Institute}},
year = "2019",
link = "\url{https://www.youtube.com/watch?v=nOrdzDaPYV4}"
}\end{chunk}
\index{Bauer, Andrej}
\index{Stone, Christopher A.}
\begin{chunk}{ axiom.bib}
@misc{Baue07,
author = "Bauer, Andrej and Stone, Christopher A.",
title = {{RZ: a Tool for Bringing Constructive and Computational
Mathematics Closer to Programming Practice}},
year = "2007",
link = "\url{http://math.andrej.com/wp-content/uploads/2007/cie-long.pdf}",
abstract =
"Realizability theory is not only a fundamental tool in logic and
computability, but also has direct application to the design and
implementation of programs: it can produce interfaces for the data
structure corresponding to a mathematical theory. Our tool, called
RZ, serves as a bridge between the worlds of constructive
mathematics and programming. By using the realizability
interpretation of constructive mathematics, RZ translates
specifications in constructive logic into annotated interface code
in Objective Caml. The system supports a rich input language
allowing descriptions of complex mathematical structures. RZ does
not extract code from proofs, but allows any implementation
method, from handwritten code to code extracted from proofs by
other tools.",
paper = "Baue07.pdf",
keywords = "printed"
}\end{chunk}
\index{Awodey, Steve}
\index{Bauer, Andrej}
\begin{chunk}{ axiom.bib}
@misc{Awod04,
author = "Awodey, Steve and Bauer, Andrej",
title = {{Propositions as [Types]}},
year = "2004",
link = "\url{http://math.andrej.com/asset/data/bracket_types.pdf}",
abstract =
"Image factorizations in regular categories are stable under
pullbacks, as they model a natural modal operator in dependent
type theory. This unary type constructor [A] has turned up
previously in a syntactic form as a way of erasing computational
content, and formalizing a notion of proof irrelevance. Indeed,
semantically, the notion of a {\sl support} is sometimes used as
surrogate proposition asserting inhabitation of an indexed
family. We give rules for bracket types in dependent type theory and
provide complete semantics using regular categories. We show that
dependent type theory with the unit type, strong extensional
equality types, strong dependent sums, and bracket types is the
internal type theory of regular categories, in the same way that
the usual dependent type theory with dependent sums and products
is the internal type theory of locally cartesian closed
categories. We also show how to interpret first-order logic in type theory
with brackets, and we make use of the translation to compare type
theory with logic. Specifically, we show that the
propositions-as-types interpretation is complete with respect to a
certain fragment of intuitionistic first-order logic, in the sense
that a formula from the fragment is derivable in intuitionistic
first-order logic if, and only if, its interpretation in dependent
type theory is inhabited. As a consequence, a modified
double-negation translation into type theory (without bracket
types) is complete, in the same sense, for all of classical
first-order logic.",
paper = "Awod04.pdf",
keywords = "printed"
}\end{chunk}
\index{Hoare, C.A.R}
\index{Shepherdson, J.C.}
\begin{chunk}{ axiom.bib}
@book{Hoar85,
author = "Hoare, C.A.R and Shepherdson, J.C.",
title = {{Mathematical Logic and Programming Languages}},
year = "1985",
publisher = "Prentice-Hall",
isbn = "0-13-561465-1"
}\end{chunk}
\index{Luckham, D.C.}
\index{Park, D.M.R}
\index{Paterson, M.S.}
\begin{chunk}{ axiom.bib}
@article{Luck70,
author = "Luckham, D.C. and Park, D.M.R and Paterson, M.S.",
title = {{On Formalised Computer Programs}},
journal = "J. of Computer and System Sciences",
volume = "4",
pages = "220-249",
year = "1970",
paper = "Luck70.pdf"
}\end{chunk}
\index{Manna, Zohar}
\begin{chunk}{ axiom.bib}
@phdthesis{Mann68,
author = "Manna, Zohar",
title = {{Termination of Algorithms}},
school = "Carnegie Mellon University",
year = "1968",
link = "\url{https://apps.dtic.mil/dtic/tr/fulltext/u2/670558.pdf}",
abstract =
"The thesis contains two parts which are self-contained units. In
Part I we present several results on the relation between
\begin{enumerate}
\item the problem of termination and equivalence of programs and
abstract programs, and
\item the first order predicate calculus
\end{enumerate}Part II is concerned with the relation between
\begin{enumerate}
\item the termination of interpreted graphs, and
\item properties of well-ordered sets and graph theory
\end{enumerate} ",
paper = "Mann68.pdf"
}\end{chunk}
\index{McCarthy, J.}
\begin{chunk}{ axiom.bib}
@misc{Mcca60a,
author = "McCarthy, J.",
title = {{Programs with Common Sense}},
year = "1960",
abstract =
"Interesting work is being done in programming computers to solve
problems which require a high degree of intelligence in
humans. However, certain elementary verbal reasoning processes so
simple that they can be carried out by any non-feeble-minded human
have yet to be simulated by machine programs. This paper will
discuss programs to manipulate in a suitable
formal language (most likely a part of the predicate calculus)
common instrumental statements. The basic program will draw
immediate conclusions from a list of premises. These conclusions
will be either declarative or imperative sentences. When an
imperative sentence is deduced the program takes a corresponding
action. These actions may include printing sentences, moving
sentences on lists, and reinitiating the basic deduction process
on these lists. Facilities will be provided for communication with humans in the
system via manual intervention and display devices connected to
the computer.",
paper = "Mcca60a.pdf"
}\end{chunk}
\index{van Benthem Jutting, L.S.}
\index{McKinna, J.}
\index{Pollack, R.}
\begin{chunk}{ axiom.bib}
@inproceedings{Bent93,
author = "van Benthem Jutting, L.S. and McKinna, J. and Pollack, R.",
title = {{Checking Algorithms for Pure Type Systems}},
booktitle = "Types for Proofs and Programs",
publisher = "Springer",
year = "1993",
pages = "19-61",
abstract =
"This work is motivated by the problem of finding reasonable
algorithms for typechecking Pure Type Systems [Bar91] (PTS). There
are several implementations of formal systems that are either PTS
or closely related to PTS. For example, LEGO [LP92] implements the
Pure Calculus of Constructions (PCC) [CH88], the Extended Calculus
of Constructions [Luo90] and the Edinburgh Logical Framework (LF)
[HHP87]. ELF [Pfe89] implements LF; CONSTRUCTOR [Hel91] implements
arbitrary PTS with finite set of sorts. Are these implementations
actually correct? Of course, we may enumerate all derivations of a
given PTS, and Jutting [vBJ93] has shown that a large class of
normalizing PTS have decidable typechecking by computing the
normal forms of types, but such techniques are obviously not
usable in practice. Algorithms in the literature for particular
type systems, such as Huet's Constructive Engine [Hue89], do not
obviously extend even to such tame classes as the normalizing and
functional PTS.",
paper = "Bent93.pdf"
}\end{chunk}
\index{Ranta, Aarne}
\begin{chunk}{ axiom.bib}
@inproceedings{Rant93,
author = "Ranta, Aarne",
title = {{Type Theory and the Informal Language of Mathematics}},
booktitle = "Types for Proofs and Programs",
publisher = "Springer",
year = "1993",
pages = "352-365",
paper = "Rant93.pdf"
}\end{chunk}
\index{Parent, C.}
\begin{chunk}{ axiom.bib}
@inproceedings{Pare93,
author = "Parent, C.",
title = {{Developing Certified Programs in the System Coq the
Program Tactic}},
booktitle = "Types for Proofs and Programs",
publisher = "Springer",
year = "1993",
pages = "291-312",
abstract =
"The system Coq is an environment for proof development based on
the Calculus of Constructions extended by inductive
definitions. The specification of a program can be represented by
a logical formula and the program itself can be extracted from the
constructive proof of the specification. In this paper, we look at
the possibility of inverting the extraction process. More
precisely, we present a method which, given a specification and a
program, builds the logical conditions to be verified in order to
obtain a correctness proof of the program. We build a proof of the
specification from the program from which the program can be
extracted. Since some information cannot automatically be
inferred, we show how to annotate the program by specifying some
of its parts in order to guide the search for the proof.",
paper = "Pare93.pdf",
keywords = "printed"
}\end{chunk}
\index{Gordon, M.J.C}
\begin{chunk}{ axiom.bib}
@article{Gord15,
author = "Gordon, M.J.C",
title = {{Tactics for Mechanized Reasoning: A Commentary on Milner
(1984) 'The Use of Machines to Assist in Rigorous Proof'}},
journal = "Philosophical Transactions: Mathematical, Physical and
Engineering Sciences",
volume = "373",
number = "2039",
pages = "1-11",
year = "2015",
abstract =
"Robin Milner's paper, ``The use of machines to assist in rigorous
proof'', introduces methods for automating mathematical reasoning
that are a milestone in the development of computer-assisted
theorem proving. His ideas, particularly his theory of tactics,
revolutionized the architecture of proof assistants. His
methodology for automating rigorous proof soundly, particularly
his theory of type polymorphism in programming, led to major
contributions to the theory and design of programming
languages. His citation for the 1991 ACM A.M. Turing award, the
most prestigious award in computer science, credits him with,
among other achievements, 'probably the first theoretically based
yet practical tool for machine assisted proof construction'. This
commentary was written to celebrate the 350th anniversary of the
journal {\sl Philosophical Transactions of the Royal Society}.",
paper = "Gord15.pdf",
keywords = "printed"
}\end{chunk}
\index{Knuth, Donald E.}
\begin{chunk}{ axiom.bib}
@misc{Knut19,
author = "Knuth, Donald E.",
title = {{Donald Knuth: Algorithms, Complexity, Life, and The Art of
Computer Programming}},
link = "\url{https://www.youtube.com/watch?v=2Bd8fsXbST8}",
year = "2019",
comment = "Lex Fridman AI Podcast"
}\end{chunk}
\index{Scott, Dana}
\index{Strachey, Christopher}
\begin{chunk}{ axiom.bib}
@misc{Scot92,
author = "Scott, Dana and Strachey, Christopher",
title = {{Toward a Mathematical Semantics for Computer Languages}},
year = "1992",
comment = "Oxford Programming Research Group Monograph PRG-6",
abstract =
"Compilers for high-level languages are generally constructed to
give the complete translation of the programs into machine
language. As machines merely juggle bit patterns, the concepts of
the original language may be lost or at least obscured during this
passage. The purpose of a mathematical semantics is to give a
correct and meaningful correspondence between programs and
mathematical entities in a way that is entirely independent of an
implementation. This plan is illustrated in a very elementary way
in the introduction. The first section connects the general method
with the usual idea of state transformations. The next section
shows why the mathematics of functions has to be modified to
accommodate recursive commands. Section 3 explains the
modification. Section 4 introduces the environments for handling
variables and identifiers and shows how the semantical equations
define equivalence of programs. Section 5 gives an exposition of
the new type of mathematical function spaces that are required for
the semantics of procedures when these are allowed in
assignments. The conclusion traces some of the background of the
project and points the way to future work.",
paper = "Scot92.pdf"
}\end{chunk}
\index{Milner, Robin}
\begin{chunk}{ axiom.bib}
@article{Miln72a,
author = "Milner, Robin",
title = {{Implementation and Applications of Scott's Logic for
Computable Functions}},
journal = "ACM SIGPLAN Notices",
volume = "7",
number = "1",
year = "1972",
pages = "1-6",
abstract =
"The basis for this paper is a logic designed by Dana Scott[1] in
1969 for formalizing arguments about computable functions of
higher type. This logic uses typed combinators, and we give a more
or less direct translation into typed $\lambda$-calculus, which is
an easier formalism to use, though not so easy for the metatheory
because of the presence of bound variables. We then describe, by
example only, a proof-checker program which has been implemented
for this logic; the program is fully describe in [2]. We relate
the induction rule which is central to the logic to two more
familiar rules -- Recursion Induction and Structural Induction --
showing that the former is a theorem of the logic, and that for
recursively defined structures the latter is a derived rule of the
logic. Finally we show how the syntax and semantics of a simple
programming language may be described completely in the logic, and
we give an example of a theorem which relates syntactic and
semantic properties of programs and which can be stated and proved
within the logic."
}\end{chunk}
\index{Avigad, Jeremy}
\index{Harrison, John}
\begin{chunk}{ axiom.bib}
@article{Avig14b,
author = "Avigad, Jeremy and Harrison, John",
title = {{Formally Verified Mathematics}},
journal = "Communications of the ACM",
volume = "57",
number = "4",
pages = "66-75",
year = "2014",
abstract =
"With the help of computational proof assistants, formal
verification could become the new standard for rigor in
mathematics.",
paper = "Avig14b.pdf"
}\end{chunk}
\index{Landin, P.J.}
\begin{chunk}{ axiom.bib}
@article{Land66,
author = "Landin, P.J.",
title = {{The Next 700 Programming Languages}},
journal = "Communications of the ACM",
volume = "9",
pages = "157-166",
year = "1966",
abstract =
"A family of unimplemented computing languages is described that
is intended to span differences of application areas by a unified
framework. This framework dictates the rules about the uses of
user-coined names, and the conventions about characterizing
functional relationships. Within this framework the design of a
specific language splits into two independent parts. One is the
choice of written appearances of programs (or more generally,
their physical representation). The other is the choice of the
abstract entities (such as numbers, character-strings, lists of
them, functional relations among them) that can be referred to in
the language. The system is biased towards ``expressions'' rather than
``statements''. It includes a nonprocedural (purely functional)
subsystem that aims to expand the class of users' needs that can
be met by a single print-instruction, without sacrificing the
important properties that make conventional right-hand-side
expressions easy to construct and understand.",
paper = "Land66.pdf"
}\end{chunk}
\index{Keenan, David C.}
\begin{chunk}{ axiom.bib}
@misc{Keen14,
author = "Keenan, David C.",
title = {{To Dissect a Mockingbird: A Graphical Notation for the
Lambda Calculus with Animated Reduction}},
year = "2014",
link = "\url{http://dkeenan.com/Lambda}",
abstract =
"The lambda calculus, and the closely related theory of
combinators, are important in the foundations of mathematics,
logic and computer science. This paper provides an informal and
entertaining introduction by means of an animated graphical
notation.",
keywords = "printed, DONE"
}\end{chunk}
\index{Friedman, Daniel P.}
\index{Eastland, Carl}
\begin{chunk}{ axiom.bib}
@book{Frie15,
author = "Friedman, Daniel P. and Eastland, Carl",
title = {{The Little Prover}},
publisher = "The MIT Press",
year = "2015",
isbn = "978-0-262-52795-8",
keywords = "shelf"
}\end{chunk}
\index{Donald, B.R.}
\index{Kapur, D.}
\index{Mundy, J.L.}
\begin{chunk}{ axiom.bib}
@book{Dona92,
author = "Donald, B.R. and Kapur, D. and Mundy, J.L.",
title = {{Symbolic and Numerical Computation for Artificial
Intelligence}},
publisher = "Academic Press Limited",
year = "1992",
isbn = "0 12 220535 9",
keywords = "shelf"
}\end{chunk}
\index{Friedman, Daniel P.}
\index{Christiansen, David Thrane}
\begin{chunk}{ axiom.bib}
@book{Frie18,
author = "Friedman, Daniel P. and Christiansen, David Thrane",
title = {{The Little Typer}},
publisher = "The MIT Press",
year = "2018",
isbn = "978-0-262-53643-1",
keywords = "shelf"
}\end{chunk}
\index{Daepp, Ulrich}
\index{Gorkin, Pamela}
\begin{chunk}{ axiom.bib}
@book{Daep11,
author = "Daepp, Ulrich and Gorkin, Pamela",
title = {{Reading, Writing, and Proving}},
year = "2011",
publisher = "Springer",
isbn = "978-1-4419-9478-3",
keywords = "shelf"
}\end{chunk}
\index{Girard, Jean-Yves}
\index{Lafont, Yves}
\index{Regnier, Laurent}
\begin{chunk}{ axiom.bib}
@book{Gira95a,
author = "Girard, Jean-Yves and Lafont, Yves and Regnier, Laurent",
title = {{Advances in Linear Logic}},
publisher = "Cambridge University Press",
year = "1995",
isbn = "0-521-55961-8",
keywords = "shelf"
}\end{chunk}
\index{Barrett, William A.}
\index{Couch, John D.}
\begin{chunk}{ axiom.bib}
@book{Barr79,
author = "Barrett, William A. and Couch, John D.",
title = {{Compiler Construction: Theory and Practice}},
publisher = "Science Research Associates",
year = "1979",
isbn = "0-574-21335-X",
keywords = "shelf"
}\end{chunk}
\index{Weitz, Edmund}
\begin{chunk}{ axiom.bib}
@book{Weit16,
author = "Weitz, Edmund",
title = {{Common Lisp Recipes}},
publisher = "Apress",
year = "2016",
isbn = "978-1-4842-1177-9",
keywords = "shelf"
}\end{chunk}
\index{Feynman, Richard}
\index{Hey, Anthony J.G.}
\begin{chunk}{ axiom.bib}
@book{Feyn02,
author = "Feynman, Richard and Hey, Anthony J.G.",
title = {{Feynman and Computation}},
publisher = "Westview Press",
year = "2002",
isbn = "0-8133-4039-X",
keywords = "shelf"
}\end{chunk}
\index{Carter, Nathan}
\begin{chunk}{ axiom.bib}
@book{Cart09,
author = "Carter, Nathan",
title = {{Visual Group Theory}},
publisher = "Mathematical Association of America",
year = "2009",
isbn = "978-0-88385-757-1",
keywords = "shelf"
}\end{chunk}
\index{Goguen, Joseph A.}
\index{Malcolm, Grant}
\begin{chunk}{ axiom.bib}
@book{Gogu96,
author = "Goguen, Joseph A. and Malcolm, Grant",
title = {{Algebraic Semantics of Imperative Programs}},
publisher = "MIT Press",
year = "1996",
isbn = "0-262-07172-X",
keywords = "shelf"
}\end{chunk}
\index{Tanimoto, Steven L.}
\begin{chunk}{ axiom.bib}
@book{Tani95,
author = "Tanimoto, Steven L.",
title = {{The Elements of Artificial Intelligence Using Common
Lisp}},
publisher = "Computer Science Press",
year = "1995",
isbn = "0-7167-8269-3",
keywords = "shelf"
}\end{chunk}
\index{Petzold, Charles}
\begin{chunk}{ axiom.bib}
@book{Petz08,
author = "Petzold, Charles",
title = {{The Annotated Turing}},
publisher = "Wiley",
year = "2008",
isbn = "978-0-470-22905-7",
keywords = "shelf"
}\end{chunk}
\index{Wiener, Norbert}
\begin{chunk}{ axiom.bib}
@book{Wien61,
author = "Wiener, Norbert",
title = {{Cybernetics}},
publisher = "MIT Press",
year = "1961",
keywords = "shelf"
}\end{chunk}
\index{Monin, Jean-Francois}
\begin{chunk}{ axiom.bib}
@book{Moni03,
author = "Monin, Jean-Francois",
title = {{Understanding Formal Methods}},
publisher = "Springer",
year = "2003",
isbn = "1-85233-247-6",
keywords = "shelf"
}\end{chunk}
\index{Wulf, William}
\index{Johnsson, Richard K.}
\index{Weinstock, Charles B.}
\index{Hobbs, Steven O.}
\index{Geschke, Charles M.}
\begin{chunk}{ axiom.bib}
@book{Wilf75,
author = "Wulf, William and Johnsson, Richard K. and
Weinstock, Charles B. and Hobbs, Steven O.
and Geschke, Charles M.",
title = {{The Design of an Optimizing Compiler}},
publisher = "Elsevier",
year = "1975",
isbn = "0-444-00158-1",
keywords = "shelf"
}\end{chunk}
\index{Goldstein, Rebecca}
\begin{chunk}{ axiom.bib}
@book{Gold05,
author = "Goldstein, Rebecca",
title = {{Incompleteness}},
publisher = "Atlas Books",
year = "2005",
isbn = "0-393-32760-4",
keywords = "shelf"
}\end{chunk}
\index{Queinnec, Christian}
\begin{chunk}{ axiom.bib}
@book{Quei94,
author = "Queinnec, Christian",
title = {{Lisp In Small Pieces}},
publisher = "Cambridge University Press",
year = "1994",
isbn = "0-521-54566-8",
keywords = "shelf"
}\end{chunk}
\index{Kalorkoti, K.}
\begin{chunk}{ axiom.bib}
@misc{Kalo18,
author = "Kalorkoti, K.",
title = {{Introduction to Computer Algebra}},
comment = "Course Notes",
year = "2018",
keywords = "shelf"
}\end{chunk}
\index{Friedman, Daniel P.}
\index{Byrd, William E.}
\index{Kiselyov, Oleg}
\begin{chunk}{ axiom.bib}
@book{Frie05,
author = "Friedman, Daniel P. and Byrd, William E. and
Kiselyov, Oleg",
title = {{The Reasoned Schemer}},
publisher = "MIT Press",
year = "2005",
isbn = "0-262-56214-6",
keywords = "shelf"
}\end{chunk}
\index{Friedman, Daniel P.}
\index{Felleisen, Matthias}
\begin{chunk}{ axiom.bib}
@book{Frie96,
author = "Friedman, Daniel P. and Felleisen, Matthias",
title = {{The Seasoned Schemer}},
publisher = "MIT Press",
year = "1996",
isbn = "0-262-56100-X",
keywords = "shelf"
}\end{chunk}
\index{Friedman, Daniel P.}
\index{Felleisen, Matthias}
\begin{chunk}{ axiom.bib}
@book{Frie99,
author = "Friedman, Daniel P. and Felleisen, Matthias",
title = {{The Little Schemer}},
publisher = "MIT Press",
year = "1999",
isbn = "0-262-56099-2",
keywords = "shelf"
}\end{chunk}
\index{Brady, Edwin}
\begin{chunk}{ axiom.bib}
@book{Brad17a,
author = "Brady, Edwin",
title = {{Type-Driven Development with IDRIS}},
publisher = "Manning Publications",
year = "2017",
isbn = "978-1-61729-302-3",
keywords = "shelf"
}\end{chunk}
\index{Bundy, Alan}
\index{Basin, David}
\index{Hutter, Dieter}
\index{Ireland, Andrew}
\begin{chunk}{ axiom.bib}
@book{Bund05,
author = "Bundy, Alan and Basin, David and Hutter, Dieter and
Ireland, Andrew",
title = {{Rippling: Meta-Level Guidance for Mathematical
Reasoning}},
publisher = "Cambridge University Press",
year = "2005",
isbn = "978-0-521-83449-0",
keywords = "shelf"
}\end{chunk}
\index{Nagel, Ernest}
\index{Newman, James R.}
\begin{chunk}{ axiom.bib}
@book{Nage01,
author = "Nagel, Ernest and Newman, James R.",
title = {{Godel's Proof}},
publisher = "New York University Press",
year = "2001",
isbn = "0-8147-5816-9",
keywords = "shelf"
}\end{chunk}
\index{Sozeau, Matthieu}
\index{Boulier, Simon}
\index{Forster, Yannick}
\index{Tabareau, Nicolas}
\index{Winterhalter, Theo}
\begin{chunk}{ axiom.bib}
@misc{Soze19,
author = "Sozeau, Matthieu and Boulier, Simon and Forster, Yannick
and Tabareau, Nicolas and Winterhalter, Theo",
title = {{Coq Coq Correct!}},
year = "2019",
link = "\url{https://www.irif.fr/~sozeau/research/publications/drafts/Coq_Coq_Correct.pdf}",
abstract =
"Coq is built around a well-delimited kernel that performs
typechecking for definitions in a variant of the Calculus of
Inductive Constructions (CIC). Although the metatheory of CIC is
very stable and reliable, the correctness of its implementation in
Coq is less clear. Indeed, implementing an efficient type checker
for CIC is a rather complex task, and many parts of the code rely
on implicit invariants which can easily be broken by further
evolution of the code. Therefore, on average, one critical bug has
been found every year in Coq. This paper presents the first
implementation of a type checker for the kernel of Coq, which is
proven correct in Coq with respect to its formal
specification. Note that because of Godel's incompleteness
theorem, there is no hope to prove completely the correctness of
the specification of Coq inside Coq (in particular strong
normalisation or canonicity), but it is possible to prove the
correctness of the implementation assuming the correctness of the
specification. Our work is based on the METACOQ project [Anand et
al. 2018; Sozeau et al. 2019] which provides metaprogramming
facilities to work with terms and declarations at the level of
this kernel. Our type checker is based on the specification of the
typing relation of the Polymorphic Cumulative Calculus of
Inductive Constructions (PCUIC) at the basis of Coq and the
verification of a relatively efficient and sound type-checker for
it. In addition to the kernel implementation, an essential feature
of Coq is so-called {\sl extraction} [Letouzey 2004]; the
production of executable code in functional languages from Coq
definitions. We present a verified version of this subtle
type-and-proof erasure step, therefore enabling the verified
extraction of a safe type-checker for Coq.",
paper = "Soze19.pdf",
keywords = "printed"
}\end{chunk}
\index{Crouse, Maxwell}
\index{Whitehead, Spencer}
\index{Abdelaziz, Ibrahim}
\index{Makni, Bassem}
\index{Cornelio, Cristina}
\index{Kapanipathi, Pavan}
\index{Pell, Edwin}
\index{Srinivas, Kavitha}
\index{Thost, Veronika}
\index{Witbrock, Michael}
\index{Fokoue, Achille}
\begin{chunk}{ axiom.bib}
@misc{Crou19,
author = "Crouse, Maxwell and Whitehead, Spencer and
Abdelaziz, Ibrahim and Makni, Bassem and
Cornelio, Cristina and Kapanipathi, Pavan and
Pell, Edwin and Srinivas, Kavitha and
Thost, Veronika and Witbrock, Michael and
Fokoue, Achille",
title = {{A Deep Reinforcement Learning Based Approach to Learning
Transferable Proof Guidance Strategies}},
year = "2019",
link = "\url{https://arxiv.org/pdf/1911.02065.pdf}",
abstract =
"Traditional first-order logic (FOL) reasoning systems usually
rely on manual heuristics for proof guidance. We propose TRAIL: a
system that learns to perform proof guidance using reinforcement
learning. A key design principle of our system is that it is
general enough to allow transfer to problems in different domains
that do not share the same vocabulary of the training set. To do
so, we developed a novel representation of the internal state of a
prover in terms of clauses and inference actions, and a novel
neural-based attention mechanism to learn interactions between
clauses. We demonstrate that this approach enables the system to
generalize from training to test data across domains with
different vocabularies, suggesting that the neural architecture in
TRAIL is well suited for representing and processing of logical
formalisms.",
paper = "Crou19.pdf"
}\end{chunk}
\index{Crouse, Maxwell}
\index{Abdelaziz, Ibrahim}
\index{Cornelio, Cristina}
\index{Thost, Veronika}
\index{Wu, Lingfei}
\index{Forbus, Kenneth}
\index{Fokoue, Achille}
\begin{chunk}{ axiom.bib}
@misc{Crou19a,
author = "Crouse, Maxwell and Abdelaziz, Ibrahim and
Cornelio, Cristina and Thost, Veronika and
Wu, Lingfei and Forbus, Kenneth and Fokoue, Achille",
title = {{Improving Graph Neural Network Representations of Logical
Formulae with Subgraph Pooling}},
year = "2019",
link = "\url{https://arxiv.org/pdf/1911.06904.pdf}",
abstract =
"Recent advances in the integration of deep learning with
automated theorem proving have centered around the representation
of graph-structured representations, in large part driven by the
rapidly emerging body of research in geometric deep
learning. Typically, structure-aware neural methods for embedding
logical formulae have been variants of either Tree LSTMs or
GNNs. While more effective than character and token-level
approaches, such methods have often made representational
trade-offs that limited their ability to effectively represent the
global structure of their inputs. In this work, we introduce a
novel approach for embedding logical formulae using DAG LSTMs that
is designed to overcome the limitations of both Tree LSTMs and
GNNs. The effectiveness of the proposed framework is demonstrated
on the tasks of premise selection and proof step classification
where it achieves the state-of-the-art performance on two standard
datasets.",
paper = "Crou19a.pdf"
}\end{chunk}
\index{Gauthier, Thibault}
\begin{chunk}{ axiom.bib}
@misc{Gaut19,
author = "Gauthier, Thibault",
title = {{Deep Reinforcement Learning in HOL4}},
year = "2019",
link = "\url{https://arxiv.org/pdf/1910.11797.pdf}",
abstract =
"The paper describes an implementation of deep reinforcement
learning through self-supervised learning within the proof
assistant HOL4. A close interaction between the machine learning
modules and the HOL4 library is achieved by the choice of tree
neural networks (TNNs) as machine learning models and the internal
use of HOL4 terms to represent tree structures of TNNs. Recursive
improvement is possible when a given task is expressed as a search
problem. In this case, a Monte Carlo Tree Search (MCTS) algorithm
guided by a TNN can be used to explore the search space and
produce better examples for training the next TNN. As an
illustration, tasks over propositional and arithmetical terms,
representative of fundamental theorem proving techniques, are
specified and learned: truth estimation, end-to-end computation,
term rewriting and term synthesis.",
paper = "Gaut19.pdf"
}\end{chunk}
\index{Piotrowski, Bartosz}
\index{Brown, Chad E.}
\index{Kaliszyk, Cezary}
\begin{chunk}{ axiom.bib}
@misc{Piot19,
author = "Piotrowski, Bartosz and Brown, Chad E. and
Kaliszyk, Cezary",
title = {{Can Neural Networks Learn Symbolic Rewriting?}},
year = "2019",
link = "\url{https://arxiv.org/pdf/1911.04783.pdf}",
abstract =
"This work investigates if the current neural architectures are
adequate for learning symbolic rewriting. Two kinds of data sets
are proposed for this research -- one based on automated proofs
and the other being a synthetic set of polynomial terms. The
experiments with use of the current neural machine translation
models are performed and its results are discussed. Ideas for
extending this line of research are proposed and its relevance is
motivated.",
paper = "Piot19.pdf"
}\end{chunk}
\index{Brown, Chad E.}
\index{Gauthier, Thibault}
\begin{chunk}{ axiom.bib}
@misc{Brow19,
author = "Brown, Chad E. and Gauthier, Thibault",
title = {{Self-Learned Formula Synthesis in Set Theory}},
year = "2019",
link = "\url{https://arxiv.org/pdf/1912.01525.pdf}",
abstract =
"A reinforcement learning algorithm accomplishes the task of
synthesizing a set-theoretical formula that evaluates to a given
truth value for given assignments.",
paper = "Brow19.pdf"
}\end{chunk}
\index{Olsak, Miroslav}
\index{Kaliszyk, Cezary}
\index{Urban, Josef}
\begin{chunk}{ axiom.bib}
@misc{Olsa19,
author = "Olsak, Miroslav and Kaliszyk, Cezary and Urban, Josef",
title = {{Property Invariant Embedding for Automated Reasoning}},
year = "2019",
link = "\url{https://arxiv.org/pdf/1911.12073.pdf}",
abstract =
"Automated reasoning and theorem proving have recently become
major challenges for machine learning. In other domains,
representations that are able to abstract over unimportant
transformations, such as abstraction over translations and
rotations in vision, are becoming more common. Standard methods of
embedding mathematical formulas for learning theorem proving are
however yet unable to handle many important transformations. In
particular, embedding previously unseen labels, that often arise
in definitional encodings and in Skolemization, has been very weak
so far. Similar problems appear when transferring knowledge between
known symbols. We propose a novel encoding of formulas that extends existing
graph neural network models. This encoding represents symbols only
by nodes in the graph, without giving the network any knowledge of
the original labels. We provide additional links between such
nodes that allow the network to recover the meaning and therefore
correctly embed such nodes irrespective of the given labels. We
test the proposed encoding in an automated theorem prover based on
the tableaux connection calculus, and show that it improves on the
best characterizations used so far. The encoding is further
evaluated on the premise selection task and a newly introduced
symbol guessing task, and shown to correctly predict 65\% of the
symbol names.",
paper = "Olsa19.pdf"
}\end{chunk}
\index{Wang, Qingxiang}
\index{Brown, Chad}
\index{Kaliszyk, Cezary}
\index{Urban, Josef}
\begin{chunk}{ axiom.bib}
@misc{Wang19a,
author = "Wang, Qingxiang and Brown, Chad and Kaliszyk, Cezary and
Urban, Josef",
title = {{Exploration of Neural Machine Translation in
Autoformalization of Mathematics in Mizar}},
year = "2019",
link = "\url{https://arxiv.org/pdf/1912.02636.pdf}",
abstract =
"In this paper we share several experiments trying to
automatically translate informal mathematics into formal
mathematics. In our context informal mathematics refers to
human-written mathematical sentences in the LaTeX format; and
formal mathematics refers to statements in the Mizar language. We
conducted our experiments against three established neural
network-based machine translation models that are known to deliver
competitive results on translating between natural languages. To
train these models we also prepared four informal-to-formal
datasets. We compare and analyze our results according to whether
the model is supervised or unsupervised. In order to augment the
data available for auto-formalization and improve the results, we
develop a custom type-elaboration mechanism and integrate it into
the supervised translation.",
paper = "Wang19a.pdf"
}\end{chunk}
- 2913. By daly
-
books/bookvolbib add references
Goal: Proving Axiom Sane
\index{Al-hassy, Musa}
\index{Carette, Jacques}
\index{Kahl, Wolfram}
\begin{chunk}{ axiom.bib}
@misc{Alha19,
author = "Al-hassy, Musa and Carette, Jacques and Kahl, Wolfram",
title = {{A Language Feature to Unbundle Data at Will}},
year = "2019",
link = "\url{http://www.cas.mcmaster.ca/~carette/publications/gpce19main-p33.pdf}",
abstract =
"Programming languages with sufficiently expressive type systems
provide users with different means of data 'bundling'.
Specifically, in dependently-typed languages such as Agda, Coq,
Lean and Idris, one can choose to encode information in a record
either as a parameter or a field. For example, we can speak of
graphs over a particular vertex set, or speak of arbitrary graphs
where the vertex set is a component. These create isomorphic
types, but differ with respect to intended use. Traditionally, a
library designer would make this choice (between parameters and
fields); if a user wants a different variant, they are forced to
build conversion utilities, as well as duplicate functionality. For
a graph data type, if a library only provides a Haskell-like
typeclass view of graphs over a vertex set, yet a user wishes to
work with the category of graphs, they must now package a vertex
set as a component in a record along with a graph over that set. We design and implement a language feature that allows both the
library designer and the user to make the choice of information
exposure only when necessary, and otherwise leave the
distinguishing line between parameters and fields unspecified.
Our language feature is currently implemented as a prototype
meta-program incorporated into Agda's Emacs ecosystem, in a way
that is unobtrusive to Agda users.",
paper = "Alha19.pdf"
}\end{chunk}
\index{Kahan, William}
\begin{chunk}{ axiom.bib}
@misc{Kaha04,
author = "Kahan, William",
title = {{On the Cost of Floating-Point Computation Without
Extra-Precise Arithmetic}},
year = "2004",
link = "\url{https://people.eecs.berkeley.edu/~wkahan/Qdrtcs.pdf}",
abstract =
"Current benchmarks give the impression that computation costs no
more than the time consumed, and perhaps the memory occupied, only
because we can measure these so easily. What about the costs of
maintenance (when hardware or operating systems change), of
development (algorithm design, tests, proofs of correctness), and
of misleading results? Solving a quadratic equation provides a
relatively easily understood case study of the way all costs get
inflated when arithmetic precision rather higher than the
precision of the data and the accuracy demanded of results is
unavailable or, worse, unusable because of lapses in the design
and/or implementation of widely used programming languages. Then
costs are inflated by the trickery required to devise portable
programs that compute roots at least about as accurately as the
data deserve, and to prove that they are that accurate, and to
test them. This trickery will be illustrated by a MATLAB program
designed to get results of high quality from diverse versions of
MATLAB on the two most popular kinds of hardware, PCs and
Macs. Included is a test program and some of its results. Only a
small part of the program needs higher precision arithmetic not
too slow. There it would cost far less than the consequences of
doing without.",
paper = "Kaha04.pdf"
}\end{chunk}
\index{Dominus, Mark}
\begin{chunk}{ axiom.bib}
@misc{Domi19,
author = "Dominus, Mark",
title = {{The Least Common Divisor and the Greatest Common Multiple}},
link = "\url{https://blog.plover.com/math/gcm.html}",
year = "2019"
}\end{chunk}
\index{Carette, Jacques}
\index{O'Connor, Russell}
\index{Sharoda, Yasmine}
\begin{chunk}{ axiom.bib}
@misc{Care19,
author = "Carette, Jacques and O'Connor, Russell and Sharoda, Yasmine",
title = {{Building on the Diamonds between Theories: Theory
Presentation Combinators}},
year = "2019",
abstract =
"To build a large library of mathematics, it seems more efficient
to take advantage of the inherent structure of mathematical
theories. Various theory presentation combinators have been
proposed, and some have been implemented, in both legacy and
current systems. Surprisingly, the 'standard library' of most
systems do not make pervasive use of these combinators. We present a set of combinators optimized for reuse, via the tiny
theory approach. Our combinators draw their power from the
inherent structure already present in the {\sl category of
contexts} associated to a dependently typed language. The current
work builds on ideas originating in CLEAR and Specware and their
descendents (both direct and intellectual). Driven by some design
criteria for user-centric library design, our library-building
experience via the systematic use of combinators has fed back into
the semantics of these combinators, and later into an updated
syntax for them.",
paper = "Care19.pdf"
}\end{chunk}
\index{Steel, Allan K.}
\begin{chunk}{ axiom.bib}
@article{Stee10,
author = "Steel, Allan K.",
title = {{Computing with Algebraically Closed Fields}},
journal = "J. of Symbolic Computation",
volume = "45",
number = "3",
year = "2010",
pages = "342-372",
abstract =
"A practical computational system is described for computing with
an algebraic closure of a field. The system avoids factorization
of polynomials over extension fields, but gives the illusion of a
genuine field to the user. All roots of an arbitrary polynomial
defined over such an algebraically closed field can be constructed
and are easily distinguished within the system. The difficult case
of inseparable extensions of function fields of positive
characteristic is also handled properly by the system. A technique
of modular evaluation into a finite field critically ensures that
a unique genuine field is simulated by the system but also
provides fast optimizations for some fundamental operations. Fast
matrix techniques are also used for several non-trivial
operations. The system has been successfully implemented within
the Magma Computer Algebra System, and several examples are
presented, using this implementation.",
paper = "Stee10.pdf"
}\end{chunk}
Branch metadata
- Branch format:
- Branch format 7
- Repository format:
- Bazaar repository format 2a (needs bzr 1.16 or later)