@techreport{BUCS-TR-2010-011,
author = {Andrei Lapets and Assaf Kfoury},
title = {{A User-friendly Interface for a Lightweight Verification System}},
institution = {CS Dept., Boston University},
number = {BUCS-TR-2010-011},
month = may,
year = {2010},
url = {http://www.cs.bu.edu/techreports/2010-011-aartifact-interface.ps.Z},
abstract = {User-friendly interfaces can play an important role in bringing
the benefits of a machine-readable representation of formal
arguments to a wider audience. The ``aartifact'' system is an
easy-to-use lightweight verifier for formal arguments that involve
logical and algebraic manipulations of common mathematical
concepts. The system provides validation capabilities by utilizing
a database of propositions governing common mathematical concepts.
The ``aartifact'' system's multi-faceted interactive user interface
combines several approaches to user-friendly interface design: (1)
a familiar and natural syntax based on existing conventions in
mathematical practice, (2) a real-time keyword-based lookup
mechanism for interactive, context-sensitive discovery of the
syntactic idioms and semantic concepts found in the system's
database of propositions, and (3) immediate validation feedback in
the form of reformatted raw input. The system's natural syntax and
database of propositions allow it to meet a user's expectations in
the formal reasoning scenarios for which it is intended. The
real-time keyword-based lookup mechanism and validation feedback
allow the system to teach the user about its capabilities and
limitations in an immediate, interactive, and context-aware
manner. } }
@techreport{BUCS-TR-2010-010,
author = {Andrei Lapets},
title = {{User-friendly Support for Common Concepts in a Lightweight Verifier}},
institution = {CS Dept., Boston University},
number = {BUCS-TR-2010-010},
month = may,
year = {2010},
url = {http://www.cs.bu.edu/techreports/2010-010-aartifact-discussion.ps.Z},
abstract = {
Machine verification of formal arguments can only increase our
confidence in the correctness of those arguments, but the costs of
employing machine verification still outweigh the benefits for
some common kinds of formal reasoning activities. As a result,
usability is becoming increasingly important in the design of
formal verification tools. We describe the ``aartifact''
lightweight verification system, designed for processing formal
arguments involving basic, ubiquitous mathematical concepts. The
system is a prototype for investigating potential techniques for
improving the usability of formal verification systems. It
leverages techniques drawn both from existing work and from our
own efforts. In addition to a parser for a familiar concrete
syntax and a mechanism for automated syntax lookup, the system
integrates (1) a basic logical inference algorithm, (2) a database
of propositions governing common mathematical concepts, and (3) a
data structure that computes congruence closures of expressions
involving relations found in this database. Together, these
components allow the system to better accommodate the expectations
of users interested in verifying formal arguments involving
algebraic and logical manipulations of numbers, sets, vectors, and
related operators and predicates. We demonstrate the reasonable
performance of this system on typical formal arguments and briefly
discuss how the system's design contributed to its usability in
two case studies. } }
@techreport{BUCS-TR-2009-030,
author = {Andrei Lapets and Assaf Kfoury},
title = {{Verification with Natural Contexts: Soundness of Safe Compositional Network Sketches}},
institution = {CS Dept., Boston University},
number = {BUCS-TR-2009-030},
month = oct,
year = {2009},
url = {http://www.cs.bu.edu/techreports/2009-030-verified-netsketch-soundness.ps.Z},
abstract = {
In research areas involving mathematical rigor, there are numerous
benefits to adopting a formal representation of models and
arguments: reusability, automatic evaluation of examples, and
verification of consistency and correctness. However,
accessibility has not been a priority in the design of formal
verification tools that can provide these benefits. In earlier
work \cite{BUCS-TR-2009-015}, we attempt to address this broad
problem by proposing several specific design criteria organized
around the notion of a \emph{natural context}: the sphere of
awareness a working human user maintains of the relevant
constructs, arguments, experiences, and background materials
necessary to accomplish the task at hand. We evaluate our proposed
design criteria by utilizing within the context of novel research
a formal reasoning system that is designed according to these
criteria. In particular, we consider how the design and
capabilities of the formal reasoning system that we employ
influence, aid, or hinder our ability to accomplish a formal
reasoning task -- the assembly of a machine-verifiable proof
pertaining to the NetSketch formalism.
NetSketch is a tool for the specification of constrained-flow
applications and the certification of desirable safety properties
imposed thereon. NetSketch is conceived to assist system
integrators in two types of activities: modeling and design. It
provides capabilities for compositional analysis based on a
strongly-typed Domain-Specific Language (DSL) for describing and
reasoning about constrained-flow networks and invariants that need
to be enforced thereupon. In a companion paper
\cite{BUCS-TR-2009-028}, we overview NetSketch, highlight its
salient features, and illustrate how it could be used in actual
applications. In this paper, we define using a machine-readable
syntax major parts of the formal system underlying the operation
of NetSketch, along with its semantics and a corresponding notion
of validity. We then provide a proof of soundness for the
formalism that can be partially verified using a lightweight
formal reasoning system that simulates natural contexts. A
traditional version of these definitions and arguments can be
found in a related paper \cite{BUCS-TR-2009-029}. } }
@UNPUBLISHED{Car+Wel:AOE-2008,
ITRSPAPER = {yes},
CHURCHREPORT = {yes},
AUTHOR = {S{\'e}bastien Carlier and J. B. Wells},
TITLE = {The Algebra of Expansion},
NOTE = {Draft corresponding roughly to an ITRS 2008 workshop
talk},
MONTH = mar,
DATE = {2008-03-25},
YEAR = {2008},
PDF = {http://www.macs.hw.ac.uk/~jbw/papers/Carlier+Wells:The-Algebra-of-Expansion:2008-03-25.pdf},
ABSTRACT = {Expansion is an operation on typings (pairs of type
environments and result types) in type systems for
the $\lambda$-calculus. Expansion was originally
introduced for calculating possible typings of a
term in systems with intersection types.
Unfortunately, definitions of expansion (even the
most modern ones) have been difficult for outsiders
to absorb. This paper aims to clarify expansion and
make it more accessible to non-specialists by
isolating the pure notion of expansion on its own,
independent of type systems and types. We show how
expansion can be seen as a simple algebra on terms
with variables, substitutions, composition, and
miscellaneous constructors such that the algebra
satisfies 8 simple axioms and axiom schemas: the 3
standard axioms of a monoid, 4 standard axioms or
axiom schemas of substitutions (including one that
corresponds to the usual ``substitution lemma''),
and 1 axiom schema for expansion itself. This
presentation should help make more accessible to a
wider community theory and techniques involving
intersection types and type inference with flexible
precision.}
}
@TECHREPORT{Don+Hal+Kfo:BUCS-TR-2005-X,
AUTHOR = {Kevin Donnelly and Joseph J. Hallett and Assaf J. Kfoury},
TITLE = {Formal Semantics For Weak References},
INSTITUTION = {Department of Computer Science, Boston University},
YEAR = {2005},
MONTH = jul,
CHURCHREPORT = {yes},
TYPE = {Technical Report},
NUMBER = {Don+Hal+Kfo:BUCS-TR-2005-X},
PDF = {http://www.church-project.org/reports/electronic/Don+Hal+Kfo:BUCS-TR-2005-X.pdf},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Don+Hal+Kfo:BUCS-TR-2005-X.ps.gz},
XPOSTSCRIPT = {},
XPDF = {},
ITRSPAPER = {no},
ABSTRACT = {
Weak references are references that do not prevent the object they
point to from being garbage collected. Many realistic languages,
including Java, SML/NJ, and Haskell to name a few, support weak
references. However, there is no generally accepted formal
semantics for weak references. Without such a formal semantics it
becomes impossible to formally prove properties of such a language
and the programs written in it.
\par
We give a formal semantics for a calculus called lambda-weak that
includes weak references and is derived from Morrisett, Felleisen,
and Harper's lambda-gc. The semantics is used to examine several
issues involving weak references. We use the framework to
formalize the semantics for the key/value weak references found in
Haskell. Furthermore, we consider a type system for the language
and show how to extend the earlier result that type inference can
be used to collect reachable garbage. In addition we show how to
allow collection of weakly referenced garbage without incurring
the computational overhead often associated with collecting a weak
reference which may be later used. Lastly, we address the
non-determinism of the semantics by providing both an effectively
decidable syntactic restriction and a more general semantic
criterion, which guarantee a unique result of evaluation.} }
@TECHREPORT{Don+Kfo:BUCS-TR-2005-X,
AUTHOR = {Kevin Donnelly and Assaf J. Kfoury},
TITLE = {Some Considerations on Formal Semantics For Weak References},
INSTITUTION = {Department of Computer Science, Boston University},
YEAR = {2005},
MONTH = jul,
CHURCHREPORT = {yes},
TYPE = {Technical Report},
NUMBER = {Don+Kfo:BUCS-TR-2005-X},
PDF = {http://www.church-project.org/reports/electronic/Don+Kfo:BUCS-TR-2005-X.pdf},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Don+Kfo:BUCS-TR-2005-X.ps.gz},
XPOSTSCRIPT = {},
XPDF = {},
ITRSPAPER = {no},
ABSTRACT = {
Weak references are references that do not prevent the object they
point to from being garbage collected. Most realistic languages,
including Java, SML/NJ, and OCaml to name a few, have some
facility for programming with weak references. Weak references
are used in implementing idioms like memoizing functions and
hash-consing in order to avoid potential memory leaks.
However, the semantics of weak references in many languages are
not clearly specified. Without a formal semantics for weak
references it becomes impossible to prove the correctness of
implementations making use of this feature. Previous work by
Hallett and Kfoury \cite{HK-weak} extends $\gc$
\cite{Mor+Fel+Har:FPCA-1995}, a language for modeling garbage
collection, to $\weak$, a similar language with weak references.
Using this previously formalized semantics for weak references, we
consider two issues related to well-behavedness of programs.
Firstly, we provide a new, simpler proof of the well-behavedness
of the syntactically restricted fragment of $\weak$ defined in
\cite{HK-weak}. Secondly, we give a natural semantic criterion
for well-behavedness much broader than the syntactic restriction,
which is useful as a principle for programming with weak references.
Furthermore we extend the result, proved in
\cite{Mor+Fel+Har:FPCA-1995}, which allows one to use
type-inference to collect some reachable objects that are never
used. We prove that this result holds of our language, and we
extend this result to allow the collection of weakly-referenced
reachable garbage without incurring the computation overhead
sometimes associated with collecting weak bindings (e.g. the need
to recompute a memoized function).
Lastly we extend the semantic framework to model the key/value
weak references found in Haskell and we prove that the Haskell
semantics is equivalent to a simpler semantics due to the lack of
side-effects in our language.} }
@TECHREPORT{Hal+Kfo:BUCS-TR-2005-031,
AUTHOR = {Joseph J. Hallett and Assaf J. Kfoury},
TITLE = {A Formal Semantics For Weak References},
INSTITUTION = {Department of Computer Science, Boston University},
YEAR = {2005},
MONTH = may,
CHURCHREPORT = {yes},
TYPE = {Technical Report},
NUMBER = {Hal+Kfo:BUCS-TR-2005-031},
PDF = {http://www.church-project.org/reports/electronic/Hal+Kfo:BUCS-TR-2005-031.pdf},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Hal+Kfo:BUCS-TR-2005-031.ps.gz},
XPOSTSCRIPT = {},
XPDF = {},
ITRSPAPER = {no},
ABSTRACT = {A weak reference is a reference to an object that is not
followed by the pointer tracer when garbage collection is called.
That is, a weak reference cannot prevent the object it references
from being garbage collected. Weak references remain a
troublesome programming feature largely because there is not an
accepted, precise semantics that describes their behavior (in
fact, we are not aware of any formalization of their semantics).
The trouble is that weak references allow reachable objects to be
garbage collected, therefore allowing garbage collection to
influence the result of a program. Despite this difficulty, weak
references continue to be used in practice for reasons related to
efficient storage management, and are included in many popular
programming languages (Standard ML, Haskell, OCaml, and Java).
We give a formal semantics for a calculus called $\weak$ that
includes weak references and is derived from Morrisett, Felleisen,
and Harper's $\gc$. $\gc$ formalizes the notion of garbage
collection by means of a rewrite rule. Such a formalization is
required to precisely characterize the semantics of weak
references. However, the inclusion of a garbage-collection
rewrite-rule in a language with {\em weak references} introduces
non-deterministic evaluation, even if the parameter-passing
mechanism is deterministic (call-by-value in our case). This
raises the question of confluence for our rewrite system. We
discuss natural restrictions under which our rewrite system is
confluent, thus guaranteeing uniqueness of program result. We
define conditions that allow other garbage collection algorithms
to co-exist with our semantics of weak references. We also
introduce a polymorphic type system to prove the absence of
erroneous program behavior (i.e., the absence of ``stuck
evaluation'') and a corresponding type inference algorithm. We
prove the type system sound and the inference algorithm sound and
complete.} }
@TECHREPORT{Bak+Kfo:TR-2005a,
AUTHOR = {Adam Bakewell and Assaf J. Kfoury},
TITLE = {Properties of a Rewrite System for Unification with Expansion Variables},
INSTITUTION = {Church Project, Boston University},
YEAR = {2005},
MONTH = apr,
CHURCHREPORT = {yes},
TYPE = {Technical Report},
PDF = {http://cs-people.bu.edu/bake/uwev/propbun.pdf},
POSTSCRIPT = {http://cs-people.bu.edu/bake/uwev/propbun.ps.gz}
}
@TECHREPORT{Bak+Car+Kfo+Wel:BUCS-TR-2005a,
AUTHOR = {Adam Bakewell and S{\'e}bastien Carlier and A. J. Kfoury and J. B. Wells},
TITLE = {Inferring Intersection Typings that are Equivalent to Call-by-Name and Call-by-Value Evaluations},
INSTITUTION = {Church Project, Boston University},
YEAR = {2005},
MONTH = apr,
DATE = {2005-04-13},
CHURCHREPORT = {yes},
TYPE = {Technical Report},
PDF = {http://cs-people.bu.edu/bake/uwev/nameval.pdf},
POSTSCRIPT = {http://cs-people.bu.edu/bake/uwev/nameval.ps.gz},
ABSTRACT = {
We present a procedure to infer a typing for an arbitrary
$\lambda$-term $M$ in an intersection-type system that translates
into exactly the call-by-name (resp., call-by-value) evaluation of
$M$. Our framework is the recently developed System~E which
augments intersection types with \emph{expansion variables}. The
inferred typing for $M$ is obtained by setting up a unification
problem involving both type variables and expansion variables,
which we solve with a confluent rewrite system. The inference
procedure is \emph{compositional} in the sense that typings for
different program components can be inferred in \emph{any} order,
and without knowledge of the definition of other program
components. Using expansion variables lets us achieve a
compositional inference procedure easily. Termination of the
procedure is generally undecidable. The procedure terminates and
returns a typing iff the input $M$ is normalizing according to
call-by-name (resp., call-by-value). The inferred typing is
\emph{exact} in the sense that the exact call-by-name (resp.,
call-by-value) behaviour of $M$ can be obtained by a (polynomial)
transformation of the typing. The inferred typing is also
\emph{principal} in the sense that any other typing that
translates the call-by-name (resp., call-by-value) evaluation of
$M$ can be obtained from the inferred typing for $M$ using a
substitution-based transformation.}
}
@INPROCEEDINGS{Zhu+Xi:PADL-2005,
AUTHOR = {Dengping Zhu and Hongwei Xi},
TITLE = {Safe Programming with Pointers through Stateful Views},
BOOKTITLE = {Proceedings of the 7th International Symposium on Practical Aspects of Declarative Languages},
PUBLISHER = {Springer-Verlag LNCS},
YEAR = {2005},
MONTH = jan,
PAGES = {TBA},
ADDRESS = {Long Beach, CA},
CHURCHREPORT = {yes},
POSTSCRIPT = {http://cs-people.bu.edu/zhudp/pubs/PADL05.ps},
PDF = {http://cs-people.bu.edu/zhudp/pubs/PADL05.pdf},
ABSTRACT = {{
The need for direct memory manipulation through pointers is essential in
many applications. However, it is also commonly understood that the use (or
probably misuse) of pointers is often a rich source of program errors.
Therefore, approaches that can effectively enforce safe use of pointers in
programming are highly sought after. ATS is a programming language with a
type system rooted in a recently developed framework Applied Type System,
and a novel and desirable feature in ATS lies in its support for safe
programming with pointers through a novel notion of {\em stateful
views}. In particular, even pointer arithmetic is allowed in ATS and
guaranteed to be safe by the type system of ATS. In this paper, we give an
overview of this feature in ATS, presenting some interesting examples based
on a prototype implementation of ATS to demonstrate the practicality of
safe programming with pointer through stateful views.
}}
}
@INPROCEEDINGS{Mak+Wel:ESOP-2005,
AUTHOR = {Henning Makholm and J. B. Wells},
TITLE = {Instant Polymorphic Type Systems for Mobile Process
Calculi: Just Add Reduction Rules and Close},
CHURCHREPORT = {yes},
DARTREPORT = {yes},
CROSSREF = {ESOP2005},
PAGES = {389--407},
NOTE = {A more detailed predecessor is
\cite{Mak+Wel:PolyStar-2004}},
PDF = {http://www.macs.hw.ac.uk/~jbw/papers/Makholm+Wells:Instant-Polymorphic-Type-Systems-for-Mobile-Process-Calculi:ESOP-2005.pdf},
ABSTRACT = {Many different \emph{mobile process calculi} have
been invented, and for each some number of type
systems has been developed. Soundness and other
properties must be proved separately for each
calculus and type system. We present the
\emph{generic} polymorphic type system
Poly$\star$ which works for a wide range of
mobile process calculi, including the $\pi$-calculus
and Mobile Ambients. For any calculus satisfying
some general syntactic conditions, well-formedness
rules for types are derived automatically from the
reduction rules and Poly$\star$ works
otherwise unchanged. The derived type system is
automatically sound (i.e., has subject reduction)
and often more precise than previous type systems
for the calculus, due to Poly$\star$'s
\emph{spatial polymorphism}. We present an
implemented type inference algorithm for
Poly$\star$ which automatically constructs
a typing given a set of reduction rules and a term
to be typed. The generated typings are
\emph{principal} with respect to certain natural
type shape constraints.}
}
@INPROCEEDINGS{Mak+Wel:ICFP-2005,
AUTHOR = {Henning Makholm and J. B. Wells},
TITLE = {Type Inference, Principal Typings, and
Let-Polymorphism for First-Class Mixin Modules},
CROSSREF = {ICFP2005},
CHURCHREPORT = {yes},
DARTREPORT = {yes},
PDF = {http://www.macs.hw.ac.uk/~jbw/papers/Makholm+Wells:Type-Inference,-Principal-Typings,-and-Let-Polymorphism-for-First-Class-Mixin-Modules:ICFP-2005.pdf},
PAGES = {156--167},
ABSTRACT = {A \emph{mixin module} is a programming abstraction
that simultaneously generalizes
$\lambda$-abstractions, records, and mutually
recursive definitions. Although various mixin module
type systems have been developed, no one has
investigated \emph{principal typings} or developed
\emph{type inference} for first-class mixin modules,
nor has anyone added Milner's
\emph{let-polymorphism} to such a system.
\par
This paper proves that typability is NP-complete for
the naive approach followed by previous mixin module
type systems. Because a $\lambda$-calculus extended
with \emph{record concatenation} is a simple
restriction of our mixin module calculus, we also
prove the folk belief that typability is NP-complete
for the naive early type systems for record
concatenation.
\par
To allow feasible type inference, we present
\textsf{Martini}, a new system of \emph{simple
types} for mixin modules with \emph{principal
typings}. \textsf{Martini} is conceptually simple,
with no subtyping and a clean and balanced
separation between unification-based type inference
with type and row variables and constraint solving
for safety of linking and field extraction. We have
implemented a type inference algorithm and we prove
its complexity to be $O(n^2)$, or $O(n)$ given a
fixed bound on the number of field
labels. (Technically, there is also a factor of
$\alpha(n)$, but $\alpha(n)\leq 4$ for
$n<10^{10^{100}}$ (a ``googolplex'').) To prove the
complexity, we need to present an algorithm for
\emph{row unification} that may have been
implemented by others, but which we could not find
written down anywhere. Because \textsf{Martini} has
principal typings, we successfully extend it with
Milner's let-polymorphism.}
}
@TECHREPORT{Bak+Kfo:BUCS-TR-2004-0xx,
AUTHOR = {Adam Bakewell and Assaf J. Kfoury},
TITLE = {Unification with Expansion Variables},
INSTITUTION = {Department of Computer Science, Boston University},
YEAR = {2004},
MONTH = dec,
CHURCHREPORT = {yes},
TYPE = {Technical Report},
PDF = {http://cs-people.bu.edu/bake/uwev/betau.pdf},
POSTSCRIPT = {http://cs-people.bu.edu/bake/uwev/betau.ps.gz}
}
@TECHREPORT{Bak+Car+Kfo+Wel:BUCS-TR-2004-0xy,
AUTHOR = {Adam Bakewell and S{\'e}bastien Carlier and A. J. Kfoury and J. B. Wells},
TITLE = {Exact Intersection Typing Inference and Call-by-Name Evaluation},
INSTITUTION = {Department of Computer Science, Boston University},
YEAR = {2004},
MONTH = dec,
NOTE = {Superseded by \cite{Bak+Car+Kfo+Wel:BUCS-TR-2005a}},
CHURCHREPORT = {yes},
TYPE = {Technical Report},
PDF = {http://cs-people.bu.edu/bake/uwev/seti.pdf},
POSTSCRIPT = {http://cs-people.bu.edu/bake/uwev/seti.ps.gz}
}
@TECHREPORT{Mak+Wel:PolyStar-2004,
AUTHOR = {Henning Makholm and J. B. Wells},
TITLE = {Instant Polymorphic Type Systems for Mobile Process
Calculi: Just Add Reduction Rules and Close},
YEAR = {2004},
MONTH = nov,
CHURCHREPORT = {yes},
DARTREPORT = {yes},
INSTITUTION = {Heriot-Watt Univ., School of Math.\ \& Comput.\
Sci.},
NUMBER = {HW-MACS-TR-0022},
XNOTE = {Available at http://www.macs.hw.ac.uk:8080/},
PDF = {http://www.macs.hw.ac.uk:8080/techreps/docs/files/HW-MACS-TR-0022.pdf},
NOTE = {A shorter successor is~\cite{Mak+Wel:ESOP-2005}},
ABSTRACT = {Many different \emph{mobile process calculi} have
been invented, and for each some number of type
systems has been developed. Soundness and other
properties must be proved separately for each
calculus and type system.
\par
We present the \emph{generic} polymorphic type
system Poly$\star$ which works for a wide
range of mobile process calculi. For any calculus
satisfying some general syntactic conditions,
well-formedness rules for types are derived
automatically from the reduction rules and
Poly$\star$ works otherwise unchanged. The
derived type system is automatically sound and often
more precise than previous type systems for the
calculus, due to Poly$\star$'s
\emph{spatial polymorphism}.
\par
We present an implemented type inference algorithm
for Poly$\star$ which automatically
constructs a typing given a set of reduction rules
and a term to be typed. The generated typings are
\emph{principal} with respect to certain natural
type shape constraints.}
}
@INPROCEEDINGS{Chen+Zhu+Xi:PADL-2004,
AUTHOR = {Chiyan Chen and Dengping Zhu and Hongwei Xi},
TITLE = {Implementing Cut Elimination: A Case Study of Simulating Dependent Types in {Haskell}},
BOOKTITLE = {Proceedings of the 6th International Symposium on Practical Aspects of Declarative Languages},
PUBLISHER = {Springer-Verlag LNCS vol. 3057},
YEAR = {2004},
MONTH = jun,
PAGES = {239--254},
ADDRESS = {Dallas, TX},
CHURCHREPORT = {yes},
POSTSCRIPT = {http://www.cs.bu.edu/~hwxi/academic/papers/CutElim/CutElim.ps},
PDF = {http://www.cs.bu.edu/~hwxi/academic/papers/CutElim/CutElim.pdf},
ABSTRACT = {
Gentzen's Hauptsatz -- cut elimination theorem -- in sequent
calculi reveals a fundamental property on logic connectives in
various logics such as classical logic and intuitionistic logic.
In this paper, we implement a procedure in Haskell to perform cut
elimination for intuitionistic sequent calculus, where we use
types to guarantee that the procedure can only return a cut-free
proof of the same sequent when given a proof of a sequent that may
contain cuts. The contribution of the paper is two-fold. On the
one hand, we present an interesting (and somewhat unexpected)
application of the current type system of Haskell, illustrating
through a concrete example how some typical use of dependent types
can be simulated in Haskell. On the other hand, we identify
several problematic issues with such a simulation technique and
then suggest some approaches to addressing these issues in
Haskell.} }
@INPROCEEDINGS{Chen+Shi+Xi:PADL-2004,
AUTHOR = {Chiyan Chen and Rui Shi and Hongwei Xi},
TITLE = {A Typeful Approach to Object-Oriented Programming with Multiple Inheritance},
BOOKTITLE = {Proceedings of the 6th International Symposium on Practical Aspects of Declarative Languages},
PUBLISHER = {Springer-Verlag LNCS vol. 3057},
PAGES = {23--38},
YEAR = {2004},
MONTH = jun,
ADDRESS = {Dallas, TX},
CHURCHREPORT = {yes},
POSTSCRIPT = {http://www.cs.bu.edu/~hwxi/academic/papers/OBJwMI/OBJwMI.ps},
PDF = {http://www.cs.bu.edu/~hwxi/academic/papers/OBJwMI/OBJwMI.pdf},
ABSTRACT = {
The wide practice of object-oriented programming (OOP) in
current software practice is evident. Despite extensive studies on
typing programming objects, it is still undeniably a challenging
research task to design a type system that can satisfactorily
account for a variety of features (e.g., binary methods and
multiple inheritance) in OOP. In this paper, we present a typeful
approach to implementing objects that makes use of a recently
introduced notion of guarded datatypes. In particular, we
demonstrate how the feature of multiple inheritance can be
supported with this approach, presenting a simple and general
account for multiple inheritance in a typeful manner.} }
@TECHREPORT{Amt+Mak+Wel:PolyA-2004a,
AUTHOR = {Torben Amtoft and Henning Makholm and J. B. Wells},
TITLE = {{PolyA}: True Type Polymorphism for {M}obile
{A}mbients},
YEAR = {2004},
MONTH = feb,
INSTITUTION = {Heriot-Watt Univ., School of Math.\ \& Comput.\
Sci.},
NUMBER = {HW-MACS-TR-0015},
PDF = {http://www.macs.hw.ac.uk:8080/techreps/docs/files/HW-MACS-TR-0015.pdf},
CHURCHREPORT = {yes},
DARTREPORT = {yes},
NOTE = {A shorter successor is \cite{Amt+Mak+Wel:TCS-2004}},
ABSTRACT = {
Previous type systems for mobility calculi (the original Mobile
Ambients, its variants and descendants, e.g., Boxed Ambients and
Safe Ambients, and other related systems) offer little support for
generic mobile agents. Previous systems either do not handle
communication at all or globally assign fixed communication types
to ambient names that do not change as an ambient moves around or
interacts with other ambients. This makes it hard to type
examples such as a ``messenger'' ambient that uses communication
primitives to collect a message of non-predetermined type and
deliver it to a non-predetermined destination.
\par
In contrast, we present our new type system $\mathsf{PolyA}$.
Instead of assigning communication types to ambient names,
$\mathsf{PolyA}$ assigns a type to each process $P$ that gives
upper bounds on (1) the possible ambient nesting shapes of any
process $P'$ to which $P$ can evolve, (2) the values that may be
communicated at each location, and (3) the capabilities that can
be used at each location. Because $\mathsf{PolyA}$ can type
generic mobile agents, we believe $\mathsf{PolyA}$ is the first
type system for a mobility calculus that provides type
polymorphism comparable in power to polymorphic type systems for
the $\lambda$-calculus. $\mathsf{PolyA}$ is easily extended to
ambient calculus variants. A restriction of $\mathsf{PolyA}$ has
principal typings.} }
@TECHREPORT{Hal+Kfo:BUCS-TR-2004-004,
AUTHOR = {Joseph J. Hallett and Assaf J. Kfoury},
TITLE = {Programming Examples Needing Polymorphic Recursion},
INSTITUTION = {Department of Computer Science, Boston University},
YEAR = {2004},
MONTH = jan,
CHURCHREPORT = {yes},
TYPE = {Technical Report},
NUMBER = {BUCS-TR-2004-004},
PDF = {http://www.church-project.org/reports/electronic/Hal+Kfo:BUCS-TR-2004-004.pdf},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Hal+Kfo:BUCS-TR-2004-004.ps.gz},
XPOSTSCRIPT = {http://www.cs.bu.edu/techreports/2004-004-polymorphic-recursion-progams.ps.Z},
XPDF = {http://www.cs.bu.edu/techreports/pdf/2004-004-polymorphic-recursion-progams.pdf},
ITRSPAPER = {yes},
ABSTRACT = {Inferring types for polymorphic recursive function definitions (abbreviated
to {\em polymorphic recursion}) is a recurring topic on the mailing lists
of popular typed programming languages. This is despite the fact that type
inference for polymorphic recursion using $\forall$-types has been proved
undecidable. This report presents several programming examples involving
polymorphic recursion and determines their typability under various type
systems, including the Hindley-Milner system, an intersection-type system,
and extensions of these two. The goal of this report is to show that many
of these examples are typable using a system of intersection types as an
alternative form of polymorphism. By accomplishing this, we hope to lay
the foundation for future research into a decidable intersection-type inference
algorithm.
\par
We do not provide a comprehensive survey of type systems
appropriate for polymorphic recursion, with or without type
annotations inserted in the source language. Rather, we focus on
examples for which types may be inferred without type annotations,
with an emphasis on systems of intersection-types.} }
@INPROCEEDINGS{Xi:TYPES-2004,
AUTHOR = {Hongwei Xi},
TITLE = {Applied Type System (extended abstract)},
BOOKTITLE = {post-workshop Proceedings of TYPES 2003},
PUBLISHER = {Springer-Verlag LNCS 3085},
YEAR = {2004},
PAGES = {394--408},
CHURCHREPORT = {yes},
POSTSCRIPT = {http://www.cs.bu.edu/~hwxi/academic/papers/types03.ps},
PDF = {http://www.cs.bu.edu/~hwxi/academic/papers/types03.pdf},
ABSTRACT = {{
The framework Pure Type System (PTS) offers a simple and general
approach to designing and formalizing type systems. However, in
the presence of dependent types, there often exist some acute
problems that make it difficult for PTS to accommodate many common
realistic programming features such as general recursion,
recursive types, effects (e.g., exceptions, references,
input/output), etc. In this paper, we propose a new framework
Applied Type System (ATS) to allow for designing and formalizing
type systems that can readily support common realistic programming
features. The key salient feature of ATS lies in a complete
separation between statics, in which types are formed and reasoned
about, and dynamics, in which programs are constructed and
evaluated. With this separation, it is no longer possible for a
program to occur in a type as is otherwise allowed in PTS. We
present not only a formal development of ATS but also mention some
examples in support of using ATS as a framework to form type
systems for practical programming.}} }
@ARTICLE{Andrews+etc:JAR-2004,
AUTHOR = {Peter Andrews and Matthew Bishop and Chad Brown and Sunil Issar and Frank Pfenning and Hongwei Xi},
TITLE = {ETPS: A System to Help Students to Write Formal Proofs},
JOURNAL = {Journal of Automated Reasoning},
VOLUME = {32},
YEAR = {2004},
PAGES = {75--92},
CHURCHREPORT = {yes},
POSTSCRIPT = {http://www.cs.bu.edu/~hwxi/academic/papers/etps-jar.ps},
PDF = {http://www.cs.bu.edu/~hwxi/academic/papers/etps-jar.pdf},
ABSTRACT = {
ETPS (Educational Theorem Proving System) is a program which logic
students can use to write formal proofs in first-order logic or
higher-order logic. It helps students to concentrate on the
essential logic problems involved in proving theorems, and
automatically checks the proofs.} }
@TECHREPORT{Mak+Wel:TIP-2004,
AUTHOR = {Henning Makholm and J. B. Wells},
TITLE = {Type Inference for {PolyA}},
YEAR = {2004},
MONTH = {January},
CHURCHREPORT = {yes},
DARTREPORT = {yes},
DISABLEDPDF = {http://www.macs.hw.ac.uk:8080/techreps/docs/files/HW-MACS-TR-0013.pdf},
PDF = {http://www.macs.hw.ac.uk/DART/reports/D2.3/MW04.pdf},
INSTITUTION = {Heriot-Watt Univ., School of Math.\ \& Comput.\
Sci.},
NUMBER = {HW-MACS-TR-0013},
ABSTRACT = {
We present an automatic type inference algorithm for PolyA, a type
system for Mobile Ambients presented in earlier work by us
together with Torben Amtoft. We present not only a basic
inference algorithm, but also several optimizations to it aimed at
reducing the size of the inferred types. The final algorithm has
been implemented and verified to work on small examples. We
discuss some small problems that still exist and methods for
solving them.} }
@INPROCEEDINGS{Amt+Mak+Wel:TCS-2004,
CHURCHREPORT = {yes},
DARTREPORT = {yes},
AUTHOR = {Torben Amtoft and Henning Makholm and J. B. Wells},
TITLE = {{PolyA}: True Type Polymorphism for {M}obile
{A}mbients},
CROSSREF = {TCS2004},
YEAR = {2004},
PAGES = {591--604},
PDF = {http://www.macs.hw.ac.uk/~jbw/papers/Amtoft+Makholm+Wells:PolyA:True-Type-Polymorphism-for-Mobile-Ambients:TCS-2004.pdf},
NOTE = {A more detailed predecessor is
\cite{Amt+Mak+Wel:PolyA-2004a}},
ABSTRACT = {Previous type systems for mobility calculi (the
original Mobile Ambients, its variants and
descendants, e.g., Boxed Ambients and Safe Ambients,
and other related systems) offer little support for
generic mobile agents. Previous systems either do
not handle communication at all or globally assign
fixed communication types to ambient names that do
not change as an ambient moves around or interacts
with other ambients. This makes it hard to type
examples such as a ``messenger'' ambient that uses
communication primitives to collect a message of
non-predetermined type and deliver it to a
non-predetermined destination.
\par
In contrast, we present our new type system
$\mathsf{PolyA}$. Instead of assigning communication
types to ambient names, $\mathsf{PolyA}$ assigns a
type to each process $P$ that gives upper bounds on
(1) the possible ambient nesting shapes of any
process $P'$ to which $P$ can evolve, (2) the values
that may be communicated at each location, and (3)
the capabilities that can be used at each
location. Because $\mathsf{PolyA}$ can type generic
mobile agents, we believe $\mathsf{PolyA}$ is the
first type system for a mobility calculus that
provides type polymorphism comparable in power to
polymorphic type systems for the
$\lambda$-calculus. $\mathsf{PolyA}$ is easily
extended to ambient calculus variants. A restriction
of $\mathsf{PolyA}$ has principal typings.}
}
@ARTICLE{Haa+Wel:SCP-2004-v50,
ITRSPAPER = {yes},
AUTHOR = {Christian Haack and J. B. Wells},
TITLE = {Type Error Slicing in Implicitly Typed Higher-Order
Languages},
JOURNAL = {Sci.\ Comput.\ Programming},
DARTREPORT = {yes},
VOLUME = 50,
PAGES = {189--224},
YEAR = {2004},
PDFTODO = {*** rename this file ***},
PDF = {http://www.macs.hw.ac.uk/~jbw/papers/Haack+Wells:Type-Error-Slicing-in-Implicitly-Typed-Higher-Order-Languages:SCP-ta.pdf},
CHURCHREPORT = {yes},
NOTE = {Supersedes \cite{Haa+Wel:ESOP-2003}},
DOIHTTP = {http://dx.doi.org/10.1016/j.scico.2004.01.004},
DOI = {doi:10.1016/j.scico.2004.01.004},
ABSTRACT = {Previous methods have generally identified the
location of a type error as a particular program
point or the program subtree rooted at that
point. We present a new approach that identifies the
location of a type error as a set of program points
(a \emph{slice}) all of which are necessary for the
type error. We identify the criteria of
\emph{completeness} and \emph{minimality} for type
error slices. We discuss the advantages of complete
and minimal type error slices over previous methods
of presenting type errors. We present and prove the
correctness of algorithms for finding complete and
minimal type error slices for implicitly typed
higher-order languages like Standard~ML.}
}
@INPROCEEDINGS{Car+Pol+Wel+Kfo:ESOP-2004,
ITRSPAPER = {yes},
CHURCHREPORT = {yes},
DARTREPORT = {yes},
AUTHOR = {S{\'e}bastien Carlier and Jeff Polakow and
J. B. Wells and A. J. Kfoury},
TITLE = {{S}ystem {E}: Expansion Variables for Flexible Typing
with Linear and Non-linear Types and Intersection
Types},
CROSSREF = {ESOP2004},
YEAR = {2004},
PAGES = {294--309},
PDF = {http://www.macs.hw.ac.uk/~jbw/papers/Carlier+Polakow+Wells+Kfoury:System-E:ESOP-2004.pdf},
ALTPDF = {http://www.macs.hw.ac.uk/DART/reports/D2.3/CPWK04.pdf},
ABSTRACT = {Types are often used to control and analyze computer
programs. Intersection types give a type system
great flexibility, but have been difficult to
implement. The $!$ operator, used to distinguish
between linear and non-linear types, has good
potential for improving tracking of resource usage,
but has not been as flexible as one might want and
has been difficult to use in compositional
analysis. We introduce System~E, a type system with
expansion variables, linear intersection types, and
the $!$ type constructor for creating non-linear
types. System~E is designed for maximum flexibility
in automatic type inference and for ease of
automatic manipulation of type
information. Expansion variables allow postponing
the choice of which typing rules to use until later
constraint solving gives enough information to allow
making a good choice. System~E removes many
limitations and technical difficulties that
expansion variables had in the earlier System~I and
extends expansion variables to work with $!$ in
addition to the intersection type constructor. We
present subject reduction results for call-by-need
evaluation and discuss approaches for implementing
program analysis in System~E.}
}
@INPROCEEDINGS{Car+Wel:PPDP-2004,
ITRSPAPER = {yes},
CHURCHREPORT = {yes},
DARTREPORT = {yes},
YEAR = {2004},
AUTHOR = {S{\'e}bastien Carlier and J. B. Wells},
TITLE = {Type Inference with Expansion Variables and
Intersection Types in {S}ystem~{E} and an Exact
Correspondence with $\beta$-Reduction},
PDF = {http://www.macs.hw.ac.uk/~jbw/papers/Carlier+Wells:Type-Inference-with-Expansion-Variables-and-Intersection-Types-in-System-E-and-an-Exact-Correspondence-with-Beta-Reduction:PPDP-2004.pdf},
CROSSREF = {PPDP2004},
XPAGES = {132--143},
NOTE = {Completely supersedes \cite{Car+Wel:TIEV-2004}},
ABSTRACT = {System~E is a recently designed type system for the
$\lambda$-calculus with intersection types and
\emph{expansion variables}. During automatic type
inference, expansion variables allow postponing
decisions about which non-syntax-driven typing rules
to use until the right information is available and
allow implementing the choices via substitution.
\par
This paper uses expansion variables in a
unification-based automatic type inference algorithm
for System~E that succeeds for every
$\beta$-normalizable $\lambda$-term. We have
implemented and tested our algorithm and released
our implementation publicly. Each step of our
unification algorithm corresponds to exactly one
$\beta$-reduction step, and \emph{vice versa}. This
formally verifies and makes precise a step-for-step
correspondence between type inference and
$\beta$-reduction. This also shows that type
inference with intersection types and expansion
variables can, in effect, carry out an arbitrary
amount of partial evaluation of the program being
analyzed.}
}
@PHDTHESIS{neergaard-capld-2004,
AUTHOR = {Peter M{\o}ller Neergaard},
TITLE = {Complexity Aspects of Programming Language Design---From Logspace to Elementary Time via Proofnets and Intersection Types},
SCHOOL = {Brandeis University},
YEAR = {2004},
MONTH = {October},
ABSTRACT = {More than being just a tool for expressing algorithms, a
well-designed programming language allows the user to express her
ideas efficiently. The design choices, however, affect the
efficiency of the algorithms written in the languages. It is
therefore important to understand how such choices affect the
expressibility of programming languages. The dissertation
approaches this problem from two different angles.
\par
The dissertation presents what appears to be the first resource
independent characterization of the \textsc{logspace}-computable
\emph{functions}. This is done in the form of a \emph{function
algebra}~BC$^{\mbox{-}}_{\varepsilon}$ that is sound and complete
for \textsc{logspace}. BC$^{\mbox{-}}_{\varepsilon}$ provides
insights into restricting recursion that might be useful to
provide compile-time guarantees on resource bounds. Furthermore,
by comparison with work by Bellantoni and Cook, it highlights
linearity as a potential distinction between~\textsc{logspace}\
and \textsc{ptime}.
\par
The dissertation also formalizes how successful type inference
fundamentally depends on the \emph{amnesia} of the type system:
the fact that the type system forgets information about the
program. \emph{Intersection type systems} without
\emph{idempotency} provides an exact analysis where all
information is retained. The dissertation shows that in such
intersection type systems, normalization is equivalent to type
inference in \emph{every single case}. Time bounds on type
inference and normalization are thus identical and doing
compile-time type inference is useless.
\par
The latter part reveals that \emph{expansion variables} proposed
for type inference are closely related to the boxes already known
from the \emph{linear logic} technology of \emph{proofnets}. This
hitherto unknown connection between the two technologies suggests
fruitful contributions between the two fields. Moreover, it
extends and formalizes a previously known connection between
intersection types and proofnets.},
PDF = {http://www.linearity.org/turtle/down/pmn-dissertation.pdf},
PMNWWW = {dissertation},
CHURCHREPORT = {yes}
}
@ARTICLE{neergaard-jfp-ta,
AUTHOR = {Peter M{\o}ller Neergaard},
TITLE = {A Bargain for Intersection Types: A Simple Strong Normalization Proof},
JOURNAL = {J.~Funct.\ Programming},
NOTE = {To appear},
PMNWWW = {journal},
CHURCHREPORT = {yes},
PDF = {http://www.church-project.org/reports/electronic/neergaard-jfp-ta.pdf},
URL = {http://www.church-project.org/reports/electronic/neergaard-jfp-ta.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/neergaard-jfp-ta.ps.gz},
ABSTRACT = {This pearl gives a discount proof of the folklore theorem that every
strongly $\beta$-normalizing $\lambda$-term is typable with an
intersection type. (We consider typings that do not use the empty
intersection~$\omega$ which can type any term.) The proof uses the
perpetual reduction
strategy which finds a longest path. This is a simplification over existing proofs that
consider any longest reduction path. The choice of reduction
strategy avoids the need for weakening or strengthening of type derivations. The proof
becomes a bargain because it works for more intersection type
systems, while being simpler than existing proofs.}
}
@TECHREPORT{Neergaard-brtcl-2004,
AUTHOR = {Peter M{\o}ller Neergaard},
TITLE = {{BC$^{\textrm{-}}_\epsilon$}: A Recursion-Theoretic Characterization of \textsc{logspace}},
INSTITUTION = {Brandeis University},
YEAR = {2004},
MONTH = {March},
NOTE = {Preliminary version},
CHURCHREPORT = {yes},
ABSTRACT = {We present {BC$^{\textrm{-}}_\epsilon$} which is a function algebra that is sound and
complete for \textsc{logspace}. It is based on the novel
recursion-theoretic principle of generalized recursion where the
step length of primitive recursion can be varied at each step. This
allows elegant representations of functions like logarithm and
division. Unlike characterizations found in the literature, it is
noticeable that there does not appear to be a way to represent pairs
in the algebra.\par
The soundness proof uses a simulation based on ``computational
amnesia'' where, analogously to tail recursion optimization, a
recursive call replaces its own activation record. Even though the
call is not necessarily tail, we recover the full recursion by
repeatedly restarting the computation.},
PDF = {http://www.church-project.org/reports/electronic/Neergaard-brtcl-2004.pdf},
URL = {http://www.church-project.org/reports/electronic/Neergaard-brtcl-2004.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Neergaard-brtcl-2004.ps.gz},
PMNWWW = {techreport}
}
@INPROCEEDINGS{neergaard-aplas-2004,
AUTHOR = {Peter M{\o}ller Neergaard},
TITLE = {A Functional Language for Logarithmic Space},
CROSSREF = {APLAS2004},
YEAR = {2004},
PAGES = {311--326},
DOI = {http://www.springerlink.com/(qe2jbx55cx2ecx55i0bnjp45)/app/home/contribution.asp?referrer=parent&backto=issue,21,29;journal,667,3858;linkingpublicationresults,1:105633,1},
ABSTRACT = {
More than being just a tool for expressing algorithms, a
well-designed programming language allows the user to express her
ideas efficiently. The design choices, however, affect the
efficiency of the algorithms written in the languages. It is
therefore important to understand how such choices affect the
expressibility of programming languages.
\par
The paper pursues the very low complexity programs by presenting a
first-order function
algebra~\textrm{BC}$^{\textrm{-}}_\varepsilon$ that captures
exactly \textsc{logspace}, the functions computable in logarithmic
space. This gives insights into the expressiveness of recursion.
\par
The important technical features of
\textrm{BC}$^{\textrm{-}}_{\varepsilon}$ are (1) a separation of
variables into safe and normal variables where recursion can only
be done over the latter; (2) linearity of the recursive call; and
(3) recursion with a variable step length (course-of-value
recursion). Unlike formulations of \textsc{logspace} via Turing
machines, \textrm{BC}$^{\textrm{-}}_{\varepsilon}$ makes no
references to outside resource measures, e.g., the size of the
memory used. This appears to be the first such characterization of
\textsc{logspace}-computable functions (not just predicates).
\par
The proof that all
\textrm{BC}$^{\textrm{-}}_{\varepsilon}$-programs can be evaluated
in \textsc{logspace} is of separate interest to programmers: it
trades space for time and evaluates recursion with at most one
recursive call without a call stack.},
CHURCHREPORT = {yes},
PMNWWW = {conference},
PDF = {http://www.church-project.org/reports/electronic/neergaard-aplas-2004.pdf},
URL = {http://www.church-project.org/reports/electronic/neergaard-aplas-2004.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/neergaard-aplas-2004.ps.gz},
COPYRIGHT = {\copyright\ Springer-Verlag; Appears in the
\begin{rawhtml}\end{rawhtml}Lecture Notes in Computer Science\begin{rawhtml}\end{rawhtml} series}
}
@INPROCEEDINGS{Nee-Mai-icfp-2004,
ITRSPAPER = {yes},
AUTHOR = {Peter M{\o}ller Neergaard and Harry G. Mairson},
TITLE = {Types, Potency, and Idempotency: {W}hy Nonlinearity and Amnesia Make a Type System Work},
YEAR = {2004},
ABSTRACT = {Useful type inference must be faster than normalization.
Otherwise, you could check safety conditions by running the program.
We analyze the relationship between bounds on normalization and type
inference. We show how the success of type inference is
fundamentally related to the \emph{amnesia} of the type system: the
\emph{nonlinearity} by which all instances of a variable are
constrained to have the same type.
\par
Recent work on \emph{intersection types} has advocated their
usefulness for static analysis and modular compilation. We analyze
system~$\mathbb{I}$ (and some instances of its descendant, system~E), an
intersection type system with a type inference algorithm. Because
system~$\mathbb{I}$ lacks \emph{idempotency}, each occurrence of a variable
requires a distinct type. Consequently, type inference is
equivalent to normalization in \emph{every single case}, and time
bounds on type inference and normalization are identical. Similar
relationships hold for other intersection type systems without
idempotency.
\par
The analysis is founded on an investigation of the relationship
between \emph{linear logic} and intersection types. We show a lockstep
correspondence between normalization and type inference. The latter
shows the promise of intersection types to facilitate static analyses
of varied granularity, but also belies an immense challenge: to add
amnesia to such analysis without losing all of its benefits.},
CROSSREF = {ICFP2004},
CHURCHREPORT = {yes},
PDF = {http://www.church-project.org/reports/electronic/Nee-Mai-icfp-2004.pdf},
URL = {http://www.church-project.org/reports/electronic/Nee-Mai-icfp-2004.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Nee-Mai-icfp-2004.ps},
PMNWWW = {conference},
COPYRIGHT = {\copyright{}ACM, 2004. This is the author's version of
the work. It is posted here by permission of ACM for
your personal use. Not for redistribution. The
definitive version was published in {ACM SIGPLAN
Notices}, Volume~39, Issue~9 (September 2004)},
DOI = {http://portal.acm.org/citation.cfm?doid=1016850.1016871}
}
@ARTICLE{Kfo+Wel:TCSB-2004-v311n1-3,
ITRSPAPER = {yes},
CHURCHREPORT = {yes},
DARTTODO = {Mention this.},
AUTHOR = {Assaf J. Kfoury and J. B. Wells},
TITLE = {Principality and Type Inference for Intersection
Types Using Expansion Variables},
JOURNAL = {Theoret.\ Comput.\ Sci.},
VOLUME = {311},
NUMBER = {1--3},
PAGES = {1--70},
XNUMBERNOTE = {Elsevier refers to "issues" 1--3, not "number"},
YEAR = {2004},
DOI = {doi:10.1016/j.tcs.2003.10.032},
SCIENCEDIRECT = {http://authors.elsevier.com/sd/article/S0304397503005772},
TCSREF = {TCS4942},
DOCUMENTURL = {http://www.church-project.org/reports/Kfo+Wel:TCSB-2004-v311n1-3.html},
PDF = {http://www.church-project.org/reports/electronic/Kfo+Wel:TCSB-2004-v311n1-3.pdf},
NOTE = {Supersedes \cite{Kfo+Wel:POPL-1999}.
For omitted proofs, see the longer
report \cite{Kfo+Wel:PTI-2003}},
ABSTRACT = {Principality of typings is the property that for
each typable term, there is a typing from which all
other typings are obtained via some set of
operations. Type inference is the problem of finding
a typing for a given term, if possible. We define an
intersection type system which has principal typings
and types exactly the strongly normalizable
$\lambda$-terms. More interestingly, every
finite-rank restriction of this system (using
Leivant's first notion of rank) has principal
typings and also has decidable type inference. This
is in contrast to System~F where the finite rank
restriction for every finite rank at 3 and above has
neither principal typings nor decidable type
inference. Furthermore, the notion of principal typings
for our system involves only one operation,
substitution, rather than several operations (not
all substitution-based) as in earlier presentations
of principality for intersection types (without rank
restrictions). In our system the earlier notion of
\emph{expansion} is integrated in the form of
\emph{expansion variables}, which are subject to
substitution as are ordinary variables. A
unification-based type inference algorithm is
presented using a new form of unification,
$\beta$-unification.}
}
@INPROCEEDINGS{Zhu+Xi:APLAS-2003,
AUTHOR = {Dengping Zhu and Hongwei Xi},
TITLE = {A Typeful and Tagless Representation for XML Documents},
BOOKTITLE = {Proceedings of the First Asian Symposium on Programming Languages and Systems},
YEAR = {2003},
MONTH = {November},
ADDRESS = {Beijing, China},
PAGES = {89--104},
CHURCHREPORT = {yes},
POSTSCRIPT = {http://cs-people.bu.edu/zhudp/pubs/aplas03.ps},
PDF = {http://cs-people.bu.edu/zhudp/pubs/aplas03.pdf},
ABSTRACT = {When constructing programs to process XML documents, we immediately face
the question as to how XML documents should be represented
internally in the programming language we use. Currently, most
representations for XML documents are typeless in the sense that
the type information of an XML document cannot be reflected in the
type of the representation of the document (if the representation
is assumed to be typed). Though convenient to construct, a
typeless representation for XML documents often makes use of a
large number of representation tags, which not only require some
significant amount of space to store but may also incur numerous
run-time tag checks when the represented documents are processed.
Moreover, with a typeless representation for XML documents, it
becomes difficult or even impossible to statically capture program
invariants that are related to the type information of XML
documents. Building upon our recent work on guarded recursive
datatypes, we present an approach to representing XML documents in
this paper that not only allows the type information of an XML
document to be reflected in the type of the representation of the
document but also significantly reduces the need for
representation tags that are required in typeless representations.
With this approach, we become able to process XML documents in a
typeful manner, thus reaping various well-known software
engineering benefits from the presence of types.} }
@INPROCEEDINGS{Taha+Ellner+Xi:EMSOFT-2003,
AUTHOR = {Walid Taha and Stephan Ellner and Hongwei Xi},
TITLE = {Generating Imperative, Heap-Bounded Programs in a Functional Setting},
BOOKTITLE = {Proceedings of the Third International Conference on Embedded Software},
PUBLISHER = {Springer-Verlag LNCS 2855},
YEAR = {2003},
MONTH = {October},
ADDRESS = {Philadelphia, PA},
PAGES = {340--355},
CHURCHREPORT = {yes},
POSTSCRIPT = {http://www.cs.bu.edu/~hwxi/academic/papers/emsoft03.ps},
PDF = {http://www.cs.bu.edu/~hwxi/academic/papers/emsoft03.pdf},
ABSTRACT = {
High-level programming languages offer significant expressivity
but provide little or no guarantees about resource utilization.
Resource-bounded languages provide strong guarantees about the
runtime behavior of programs but often lack mechanisms that allow
programmers to write more structured, modular, and reusable
programs. To overcome this basic tension in language design, this
paper advocates taking into account the natural distinction
between the development platform and the deployment platform for
resource-sensitive software. To illustrate this approach, we
develop the meta-theory for GeHB, a two-level language in which
first stage computations can involve arbitrary resource
consumption, but the second stage can only involve functional
programs that do not require new heap allocations. As an example
of such a second-stage language we use the recently proposed
first-order functional language LFPL. LFPL can be compiled
directly to malloc-free, imperative C code. We show that all
generated programs in GeHB can be transformed into well-typed LFPL
programs, thus ensuring that the results established for LFPL are
directly applicable to GeHB.} }
@INPROCEEDINGS{Xi:SEFM-2003,
AUTHOR = {Hongwei Xi},
TITLE = {Facilitating Program Verification with Dependent Types},
BOOKTITLE = {Proceedings of the International Conference on Software Engineering and Formal Methods},
YEAR = {2003},
MONTH = {September},
ADDRESS = {Brisbane, Australia},
PAGES = {72--81},
POSTSCRIPT = {http://www.cs.bu.edu/~hwxi/academic/papers/sefm03.ps},
PDF = {http://www.cs.bu.edu/~hwxi/academic/papers/sefm03.pdf},
CHURCHREPORT = {yes},
ABSTRACT = {
The use of types in capturing program invariants is overwhelming
in practical programming. The type systems in languages such as
ML and Java scale convincingly to realistic programs but they are
of relatively limited expressive power. In this paper, we show
that the use of a restricted form of dependent types can enable us
to capture many more program invariants such as memory safety
while retaining practical type-checking. The programmer can encode
program invariants with type annotations and then verify these
invariants through static type-checking. Also the type annotations
can serve as informative program documentation, which are
mechanically verified and can thus be fully trusted. We argue with
realistic examples that this restricted form of dependent types
can significantly facilitate program verification as well as
program documentation.} }
@INPROCEEDINGS{Chen+Xi:ICFP-2003,
AUTHOR = {Chiyan Chen and Hongwei Xi},
TITLE = {Meta-Programming through Typeful Code Representation},
BOOKTITLE = {Proceedings of the Eighth ACM SIGPLAN International Conference on Functional Programming},
YEAR = {2003},
MONTH = {August},
ADDRESS = {Uppsala, Sweden},
PAGES = {275--286},
POSTSCRIPT = {http://www.cs.bu.edu/~hwxi/academic/papers/icfp03.ps},
PDF = {http://www.cs.bu.edu/~hwxi/academic/papers/icfp03.pdf},
CHURCHREPORT = {yes},
ABSTRACT = {
By allowing the programmer to write code that can generate code at
run-time, meta-programming offers a powerful approach to program
construction. For instance, meta-programming can often be employed
to enhance program efficiency and facilitate the construction of
generic programs. However, meta-programming, especially in an
untyped setting, is notoriously error-prone. In this paper, we
aim at making meta-programming less error-prone by providing a
type system to facilitate the construction of correct
meta-programs. We first introduce some code constructors for
constructing typeful code representation in which program
variables are replaced with deBruijn indices, and then formally
demonstrate how such typeful code representation can be used to
support meta-programming. The main contribution of the paper lies
in recognition and then formalization of a novel approach to typed
meta-programming that is practical, general and flexible.} }
@INPROCEEDINGS{Chen+Xi:PEPM-2003,
AUTHOR = {Chiyan Chen and Hongwei Xi},
TITLE = {Implementing Typeful Program Transformation},
BOOKTITLE = {Proceedings of ACM SIGPLAN Workshop on Partial Evaluation and Semantics Based Program Manipulation},
YEAR = {2003},
MONTH = {June},
CHURCHREPORT = {yes},
ADDRESS = {San Diego, CA},
POSTSCRIPT = {http://www.cs.bu.edu/~hwxi/academic/papers/pepm03.ps},
PDF = {http://www.cs.bu.edu/~hwxi/academic/papers/pepm03.pdf},
ABSTRACT = {
The notion of program transformation is ubiquitous in programming
language studies on interpreters, compilers, partial evaluators,
etc. In order to implement a program transformation, we need to
choose a representation in the meta language, that is, the
programming language in which we construct programs, for
representing object programs, that is, the programs in the object
language on which the program transformation is to be performed.
In practice, most representations chosen for typed object programs
are typeless in the sense that the type of an object program
cannot be reflected in the type of its representation. This is
unsatisfactory as such typeless representations make it impossible
to capture in the type system of the meta language various
invariants in a program transformation that are related to the
types of object programs. In this paper, we propose an approach
to implementing program transformations that makes use of a
first-order typeful program representation formed in Dependent ML
(DML), where the type of an object program as well as the types of
the free variables in the object program can be reflected in the
type of the representation of the object program. We introduce
some programming techniques needed to handle this typeful program
representation, and then present an implementation of a CPS
transform function where the relation between the type of an
object program and that of its CPS transform is captured in the
type system of DML. In a broader context, we claim to have taken a
solid step along the line of research on constructing certifying
compilers.} }
@INPROCEEDINGS{Xi:SBLP-2003,
AUTHOR = {Hongwei Xi},
TITLE = {Dependently Typed Pattern Matching},
BOOKTITLE = {Proceedings of Simposio Brasileiro de Linguagens de Programacao},
YEAR = {2003},
MONTH = {May},
ADDRESS = {Ouro Preto, Brazil},
CHURCHREPORT = {yes},
POSTSCRIPT = {http://www.cs.bu.edu/~hwxi/academic/papers/sblp03.ps},
PDF = {http://www.cs.bu.edu/~hwxi/academic/papers/sblp03.pdf},
ABSTRACT = {
The mechanism for declaring datatypes to model data structures in
functional programming languages such as Standard ML and Haskell
can offer both convenience in programming and clarity in code.
With the introduction of dependent datatypes in DML, the
programmer can model data structures with more accuracy, thus
capturing more program invariants. In this paper, we study some
practical aspects of dependent datatypes that affect both
type-checking and compiling pattern matching. The results, which
have already been tested, demonstrate that dependent datatype can
not only offer various programming benefits but also lead to
performance gains, yielding a concrete case where safer programs
run faster.} }
@UNPUBLISHED{Wel+Plu+Kam:DMP-2003,
AUTHOR = {J. B. Wells and Detlef Plump and Fairouz Kamareddine},
TITLE = {Diagrams for Meaning Preservation},
NOTE = {Long version of \cite{Wel+Plu+Kam:RTA-2003}},
MONTH = {April},
YEAR = {2003},
CHURCHREPORT = {yes},
PDF = {http://www.macs.hw.ac.uk/~jbw/papers/Wells+Plump+Kamareddine:Diagrams-for-Meaning-Preservation:2003-04-16.pdf},
ABSTRACT = {This paper presents an abstract framework and
multiple diagram-based methods for proving
\emph{meaning preservation}, i.e., that all rewrite
steps of a rewriting system preserve the meaning
given by an operational semantics based on a
rewriting strategy. While previous rewriting-based
methods have generally needed the treated rewriting
system as a whole to have such properties as, e.g.,
\emph{confluence}, \emph{standardization}, and/or
\emph{termination} or \emph{boundedness} of
\emph{developments}, our methods can work when
\emph{all} of these conditions fail, and thus can
handle more rewriting systems. We isolate the new
\emph{lift/project with termination} diagram as the
key proof idea and show that previous
rewriting-based methods (Plotkin's method based on
\emph{confluence} and \emph{standardization} and
Machkasova and Turbak's method based on distinct
\emph{lift} and \emph{project} properties)
implicitly use this diagram. Furthermore, our
framework and proof methods help reduce the proof
burden substantially by, e.g., supporting separate
treatment of partitions of the rewrite steps,
needing only \emph{elementary diagrams} for rewrite
step interactions, excluding many rewrite step
interactions from consideration, needing weaker
termination properties, and providing generic
support for using developments in combination with
any method.}
}
@ARTICLE{Xi:JUCS-2003,
AUTHOR = {Hongwei Xi},
TITLE = {Dependently Typed Pattern Matching},
JOURNAL = {Journal of Universal Computer Science},
VOLUME = {9},
NUMBER = {8},
YEAR = {2003},
PAGES = {851--872},
CHURCHREPORT = {yes},
POSTSCRIPT = {http://www.cs.bu.edu/~hwxi/academic/papers/jucs03.ps},
PDF = {http://www.cs.bu.edu/~hwxi/academic/papers/jucs03.pdf},
ABSTRACT = {
The mechanism for declaring datatypes to model data structures in
programming languages such as Standard ML and Haskell can offer
both convenience in programming and clarity in code. With the
introduction of dependent datatypes in DML, the programmer can
model data structures with more accuracy, thus capturing more
program invariants. In this paper, we study some practical
aspects of dependent datatypes that affect both type-checking and
compiling pattern matching. The results, which have already been
tested, demonstrate that dependent datatypes can not only offer
various programming benefits but also lead to performance gains,
yielding a concrete case where safer programs run faster. } }
@INPROCEEDINGS{Xi+Chen+Chen:POPL-2003,
AUTHOR = {Hongwei Xi and Chiyan Chen and Gang Chen},
TITLE = {Guarded Recursive Datatype Constructors},
BOOKTITLE = {Proceedings of the 30th ACM SIGPLAN Symposium on Principles of Programming Languages},
YEAR = {2003},
MONTH = {January},
ADDRESS = {New Orleans},
PAGES = {224--235},
CHURCHREPORT = {yes},
POSTSCRIPT = {http://www.cs.bu.edu/~hwxi/academic/papers/popl03.ps},
PDF = {http://www.cs.bu.edu/~hwxi/academic/papers/popl03.pdf},
ABSTRACT = {We introduce a notion of guarded recursive (g.r.) datatype
constructors, generalizing the notion of recursive datatypes in
functional programming languages such as ML and Haskell. We
address both theoretical and pragmatic issues resulted from this
generalization. On one hand, we design a type system to formalize
the notion of g.r. datatype constructors and then prove the
soundness of the type system. On the other hand, we present some
significant applications (e.g., implementing objects, implementing
staged computation, etc.) of g.r. datatype constructors, arguing
that g.r. datatype constructors can have far-reaching consequences
in programming. The main contribution of the paper lies in the
recognition and then the formalization of a programming notion
that is of both theoretical interest and practical use.} }
@INPROCEEDINGS{Wel+Plu+Kam:RTA-2003,
AUTHOR = {J. B. Wells and Detlef Plump and Fairouz Kamareddine},
TITLE = {Diagrams for Meaning Preservation},
NOTE = {A long version is \cite{Wel+Plu+Kam:DMP-2003}},
CROSSREF = {RTA2003},
YEAR = {2003},
PAGES = {88--106},
CHURCHREPORT = {yes},
PDF = {http://www.macs.hw.ac.uk/~jbw/papers/Wells+Plump+Kamareddine:Diagrams-for-Meaning-Preservation:RTA-2003.pdf},
ABSTRACT = {This paper presents an abstract framework and
multiple diagram-based methods for proving
\emph{meaning preservation}, i.e., that all rewrite
steps of a rewriting system preserve the meaning
given by an operational semantics based on a
rewriting strategy. While previous rewriting-based
methods have generally needed the treated rewriting
system as a whole to have such properties as, e.g.,
\emph{confluence}, \emph{standardization}, and/or
\emph{termination} or \emph{boundedness} of
\emph{developments}, our methods can work when
\emph{all} of these conditions fail, and thus can
handle more rewriting systems. We isolate the new
\emph{lift/project with termination} diagram as the
key proof idea and show that previous
rewriting-based methods (Plotkin's method based on
\emph{confluence} and \emph{standardization} and
Machkasova and Turbak's method based on distinct
\emph{lift} and \emph{project} properties)
implicitly use this diagram. Furthermore, our
framework and proof methods help reduce the proof
burden substantially by, e.g., supporting separate
treatment of partitions of the rewrite steps,
needing only \emph{elementary diagrams} for rewrite
step interactions, excluding many rewrite step
interactions from consideration, needing weaker
termination properties, and providing generic
support for using developments in combination with
any method.}
}
@MISC{Chen:JLC-2003,
AUTHOR = {Gang Chen},
TITLE = {Soundness of Coercion in the Calculus of Constructions},
CHURCHREPORT = {yes},
YEAR = {2003},
KEYWORDS = {Subtyping, coercion, Calculus of Constructions},
XPOSTSCRIPT = {http://www.church-project.org/reports/electronic/Chen:JLC-2003.ps.gz},
XURL = {http://www.church-project.org/reports/electronic/Chen:JLC-2003.pdf.gz},
XPDF = {http://www.church-project.org/reports/electronic/Chen:JLC-2003.pdf},
ABSTRACT = {We present a coercive subtyping system for the calculus of constructions. The proposed system $\lambda C_{\leq}^{co}$ is obtained essentially by adding coercions and $\eta$-conversion to $\lambda C_\leq$\cite{Chen97a}, which is a subtyping extension to the calculus of constructions without coercions. We proved that a coercion $c : A \leq B$ is semantically sound. That is, 1) $c$ is a term of type $A\to B$; 2) $c$ behaves like an identity function. This development follows the spirit of semantical interpretation of subtyping in \cite{LMS95,LMS98}.},
NOTE = {To appear in Journal of Logic and Computation}
}
@INPROCEEDINGS{Chen:POPL-2003,
AUTHOR = {Gang Chen},
TITLE = {Coercive Subtyping for the Calculus of Constructions
(extended abstract) },
CHURCHREPORT = {yes},
CROSSREF = {POPL2003},
KEYWORDS = {Subtyping, coercion, Calculus of Constructions},
YEAR = {2003},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Chen:POPL-2003.ps.gz},
URL = {http://www.church-project.org/reports/electronic/Chen:POPL-2003.pdf.gz},
PDF = {http://www.church-project.org/reports/electronic/Chen:POPL-2003.pdf},
ABSTRACT = {We present a coercive subtyping system for the calculus of
constructions. The proposed system $\lambda C_{\leq}^{co}$ is obtained
essentially by adding coercions and $\eta$-conversion to
$\lambda C_\leq$\cite{Chen97a}, which is a subtyping
extension to the calculus of constructions without
coercions. Following \cite{LMS95,LMS98}, the coercive
subtyping $c : A \leq B$ is understood as a special case
of typing in arrow type $c : A \to B$ such that the term
$c$ behaves like an identity function. We prove that,
with respect to this semantic interpretation, the
proposed coercive subtyping system is sound and complete,
and that this completeness leads to transitivity
elimination. In addition, we establish the equivalence
between $\lambda C_{\leq}^{co}$ and $\lambda C_{\beta \eta}$, this fact implies that
$\lambda C_{\leq}^{co}$ has confluence, subject reduction and strong
normalization. We propose a formalization of coercion
inference problem and present a sound and complete
coercion inference algorithm.}
}
@UNPUBLISHED{Nee-Mai-rank-bnd-inter-ta,
AUTHOR = {Peter M{\o}ller Neergaard and Harry G. Mairson},
TITLE = {Rank Bounded Intersection: Types, Potency, and
Idempotency},
ABSTRACT = { Intersection type systems realize a \emph{finite
polymorphism} where different types for a term are
itemized explicitly. We analyze System~$\mathbb{I}$,
a rank-bounded intersection type system where
intersection is not \emph{associative, commutative,}
or \emph{idempotent} (ACI), but includes a
substitution mechanism employing {\em expansion
variables} that facilitates modular program
composition and flow analysis. This type system is
used in a prototype intersection type compiler for
the Church project. We prove that the problem of
type inference is exactly as hard as the problem of
normalization: the worst-case cost of both is an
elementary function, where the iterated exponential
depends on the rank. The key to these results is
that simply-typed terms must be linear without ACI,
but have the usual nonelementary power with
ACI. Further, type inference is \emph{always}
synonymous with normalization: the cost of computing
the principal typing of any term is exactly the cost
of computing its normal form. These results do not
hold when AC, and particularly I, is added.},
NOTE = {This paper is obsolete and has been replaced with \cite{Nee-Mai-icfp-2004}},
YEAR = {2003},
CHURCHREPORT = {yes}
}
@INPROCEEDINGS{Haa+Wel:ESOP-2003,
ITRSPAPER = {yes},
AUTHOR = {Christian Haack and J. B. Wells},
TITLE = {Type Error Slicing in Implicitly Typed Higher-Order
Languages},
YEAR = {2003},
CROSSREF = {ESOP2003},
PAGES = {284--301},
CHURCHREPORT = {yes},
NOTE = {Superseded by \cite{Haa+Wel:SCP-2004-v50}},
DARTREPORT = {yes},
XDARTREPORT = {not a DART paper, although its journal version is part of a deliverable},
PDF = {http://www.macs.hw.ac.uk/~jbw/papers/Haack+Wells:Type-Error-Slicing-in-Implicitly-Typed-Higher-Order-Languages:ESOP-2003.pdf},
ABSTRACT = {Previous methods have generally identified the
location of a type error as a particular program
point or the program subtree rooted at that
point. We present a new approach that identifies the
location of a type error as a set of program
points (a \emph{slice}) all of which are necessary
for the type error. We describe algorithms for
finding minimal type error slices for implicitly typed
higher-order languages like Standard ML.}
}
@UNPUBLISHED{Kfo+Wel:PTI-2003,
AUTHOR = {Assaf J. Kfoury and J. B. Wells},
TITLE = {Principality and Type Inference for Intersection
Types Using Expansion Variables},
CHURCHREPORT = {yes},
DARTTODO = {Mention this.},
PDF = {http://www.church-project.org/reports/electronic/Kfo+Wel:PTI-2003.pdf.gz},
NOTE = {Supersedes \cite{Kfo+Wel:POPL-1999}},
MONTH = {August},
YEAR = {2003},
ABSTRACT = {Principality of typings is the property that for
each typable term, there is a typing from which all
other typings are obtained via some set of
operations. Type inference is the problem of finding
a typing for a given term, if possible. We define an
intersection type system which has principal typings
and types exactly the strongly normalizable
$\lambda$-terms. More interestingly, every
finite-rank restriction of this system (using
Leivant's first notion of rank) has principal
typings and also has decidable type inference. This
is in contrast to System~F where the finite rank
restriction for every finite rank at 3 and above has
neither principal typings nor decidable type
inference. Furthermore, the notion of principal typings
for our system involves only one operation,
substitution, rather than several operations (not
all substitution-based) as in earlier presentations
of principality for intersection types (without rank
restrictions). In our system the earlier notion of
\emph{expansion} is integrated in the form of
\emph{expansion variables}, which are subject to
substitution as are ordinary variables. A
unification-based type inference algorithm is
presented using a new form of unification,
$\beta$-unification.}
}
@UNPUBLISHED{Nee-Mai-how-light-is-safe-rec-LICS03,
AUTHOR = {Peter M{\o}ller Neergaard and Harry G. Mairson},
TITLE = {How Light is Safe Recursion? {T}ranslations Between Logics of Polynomial Time},
NOTE = {Unpublished note},
YEAR = {2003}
}
@UNPUBLISHED{Nee-Mai-how-light-is-safe-rec,
CROSSREF = {Nee-Mai-how-light-is-safe-rec-LICS03},
YEAR = {2003},
ABSTRACT = {We investigate the simulation of Bellantoni and
Cook's function algebra BC by Girard's Light Linear
Logic LAL (as amended by Asperti). Both of these
languages characterize exactly the polynomial time
functions. Given a $\textsc{ptime}$ function $f$
described in BC program $\phi_f$, how can we
translate $\phi_f$ into LAL? \par The best known
translation, due to Murawski and Ong, compiles only
the \emph{linear} fragment $\textsc{bc}^-$, where
\emph{safe variables} cannot be shared. We show that
this fragment can be evaluated in
$\textsc{logspace}$. Next, we prove that extensions
of their approach to the full BC algebra, as well as
any translation that interprets integers and
primitive recursion via their standard, System
F-inspired inductive type coding, cannot provide a
correct translation into LAL. Finally, we provide a
correct translation, based on generalizing Turing
machine simulations into an SECD compiler, realized
in LAL. We further elaborate this translation so
that it is compositional on the BC combinators.},
PDF = {http://www.church-project.org/reports/electronic/Nee-Mai-how-light-is-safe-rec.pdf},
URL = {http://www.church-project.org/reports/electronic/Nee-Mai-how-light-is-safe-rec.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Nee-Mai-how-light-is-safe-rec.ps},
CHURCHREPORT = {yes},
PMNWWW = {conference}
}
@INPROCEEDINGS{Amt+Mul:IpRA-2002,
AUTHOR = {Torben Amtoft and Robert Muller},
TITLE = {Inferring Annotated Types for Inter-procedural
Register Allocation with Constructor Flattening},
BOOKTITLE = {The 2003 ACM SIGPLAN International Workshop on Types
in Language Design and Implementation (TLDI'03)},
LOCATION = {New Orleans, Louisiana, USA},
PAGES = {86--97},
YEAR = {2003},
MONTH = {January},
PUBLISHER = {{ACM} {P}ress},
CHURCHREPORT = {yes},
ABSTRACT = {We introduce an annotated type system for a
compiler intermediate language. The type system is designed to
support inter-procedural register allocation and the
representation of tuples and variants directly in the register
file. We present an algorithm for assigning annotations and prove
its soundness with respect to the type system.},
PDF = {http://www.church-project.org/reports/electronic/Amt+Mul:IpRA-2002.pdf},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Amt+Mul:IpRA-2002.ps}
}
@ARTICLE{Nee-Sor-IAC-2002-v178n1,
CROSSREF = {Nee+Sor:IAC-ta},
PMNWWW = {journal},
CHURCHREPORT = {yes},
YEAR = {2002},
ABSTRACT = {For a notion of reduction in a $\lambda$-calculus
one can ask whether a term satisfies
\emph{conservation} and \emph{uniform
normalization}. Conservation means that single-step
reductions of the term preserve infinite reduction
paths from the term. Uniform normalization means
that either the term will have no reduction paths
leading to a normal form, or all reduction paths
will lead to a normal form. \par In the classical
\emph{conservation theorem} for $\Lambda^I$ the
distinction between the two notions is not clear:
uniform normalization implies conservation, and
conservation also implies uniform normalization. The
reason for this is that $\Lambda^I$ is closed under
reduction, due to the fact that reductions never
erase terms in $\Lambda^I$. More generally for
non-erasing reductions, the two notions are
equivalent on a set closed under reduction. However,
when turning to erasing reductions the distinction
becomes important as conservation no longer implies
uniform normalization. \par This paper presents a
new technique for finding uniformly normalizing
subsets of a $\lambda$-calculus. This is done by
combining a syntactic and a semantic criterion. The
technique is demonstrated by several
applications. The technique is used to present a new
uniformly normalizing subset of the pure
$\lambda$-calculus; this subset is a superset
of~$\Lambda^I$ and thus contains erasing
$\mathbf{K}$-redexes. The technique is also used to
prove strong normalization from weak normalization
of the simply typed $\lambda$-calculus extended with
pairs; this is an extension of techniques developed
recently by S{\o}rensen and Xi. Before presenting
the technique the paper presents a simple proof of a
slightly weaker form of the characterization of
perpetual redexes by Bergstra and Klop; this is a
stepping stone for the later applications of the
technique.},
COPYRIGHT = {\copyright{}This paper is Copyright Elsevier.},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Nee-Sor-IAC-2002-v178n1.ps.gz},
PDF = {http://www.church-project.org/reports/electronic/Nee-Sor-IAC-2002-v178n1.pdf},
URL = {http://www.church-project.org/reports/electronic/Nee-Sor-IAC-2002-v178n1.pdf.gz}
}
@ARTICLE{Nee+Sor:IAC-ta,
AUTHOR = {Peter M{\o}ller Neergaard and Morten Heine Bj{\o}rnlund S{\o}rensen},
TITLE = {Conservation and Uniform Normalization in $\lambda$-calculi with Erasing Reductions},
JOURNAL = {Information and Computation},
VOLUME = 178,
NUMBER = 1,
PAGES = {149--179},
MONTH = {October},
YEAR = {2002}
}
@INPROCEEDINGS{Nee-Mai-LIS-ICC02,
AUTHOR = {Peter M{\o}ller Neergaard and Harry G. Mairson},
TITLE = {{LAL} Is Square: Representation and Expressiveness in Light Affine Logic},
CROSSREF = {ICC2002},
YEAR = {2002},
CHURCHREPORT = {yes},
PMNWWW = {conference},
ABSTRACT = {We focus on how the choice of input-output representation has a crucial impact on the expressiveness of so-called ``logics of polynomial
time.'' Our analysis illustrates this dependence in
the context of \emph{Light Affine Logic} (LAL),
which is both a restricted version of Linear Logic,
and a primitive functional programming language with
restricted sharing of arguments. By slightly
relaxing representation conventions, we derive
doubly-exponential expressiveness bounds for this
``logic of polynomial time.'' We emphasize that
\emph{squaring} is the unifying idea that relates
upper bounds on cut elimination for LAL with lower
bounds on representation. Representation issues
arise in the simulation of
\textsc{dtime}$(2^{2^n})$, where we construct a {\em
uniform family} of proof-nets encoding Turing
Machine; specifically, the dependence on $n$ only
affects the number of enclosing {\em boxes.} A
related technical improvement is the simulation of
\textsc{dtime}$(n^k)$ in depth~$O(\log k)$ LAL
proof-nets. The resulting upper bounds on cut
elimination then satisfy the properties of a
\emph{first-class} polynomial Turing Machine
simulation, where there is a fixed polynomial
slowdown in the simulation of any polynomial
computation.},
PDF = {http://www.church-project.org/reports/electronic/Nee+Mai:LIS-ICC02.pdf},
URL = {http://www.church-project.org/reports/electronic/Nee+Mai:LIS-ICC02.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Nee+Mai:LIS-ICC02.ps}
}
@ARTICLE{Xi:JHOSL-2002,
AUTHOR = {Hongwei Xi},
TITLE = {Dependent Types for Program Termination Verification},
JOURNAL = {Journal of Higher-Order and Symbolic Computation},
YEAR = {2002},
MONTH = {October},
VOLUME = 15,
PAGES = {91--131},
POSTSCRIPT = {http://www.cs.bu.edu/~hwxi/academic/papers/HOSCdmlterm.ps},
PDF = {http://www.cs.bu.edu/~hwxi/academic/papers/HOSCdmlterm.pdf},
CHURCHREPORT = {yes},
ABSTRACT = {
Program termination verification is a challenging research subject
of significant practical importance. While there is already a rich
body of literature on this subject, it is still undeniably a
difficult task to design a termination checker for a realistic
programming language that supports general recursion. In this
paper, we present an approach to program termination verification
that makes use of a form of dependent types developed in Dependent
ML (DML), demonstrating a novel application of such dependent
types to establishing a liveness property. We design a type system
that enables the programmer to supply metrics for verifying
program termination and prove that every well-typed program in
this type system is terminating. We also provide realistic
examples, which are all verified in a prototype implementation, to
support the effectiveness of our approach to program termination
verification as well as its unobtrusiveness to programming. The
main contribution of the paper lies in the design of an approach
to program termination verification that smoothly combines types
with metrics, yielding a type system capable of guaranteeing
program termination that supports a general form of recursion
(including mutual recursion), higher-order functions, algebraic
datatypes, and polymorphism.} }
@INPROCEEDINGS{Xi:ASIA-PEPM-2002,
AUTHOR = {Hongwei Xi},
TITLE = {{Unifying Object-Oriented Programming with Typed Functional Programming}},
BOOKTITLE = {Proceedings of ASIAN Symposium on Partial Evaluation and Semantics-Based Program Manipulation (ASIA-PEPM)},
YEAR = {2002},
MONTH = {September},
PAGES = {117--125},
ADDRESS = {Aizu-Wakamatsu, Japan},
CHURCHREPORT = {yes},
POSTSCRIPT = {http://www.cs.bu.edu/~hwxi/academic/papers/asia-pepm02.ps},
PDF = {http://www.cs.bu.edu/~hwxi/academic/papers/asia-pepm02.pdf},
ABSTRACT = {
The wide practice of object-oriented programming in current
software construction is evident. Despite extensive studies on
typing programming objects, it is still undeniably a challenging
research task to design a type system for object-oriented
programming that is both effective in capturing program errors and
unobtrusive to program construction. In this paper, we present a
novel approach to typing objects that makes use of a recently
invented notion of guarded dependent datatypes. We show that our
approach can address various difficult issues (e.g., handling
``self'' type, typing binary methods, etc.) in a simple and
natural type-theoretical manner, remedying the deficiencies in
many existing approaches to typing objects.} }
@INPROCEEDINGS{Chen:PPDP-2002,
AUTHOR = {Gang Chen},
TITLE = {Full Integration of Subtyping and If-expression},
CROSSREF = {PPDP2002},
YEAR = {2002},
KEYWORDS = {Subtyping, If-expression, Java, Programming Language},
CHURCHREPORT = {yes},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Chen:PPDP-2002.ps.gz},
URL = {http://www.church-project.org/reports/electronic/Chen:PPDP-2002.pdf.gz},
PDF = {http://www.church-project.org/reports/electronic/Chen:PPDP-2002.pdf},
ABSTRACT = {The combination of full subsumption and conditional
expression is a challenging problem, because in such a
system, a term might not have a type which is a
representative of all types of the term. Therefore, the
traditional type checking technique fails. Due to such a
difficulty, Java typing rule for if-expression uses only
a restricted form of subtyping. In this paper, we propose
a type system which includes both of the above mentioned
features and enjoys decidable type checking. We also show
that the system has the subject reduction property. It is
expected that this technique could be used to improve the
type system of Java with full subsumption. }
}
@TECHREPORT{Amt+Wel:MPD-2002,
AUTHOR = {Torben Amtoft and J. B. Wells},
TITLE = {Mobile Processes with Dependent Communication Types
and Singleton Types for Names and Capabilities},
MONTH = {December},
YEAR = {2002},
INSTITUTION = {Kansas State University, Department of Computing and
Information Sciences},
NUMBER = {2002-3},
URL = {http://www.cis.ksu.edu/~schmidt/techreport/2002.list.html},
PDF = {http://www.cis.ksu.edu/~tamtoft/Papers/Amt+Wel:MPwDCT-2002/short.pdf},
XNOTE = {Torben has replaced the file supposed linked to as
KSU CIS TR 2002-3 with a later version which was
submitted to SAS '03. The original KSU CIS TR
2002-03 is no longer available!!! It might be
possible to reconstruct something like it via CVS
time travel, but it would be a pain.
Torben, 02/07/04: the link now again points to
the original report!!},
DARTREPORT = {yes},
CHURCHREPORT = {yes},
ABSTRACT = {There are many calculi for reasoning about
concurrent communicating processes which have
locations and are mobile. Examples include the
original Ambient Calculus and its many variants, the
Seal Calculus, the MR-calculus, the M-calculus,
etc. It is desirable to use such calculi to describe
the behavior of mobile agents. It seems reasonable
that mobile agents should be able to follow
non-predetermined paths and to carry
non-predetermined types of data from location to
location, collecting and delivering this data using
communication primitives. Previous type systems for
ambient calculi make this difficult or impossible to
express, because these systems (if they handle
communication at all) have always globally mapped
each ambient name to a type governing the type of
values that can be communicated locally or with
adjacent locations, and this type can not depend on
where the ambient has traveled. \par We present a
new type system where there are no global
assignments of types to ambient names. Instead, the
type of an ambient process $P$ not only indicates
what can be locally communicated but also gives an
upper bound on the possible ambient nesting shapes
of any process $P'$ to which $P$ can evolve, as well
as the possible capabilities and names that can be
exhibited or communicated at each location. Because
these shapes can depend on which capabilities and
names are actually communicated, the types support
this with explicit dependencies on
communication. This system is thus the first type
system for an ambient calculus which provides type
polymorphism of the kind that is usually present in
polymorphic type systems for the
$\lambda$-calculus.}
}
@INPROCEEDINGS{Carlier:ITRS-2002,
ITRSPAPER = {yes},
AUTHOR = {S{\'e}bastien Carlier},
TITLE = {Polar Type Inference with Intersection Types and $\omega$},
CROSSREF = {ITRS2002},
YEAR = {2002},
POSTSCRIPT = {http://www.macs.hw.ac.uk/~sebc/carlier-ptiio.ps.gz},
CHURCHREPORT = {yes},
NOTEREMARK = {The note field is blank to suppress the note field from ITRS2002.},
NOTE = {},
ABSTRACT = {We present a type system featuring intersection
types and $\omega$, a type constant which is
assigned to unused terms. We exploit and extend the
technology of expansion variables from the recently
developed System I, with which we believe our system
shares many interesting properties, such as strong
normalization, principal typings, and compositional
analysis. Our presentation emphasizes a polarity
discipline and shows its benefits. We syntactically
distinguish positive and negative types, and give
them different interpretations. We take the point of
view that the interpretation of a type is intrinsic
to it, and should not change implicitly when it
appears at the opposite polarity. Our system is the
result of a process which started with an extension
of Trevor Jim's Polar Type System.}
}
@INPROCEEDINGS{Kfo+Was+Wel:ITRS-2002,
ITRSPAPER = {yes},
AUTHOR = {Assaf J. Kfoury and Geoff Washburn and J. B. Wells},
TITLE = {Implementing Compositional Analysis Using
Intersection Types With Expansion Variables},
CROSSREF = {ITRS2002},
YEAR = {2002},
NOTEREMARK = {The note field is blank to suppress the note field from ITRS2002.},
NOTE = {},
DARTREPORT = {yes},
CHURCHREPORT = {yes},
NONEXISTENTDVI = {http://www.church-project.org/reports/electronic/Kfo+Was+Wel:ITRS-2002.dvi},
PDF = {http://www.church-project.org/reports/electronic/Kfo+Was+Wel:ITRS-2002.pdf},
ELSEVIERURLB = {http://www.elsevier.nl/gej-ng/31/29/23/125/51/show/Products/notes/index.htt},
ELSEVIERURLA = {http://www.elsevier.nl/locate/entcs/volume70.html},
ABSTRACT = {A program analysis is \emph{compositional} when the analysis
result for a particular program fragment is obtained solely from
the results for its immediate subfragments via some composition
operator. This means the subfragments can be analyzed
independently in any order. Many commonly used program analysis
techniques (in particular, most abstract interpretations and most
uses of the Hindley/Milner type system) are not compositional and
require the entire text of a program for sound and complete
analysis. \par System I is a recent type system for the pure
$\lambda$-calculus with intersection types and the new technology
of expansion variables. System I supports compositional analysis
because it has the \emph{principal typings} property and an
algorithm based on the new technology of $\beta$-unification has
been developed that finds these principal typings. In addition,
for each natural number $k$, typability in the rank-$k$
restriction of System I is decidable, so a complete and
terminating analysis algorithm exists for the rank-$k$
restriction. \par This paper presents new understanding that has
been gained from working with multiple implementations of System I
and $\beta$-unification-based analysis algorithms. The previous
literature on System I presented the type system in a way that
helped in proving its more important theoretical properties, but
was not as easy for implementers to follow as it could be. This
paper provides a presentation of many aspects of System I that
should be clearer as well as a discussion of important
implementation issues.} }
@ARTICLE{Amt+Kfo+Per:CL-2002,
AUTHOR = {Torben Amtoft and Assaf J. Kfoury and Santiago
M. Pericas-Geertsen},
TITLE = {Orderly Communication in the Ambient Calculus},
JOURNAL = {Computer Languages},
PUBLISHER = {Elsevier Science},
YEAR = {2002},
VOLUME = 28,
PAGES = {29--60},
CHURCHREPORT = {yes},
PDF = {http://www.church-project.org/reports/electronic/Amt+Kfo+Per:CL-2002.pdf},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Amt+Kfo+Per:CL-2002.ps},
ABSTRACT = {The Ambient Calculus (henceforth, {\bf AC}) was developed by Cardelli
and Gordon as a formal framework to study issues of mobility and
migrant code. We present a type system for {\bf AC} that allows
the type of exchanged data within the same ambient to vary over
time. Our type system assigns what we call \emph{behaviors} to
processes; a denotational semantics of behaviors is proposed, here
called \emph{trace semantics}, underlying much of the remaining
analysis. We state and prove a Subject Reduction property for our
typed version of {\bf AC}. Based on techniques borrowed from
finite automata theory, type checking of fully type-annotated
processes is shown to be decidable. We show that the typed
version of {\bf AC} originally proposed by Cardelli and Gordon can
be naturally embedded into our typed version of {\bf AC}.} }
@ARTICLE{Wel+Dim+Mul+Tur:JFP-2002-v12n3,
ITRSPAPER = {yes},
AUTHOR = {J. B. Wells and Allyn Dimock and Robert Muller and
Franklyn Turbak},
TITLE = {A Calculus with Polymorphic and Polyvariant Flow
Types},
JOURNAL = {J.~Funct.\ Programming},
YEAR = {2002},
VOLUME = {12},
NUMBER = {3},
MONTH = {May},
PAGES = {183--227},
NOTE = {Supersedes \cite{Wel+Dim+Mul+Tur:TAPSOFT-1997}},
CHURCHREPORT = {yes},
DOCUMENTURL = {http://www.church-project.org/reports/Wel+Dim+Mul+Tur:JFP-2002-v12n3.html},
PDF = {http://www.macs.hw.ac.uk/~jbw/papers/Wells+Dimock+Muller+Turbak:A-Calculus-with-Polymorphic-and-Polyvariant-Flow-Types:JFP-2002-v12n3.pdf},
ABSTRACT = {We present $\lambda^{\mathrm{CIL}}$, a typed
$\lambda$-calculus which serves as the foundation
for a typed intermediate language for optimizing
compilers for higher-order polymorphic programming
languages. The key innovation of
$\lambda^{\mathrm{CIL}}$ is a novel formulation of
intersection and union types and flow labels on both
terms and types. These \emph{flow types} can encode
polyvariant control and data flow information within
a polymorphically typed program representation. Flow
types can guide a compiler in generating customized
data representations in a strongly typed setting.
Since $\lambda^{\mathrm{CIL}}$ enjoys confluence,
standardization, and subject reduction properties,
it is a valuable tool for reasoning about programs
and program transformations.}
}
@INPROCEEDINGS{Wells:ICALP-2002,
ITRSPAPER = {yes},
AUTHOR = {J. B. Wells},
TITLE = {The Essence of Principal Typings},
CROSSREF = {ICALP2002},
YEAR = {2002},
DOCUMENTURL = {http://www.church-project.org/reports/Wells:ICALP-2002.html},
PDF = {http://www.macs.hw.ac.uk/~jbw/papers/Wells:The-Essence-of-Principal-Typings:ICALP-2002.pdf},
DARTREPORT = {yes},
CHURCHREPORT = {yes},
PAGES = {913--925},
SPRINGERURL = {http://link.springer.de/link/service/series/0558/bibs/2380/23800913.htm},
ABSTRACT = {Let $S$ be some type system. A \emph{typing} in $S$
for a typable term $M$ is the collection of all of
the information other than $M$ which appears in the
final judgement of a proof derivation showing that
$M$ is typable. For example, suppose there is a
derivation in $S$ ending with the judgement
$A\vdash{M}\mathrel{:}{\tau}$ meaning that $M$ has
result type $\tau$ when assuming the types of free
variables are given by $A$. Then $(A,\tau)$ is a
typing for $M$.
\par
A \emph{principal typing} in $S$ for a term $M$ is a
typing for $M$ which somehow represents all other
possible typings in $S$ for $M$. It is important not
to confuse this notion with the weaker notion of
\emph{principal type} often mentioned in connection
with the Hindley/Milner type system. Previous
definitions of principal typings for specific type
systems have involved various syntactic operations
on typings such as \emph{substitution} of types for
type variables, \emph{expansion}, \emph{lifting},
etc.
\par
This paper presents a new general definition of
principal typings which does not depend on the
details of any particular type system. This paper
shows that the new general definition correctly
generalizes previous system-dependent definitions.
This paper explains why the new definition is the
right one. Furthermore, the new definition is used
to prove that certain polymorphic type systems using
$\forall$-quantifiers, namely System F and the
Hindley/Milner system, do not have principal
typings.}
}
@UNPUBLISHED{Wells:EPT-2002,
ITRSPAPER = {yes},
AUTHOR = {J. B. Wells},
TITLE = {The Essence of Principal Typings},
DOCUMENTURL = {http://www.church-project.org/reports/Wells:EPT-2002.html},
PDF = {http://www.macs.hw.ac.uk/~jbw/papers/Wells:The-Essence-of-Principal-Typings:slightly-longer.pdf},
DARTREPORT = {yes},
CHURCHREPORT = {yes},
YEAR = {2002},
NOTE = {A longer version of \cite{Wells:ICALP-2002} with a
3-page appendix with extra proofs, a page of extra
discussion of non-related work, and other minor
changes},
ABSTRACT = {Let $S$ be some type system. A \emph{typing} in $S$
for a typable term $M$ is the collection of all of
the information other than $M$ which appears in the
final judgement of a proof derivation showing that
$M$ is typable. For example, suppose there is a
derivation in $S$ ending with the judgement
$A\vdash{M}\mathrel{:}{\tau}$ meaning that $M$ has
result type $\tau$ when assuming the types of free
variables are given by $A$. Then $(A,\tau)$ is a
typing for $M$.
\par
A \emph{principal typing} in $S$ for a term $M$ is a
typing for $M$ which somehow represents all other
possible typings in $S$ for $M$. It is important not
to confuse this notion with the weaker notion of
\emph{principal type} often mentioned in connection
with the Hindley/Milner type system. Previous
definitions of principal typings for specific type
systems have involved various syntactic operations
on typings such as \emph{substitution} of types for
type variables, \emph{expansion}, \emph{lifting},
etc.
\par
This paper presents a new general definition of
principal typings which does not depend on the
details of any particular type system. This paper
shows that the new general definition correctly
generalizes previous system-dependent definitions.
This paper explains why the new definition is the
right one. Furthermore, the new definition is used
to prove that certain polymorphic type systems using
$\forall$-quantifiers, namely System F and the
Hindley/Milner system, do not have principal
typings.}
}
@ARTICLE{Wel+Haa:IAC-ta,
ITRSPAPER = {yes},
DARTTODO = {Mention this.},
AUTHOR = {J. B. Wells and Christian Haack},
TITLE = {Branching Types},
JOURNAL = {Inform.\ {\&} Comput.},
YEAR = {200X},
XVOLUME = {},
XNUMBER = {},
XPAGES = {},
XMONTH = {},
PDF = {http://www.macs.hw.ac.uk/~jbw/papers/Wells+Haack:Branching-Types:2002-08-18.pdf},
NOTE = {Completely supersedes \cite{Wel+Haa:ESOP-2002}. Accepted
subject to revisions},
CHURCHREPORT = {yes},
ABSTRACT = {Although systems with intersection types have many
unique capabilities, there has never been a fully
satisfactory explicitly typed system with
intersection types. We introduce and prove the basic
properties of $\lambda^{\mathrm{B}}$, a typed
$\lambda$-calculus with \emph{branching types} and
types with quantification over \emph{type selection
parameters}. The new system $\lambda^{\mathrm{B}}$ is an
explicitly typed system with the same expressiveness
as a system with intersection types. Typing
derivations in $\lambda^{\mathrm{B}}$ use branching types to
squash together what would be separate parallel
derivations in earlier systems with intersection
types.}
}
@INPROCEEDINGS{Wel+Haa:ESOP-2002,
ITRSPAPER = {yes},
AUTHOR = {J. B. Wells and Christian Haack},
TITLE = {Branching Types},
YEAR = {2002},
CROSSREF = {ESOP2002},
PAGES = {115--132},
DOCUMENTURL = {http://www.church-project.org/reports/Wel+Haa:ESOP-2002.html},
PDF = {http://www.macs.hw.ac.uk/~jbw/papers/Wells+Haack:Branching-Types:ESOP-2002.pdf},
SPRINGERURL = {http://link.springer.de/link/service/series/0558/bibs/2305/23050115.htm},
CHURCHREPORT = {yes},
NOTE = {Completely superseded by \cite{Wel+Haa:IAC-ta}},
ABSTRACT = {Although systems with intersection types have many
unique capabilities, there has never been a fully
satisfactory explicitly typed system with
intersection types. We introduce $\lambda^{\mathrm{B}}$ with \emph{branching types} and types which are
quantified over \emph{type selectors} to provide an
explicitly typed system with the same expressiveness
as a system with intersection types. Typing
derivations in $\lambda^{\mathrm{B}}$ effectively
squash together what would be separate parallel
derivations in earlier systems with intersection
types.}
}
@UNPUBLISHED{Amt:CauMov-2001,
AUTHOR = {Torben Amtoft},
TITLE = {Causal Type System for Ambient Movements},
NOTE = {Submitted for publication},
YEAR = {2002},
CHURCHREPORT = {yes},
ABSTRACT = {
The Ambient Calculus was developed by Cardelli and Gordon as a
formal framework to study issues of mobility and migrant code. We
present a type system for the calculus, parameterized by security
constraints expressing where a given ambient may reside and where
it may be dissolved. A subject reduction property then guarantees
that a well-typed process never violates these constraints;
additionally it ensures that communicating subprocesses agree on
their ``topic of conversation''. Based on techniques borrowed from
finite automata theory, type checking of type-annotated processes
is decidable. The type system employs a notion of causality in
that processes are assigned ``behaviors''. This significantly
increases the precision of the analysis and compensates for the
lack of ``co-capabilities'' (an otherwise increasingly popular
extension to the ambient calculus); also it allows (in contrast to
other approaches) an ambient to hold multiple topics of
conversation. },
PDF = {http://www.church-project.org/reports/electronic/Amt:CauMov-2001.pdf},
URL = {http://www.church-project.org/reports/electronic/Amt:CauMov-2001.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Amt:CauMov-2001.ps.gz}
}
@INPROCEEDINGS{Tur+Wel:PPDP-2001,
AUTHOR = {Franklyn Turbak and J. B. Wells},
TITLE = {Cycle Therapy: A Prescription for Fold and Unfold on
Regular Trees},
CROSSREF = {PPDP2001},
YEAR = {2001},
PAGES = {137--149},
PDF = {http://www.macs.hw.ac.uk/~jbw/papers/Turbak+Wells:Cycle-Therapy:A-Prescription-for-Fold-and-Unfold-on-Regular-Trees:PPDP-2001.pdf},
CHURCHREPORT = {yes},
DOCUMENTURL = {http://www.church-project.org/reports/Tur+Wel:PPDP-2001.html},
OVERHEADSURL = {http://cs.wellesley.edu/~fturbak/talks/cycle-therapy.pdf},
OVERHEADSCOMMENT = {to make printable version: {pdf2ps
cycle-therapy.{pdf,ps}} then "psnup -b1cm -c
-w20.99cm -h29.7cm -W29.7cm -H20.99cm -4
cycle-therapy.ps{,-4up}"},
ABSTRACT = {Cyclic data structures can be tricky to create and
manipulate in declarative programming languages. In
a declarative setting, a natural way to view cyclic
structures is as denoting \emph{regular} trees,
those trees which may be infinite but have only a
finite number of distinct subtrees. This paper shows
how to implement the \emph{unfold}
(\emph{anamorphism}) operator in both eager and lazy
languages so as to create cyclic structures when the
result is a regular tree as opposed to merely
infinite lazy structures. The usual \emph{fold}
(\emph{catamorphism}) operator when used with a
strict combining function on any infinite tree
yields an undefined result. As an alternative, this
paper defines and show how to implement a
\emph{cycfold} operator with more useful semantics
when used with a strict function on cyclic
structures representing regular trees. This paper
also introduces an abstract data type
(\emph{cycamores}) to simplify the use of cyclic
structures representing regular trees in both eager
and lazy languages. Implementations of cycamores in
both SML and Haskell are presented.}
}
@PHDTHESIS{Per:PhD-2001,
AUTHOR = {Santiago M. Pericas-Geertsen},
TITLE = {XML-Fluent Mobile Ambients},
INSTITUTION = {{Ph.D. Thesis} Comp.\ Sci.\ Dept., Boston Univ.},
SCHOOL = {Boston University},
YEAR = {2001},
PDF = {http://cs-people.bu.edu/santiago/Papers/Per:PhD-2001.pdf},
POSTSCRIPT = {http://cs-people.bu.edu/santiago/Papers/Per:PhD-2001.ps},
CHURCHREPORT = {yes},
ABSTRACT = {
The Internet is considered to be among the greatest inventions of the
twentieth century. Despite its immense importance we, programming language
designers, have yet to propose a programming language that can exploit a
network that spans the entire globe.
In this thesis, we introduce a language for a programming model based on
XML-speaking mobile agents. The thesis is divided into three parts: the
first part defines the mobility aspect of the language, the second
part introduces a typed calculus for manipulating XML documents, and the
third part proposes a way of integrating these two calculi. The final goal
is to design a typed language in which computation can be carried out by
mobile agents exchanging data in the form of XML documents.
The Ambient Calculus was developed by Cardelli and Gordon as a formal
framework to study issues of mobility and migrant code. In Part I, we consider
an extension to the Ambient Calculus where ambients are equipped with a set
of local channels over which processes can communicate. This extension
incorporates certain features from the Pi-Calculus, such as the ability
to communicate over higher-order channels, but without violating design
principles of the Ambient Calculus (e.g., that two processes are unable to
communicate if located in different administrative domains). For the
extended calculus, we introduce an appropriate operational semantics and a
provably sound type system. We conclude this part by showing an encoding
of the Ambient Calculus into our calculus that is both semantics
preserving and type preserving.
XML is a simple and portable way of representing structured data in the
form of trees. It is an ideal language for machine-to-machine (or
peer-to-peer) exchange of data because (i) it is portable, so there is no
need to worry about things like integer representations (big endian
vs.~little endian) or string representations (null terminated vs.~fixed
length) and (ii) structured data is easily represented in the form of
trees. Interestingly, the ability of XML to represent almost any form of
structured data is often an important weakness at the same time: for a
data exchange to be successful, a producer and a consumer must agree on
the structure of the exchanged documents so that the latter can
find/extract what it needs.
This problem can be addressed by the use of a type system. A type
system is needed in order to keep track of the meta-data (i.e.~data about
the data) which can be used by a consumer as a guideline for data
extraction. A number of type systems for XML have been proposed in recent
years. Perhaps surprisingly, specially if we look at the evolution of
programming languages, the use of a type system in XML (as well as in
languages for manipulating XML documents) is not mandatory.
Domain-specific languages for XML proposed in the last few years are all,
with the exception of XDuce, dynamically typed. Experience has shown us that
statically-typed languages are better suited for real-world applications than
dynamically-typed ones. Despite requiring more from the programmer's
perspective (at least in the absence of a type-inference engine) the
benefits greatly outweigh the requirements. Specifically, the use of a
statically-typed language ensures that the final product is free of an
important number of errors (so-called type errors). In Part II, we
present the dlcalculus, a statically-typed calculus for manipulating XML
documents, and show how it can be integrated with existing XML technology
by means of two algorithms for importing and exporting documents into the
calculus.}
}
@TECHREPORT{Amt+Con+Dan+Mal:BRICS-2001,
AUTHOR = {Torben Amtoft and Charles Consel and Olivier Danvy
and Karoline Malmkj{\ae}r},
TITLE = {The Abstraction and Instantiation of String-Matching
Programs},
INSTITUTION = {DAIMI, Department of Computer Science, University of
Aarhus},
YEAR = {2001},
CHURCHREPORT = {yes},
TYPE = {Technical Report},
NUMBER = {BRICS RS-01-12},
PDF = {http://www.church-project.org/reports/electronic/Amt+Con+Dan+Mal:BRICS-2001.pdf},
URL = {http://www.church-project.org/reports/electronic/Amt+Con+Dan+Mal:BRICS-2001.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Amt+Con+Dan+Mal:BRICS-2001.ps.gz},
ABSTRACT = {We consider a naive, quadratic string matcher testing whether a pattern occurs in a text; we equip it with a cache mediating its access to the
text; and we abstract the traversal policy of the pattern, the
cache, and the text. We then specialize this abstracted program
with respect to a pattern, using the off-the-shelf partial
evaluator Similix. Instantiating the abstracted program with a
left-to-right traversal policy yields the linear-time behavior of
Knuth, Morris and Pratt's string matcher. Instantiating it with a
right-to-left policy yields the linear-time behavior of Boyer and
Moore's string matcher.},
MONTH = {April},
NOTE = {An extended version of an essay which appears in {\em The Essence of Computation: Complexity, Analysis, Transformation. Essays Dedicated to Neil D. Jones}, editors T. Mogensen and D. Schmidt and I. H. Sudborough, Springer LNCS 2566, pp 332--357, 2002.}
}
@INPROCEEDINGS{Amt+Kfo+Per:ESOP-2001,
AUTHOR = {Torben Amtoft and Assaf J. Kfoury and Santiago
M. Pericas-Geertsen},
TITLE = {What are Polymorphically-Typed Ambients?},
BOOKTITLE = {ESOP 2001, Genova},
PAGES = {206--220},
YEAR = {2001},
CHURCHREPORT = {yes},
PDF = {http://www.church-project.org/reports/electronic/Amt+Kfo+Per:ESOP-2001.pdf},
URL = {http://www.church-project.org/reports/electronic/Amt+Kfo+Per:ESOP-2001.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Amt+Kfo+Per:ESOP-2001.ps.gz},
EDITOR = {David Sands},
VOLUME = 2028,
SERIES = {LNCS},
MONTH = {April},
PUBLISHER = {Springer-Verlag},
NOTE = {This downloadable extended summary is more detailed
than the article in the ESOP proceedings. A full
report is \cite{Amt+Kfo+Per:PTA-2000}},
ABSTRACT = {The Ambient Calculus was developed by Cardelli and
Gordon as a formal framework to study issues of mobility and
migrant code. We consider an Ambient Calculus where ambients
transport and exchange programs rather than just inert data. We
propose different senses in which such a calculus can be said to
be \emph{polymorphically typed}, and design accordingly a
\emph{polymorphic} type system for it. Our type system assigns
\emph{types} to embedded programs and what we call
\emph{behaviors} to processes; a denotational semantics of
behaviors is then proposed, here called \emph{trace semantics},
underlying much of the remaining analysis. We state and prove a
Subject Reduction property for our polymorphically-typed calculus.
Based on techniques borrowed from finite automata theory,
type-checking of fully type-annotated processes is shown to be
decidable. Our polymorphically-typed calculus is a conservative
extension of the typed Ambient Calculus originally proposed by
Cardelli and Gordon.} }
@INPROCEEDINGS{Dim+Wes+Mul+Tur+Wel:ICFP-2001,
ITRSPAPER = {yes},
AUTHOR = {Allyn Dimock and Ian Westmacott and Robert Muller
and Franklyn Turbak and J. B. Wells},
XPDLI01TITLE = {Flow-Based Function Customization in the Presence of
Representation Pollution},
TITLE = {Functioning without Closure: Type-Safe Customized
Function Representations for {S}tandard {ML}},
CROSSREF = {ICFP2001},
YEAR = {2001},
COMMENT = {change documenturl when this is added as church paper},
OLDDOCUMENTURL = {http://www.macs.hw.ac.uk/~jbw/papers/#Dim+Wes+Mul+Tur+Wel:ICFP-2001},
DOCUMENTURL = {http://www.church-project.org/reports/Dim+Wes+Mul+Tur+Wel:ICFP-2001.html},
PDF = {http://www.church-project.org/reports/electronic/Dim+Wes+Mul+Tur+Wel:ICFP-2001.pdf},
URL = {http://www.church-project.org/reports/electronic/Dim+Wes+Mul+Tur+Wel:ICFP-2001.pdf.gz},
CHURCHREPORT = {yes},
ABSTRACT = {The CIL compiler for core Standard ML compiles
whole SML programs using a novel typed intermediate
language that supports the generation of type-safe
customized data representations. In this paper, we
present empirical data comparing the relative
efficacy of several different customization
strategies for function representations. We develop
a cost model to interpret dynamic counts of
operations required for each strategy. One of our
results is data showing that compiling with a
function representation strategy that makes
customization decisions based on the presence or
absence of free variables of a function gives a 26\%
improvement over a uniform closure representation.
We also present data on the relative effectiveness
of various representation pollution removal
strategies. We find that for the types of
representation pollution detected by our compiler,
pollution removal rarely provides any additional
benefit for the representation strategies and
benchmarks considered.},
PAGES = {14--25},
ISBN = {1-58113-415-0}
}
@TECHREPORT{Dim+Wes+Mul+Tur+Wel+Con:TIC-2000-TR,
ITRSPAPER = {yes},
AUTHOR = {Allyn Dimock and Ian Westmacott and Robert Muller
and Franklyn Turbak and J. B. Wells and Jeffrey
Considine},
TITLE = {Program Representation Size in an Intermediate
Language with Intersection and Union Types},
MONTH = {March},
YEAR = {2001},
INSTITUTION = {Comp.\ Sci.\ Dept., Boston Univ.},
LONGINSTITUTION = {Computer Science Department, Boston University},
NUMBER = {BUCS-TR-2001-02},
NOTE = {This is a version of \cite{Dim+Wes+Mul+Tur+Wel+Con:TIC-2000-LNCS}
extended with an appendix describing the CIL typed intermediate
language.},
CHURCHREPORT = {yes},
PDF = {http://www.cs.bu.edu/techreports/pdf/2001-002-program-representation-size.pdf},
PDFREMARK = {non-scalable fonts!},
POSTSCRIPT = {http://www.cs.bu.edu/techreports/ps/2001-002-program-representation-size.ps},
ABSTRACT = {The CIL compiler for core Standard ML compiles whole
programs using a novel typed intermediate language
(TIL) with intersection and union types and flow
labels on both terms and types. The CIL term
representation duplicates portions of the program
where intersection types are introduced and union
types are eliminated. This duplication makes it
easier to represent type information and to
introduce customized data representations. However,
duplication incurs compile-time space costs that are
potentially much greater than are incurred in TILs
employing type-level abstraction or quantification.
In this paper, we present empirical data on the
compile-time space costs of using CIL as an
intermediate language. The data shows that these
costs can be made tractable by using sufficiently
fine-grained flow analyses together with standard
hash-consing techniques. The data also suggests
that non-duplicating formulations of intersection
(and union) types would not achieve significantly
better space complexity.}
}
@ARTICLE{JLC-2000-v10n3,
AUTHOR = {},
TITLE = {},
JOURNAL = {J.~Logic Comput.},
YEAR = {2000},
EDITOR = {Fairouz Kamareddine and Jan Willem Klop},
XEDITOR = {"editor" field is not used by BibTeX style for
"article"!!!},
PUBLISHER = {Oxford University Press},
XPUBLISHER = {"publisher" field is not used by BibTeX style for
"article"!!!},
VOLUME = 10,
NUMBER = 3,
NOTE = {Special issue on Type Theory and Term Rewriting. Kamareddine and Klop (editors)}
}
@TECHREPORT{Amt+Kfo+Per:PTA-2000,
TITLE = {What Are Polymorphically-Typed Ambients?},
AUTHOR = {Torben Amtoft and Assaf J. Kfoury and Santiago M. Pericas-Geertsen},
MONTH = {December},
YEAR = {2000},
NUMBER = {BUCS-TR-2000-021},
INSTITUTION = {Comp.\ Sci.\ Dept., Boston Univ.},
LONGINSTITUTION = {Computer Science Department, Boston
University},
ABSTRACT = {The Ambient Calculus was developed by Cardelli and
Gordon as a formal framework to study issues of mobility and
migrant code. We consider an Ambient Calculus where ambients
transport and exchange programs rather than just inert data. We
propose different senses in which such a calculus can be said to
be \emph{polymorphically typed}, and design accordingly a
\emph{polymorphic} type system for it. Our type system assigns
\emph{types} to embedded programs and what we call
\emph{behaviors} to processes; a denotational semantics of
behaviors is then proposed, here called \emph{trace semantics},
underlying much of the remaining analysis. We state and prove a
Subject Reduction property for our polymorphically-typed calculus.
Based on techniques borrowed from finite automata theory,
type-checking of fully type-annotated processes is shown to be
decidable. Our polymorphically-typed calculus is a conservative
extension of the typed Ambient Calculus originally proposed by
Cardelli and Gordon.},
CHURCHREPORT = {yes},
PDF = {http://www.church-project.org/reports/electronic/Amt+Kfo+Per:PTA-2000.pdf},
URL = {http://www.church-project.org/reports/electronic/Amt+Kfo+Per:PTA-2000.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Amt+Kfo+Per:PTA-2000.ps.gz}
}
@INPROCEEDINGS{Dim+Wes+Mul+Tur+Wel+Con:TIC-2000,
ITRSPAPER = {yes},
AUTHOR = {Allyn Dimock and Ian Westmacott and Robert Muller
and Franklyn Turbak and J. B. Wells and Jeffrey
Considine},
TITLE = {Space Issues in Compiling with Intersection and
Union Types},
CROSSREF = {TIC2000},
YEAR = {2000},
CHURCHREPORT = {yes},
DOCUMENTURL = {http://www.church-project.org/reports/Dim+Wes+Mul+Tur+Wel+Con:TIC-2000.html},
ABSTRACT = {The CIL compiler for core Standard ML compiles whole
programs using the CIL typed intermediate language
with flow labels and intersection and union
types. Flow labels embed flow information in the
types and intersection and union types support
precise polyvariant type and flow information,
without the use of type-level abstraction or
quantification.
\par
Compile-time representations of CIL types and terms
are potentially large compared to those for similar
types and terms in systems based on quantified
types. The listing-based nature of intersection and
union types, together with flow label annotations on
types, contribute to the size of CIL types. The CIL
term representation duplicates portions of the
program where intersection types are introduced and
union types are eliminated. This duplication makes
it easier to represent type information and to
introduce multiple representation conventions, but
incurs a compile-time space cost.
\par
This paper presents empirical data on the
compile-time space costs of using CIL. These costs
can be made tractable using standard hash-consing
techniques. Surprisingly, the duplicating nature of
CIL has acceptable compile-time space performance in
practice on the benchmarks and flow analyses that we
have investigated. Increasing the precision of flow
analysis can significantly reduce compile-time space
costs. There is also preliminary evidence that the
flow information encoded in CIL's type system can
effectively guide data customization.},
NOTE = {Superseded by~\cite{Dim+Wes+Mul+Tur+Wel+Con:TIC-2000-LNCS}}
}
@INPROCEEDINGS{Dim+Wes+Mul+Tur+Wel+Con:TIC-2000-LNCS,
ITRSPAPER = {yes},
AUTHOR = {Allyn Dimock and Ian Westmacott and Robert Muller
and Franklyn Turbak and J. B. Wells and Jeffrey
Considine},
TITLE = {Program Representation Size in an Intermediate
Language with Intersection and Union Types},
CROSSREF = {TIC2000LNCS},
YEAR = {2000},
PAGES = {27--52},
CHURCHREPORT = {yes},
DOCUMENTURL = {http://www.church-project.org/reports/Dim+Wes+Mul+Tur+Wel+Con:TIC-2000-LNCS.html},
XPDF = {http://www.church-project.org/reports/electronic/Dim+Wes+Mul+Tur+Wel+Con:TIC-2000-LNCS.pdf.gz},
XPOSTSCRIPT = {http://www.church-project.org/reports/electronic/Dim+Wes+Mul+Tur+Wel+Con:TIC-2000-LNCS.ps.gz},
XNOTE = {Supersedes~\cite{Dim+Wes+Mul+Tur+Wel+Con:TIC-2000}},
ABSTRACT = {The CIL compiler for core Standard ML compiles whole
programs using the CIL typed intermediate language
with flow labels and intersection and union
types. Flow labels embed flow information in the
types and intersection and union types support
precise polyvariant type and flow information,
without the use of type-level abstraction or
quantification.
\par
Compile-time representations of CIL types and terms
are potentially large compared to those for similar
types and terms in systems based on quantified
types. The listing-based nature of intersection and
union types, together with flow label annotations on
types, contribute to the size of CIL types. The CIL
term representation duplicates portions of the
program where intersection types are introduced and
union types are eliminated. This duplication makes
it easier to represent type information and to
introduce multiple representation conventions, but
incurs a compile-time space cost.
\par
This paper presents empirical data on the
compile-time space costs of using CIL. These costs
can be made tractable using standard hash-consing
techniques. Surprisingly, the duplicating nature of
CIL has acceptable compile-time space performance in
practice on the benchmarks and flow analyses that we
have investigated. Increasing the precision of flow
analysis can significantly reduce compile-time space
costs. There is also preliminary evidence that the
flow information encoded in CIL's type system can
effectively guide data customization.}
}
@INPROCEEDINGS{Bawden:POPL-2000,
AUTHOR = {Alan Bawden},
TITLE = {First-class Macros Have Types},
CROSSREF = {POPL2000},
YEAR = {2000},
PAGES = {133--141},
CHURCHREPORT = {yes},
DOCUMENTURL = {http://www.linearity.org/bawden/mtt/},
PDF = {http://www.linearity.org/bawden/mtt/popl00.pdf},
URL = {http://www.linearity.org/bawden/mtt/popl00.pdf.gz},
POSTSCRIPT = {http://www.linearity.org/bawden/mtt/popl00.ps.gz},
ABSTRACT = {In modern Scheme, a macro captures the lexical environment
where it is defined. This creates an opportunity for extending
Scheme so that macros are first-class values. The key to
achieving this goal, while preserving the ability to compile
programs into reasonable code, is the addition of a type
system. Many interesting things can be done with first-class
macros, including the construction of a useful module system in
which modules are also first-class.}
}
@INPROCEEDINGS{Law+Mai:ESOP-2000,
AUTHOR = {Julia L. Lawall and Harry G. Mairson},
TITLE = {Sharing continuations: proofnets for languages with
explicit control},
CROSSREF = {ESOP2000},
YEAR = {2000},
PAGES = {245--259},
ABSTRACT = {We introduce graph reduction technology that
implements functional languages with control, such
as Scheme with {\tt call/cc}, where continuations
can be manipulated explicitly as values, and can be
optimally reduced in the sense of L{\'e}vy. The
technology is founded on {\em proofnets} for
multiplicative-exponential linear logic, extending
the techniques originally proposed by Lamping, where
we adapt the continuation-passing style
transformation to yield a new understanding of
sharable values. Confluence is maintained by
returning multiple answers to a (shared)
continuation. \par {\em Proofnets} provide a {\em
concurrent} version of linear logic proofs,
eliminating structurally irrelevant
sequentialization, and ignoring asymmetric
distinctions between inputs and outputs---dually,
expressions and continuations. While Lamping's
graphs and their variants encode an embedding of
intuitionistic logic into linear logic, our
construction implicitly contains an embedding of
classical logic into linear logic.\par We propose a
family of translations, produced uniformly by
beginning with a continuation-passing style
semantics for the languages, employing standard
codings into proofnets using call-by-value,
call-by-name---or hybrids of the two---to locate
proofnet {\em boxes}, and converting the proofnets
to direct style. The resulting graphs can be reduced
simply (cut elimination for linear logic), have a
consistent semantics that is preserved by reduction
(geometry of interaction, via the so-called {\em
context semantics}), and allow shared, incremental
evaluation of continuations (optimal reduction).},
PDF = {http://www.church-project.org/reports/electronic/Law+Mai:ESOP-2000.pdf},
URL = {http://www.church-project.org/reports/electronic/Law+Mai:ESOP-2000.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Law+Mai:ESOP-2000.ps.gz},
CHURCHREPORT = {yes}
}
@PHDTHESIS{Stewart:fortcf,
AUTHOR = {C. A. Stewart},
TITLE = {On the formulae-as-types correspondence for
classical logic},
SCHOOL = {Oxford University},
YEAR = {2000},
CHURCHREPORT = {yes},
PDF_A4 = {http://www.church-project.org/reports/electronic/Stewart:fortcf.pdf},
POSTSCRIPT_A4 = {http://www.linearity.org/cas/thesis/thesis-A4.ps},
DOCUMENTURL = {http://www.linearity.org/cas/thesis},
ABSTRACT = {
The Curry--Howard correspondence states the equivalence between the
constructions implicit in intuitionistic logic and those described in
the simply-typed lambda-calculus. It is an insight of great importance
in theoretical computer science, and is fundamental in modern
approaches to constructive type theory. The possibility of a similar
formulae-as-types correspondence for classical logic looks to be a
seminal development in this area, but whilst promising results have
been achieved, there does not appear to be much agreement of what is
at stake in claiming that such a correspondence exists. Consequently
much work in this area suffers from several weaknesses; in particular
the status of the new rules needed to describe the distinctively
classical inferences is unclear.
We show how to situate the formulae-as-types correspondence within the
proof-theoretic account of logical semantics arising from the work of
Michael Dummett and Dag Prawitz, and demonstrate that the
admissibility of Prawitz's inversion principle, which we argue should
be strengthened, is essential to the good behaviour of intuitionistic
logic.
By regarding the rules which determine the deductive strength of
classical logic as structural rules, as opposed to the logical rules
associated with specific logical connectives, we extend Prawitz's
inversion principle to classical propositional logic, formulated in a
theory of Parigot's lambda-mu calculus with eta expansions.
We then provide a classical analogue of a subsystem of Martin-L{\"o}f's
type theory corresponding to Peano Arithmetic and show its soundness,
appealing to an extension of Tait's reducibility method. Our treatment
is the first treatment of induction in classical arithmetic that truly
falls under the aegis of the formulae-as-types correspondence, as it
is the first that is consistent with the intensional reading of
propositional equality.
} }
@ARTICLE{Kfoury:JLC-2000-v10n3,
ITRSPAPER = {yes},
AUTHOR = {Assaf J. Kfoury},
TITLE = {A Linearization of the Lambda-Calculus},
CROSSREF = {JLC-2000-v10n3},
YEAR = {2000},
PAGES = {411--436},
DOCUMENTURL = {http://www.church-project.org/reports/Kfoury:JLC-2000-v10n3.html},
PDF = {http://www.church-project.org/reports/electronic/Kfoury:JLC-2000-v10n3.pdf},
URL = {http://www.church-project.org/reports/electronic/Kfoury:JLC-2000-v10n3.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Kfoury:JLC-2000-v10n3.ps.gz},
CHURCHREPORT = {yes},
ABSTRACT = {We embed the standard $\lambda$-calculus, denoted $\Lambda$,
into two larger $\lambda$-calculi,
denoted $\Lambda^\land$ and $\&\Lambda^\land$.
The standard notion of $\beta$-reduction for $\Lambda$
corresponds to two new notions of reduction,
$\beta^{\land}$ for $\Lambda^\land$ and
$\&\beta^{\land}$ for $\&\Lambda^\land$. A distinctive
feature of our new calculus $\Lambda^\land$
(resp., $\&\Lambda^\land$) is that, in every function
application, an argument is used at most once (resp.,
exactly once) in the body of the function. We establish
various connections between the three notions of reduction,
$\beta$, $\beta^{\land}$ and $\&\beta^{\land}$. As a
consequence, we provide an alternative framework to study
the relationship between $\beta$-weak normalization and
$\beta$-strong normalization, and give a new proof of the
oft-mentioned equivalence between $\beta$-strong
normalization of standard $\lambda$-terms and typability in
a system of ``intersection types''. }
}
@ARTICLE{Bug+Per:IAC-2000,
AUTHOR = {Michele Bugliesi and Santiago M. Pericas-Geertsen},
TITLE = {Type Inference for Variant Object Types},
JOURNAL = {Inform.\ {\&} Comput.},
YEAR = {2000},
PDF = {http://cs-people.bu.edu/santiago/Papers/Bug+Per:TR-2000.pdf},
POSTSCRIPT = {http://cs-people.bu.edu/santiago/Papers/Bug+Per:TR-2000.ps},
XURL = {http://cs-people.bu.edu/santiago/Papers/Bug+Per:TR-2000.pdf},
CHURCHREPORT = {yes},
ABSTRACT = { Existing type systems for object calculi are based
on invariant subtyping. Subtyping invariance is
required for soundness of static typing in the
presence of method overrides, but it is often in the
way of the expressive power of the type
system. Flexibility of static typing can be
recovered in different ways: in first-order systems,
by the adoption of object types with
\textit{variance annotations}, in second-order
systems by resorting to \textit{Self types}. Type
inference is known to be P-complete for first-order
systems of finite and recursive object types, and
NP-complete for a restricted version of Self
types. The complexity of type inference for systems
with variance annotations is yet unknown. This paper
presents a new object type system based on the
notion of \textit{Split types}, a form of object
types where every method is assigned two types,
namely, an update type and a select type. The
subtyping relation that arises for Split types is
\textit{variant} and, as a result, subtyping can be
performed both in \textit{width} and in
\textit{depth}. The new type system generalizes all
the existing first-order type systems for objects,
including systems based on variance
annotations. Interestingly, the additional
expressive power does not affect the complexity of
the type inference problem, as we show by presenting
an $O(n^3)$ inference algorithm.}
}
@INPROCEEDINGS{Bug+Per:FOOL-2000,
AUTHOR = {Michele Bugliesi and Santiago M. Pericas-Geertsen},
TITLE = {Depth Subtyping and Type Inference for Object
Calculi},
CROSSREF = {FOOL2000},
YEAR = {2000},
PDF = {http://cs-people.bu.edu/santiago/Papers/Bug+Per:EA-FOOL-2000.pdf},
XURL = {http://cs-people.bu.edu/santiago/Papers/Bug+Per:EA-FOOL-2000.pdf},
ABSTRACT = {We present a new type system based on the notion of
Split types. In this system, every method is
assigned two types, namely, an update type and a
select type. We show that the system of Split types
is strictly more powerful than the system of
recursive types and that type inference remains
decidable and feasible. We include a number of
interesting examples not typable with recursive
types that are typable with Split types. In
particular, we demonstrate that the subtyping
relation needed to encode the $\lambda$-calculus into
Abadi and Cardelli's $\varsigma$-calculus
holds. We also present a polynomial-time algorithm
that infers Split types and show that it is sound
and complete with respect to the type system. We
conclude our presentation by relating the typing
power of Split types to the typing power of other
systems, such as the system of recursive types with
variance annotations and the system of Self types.},
CHURCHREPORT = {yes}
}
@INPROCEEDINGS{Amt+Tur:Faithful-ESOP-2000,
ITRSPAPER = {yes},
TITLE = {Faithful Translations between Polyvariant Flows and Polymorphic Types},
AUTHOR = {Torben Amtoft and Franklyn Turbak},
CROSSREF = {ESOP2000},
YEAR = {2000},
PAGES = {26--40},
PDF = {http://www.church-project.org/reports/electronic/Amt+Tur:Faithful-ESOP-2000.pdf},
URL = {http://www.church-project.org/reports/electronic/Amt+Tur:Faithful-ESOP-2000.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Amt+Tur:Faithful-ESOP-2000.ps.gz},
DOCUMENTURL = {http://www.church-project.org/reports/Amt+Tur:Faithful-ESOP-2000.html},
ABSTRACT = {Recent work has shown equivalences between
various type systems and flow logics.
Ideally, the translations upon which such
equivalences are based should be {\em faithful}
in the sense that information is not lost in
round-trip translations from flows to types and back
or from types to flows and back. Building on the work of
Nielson \& Nielson and of Palsberg \& Pavlopoulou,
we present the first faithful translations between
a class of finitary polyvariant flow analyses
and a type system supporting polymorphism
in the form of intersection and union types.
Additionally,
our flow/type correspondence solves several
open problems posed by Palsberg \& Pavlopoulou:
(1) it expresses call-string based polyvariance
(such as k-CFA) as well as argument based polyvariance;
(2) it enjoys a subject reduction property
for flows as well as for types; and
(3) it supports a flow-oriented perspective
rather than a type-oriented one.},
CHURCHREPORT = {yes}
}
@TECHREPORT{Amt+Tur:Faithful-TR-2000,
ITRSPAPER = {yes},
TITLE = {Faithful Translations between Polyvariant Flows and Polymorphic Types},
AUTHOR = {Torben Amtoft and Franklyn Turbak},
YEAR = {2000},
INSTITUTION = {Comp.\ Sci.\ Dept., Boston Univ.},
LONGINSTITUTION = {Computer Science Department, Boston University},
NUMBER = {BUCS-TR-2000-01},
PDF = {http://www.church-project.org/reports/electronic/Amt+Tur:Faithful-TR-2000.pdf},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Amt+Tur:Faithful-TR-2000.ps},
ABSTRACT = {Recent work has shown equivalences between
various type systems and flow logics.
Ideally, the translations upon which such
equivalences are based should be {\em faithful}
in the sense that information is not lost in
round-trip translations from flows to types and back
or from types to flows and back. Building on the work of
Nielson \& Nielson and of Palsberg \& Pavlopoulou,
we present the first faithful translations between
a class of finitary polyvariant flow analyses
and a type system supporting polymorphism
in the form of intersection and union types.
Additionally,
our flow/type correspondence solves several
open problems posed by Palsberg \& Pavlopoulou:
(1) it expresses call-string based polyvariance
(such as k-CFA) as well as argument based polyvariance;
(2) it enjoys a subject reduction property
for flows as well as for types; and
(3) it supports a flow-oriented perspective
rather than a type-oriented one.},
NOTE = {Expanded full report of \cite{Amt+Tur:Faithful-ESOP-2000}. To appear Fall 2000, but a \emph{DRAFT} version is available.},
CHURCHREPORT = {yes}
}
@INPROCEEDINGS{Mac+Tur:Link-time-ESOP-2000,
TITLE = {A Calculus for Link-time Compilation},
AUTHOR = {Elena Machkasova and Franklyn A. Turbak},
CROSSREF = {ESOP2000},
PAGES = {260--274},
CHURCHREPORT = {yes},
YEAR = {2000},
PDF = {http://www.church-project.org/reports/electronic/Mac+Tur:Link-time-ESOP-2000.pdf},
URL = {http://www.church-project.org/reports/electronic/Mac+Tur:Link-time-ESOP-2000.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Mac+Tur:Link-time-ESOP-2000.ps.gz}
}
@TECHREPORT{Mac+Tur:Link-time-TR-2000,
TITLE = {A Calculus for Link-time Compilation},
AUTHOR = {Elena Machkasova and Franklyn Turbak},
YEAR = {2000},
INSTITUTION = {Comp.\ Sci.\ Dept., Boston Univ.},
LONGINSTITUTION = {Computer Science Department, Boston University},
NOTE = {Expanded full report of
\cite{Mac+Tur:Link-time-ESOP-2000}},
CHURCHREPORT = {yes}
}
@INCOLLECTION{Kfoury:Rasiowa-memorial,
ITRSPAPER = {yes},
AUTHOR = {Assaf J. Kfoury},
TITLE = {Beta-Reduction as Unification},
YEAR = {1999},
BOOKTITLE = {Logic, Algebra, and Computer Science (H. Rasiowa
Memorial Conference, December 1996), Banach Center
Publication, Volume 46},
PAGES = {137--158},
PUBLISHER = {Springer-Verlag},
EDITOR = {D. Niwinski},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Kfoury:Rasiowa-memorial.ps.gz},
PDF = {http://www.church-project.org/reports/electronic/Kfoury:Rasiowa-memorial.pdf},
URL = {http://www.church-project.org/reports/electronic/Kfoury:Rasiowa-memorial.pdf.gz},
DOCUMENTURL = {http://www.church-project.org/reports/Kfoury:Rasiowa-memorial.html},
NOTE = {Supersedes~\cite{Kfoury:TTTR-1996a} but omits a few proofs included in the
latter},
ABSTRACT = {We define a new unification problem, which we call
{\em $\beta$-unification} and which can be used to characterize
the $\beta$-strong normalization of terms in the
$\lambda$-calculus. We prove the undecidability
of $\beta$-unification, its connection with the system
of intersection types, and several of its basic
properties.},
CHURCHREPORT = {yes}
}
@INPROCEEDINGS{Kfo+Per:LICS-1999,
TITLE = {Type Inference for Recursive Definitions},
AUTHOR = {Assaf J. Kfoury and Santiago M. Pericas-Geertsen},
PAGES = {119--128},
YEAR = {1999},
CROSSREF = {LICS1999},
PDF = {http://cs-people.bu.edu/santiago/Papers/Kfo+Per:EA-LICS-1999.pdf},
POSTSCRIPT = {http://cs-people.bu.edu/santiago/Papers/Kfo+Per:EA-LICS-1999.ps},
XURL = {http://cs-people.bu.edu/santiago/Papers/Kfo+Per:EA-LICS-1999.pdf},
ABSTRACT = {We consider type systems that combine universal types,
recursive types, and object types. We study type inference
in these systems under a rank restriction, following
Leivant's notion of rank.
To motivate our work, we present several examples showing
how our systems can be used to type programs encountered
in practice.
We show that type inference in the rank-$k$ system is decidable
for $k \leq 2$ and undecidable for $k \geq 3$.
(Similar results based on different techniques are known
to hold for System F, without recursive types and object
types.) Our undecidability result is obtained by a reduction
from a particular adaptation (which we call ``regular'') of
the semi-unification problem and whose undecidability is,
interestingly, obtained by methods totally different
from those used in the case of standard (or finite)
semi-unification.},
CHURCHREPORT = {yes}
}
@TECHREPORT{Kfo+Per:TIRD-1999,
TITLE = {Type Inference for Recursive Definitions},
AUTHOR = {Assaf J. Kfoury and Santiago M. Pericas-Geertsen},
POSTSCRIPT = {http://cs-people.bu.edu/santiago/Papers/Kfo+Per:TR-1999.ps},
PDF = {http://cs-people.bu.edu/santiago/Papers/Kfo+Per:TR-1999.pdf},
XURL = {http://cs-people.bu.edu/santiago/Papers/Kfo+Per:TR-1999.pdf},
YEAR = {1999},
INSTITUTION = {Comp.\ Sci.\ Dept., Boston Univ.},
LONGINSTITUTION = {Computer Science Department, Boston University},
NOTE = {Expanded full report of \cite{Kfo+Per:LICS-1999}},
ABSTRACT = {We consider type systems that combine universal types,
recursive types, and object types. We study type inference
in these systems under a rank restriction, following
Leivant's notion of rank.
To motivate our work, we present several examples showing
how our systems can be used to type programs encountered
in practice.
We show that type inference in the rank-$k$ system is decidable
for $k \leq 2$ and undecidable for $k \geq 3$.
(Similar results based on different techniques are known
to hold for System F, without recursive types and object
types.) Our undecidability result is obtained by a reduction
from a particular adaptation (which we call ``regular'') of
the semi-unification problem and whose undecidability is,
interestingly, obtained by methods totally different
from those used in the case of standard (or finite)
semi-unification.},
CHURCHREPORT = {yes}
}
@MASTERSTHESIS{Neergaard-WSNK-1999,
CROSSREF = {Neergaard:WSNK-1999},
YEAR = {1999},
PDF_A4 = {http://www.church-project.org/reports/electronic/Neergaard-WSNK-1999.pdf},
URL_A4 = {http://www.church-project.org/reports/electronic/Neergaard-WSNK-1999.pdf.gz},
POSTSCRIPT_A4 = {http://www.church-project.org/reports/electronic/Neergaard-WSNK-1999.ps.gz},
ABSTRACT = {The thesis studies certain aspects of normalisation
in the lambda-calculus. It begins with a study of
the notions conservation and uniform
normalisation. Conservation means that infinite
reduction paths are preserved under
reduction. Uniform normalisation means that the term
can either not be reduced to a normal form, or that
all ways of reducing the term will eventually lead
to a normal form. \par The classical conservation
theorem for Lambda I has been formulated using
either of the notions conservation and uniform
normalisation. The reason for the indistinctness in
the formulation of the conservation theorem is that
Lambda I is closed under beta-reduction, which in
Lambda I does not erase subterms. In this situation
uniform normalisation implies conservation, and
conservation also implies uniform
normalisation. However, when turning to erasing
reductions the distinction becomes important as
conservation no longer implies uniform
normalisation. This insight is elaborated in a new
technique for finding uniformly normalising subsets
of a lambda-calculus. The technique uses a
combination of a syntactic and a semantic criterion
and it is applied in several ways:
\begin{itemize}\item The technique is used to find a
new uniformly normalising subset of the basic
lambda-calculus. This uniformly normalising subset
includes some terms with erasing redexes, so-called
Kr-redexes. \item The technique is also used in a
typed setting to find a uniformly normalising subset
of the typed lambda-calculus Lambda M corresponding
to minimal first-order logic through the
Curry-Howard isomorphism. \item This uniformly
normalising subset is used to infer strong
normalisation from weak normalisation for Lambda
M. This is a non-trivial extension of techniques
developed recently by S{\o}rensen and Xi to infer
strong normalisation from weak normalisation of the
same notion of reduction. \end{itemize}Furthermore,
the thesis considers the type system of classical
first-order logic, which allows typing of exception
handling. An analysis concludes that it is not
possible to make a direct extension of the technique
by S{\o}rensen and Xi to this system.},
CHURCHREPORT = {yes},
PMNWWW = {report},
URL = {http://www.linearity.org/turtle/reports/Neergaard-WSNK-1999.html}
}
@MASTERSTHESIS{Neergaard:WSNK-1999,
AUTHOR = {Peter M{\o}ller Neergaard},
TITLE = {Weak and Strong Normalization, $\mathbf{K}$-redexes,
and First-Order Logic},
SCHOOL = {{DIKU}},
YEAR = {1999},
MONTH = {August},
NOTE = {DIKU Technical Report 99/11},
XPOSTSCRIPT = {ftp://ftp.diku.dk/diku/semantics/papers/D-399.ps.gz}
}
@TECHREPORT{Amt:BRICS-1999,
AUTHOR = {Torben Amtoft},
TITLE = {Partial Evaluation for Constraint-Based Program
Analyses},
INSTITUTION = {BRICS, Dept. of Computer Science, University of
Aarhus},
YEAR = {1999},
CHURCHREPORT = {yes},
NUMBER = {BRICS-RS-99-45},
PDF = {http://www.church-project.org/reports/electronic/Amt:BRICS-1999.pdf},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Amt:BRICS-1999.ps.gz},
URL = {http://www.church-project.org/reports/electronic/Amt:BRICS-1999.pdf.gz},
ABSTRACT = {We report on a case study in the application of partial evaluation,
initiated by the desire to speed up a constraint-based algorithm
for control-flow analysis. We designed and implemented a dedicated
partial evaluator, able to specialize the analysis wrt. a given
constraint graph and thus remove the interpretive overhead, and
measured it with Feeley's Scheme benchmarks. Even though the gain
turned out to be rather limited, our investigation yielded
valuable feed back in that it provided a better understanding of
the analysis, leading us to (re)invent an incremental version. We
believe this phenomenon to be a quite frequent spinoff from using
partial evaluation, since the removal of interpretive overhead
makes the flow of control more explicit and hence pinpoints
sources of inefficiency. Finally, we observed that partial
evaluation in our case yields such regular, low-level specialized
programs that it begs for runtime code generation.} }
@INPROCEEDINGS{Kfo+Mai+Tur+Wel:ICFP-1999,
ITRSPAPER = {yes},
AUTHOR = {Assaf J. Kfoury and Harry G. Mairson and Franklyn
A. Turbak and J. B. Wells},
TITLE = {Relating Typability and Expressibility in
Finite-Rank Intersection Type Systems},
CROSSREF = {ICFP1999},
PAGES = {90--101},
YEAR = {1999},
ABSTRACT = {We investigate finite-rank intersection type
systems, analyzing the complexity of their type
inference problems and their relation to the problem
of recognizing semantically equivalent
terms. Intersection types allow something of type
$\tau_1\land\tau_2$ to be used in some places at
type $\tau_1$ and in other places at type
$\tau_2$. A \emph{finite-rank} intersection type
system bounds how deeply the $\land$ can appear in
type expressions. Such type systems enjoy strong
normalization, subject reduction, and computable
type inference, and they support a pragmatics for
implementing parametric polymorphism. As a
consequence, they provide a conceptually simple and
tractable alternative to the impredicative
polymorphism of System F and its extensions, while
typing many more programs than the Hindley-Milner
type system found in ML and Haskell. \par While type
inference is computable at every rank, we show that
its complexity grows exponentially as rank
increases. Let $\mathbf{K}(0,n)=n$ and
$\mathbf{K}(t+1,n)=2^{\mathbf{K}(t,n)}$; we prove
that recognizing the pure $\lambda$-terms of size
$n$ that are typable at rank $k$ is complete for
\textsc{dtime}[$\mathbf{K}(k-1,n)$]. We then
consider the problem of deciding whether two
$\lambda$-terms typable at rank $k$ have the same
normal form, generalizing a well-known result of
Statman from simple types to finite-rank
intersection types. We show that the equivalence
problem is
{\textsc{dtime}}$[\mathbf{K}(\mathbf{K}(k-1,n),2)]$-complete.
This relationship between the complexity of
typability and expressiveness is identical in
well-known decidable type systems such as simple
types and Hindley-Milner types, but seems to fail
for System F and its generalizations. The
correspondence gives rise to a conjecture that if
$\mathcal{T}$ is a predicative type system where
typability has complexity $t(n)$ and expressiveness
has complexity $e(n)$, then
$t(n)=\Omega(\log^{*}e(n))$.},
DOCUMENTURL = {http://www.church-project.org/reports/Kfo+Mai+Tur+Wel:ICFP-1999.html},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Kfo+Mai+Tur+Wel:ICFP-1999.ps.gz},
CHURCHREPORT = {yes}
}
@INPROCEEDINGS{Kfo+Wel:POPL-1999,
ITRSPAPER = {yes},
AUTHOR = {Assaf J. Kfoury and J. B. Wells},
TITLE = {Principality and Decidable Type Inference for
Finite-Rank Intersection Types},
CROSSREF = {POPL1999},
YEAR = {1999},
PAGES = {161--174},
DOCUMENTURL = {http://www.church-project.org/reports/Kfo+Wel:POPL-1999.html},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Kfo+Wel:POPL-1999.ps.gz},
ACMURLA = {http://portal.acm.org/citation.cfm?id=292540.292556&coll=portal&dl=ACM&CFID=3866787&CFTOKEN=29489829},
ACMURLB = {http://doi.acm.org/10.1145/292540.292556},
CHURCHREPORT = {yes},
NOTE = {Superseded by \cite{Kfo+Wel:TCSB-2004-v311n1-3}},
ABSTRACT = {Principality of typings is the property that for
each typable term, there is a typing from which all
other typings are obtained via some set of
operations. Type inference is the problem of finding
a typing for a given term, if possible. We define an
intersection type system which has principal typings
and types exactly the strongly normalizable
$\lambda$-terms. More interestingly, every
finite-rank restriction of this system (using
Leivant's first notion of rank) has principal
typings and also has decidable type inference. This
is in contrast to System~F where the finite rank
restriction for every finite rank at 3 and above has
neither principal typings nor decidable type
inference. This is also in contrast to earlier
presentations of intersection types where the status
(decidable or undecidable) of these properties is
unknown for the finite-rank restrictions at 3 and
above. Furthermore, the notion of principal typings
for our system involves only one operation,
substitution, rather than several operations (not
all substitution-based) as in earlier presentations
of principality for intersection types (without rank
restrictions). In our system the earlier notion of
\emph{expansion} is integrated in the form of
\emph{expansion variables}, which are subject to
substitution as are ordinary variables. A
unification-based type inference algorithm is
presented using a new form of unification,
$\beta$-unification.}
}
@ARTICLE{Wells:APAL-1999-v98-no-note,
AUTHOR = {J. B. Wells},
TITLE = {Typability and Type Checking in {S}ystem {F} Are Equivalent and Undecidable},
JOURNAL = {Ann.\ Pure Appl.\ Logic},
VOLUME = 98,
NUMBER = {1--3},
YEAR = {1999},
PAGES = {111--156},
CHURCHREPORT = {yes},
DOCUMENTURL = {http://www.church-project.org/reports/Wells:APAL-1999-v98-no-note.html},
POSTSCRIPT = {http://www.macs.hw.ac.uk/~jbw/papers/f-undecidable-APAL.ps.gz},
XNOTE = {Supersedes \cite{Wells:LICS-1994}},
ABSTRACT = {Girard and Reynolds independently invented System~F
(a.k.a.\ the second-order polymorphically typed
lambda calculus) to handle problems in logic and
computer programming language design, respectively.
Viewing F in the \emph{Curry style}, which
associates types with untyped lambda terms, raises
the questions of \emph{typability} and \emph{type
checking}. Typability asks for a term whether there
exists some type it can be given. Type checking
asks, for a particular term and type, whether the
term can be given that type. The decidability of
these problems has been settled for restrictions and
extensions of F and related systems and complexity
lower-bounds have been determined for typability in
F, but this report is the first to resolve whether
these problems are decidable for System~F.
\par
This report proves that type checking in F is
undecidable, by a reduction from
\emph{semi-unification}, and that typability in F is
undecidable, by a reduction from type checking.
Because there is an easy reduction from typability
to type checking, the two problems are equivalent.
The reduction from type checking to typability uses
a novel method of constructing lambda terms that
simulate arbitrarily chosen type environments. All
of the results also hold for the $\lambda
I$-calculus.}
}
@INPROCEEDINGS{Ong+Ste:curhff,
AUTHOR = {C.-H. L. Ong and C. A. Stewart},
TITLE = {A {C}urry-{H}oward foundation for functional
computation with control},
PAGES = {215--217},
YEAR = {1997},
CROSSREF = {POPL1997},
CHURCHREPORT = {yes},
PDF = {http://www.church-project.org/reports/electronic/Ong+Ste:curhff.pdf},
URL = {http://www.church-project.org/reports/electronic/Ong+Ste:curhff.pdf.gz},
XPOSTSCRIPT = {ftp://achilles.linearity.org/pub/cas/popl97.ps},
DOCUMENTURL = {http://www.linearity.org/cas/abstracts/popl97.html},
ABSTRACT = {
We introduce the type theory lambda-mu-v, a call-by-value variant of
Parigot's lambda-mu calculus, as a Curry-Howard representation theory
of classical propositional proofs. The associated rewrite system is
Church-Rosser and strongly normalising, and definitional equality of
the type theory is consistent, compatible with cut, congruent and
decidable. The attendant call-by-value programming language mu-PCF is
obtained from lambda-mu-v by augmenting it by basic arithmetic,
conditionals and fixpoints. We study the behavioural properties of
mu-PCF-v and show that, though simple, it is a very general language
for functional computation with control: it can express all the main
control constructs such as exceptions and first-class continuations.
Proof theoretically the dual lambda-mu-v constructs of naming and mu
abstraction witness the introduction and elimination rules of
absurdity respectively. Computationally they give succinct expression
to a kind of generic (forward) `jump' operator, which may be regarded
as a unifying control construct for functional computation. Our goal
is that lambda-mu-v and mu-PCF-v respectively should be to functional
computation with first-class access to the flow of control what
lambda-calculus and PCF respectively are to pure functional
programming: lambda-mu-v gives the logical basis via the Curry-Howard
correspondence, and mu-PCF-v is a prototypical language albeit in a
purified form.
} }
@INPROCEEDINGS{Banerjee:ICFP-1997,
ITRSPAPER = {yes},
AUTHOR = {Anindya Banerjee},
TITLE = {A Modular, Polyvariant, and Type-Based Closure
Analysis},
CROSSREF = {ICFP1997},
YEAR = {1997},
PAGES = {1--10},
ABSTRACT = {We observe that the \textit{principal typing
property} of a type system is the enabling
technology for \textit{modularity} and
\textit{separate compilation}
\cite{Jim:MIT-LCS-1995-532}. We use this technology
to formulate a modular and polyvariant closure
analysis, based on the rank 2 intersection types
annotated with control-flow information.\par
Modularity manifests itself in a syntax-directed,
annotated-type inference algorithm that can analyse
\textit{program fragments} containing free
variables: a \textit{principal typing} property is
used to formalise it. Polyvariance manifests itself
in the separation of different behaviours of the
same function at its different uses: this is
formalised via the rank 2 intersection types. As the
rank 2 intersection type discipline types at least
all (core) ML programs, our analysis can be used in
the separate compilation of such programs.},
DOCUMENTURL = {http://www.church-project.org/reports/Banerjee:ICFP-1997.html},
PDF = {http://www.church-project.org/reports/electronic/Banerjee:ICFP-1997.pdf},
URL = {http://www.church-project.org/reports/electronic/Banerjee:ICFP-1997.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Banerjee:ICFP-1997.ps.gz},
CHURCHREPORT = {yes}
}
@INPROCEEDINGS{Wel+Dim+Mul+Tur:TAPSOFT-1997,
ITRSPAPER = {yes},
AUTHOR = {J. B. Wells and Allyn Dimock and Robert Muller and Franklyn Turbak},
TITLE = {A Typed Intermediate Language for Flow-Directed Compilation},
CROSSREF = {TAPSOFT1997},
YEAR = {1997},
PAGES = {757--771},
ABSTRACT = {We present a typed intermediate language
$\lambda^{\mathrm{CIL}}$ for optimizing compilers
for function-oriented and polymorphically typed
programming languages (e.g., ML). The language
$\lambda^{\mathrm{CIL}}$ is a typed lambda calculus
with product, sum, intersection, and union types as
well as function types annotated with flow labels. A
novel formulation of intersection and union types
supports encoding flow information in the typed
program representation. This flow information can
direct optimization.},
NOTE = {Superseded by \cite{Wel+Dim+Mul+Tur:JFP-2002-v12n3}},
COMMENT = {The disabling of the PDF links is disabled as the
fonts do not look that bad when printed and PDF
provides the best way to ensure that the files are
printable under Windows. Please do not disable this
disabling without discussing it with me (Peter). The
PDF link is disabled because the PDF file has
absolutely horrible and unreadable fonts. Please do
not enable it until the fonts are fixed.},
DOCUMENTURL = {http://www.church-project.org/reports/Wel+Dim+Mul+Tur:TAPSOFT-1997.html},
PDF = {http://www.church-project.org/reports/electronic/Wel+Dim+Mul+Tur:TAPSOFT-1997.pdf},
URL = {http://www.church-project.org/reports/electronic/Wel+Dim+Mul+Tur:TAPSOFT-1997.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Wel+Dim+Mul+Tur:TAPSOFT-1997.ps},
CHURCHREPORT = {yes}
}
@INPROCEEDINGS{Dim+Mul+Tur+Wel:ICFP-1997,
ITRSPAPER = {yes},
AUTHOR = {Allyn Dimock and Robert Muller and Franklyn Turbak
and J. B. Wells},
TITLE = {Strongly Typed Flow-Directed Representation
Transformations},
CROSSREF = {ICFP1997},
YEAR = {1997},
PAGES = {11--24},
ABSTRACT = {We present a new framework for transforming data
representations in a strongly typed intermediate
language. Our method allows both value producers
(sources) and value consumers (sinks) to support
multiple representations, automatically inserting
any required code. Specialized representations can
be easily chosen for particular source/sink
pairs. The framework is based on these techniques:
\begin{enumerate} \item \emph{Flow annotated types}
encode the ``flows-from'' (source) and ``flows-to''
(sink) information of a flow graph. \item
\emph{Intersection and union types} support (a)
encoding precise flow information, (b) separating
flow information so that transformations can be well
typed, (c) automatically reorganizing flow paths to
enable multiple representations. \end{enumerate} As
an instance of our framework, we provide a function
representation transformation that encompasses both
closure conversion and inlining. Our framework is
adaptable to data other than functions.},
COMMENT = {The disabling of the PDF links is disabled as the fonts do not look that bad when printed and PDF provides the best way to ensure that the files are printable under Windows. Please do not disable this disabling without discussing it with me (Peter).
The PDF link is disabled because the PDF file has absolutely
horrible and unreadable fonts. Please do not enable it until the
fonts are fixed.},
URL = {http://www.church-project.org/reports/electronic/Dim+Mul+Tur+Wel:ICFP-1997.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Dim+Mul+Tur+Wel:ICFP-1997.ps.gz},
DOCUMENTURL = {http://www.church-project.org/reports/Dim+Mul+Tur+Wel:ICFP-1997.html},
CHURCHREPORT = {yes}
}
@INPROCEEDINGS{Tur+Dim+Mul+Wel:CPPFT-1997,
ITRSPAPER = {yes},
AUTHOR = {Franklyn Turbak and Allyn Dimock and Robert Muller
and J. B. Wells},
TITLE = {Compiling with Polymorphic and Polyvariant Flow
Types},
CROSSREF = {TIC1997},
YEAR = {1997},
ABSTRACT = {Optimizing compilers for function-oriented and
object-oriented languages exploit type and flow
information for efficient implementation. Although
type and flow information (both control and data
flow) are inseparably intertwined, compilers usually
compute and represent them separately. Partially,
this has been a result of the usual polymorphic type
systems using $\forall$ and $\exists$ quantifiers,
which are difficult to use in combination with flow
annotations. \par In the Church Project, we are
experimenting with intermediate languages that
integrate type and flow information into a single
{\em flow type} framework. This integration
facilitates the preservation of flow and type
information through program transformations. In this
paper we describe $\lambda^{\mathrm{CIL}}$, an
intermediate language supporting polymorphic types
and polyvariant flow information and describe its
application in program optimiziation. We are
experimenting with this intermediate language in a
flow and type-directed compiler for a functional
language. The types of $\lambda^{\mathrm{CIL}}$
encode flow information (1) via labels that
approximate sources and sinks of values and (2) via
intersection and union types, finitary versions of
universal and existential types that support type
polymorphism and (in combination with the labels)
polyvariant flow analysis. Accurate flow types
expose opportunities for a wide range of optimizing
program transformations.},
COMMENT = {The disabling of the PDF links is disabled as the fonts do not look that bad when printed and PDF provides the best way to ensure that the files are printable under Windows. Please do not disable this disabling without discussing it with me (Peter).
The PDF link is disabled because the PDF file has absolutely
horrible and unreadable fonts. Please do not enable it until the
fonts are fixed.},
DOCUMENTURL = {http://www.church-project.org/reports/Tur+Dim+Mul+Wel:CPPFT-1997.html},
URL = {http://www.church-project.org/reports/electronic/Tur+Dim+Mul+Wel:CPPFT-1997.pdf.gz},
POSTSCRIPT = {http://www.cs.bc.edu/~muller/TIC97/Turbak.ps.gz},
CHURCHREPORT = {yes},
NOTE = {},
XNOTE = {note field must be blank to prevent getting
crossref's note!}
}
@UNPUBLISHED{Kfoury:TTTR-1996a,
ITRSPAPER = {yes},
AUTHOR = {Assaf J. Kfoury},
TITLE = {Beta-Reduction as Unification},
NOTE = {A refereed extensively edited version
is~\cite{Kfoury:Rasiowa-memorial}. This preliminary version was
presented at the Helena Rasiowa Memorial Conference},
YEAR = {1996},
MONTH = {July},
CHURCHREPORT = {yes},
PDF = {http://www.church-project.org/reports/electronic/Kfoury:TTTR-1996a.pdf},
URL = {http://www.church-project.org/reports/electronic/Kfoury:TTTR-1996a.pdf.gz},
ABSTRACT = {We define a new unification problem, which we call
{\em $\beta$-unification} and which can be used to characterize
the $\beta$-strong normalization of terms in the
$\lambda$-calculus. We prove the undecidability
of $\beta$-unification, its connection with the system
of intersection types, and several of its basic
properties.}
}
@UNPUBLISHED{Kfoury:TTTR-1996,
AUTHOR = {Assaf J. Kfoury},
TITLE = {A Linearization of the Lambda-Calculus},
NOTE = {A refereed version
is~\cite{Kfoury:JLC-2000-v10n3}. This version was
presented at the Glasgow Int'l School on Type Theory
\& Term Rewriting},
PDF = {http://www.church-project.org/reports/electronic/Kfoury:TTTR-1996.pdf},
URL = {http://www.church-project.org/reports/electronic/Kfoury:TTTR-1996.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Kfoury:TTTR-1996.ps.gz},
YEAR = {1996},
MONTH = {September},
CHURCHREPORT = {yes},
ABSTRACT = {We embed the standard $\lambda$-calculus, denoted $\Lambda$,
into two larger $\lambda$-calculi,
denoted $\Lambda^\land$ and $\&\Lambda^\land$.
The standard notion of $\beta$-reduction for $\Lambda$
corresponds to two new notions of reduction,
$\beta^{\land}$ for $\Lambda^\land$ and
$\&\beta^{\land}$ for $\&\Lambda^\land$. A distinctive
feature of our new calculus $\Lambda^\land$
(resp., $\&\Lambda^\land$) is that, in every function
application, an argument is used at most once (resp.,
exactly once) in the body of the function. We establish
various connections between the three notions of reduction,
$\beta$, $\beta^{\land}$ and $\&\beta^{\land}$. As a
consequence, we provide an alternative framework to study
the relationship between $\beta$-weak normalization and
$\beta$-strong normalization, and give a new proof of the
oft-mentioned equivalence between $\beta$-strong
normalization of standard $\lambda$-terms and typability in
a system of ``intersection types''. },
COMMENT = {The funny order of things in the note is to work
around the way BibTeX would really screw up the
meaning by putting ", 1996" after the note, implying
falsely that the year has something to do with the
note.}
}
@TECHREPORT{Wells:TUFE-1996,
AUTHOR = {J. B. Wells},
TITLE = {Typability is Undecidable for {F}+Eta},
INSTITUTION = {Comp.\ Sci.\ Dept., Boston Univ.},
YEAR = {1996},
NUMBER = {96-022},
TYPE = {Tech.\ Rep.},
MONTH = {March},
DOCUMENTURL = {http://www.church-project.org/reports/Wells:TUFE-1996.html},
POSTSCRIPT = {http://www.cs.bu.edu/techreports/1996-022-f+eta-typability-undecidable.ps.gz},
CHURCHREPORT = {yes},
ABSTRACT = {System F is the well-known polymorphically-typed
$\lambda$-calculus with
universal quantifiers (``$\forall$'').
F+$\eta$ is System F extended with the eta rule,
which says that if term $M$ can be given type $\tau$
and $M$ $\eta$-reduces to $N$, then $N$ can also be
given the type $\tau$.
Adding the eta rule to System F is equivalent to
adding the subsumption rule using the subtyping
(``containment'') relation that Mitchell defined and
axiomatized.
The subsumption rule says that if $M$ can be given
type $\tau$ and $\tau$ is a subtype of type
$\sigma$, then $M$ can be given type $\sigma$.
Mitchell's subtyping relation involves no extensions
to the syntax of types, i.e., no bounded
polymorphism and no supertype of all types, and is
thus unrelated to the system $\mathrm{F}_\leq$
(``F-sub'').
\par
Typability for F+$\eta$ is the problem of
determining for any term $M$ whether there is any
type $\tau$ that can be given to it using the type
inference rules of F+$\eta$.
Typability has been proven undecidable for System
F (without the eta rule), but the decidability of
typability has been an open problem for F+$\eta$.
Mitchell's subtyping relation has recently been
proven undecidable, implying the undecidability of
``type checking'' for F+$\eta$.
This paper reduces the problem of subtyping to the
problem of typability for F+$\eta$, thus proving the
undecidability of typability.
The proof methods are similar in outline to those
used to prove the undecidability of typability for
System F, but the fine details differ greatly.}
}
@TECHREPORT{Jim:RTTS-1995,
ITRSPAPER = {yes},
AUTHOR = {Trevor Jim},
TITLE = {Rank 2 Type Systems and Recursive Definitions},
INSTITUTION = {Massachusetts Institute of Technology},
YEAR = {1995},
NUMBER = {MIT/LCS/TM-531},
MONTH = {November},
PDF = {http://www.church-project.org/reports/electronic/Jim:RTTS-1995.pdf},
URL = {http://www.church-project.org/reports/electronic/Jim:RTTS-1995.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Jim:RTTS-1995.ps.gz},
ABSTRACT = {We demonstrate the pragmatic value of the
\emph{principal typing property}, a property
distinct from ML's principal type property, by
studying a type system with principal typings. The
type system is based on rank 2 intersection types
and is closely related to ML\@.\par Its principal
typing property provides elegant support for
separate compilation, including ``smartest
recompilation'' and incremental type
inference. Moreover, it motivates a new rule for
typing recursive definitions that can type some
interesting examples of polymorphic recursion.},
CHURCHREPORT = {yes}
}
@TECHREPORT{Jim:MIT-LCS-1995-532,
ITRSPAPER = {yes},
AUTHOR = {Trevor Jim},
TITLE = {What Are Principal Typings and What Are They Good
For?},
INSTITUTION = {MIT},
YEAR = {1995},
TYPE = {Tech.\ memo.},
NUMBER = {MIT/LCS/TM-532},
PDF = {http://www.church-project.org/reports/electronic/Jim:MIT-LCS-1995-532.pdf},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Jim:MIT-LCS-1995-532.ps.gz},
URL = {http://www.church-project.org/reports/electronic/Jim:MIT-LCS-1995-532.pdf.gz},
ABSTRACT = {We demonstrate the pragmatic value of the
\emph{principal typing property}, a property
distinct from ML's principal type property, by
studying a type system with principal typings. The
type system is based on rank 2 intersection types
and is closely related to ML\@.\par Its principal
typing property provides elegant support for
separate compilation, including ``smartest
recompilation'' and incremental type inference, and
for accurate type error messages. Moreover, it
motivates a new rule for typing recursive
definitions that can type some interesting examples
of polymorphic recursion.},
CHURCHREPORT = {yes}
}
@INPROCEEDINGS{Kfo+Wel:LICS-1995,
ITRSPAPER = {yes},
TITLE = {New Notions of Reduction and Non-Semantic Proofs of
$\beta$-Strong Normalization in Typed
$\lambda$-Calculi},
AUTHOR = {A. J. Kfoury and J. B. Wells},
PAGES = {311--321},
CROSSREF = {LICS1995},
COMMENT = {The PDF link is disabled because the PDF file has
absolutely horrible and unreadable fonts. Please do
not enable it until the fonts are fixed.},
PDF = {http://www.church-project.org/reports/electronic/Kfo+Wel:LICS-1995.pdf},
DOCUMENTURL = {http://www.church-project.org/reports/Kfo+Wel:LICS-1995.html},
URL = {http://www.church-project.org/reports/electronic/Kfo+Wel:LICS-1995.pdf.gz},
POSTSCRIPT = {http://www.church-project.org/reports/electronic/Kfo+Wel:LICS-1995.ps.gz},
SOURCE = {ftp://theory.lcs.mit.edu/pub/meyer/lics.bib},
CHURCHREPORT = {yes},
YEAR = {1995},
ABSTRACT = {Two notions of reduction for terms of the
$\lambda$-calculus are introduced and the question
of whether a $\lambda$-term is $\beta$-strongly
normalizing is reduced to the question of whether a
$\lambda$-term is merely normalizing under one of
the notions of reduction. This gives a method to
prove strong $\beta$-normalization for typed
$\lambda$-calculi. Instead of the usual semantic
proof style based on Tait's realizability or
Girard's ``candidats de r{\'e}ductibilit{\'e}'',
termination can be proved using a decreasing metric
over a well-founded ordering. This proof method is
applied to the simply-typed $\lambda$-calculus and
the system of intersection types, giving the first
non-semantic proof for a polymorphic extension of
the $\lambda$-calculus.}
}
@TECHREPORT{Wells:UMSR-1995,
AUTHOR = {J. B. Wells},
TITLE = {The Undecidability of {Mitchell's} Subtyping Relation},
INSTITUTION = {Comp.\ Sci.\ Dept., Boston Univ.},
YEAR = {1995},
MONTH = {December},
DOCUMENTURL = {http://www.church-project.org/reports/Wells:UMSR-1995.html},
POSTSCRIPT = {http://www.cs.bu.edu/ftp/pub/jbw/types/subtyping-undecidable.ps.gz},
POSTSCRIPTB = {http://www.cs.bu.edu/techreports/ps/1995-019-subtyping-undecidable.ps.gz},
REMARK = {Doesn't have tech report number in the actual paper.},
TYPE = {Tech.\ Rep.},
NUMBER = {95-019},
CHURCHREPORT = {yes},
ABSTRACT = {Mitchell defined and axiomatized a subtyping
relationship (also known as \emph{containment},
\emph{coercibility}, or \emph{subsumption}) over the
types of System F (with ``$\to$'' and
``$\forall$''). This subtyping relationship is quite
simple and does not involve bounded
quantification. Tiuryn and Urzyczyn quite recently
proved this subtyping relationship to be
undecidable. This paper supplies a new
undecidability proof for this subtyping
relationship. First, a new syntax-directed
axiomatization of the subtyping relationship is
defined. Then, this axiomatization is used to prove
a reduction from the undecidable problem of
\emph{semi-unification} to subtyping. The
undecidability of subtyping implies the
undecidability of \emph{type checking} for System F
extended with Mitchell's subtyping, also known as
``F plus eta''.}
}
@COMMENT{{Bookswhicharecollectionsofarticlesmustgoattheendofthefile}}
@COMMENT{{Donotaddentriesotherthancrossreftargetsatendoffile.Donotaddanyentriesafterthispoint.}}
@PROCEEDINGS{APLAS2004,
TITLE = {Prog.\ Lang.\ and
Systems: 2nd Asian Symp.\ ({APLAS} 2004)},
BOOKTITLE = {Prog.\ Lang.\ and
Systems: 2nd Asian Symp.\ ({APLAS} 2004)},
XADDRESS = {Taipei, Taiwan},
YEAR = 2004,
MONTH = NOV,
KEY = {AP{\relax LAS} '04},
SERIES = {LNCS},
PUBLISHER = {Springer-Verlag},
VOLUME = 3302
}
@PROCEEDINGS{ESOP2000,
TITLE = {Programming Languages \& Systems, 9th European Symp.\ Programming},
BOOKTITLE = {Programming Languages \& Systems, 9th European Symp.\ Programming},
KEY = {ES{\relax OP} '00},
YEAR = 2000,
SERIES = {LNCS},
VOLUME = 1782,
PUBLISHER = {Springer-Verlag},
XEDITOR = {G. Smolka},
TOTALPAGES = 442,
XMONTH = MAR # {/} # APR,
XPUBLISHER = {Springer-Verlag Berlin and Heidelberg GmbH \& Co. KG},
ISBN = {3-540-67262-1},
XADDRESS = {Berlin, Germany},
DATE = {2000-03-28/---31},
XNOTE = {held as part of ETAPS 2000}
}
@PROCEEDINGS{ESOP2002,
TITLE = {Programming Languages \& Systems, 11th European Symp.\ Programming},
BOOKTITLE = {Programming Languages \& Systems, 11th European Symp.\ Programming},
KEY = {ES{\relax OP} '02},
YEAR = 2002,
SERIES = {LNCS},
VOLUME = 2305,
PUBLISHER = {Springer-Verlag},
TOTALPAGES = {***},
DATE = {2002-04-08/---12},
XPUBLISHER = {Springer-Verlag Berlin and Heidelberg GmbH \& Co. KG},
XADDRESS = {Grenoble, France},
XNOTE = {held as part of ETAPS 2002},
XEDITOR = {Le M{\'e}tayer, Daniel},
ISBN = {3-540-43363-5}
}
@PROCEEDINGS{ESOP2003,
TITLE = {Programming Languages \& Systems, 12th European Symp.\ Programming},
BOOKTITLE = {Programming Languages \& Systems, 12th European Symp.\ Programming},
KEY = {ES{\relax OP} '03},
YEAR = 2003,
SERIES = {LNCS},
VOLUME = 2618,
PUBLISHER = {Springer-Verlag},
XDATE = {2003-04-07/---11},
XPUBLISHER = {Springer-Verlag Berlin and Heidelberg GmbH \& Co. KG},
XADDRESS = {Warsaw, Poland},
XNOTE = {held as part of ETAPS 2003},
XEDITOR = {Pierpaolo Degano},
ISBN = {3-540-00886-1}
}
@PROCEEDINGS{ESOP2004,
TITLE = {Programming Languages \& Systems, 13th European Symp.\ Programming},
BOOKTITLE = {Programming Languages \& Systems, 13th European Symp.\ Programming},
KEY = {ES{\relax OP} '04},
YEAR = 2004,
SERIES = {LNCS},
VOLUME = 2986,
PUBLISHER = {Springer-Verlag},
XDATE = {2004-03-29/--04-02},
XPUBLISHER = {Springer-Verlag Berlin and Heidelberg GmbH \& Co. KG},
XADDRESS = {Barcelona, Spain},
XNOTE = {held as part of ETAPS 2004},
XEDITOR = {David Schmidt},
ISBN = {3-540-21313-9}
}
@PROCEEDINGS{ESOP2005,
TITLE = {Programming Languages \& Systems, 14th European Symp.\ Programming},
BOOKTITLE = {Programming Languages \& Systems, 14th European Symp.\ Programming},
KEY = {ES{\relax OP} '05},
YEAR = 2005,
SERIES = {LNCS},
VOLUME = 3444,
PUBLISHER = {Springer-Verlag},
XDATE = {2005-04-04/---08},
XPUBLISHER = {Springer-Verlag Berlin and Heidelberg GmbH \& Co. KG},
XADDRESS = {Edinburgh, Scotland},
XNOTE = {held as part of ETAPS 2005},
XEDITOR = {Mooly Sagiv},
ISBN = {3-540-25435-8},
DOI = {10.1007/b107380}
}
@PROCEEDINGS{FOOL2000,
TITLE = {Proc.\ Seventh Workshop on Foundations of Object-Oriented Languages},
YEAR = 2000,
KEY = {FO{\relax OL} '00},
BOOKTITLE = {Proc.\ Seventh Workshop on Foundations of Object-Oriented Languages},
ADDRESS = {Boston, Mass., U.S.A.}
}
@PROCEEDINGS{ICALP2002,
TITLE = {Proc.\ 29th Int'l Coll.\ Automata, Languages, and Programming},
BOOKTITLE = {Proc.\ 29th Int'l Coll.\ Automata, Languages, and Programming},
SERIES = {LNCS},
PUBLISHER = {Springer-Verlag},
VOLUME = 2380,
YEAR = {2002},
MONTH = {July},
DATE = {2002-07-08/---13},
XADDRESS = {Malaga, Spain},
ISBN = {3-540-43864-5}
}
@PROCEEDINGS{ICC2002,
TITLE = {Proc.\ Workshop on Implicit Computational Complexity},
MONTH = JUL,
YEAR = 2002,
KEY = {ICC '02},
BOOKTITLE = {Proc.\ Workshop on Implicit Computational Complexity},
XPUBLISHER = {DIKU},
XADDRESS = {Copenhagen, Denmark}
}
@PROCEEDINGS{ICFP1997,
TITLE = {Proc.\ 1997 Int'l Conf.\ Functional Programming},
YEAR = 1997,
KEY = {IC{\relax FP} '97},
BOOKTITLE = {Proc.\ 1997 Int'l Conf.\ Functional Programming},
PUBLISHER = {ACM Press},
XADDRESS = {Amsterdam, The Netherlands},
XMONTH = JUN # {~9--11},
DATE = {1997-06-09/---11},
ISBN = {0-89791-918-1},
XISSN = {0362-1340},
XSERIES = {ACM SIGPLAN Notices},
XVOLUME = {32(8) (August 1997)},
XORGANIZATION = {ACM}
}
@PROCEEDINGS{ICFP1999,
TITLE = {Proc.\ 1999 Int'l Conf.\ Functional Programming},
YEAR = 1999,
KEY = {IC{\relax FP} '99},
BOOKTITLE = {Proc.\ 1999 Int'l Conf.\ Functional Programming},
PUBLISHER = {ACM Press},
XADDRESS = {Paris, France},
XMONTH = SEP # {~27--29},
DATE = {1999-09-27/---29},
ISBN = {1-58113-111-9},
XORGANIZATION = {ACM}
}
@PROCEEDINGS{ICFP2001,
TITLE = {Proc.\ 6th Int'l Conf.\ Functional Programming},
YEAR = 2001,
KEY = {IC{\relax FP} '01},
BOOKTITLE = {Proc.\ 6th Int'l Conf.\ Functional Programming},
PUBLISHER = {ACM Press},
XADDRESS = {Firenze, Italy},
XMONTH = SEP # {~3--5},
XORGANIZATION = {ACM},
ISBN = {1-58113-415-0}
}
@PROCEEDINGS{ICFP2004,
TITLE = {Proc.\ 9th Int'l Conf.\ Functional Programming},
MONTH = SEP,
YEAR = 2004,
KEY = {IC{\relax FP} '04},
BOOKTITLE = {Proc.\ 9th Int'l Conf.\ Functional Programming},
PUBLISHER = {ACM Press},
XADDRESS = {Snowbird, Utah, USA},
XORGANIZATION = {ACM}
}
@PROCEEDINGS{ICFP2005,
TITLE = {Proc.\ 10th Int'l Conf.\ Functional Programming},
MONTH = SEP,
YEAR = 2005,
KEY = {IC{\relax FP} '05},
BOOKTITLE = {Proc.\ 10th Int'l Conf.\ Functional Programming},
PUBLISHER = {ACM Press},
XADDRESS = {Tallinn, Estonia},
XEDITOR = {Olivier Danvy and Benjamin C. Pierce},
XORGANIZATION = {ACM},
DATE = {2005-09-26/---28},
ISBN = {1-59593-064-7}
}
@PROCEEDINGS{ITRS2002,
XEDITOR = {Steffen van Bakel},
TITLE = {Proceedings of the 2nd Workshop on Intersection Types and Related Systems},
BOOKTITLE = {Proceedings of the 2nd Workshop on Intersection Types and Related Systems},
KEY = {IT{\relax RS} '02},
REMARK = {*****FIX THIS ENTRY*****},
EVENTDATE = {2002-07-26},
PUBLICATIONDATE = {2003-02},
YEAR = 2002,
ELSEVIERURL = {http://www.elsevier.nl/locate/entcs/volume70.html},
NOTE = {The ITRS '02 proceedings appears as vol.\ 70, issue 1 of \emph{Elec.\ Notes in Theoret.\ Comp.\ Sci.}, 2003-02},
ISBN = {0-444-51407-4}
}
@PROCEEDINGS{LICS1995,
TITLE = {Proc.\ 10th Ann.\ {IEEE} Symp.\ Logic in Comput. Sci.},
BOOKTITLE = {Proc.\ 10th Ann.\ {IEEE} Symp.\ Logic in Comput. Sci.},
XADDRESS = {San Diego, CA, U.S.A.},
XMONTH = JUN,
XXMONTH = JUN # {~26--29,},
KEY = {LI{\relax CS} '95},
XDAYS = {26--29},
DATE = {1995-06-26/---29},
YEAR = 1995,
XPUBLISHER = {IEEE Comput.\ Soc.\ Press},
XORGANIZATION = {The IEEE Computer Society's Technical Committee on
Mathematical Foundations of Computing},
ISBN = {0-8186-7050-9},
COMMENT = {IEEE Computer Society Press Order Number PR07050;
IEEE Catalog Number 95CH35768; ISBN 0-8186-7050-9;
ISSN Number 1043-6871},
SOURCE = {ftp://theory.lcs.mit.edu/pub/meyer/lics.bib}
}
@PROCEEDINGS{LICS1999,
TITLE = {Proc.\ 14th Ann.\ {IEEE} Symp.\ Logic in Comput. Sci.},
YEAR = 1999,
KEY = {LI{\relax CS} '99},
BOOKTITLE = {Proc.\ 14th Ann.\ {IEEE} Symp.\ Logic in Comput. Sci.},
XADDRESS = {Trento, Italy},
MONTH = JUL,
XPUBLISHER = {IEEE Comput.\ Soc.\ Press}
}
@PROCEEDINGS{POPL1997,
TITLE = {Conf.\ Rec.\ POPL '97: 24th {ACM} Symp.\ Princ.\ of\ Prog.\ Langs.},
BOOKTITLE = {Conf.\ Rec.\ POPL '97: 24th {ACM} Symp.\ Princ.\ of\ Prog.\ Langs.},
YEAR = 1997,
XORGANIZATION = {ACM},
KEY = {PO{\relax PL} '97},
XADDRESS = {Paris, France},
XPUBLISHER = {ACM Press},
XMONTH = {15--17 } # JAN
}
@PROCEEDINGS{POPL1999,
TITLE = {Conf.\ Rec.\ POPL '99: 26th {ACM} Symp.\ Princ.\ of\ Prog.\ Langs.},
BOOKTITLE = {Conf.\ Rec.\ POPL '99: 26th {ACM} Symp.\ Princ.\ of\ Prog.\ Langs.},
YEAR = 1999,
XORGANIZATION = {ACM},
KEY = {PO{\relax PL} '99},
XADDRESS = {San Antonio, Texas, USA},
XPUBLISHER = {ACM Press},
XMONTH = JAN,
DATE = {1999-01-20/---22},
ISBN = {1-58113-095-3}
}
@PROCEEDINGS{POPL2000,
TITLE = {Conf.\ Rec.\ POPL '00: 27th {ACM} Symp.\ Princ.\ of\ Prog.\ Langs.},
BOOKTITLE = {Conf.\ Rec.\ POPL '00: 27th {ACM} Symp.\ Princ.\ of\ Prog.\ Langs.},
YEAR = 2000,
XORGANIZATION = {ACM},
KEY = {PO{\relax PL} '00},
XADDRESS = {Boston, Massachusetts, USA},
XPUBLISHER = {ACM Press},
XMONTH = JAN
}
@PROCEEDINGS{POPL2003,
TITLE = {Conf.\ Rec.\ POPL '03: 30th {ACM} Symp.\ Princ.\ of\ Prog.\ Langs.},
BOOKTITLE = {Conf.\ Rec.\ POPL '03: 30th {ACM} Symp.\ Princ.\ of\ Prog.\ Langs.},
YEAR = 2003,
XORGANIZATION = {ACM},
KEY = {PO{\relax PL} '03},
XADDRESS = {Louisiana, USA},
XPUBLISHER = {ACM Press},
XMONTH = JAN
}
@PROCEEDINGS{PPDP2001,
TITLE = {Proc.\ 3rd Int'l Conf.\ Principles \& Practice Declarative Programming},
YEAR = 2001,
KEY = {PP{\relax DP} '01},
BOOKTITLE = {Proc.\ 3rd Int'l Conf.\ Principles \& Practice Declarative Programming},
XPUBLISHER = {ACM Press},
XADDRESS = {Firenze, Italy},
MONTH = {5--7~} # SEP,
ISBN = {1-58113-388-X}
}
@PROCEEDINGS{PPDP2002,
TITLE = {Proc.\ 4th Int'l Conf.\ Principles \& Practice Declarative Programming},
YEAR = 2002,
KEY = {PP{\relax DP} '02},
BOOKTITLE = {Proc.\ 4th Int'l Conf.\ Principles \& Practice Declarative Programming},
PUBLISHER = {ACM Press},
XADDRESS = {Pittsburgh, PA, US},
XPUBLISHER = {ACM Press}
}
@PROCEEDINGS{PPDP2004,
TITLE = {Proc.\ 6th Int'l Conf.\ Principles \& Practice Declarative Programming},
YEAR = 2004,
KEY = {PP{\relax DP} '04},
BOOKTITLE = {Proc.\ 6th Int'l Conf.\ Principles \& Practice Declarative Programming},
XADDRESS = {Verona, IT},
XPUBLISHER = {ACM Press},
ISBN = {1-58113-819-9}
}
@PROCEEDINGS{RTA2003,
TITLE = {Rewriting Techniques \& Applications, 14th Int'l Conf., RTA 2003},
BOOKTITLE = {Rewriting Techniques \& Applications, 14th Int'l Conf., RTA 2003},
KEY = {RTA '03},
PUBLISHER = {Springer-Verlag},
VOLUME = 2706,
SERIES = {LNCS},
XADDRESS = {Valencia, Spain},
DATE = {2003-06-09/---11},
XMONTH = JUN,
YEAR = 2003,
ISBN = {3-540-40254-3}
}
@PROCEEDINGS{TCS2004,
TITLE = {IFIP TC1 3rd Int'l Conf.\ Theoret.\ Comput.\ Sci.\ (TCS '04)},
BOOKTITLE = {IFIP TC1 3rd Int'l Conf.\ Theoret.\ Comput.\ Sci.\ (TCS '04)},
KEY = {TCS '04},
XEDITOR = {Jean-Jacques Levy and Ernst W. Mayr and John C. Mitchell},
XADDRESS = {Toulouse, France},
XMONTH = AUG # { 23--26},
YEAR = {2004},
ISBN = {1-4020-8140-5},
PUBLISHER = {Kluwer Academic Publishers}
}
@PROCEEDINGS{TAPSOFT1997,
TITLE = {Proc.\ 7th Int'l Joint Conf.\ Theory \& Practice
of Software Development},
YEAR = 1997,
KEY = {TA{\relax PS}OFT '97},
BOOKTITLE = {Proc.\ 7th Int'l Joint Conf.\ Theory \& Practice
of Software Development},
MAYBEBOOKTITLE = {TAPSOFT '97, Theory and Practice of Software
Development, 7th International Joint Conference CAAP/FASE},
VOLUME = 1214,
SERIES = {LNCS},
XPUBLISHER = {Springer-Verlag},
XADDRESS = {Lille, France},
XMONTH = APR,
XXMONTH = APR # {~14--18,},
DATE = {1997-04-14/---18},
TOTALPAGES = 884,
XXPUBLISHER = {Springer-Verlag Berlin and Heidelberg GmbH \& Co. KG},
ISBN = {3-540-62781-2},
XNOTE = {TAPSOFT consists of Trees in Algebra in Programming
(CAAP) and Formal Approaches in Software Engineering
(FASE)}
}
@PROCEEDINGS{TIC2000,
TITLE = {Preliminary Proceedings of the Third Workshop on Types in Compilation (TIC 2000)},
YEAR = 2000,
KEY = {TIC '00},
BOOKTITLE = {Preliminary Proceedings of the Third Workshop on Types in Compilation (TIC 2000)},
XADDRESS = {Montr{\'e}al, Canada},
XMONTH = {21~} # SEP,
DATE = {2000-09-21},
NOTE = {Carnegie Mellon Univ.\ tech.\ report CMU-CS-00-161}
}
@PROCEEDINGS{TIC2000LNCS,
TITLE = {Types in Compilation, Third Int'l Workshop, TIC 2000},
YEAR = 2001,
KEY = {TIC '00},
BOOKTITLE = {Types in Compilation, Third Int'l Workshop, TIC 2000},
SERIES = {LNCS},
VOLUME = 2071,
PUBLISHER = {Springer-Verlag},
XEDITOR = {R. Harper},
ISBN = {3-540-42196-3},
COMMENT = {My best guess as to the actual publication date is
2001-06-13, because that appears as the "online
publication" date on an associated web page.}
}
@PROCEEDINGS{TIC1997,
TITLE = {Proc.\ First Int'l Workshop on Types in Compilation},
YEAR = {1997},
KEY = {TIC '97},
BOOKTITLE = {Proc.\ First Int'l Workshop on Types in Compilation},
XEDITOR = {R. Muller},
XPUBLISHER = {Boston College},
XORGANIZATION = {ACM SIGPLAN},
XADDRESS = {Amsterdam, The Netherlands},
MONTH = JUN,
XMONTH = JUN # {~8,},
DATE = {1997-06-08},
XNOTE = {Boston College Computer Science Department Technical Report BCCS-97-03},
NOTE = {The printed TIC '97 proceedings is Boston Coll.\ Comp.\
Sci.\ Dept.\ Tech.\ Rep.\ BCCS-97-03. The individual
papers are available at
http://www.cs.bc.edu/{\~{}}muller/TIC97/ or
http://oak.bc.edu/{\~{}}muller/TIC97/}
}
%%eof