Implement LUB algorithm and add new unit-testing infrastructure for infer.

r=brson
Niko Matsakis 2012-11-01 15:16:46 -07:00
parent dc34fb9219
commit 117e5e3583
12 changed files with 727 additions and 119 deletions

View file

@ -41,6 +41,7 @@ export expr_is_lval, expr_kind;
export ExprKind, LvalueExpr, RvalueDatumExpr, RvalueDpsExpr, RvalueStmtExpr;
export field_ty;
export fold_ty, fold_sty_to_ty, fold_region, fold_regions;
export apply_op_on_t_to_ty_fn;
export fold_regions_and_ty, walk_regions_and_ty;
export field;
export field_idx, field_idx_strict;
@ -1482,6 +1483,30 @@ fn fold_regions_and_ty(
}
}
/* A little utility: it often happens that I have a `fn_ty`,
* but I want to use some function like `fold_regions_and_ty()`
* that is defined over all types. This utility converts to
* a full type and back. It's not the best way to do this (somewhat
* inefficient to do the conversion), it would be better to refactor
* all this folding business. However, I've been waiting on that
* until trait support is improved. */
fn apply_op_on_t_to_ty_fn(
cx: ctxt,
f: &FnTy,
t_op: fn(t) -> t) -> FnTy
{
let t0 = ty::mk_fn(cx, *f);
let t1 = t_op(t0);
match ty::get(t1).sty {
ty::ty_fn(copy f) => {
move f
}
_ => {
cx.sess.bug(~"`t_op` did not return a function type");
}
}
}
// n.b. this function is intended to eventually replace fold_region() below,
// that is why its name is so similar.
fn fold_regions(

View file

@ -251,6 +251,29 @@ fn require_same_types(
}
}
// a list of mapping from in-scope-region-names ("isr") to the
// corresponding ty::Region
type isr_alist = @List<(ty::bound_region, ty::Region)>;
trait get_and_find_region {
fn get(br: ty::bound_region) -> ty::Region;
fn find(br: ty::bound_region) -> Option<ty::Region>;
}
impl isr_alist: get_and_find_region {
fn get(br: ty::bound_region) -> ty::Region {
self.find(br).get()
}
fn find(br: ty::bound_region) -> Option<ty::Region> {
for list::each(self) |isr| {
let (isr_br, isr_r) = *isr;
if isr_br == br { return Some(isr_r); }
}
return None;
}
}
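As an aside, the lookup this trait provides is a plain linear search over the association pairs. Here is a stand-alone toy equivalent; `BoundRegion`, `Region`, and `IsrAlist` below are simplified placeholders invented for illustration, not the rustc types:

```rust
// Toy model of the isr_alist lookup; BoundRegion and Region are
// illustrative stand-ins for ty::bound_region and ty::Region.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct BoundRegion(u32);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Region(u32);

type IsrAlist = Vec<(BoundRegion, Region)>;

// `find` returns the Region paired with the first matching bound region.
fn find(isr: &IsrAlist, br: BoundRegion) -> Option<Region> {
    isr.iter().find(|(b, _)| *b == br).map(|(_, r)| *r)
}

// `get` is the same lookup but fails if the entry is missing, mirroring
// the original's `self.find(br).get()`.
fn get(isr: &IsrAlist, br: BoundRegion) -> Region {
    find(isr, br).expect("no in-scope region for bound region")
}
```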
fn arg_is_argv_ty(tcx: ty::ctxt, a: ty::arg) -> bool {
match ty::resolved_mode(tcx, a.mode) {
ast::by_val => { /*ok*/ }

View file

@ -166,29 +166,6 @@ fn blank_fn_ctxt(ccx: @crate_ctxt, rty: ty::t,
}
}
// a list of mapping from in-scope-region-names ("isr") to the
// corresponding ty::Region
type isr_alist = @List<(ty::bound_region, ty::Region)>;
trait get_and_find_region {
fn get(br: ty::bound_region) -> ty::Region;
fn find(br: ty::bound_region) -> Option<ty::Region>;
}
impl isr_alist: get_and_find_region {
fn get(br: ty::bound_region) -> ty::Region {
self.find(br).get()
}
fn find(br: ty::bound_region) -> Option<ty::Region> {
for list::each(self) |isr| {
let (isr_br, isr_r) = *isr;
if isr_br == br { return Some(isr_r); }
}
return None;
}
}
fn check_item_types(ccx: @crate_ctxt, crate: @ast::crate) {
let visit = visit::mk_simple_visitor(@{
visit_item: |a| check_item(ccx, a),

View file

@ -722,5 +722,24 @@ impl infer_ctxt {
self.type_error_message(sp, mk_msg, a, Some(err));
}
fn replace_bound_regions_with_fresh_regions(
&self, span: span,
fty: &ty::FnTy) -> (ty::FnTy, isr_alist)
{
let {fn_ty, isr, _} =
replace_bound_regions_in_fn_ty(self.tcx, @Nil, None, fty, |br| {
// N.B.: The name of the bound region doesn't have anything to
// do with the region variable that's created for it. The
// only thing we're doing with `br` here is using it in the
// debug message.
let rvar = self.next_region_var_nb(span);
debug!("Bound region %s maps to %?",
bound_region_to_str(self.tcx, br),
rvar);
rvar
});
(fn_ty, isr)
}
}

View file

@ -290,7 +290,9 @@ fn super_args<C:combine>(
fn super_vstores<C:combine>(
self: &C, vk: ty::terr_vstore_kind,
a: ty::vstore, b: ty::vstore) -> cres<ty::vstore> {
a: ty::vstore, b: ty::vstore) -> cres<ty::vstore>
{
debug!("%s.super_vstores(a=%?, b=%?)", self.tag(), a, b);
match (a, b) {
(ty::vstore_slice(a_r), ty::vstore_slice(b_r)) => {
@ -517,4 +519,3 @@ fn super_tys<C:combine>(
_ => Err(ty::terr_sorts(expected_found(self, a, b)))
}
}

View file

@ -3,6 +3,8 @@ use lattice::*;
use to_str::ToStr;
use syntax::ast::{Many, Once};
fn macros() { include!("macros.rs"); } // FIXME(#3114): Macro import/export.
enum Lub = combine_fields; // "subtype", "subregion" etc
impl Lub: combine {
@ -102,6 +104,100 @@ impl Lub: combine {
}
}
fn fns(a: &ty::FnTy, b: &ty::FnTy) -> cres<ty::FnTy> {
// Note: this is a subtle algorithm. For a full explanation,
// please see the large comment in `region_inference.rs`.
// Take a snapshot. We'll never roll this back, but in later
// phases we do want to be able to examine "all bindings that
// were created as part of this type comparison", and making a
// snapshot is a convenient way to do that.
let snapshot = self.infcx.region_vars.start_snapshot();
// Instantiate each bound region with a fresh region variable.
let (a_with_fresh, a_isr) =
self.infcx.replace_bound_regions_with_fresh_regions(
self.span, a);
let (b_with_fresh, _) =
self.infcx.replace_bound_regions_with_fresh_regions(
self.span, b);
// Collect constraints.
let fn_ty0 = if_ok!(super_fns(&self, &a_with_fresh, &b_with_fresh));
debug!("fn_ty0 = %s", fn_ty0.to_str(self.infcx));
// Generalize the regions appearing in fn_ty0 if possible
let new_vars =
self.infcx.region_vars.vars_created_since_snapshot(snapshot);
let fn_ty1 =
ty::apply_op_on_t_to_ty_fn(
self.infcx.tcx, &fn_ty0,
|t| ty::fold_regions(
self.infcx.tcx, t,
|r, _in_fn| generalize_region(&self, snapshot,
new_vars, a_isr, r)));
return Ok(move fn_ty1);
fn generalize_region(self: &Lub,
snapshot: uint,
new_vars: &[RegionVid],
a_isr: isr_alist,
r0: ty::Region) -> ty::Region {
// Regions that pre-dated the LUB computation stay as they are.
if !is_new_var(new_vars, r0) {
debug!("generalize_region(r0=%?): not new variable", r0);
return r0;
}
let tainted = self.infcx.region_vars.tainted(snapshot, r0);
// Variables created during LUB computation which are
// *related* to regions that pre-date the LUB computation
// stay as they are.
if !tainted.all(|r| is_new_var(new_vars, *r)) {
debug!("generalize_region(r0=%?): \
non-new-variables found in %?",
r0, tainted);
return r0;
}
// Otherwise, the variable must be associated with at
// least one of the variables representing bound regions
// in both A and B. Replace the variable with the "first"
// bound region from A that we find it to be associated
// with.
for list::each(a_isr) |pair| {
let (a_br, a_r) = *pair;
if tainted.contains(&a_r) {
debug!("generalize_region(r0=%?): \
replacing with %?, tainted=%?",
r0, a_br, tainted);
return ty::re_bound(a_br);
}
}
self.infcx.tcx.sess.span_bug(
self.span,
fmt!("Region %? is not associated with \
any bound region from A!", r0));
}
fn is_new_var(new_vars: &[RegionVid], r: ty::Region) -> bool {
match r {
ty::re_infer(ty::ReVar(ref v)) => new_vars.contains(v),
_ => false
}
}
}
fn fn_metas(a: &ty::FnMeta, b: &ty::FnMeta) -> cres<ty::FnMeta> {
super_fn_metas(&self, a, b)
}
fn fn_sigs(a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
super_fn_sigs(&self, a, b)
}
// Traits please (FIXME: #2794):
fn tys(a: ty::t, b: ty::t) -> cres<ty::t> {
@ -125,18 +221,6 @@ impl Lub: combine {
super_args(&self, a, b)
}
fn fns(a: &ty::FnTy, b: &ty::FnTy) -> cres<ty::FnTy> {
super_fns(&self, a, b)
}
fn fn_metas(a: &ty::FnMeta, b: &ty::FnMeta) -> cres<ty::FnMeta> {
super_fn_metas(&self, a, b)
}
fn fn_sigs(a: &ty::FnSig, b: &ty::FnSig) -> cres<ty::FnSig> {
super_fn_sigs(&self, a, b)
}
fn substs(did: ast::def_id,
as_: &ty::substs,
bs: &ty::substs) -> cres<ty::substs> {

View file

@ -320,6 +320,124 @@ set of all things reachable from a skolemized variable `x`.
step at which the skolemization was performed. So this case here
would fail because `&x` was created alone, but is relatable to `&A`.
## Computing the LUB and GLB
The paper I pointed you at is written for Haskell. It therefore does
not consider subtyping, and in particular does not consider LUB or GLB
computation. We have to consider this. Here is the algorithm I
implemented.
### LUB
The LUB algorithm proceeds in three steps:
1. Replace all bound regions (on both sides) with fresh region
inference variables.
2. Compute the LUB "as normal", meaning compute the GLB of each
pair of argument types and the LUB of the return types and
so forth. Combine those into a new function type `F`.
3. Map the regions appearing in `F` using the procedure described below.
For each region `R` that appears in `F`, we may need to replace it
with a bound region. Let `V` be the set of fresh variables created as
part of the LUB procedure (either in step 1 or step 2). You may be
wondering how variables can be created in step 2. The answer is that
when we are asked to compute the LUB or GLB of two region variables,
we do so by producing a new region variable that is related to those
two variables. That is, the LUB of two variables `$x` and `$y` is a
fresh variable `$z` that is constrained such that `$x <= $z` and `$y
<= $z`.
To decide how to replace a region `R`, we must examine `Tainted(R)`.
This function searches through the constraints which were generated
when computing the bounds of all the argument and return types and
produces a list of all regions to which `R` is related, directly or
indirectly.
If `R` is not in `V` or `Tainted(R)` contains any region that is not
in `V`, then `R` is not replaced (that is, `R` is mapped to itself).
Otherwise, if `Tainted(R)` is a subset of `V`, then we select the
earliest variable in `Tainted(R)` that originates from the left-hand
side and replace `R` with a bound version of that variable.
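To make this replacement rule concrete, here is a small self-contained sketch of the decision for a single region. The types, set arguments, and `lub_replace` itself are toy stand-ins invented for illustration; the code this commit actually adds is the `generalize_region` helper inside `Lub::fns` shown earlier in the diff.

```rust
// Illustrative sketch only: Region, the sets, and lub_replace are toy
// stand-ins, not rustc internals.
use std::collections::HashSet;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Region {
    Var(u32),    // an inference variable such as $g
    Bound(u32),  // a bound region such as &a
    Free(u32),   // a free region such as &A
}

fn lub_replace(
    r: Region,
    new_vars: &HashSet<Region>,   // V: variables created during the LUB
    tainted: &[Region],           // Tainted(r)
    lhs_pairs: &[(u32, Region)],  // (bound region id from A, its fresh variable)
) -> Region {
    // R is not in V: a pre-existing region stays as it is.
    if !new_vars.contains(&r) {
        return r;
    }
    // Tainted(R) mentions something outside V: leave R alone.
    if tainted.iter().any(|t| !new_vars.contains(t)) {
        return r;
    }
    // Otherwise map R to the first bound region from the LHS whose
    // fresh variable appears in Tainted(R).
    for &(br, var) in lhs_pairs {
        if tainted.contains(&var) {
            return Region::Bound(br);
        }
    }
    // The commit's generalize_region treats this case as a compiler bug.
    panic!("{:?} is not associated with any bound region from A", r)
}
```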
So, let's work through the simplest example: `fn(&A)` and `fn(&a)`.
In this case, `&a` will be replaced with `$a` (the $ indicates an
inference variable) which will be linked to the free region `&A`, and
hence `V = { $a }` and `Tainted($a) = { $a, &A }`. Since `&A` is not a
member of `V`, we leave `$a` as is. When region inference happens,
`$a` will be resolved to `&A`, as we wanted.
Let's look at a more complex one: `fn(&a, &b)` and `fn(&x, &x)`.
In this case, we'll end up with a graph that looks like:
```
     $a        $b     *--$x
       \        \    /  /
        \        $h-*  /
         $g-----------*
```
Here `$g` and `$h` are fresh variables that are created to represent
the LUB/GLB of things requiring inference. This means that `V` and
`Tainted` will look like:
```
V = {$a, $b, $x}
Tainted($g) = Tainted($h) = { $a, $b, $h, $x }
```
Therefore we replace both `$g` and `$h` with `$a`, and end up
with the type `fn(&a, &a)`.
### GLB
The procedure for computing the GLB is similar. The difference lies
in computing the replacements for the various variables. For each
region `R` that appears in the type `F`, we again compute `Tainted(R)`
and examine the results:
1. If `Tainted(R) = {R}` is a singleton set, replace `R` with itself.
2. Else, if `Tainted(R)` contains only variables in `V`, and it
contains exactly one variable from the LHS and one variable from
the RHS, then `R` can be mapped to the bound version of the
variable from the LHS.
3. Else, `R` is mapped to a fresh bound variable.
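Purely as an illustration of these three rules (this is not code from the commit), the same kind of toy sketch, with stand-in types and a placeholder `glb_replace`, looks like:

```rust
// Illustrative sketch only; not rustc code.
use std::collections::HashSet;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Region { Var(u32), Bound(u32) }

fn glb_replace(
    r: Region,
    new_vars: &HashSet<Region>,  // V
    tainted: &[Region],          // Tainted(r)
    lhs_vars: &HashSet<Region>,  // fresh variables standing for bound regions of the LHS
    rhs_vars: &HashSet<Region>,  // fresh variables standing for bound regions of the RHS
    next_bound: &mut u32,        // counter used to invent fresh bound regions
) -> Region {
    // Rule 1: Tainted(R) = {R}, so R is unrelated to anything else.
    if tainted.len() == 1 && tainted[0] == r {
        return r;
    }
    let all_in_v = tainted.iter().all(|t| new_vars.contains(t));
    let lhs_hits: Vec<Region> =
        tainted.iter().copied().filter(|t| lhs_vars.contains(t)).collect();
    let rhs_hits: Vec<Region> =
        tainted.iter().copied().filter(|t| rhs_vars.contains(t)).collect();
    // Rule 2: exactly one variable from each side, all within V: use the
    // bound version of the LHS variable (here we simply reuse its id).
    if all_in_v && lhs_hits.len() == 1 && rhs_hits.len() == 1 {
        if let Region::Var(id) = lhs_hits[0] {
            return Region::Bound(id);
        }
    }
    // Rule 3: otherwise invent a fresh bound region.
    *next_bound += 1;
    Region::Bound(*next_bound)
}
```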
These rules are pretty complex. Let's look at some examples to see
how they play out.
Our first example was `fn(&a)` and `fn(&X)`---in
this case, the LUB will be a variable `$g`, and `Tainted($g) =
{$g, $a, &X}`. By these rules, we'll replace `$g` with a fresh bound
variable, so the result is `fn(&z)`, which is fine.
The next example is `fn(&A)` and `fn(&Z)`. XXX
The next example is `fn(&a, &b)` and `fn(&x, &x)`. In this case, as
before, we'll end up with `F=fn(&g, &h)` where `Tainted($g) =
Tainted($h) = {$g, $a, $b, $x}`. This means that we'll select fresh
bound variables `g` and `h` and wind up with `fn(&g, &h)`.
For the last example, let's consider what may seem trivial, but is
not: `fn(&a, &a)` and `fn(&x, &x)`. In this case, we'll get `F=fn(&g,
&h)` where `Tainted($g) = {$g, $a, $x}` and `Tainted($h) = {$h, $a,
$x}`. Both of these sets contain exactly one bound variable from each
side, so we'll map them both to `&a`, resulting in `fn(&a, &a)`.
Hooray!
### Why are these correct?
You may be wondering whether this algorithm is correct. So am I. But
I believe it is. (Justification forthcoming, haven't had time to
write it)
*/
#[warn(deprecated_mode)];
@ -448,7 +566,6 @@ type CombineMap = HashMap<TwoRegions, RegionVid>;
struct RegionVarBindings {
tcx: ty::ctxt,
var_spans: DVec<span>,
values: Cell<~[Region]>,
constraints: HashMap<Constraint, span>,
lubs: CombineMap,
glbs: CombineMap,
@ -462,7 +579,12 @@ struct RegionVarBindings {
// actively snapshotting. The reason for this is that otherwise
// we end up adding entries for things like the lower bound on
// a variable and so forth, which can never be rolled back.
mut undo_log: ~[UndoLogEntry]
mut undo_log: ~[UndoLogEntry],
// This contains the results of inference. It begins as an empty
// cell and only acquires a value after inference is complete.
// We use a cell vs a mutable option to circumvent borrowck errors.
values: Cell<~[GraphNodeValue]>,
}
fn RegionVarBindings(tcx: ty::ctxt) -> RegionVarBindings {
@ -646,7 +768,37 @@ impl RegionVarBindings {
been computed!"));
}
self.values.with_ref(|values| values[*rid])
let v = self.values.with_ref(|values| values[*rid]);
match v {
Value(r) => r,
NoValue => {
// No constraints, report an error. It is plausible
// that we could select an arbitrary region here
// instead. At the moment I am not doing this because
// this generally masks bugs in the inference
// algorithm, and given our syntax one cannot generally
// create a lifetime variable that isn't
// used in some type, and hence all lifetime variables
// should ultimately have some bounds.
self.tcx.sess.span_err(
self.var_spans[*rid],
fmt!("Unconstrained region variable #%u", *rid));
// Touch of a hack: to suppress duplicate messages,
// replace the NoValue entry with ErrorValue.
let mut values = self.values.take();
values[*rid] = ErrorValue;
self.values.put_back(move values);
re_static
}
ErrorValue => {
// An error that has previously been reported.
re_static
}
}
}
fn combine_vars(&self,
@ -676,14 +828,25 @@ impl RegionVarBindings {
}
}
fn vars_created_since_snapshot(&self, snapshot: uint) -> ~[RegionVid] {
do vec::build |push| {
for uint::range(snapshot, self.undo_log.len()) |i| {
match self.undo_log[i] {
AddVar(vid) => push(vid),
_ => ()
}
}
}
}
fn tainted(&self, snapshot: uint, r0: Region) -> ~[Region] {
/*!
*
* Computes all regions that have been related to `r0` in any
* way since the snapshot `snapshot` was taken---excluding
* `r0` itself and any region variables added as part of the
* snapshot. This is used when checking whether skolemized
* regions are being improperly related to other regions.
* way since the snapshot `snapshot` was taken---`r0` itself
* will be the first entry. This is used when checking whether
* skolemized regions are being improperly related to other
* regions.
*/
debug!("tainted(snapshot=%u, r0=%?)", snapshot, r0);
@ -691,16 +854,6 @@ impl RegionVarBindings {
let undo_len = self.undo_log.len();
// collect variables added since the snapshot was taken
let new_vars = do vec::build |push| {
for uint::range(snapshot, undo_len) |i| {
match self.undo_log[i] {
AddVar(vid) => push(vid),
_ => ()
}
}
};
// `result_set` acts as a worklist: we explore all outgoing
// edges and add any new regions we find to result_set. This
// is not a terribly efficient implementation.
@ -746,15 +899,6 @@ impl RegionVarBindings {
result_index += 1;
}
// Drop `r0` itself and any region variables that were created
// since the snapshot.
result_set.retain(|r| {
match *r {
re_infer(ReVar(ref vid)) => !new_vars.contains(vid),
_ => *r != r0
}
});
return result_set;
fn consider_adding_edge(+result_set: ~[Region],
@ -991,11 +1135,11 @@ fn TwoRegionsMap() -> TwoRegionsMap {
}
impl RegionVarBindings {
fn infer_variable_values(&self) -> ~[Region] {
fn infer_variable_values(&self) -> ~[GraphNodeValue] {
let graph = self.construct_graph();
self.expansion(&graph);
self.contraction(&graph);
self.extract_regions_and_report_errors(&graph)
self.extract_values_and_report_conflicts(&graph)
}
fn construct_graph(&self) -> Graph {
@ -1231,34 +1375,60 @@ impl RegionVarBindings {
debug!("---- %s Complete after %u iteration(s)", tag, iteration);
}
fn extract_regions_and_report_errors(&self, graph: &Graph) -> ~[Region] {
fn extract_values_and_report_conflicts(
&self,
graph: &Graph) -> ~[GraphNodeValue]
{
let dup_map = TwoRegionsMap();
graph.nodes.mapi(|idx, node| {
match node.value {
Value(v) => v,
NoValue => {
self.tcx.sess.span_err(
node.span,
fmt!("Unconstrained region variable #%u", idx));
re_static
}
ErrorValue => {
let node_vid = RegionVid(idx);
match node.classification {
Expanding => {
self.report_error_for_expanding_node(
graph, dup_map, node_vid);
}
Contracting => {
self.report_error_for_contracting_node(
graph, dup_map, node_vid);
}
Value(_) => {
/* Inference successful */
}
NoValue => {
/* Unconstrained inference: do not report an error
until the value of this variable is requested.
After all, sometimes we make region variables but never
really use their values. */
}
ErrorValue => {
/* Inference impossible, this value contains
inconsistent constraints.
I think that in this case we should report an
error now---unlike the case above, we can't
wait to see whether the user needs the result
of this variable. The reason is that the mere
existence of this variable implies that the
region graph is inconsistent, whether or not it
is used.
For example, we may have created a region
variable that is the GLB of two other regions
which do not have a GLB. Even if that variable
is not used, it implies that those two regions
*should* have a GLB.
At least I think this is true. It may be that
the mere existence of a conflict in a region variable
that is not used is not a problem, so if this rule
starts to create problems we'll have to revisit
this portion of the code and think hard about it. =) */
let node_vid = RegionVid(idx);
match node.classification {
Expanding => {
self.report_error_for_expanding_node(
graph, dup_map, node_vid);
}
Contracting => {
self.report_error_for_contracting_node(
graph, dup_map, node_vid);
}
}
}
re_static
}
}
node.value
})
}

View file

@ -125,8 +125,8 @@ impl Sub: combine {
// as-is, we need to do some extra work here in order to make sure
// that function subtyping works correctly with respect to regions
//
// A rather detailed discussion of what's going on here can be
// found in the region_inference.rs module.
// Note: this is a subtle algorithm. For a full explanation,
// please see the large comment in `region_inference.rs`.
// Take a snapshot. We'll never roll this back, but in later
// phases we do want to be able to examine "all bindings that
@ -136,20 +136,9 @@ impl Sub: combine {
// First, we instantiate each bound region in the subtype with a fresh
// region variable.
let {fn_ty: a_fn_ty, _} = {
do replace_bound_regions_in_fn_ty(self.infcx.tcx, @Nil,
None, a) |br| {
// N.B.: The name of the bound region doesn't have
// anything to do with the region variable that's created
// for it. The only thing we're doing with `br` here is
// using it in the debug message.
let rvar = self.infcx.next_region_var_nb(self.span);
debug!("Bound region %s maps to %?",
bound_region_to_str(self.infcx.tcx, br),
rvar);
rvar
}
};
let (a_fn_ty, _) =
self.infcx.replace_bound_regions_with_fresh_regions(
self.span, a);
// Second, we instantiate each bound region in the supertype with a
// fresh concrete region.
@ -172,10 +161,23 @@ impl Sub: combine {
// Presuming type comparison succeeds, we need to check
// that the skolemized regions do not "leak".
let new_vars =
self.infcx.region_vars.vars_created_since_snapshot(snapshot);
for list::each(skol_isr) |pair| {
let (skol_br, skol) = *pair;
let tainted = self.infcx.region_vars.tainted(snapshot, skol);
for tainted.each |tainted_region| {
// Each skolemized should only be relatable to itself
// or new variables:
match *tainted_region {
ty::re_infer(ty::ReVar(ref vid)) => {
if new_vars.contains(vid) { loop; }
}
_ => {
if *tainted_region == skol { loop; }
}
};
// A is not as polymorphic as B:
if self.a_is_expected {
return Err(ty::terr_regions_insufficiently_polymorphic(

View file

@ -0,0 +1,301 @@
/**
# Standalone Tests for the Inference Module
Note: This module is only compiled when doing unit testing.
*/
use std::getopts;
use std::map::HashMap;
use std::getopts::{opt_present};
use std::getopts::groups;
use std::getopts::groups::{optopt, optmulti, optflag, optflagopt, getopts};
use driver::driver::{optgroups, build_session_options, build_session,
str_input, build_configuration};
use driver::diagnostic;
use syntax::{ast, attr, parse};
use syntax::parse::parse_crate_from_source_str;
use middle::lang_items::{LanguageItems, language_items};
use util::ppaux::ty_to_str;
use syntax::ast_util::dummy_sp;
use middle::ty::{FnTyBase, FnMeta, FnSig};
struct Env {
crate: @ast::crate,
tcx: ty::ctxt,
infcx: infer::infer_ctxt
}
struct RH {
id: ast::node_id,
sub: &[RH]
}
fn setup_env(test_name: &str, source_string: &str) -> Env {
let matches = getopts(~[~"-Z", ~"verbose"], optgroups()).get();
let sessopts = build_session_options(~"rustc", matches, diagnostic::emit);
let sess = build_session(sessopts, diagnostic::emit);
let cfg = build_configuration(sess, ~"whatever", str_input(~""));
let dm = HashMap();
let amap = HashMap();
let freevars = HashMap();
let region_paramd_items = HashMap();
let region_map = HashMap();
let lang_items = language_items::make();
let parse_sess = parse::new_parse_sess(None);
let crate = parse_crate_from_source_str(
test_name.to_str(), @source_string.to_str(),
cfg, parse_sess);
let tcx = ty::mk_ctxt(sess, dm, amap, freevars, region_map,
region_paramd_items, move lang_items, crate);
let infcx = infer::new_infer_ctxt(tcx);
return Env { crate: crate, tcx: tcx, infcx: infcx };
}
impl Env {
fn create_region_hierarchy(&self, rh: &RH) {
for rh.sub.each |child_rh| {
self.create_region_hierarchy(child_rh);
self.tcx.region_map.insert(child_rh.id, rh.id);
}
}
fn create_simple_region_hierarchy(&self) {
// creates a region hierarchy where 1 is root, 10 and 11 are
// children of 1, etc
self.create_region_hierarchy(
&RH {id: 1,
sub: &[RH {id: 10,
sub: &[]},
RH {id: 11,
sub: &[]}]});
}
fn lookup_item(&self, names: &[~str]) -> ast::node_id {
return match search_mod(self, &self.crate.node.module, 0, names) {
Some(id) => id,
None => {
fail fmt!("No item found: `%s`", str::connect(names, "::"));
}
};
fn search_mod(self: &Env,
m: &ast::_mod,
idx: uint,
names: &[~str]) -> Option<ast::node_id> {
assert idx < names.len();
for m.items.each |item| {
if self.tcx.sess.str_of(item.ident) == names[idx] {
return search(self, *item, idx+1, names);
}
}
return None;
}
fn search(self: &Env,
it: @ast::item,
idx: uint,
names: &[~str]) -> Option<ast::node_id> {
if idx == names.len() {
return Some(it.id);
}
return match it.node {
ast::item_const(*) | ast::item_fn(*) |
ast::item_foreign_mod(*) | ast::item_ty(*) => {
None
}
ast::item_enum(*) | ast::item_class(*) |
ast::item_trait(*) | ast::item_impl(*) |
ast::item_mac(*) => {
None
}
ast::item_mod(ref m) => {
search_mod(self, m, idx, names)
}
};
}
}
fn is_subtype(&self, a: ty::t, b: ty::t) -> bool {
match infer::can_mk_subty(self.infcx, a, b) {
Ok(_) => true,
Err(_) => false
}
}
fn assert_subtype(&self, a: ty::t, b: ty::t) {
if !self.is_subtype(a, b) {
fail fmt!("%s is not a subtype of %s, but it should be",
self.ty_to_str(a),
self.ty_to_str(b));
}
}
fn assert_not_subtype(&self, a: ty::t, b: ty::t) {
if self.is_subtype(a, b) {
fail fmt!("%s is a subtype of %s, but it shouldn't be",
self.ty_to_str(a),
self.ty_to_str(b));
}
}
fn assert_strict_subtype(&self, a: ty::t, b: ty::t) {
self.assert_subtype(a, b);
self.assert_not_subtype(b, a);
}
fn assert_eq(&self, a: ty::t, b: ty::t) {
self.assert_subtype(a, b);
self.assert_subtype(b, a);
}
fn ty_to_str(&self, a: ty::t) -> ~str {
ty_to_str(self.tcx, a)
}
fn t_fn(&self, input_tys: &[ty::t], output_ty: ty::t) -> ty::t {
let inputs = input_tys.map(|t| {mode: ast::expl(ast::by_copy),
ty: *t});
ty::mk_fn(self.tcx, FnTyBase {
meta: FnMeta {purity: ast::impure_fn,
proto: ast::ProtoBare,
onceness: ast::Many,
region: ty::re_static,
bounds: @~[],
ret_style: ast::return_val},
sig: FnSig {inputs: move inputs,
output: output_ty}
})
}
fn t_int(&self) -> ty::t {
ty::mk_int(self.tcx)
}
fn t_rptr_bound(&self, id: uint) -> ty::t {
ty::mk_imm_rptr(self.tcx, ty::re_bound(ty::br_anon(id)), self.t_int())
}
fn t_rptr_scope(&self, id: ast::node_id) -> ty::t {
ty::mk_imm_rptr(self.tcx, ty::re_scope(id), self.t_int())
}
fn t_rptr_free(&self, nid: ast::node_id, id: uint) -> ty::t {
ty::mk_imm_rptr(self.tcx, ty::re_free(nid, ty::br_anon(id)),
self.t_int())
}
fn t_rptr_static(&self) -> ty::t {
ty::mk_imm_rptr(self.tcx, ty::re_static, self.t_int())
}
fn lub() -> Lub { Lub(self.infcx.combine_fields(true, dummy_sp())) }
/// Checks that `LUB(t1,t2) == t_lub`
fn check_lub(&self, t1: ty::t, t2: ty::t, t_lub: ty::t) {
match self.lub().tys(t1, t2) {
Err(e) => {
fail fmt!("Unexpected error computing LUB: %?", e)
}
Ok(t) => {
self.assert_eq(t, t_lub);
// sanity check for good measure:
self.assert_subtype(t1, t);
self.assert_subtype(t2, t);
}
}
}
/// Checks that `LUB(t1,t2)` is undefined
fn check_no_lub(&self, t1: ty::t, t2: ty::t) {
match self.lub().tys(t1, t2) {
Err(_) => {}
Ok(t) => {
fail fmt!("Unexpected success computing LUB: %?",
self.ty_to_str(t))
}
}
}
}
#[test]
fn contravariant_region_ptr() {
let env = setup_env("contravariant_region_ptr", "");
env.create_simple_region_hierarchy();
let t_rptr1 = env.t_rptr_scope(1);
let t_rptr10 = env.t_rptr_scope(10);
env.assert_eq(t_rptr1, t_rptr1);
env.assert_eq(t_rptr10, t_rptr10);
env.assert_strict_subtype(t_rptr1, t_rptr10);
}
#[test]
fn lub_bound_bound() {
let env = setup_env("lub_bound_bound", "");
let t_rptr_bound1 = env.t_rptr_bound(1);
let t_rptr_bound2 = env.t_rptr_bound(2);
env.check_lub(env.t_fn([t_rptr_bound1], env.t_int()),
env.t_fn([t_rptr_bound2], env.t_int()),
env.t_fn([t_rptr_bound1], env.t_int()));
}
#[test]
fn lub_bound_free() {
let env = setup_env("lub_bound_free", "");
let t_rptr_bound1 = env.t_rptr_bound(1);
let t_rptr_free1 = env.t_rptr_free(0, 1);
env.check_lub(env.t_fn([t_rptr_bound1], env.t_int()),
env.t_fn([t_rptr_free1], env.t_int()),
env.t_fn([t_rptr_free1], env.t_int()));
}
#[test]
fn lub_bound_static() {
let env = setup_env("lub_bound_static", "");
let t_rptr_bound1 = env.t_rptr_bound(1);
let t_rptr_static = env.t_rptr_static();
env.check_lub(env.t_fn([t_rptr_bound1], env.t_int()),
env.t_fn([t_rptr_static], env.t_int()),
env.t_fn([t_rptr_static], env.t_int()));
}
#[test]
fn lub_bound_bound_inverse_order() {
let env = setup_env("lub_bound_bound_inverse_order", "");
let t_rptr_bound1 = env.t_rptr_bound(1);
let t_rptr_bound2 = env.t_rptr_bound(2);
env.check_lub(env.t_fn([t_rptr_bound1, t_rptr_bound2], t_rptr_bound1),
env.t_fn([t_rptr_bound2, t_rptr_bound1], t_rptr_bound1),
env.t_fn([t_rptr_bound1, t_rptr_bound1], t_rptr_bound1));
}
#[test]
fn lub_free_free() {
let env = setup_env("lub_free_free", "");
let t_rptr_free1 = env.t_rptr_free(0, 1);
let t_rptr_free2 = env.t_rptr_free(0, 2);
let t_rptr_static = env.t_rptr_static();
env.check_lub(env.t_fn([t_rptr_free1], env.t_int()),
env.t_fn([t_rptr_free2], env.t_int()),
env.t_fn([t_rptr_static], env.t_int()));
}
#[test]
fn lub_returning_scope() {
let env = setup_env("lub_returning_scope", "");
let t_rptr_scope10 = env.t_rptr_scope(10);
let t_rptr_scope11 = env.t_rptr_scope(11);
env.check_no_lub(env.t_fn([], t_rptr_scope10),
env.t_fn([], t_rptr_scope11));
}
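As an editorial illustration of how the new infrastructure can grow (this test is not part of the commit), a hypothetical extra case uses the same `Env` helpers to assert a subtyping direction directly. It is consistent with `lub_bound_free` above, where `LUB(fn(&bound), fn(&free)) = fn(&free)`, which implies `fn(&bound) <: fn(&free)` and not the reverse:

```
#[test]
fn sub_bound_free() {
    // Hypothetical example, not in this commit: a fn over a bound
    // (polymorphic) region should be a strict subtype of a fn over a
    // specific free region.
    let env = setup_env("sub_bound_free", "");
    let t_rptr_bound1 = env.t_rptr_bound(1);
    let t_rptr_free1 = env.t_rptr_free(0, 1);
    env.assert_strict_subtype(env.t_fn([t_rptr_bound1], env.t_int()),
                              env.t_fn([t_rptr_free1], env.t_int()));
}
```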

View file

@ -143,6 +143,9 @@ mod middle {
mod to_str;
#[legacy_exports]
mod unify;
#[cfg(test)]
#[legacy_exports]
mod test;
}
#[legacy_exports]
mod collect;

View file

@ -1,21 +1,11 @@
// Should fail region checking, because g can only accept a pointer
// with lifetime r, and a is a pointer with unspecified lifetime.
fn not_ok_1(a: &uint) {
let mut g: fn@(x: &uint) = fn@(x: &r/uint) {};
// Before fn subtyping was properly implemented,
// we reported errors in this case:
fn not_ok(a: &uint, b: &b/uint) {
let mut g: fn@(x: &uint) = fn@(x: &b/uint) {};
//~^ ERROR mismatched types
g(a);
}
// Should fail region checking, because g can only accept a pointer
// with lifetime r, and a is a pointer with lifetime s.
fn not_ok_2(s: &s/uint)
{
let mut g: fn@(x: &uint) = fn@(x: &r/uint) {};
//~^ ERROR mismatched types
g(s);
}
fn main() {
}

View file

@ -0,0 +1,13 @@
// Before fn subtyping was properly implemented,
// we reported errors in this case:
fn ok(a: &uint) {
// Here &r is an alias for &:
let mut g: fn@(x: &uint) = fn@(x: &r/uint) {};
g(a);
}
fn main() {
}