1 // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 use rustc::middle::allocator::AllocatorKind;
14 use syntax::ast::{Crate, Attribute, LitKind, StrStyle, ExprKind};
15 use syntax::ast::{Unsafety, Constness, Generics, Mutability, Ty, Mac, Arg};
16 use syntax::ast::{self, Ident, Item, ItemKind, TyKind, Visibility, Expr};
18 use syntax::codemap::dummy_spanned;
19 use syntax::codemap::{ExpnInfo, NameAndSpan, MacroAttribute};
20 use syntax::ext::base::ExtCtxt;
21 use syntax::ext::base::Resolver;
22 use syntax::ext::build::AstBuilder;
23 use syntax::ext::expand::ExpansionConfig;
24 use syntax::ext::hygiene::{Mark, SyntaxContext};
25 use syntax::fold::{self, Folder};
26 use syntax::parse::ParseSess;
28 use syntax::symbol::Symbol;
29 use syntax::util::small_vector::SmallVector;
30 use syntax_pos::{Span, DUMMY_SP};
32 use {AllocatorMethod, AllocatorTy, ALLOCATOR_METHODS};
// AST pass entry point: scans the crate for a `#[global_allocator]` static
// and expands it into a module of exported allocator ABI shim functions.
// NOTE(review): this extract is whitespace-mangled and every line carries the
// original file's line number as a text prefix; the embedded numbering skips
// (35 -> 37, then 38 -> 46) show that at least one parameter (the `Crate`
// being folded, implied by the `ast::Crate` return type) and the rest of the
// function body were dropped from this view.
34 pub fn modify(sess: &ParseSess,
35 resolver: &mut Resolver,
37 handler: &rustc_errors::Handler) -> ast::Crate {
// Constructs the folder defined below; presumably the truncated tail runs
// `fold_crate` on the input crate -- TODO confirm against upstream source.
38 ExpandAllocatorDirectives {
// Folder state for the `#[global_allocator]` expansion pass.
// NOTE(review): the embedded line numbers skip 46 -> 48 -> 50, so at least
// two fields are missing from this extract. `fold_item` below reads
// `self.sess` and enforces a "cannot define more than one" rule, suggesting
// the dropped fields are `sess` and a `found`-style flag -- confirm upstream.
46 struct ExpandAllocatorDirectives<'a> {
// Used to report the two span errors emitted by `fold_item`.
48 handler: &'a rustc_errors::Handler,
// Needed to build the `ExtCtxt` used for AST construction.
50 resolver: &'a mut Resolver,
53 impl<'a> Folder for ExpandAllocatorDirectives<'a> {
// Expands a `#[global_allocator]` static into itself plus a generated module
// of allocator ABI shims; every other item passes through untouched.
// NOTE(review): embedded line numbers skip throughout (55 -> 58, 64 -> 69,
// 112 -> 118, ...), so match arms, closing braces and several statements are
// missing from this extract -- the visible lines are annotated as-is.
54 fn fold_item(&mut self, item: P<Item>) -> SmallVector<P<Item>> {
// Only items carrying the `#[global_allocator]` attribute are expanded.
55 let name = if attr::contains_name(&item.attrs, "global_allocator") {
58 return fold::noop_fold_item(item, self)
// The attribute is only legal on a `static` item; anything else is an error.
61 ItemKind::Static(..) => {}
63 self.handler.span_err(item.span, "allocators must be statics");
64 return SmallVector::one(item)
// A second `#[global_allocator]` anywhere in the crate is rejected.
69 self.handler.span_err(item.span, "cannot define more than one \
70 #[global_allocator]");
71 return SmallVector::one(item)
// Fresh expansion mark: generated code gets its own hygiene context and is
// allowed to reference unstable internals (`allow_internal_unstable`).
75 let mark = Mark::fresh(Mark::root());
76 mark.set_expn_info(ExpnInfo {
79 format: MacroAttribute(Symbol::intern(name)),
81 allow_internal_unstable: true,
85 ctxt: SyntaxContext::empty().apply_mark(mark),
88 let ecfg = ExpansionConfig::default(name.to_string());
// Factory that emits one shim function per entry in ALLOCATOR_METHODS.
89 let mut f = AllocFnFactory {
91 kind: AllocatorKind::Global,
93 alloc: Ident::from_str("alloc"),
94 cx: ExtCtxt::new(self.sess, ecfg, self.resolver),
// `use super::<...>` so the generated module can name the allocator static
// from its parent scope.
96 let super_path = f.cx.path(f.span, vec![
97 Ident::from_str("super"),
// Module body: `extern crate alloc;`, the `use`, then one fn per method.
100 let mut items = vec![
101 f.cx.item_extern_crate(f.span, f.alloc),
102 f.cx.item_use_simple(f.span, Visibility::Inherited, super_path),
104 for method in ALLOCATOR_METHODS {
105 items.push(f.allocator_fn(method));
// Gensym'd module name so it cannot collide with user identifiers.
107 let name = f.kind.fn_name("allocator_abi");
108 let allocator_abi = Ident::with_empty_ctxt(Symbol::gensym(&name));
109 let module = f.cx.item_mod(span, span, allocator_abi, Vec::new(), items);
// Run the module through the expander so nested expansions are resolved.
110 let module = f.cx.monotonic_expander().fold_item(module).pop().unwrap();
// Returns the original static plus the generated module (tail truncated).
112 let mut ret = SmallVector::new();
// Standard no-op override: a folder that touches items must forward macros.
118 fn fold_mac(&mut self, mac: Mac) -> Mac {
119 fold::noop_fold_mac(mac, self)
123 struct AllocFnFactory<'a> {
131 impl<'a> AllocFnFactory<'a> {
// Builds one exported shim fn for `method`: raw-ABI parameters are produced
// by `arg_ty`, the real allocator is invoked via `call_allocator`, and the
// result is converted back to the raw ABI by `ret_ty`.
// NOTE(review): embedded numbering skips (133 -> 135, 136 -> 140, ...) --
// a fresh-name counter `i` used by the `mk` closure, several closing
// delimiters, and the trailing item fields are missing from this extract.
132 fn allocator_fn(&self, method: &AllocatorMethod) -> P<Item> {
// Raw (`usize` / `*mut u8`) parameters accumulated by arg_ty / ret_ty.
133 let mut abi_args = Vec::new();
// Generates fresh `arg0`, `arg1`, ... parameter names on demand.
135 let ref mut mk = || {
136 let name = Ident::from_str(&format!("arg{}", i));
140 let args = method.inputs.iter().map(|ty| {
141 self.arg_ty(ty, &mut abi_args, mk)
143 let result = self.call_allocator(method.name, args);
144 let (output_ty, output_expr) =
145 self.ret_ty(&method.output, &mut abi_args, mk, result);
146 let kind = ItemKind::Fn(self.cx.fn_decl(abi_args, output_ty),
148 dummy_spanned(Constness::NotConst),
151 self.cx.block_expr(output_expr));
// Item is named via the kind-specific ABI name (e.g. per-method fn_name) --
// TODO confirm exact naming against `AllocatorKind::fn_name`.
152 self.cx.item(self.span,
153 Ident::from_str(&self.kind.fn_name(method.name)),
// Builds the call `<...>::heap::Alloc::<method>(&mut &GLOBAL, args...)` --
// the trait method is invoked with the global static's reference as the
// receiver, prepended to the converted argument list.
// NOTE(review): the first path segment (original line 160) is missing from
// this extract; presumably `self.alloc` (the `alloc` crate ident), matching
// the `heap`-submodule paths used elsewhere in this factory -- confirm.
158 fn call_allocator(&self, method: &str, mut args: Vec<P<Expr>>) -> P<Expr> {
159 let method = self.cx.path(self.span, vec![
161 Ident::from_str("heap"),
162 Ident::from_str("Alloc"),
163 Ident::from_str(method),
165 let method = self.cx.expr_path(method);
// `&mut &GLOBAL`: shared borrow of the static, then a mutable borrow of
// that reference, so the `Alloc` impl on the reference type is used.
166 let allocator = self.cx.path_ident(self.span, self.global);
167 let allocator = self.cx.expr_path(allocator);
168 let allocator = self.cx.expr_addr_of(self.span, allocator);
169 let allocator = self.cx.expr_mut_addr_of(self.span, allocator);
170 args.insert(0, allocator);
172 self.cx.expr_call(self.span, method, args)
// Attributes for every generated shim: `#[linkage = "external"]` plus
// `#[no_mangle]`, so each fn is exported under its exact (unmangled) name.
// NOTE(review): the `vec![ ... ]` opener/closer around the two attribute
// expressions and the fn's closing brace are missing from this extract
// (embedded numbering skips 178 -> 180 and 181 -> 183).
175 fn attrs(&self) -> Vec<Attribute> {
176 let key = Symbol::intern("linkage");
177 let value = LitKind::Str(Symbol::intern("external"), StrStyle::Cooked);
178 let linkage = self.cx.meta_name_value(self.span, key, value);
180 let no_mangle = Symbol::intern("no_mangle");
181 let no_mangle = self.cx.meta_word(self.span, no_mangle);
183 self.cx.attribute(self.span, linkage),
184 self.cx.attribute(self.span, no_mangle),
// `arg_ty`: converts one typed allocator parameter into its raw-ABI form --
// pushes the raw parameter(s) onto the abi arg list and returns the
// expression that rebuilds the typed value for the real allocator call.
// NOTE(review): the signature's opening lines (originals 187-190, naming the
// `ty` being matched and the raw `args` vec) and the `match` header are
// missing from this extract; several closing delimiters are dropped too.
191 mut ident: &mut FnMut() -> Ident) -> P<Expr> {
// Layout crosses the ABI as two `usize`s (size, align) and is rebuilt with
// `Layout::from_size_align_unchecked(size, align)`.
193 AllocatorTy::Layout => {
194 let usize = self.cx.path_ident(self.span, Ident::from_str("usize"));
195 let ty_usize = self.cx.ty_path(usize);
198 args.push(self.cx.arg(self.span, size, ty_usize.clone()));
199 args.push(self.cx.arg(self.span, align, ty_usize));
201 let layout_new = self.cx.path(self.span, vec![
203 Ident::from_str("heap"),
204 Ident::from_str("Layout"),
205 Ident::from_str("from_size_align_unchecked"),
207 let layout_new = self.cx.expr_path(layout_new);
208 let size = self.cx.expr_ident(self.span, size);
209 let align = self.cx.expr_ident(self.span, align);
210 let layout = self.cx.expr_call(self.span,
// A `&Layout` arrives as a raw byte pointer and is rebuilt by cast + deref.
216 AllocatorTy::LayoutRef => {
218 args.push(self.cx.arg(self.span, ident, self.ptr_u8()));
220 // Convert our `arg: *const u8` via:
222 // &*(arg as *const Layout)
223 let expr = self.cx.expr_ident(self.span, ident);
224 let expr = self.cx.expr_cast(self.span, expr, self.layout_ptr());
225 let expr = self.cx.expr_deref(self.span, expr);
226 self.cx.expr_addr_of(self.span, expr)
// An `AllocErr` also arrives as a byte pointer; the pointed-to error is
// cloned out rather than moved.
229 AllocatorTy::AllocErr => {
232 // (*(arg as *const AllocErr)).clone()
234 args.push(self.cx.arg(self.span, ident, self.ptr_u8()));
235 let expr = self.cx.expr_ident(self.span, ident);
236 let expr = self.cx.expr_cast(self.span, expr, self.alloc_err_ptr());
237 let expr = self.cx.expr_deref(self.span, expr);
238 self.cx.expr_method_call(
241 Ident::from_str("clone"),
// Raw pointers pass straight through with no conversion.
246 AllocatorTy::Ptr => {
248 args.push(self.cx.arg(self.span, ident, self.ptr_u8()));
249 self.cx.expr_ident(self.span, ident)
// The remaining shapes are return-position only; hitting them here is a
// bug in the ALLOCATOR_METHODS table.
252 AllocatorTy::ResultPtr |
253 AllocatorTy::ResultExcess |
254 AllocatorTy::ResultUnit |
256 AllocatorTy::UsizePair |
257 AllocatorTy::Unit => {
258 panic!("can't convert AllocatorTy to an argument")
// `ret_ty`: converts the allocator method's typed output into the raw ABI --
// returns the shim's declared return type plus the expression that adapts
// the real allocator's result, pushing extra out-pointer parameters onto the
// abi arg list where needed.
// NOTE(review): the signature's opening lines (originals 262-265) and the
// `match` header are missing from this extract, along with many closing
// delimiters; the visible lines are annotated as-is.
266 mut ident: &mut FnMut() -> Ident,
267 expr: P<Expr>) -> (P<Ty>, P<Expr>)
// A (usize, usize) pair is returned through two `*mut usize` out-pointers;
// the shim itself returns unit.
270 AllocatorTy::UsizePair => {
280 args.push(self.cx.arg(self.span, min, self.ptr_usize()));
281 args.push(self.cx.arg(self.span, max, self.ptr_usize()));
// let pair = #expr; *min = pair.0; *max = pair.1;
284 let stmt = self.cx.stmt_let(self.span, false, ident, expr);
285 let min = self.cx.expr_ident(self.span, min);
286 let max = self.cx.expr_ident(self.span, max);
287 let layout = self.cx.expr_ident(self.span, ident);
288 let assign_min = self.cx.expr(self.span, ExprKind::Assign(
289 self.cx.expr_deref(self.span, min),
290 self.cx.expr_tup_field_access(self.span, layout.clone(), 0),
292 let assign_min = self.cx.stmt_semi(assign_min);
293 let assign_max = self.cx.expr(self.span, ExprKind::Assign(
294 self.cx.expr_deref(self.span, max),
295 self.cx.expr_tup_field_access(self.span, layout.clone(), 1),
297 let assign_max = self.cx.stmt_semi(assign_max);
299 let stmts = vec![stmt, assign_min, assign_max];
300 let block = self.cx.block(self.span, stmts);
301 let ty_unit = self.cx.ty(self.span, TyKind::Tup(Vec::new()));
302 (ty_unit, self.cx.expr_block(block))
// Result<(ptr, excess), AllocErr>: Ok writes the excess through a
// `*mut usize` out-pointer and returns the ptr; Err writes the error
// through a byte out-pointer and returns null.
305 AllocatorTy::ResultExcess => {
314 // ptr::write(err_ptr, e);
319 let excess_ptr = ident();
320 args.push(self.cx.arg(self.span, excess_ptr, self.ptr_usize()));
321 let excess_ptr = self.cx.expr_ident(self.span, excess_ptr);
323 let err_ptr = ident();
324 args.push(self.cx.arg(self.span, err_ptr, self.ptr_u8()));
325 let err_ptr = self.cx.expr_ident(self.span, err_ptr);
326 let err_ptr = self.cx.expr_cast(self.span,
328 self.alloc_err_ptr());
// Ok arm: `*excess_ptr = tuple.1; tuple.0`
332 let ptr = self.cx.expr_ident(self.span, name);
333 let write = self.cx.expr(self.span, ExprKind::Assign(
334 self.cx.expr_deref(self.span, excess_ptr),
335 self.cx.expr_tup_field_access(self.span, ptr.clone(), 1),
337 let write = self.cx.stmt_semi(write);
338 let ret = self.cx.expr_tup_field_access(self.span,
341 let ret = self.cx.stmt_expr(ret);
342 let block = self.cx.block(self.span, vec![write, ret]);
343 self.cx.expr_block(block)
345 let pat = self.cx.pat_ident(self.span, name);
346 let ok = self.cx.path_ident(self.span, Ident::from_str("Ok"));
347 let ok = self.cx.pat_tuple_struct(self.span, ok, vec![pat]);
348 let ok = self.cx.arm(self.span, vec![ok], ok_expr);
// Err arm: `ptr::write(err_ptr as *mut AllocErr, e); 0 as *mut u8`
352 let err = self.cx.expr_ident(self.span, name);
353 let write = self.cx.path(self.span, vec![
355 Ident::from_str("heap"),
356 Ident::from_str("__core"),
357 Ident::from_str("ptr"),
358 Ident::from_str("write"),
360 let write = self.cx.expr_path(write);
361 let write = self.cx.expr_call(self.span, write,
363 let write = self.cx.stmt_semi(write);
364 let null = self.cx.expr_usize(self.span, 0);
365 let null = self.cx.expr_cast(self.span, null, self.ptr_u8());
366 let null = self.cx.stmt_expr(null);
367 let block = self.cx.block(self.span, vec![write, null]);
368 self.cx.expr_block(block)
370 let pat = self.cx.pat_ident(self.span, name);
371 let err = self.cx.path_ident(self.span, Ident::from_str("Err"));
372 let err = self.cx.pat_tuple_struct(self.span, err, vec![pat]);
373 let err = self.cx.arm(self.span, vec![err], err_expr);
375 let expr = self.cx.expr_match(self.span, expr, vec![ok, err]);
376 (self.ptr_u8(), expr)
// Result<ptr, AllocErr>: same shape as above minus the excess out-pointer.
379 AllocatorTy::ResultPtr => {
385 // ptr::write(err_ptr, e);
390 let err_ptr = ident();
391 args.push(self.cx.arg(self.span, err_ptr, self.ptr_u8()));
392 let err_ptr = self.cx.expr_ident(self.span, err_ptr);
393 let err_ptr = self.cx.expr_cast(self.span,
395 self.alloc_err_ptr());
398 let ok_expr = self.cx.expr_ident(self.span, name);
399 let pat = self.cx.pat_ident(self.span, name);
400 let ok = self.cx.path_ident(self.span, Ident::from_str("Ok"));
401 let ok = self.cx.pat_tuple_struct(self.span, ok, vec![pat]);
402 let ok = self.cx.arm(self.span, vec![ok], ok_expr);
406 let err = self.cx.expr_ident(self.span, name);
407 let write = self.cx.path(self.span, vec![
409 Ident::from_str("heap"),
410 Ident::from_str("__core"),
411 Ident::from_str("ptr"),
412 Ident::from_str("write"),
414 let write = self.cx.expr_path(write);
415 let write = self.cx.expr_call(self.span, write,
417 let write = self.cx.stmt_semi(write);
418 let null = self.cx.expr_usize(self.span, 0);
419 let null = self.cx.expr_cast(self.span, null, self.ptr_u8());
420 let null = self.cx.stmt_expr(null);
421 let block = self.cx.block(self.span, vec![write, null]);
422 self.cx.expr_block(block)
424 let pat = self.cx.pat_ident(self.span, name);
425 let err = self.cx.path_ident(self.span, Ident::from_str("Err"));
426 let err = self.cx.pat_tuple_struct(self.span, err, vec![pat]);
427 let err = self.cx.arm(self.span, vec![err], err_expr);
429 let expr = self.cx.expr_match(self.span, expr, vec![ok, err]);
430 (self.ptr_u8(), expr)
// Result<(), _> is flattened to a `u8` success flag: `expr.is_ok() as u8`.
433 AllocatorTy::ResultUnit => {
436 // #expr.is_ok() as u8
438 let cast = self.cx.expr_method_call(
441 Ident::from_str("is_ok"),
444 let u8 = self.cx.path_ident(self.span, Ident::from_str("u8"));
445 let u8 = self.cx.ty_path(u8);
446 let cast = self.cx.expr_cast(self.span, cast, u8.clone());
// Diverging (`!`) and unit outputs need no conversion.
450 AllocatorTy::Bang => {
451 (self.cx.ty(self.span, TyKind::Never), expr)
454 AllocatorTy::Unit => {
455 (self.cx.ty(self.span, TyKind::Tup(Vec::new())), expr)
// The remaining shapes are argument-position only; hitting them here is a
// bug in the ALLOCATOR_METHODS table.
458 AllocatorTy::AllocErr |
459 AllocatorTy::Layout |
460 AllocatorTy::LayoutRef |
461 AllocatorTy::Ptr => {
462 panic!("can't convert AllocatorTy to an output")
467 fn ptr_u8(&self) -> P<Ty> {
468 let u8 = self.cx.path_ident(self.span, Ident::from_str("u8"));
469 let ty_u8 = self.cx.ty_path(u8);
470 self.cx.ty_ptr(self.span, ty_u8, Mutability::Mutable)
473 fn ptr_usize(&self) -> P<Ty> {
474 let usize = self.cx.path_ident(self.span, Ident::from_str("usize"));
475 let ty_usize = self.cx.ty_path(usize);
476 self.cx.ty_ptr(self.span, ty_usize, Mutability::Mutable)
479 fn layout_ptr(&self) -> P<Ty> {
480 let layout = self.cx.path(self.span, vec![
482 Ident::from_str("heap"),
483 Ident::from_str("Layout"),
485 let layout = self.cx.ty_path(layout);
486 self.cx.ty_ptr(self.span, layout, Mutability::Mutable)
489 fn alloc_err_ptr(&self) -> P<Ty> {
490 let err = self.cx.path(self.span, vec![
492 Ident::from_str("heap"),
493 Ident::from_str("AllocErr"),
495 let err = self.cx.ty_path(err);
496 self.cx.ty_ptr(self.span, err, Mutability::Mutable)