This probably needs a bit more work, but I wanted to try to capture some common use cases and be a bit more helpful.
r? @huonw
cc @steveklabnik
Aaron Turon <aturon@mozilla.com>
Aaron Weiss <aaronweiss74@gmail.com>
Adam Bozanich <adam.boz@gmail.com>
+Adam Jacob <adam@opscode.com>
Adam Roben <adam@roben.org>
Adam Szkoda <adaszko@gmail.com>
+Adenilson Cavalcanti <cavalcantii@gmail.com>
Adolfo Ochagavía <aochagavia92@gmail.com>
Adrien Brault <adrien.brault@gmail.com>
Adrien Tétar <adri-from-59@hotmail.fr>
-Ahmed Charles <acharles@outlook.com>
Ahmed Charles <ahmedcharles@gmail.com>
Aidan Cully <github@aidan.users.panix.com>
Aidan Hobson Sayers <aidanhs@cantab.net>
Akos Kiss <akiss@inf.u-szeged.hu>
Alan Andrade <alan.andradec@gmail.com>
+Alan Cutter <alancutter@chromium.org>
Alan Williams <mralert@gmail.com>
Aleksander Balicki <balicki.aleksander@gmail.com>
Aleksandr Koshlo <sash7ko@gmail.com>
Alex Rønne Petersen <alex@lycus.org>
Alex Whitney <aw1209@ic.ac.uk>
Alexander Bliskovsky <alexander.bliskovsky@gmail.com>
+Alexander Campbell <alexanderhcampbell@gmail.com>
+Alexander Chernyakhovsky <achernya@mit.edu>
Alexander Korolkov <alexander.korolkov@gmail.com>
Alexander Light <allight@cs.brown.edu>
Alexander Stavonin <a.stavonin@gmail.com>
Ali Smesseim <smesseim.ali@gmail.com>
Alisdair Owens <awo101@zepler.net>
Aljaž "g5pw" Srebrnič <a2piratesoft@gmail.com>
+Amol Mundayoor <amol.com@gmail.com>
Amy Unger <amy.e.unger@gmail.com>
Anders Kaseorg <andersk@mit.edu>
Andre Arko <andre@arko.net>
Andrew Chin <achin@eminence32.net>
Andrew Dunham <andrew@du.nham.ca>
Andrew Gallant <jamslam@gmail.com>
+Andrew Hobden <andrew@hoverbear.org>
Andrew Paseltiner <apaseltiner@gmail.com>
Andrew Poelstra <asp11@sfu.ca>
Andrew Wagner <drewm1980@gmail.com>
Brendan Zabarauskas <bjzaba@yahoo.com.au>
Brett Cannon <brett@python.org>
Brian Anderson <banderson@mozilla.com>
+Brian Brooks <brooks.brian@gmail.com>
+Brian Campbell <lambda@continuation.org>
Brian Dawn <brian.t.dawn@gmail.com>
Brian J Brennan <brianloveswords@gmail.com>
Brian J. Burg <burg@cs.washington.edu>
Caitlin Potter <snowball@defpixel.com>
Cam Jackson <camjackson89@gmail.com>
Cameron Zwarich <zwarich@mozilla.com>
+Camille Roussel <camille@rousselfamily.com>
+Camille TJHOA <camille.tjhoa@outlook.com>
CarVac <c.lo.to.da.down.lo@gmail.com>
Carl Lerche <me@carllerche.com>
Carl-Anton Ingmarsson <mail@carlanton.se>
Carter Tazio Schonwald <carter.schonwald@gmail.com>
Caspar Krieger <caspar@asparck.com>
Chase Southwood <chase.southwood@gmail.com>
+Ches Martin <ches@whiskeyandgrits.net>
+Chloe <5paceToast@users.noreply.github.com>
Chris Double <chris.double@double.co.nz>
Chris Morgan <me@chrismorgan.info>
Chris Nixon <chris.nixon@sigma.me.uk>
Corey Ford <corey@coreyford.name>
Corey Richardson <corey@octayn.net>
Cristi Burcă <scribu@gmail.com>
+Cristian Kubis <cristian.kubis@tsunix.de>
DJUrsus <colinvh@divitu.com>
+Dabo Ross <daboross@daboross.net>
Damian Gryski <damian@gryski.com>
Damien Grassart <damien@grassart.com>
Damien Radtke <dradtke@channeliq.com>
Dan Burkert <dan@danburkert.com>
Dan Connolly <dckc@madmode.com>
Dan Luu <danluu@gmail.com>
+Dan Schatzberg <schatzberg.dan@gmail.com>
+Dan W. <1danwade@gmail.com>
Dan Yang <dsyang@fb.com>
Daniel Brooks <db48x@db48x.net>
Daniel Fagnan <dnfagnan@gmail.com>
Daniel Griffen <daniel@dgriffen.com>
Daniel Grunwald <daniel@danielgrunwald.de>
Daniel Hofstetter <daniel.hofstetter@42dh.com>
+Daniel Lobato García <elobatocs@gmail.com>
Daniel Luz <dev@mernen.com>
Daniel MacDougall <dmacdougall@gmail.com>
Daniel Micay <danielmicay@gmail.com>
Daniel Rosenwasser <DanielRosenwasser@gmail.com>
Daniel Ursache Dogariu <contact@danniel.net>
Daniil Smirnov <danslapman@gmail.com>
+Darin Morrison <darinmorrison+git@gmail.com>
+Darrell Hamilton <darrell.noice@gmail.com>
Dave Herman <dherman@mozilla.com>
Dave Hodder <dmh@dmh.org.uk>
+Dave Huseby <dhuseby@mozilla.com>
David Creswick <dcrewi@gyrae.net>
David Forsythe <dforsythe@gmail.com>
David Halperin <halperin.dr@gmail.com>
+David King <dave@davbo.org>
David Klein <david.klein@baesystemsdetica.com>
+David Mally <djmally@gmail.com>
David Manescu <david.manescu@gmail.com>
David Rajchenbach-Teller <dteller@mozilla.com>
David Renshaw <dwrenshaw@gmail.com>
David Vazgenovich Shakaryan <dvshakaryan@gmail.com>
Davis Silverman <sinistersnare@gmail.com>
+Denis Defreyne <denis.defreyne@stoneship.org>
Derecho <derecho@sector5d.org>
Derek Chiang <derekchiang93@gmail.com>
Derek Guenther <dguenther9@gmail.com>
Dominik Inführ <dominik.infuehr@gmail.com>
Donovan Preston <donovanpreston@gmail.com>
Douglas Young <rcxdude@gmail.com>
+Drew Crawford <drew@sealedabstract.com>
Drew Willcoxon <adw@mozilla.com>
Duane Edwards <mail@duaneedwards.net>
Duncan Regan <duncanregan@gmail.com>
Dylan Braithwaite <dylanbraithwaite1@gmail.com>
+Dylan Ede <dylanede@googlemail.com>
Dzmitry Malyshau <kvarkus@gmail.com>
Earl St Sauver <estsauver@gmail.com>
Eduard Bopp <eduard.bopp@aepsil0n.de>
Elliott Slaughter <elliottslaughter@gmail.com>
Elly Fong-Jones <elly@leptoquark.net>
Emanuel Rylke <ema-fox@web.de>
+Emeliov Dmitrii <demelev1990@gmail.com>
Eric Allen <ericpallen@gmail.com>
Eric Biggers <ebiggers3@gmail.com>
Eric Holk <eric.holk@gmail.com>
Eric Holmes <eric@ejholmes.net>
Eric Kidd <git@randomhacks.net>
Eric Martin <e.a.martin1337@gmail.com>
+Eric Platon <eric.platon@waku-waku.ne.jp>
Eric Reed <ecreed@cs.washington.edu>
Erick Rivas <chemical.rivas@gmail.com>
Erick Tryzelaar <erick.tryzelaar@gmail.com>
Florian Zeitz <florob@babelmonkeys.de>
Francisco Souza <f@souza.cc>
Franklin Chen <franklinchen@franklinchen.com>
+FuGangqiang <fu_gangqiang@163.com>
Gabriel <g2p.code@gmail.com>
Gareth Daniel Smith <garethdanielsmith@gmail.com>
Garrett Heel <garrettheel@gmail.com>
Geoffroy Couprie <geo.couprie@gmail.com>
George Papanikolaou <g3orge.app@gmail.com>
Georges Dubus <georges.dubus@gmail.com>
+Germano Gabbianelli <tyrion@users.noreply.github.com>
Gil Cottle <rc@redtown.org>
Gioele Barabucci <gioele@svario.it>
+GlacJAY <glacjay@gmail.com>
Gleb Kozyrev <gleb@gkoz.com>
Glenn Willen <gwillen@nerdnet.org>
Gonçalo Cabrita <_@gmcabrita.com>
HeroesGrave <heroesgrave@gmail.com>
Hong Chulju <ang0123dev@gmail.com>
Honza Strnad <hanny.strnad@gmail.com>
+Huachao Huang <huachao.huang@gmail.com>
Hugo Jobling <hello@thisishugo.com>
Hugo van der Wijst <hugo@wij.st>
Huon Wilson <dbau.pp+github@gmail.com>
+Hyeon Kim <simnalamburt@gmail.com>
Ian Connolly <iconnolly@mozilla.com>
Ian D. Bollinger <ian.bollinger@gmail.com>
Ian Daniher <it.daniher@gmail.com>
Isaac Dupree <antispam@idupree.com>
Ivan Enderlin <ivan.enderlin@hoa-project.net>
Ivan Petkov <ivanppetkov@gmail.com>
+Ivan Radanov Ivanov <ivanradanov@yahoo.co.uk>
Ivan Ukhov <ivan.ukhov@gmail.com>
Ivano Coppola <rgbfirefox@gmail.com>
J. J. Weber <jjweber@gmail.com>
J.C. Moyer <jmoyer1992@gmail.com>
JONNALAGADDA Srinivas <js@ojuslabs.com>
JP Sugarbroad <jpsugar@google.com>
+JP-Ellis <coujellis@gmail.com>
Jack Heizer <jack.heizer@gmail.com>
Jack Moffitt <jack@metajack.im>
Jacob Edelman <edelman.jd@gmail.com>
Jake Scott <jake.net@gmail.com>
Jakub Bukaj <jakub@jakub.cc>
Jakub Wieczorek <jakubw@jakubw.net>
+Jakub Vrána <jakub@vrana.cz>
James Deng <cnjamesdeng@gmail.com>
James Hurst <jamesrhurst@users.noreply.github.com>
James Lal <james@lightsofapollo.com>
Jauhien Piatlicki <jauhien@gentoo.org>
Jay Anderson <jayanderson0@gmail.com>
Jay True <glacjay@gmail.com>
+Jeaye <jeaye@arrownext.com>
Jed Davis <jld@panix.com>
Jed Estep <aje@jhu.edu>
Jeff Balogh <jbalogh@mozilla.com>
Jesse Luehrs <doy@tozt.net>
Jesse Ray <jesse@localhost.localdomain>
Jesse Ruderman <jruderman@gmail.com>
-Jihyun Yu <jihyun@nclab.kaist.ac.kr>
+Jessy Diamond Exum <jessy.diamondman@gmail.com>
+Jihyeok Seo <me@limeburst.net>
+Jihyun Yu <j.yu@navercorp.com>
Jim Apple <jbapple+rust@google.com>
Jim Blandy <jimb@red-bean.com>
Jim Radford <radford@blackbean.org>
Jimmie Elvenmark <flugsio@gmail.com>
Jimmy Lu <jimmy.lu.2011@gmail.com>
Jimmy Zelinskie <jimmyzelinskie@gmail.com>
+Jiří Stránský <jistr@jistr.com>
Joe Pletcher <joepletcher@gmail.com>
Joe Schafer <joe@jschaf.com>
Johannes Hoff <johshoff@gmail.com>
Johannes Löthberg <johannes@kyriasis.com>
Johannes Muenzel <jmuenzel@gmail.com>
+Johannes Oertel <johannes.oertel@uni-due.de>
John Albietz <inthecloud247@gmail.com>
John Barker <jebarker@gmail.com>
John Clements <clements@racket-lang.org>
+John Ericson <Ericson2314@Yahoo.com>
John Fresco <john.fresco@utah.edu>
John Gallagher <jgallagher@bignerdranch.com>
John Hodge <acessdev@gmail.com>
John Kåre Alsaker <john.kare.alsaker@gmail.com>
John Kleint <jk@hinge.co>
+John Kåre Alsaker <john.kare.alsaker@gmail.com>
John Louis Walker <injyuw@gmail.com>
John Schmidt <john.schmidt.h@gmail.com>
John Simon <john@johnsoft.com>
+John Zhang <john@zhang.io>
Jon Haddad <jon@jonhaddad.com>
Jon Morton <jonanin@gmail.com>
Jonas Hietala <tradet.h@gmail.com>
Jonathan S <gereeter@gmail.com>
Jonathan Sternberg <jonathansternberg@gmail.com>
Joonas Javanainen <joonas.javanainen@gmail.com>
+Jordan Woehr <jordanwoehr@gmail.com>
Jordi Boggiano <j.boggiano@seld.be>
Jorge Aparicio <japaricious@gmail.com>
Jorge Israel Peña <jorge.israel.p@gmail.com>
Joshua Yanovski <pythonesque@gmail.com>
Julia Evans <julia@jvns.ca>
Julian Orth <ju.orth@gmail.com>
+Julian Viereck <julian.viereck@gmail.com>
Junseok Lee <lee.junseok@berkeley.edu>
Junyoung Cho <june0.cho@samsung.com>
JustAPerson <jpriest8@ymail.com>
Justin Noah <justinnoah@gmail.com>
Jyun-Yan You <jyyou.tw@gmail.com>
Kang Seonghoon <kang.seonghoon@mearie.org>
-Kang Seonghoon <public+git@mearie.org>
Kasey Carrothers <kaseyc.808@gmail.com>
Keegan McAllister <mcallister.keegan@gmail.com>
Kelly Wilson <wilsonk@cpsc.ucalgary.ca>
+Kelvin Ly <kelvin.ly1618@gmail.com>
Ken Tossell <ken@tossell.net>
KernelJ <kernelj@epixxware.com>
Keshav Kini <keshav.kini@gmail.com>
Kevin Cantu <me@kevincantu.org>
Kevin Mehall <km@kevinmehall.net>
Kevin Murphy <kemurphy.cmu@gmail.com>
+Kevin Rauwolf <sweetpea-git@tentacle.net>
Kevin Walter <kevin.walter.private@googlemail.com>
Kevin Yap <me@kevinyap.ca>
Kiet Tran <ktt3ja@gmail.com>
LemmingAvalanche <haugsbakk@yahoo.no>
Lennart Kudling <github@kudling.de>
Leo Testard <leo.testard@gmail.com>
+Leonids Maslovs <leonids.maslovs@galeoconsulting.com>
+Liam Monahan <liam@monahan.io>
Liigo Zhuang <com.liigo@gmail.com>
Lindsey Kuper <lindsey@composition.al>
Lionel Flandrin <lionel.flandrin@parrot.com>
Matt Brubeck <mbrubeck@limpet.net>
Matt Carberry <carberry.matt@gmail.com>
Matt Coffin <mcoffin13@gmail.com>
+Matt Cox <mattcoxpdx@gmail.com>
Matt McPherrin <git@mcpherrin.ca>
Matt Murphy <matthew.john.murphy@gmail.com>
Matt Roche <angst7@gmail.com>
Michael Williams <m.t.williams@live.com>
Michael Woerister <michaelwoerister@posteo>
Michael Zhou <moz@google.com>
+Michał Krasnoborski <mkrdln@gmail.com>
Mick Koch <kchmck@gmail.com>
Mickaël Delahaye <mickael.delahaye@gmail.com>
Mihnea Dobrescu-Balaur <mihnea@linux.com>
Neil Pankey <npankey@gmail.com>
Nelson Chen <crazysim@gmail.com>
NiccosSystem <niccossystem@gmail.com>
+Nicholas <npmazzuca@gmail.com>
Nicholas Bishop <nicholasbishop@gmail.com>
+Nicholas Mazzuca <npmazzuca@gmail.com>
Nick Cameron <ncameron@mozilla.com>
Nick Desaulniers <ndesaulniers@mozilla.com>
Nick Howell <howellnick@gmail.com>
Olivier Saut <osaut@airpost.net>
Olle Jonsson <olle.jonsson@gmail.com>
Or Brostovski <tohava@gmail.com>
+Or Neeman <oneeman@gmail.com>
Oren Hazi <oren.hazi@gmail.com>
Orpheus Lummis <o@orpheuslummis.com>
+Orphée Lafond-Lummis <o@orftz.com>
P1start <rewi-github@whanau.org>
Pablo Brasero <pablo@pablobm.com>
Palmer Cox <p@lmercox.com>
Patrick Walton <pcwalton@mimiga.net>
Patrick Yevsukov <patrickyevsukov@users.noreply.github.com>
Patrik Kårlin <patrik.karlin@gmail.com>
+Paul ADENOT <paul@paul.cx>
Paul Collier <paul@paulcollier.ca>
+Paul Collins <paul@ondioline.org>
Paul Crowley <paulcrowley@google.com>
+Paul Osborne <osbpau@gmail.com>
Paul Stansifer <paul.stansifer@gmail.com>
Paul Woolcock <pwoolcoc+github@gmail.com>
Pavel Panchekha <me@pavpanchekha.com>
Peter Elmers <peter.elmers@yahoo.com>
Peter Hull <peterhull90@gmail.com>
Peter Marheine <peter@taricorp.net>
+Peter Minten <peter@pminten.nl>
Peter Schuller <peter.schuller@infidyne.com>
Peter Williams <peter@newton.cx>
Peter Zotov <whitequark@whitequark.org>
Pradeep Kumar <gohanpra@gmail.com>
Prudhvi Krishna Surapaneni <me@prudhvi.net>
Pyfisch <pyfisch@gmail.com>
+Pyry Kontio <pyry.kontio@drasa.eu>
Pythoner6 <pythoner6@gmail.com>
Q.P.Liu <qpliu@yahoo.com>
Rafael Ávila de Espíndola <respindola@mozilla.com>
+Rahul Horé <hore.rahul@gmail.com>
Ralph Bodenner <rkbodenner+github@gmail.com>
Ralph Giles <giles@thaumas.net>
Ramkumar Ramachandra <artagnon@gmail.com>
Randati <anttivan@gmail.com>
Raphael Catolino <raphael.catolino@gmail.com>
+Raphael Nestler <raphael.nestler@gmail.com>
Raphael Speyer <rspeyer@gmail.com>
Raul Gutierrez S <rgs@itevenworks.net>
Ray Clanan <rclanan@utopianconcept.com>
Renato Zannon <renato@rrsz.com.br>
Reuben Morais <reuben.morais@gmail.com>
Ricardo M. Correia <rcorreia@wizy.org>
+Ricardo Martins <ricardo@scarybox.net>
Rich Lane <rlane@club.cc.cmu.edu>
Richard Diamond <wichard@vitalitystudios.com>
Richo Healey <richo@psych0tik.net>
Ruud van Asseldonk <dev@veniogames.com>
Ryan Levick <ryan@6wunderkinder.com>
Ryan Mulligan <ryan@ryantm.com>
+Ryan Prichard <ryan.prichard@gmail.com>
+Ryan Riginding <marc.riginding@gmail.com>
Ryan Scheel <ryan.havvy@gmail.com>
Ryman <haqkrs@gmail.com>
Rüdiger Sonderfeld <ruediger@c-plusplus.de>
S Pradeep Kumar <gohanpra@gmail.com>
+Sae-bom Kim <sae-bom.kim@samsung.com>
Salem Talha <salem.a.talha@gmail.com>
Samuel Chase <samebchase@gmail.com>
Samuel Neves <sneves@dei.uc.pt>
Saurabh Anand <saurabhanandiit@gmail.com>
Scott Jenkins <scottdjwales@gmail.com>
Scott Lawrence <bytbox@gmail.com>
+Scott Olson <scott@scott-olson.org>
Sean Chalmers <sclhiannan@gmail.com>
Sean Collins <sean@cllns.com>
Sean Gillespie <sean.william.g@gmail.com>
Taras Shpot <mrshpot@gmail.com>
Taylor Hutchison <seanthutchison@gmail.com>
Ted Horst <ted.horst@earthlink.net>
+Tero Hänninen <tejohann@kapsi.fi>
Thad Guidry <thadguidry@gmail.com>
Thiago Carvalho <thiago.carvalho@westwing.de>
+Thiago Pontes <email@thiago.me>
Thomas Backman <serenity@exscape.org>
Thomas Daede <daede003@umn.edu>
+Tiago Nobrega <tigarmo@gmail.com>
Till Hoeppner <till@hoeppner.ws>
Tim Brooks <brooks@cern.ch>
Tim Chevalier <chevalier@alum.wellesley.edu>
Tony Young <tony@rfw.name>
Torsten Weber <TorstenWeber12@gmail.com>
Travis Watkins <amaranth@ubuntu.com>
+Trent Nadeau <tanadeau@gmail.com>
Trent Ogren <tedwardo2@gmail.com>
Trinick <slicksilver555@mac.com>
Tristan Storch <tstorch@math.uni-bielefeld.de>
Tycho Sci <tychosci@gmail.com>
Tyler Bindon <martica@martica.org>
Tyler Thrailkill <tylerbthrailkill@gmail.com>
-U-NOV2010\eugals
Ulrik Sverdrup <root@localhost>
Ulysse Carion <ulysse@ulysse.io>
+User Jyyou <jyyou@plaslab.cs.nctu.edu.tw>
Utkarsh Kukreti <utkarshkukreti@gmail.com>
Uwe Dauernheim <uwe@dauernheim.net>
Vadim Chugunov <vadimcn@gmail.com>
Wangshan Lu <wisagan@gmail.com>
WebeWizard <webewizard@gmail.com>
Wendell Smith <wendell.smith@yale.edu>
+Wesley Wiser <wwiser@gmail.com>
Will <will@glozer.net>
William Ting <io@williamting.com>
Willson Mock <willson.mock@gmail.com>
Zbigniew Siciarz <zbigniew@siciarz.net>
Ziad Hatahet <hatahet@gmail.com>
Zooko Wilcox-O'Hearn <zooko@zooko.com>
+adridu59 <adri-from-59@hotmail.fr>
aochagavia <aochagavia92@gmail.com>
areski <areski@gmail.com>
arturo <arturo@openframeworks.cc>
auREAX <mark@xn--hwg34fba.ws>
+awlnx <alecweber1994@gmail.com>
+aydin.kim <aydin.kim@samsung.com>
b1nd <clint.ryan3@gmail.com>
bachm <Ab@vapor.com>
+bcoopers <coopersmithbrian@gmail.com>
blackbeam <aikorsky@gmail.com>
blake2-ppc <ulrik.sverdrup@gmail.com>
bluss <bluss>
crhino <piraino.chris@gmail.com>
dan@daramos.com <dan@daramos.com>
darkf <lw9k123@gmail.com>
+defuz <defuz.net@gmail.com>
dgoon <dgoon@dgoon.net>
donkopotamus <general@chocolate-fish.com>
eliovir <eliovir@gmail.com>
free-Runner <aali07@students.poly.edu>
g3xzh <g3xzh@yahoo.com>
gamazeps <gamaz3ps@gmail.com>
+gareth <gareth@gareth-N56VM.(none)>
gentlefolk <cemacken@gmail.com>
gifnksm <makoto.nksm@gmail.com>
hansjorg <hansjorg@gmail.com>
jrincayc <jrincayc@users.noreply.github.com>
juxiliary <juxiliary@gmail.com>
jxv <joevargas@hush.com>
+kgv <mail@kgv.name>
+kjpgit <kjpgit@users.noreply.github.com>
klutzy <klutzytheklutzy@gmail.com>
korenchkin <korenchkin2@gmail.com>
kud1ing <github@kudling.de>
kwantam <kwantam@gmail.com>
lpy <pylaurent1314@gmail.com>
lucy <ne.tetewi@gmail.com>
+lummax <luogpg@googlemail.com>
lyuts <dioxinu@gmail.com>
m-r-r <raybaudroigm@gmail.com>
madmalik <matthias.tellen@googlemail.com>
musitdev <philippe.delrieu@free.fr>
nathan dotz <nathan.dotz@gmail.com>
nham <hamann.nick@gmail.com>
+niftynif <nif.ward@gmail.com>
noam <noam@clusterfoo.com>
novalis <novalis@novalis.org>
+nsf <no.smile.face@gmail.com>
+nwin <nwin@users.noreply.github.com>
oli-obk <github6541940@oli-obk.de>
olivren <o.renaud@gmx.fr>
osa1 <omeragacan@gmail.com>
posixphreak <posixphreak@gmail.com>
qwitwa <qwitwa@gmail.com>
+ray glover <ray@rayglover.net>
reedlepee <reedlepee123@gmail.com>
+reus <reusee@ymail.com>
rjz <rj@rjzaworski.com>
sevrak <sevrak@rediffmail.com>
sheroze1123 <mss385@cornell.edu>
smenardpw <sebastien@knoglr.com>
sp3d <sp3d@github>
startling <tdixon51793@gmail.com>
+tav <tav@espians.com>
th0114nd <th0114nd@gmail.com>
theptrk <patrick.tran06@gmail.com>
thiagopnts <thiagopnts@gmail.com>
zofrex <zofrex@gmail.com>
zslayton <zack.slayton@gmail.com>
zzmp <zmp@umich.edu>
+Łukasz Niemier <lukasz@niemier.pl>
+克雷 <geekcraik@users.noreply.github.com>
# This is used by the automation to produce single-target nightlies
opt dist-host-only 0 "only install bins for the host architecture"
opt inject-std-version 1 "inject the current compiler version of libstd into programs"
-opt jemalloc 1 "build liballoc with jemalloc"
opt llvm-version-check 1 "don't check if the LLVM version is supported, build anyway"
valopt localstatedir "/var/lib" "local state directory"
# (others are conditionally saved).
opt_nosave manage-submodules 1 "let the build manage the git submodules"
opt_nosave clang 0 "prefer clang to gcc for building the runtime"
+opt_nosave jemalloc 1 "build liballoc with jemalloc"
valopt_nosave prefix "/usr/local" "set installation prefix"
valopt_nosave local-rust-root "/usr/local" "set prefix for local rust binary"
probe CFG_VALGRIND valgrind
probe CFG_PERF perf
probe CFG_ISCC iscc
-probe CFG_JAVAC javac
probe CFG_ANTLR4 antlr4
probe CFG_GRUN grun
probe CFG_FLEX flex
probe CFG_GDB gdb
probe CFG_LLDB lldb
+# On Mac OS X, invoking `javac` pops up a dialog if the JDK is not
+# installed. Since `javac` is only used if `antlr4` is available,
+# probe for it only in this case.
+if [ ! -z "$CFG_ANTLR4" ]
+then
+ probe CFG_JAVAC javac
+fi
+
if [ ! -z "$CFG_GDB" ]
then
# Store GDB's version
then
step_msg "on Bitrig, forcing use of clang, disabling jemalloc"
CFG_ENABLE_CLANG=1
- CFG_ENABLE_JEMALLOC=0
+ CFG_DISABLE_JEMALLOC=1
fi
if [ -z "$CFG_ENABLE_CLANG" -a -z "$CFG_GCC" ]
putvar CFG_ENABLE_CLANG
fi
+# Same with jemalloc. Save the setting here.
+if [ ! -z "$CFG_DISABLE_JEMALLOC" ]
+then
+ putvar CFG_DISABLE_JEMALLOC
+fi
+
if [ ! -z "$CFG_LLVM_ROOT" -a -z "$CFG_DISABLE_LLVM_VERSION_CHECK" -a -e "$CFG_LLVM_ROOT/bin/llvm-config" ]
then
step_msg "using custom LLVM at $CFG_LLVM_ROOT"
// write debugger script
let mut script_str = String::with_capacity(2048);
- script_str.push_str("set charset UTF-8\n");
+ let charset = if cfg!(target_os = "bitrig") { "auto" } else { "UTF-8" };
+ script_str.push_str(&format!("set charset {}\n", charset));
script_str.push_str(&format!("file {}\n", exe_file.to_str().unwrap()));
script_str.push_str("target remote :5039\n");
script_str.push_str(&format!("set solib-search-path \
.to_string();
// write debugger script
let mut script_str = String::with_capacity(2048);
-
- script_str.push_str("set charset UTF-8\n");
+ let charset = if cfg!(target_os = "bitrig") { "auto" } else { "UTF-8" };
+ script_str.push_str(&format!("set charset {}\n", charset));
script_str.push_str("show version\n");
match config.gdb_version {
variable’: our `thread::scoped` closure wants to take ownership, and it can’t,
because the closure for `map` won’t let it.
-What to do here? Rust has two types that helps us: `Arc<T>` and `Mutex<T>`.
-*Arc* stands for "atomically reference counted". In other words, an Arc will
-keep track of the number of references to something, and not free the
-associated resource until the count is zero. The *atomic* portion refers to an
-Arc's usage of concurrency primitives to atomically update the count, making it
-safe across threads. If we use an Arc, we can have our three references. But,
-an Arc does not allow mutable borrows of the data it holds, and we want to
-modify what we're sharing. In this case, we can use a `Mutex<T>` inside of our
-Arc. A Mutex will synchronize our accesses, so that we can ensure that our
-mutation doesn't cause a data race.
-
-Here's what using an Arc with a Mutex looks like:
+What to do here? Rust has a type that helps us: `Mutex<T>`. Because the threads
+are scoped, it is possible to use an _immutable_ reference to `numbers` inside
+of the closure. However, Rust prevents us from having multiple _mutable_
+references to the same object, so we need a `Mutex` to be able to modify what
+we're sharing. A Mutex will synchronize our accesses, so that we can ensure
+that our mutation doesn't cause a data race.
+
+Here's what using a Mutex looks like:
```{rust}
use std::thread;
-use std::sync::{Arc,Mutex};
+use std::sync::Mutex;
fn main() {
- let numbers = Arc::new(Mutex::new(vec![1, 2, 3]));
+ let numbers = &Mutex::new(vec![1, 2, 3]);
let guards: Vec<_> = (0..3).map(|i| {
- let number = numbers.clone();
thread::scoped(move || {
- let mut array = number.lock().unwrap();
+ let mut array = numbers.lock().unwrap();
array[i] += 1;
println!("numbers[{}] is {}", i, array[i]);
})
```
We first have to `use` the appropriate library, and then we wrap our vector in
-an Arc with the call to `Arc::new()`. Inside of the loop, we make a new
-reference to the Arc with the `clone()` method. This will increment the
-reference count. When each new `numbers` variable binding goes out of scope, it
-will decrement the count. The `lock()` call will return us a reference to the
-value inside the Mutex, and block any other calls to `lock()` until said
-reference goes out of scope.
+a `Mutex` with the call to `Mutex::new()`. Inside of the loop, the `lock()`
+call will return us a reference to the value inside the Mutex, and block any
+other calls to `lock()` until said reference goes out of scope.
We can compile and run this program without error, and in fact, see the
non-deterministic aspect:
In a similar fashion to "Intermediate," this section is full of individual,
deep-dive chapters, which stand alone and can be read in any order. These
-chapters focus on the most complex features,
+chapters focus on Rust's most complex features.
<h2 class="section-header"><a href="unstable.html">Unstable</a></h2>
}
#[cfg(test)]
-mod tests {
+mod test {
use super::*;
use test::Bencher;
here to make them look a little closer:
```rust
-fn plus_one_v1 ( x: i32 ) -> i32 { x + 1 }
+fn plus_one_v1 (x: i32 ) -> i32 { x + 1 }
let plus_one_v2 = |x: i32 | -> i32 { x + 1 };
let plus_one_v3 = |x: i32 | x + 1 ;
```
needed to declare `add_num` as `mut` too, because we’re mutating its
environment.
-We also had to declare `add_num` as mut, since we will be modifying its
-environment.
-
If we change to a `move` closure, it's different:
```rust
finished. If we didn't want this behaviour, we could use `thread::spawn()`:
```
-# #![feature(old_io, std_misc)]
use std::thread;
-use std::old_io::timer;
-use std::time::Duration;
fn main() {
thread::spawn(|| {
println!("Hello from a thread!");
});
- timer::sleep(Duration::milliseconds(50));
+ thread::sleep_ms(50);
}
```
languages. It will not compile:
```ignore
-# #![feature(old_io, std_misc)]
use std::thread;
-use std::old_io::timer;
-use std::time::Duration;
fn main() {
let mut data = vec![1u32, 2, 3];
});
}
- timer::sleep(Duration::milliseconds(50));
+ thread::sleep_ms(50);
}
```
This gives us an error:
```text
-12:17 error: capture of moved value: `data`
+8:17 error: capture of moved value: `data`
data[i] += 1;
^~~~
```
but for a different reason:
```ignore
-# #![feature(old_io, std_misc)]
use std::thread;
-use std::old_io::timer;
-use std::time::Duration;
use std::sync::Mutex;
fn main() {
});
}
- timer::sleep(Duration::milliseconds(50));
+ thread::sleep_ms(50);
}
```
Here's the error:
```text
-<anon>:11:9: 11:22 error: the trait `core::marker::Send` is not implemented for the type `std::sync::mutex::MutexGuard<'_, collections::vec::Vec<u32>>` [E0277]
+<anon>:9:9: 9:22 error: the trait `core::marker::Send` is not implemented for the type `std::sync::mutex::MutexGuard<'_, collections::vec::Vec<u32>>` [E0277]
<anon>:11 thread::spawn(move || {
^~~~~~~~~~~~~
-<anon>:11:9: 11:22 note: `std::sync::mutex::MutexGuard<'_, collections::vec::Vec<u32>>` cannot be sent between threads safely
+<anon>:9:9: 9:22 note: `std::sync::mutex::MutexGuard<'_, collections::vec::Vec<u32>>` cannot be sent between threads safely
<anon>:11 thread::spawn(move || {
^~~~~~~~~~~~~
```
We can use `Arc<T>` to fix this. Here's the working version:
```
-# #![feature(old_io, std_misc)]
use std::sync::{Arc, Mutex};
use std::thread;
-use std::old_io::timer;
-use std::time::Duration;
fn main() {
let data = Arc::new(Mutex::new(vec![1u32, 2, 3]));
});
}
- timer::sleep(Duration::milliseconds(50));
+ thread::sleep_ms(50);
}
```
handle is then moved into the new thread. Let's examine the body of the
thread more closely:
-```
-# #![feature(old_io, std_misc)]
+```rust
# use std::sync::{Arc, Mutex};
# use std::thread;
-# use std::old_io::timer;
-# use std::time::Duration;
# fn main() {
# let data = Arc::new(Mutex::new(vec![1u32, 2, 3]));
# for i in 0..2 {
data[i] += 1;
});
# }
+# thread::sleep_ms(50);
# }
```
```bash
$ cargo build
Compiling phrases v0.0.1 (file:///home/you/projects/phrases)
-$ ls target
-deps libphrases-a7448e02a0468eaa.rlib native
+$ ls target/debug
+build deps examples libphrases-a7448e02a0468eaa.rlib native
```
`libphrases-hash.rlib` is the compiled crate. Before we see how to use this
│ │ └── mod.rs
│ └── lib.rs
└── target
- ├── deps
- ├── libphrases-a7448e02a0468eaa.rlib
- └── native
+ └── debug
+ ├── build
+ ├── deps
+ ├── examples
+ ├── libphrases-a7448e02a0468eaa.rlib
+ └── native
```
`src/lib.rs` is our crate root, and looks like this:
Put this in `src/japanese/greetings.rs`:
```rust
-// in src/japanese/greetings.rs
-
fn hello() -> String {
"こんにちは".to_string()
}
```bash
$ cargo build
Compiling phrases v0.0.1 (file:///home/you/projects/phrases)
-/home/you/projects/phrases/src/main.rs:4:38: 4:72 error: function `hello` is private
-/home/you/projects/phrases/src/main.rs:4 println!("Hello in English: {}", phrases::english::greetings::hello());
- ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+src/main.rs:4:38: 4:72 error: function `hello` is private
+src/main.rs:4 println!("Hello in English: {}", phrases::english::greetings::hello());
+ ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
note: in expansion of format_args!
-<std macros>:2:23: 2:77 note: expansion site
-<std macros>:1:1: 3:2 note: in expansion of println!
-/home/you/projects/phrases/src/main.rs:4:5: 4:76 note: expansion site
-
+<std macros>:2:25: 2:58 note: expansion site
+<std macros>:1:1: 2:62 note: in expansion of print!
+<std macros>:3:1: 3:54 note: expansion site
+<std macros>:1:1: 3:58 note: in expansion of println!
+phrases/src/main.rs:4:5: 4:76 note: expansion site
```
By default, everything is private in Rust. Let's talk about this in some more
```bash
$ cargo run
Compiling phrases v0.0.1 (file:///home/you/projects/phrases)
-/home/you/projects/phrases/src/japanese/greetings.rs:1:1: 3:2 warning: code is never used: `hello`, #[warn(dead_code)] on by default
-/home/you/projects/phrases/src/japanese/greetings.rs:1 fn hello() -> String {
-/home/you/projects/phrases/src/japanese/greetings.rs:2 "こんにちは".to_string()
-/home/you/projects/phrases/src/japanese/greetings.rs:3 }
-/home/you/projects/phrases/src/japanese/farewells.rs:1:1: 3:2 warning: code is never used: `goodbye`, #[warn(dead_code)] on by default
-/home/you/projects/phrases/src/japanese/farewells.rs:1 fn goodbye() -> String {
-/home/you/projects/phrases/src/japanese/farewells.rs:2 "さようなら".to_string()
-/home/you/projects/phrases/src/japanese/farewells.rs:3 }
- Running `target/phrases`
+src/japanese/greetings.rs:1:1: 3:2 warning: function is never used: `hello`, #[warn(dead_code)] on by default
+src/japanese/greetings.rs:1 fn hello() -> String {
+src/japanese/greetings.rs:2 "こんにちは".to_string()
+src/japanese/greetings.rs:3 }
+src/japanese/farewells.rs:1:1: 3:2 warning: function is never used: `goodbye`, #[warn(dead_code)] on by default
+src/japanese/farewells.rs:1 fn goodbye() -> String {
+src/japanese/farewells.rs:2 "さようなら".to_string()
+src/japanese/farewells.rs:3 }
+ Running `target/debug/phrases`
Hello in English: Hello!
Goodbye in English: Goodbye.
```
```text
Compiling phrases v0.0.1 (file:///home/you/projects/phrases)
-/home/you/projects/phrases/src/main.rs:4:5: 4:40 error: a value named `hello` has already been imported in this module
-/home/you/projects/phrases/src/main.rs:4 use phrases::japanese::greetings::hello;
- ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+src/main.rs:4:5: 4:40 error: a value named `hello` has already been imported in this module [E0252]
+src/main.rs:4 use phrases::japanese::greetings::hello;
+ ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
error: aborting due to previous error
Could not compile `phrases`.
```
```bash
$ cargo run
Compiling phrases v0.0.1 (file:///home/you/projects/phrases)
- Running `target/phrases`
+ Running `target/debug/phrases`
Hello in English: Hello!
Goodbye in English: Goodbye.
Hello in Japanese: こんにちは
```
If you're on Windows, please download either the [32-bit
-installer](https://static.rust-lang.org/dist/rust-nightly-i686-pc-windows-gnu.exe)
+installer](https://static.rust-lang.org/dist/rust-1.0.0-beta-i686-pc-windows-gnu.exe)
or the [64-bit
-installer](https://static.rust-lang.org/dist/rust-nightly-x86_64-pc-windows-gnu.exe)
+installer](https://static.rust-lang.org/dist/rust-1.0.0-beta-x86_64-pc-windows-gnu.exe)
and run it.
If you decide you don't want Rust anymore, we'll be a bit sad, but that's okay.
just use `for` instead.
There are tons of interesting iterator adapters. `take(n)` will return an
-iterator over the next `n` elements of the original iterator, note that this
+iterator over the next `n` elements of the original iterator. Note that this
has no side effect on the original iterator. Let's try it out with our infinite
iterator from before:
}
fn y(&mut self, coordinate: f64) -> &mut CircleBuilder {
- self.x = coordinate;
+ self.y = coordinate;
self
}
}
#[cfg(test)]
-mod tests {
+mod test {
use super::*;
#[test]
% Unstable Rust
+
+Rust provides three distribution channels for Rust: nightly, beta, and stable.
+Unstable features are only available on nightly Rust. For more details on this
+process, see [this post](http://blog.rust-lang.org/2014/10/30/Stability.html).
+
+To install nightly Rust, you can use `rustup.sh`:
+
+```bash
+$ curl -s https://static.rust-lang.org/rustup.sh | sudo sh -s -- --channel=nightly
+```
+
+If you're concerned about the [potential insecurity](http://curlpipesh.tumblr.com/) of using `curl | sudo sh`,
+please keep reading and see our disclaimer below. And feel free to use a two-step version of the installation and examine our installation script:
+
+```bash
+$ curl -f -L https://static.rust-lang.org/rustup.sh -O
+$ sudo sh rustup.sh --channel=nightly
+```
+
+If you're on Windows, please download either the [32-bit
+installer](https://static.rust-lang.org/dist/rust-nightly-i686-pc-windows-gnu.exe)
+or the [64-bit
+installer](https://static.rust-lang.org/dist/rust-nightly-x86_64-pc-windows-gnu.exe)
+and run it.
+
+If you decide you don't want Rust anymore, we'll be a bit sad, but that's okay.
+Not every programming language is great for everyone. Just run the uninstall
+script:
+
+```bash
+$ sudo /usr/local/lib/rustlib/uninstall.sh
+```
+
+If you used the Windows installer, just re-run the `.exe` and it will give you
+an uninstall option.
+
+You can re-run this script any time you want to update Rust. Which, at this
+point, is often. Rust is still pre-1.0, and so people assume that you're using
+a very recent Rust.
+
+This brings me to one other point: some people, and somewhat rightfully so, get
+very upset when we tell you to `curl | sudo sh`. And they should be! Basically,
+when you do this, you are trusting that the good people who maintain Rust
+aren't going to hack your computer and do bad things. That's a good instinct!
+If you're one of those people, please check out the documentation on [building
+Rust from Source](https://github.com/rust-lang/rust#building-from-source), or
+[the official binary downloads](http://www.rust-lang.org/install.html). And we
+promise that this method will not be the way to install Rust forever: it's just
+the easiest way to keep people updated while Rust is in its alpha state.
+
pub fn strong_count<T>(this: &Arc<T>) -> usize { this.inner().strong.load(SeqCst) }
-/// Try accessing a mutable reference to the contents behind an unique `Arc<T>`.
+/// Returns a mutable reference to the contained value if the `Arc<T>` is unique.
///
-/// The access is granted only if this is the only reference to the object.
-/// Otherwise, `None` is returned.
+/// Returns `None` if the `Arc<T>` is not unique.
///
/// # Examples
///
/// # #![feature(alloc)]
/// extern crate alloc;
/// # fn main() {
-/// use alloc::arc;
+/// use alloc::arc::{Arc, get_mut};
///
-/// let mut four = arc::Arc::new(4);
+/// let mut x = Arc::new(3);
+/// *get_mut(&mut x).unwrap() = 4;
+/// assert_eq!(*x, 4);
///
-/// arc::unique(&mut four).map(|num| *num = 5);
+/// let _y = x.clone();
+/// assert!(get_mut(&mut x).is_none());
/// # }
/// ```
#[inline]
#[unstable(feature = "alloc")]
-pub fn unique<T>(this: &mut Arc<T>) -> Option<&mut T> {
+pub fn get_mut<T>(this: &mut Arc<T>) -> Option<&mut T> {
if strong_count(this) == 1 && weak_count(this) == 0 {
// This unsafety is ok because we're guaranteed that the pointer
// returned is the *only* pointer that will ever be returned to T. Our
self.inner().weak.load(SeqCst) != 1 {
*self = Arc::new((**self).clone())
}
- // As with `unique()`, the unsafety is ok because our reference was
+ // As with `get_mut()`, the unsafety is ok because our reference was
// either unique to begin with, or became one upon cloning the contents.
let inner = unsafe { &mut **self._ptr };
&mut inner.data
/// ```
pub fn upgrade(&self) -> Option<Arc<T>> {
// We use a CAS loop to increment the strong count instead of a
- // fetch_add because once the count hits 0 is must never be above 0.
+ // fetch_add because once the count hits 0 it must never be above 0.
let inner = self.inner();
loop {
let n = inner.strong.load(SeqCst);
use std::sync::atomic::Ordering::{Acquire, SeqCst};
use std::thread;
use std::vec::Vec;
- use super::{Arc, Weak, weak_count, strong_count, unique};
+ use super::{Arc, Weak, get_mut, weak_count, strong_count};
use std::sync::Mutex;
struct Canary(*mut atomic::AtomicUsize);
}
#[test]
- fn test_arc_unique() {
- let mut x = Arc::new(10);
- assert!(unique(&mut x).is_some());
- {
- let y = x.clone();
- assert!(unique(&mut x).is_none());
- }
- {
- let z = x.downgrade();
- assert!(unique(&mut x).is_none());
- }
- assert!(unique(&mut x).is_some());
+ fn test_arc_get_mut() {
+ let mut x = Arc::new(3);
+ *get_mut(&mut x).unwrap() = 4;
+ assert_eq!(*x, 4);
+ let y = x.clone();
+ assert!(get_mut(&mut x).is_none());
+ drop(y);
+ assert!(get_mut(&mut x).is_some());
+ let _w = x.downgrade();
+ assert!(get_mut(&mut x).is_none());
}
#[test]
/// ```
#[inline]
#[unstable(feature = "alloc")]
-pub fn get_mut<'a, T>(rc: &'a mut Rc<T>) -> Option<&'a mut T> {
+pub fn get_mut<T>(rc: &mut Rc<T>) -> Option<&mut T> {
if is_unique(rc) {
let inner = unsafe { &mut **rc._ptr };
Some(&mut inner.value)
align: usize
}
+trait AllTypes { fn dummy(&self) { } }
+impl<T:?Sized> AllTypes for T { }
+
unsafe fn get_tydesc<T>() -> *const TyDesc {
use std::raw::TraitObject;
let ptr = &*(1 as *const T);
// Can use any trait that is implemented for all types.
- let obj = mem::transmute::<&marker::MarkerTrait, TraitObject>(ptr);
+ let obj = mem::transmute::<&AllTypes, TraitObject>(ptr);
obj.vtable as *const TyDesc
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, T: Clone> AsRef<T> for Cow<'a, T> {
+impl<'a, T: ?Sized + ToOwned> AsRef<T> for Cow<'a, T> {
fn as_ref(&self) -> &T {
self
}
use core::option::Option::{self, Some, None};
use core::result::Result;
use core::str as core_str;
+use core::str::pattern::Pattern;
+use core::str::pattern::{Searcher, ReverseSearcher, DoubleEndedSearcher};
use unicode::str::{UnicodeStr, Utf16Encoder};
use core::convert::AsRef;
use slice::SliceConcatExt;
pub use core::str::{FromStr, Utf8Error, Str};
-pub use core::str::{Lines, LinesAny, MatchIndices, CharRange};
-pub use core::str::{Split, SplitTerminator, SplitN};
-pub use core::str::{RSplit, RSplitN};
+pub use core::str::{Lines, LinesAny, CharRange};
+pub use core::str::{Split, RSplit};
+pub use core::str::{SplitN, RSplitN};
+pub use core::str::{SplitTerminator, RSplitTerminator};
+pub use core::str::{Matches, RMatches};
+pub use core::str::{MatchIndices, RMatchIndices};
pub use core::str::{from_utf8, Chars, CharIndices, Bytes};
pub use core::str::{from_utf8_unchecked, ParseBoolError};
pub use unicode::str::{Words, Graphemes, GraphemeIndices};
-pub use core::str::Pattern;
-pub use core::str::{Searcher, ReverseSearcher, DoubleEndedSearcher, SearchStep};
+pub use core::str::pattern;
/*
Section: Creating a string
/// Replaces all occurrences of one string with another.
///
- /// `replace` takes two arguments, a sub-`&str` to find in `self`, and a second `&str` to
+ /// `replace` takes two arguments, a sub-`&str` to find in `self`, and a
+ /// second `&str` to
/// replace it with. If the original `&str` isn't found, no change occurs.
///
/// # Examples
/// An iterator over substrings of `self`, separated by characters
/// matched by a pattern.
///
- /// The pattern can be a simple `&str`, or a closure that determines
- /// the split.
+ /// The pattern can be a simple `&str`, `char`, or a closure that
+ /// determines the split.
+ /// Additional libraries might provide more complex patterns like
+ /// regular expressions.
+ ///
+ /// # Iterator behavior
+ ///
+ /// The returned iterator will be double ended if the pattern allows a
+ /// reverse search and forward/reverse search yields the same elements.
+ /// This is true for, e.g., `char` but not
+ /// for `&str`.
+ ///
+ /// If the pattern allows a reverse search but its results might differ
+ /// from a forward search, `rsplit()` can be used.
///
/// # Examples
///
- /// Simple `&str` patterns:
+ /// Simple patterns:
///
/// ```
/// let v: Vec<&str> = "Mary had a little lamb".split(' ').collect();
///
/// let v: Vec<&str> = "".split('X').collect();
/// assert_eq!(v, [""]);
+ ///
+ /// let v: Vec<&str> = "lionXXtigerXleopard".split('X').collect();
+ /// assert_eq!(v, ["lion", "", "tiger", "leopard"]);
+ ///
+ /// let v: Vec<&str> = "lion::tiger::leopard".split("::").collect();
+ /// assert_eq!(v, ["lion", "tiger", "leopard"]);
/// ```
///
- /// More complex patterns with a lambda:
+ /// More complex patterns with closures:
///
/// ```
/// let v: Vec<&str> = "abc1def2ghi".split(|c: char| c.is_numeric()).collect();
/// assert_eq!(v, ["abc", "def", "ghi"]);
///
- /// let v: Vec<&str> = "lionXXtigerXleopard".split('X').collect();
- /// assert_eq!(v, ["lion", "", "tiger", "leopard"]);
+ /// let v: Vec<&str> = "lionXtigerXleopard".split(char::is_uppercase).collect();
+ /// assert_eq!(v, ["lion", "tiger", "leopard"]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn split<'a, P: Pattern<'a>>(&'a self, pat: P) -> Split<'a, P> {
core_str::StrExt::split(&self[..], pat)
}
- /// An iterator over substrings of `self`, separated by characters matched
- /// by a pattern, returning most `count` items.
+ /// An iterator over substrings of `self`, separated by characters
+ /// matched by a pattern and yielded in reverse order.
///
- /// The pattern can be a simple `&str`, or a closure that determines
- /// the split.
+ /// The pattern can be a simple `&str`, `char`, or a closure that
+ /// determines the split.
+ /// Additional libraries might provide more complex patterns like
+ /// regular expressions.
///
- /// The last element returned, if any, will contain the remainder of the
- /// string.
+ /// # Iterator behavior
///
- /// # Examples
+ /// The returned iterator requires that the pattern supports a
+ /// reverse search,
+ /// and it will be double ended if a forward/reverse search yields
+ /// the same elements.
///
- /// Simple `&str` patterns:
+ /// For iterating from the front, `split()` can be used.
///
- /// ```
- /// let v: Vec<&str> = "Mary had a little lambda".splitn(2, ' ').collect();
- /// assert_eq!(v, ["Mary", "had a little lambda"]);
+ /// # Examples
///
- /// let v: Vec<&str> = "lionXXtigerXleopard".splitn(2, 'X').collect();
- /// assert_eq!(v, ["lion", "XtigerXleopard"]);
+ /// Simple patterns:
///
- /// let v: Vec<&str> = "abcXdef".splitn(1, 'X').collect();
- /// assert_eq!(v, ["abcXdef"]);
+ /// ```rust
+ /// let v: Vec<&str> = "Mary had a little lamb".rsplit(' ').collect();
+ /// assert_eq!(v, ["lamb", "little", "a", "had", "Mary"]);
///
- /// let v: Vec<&str> = "".splitn(1, 'X').collect();
+ /// let v: Vec<&str> = "".rsplit('X').collect();
/// assert_eq!(v, [""]);
- /// ```
///
- /// More complex patterns with a lambda:
+ /// let v: Vec<&str> = "lionXXtigerXleopard".rsplit('X').collect();
+ /// assert_eq!(v, ["leopard", "tiger", "", "lion"]);
///
+ /// let v: Vec<&str> = "lion::tiger::leopard".rsplit("::").collect();
+ /// assert_eq!(v, ["leopard", "tiger", "lion"]);
/// ```
- /// let v: Vec<&str> = "abc1def2ghi".splitn(2, |c: char| c.is_numeric()).collect();
- /// assert_eq!(v, ["abc", "def2ghi"]);
+ ///
+ /// More complex patterns with closures:
+ ///
+ /// ```rust
+ /// let v: Vec<&str> = "abc1def2ghi".rsplit(|c: char| c.is_numeric()).collect();
+ /// assert_eq!(v, ["ghi", "def", "abc"]);
+ ///
+ /// let v: Vec<&str> = "lionXtigerXleopard".rsplit(char::is_uppercase).collect();
+ /// assert_eq!(v, ["leopard", "tiger", "lion"]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- pub fn splitn<'a, P: Pattern<'a>>(&'a self, count: usize, pat: P) -> SplitN<'a, P> {
- core_str::StrExt::splitn(&self[..], count, pat)
+ pub fn rsplit<'a, P: Pattern<'a>>(&'a self, pat: P) -> RSplit<'a, P>
+ where P::Searcher: ReverseSearcher<'a>
+ {
+ core_str::StrExt::rsplit(&self[..], pat)
}
/// An iterator over substrings of `self`, separated by characters
/// matched by a pattern.
///
- /// Equivalent to `split`, except that the trailing substring is skipped if empty.
+ /// The pattern can be a simple `&str`, `char`, or a closure that
+ /// determines the split.
+ /// Additional libraries might provide more complex patterns
+ /// like regular expressions.
///
- /// The pattern can be a simple `&str`, or a closure that determines
- /// the split.
+ /// Equivalent to `split`, except that the trailing substring
+ /// is skipped if empty.
+ ///
+ /// This method can be used for string data that is _terminated_,
+ /// rather than _separated_ by a pattern.
+ ///
+ /// # Iterator behavior
+ ///
+ /// The returned iterator will be double ended if the pattern allows a
+ /// reverse search
+ /// and forward/reverse search yields the same elements. This is true
+ /// for, e.g., `char` but not for `&str`.
+ ///
+ /// If the pattern allows a reverse search but its results might differ
+ /// from a forward search, `rsplit_terminator()` can be used.
///
/// # Examples
///
- /// Simple `&str` patterns:
+ /// Simple patterns:
///
/// ```
/// let v: Vec<&str> = "A.B.".split_terminator('.').collect();
/// assert_eq!(v, ["A", "B"]);
///
- /// let v: Vec<&str> = "A..B..".split_terminator('.').collect();
+ /// let v: Vec<&str> = "A..B..".split_terminator(".").collect();
/// assert_eq!(v, ["A", "", "B", ""]);
/// ```
///
- /// More complex patterns with a lambda:
+ /// More complex patterns with closures:
///
/// ```
/// let v: Vec<&str> = "abc1def2ghi3".split_terminator(|c: char| c.is_numeric()).collect();
core_str::StrExt::split_terminator(&self[..], pat)
}
- /// An iterator over substrings of `self`, separated by a pattern,
- /// starting from the end of the string.
+ /// An iterator over substrings of `self`, separated by characters
+ /// matched by a pattern and yielded in reverse order.
+ ///
+ /// The pattern can be a simple `&str`, `char`, or a closure that
+ /// determines the split.
+ /// Additional libraries might provide more complex patterns like
+ /// regular expressions.
+ ///
+ /// Equivalent to `split`, except that the trailing substring is
+ /// skipped if empty.
+ ///
+ /// This method can be used for string data that is _terminated_,
+ /// rather than _separated_ by a pattern.
+ ///
+ /// # Iterator behavior
+ ///
+ /// The returned iterator requires that the pattern supports a
+ /// reverse search, and it will be double ended if a forward/reverse
+ /// search yields the same elements.
+ ///
+ /// For iterating from the front, `split_terminator()` can be used.
///
/// # Examples
///
/// Simple patterns:
///
/// ```
- /// let v: Vec<&str> = "Mary had a little lamb".rsplit(' ').collect();
- /// assert_eq!(v, ["lamb", "little", "a", "had", "Mary"]);
+ /// let v: Vec<&str> = "A.B.".rsplit_terminator('.').collect();
+ /// assert_eq!(v, ["B", "A"]);
///
- /// let v: Vec<&str> = "lion::tiger::leopard".rsplit("::").collect();
- /// assert_eq!(v, ["leopard", "tiger", "lion"]);
+ /// let v: Vec<&str> = "A..B..".rsplit_terminator(".").collect();
+ /// assert_eq!(v, ["", "B", "", "A"]);
/// ```
///
- /// More complex patterns with a lambda:
+ /// More complex patterns with closures:
///
/// ```
- /// let v: Vec<&str> = "abc1def2ghi".rsplit(|c: char| c.is_numeric()).collect();
+ /// let v: Vec<&str> = "abc1def2ghi3".rsplit_terminator(|c: char| c.is_numeric()).collect();
/// assert_eq!(v, ["ghi", "def", "abc"]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- pub fn rsplit<'a, P: Pattern<'a>>(&'a self, pat: P) -> RSplit<'a, P>
+ pub fn rsplit_terminator<'a, P: Pattern<'a>>(&'a self, pat: P) -> RSplitTerminator<'a, P>
where P::Searcher: ReverseSearcher<'a>
{
- core_str::StrExt::rsplit(&self[..], pat)
+ core_str::StrExt::rsplit_terminator(&self[..], pat)
+ }
+
+ /// An iterator over substrings of `self`, separated by a pattern,
+ /// restricted to returning
+ /// at most `count` items.
+ ///
+ /// The last element returned, if any, will contain the remainder of the
+ /// string.
+ /// The pattern can be a simple `&str`, `char`, or a closure that
+ /// determines the split.
+ /// Additional libraries might provide more complex patterns like
+ /// regular expressions.
+ ///
+ /// # Iterator behavior
+ ///
+ /// The returned iterator will not be double ended, because it is
+ /// not efficient to support.
+ ///
+ /// If the pattern allows a reverse search, `rsplitn()` can be used.
+ ///
+ /// # Examples
+ ///
+ /// Simple patterns:
+ ///
+ /// ```
+ /// let v: Vec<&str> = "Mary had a little lambda".splitn(3, ' ').collect();
+ /// assert_eq!(v, ["Mary", "had", "a little lambda"]);
+ ///
+ /// let v: Vec<&str> = "lionXXtigerXleopard".splitn(3, "X").collect();
+ /// assert_eq!(v, ["lion", "", "tigerXleopard"]);
+ ///
+ /// let v: Vec<&str> = "abcXdef".splitn(1, 'X').collect();
+ /// assert_eq!(v, ["abcXdef"]);
+ ///
+ /// let v: Vec<&str> = "".splitn(1, 'X').collect();
+ /// assert_eq!(v, [""]);
+ /// ```
+ ///
+ /// More complex patterns with closures:
+ ///
+ /// ```
+ /// let v: Vec<&str> = "abc1def2ghi".splitn(2, |c: char| c.is_numeric()).collect();
+ /// assert_eq!(v, ["abc", "def2ghi"]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn splitn<'a, P: Pattern<'a>>(&'a self, count: usize, pat: P) -> SplitN<'a, P> {
+ core_str::StrExt::splitn(&self[..], count, pat)
}
/// An iterator over substrings of `self`, separated by a pattern,
/// The last element returned, if any, will contain the remainder of the
/// string.
///
+ /// The pattern can be a simple `&str`, `char`, or a closure that
+ /// determines the split.
+ /// Additional libraries might provide more complex patterns like
+ /// regular expressions.
+ ///
+ /// # Iterator behavior
+ ///
+ /// The returned iterator will not be double ended, because it is not
+ /// efficient to support.
+ ///
+ /// `splitn()` can be used for splitting from the front.
+ ///
/// # Examples
///
/// Simple patterns:
/// let v: Vec<&str> = "Mary had a little lamb".rsplitn(3, ' ').collect();
/// assert_eq!(v, ["lamb", "little", "Mary had a"]);
///
+ /// let v: Vec<&str> = "lionXXtigerXleopard".rsplitn(3, 'X').collect();
+ /// assert_eq!(v, ["leopard", "tiger", "lionX"]);
+ ///
/// let v: Vec<&str> = "lion::tiger::leopard".rsplitn(2, "::").collect();
/// assert_eq!(v, ["leopard", "lion::tiger"]);
/// ```
///
- /// More complex patterns with a lambda:
+ /// More complex patterns with closures:
///
/// ```
/// let v: Vec<&str> = "abc1def2ghi".rsplitn(2, |c: char| c.is_numeric()).collect();
core_str::StrExt::rsplitn(&self[..], count, pat)
}
- /// An iterator over the start and end indices of the disjoint matches of a `&str` within
- /// `self`.
+ /// An iterator over the matches of a pattern within `self`.
+ ///
+ /// The pattern can be a simple `&str`, `char`, or a closure that
+ /// determines the split.
+ /// Additional libraries might provide more complex patterns like
+ /// regular expressions.
+ ///
+ /// # Iterator behavior
+ ///
+ /// The returned iterator will be double ended if the pattern allows
+ /// a reverse search
+ /// and forward/reverse search yields the same elements. This is true
+ /// for, e.g., `char` but not
+ /// for `&str`.
+ ///
+ /// If the pattern allows a reverse search but its results might differ
+ /// from a forward search, `rmatches()` can be used.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #![feature(collections)]
+ /// let v: Vec<&str> = "abcXXXabcYYYabc".matches("abc").collect();
+ /// assert_eq!(v, ["abc", "abc", "abc"]);
+ ///
+ /// let v: Vec<&str> = "1abc2abc3".matches(|c: char| c.is_numeric()).collect();
+ /// assert_eq!(v, ["1", "2", "3"]);
+ /// ```
+ #[unstable(feature = "collections",
+ reason = "method got recently added")]
+ pub fn matches<'a, P: Pattern<'a>>(&'a self, pat: P) -> Matches<'a, P> {
+ core_str::StrExt::matches(&self[..], pat)
+ }
+
+ /// An iterator over the matches of a pattern within `self`, yielded in
+ /// reverse order.
+ ///
+ /// The pattern can be a simple `&str`, `char`, or a closure that
+ /// determines the split.
+ /// Additional libraries might provide more complex patterns like
+ /// regular expressions.
+ ///
+ /// # Iterator behavior
+ ///
+ /// The returned iterator requires that the pattern supports a
+ /// reverse search,
+ /// and it will be double ended if a forward/reverse search yields
+ /// the same elements.
+ ///
+ /// For iterating from the front, `matches()` can be used.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #![feature(collections)]
+ /// let v: Vec<&str> = "abcXXXabcYYYabc".rmatches("abc").collect();
+ /// assert_eq!(v, ["abc", "abc", "abc"]);
+ ///
+ /// let v: Vec<&str> = "1abc2abc3".rmatches(|c: char| c.is_numeric()).collect();
+ /// assert_eq!(v, ["3", "2", "1"]);
+ /// ```
+ #[unstable(feature = "collections",
+ reason = "method got recently added")]
+ pub fn rmatches<'a, P: Pattern<'a>>(&'a self, pat: P) -> RMatches<'a, P>
+ where P::Searcher: ReverseSearcher<'a>
+ {
+ core_str::StrExt::rmatches(&self[..], pat)
+ }
+
+ /// An iterator over the start and end indices of the disjoint matches
+ /// of a pattern within `self`.
///
- /// That is, each returned value `(start, end)` satisfies `self.slice(start, end) == sep`. For
- /// matches of `sep` within `self` that overlap, only the indices corresponding to the first
+ /// For matches of `pat` within `self` that overlap, only the indices
+ /// corresponding to the first
/// match are returned.
///
+ /// The pattern can be a simple `&str`, `char`, or a closure that
+ /// determines
+ /// the split.
+ /// Additional libraries might provide more complex patterns like
+ /// regular expressions.
+ ///
+ /// # Iterator behavior
+ ///
+ /// The returned iterator will be double ended if the pattern allows a
+ /// reverse search
+ /// and forward/reverse search yields the same elements. This is true for,
+ /// e.g., `char` but not
+ /// for `&str`.
+ ///
+ /// If the pattern allows a reverse search but its results might differ
+ /// from a forward search, `rmatch_indices()` can be used.
+ ///
/// # Examples
///
/// ```
/// # #![feature(collections)]
/// let v: Vec<(usize, usize)> = "abcXXXabcYYYabc".match_indices("abc").collect();
- /// assert_eq!(v, [(0,3), (6,9), (12,15)]);
+ /// assert_eq!(v, [(0, 3), (6, 9), (12, 15)]);
///
/// let v: Vec<(usize, usize)> = "1abcabc2".match_indices("abc").collect();
- /// assert_eq!(v, [(1,4), (4,7)]);
+ /// assert_eq!(v, [(1, 4), (4, 7)]);
///
/// let v: Vec<(usize, usize)> = "ababa".match_indices("aba").collect();
/// assert_eq!(v, [(0, 3)]); // only the first `aba`
/// ```
#[unstable(feature = "collections",
reason = "might have its iterator type changed")]
- // NB: Right now MatchIndices yields `(usize, usize)`,
- // but it would be more consistent and useful to return `(usize, &str)`
+ // NB: Right now MatchIndices yields `(usize, usize)`, but it would
+ // be more consistent with `matches` and `char_indices` to return `(usize, &str)`
pub fn match_indices<'a, P: Pattern<'a>>(&'a self, pat: P) -> MatchIndices<'a, P> {
core_str::StrExt::match_indices(&self[..], pat)
}
+ /// An iterator over the start and end indices of the disjoint matches of
+ /// a pattern within
+ /// `self`, yielded in reverse order.
+ ///
+ /// For matches of `pat` within `self` that overlap, only the indices
+ /// corresponding to the last
+ /// match are returned.
+ ///
+ /// The pattern can be a simple `&str`, `char`, or a closure that
+ /// determines
+ /// the split.
+ /// Additional libraries might provide more complex patterns like
+ /// regular expressions.
+ ///
+ /// # Iterator behavior
+ ///
+ /// The returned iterator requires that the pattern supports a
+ /// reverse search,
+ /// and it will be double ended if a forward/reverse search yields
+ /// the same elements.
+ ///
+ /// For iterating from the front, `match_indices()` can be used.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #![feature(collections)]
+ /// let v: Vec<(usize, usize)> = "abcXXXabcYYYabc".rmatch_indices("abc").collect();
+ /// assert_eq!(v, [(12, 15), (6, 9), (0, 3)]);
+ ///
+ /// let v: Vec<(usize, usize)> = "1abcabc2".rmatch_indices("abc").collect();
+ /// assert_eq!(v, [(4, 7), (1, 4)]);
+ ///
+ /// let v: Vec<(usize, usize)> = "ababa".rmatch_indices("aba").collect();
+ /// assert_eq!(v, [(2, 5)]); // only the last `aba`
+ /// ```
+ #[unstable(feature = "collections",
+ reason = "might have its iterator type changed")]
+ // NB: Right now RMatchIndices yields `(usize, usize)`, but it would
+ // be more consistent with `rmatches` and `char_indices` to return `(usize, &str)`
+ pub fn rmatch_indices<'a, P: Pattern<'a>>(&'a self, pat: P) -> RMatchIndices<'a, P>
+ where P::Searcher: ReverseSearcher<'a>
+ {
+ core_str::StrExt::rmatch_indices(&self[..], pat)
+ }
+
/// An iterator over the lines of a string, separated by `\n`.
///
/// This does not include the empty string after a trailing `\n`.
core_str::StrExt::lines(&self[..])
}
- /// An iterator over the lines of a string, separated by either `\n` or `\r\n`.
+ /// An iterator over the lines of a string, separated by either
+ /// `\n` or `\r\n`.
///
/// As with `.lines()`, this does not include an empty trailing line.
///
///
/// # Unsafety
///
- /// Caller must check both UTF-8 character boundaries and the boundaries of the entire slice as
+ /// Caller must check both UTF-8 character boundaries and the boundaries
+ /// of the entire slice as
/// well.
///
/// # Examples
core_str::StrExt::ends_with(&self[..], pat)
}
- /// Returns a string with all pre- and suffixes that match a pattern repeatedly removed.
+ /// Returns a string with all pre- and suffixes that match a pattern
+ /// repeatedly removed.
///
- /// The pattern can be a simple `&str`, or a closure that determines the split.
+ /// The pattern can be a simple `char`, or a closure that determines
+ /// the split.
///
/// # Examples
///
- /// Simple `&str` patterns:
+ /// Simple patterns:
///
/// ```
/// assert_eq!("11foo1bar11".trim_matches('1'), "foo1bar");
/// assert_eq!("12foo1bar12".trim_matches(x), "foo1bar");
/// ```
///
- /// More complex patterns with a lambda:
+ /// More complex patterns with closures:
///
/// ```
/// assert_eq!("123foo1bar123".trim_matches(|c: char| c.is_numeric()), "foo1bar");
core_str::StrExt::trim_matches(&self[..], pat)
}
- /// Returns a string with all prefixes that match a pattern repeatedly removed.
+ /// Returns a string with all prefixes that match a pattern
+ /// repeatedly removed.
///
- /// The pattern can be a simple `&str`, or a closure that determines the split.
+ /// The pattern can be a simple `&str`, `char`, or a closure that
+ /// determines the split.
///
/// # Examples
///
- /// Simple `&str` patterns:
+ /// Simple patterns:
///
/// ```
/// assert_eq!("11foo1bar11".trim_left_matches('1'), "foo1bar11");
/// assert_eq!("12foo1bar12".trim_left_matches(x), "foo1bar12");
/// ```
///
- /// More complex patterns with a lambda:
+ /// More complex patterns with closures:
///
/// ```
/// assert_eq!("123foo1bar123".trim_left_matches(|c: char| c.is_numeric()), "foo1bar123");
core_str::StrExt::trim_left_matches(&self[..], pat)
}
- /// Returns a string with all suffixes that match a pattern repeatedly removed.
+ /// Returns a string with all suffixes that match a pattern
+ /// repeatedly removed.
///
- /// The pattern can be a simple `&str`, or a closure that determines the split.
+ /// The pattern can be a simple `&str`, `char`, or a closure that
+ /// determines the split.
///
/// # Examples
///
- /// Simple `&str` patterns:
+ /// Simple patterns:
///
/// ```
/// assert_eq!("11foo1bar11".trim_right_matches('1'), "11foo1bar");
/// assert_eq!("12foo1bar12".trim_right_matches(x), "12foo1bar");
/// ```
///
- /// More complex patterns with a lambda:
+ /// More complex patterns with closures:
///
/// ```
/// assert_eq!("123foo1bar123".trim_right_matches(|c: char| c.is_numeric()), "123foo1bar");
core_str::StrExt::trim_right_matches(&self[..], pat)
}
- /// Check that `index`-th byte lies at the start and/or end of a UTF-8 code point sequence.
+ /// Check that `index`-th byte lies at the start and/or end of a
+ /// UTF-8 code point sequence.
///
- /// The start and end of the string (when `index == self.len()`) are considered to be
+ /// The start and end of the string (when `index == self.len()`) are
+ /// considered to be
/// boundaries.
///
/// # Panics
///
/// # Examples
///
- /// This example manually iterates through the characters of a string; this should normally be
+ /// This example manually iterates through the characters of a string;
+ /// this should normally be
/// done by `.chars()` or `.char_indices()`.
///
/// ```
///
/// # Examples
///
- /// This example manually iterates through the characters of a string; this should normally be
+ /// This example manually iterates through the characters of a string;
+ /// this should normally be
/// done by `.chars().rev()` or `.char_indices()`.
///
/// ```
core_str::StrExt::char_at(&self[..], i)
}
- /// Given a byte position, return the `char` at that position, counting from the end.
+ /// Given a byte position, return the `char` at that position, counting
+ /// from the end.
///
/// # Panics
///
core_str::StrExt::as_bytes(&self[..])
}
- /// Returns the byte index of the first character of `self` that matches the pattern, if it
+ /// Returns the byte index of the first character of `self` that matches
+ /// the pattern, if it
/// exists.
///
/// Returns `None` if it doesn't exist.
///
- /// The pattern can be a simple `&str`, or a closure that determines the split.
+ /// The pattern can be a simple `&str`, `char`, or a closure that
+ /// determines the
+ /// split.
///
/// # Examples
///
- /// Simple `&str` patterns:
+ /// Simple patterns:
///
/// ```
/// let s = "Löwe 老虎 Léopard";
///
/// assert_eq!(s.find('L'), Some(0));
/// assert_eq!(s.find('é'), Some(14));
+ /// assert_eq!(s.find("Léopard"), Some(13));
///
/// ```
///
- /// More complex patterns with a lambda:
+ /// More complex patterns with closures:
///
/// ```
/// let s = "Löwe 老虎 Léopard";
///
/// assert_eq!(s.find(|c: char| c.is_whitespace()), Some(5));
+ /// assert_eq!(s.find(char::is_lowercase), Some(1));
/// ```
///
/// Not finding the pattern:
core_str::StrExt::find(&self[..], pat)
}
- /// Returns the byte index of the last character of `self` that matches the pattern, if it
+ /// Returns the byte index of the last character of `self` that
+ /// matches the pattern, if it
/// exists.
///
/// Returns `None` if it doesn't exist.
///
- /// The pattern can be a simple `&str`, or a closure that determines the split.
+ /// The pattern can be a simple `&str`, `char`,
+ /// or a closure that determines the split.
///
/// # Examples
///
- /// Simple `&str` patterns:
+ /// Simple patterns:
///
/// ```
/// let s = "Löwe 老虎 Léopard";
/// assert_eq!(s.rfind('é'), Some(14));
/// ```
///
- /// More complex patterns with a lambda:
+ /// More complex patterns with closures:
///
/// ```
/// let s = "Löwe 老虎 Léopard";
///
/// assert_eq!(s.rfind(|c: char| c.is_whitespace()), Some(12));
+ /// assert_eq!(s.rfind(char::is_lowercase), Some(20));
/// ```
///
/// Not finding the pattern:
/// Retrieves the first character from a `&str` and returns it.
///
- /// This does not allocate a new string; instead, it returns a slice that points one character
+ /// This does not allocate a new string; instead, it returns a slice that
+ /// points one character
/// beyond the character that was shifted.
///
/// If the slice does not contain any characters, None is returned instead.
core_str::StrExt::slice_shift_char(&self[..])
}
- /// Returns the byte offset of an inner slice relative to an enclosing outer slice.
+ /// Returns the byte offset of an inner slice relative to an enclosing
+ /// outer slice.
///
/// # Panics
///
/// Return an unsafe pointer to the `&str`'s buffer.
///
- /// The caller must ensure that the string outlives this pointer, and that it is not
+ /// The caller must ensure that the string outlives this pointer, and
+ /// that it is not
/// reallocated (e.g. by pushing to the string).
///
/// # Examples
///
/// [graphemes]: http://www.unicode.org/reports/tr29/#Grapheme_Cluster_Boundaries
///
- /// If `is_extended` is true, the iterator is over the *extended grapheme clusters*;
+ /// If `is_extended` is true, the iterator is over the
+ /// *extended grapheme clusters*;
/// otherwise, the iterator is over the *legacy grapheme clusters*.
/// [UAX#29](http://www.unicode.org/reports/tr29/#Grapheme_Cluster_Boundaries)
/// recommends extended grapheme cluster boundaries for general processing.
UnicodeStr::graphemes(&self[..], is_extended)
}
- /// Returns an iterator over the grapheme clusters of `self` and their byte offsets. See
+ /// Returns an iterator over the grapheme clusters of `self` and their
+ /// byte offsets. See
/// `graphemes()` for more information.
///
/// # Examples
/// An iterator over the non-empty words of `self`.
///
- /// A 'word' is a subsequence separated by any sequence of whitespace. Sequences of whitespace
+ /// A 'word' is a subsequence separated by any sequence of whitespace.
+ /// Sequences of whitespace
/// are collapsed, so empty "words" are not included.
///
/// # Examples
///
/// Control characters have zero width.
///
- /// `is_cjk` determines behavior for characters in the Ambiguous category: if `is_cjk` is
- /// `true`, these are 2 columns wide; otherwise, they are 1. In CJK locales, `is_cjk` should be
+    /// `is_cjk` determines behavior for characters in the Ambiguous
+    /// category: if `is_cjk` is `true`, these are 2 columns wide;
+    /// otherwise, they are 1. In CJK locales, `is_cjk` should be
/// `true`, else it should be `false`.
- /// [Unicode Standard Annex #11](http://www.unicode.org/reports/tr11/) recommends that these
- /// characters be treated as 1 column (i.e., `is_cjk = false`) if the locale is unknown.
+    /// [Unicode Standard Annex #11](http://www.unicode.org/reports/tr11/)
+    /// recommends that these characters be treated as 1 column
+    /// (i.e., `is_cjk = false`) if the locale is unknown.
#[unstable(feature = "unicode",
reason = "this functionality may only be provided by libunicode")]
pub fn width(&self, is_cjk: bool) -> usize {
use core::ops::{self, Deref, Add, Index};
use core::ptr;
use core::slice;
-use core::str::Pattern;
+use core::str::pattern::Pattern;
use unicode::str as unicode_str;
use unicode::str::Utf16Item;
#[stable(feature = "rust1", since = "1.0.0")]
impl PartialEq for String {
#[inline]
- fn eq(&self, other: &String) -> bool { PartialEq::eq(&**self, &**other) }
+ fn eq(&self, other: &String) -> bool { PartialEq::eq(&self[..], &other[..]) }
#[inline]
- fn ne(&self, other: &String) -> bool { PartialEq::ne(&**self, &**other) }
+ fn ne(&self, other: &String) -> bool { PartialEq::ne(&self[..], &other[..]) }
}
macro_rules! impl_eq {
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> PartialEq<$rhs> for $lhs {
#[inline]
- fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&**self, &**other) }
+ fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&self[..], &other[..]) }
#[inline]
- fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&**self, &**other) }
+ fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&self[..], &other[..]) }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a> PartialEq<$lhs> for $rhs {
#[inline]
- fn eq(&self, other: &$lhs) -> bool { PartialEq::eq(&**self, &**other) }
+ fn eq(&self, other: &$lhs) -> bool { PartialEq::eq(&self[..], &other[..]) }
#[inline]
- fn ne(&self, other: &$lhs) -> bool { PartialEq::ne(&**self, &**other) }
+ fn ne(&self, other: &$lhs) -> bool { PartialEq::ne(&self[..], &other[..]) }
}
}
}
+impl_eq! { String, str }
impl_eq! { String, &'a str }
+impl_eq! { Cow<'a, str>, str }
impl_eq! { Cow<'a, str>, String }
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, 'b> PartialEq<&'b str> for Cow<'a, str> {
#[inline]
- fn eq(&self, other: &&'b str) -> bool { PartialEq::eq(&**self, &**other) }
+ fn eq(&self, other: &&'b str) -> bool { PartialEq::eq(&self[..], &other[..]) }
#[inline]
- fn ne(&self, other: &&'b str) -> bool { PartialEq::ne(&**self, &**other) }
+ fn ne(&self, other: &&'b str) -> bool { PartialEq::ne(&self[..], &other[..]) }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, 'b> PartialEq<Cow<'a, str>> for &'b str {
#[inline]
- fn eq(&self, other: &Cow<'a, str>) -> bool { PartialEq::eq(&**self, &**other) }
+ fn eq(&self, other: &Cow<'a, str>) -> bool { PartialEq::eq(&self[..], &other[..]) }
#[inline]
- fn ne(&self, other: &Cow<'a, str>) -> bool { PartialEq::ne(&**self, &**other) }
+ fn ne(&self, other: &Cow<'a, str>) -> bool { PartialEq::ne(&self[..], &other[..]) }
}
#[unstable(feature = "collections", reason = "waiting on Str stabilization")]
/// # Examples
///
/// ```
- /// let mut vec: Vec<_> = Vec::with_capacity(10);
+ /// let mut vec = Vec::with_capacity(10);
///
/// // The vector contains no items, even though it has capacity for more
/// assert_eq!(vec.len(), 0);
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
assert_eq!(from_utf8(xs), Err(Utf8Error::TooShort));
}
+#[test]
+fn test_pattern_deref_forward() {
+    // `contains` accepts &str, &&str and &String alike: the Pattern
+    // impls deref-forward to the underlying str pattern.
+    let data = "aabcdaa";
+    assert!(data.contains("bcd"));
+    assert!(data.contains(&"bcd"));
+    assert!(data.contains(&"bcd".to_string()));
+}
+
+#[test]
+fn test_empty_match_indices() {
+    // The empty pattern matches at every char boundary, including both
+    // ends; indices are byte offsets (ä is 2 bytes, 中 is 3).
+    let data = "aä中!";
+    let vec: Vec<_> = data.match_indices("").collect();
+    assert_eq!(vec, [(0, 0), (1, 1), (3, 3), (6, 6), (7, 7)]);
+}
+
+#[test]
+fn test_bool_from_str() {
+    // bool's FromStr accepts exactly "true"/"false"; anything else errors.
+    assert_eq!("true".parse().ok(), Some(true));
+    assert_eq!("false".parse().ok(), Some(false));
+    assert_eq!("not even a boolean".parse::<bool>().ok(), None);
+}
+
+// Asserts that `s` contains every one of its own substrings (plus "").
+// O(len^2) substrings, so keep inputs short. Callers pass ASCII-only
+// strings, so the byte-range slicing below never splits a char.
+fn check_contains_all_substrings(s: &str) {
+    assert!(s.contains(""));
+    for i in 0..s.len() {
+        for j in i+1..s.len() + 1 {
+            assert!(s.contains(&s[i..j]));
+        }
+    }
+}
+
+#[test]
+fn strslice_issue_16589() {
+    assert!("bananas".contains("nana"));
+
+    // prior to the fix for #16589, x.contains("abcdabcd") returned false
+    // test all substrings for good measure
+    check_contains_all_substrings("012345678901234567890123456789bcdabcdabcd");
+}
+
+#[test]
+fn strslice_issue_16878() {
+    // Regression tests for #16878: the substring searcher must not
+    // report a match when the needle only partially overlaps.
+    assert!(!"1234567ah012345678901ah".contains("hah"));
+    assert!(!"00abc01234567890123456789abc".contains("bcabc"));
+}
+
+
+#[test]
+fn test_strslice_contains() {
+    let x = "There are moments, Jeeves, when one asks oneself, 'Do trousers matter?'";
+    check_contains_all_substrings(x);
+}
+
+#[test]
+fn test_rsplitn_char_iterator() {
+    // rsplitn yields pieces back-to-front, so each collected Vec is
+    // reversed before comparing; the count (4) caps the number of
+    // pieces, leaving the unsplit remainder in the last-yielded piece.
+    let data = "\nMäry häd ä little lämb\nLittle lämb\n";
+
+    let mut split: Vec<&str> = data.rsplitn(4, ' ').collect();
+    split.reverse();
+    assert_eq!(split, ["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]);
+
+    // A char-equality closure must behave identically to the char pattern.
+    let mut split: Vec<&str> = data.rsplitn(4, |c: char| c == ' ').collect();
+    split.reverse();
+    assert_eq!(split, ["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]);
+
+    // Unicode
+    let mut split: Vec<&str> = data.rsplitn(4, 'ä').collect();
+    split.reverse();
+    assert_eq!(split, ["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]);
+
+    let mut split: Vec<&str> = data.rsplitn(4, |c: char| c == 'ä').collect();
+    split.reverse();
+    assert_eq!(split, ["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]);
+}
+
+#[test]
+fn test_split_char_iterator() {
+    // Exercises `split` with a char, an equivalent closure, and a
+    // multi-byte Unicode char; each variant is also driven via `rev()`
+    // and re-reversed, which must reproduce the forward order.
+    let data = "\nMäry häd ä little lämb\nLittle lämb\n";
+
+    let split: Vec<&str> = data.split(' ').collect();
+    assert_eq!(split, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
+
+    let mut rsplit: Vec<&str> = data.split(' ').rev().collect();
+    rsplit.reverse();
+    assert_eq!(rsplit, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
+
+    let split: Vec<&str> = data.split(|c: char| c == ' ').collect();
+    assert_eq!(split, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
+
+    let mut rsplit: Vec<&str> = data.split(|c: char| c == ' ').rev().collect();
+    rsplit.reverse();
+    assert_eq!(rsplit, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
+
+    // Unicode
+    let split: Vec<&str> = data.split('ä').collect();
+    assert_eq!(split, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
+
+    let mut rsplit: Vec<&str> = data.split('ä').rev().collect();
+    rsplit.reverse();
+    assert_eq!(rsplit, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
+
+    let split: Vec<&str> = data.split(|c: char| c == 'ä').collect();
+    assert_eq!(split, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
+
+    let mut rsplit: Vec<&str> = data.split(|c: char| c == 'ä').rev().collect();
+    rsplit.reverse();
+    assert_eq!(rsplit, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
+}
+
+#[test]
+fn test_rev_split_char_iterator_no_trailing() {
+    // `split` keeps the empty piece after a trailing separator;
+    // `split_terminator` drops it. Both are driven in reverse here and
+    // re-reversed so the expected values read left-to-right.
+    let data = "\nMäry häd ä little lämb\nLittle lämb\n";
+
+    let mut split: Vec<&str> = data.split('\n').rev().collect();
+    split.reverse();
+    assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb", ""]);
+
+    let mut split: Vec<&str> = data.split_terminator('\n').rev().collect();
+    split.reverse();
+    assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb"]);
+}
+
+#[test]
+fn test_utf16_code_units() {
+    use unicode::str::Utf16Encoder;
+    // 'é' (U+00E9) is a single code unit; U+1F4A9 is outside the BMP
+    // and encodes as the surrogate pair (0xD83D, 0xDCA9).
+    assert_eq!(Utf16Encoder::new(vec!['é', '\u{1F4A9}'].into_iter()).collect::<Vec<u16>>(),
+               [0xE9, 0xD83D, 0xDCA9])
+}
+
+#[test]
+fn starts_with_in_unicode() {
+    // A multi-byte leading char ('├') must not be misread as matching
+    // an ASCII prefix.
+    assert!(!"├── Cargo.toml".starts_with("# "));
+}
+
+#[test]
+fn starts_short_long() {
+    // A needle longer than the haystack never matches; the empty
+    // needle always does, regardless of multi-byte content.
+    assert!(!"".starts_with("##"));
+    assert!(!"##".starts_with("####"));
+    assert!("####".starts_with("##"));
+    assert!(!"##ä".starts_with("####"));
+    assert!("####ä".starts_with("##"));
+    assert!(!"##".starts_with("####ä"));
+    assert!("##ä##".starts_with("##ä"));
+
+    assert!("".starts_with(""));
+    assert!("ä".starts_with(""));
+    assert!("#ä".starts_with(""));
+    assert!("##ä".starts_with(""));
+    assert!("ä###".starts_with(""));
+    assert!("#ä##".starts_with(""));
+    assert!("##ä#".starts_with(""));
+}
+
+#[test]
+fn contains_weird_cases() {
+    // Haystack shorter (in bytes) than a 4-byte needle char.
+    assert!("* \t".contains(' '));
+    assert!(!"* \t".contains('?'));
+    assert!(!"* \t".contains('\u{1F4A9}'));
+}
+
+#[test]
+fn trim_ws() {
+    // Whitespace-only input must trim to "" from either side and both.
+    assert_eq!(" \t a \t ".trim_left_matches(|c: char| c.is_whitespace()),
+               "a \t ");
+    assert_eq!(" \t a \t ".trim_right_matches(|c: char| c.is_whitespace()),
+               " \t a");
+    assert_eq!(" \t a \t ".trim_matches(|c: char| c.is_whitespace()),
+               "a");
+    assert_eq!(" \t \t ".trim_left_matches(|c: char| c.is_whitespace()),
+               "");
+    assert_eq!(" \t \t ".trim_right_matches(|c: char| c.is_whitespace()),
+               "");
+    assert_eq!(" \t \t ".trim_matches(|c: char| c.is_whitespace()),
+               "");
+}
+
+mod pattern {
+    use std::str::pattern::Pattern;
+    use std::str::pattern::{Searcher, ReverseSearcher};
+    use std::str::pattern::SearchStep::{self, Match, Reject, Done};
+
+    // Expands to a module `$name` with two tests that drive pattern
+    // `$p` over haystack `$h` forwards (`fwd`) and backwards (`bwd`),
+    // comparing the emitted SearchStep sequence with the expected list.
+    macro_rules! make_test {
+        ($name:ident, $p:expr, $h:expr, [$($e:expr,)*]) => {
+            mod $name {
+                use std::str::pattern::SearchStep::{Match, Reject};
+                use super::cmp_search_to_vec;
+                #[test]
+                fn fwd() {
+                    cmp_search_to_vec(false, $p, $h, vec![$($e),*]);
+                }
+                #[test]
+                fn bwd() {
+                    cmp_search_to_vec(true, $p, $h, vec![$($e),*]);
+                }
+            }
+        }
+    }
+
+    // Collects every Match/Reject step of `pat`'s searcher over
+    // `haystack` (via next_back when `rev`, then re-reversed) and
+    // asserts the sequence equals `right` and tiles the haystack:
+    // consecutive ranges must abut and the last must end at len().
+    fn cmp_search_to_vec<'a, P: Pattern<'a>>(rev: bool, pat: P, haystack: &'a str,
+                                             right: Vec<SearchStep>)
+    where P::Searcher: ReverseSearcher<'a>
+    {
+        let mut searcher = pat.into_searcher(haystack);
+        let mut v = vec![];
+        loop {
+            match if !rev {searcher.next()} else {searcher.next_back()} {
+                Match(a, b) => v.push(Match(a, b)),
+                Reject(a, b) => v.push(Reject(a, b)),
+                Done => break,
+            }
+        }
+        if rev {
+            v.reverse();
+        }
+
+        let mut first_index = 0;
+        let mut err = None;
+
+        for (i, e) in right.iter().enumerate() {
+            match *e {
+                Match(a, b) | Reject(a, b)
+                if a <= b && a == first_index => {
+                    first_index = b;
+                }
+                _ => {
+                    err = Some(i);
+                    break;
+                }
+            }
+        }
+
+        if let Some(err) = err {
+            panic!("Input skipped range at {}", err);
+        }
+
+        if first_index != haystack.len() {
+            panic!("Did not cover whole input");
+        }
+
+        assert_eq!(v, right);
+    }
+
+    make_test!(str_searcher_ascii_haystack, "bb", "abbcbbd", [
+        Reject(0, 1),
+        Match (1, 3),
+        Reject(3, 4),
+        Match (4, 6),
+        Reject(6, 7),
+    ]);
+    make_test!(str_searcher_empty_needle_ascii_haystack, "", "abbcbbd", [
+        Match (0, 0),
+        Reject(0, 1),
+        Match (1, 1),
+        Reject(1, 2),
+        Match (2, 2),
+        Reject(2, 3),
+        Match (3, 3),
+        Reject(3, 4),
+        Match (4, 4),
+        Reject(4, 5),
+        Match (5, 5),
+        Reject(5, 6),
+        Match (6, 6),
+        Reject(6, 7),
+        Match (7, 7),
+    ]);
+    // NOTE(review): test names below fixed from "mulibyte" (typo) to
+    // "multibyte". Each '├'/'─' is 3 bytes, hence the 3-byte ranges.
+    make_test!(str_searcher_multibyte_haystack, " ", "├──", [
+        Reject(0, 3),
+        Reject(3, 6),
+        Reject(6, 9),
+    ]);
+    make_test!(str_searcher_empty_needle_multibyte_haystack, "", "├──", [
+        Match (0, 0),
+        Reject(0, 3),
+        Match (3, 3),
+        Reject(3, 6),
+        Match (6, 6),
+        Reject(6, 9),
+        Match (9, 9),
+    ]);
+    make_test!(str_searcher_empty_needle_empty_haystack, "", "", [
+        Match(0, 0),
+    ]);
+    make_test!(str_searcher_nonempty_needle_empty_haystack, "├", "", [
+    ]);
+    make_test!(char_searcher_ascii_haystack, 'b', "abbcbbd", [
+        Reject(0, 1),
+        Match (1, 2),
+        Match (2, 3),
+        Reject(3, 4),
+        Match (4, 5),
+        Match (5, 6),
+        Reject(6, 7),
+    ]);
+    make_test!(char_searcher_multibyte_haystack, ' ', "├──", [
+        Reject(0, 3),
+        Reject(3, 6),
+        Reject(6, 9),
+    ]);
+    make_test!(char_searcher_short_haystack, '\u{1F4A9}', "* \t", [
+        Reject(0, 1),
+        Reject(1, 2),
+        Reject(2, 3),
+    ]);
+
+}
+
+// Generates a #[test] fn `$name` that, for each `(args) -> [expected]`
+// row, collects `$fwd(args)` and asserts it equals the expected vec.
+// The two-expression form (`with $fwd, $bwd`) additionally collects
+// `$bwd(args)`, reverses it, and asserts it matches too — i.e. the
+// reverse iterator must agree with the forward one.
+macro_rules! generate_iterator_test {
+    {
+        $name:ident {
+            $(
+                ($($arg:expr),*) -> [$($t:tt)*];
+            )*
+        }
+        with $fwd:expr, $bwd:expr;
+    } => {
+        #[test]
+        fn $name() {
+            $(
+                {
+                    let res = vec![$($t)*];
+
+                    let fwd_vec: Vec<_> = ($fwd)($($arg),*).collect();
+                    assert_eq!(fwd_vec, res);
+
+                    let mut bwd_vec: Vec<_> = ($bwd)($($arg),*).collect();
+                    bwd_vec.reverse();
+                    assert_eq!(bwd_vec, res);
+                }
+            )*
+        }
+    };
+    {
+        $name:ident {
+            $(
+                ($($arg:expr),*) -> [$($t:tt)*];
+            )*
+        }
+        with $fwd:expr;
+    } => {
+        // Forward-only form, for iterators with no reverse counterpart.
+        #[test]
+        fn $name() {
+            $(
+                {
+                    let res = vec![$($t)*];
+
+                    let fwd_vec: Vec<_> = ($fwd)($($arg),*).collect();
+                    assert_eq!(fwd_vec, res);
+                }
+            )*
+        }
+    }
+}
+
+// split/rsplit and friends are double-ended: the reversed result,
+// re-reversed, must equal the forward result.
+generate_iterator_test! {
+    double_ended_split {
+        ("foo.bar.baz", '.') -> ["foo", "bar", "baz"];
+        ("foo::bar::baz", "::") -> ["foo", "bar", "baz"];
+    }
+    with str::split, str::rsplit;
+}
+
+generate_iterator_test! {
+    double_ended_split_terminator {
+        ("foo;bar;baz;", ';') -> ["foo", "bar", "baz"];
+    }
+    with str::split_terminator, str::rsplit_terminator;
+}
+
+generate_iterator_test! {
+    double_ended_matches {
+        ("a1b2c3", char::is_numeric) -> ["1", "2", "3"];
+    }
+    with str::matches, str::rmatches;
+}
+
+generate_iterator_test! {
+    double_ended_match_indices {
+        ("a1b2c3", char::is_numeric) -> [(1, 2), (3, 4), (5, 6)];
+    }
+    with str::match_indices, str::rmatch_indices;
+}
+
+// splitn/rsplitn are not double-ended (the count is one-sided), so only
+// the forward-only form of the macro applies.
+generate_iterator_test! {
+    not_double_ended_splitn {
+        ("foo::bar::baz", 2, "::") -> ["foo", "bar::baz"];
+    }
+    with str::splitn;
+}
+
+generate_iterator_test! {
+    not_double_ended_rsplitn {
+        ("foo::bar::baz", 2, "::") -> ["baz", "foo::bar"];
+    }
+    with str::rsplitn;
+}
+
mod bench {
use test::{Bencher, black_box};
assert!(haystack.contains(needle));
})
}
+
+    // Expands to a single #[bench] fn `$name` that binds `$str` to the
+    // identifier `$s` (black_boxed so the optimizer cannot constant-fold
+    // the input away) and times `$code` on each iteration.
+    macro_rules! make_test_inner {
+        ($s:ident, $code:expr, $name:ident, $str:expr) => {
+            #[bench]
+            fn $name(bencher: &mut Bencher) {
+                let mut $s = $str;
+                black_box(&mut $s);
+                bencher.iter(|| $code);
+            }
+        }
+    }
+
+    // Expands to a module `$name` containing one benchmark per input
+    // shape: short ASCII, short mixed-script, short 4-byte-char, and a
+    // long Lorem-Ipsum text, all running the same `$code` over `$s`.
+    macro_rules! make_test {
+        ($name:ident, $s:ident, $code:expr) => {
+            mod $name {
+                use test::Bencher;
+                use test::black_box;
+
+                // Short strings: 65 bytes each
+                make_test_inner!($s, $code, short_ascii,
+                    "Mary had a little lamb, Little lamb Mary had a littl lamb, lamb!");
+                make_test_inner!($s, $code, short_mixed,
+                    "ศไทย中华Việt Nam; Mary had a little lamb, Little lam!");
+                make_test_inner!($s, $code, short_pile_of_poo,
+                    "💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩!");
+                make_test_inner!($s, $code, long_lorem_ipsum,"\
+Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem sit amet dolor \
+ultricies condimentum. Praesent iaculis purus elit, ac malesuada quam malesuada in. Duis sed orci \
+eros. Suspendisse sit amet magna mollis, mollis nunc luctus, imperdiet mi. Integer fringilla non \
+sem ut lacinia. Fusce varius tortor a risus porttitor hendrerit. Morbi mauris dui, ultricies nec \
+tempus vel, gravida nec quam.
+
+In est dui, tincidunt sed tempus interdum, adipiscing laoreet ante. Etiam tempor, tellus quis \
+sagittis interdum, nulla purus mattis sem, quis auctor erat odio ac tellus. In nec nunc sit amet \
+diam volutpat molestie at sed ipsum. Vestibulum laoreet consequat vulputate. Integer accumsan \
+lorem ac dignissim placerat. Suspendisse convallis faucibus lorem. Aliquam erat volutpat. In vel \
+eleifend felis. Sed suscipit nulla lorem, sed mollis est sollicitudin et. Nam fermentum egestas \
+interdum. Curabitur ut nisi justo.
+
+Sed sollicitudin ipsum tellus, ut condimentum leo eleifend nec. Cras ut velit ante. Phasellus nec \
+mollis odio. Mauris molestie erat in arcu mattis, at aliquet dolor vehicula. Quisque malesuada \
+lectus sit amet nisi pretium, a condimentum ipsum porta. Morbi at dapibus diam. Praesent egestas \
+est sed risus elementum, eu rutrum metus ultrices. Etiam fermentum consectetur magna, id rutrum \
+felis accumsan a. Aliquam ut pellentesque libero. Sed mi nulla, lobortis eu tortor id, suscipit \
+ultricies neque. Morbi iaculis sit amet risus at iaculis. Praesent eget ligula quis turpis \
+feugiat suscipit vel non arcu. Interdum et malesuada fames ac ante ipsum primis in faucibus. \
+Aliquam sit amet placerat lorem.
+
+Cras a lacus vel ante posuere elementum. Nunc est leo, bibendum ut facilisis vel, bibendum at \
+mauris. Nullam adipiscing diam vel odio ornare, luctus adipiscing mi luctus. Nulla facilisi. \
+Mauris adipiscing bibendum neque, quis adipiscing lectus tempus et. Sed feugiat erat et nisl \
+lobortis pharetra. Donec vitae erat enim. Nullam sit amet felis et quam lacinia tincidunt. Aliquam \
+suscipit dapibus urna. Sed volutpat urna in magna pulvinar volutpat. Phasellus nec tellus ac diam \
+cursus accumsan.
+
+Nam lectus enim, dapibus non nisi tempor, consectetur convallis massa. Maecenas eleifend dictum \
+feugiat. Etiam quis mauris vel risus luctus mattis a a nunc. Nullam orci quam, imperdiet id \
+vehicula in, porttitor ut nibh. Duis sagittis adipiscing nisl vitae congue. Donec mollis risus eu \
+leo suscipit, varius porttitor nulla porta. Pellentesque ut sem nec nisi euismod vehicula. Nulla \
+malesuada sollicitudin quam eu fermentum!");
+            }
+        }
+    }
+
+    // One benchmark family per str operation; each invocation expands
+    // (via make_test!) into the four input-shape variants above.
+    make_test!(chars_count, s, s.chars().count());
+
+    make_test!(contains_bang_str, s, s.contains("!"));
+    make_test!(contains_bang_char, s, s.contains('!'));
+
+    make_test!(match_indices_a_str, s, s.match_indices("a").count());
+
+    make_test!(split_a_str, s, s.split("a").count());
+
+    make_test!(trim_ascii_char, s, {
+        use std::ascii::AsciiExt;
+        s.trim_matches(|c: char| c.is_ascii())
+    });
+    make_test!(trim_left_ascii_char, s, {
+        use std::ascii::AsciiExt;
+        s.trim_left_matches(|c: char| c.is_ascii())
+    });
+    make_test!(trim_right_ascii_char, s, {
+        use std::ascii::AsciiExt;
+        s.trim_right_matches(|c: char| c.is_ascii())
+    });
+
+    // '_' does not occur in any of the inputs: worst-case full scans.
+    make_test!(find_underscore_char, s, s.find('_'));
+    make_test!(rfind_underscore_char, s, s.rfind('_'));
+    make_test!(find_underscore_str, s, s.find("_"));
+
+    // U+1F4A4 also never occurs: multi-byte needle, full-scan miss.
+    make_test!(find_zzz_char, s, s.find('\u{1F4A4}'));
+    make_test!(rfind_zzz_char, s, s.rfind('\u{1F4A4}'));
+    make_test!(find_zzz_str, s, s.find("\u{1F4A4}"));
+
+    make_test!(split_space_char, s, s.split(' ').count());
+    make_test!(split_terminator_space_char, s, s.split_terminator(' ').count());
+
+    make_test!(splitn_space_char, s, s.splitn(10, ' ').count());
+    make_test!(rsplitn_space_char, s, s.rsplitn(10, ' ').count());
+
+    make_test!(split_space_str, s, s.split(" ").count());
+    make_test!(split_ad_str, s, s.split("ad").count());
}
/// A cheap, reference-to-reference conversion.
#[stable(feature = "rust1", since = "1.0.0")]
pub trait AsRef<T: ?Sized> {
- /// Perform the conversion.
+ /// Performs the conversion.
#[stable(feature = "rust1", since = "1.0.0")]
fn as_ref(&self) -> &T;
}
/// A cheap, mutable reference-to-mutable reference conversion.
#[stable(feature = "rust1", since = "1.0.0")]
pub trait AsMut<T: ?Sized> {
- /// Perform the conversion.
+ /// Performs the conversion.
#[stable(feature = "rust1", since = "1.0.0")]
fn as_mut(&mut self) -> &mut T;
}
/// expensive.
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Into<T>: Sized {
- /// Perform the conversion.
+ /// Performs the conversion.
#[stable(feature = "rust1", since = "1.0.0")]
fn into(self) -> T;
}
/// Construct `Self` via a conversion.
#[stable(feature = "rust1", since = "1.0.0")]
pub trait From<T> {
- /// Perform the conversion.
+ /// Performs the conversion.
#[stable(feature = "rust1", since = "1.0.0")]
fn from(T) -> Self;
}
fn all<F>(&mut self, mut f: F) -> bool where
Self: Sized, F: FnMut(Self::Item) -> bool
{
- for x in self.by_ref() { if !f(x) { return false; } }
+ for x in self.by_ref() {
+ if !f(x) {
+ return false;
+ }
+ }
true
}
Self: Sized,
F: FnMut(Self::Item) -> bool
{
- for x in self.by_ref() { if f(x) { return true; } }
+ for x in self.by_ref() {
+ if f(x) {
+ return true;
+ }
+ }
false
}
#[stable(feature = "rust1", since = "1.0.0")]
type Item;
- /// A container for iterating over elements of type Item
+ /// A container for iterating over elements of type `Item`
#[stable(feature = "rust1", since = "1.0.0")]
type IntoIter: Iterator<Item=Self::Item>;
#[inline]
fn next(&mut self) -> Option<(A::Item, B::Item)> {
- match self.a.next() {
- None => None,
- Some(x) => match self.b.next() {
- None => None,
- Some(y) => Some((x, y))
- }
- }
+ self.a.next().and_then(|x| {
+ self.b.next().and_then(|y| {
+ Some((x, y))
+ })
+ })
}
#[inline]
#[inline]
fn idx(&mut self, index: usize) -> Option<(A::Item, B::Item)> {
- match self.a.idx(index) {
- None => None,
- Some(x) => match self.b.idx(index) {
- None => None,
- Some(y) => Some((x, y))
- }
- }
+ self.a.idx(index).and_then(|x| {
+ self.b.idx(index).and_then(|y| {
+ Some((x, y))
+ })
+ })
}
}
#[inline]
fn next(&mut self) -> Option<B> {
for x in self.iter.by_ref() {
- match (self.f)(x) {
- Some(y) => return Some(y),
- None => ()
+ if let Some(y) = (self.f)(x) {
+ return Some(y);
}
}
None
#[inline]
fn next_back(&mut self) -> Option<B> {
for x in self.iter.by_ref().rev() {
- match (self.f)(x) {
- Some(y) => return Some(y),
- None => ()
+ if let Some(y) = (self.f)(x) {
+ return Some(y);
}
}
None
#[inline]
fn next(&mut self) -> Option<(usize, <I as Iterator>::Item)> {
- match self.iter.next() {
- Some(a) => {
- let ret = Some((self.count, a));
- self.count += 1;
- ret
- }
- _ => None
- }
+ self.iter.next().map(|a| {
+ let ret = (self.count, a);
+ self.count += 1;
+ ret
+ })
}
#[inline]
{
#[inline]
fn next_back(&mut self) -> Option<(usize, <I as Iterator>::Item)> {
- match self.iter.next_back() {
- Some(a) => {
- let len = self.iter.len();
- Some((self.count + len, a))
- }
- _ => None
- }
+ self.iter.next_back().map(|a| {
+ let len = self.iter.len();
+ (self.count + len, a)
+ })
}
}
#[inline]
fn idx(&mut self, index: usize) -> Option<(usize, <I as Iterator>::Item)> {
- match self.iter.idx(index) {
- Some(a) => Some((self.count + index, a)),
- _ => None,
- }
+ self.iter.idx(index).map(|a| (self.count + index, a))
}
}
#[inline]
fn next(&mut self) -> Option<I::Item> {
- if self.peeked.is_some() { self.peeked.take() }
- else { self.iter.next() }
+ match self.peeked {
+ Some(_) => self.peeked.take(),
+ None => self.iter.next(),
+ }
}
#[inline]
let (lo, hi) = self.iter.size_hint();
if self.peeked.is_some() {
let lo = lo.saturating_add(1);
- let hi = match hi {
- Some(x) => x.checked_add(1),
- None => None
- };
+ let hi = hi.and_then(|x| x.checked_add(1));
(lo, hi)
} else {
(lo, hi)
if self.flag {
None
} else {
- match self.iter.next() {
- Some(x) => {
- if (self.predicate)(&x) {
- Some(x)
- } else {
- self.flag = true;
- None
- }
+ self.iter.next().and_then(|x| {
+ if (self.predicate)(&x) {
+ Some(x)
+ } else {
+ self.flag = true;
+ None
}
- None => None
- }
+ })
}
}
let (lower, upper) = self.iter.size_hint();
let lower = lower.saturating_sub(self.n);
-
- let upper = match upper {
- Some(x) => Some(x.saturating_sub(self.n)),
- None => None
- };
+ let upper = upper.map(|x| x.saturating_sub(self.n));
(lower, upper)
}
impl<I: Iterator, F> Inspect<I, F> where F: FnMut(&I::Item) {
#[inline]
fn do_inspect(&mut self, elt: Option<I::Item>) -> Option<I::Item> {
- match elt {
- Some(ref a) => (self.f)(a),
- None => ()
+ if let Some(ref a) = elt {
+ (self.f)(a);
}
elt
#[inline]
fn next(&mut self) -> Option<A> {
- match self.range.next() {
- Some(x) => Some(x),
- None => {
- if !self.done && self.range.start == self.range.end {
- self.done = true;
- Some(self.range.end.clone())
- } else {
- None
- }
+ self.range.next().or_else(|| {
+ if !self.done && self.range.start == self.range.end {
+ self.done = true;
+ Some(self.range.end.clone())
+ } else {
+ None
}
- }
+ })
}
#[inline]
(lo, hi)
} else {
let lo = lo.saturating_add(1);
- let hi = match hi {
- Some(x) => x.checked_add(1),
- None => None
- };
+ let hi = hi.and_then(|x| x.checked_add(1));
(lo, hi)
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
- if let Some(hint) = Step::steps_between(&self.start, &self.end, &A::one()) {
- (hint, Some(hint))
- } else {
- (0, None)
+ match Step::steps_between(&self.start, &self.end, &A::one()) {
+ Some(hint) => (hint, Some(hint)),
+ None => (0, None)
}
}
}
let &mut (ref mut f, ref mut val, ref mut first) = st;
if *first {
*first = false;
- } else {
- match val.take() {
- Some(x) => {
- *val = Some((*f)(x))
- }
- None => {}
- }
+ } else if let Some(x) = val.take() {
+ *val = Some((*f)(x))
}
val.clone()
}
#[stable(feature = "rust1", since = "1.0.0")]
#[lang="send"]
#[rustc_on_unimplemented = "`{Self}` cannot be sent between threads safely"]
+#[allow(deprecated)]
pub unsafe trait Send : MarkerTrait {
// empty.
}
#[lang="sized"]
#[rustc_on_unimplemented = "`{Self}` does not have a constant size known at compile-time"]
#[fundamental] // for Default, for example, which requires that `[T]: !Default` be evaluatable
+#[allow(deprecated)]
pub trait Sized : MarkerTrait {
// Empty.
}
#[stable(feature = "rust1", since = "1.0.0")]
#[lang="sync"]
#[rustc_on_unimplemented = "`{Self}` cannot be shared between threads safely"]
+#[allow(deprecated)]
pub unsafe trait Sync : MarkerTrait {
// Empty
}
)
}
-/// `MarkerTrait` is intended to be used as the supertrait for traits
-/// that don't have any methods but instead serve just to designate
-/// categories of types. An example would be the `Send` trait, which
-/// indicates types that are sendable: `Send` does not itself offer
-/// any methods, but instead is used to gate access to data.
-///
-/// FIXME. Better documentation needed here!
+/// `MarkerTrait` is deprecated and no longer needed.
#[stable(feature = "rust1", since = "1.0.0")]
+#[deprecated(since = "1.0.0", reason = "No longer needed")]
+#[allow(deprecated)]
+#[cfg(stage0)]
pub trait MarkerTrait : PhantomFn<Self,Self> { }
-// ~~~~~ <-- FIXME(#22806)?
-//
-// Marker trait has been made invariant so as to avoid inf recursion,
-// but we should ideally solve the underlying problem. That's a bit
-// complicated.
+/// `MarkerTrait` is deprecated and no longer needed.
+#[stable(feature = "rust1", since = "1.0.0")]
+#[deprecated(since = "1.0.0", reason = "No longer needed")]
+#[allow(deprecated)]
+#[cfg(not(stage0))]
+pub trait MarkerTrait { }
+
+#[allow(deprecated)]
impl<T:?Sized> MarkerTrait for T { }
-/// `PhantomFn` is a marker trait for use with traits that contain
-/// type or lifetime parameters that do not appear in any of their
-/// methods. In that case, you can either remove those parameters, or
-/// add a `PhantomFn` supertrait that reflects the signature of
-/// methods that compiler should "pretend" exists. This most commonly
-/// occurs for traits with no methods: in that particular case, you
-/// can extend `MarkerTrait`, which is equivalent to
-/// `PhantomFn<Self>`.
-///
-/// # Examples
-///
-/// As an example, consider a trait with no methods like `Even`, meant
-/// to represent types that are "even":
-///
-/// ```rust,ignore
-/// trait Even { }
-/// ```
-///
-/// In this case, because the implicit parameter `Self` is unused, the
-/// compiler will issue an error. The only purpose of this trait is to
-/// categorize types (and hence instances of those types) as "even" or
-/// not, so if we *were* going to have a method, it might look like:
-///
-/// ```rust,ignore
-/// trait Even {
-/// fn is_even(self) -> bool { true }
-/// }
-/// ```
-///
-/// Therefore, we can model a method like this as follows:
-///
-/// ```
-/// use std::marker::PhantomFn;
-/// trait Even : PhantomFn<Self> { }
-/// ```
-///
-/// Another equivalent, but clearer, option would be to use
-/// `MarkerTrait`:
-///
-/// ```
-/// # #![feature(core)]
-/// use std::marker::MarkerTrait;
-/// trait Even : MarkerTrait { }
-/// ```
-///
-/// # Parameters
-///
-/// - `A` represents the type of the method's argument. You can use a
-/// tuple to represent "multiple" arguments. Any types appearing here
-/// will be considered "contravariant".
-/// - `R`, if supplied, represents the method's return type. This defaults
-/// to `()` as it is rarely needed.
-///
-/// # Additional reading
-///
-/// More details and background can be found in [RFC 738][738].
-///
-/// [738]: https://github.com/rust-lang/rfcs/blob/master/text/0738-variance.md
+/// `PhantomFn` is a deprecated marker trait that is no longer needed.
#[lang="phantom_fn"]
#[stable(feature = "rust1", since = "1.0.0")]
-pub trait PhantomFn<A:?Sized,R:?Sized=()> { }
+#[deprecated(since = "1.0.0", reason = "No longer needed")]
+#[cfg(stage0)]
+pub trait PhantomFn<A:?Sized,R:?Sized=()> {
+}
+
+/// `PhantomFn` is a deprecated marker trait that is no longer needed.
+#[stable(feature = "rust1", since = "1.0.0")]
+#[deprecated(since = "1.0.0", reason = "No longer needed")]
+#[cfg(not(stage0))]
+pub trait PhantomFn<A:?Sized,R:?Sized=()> {
+}
+
+#[allow(deprecated)]
+#[cfg(not(stage0))]
+impl<A:?Sized,R:?Sized,T:?Sized> PhantomFn<A,R> for T { }
/// `PhantomData<T>` allows you to describe that a type acts as if it stores a value of type `T`,
/// even though it does not. This allows you to inform the compiler about certain safety properties
/// [1]: http://en.wikipedia.org/wiki/Parametricity
#[rustc_reflect_like]
#[unstable(feature = "core", reason = "requires RFC and more experience")]
+#[allow(deprecated)]
pub trait Reflect : MarkerTrait {
}
use ops::Deref;
/// Unsafe trait to indicate what types are usable with the NonZero struct
+#[allow(deprecated)]
pub unsafe trait Zeroable : MarkerTrait {}
unsafe impl<T:?Sized> Zeroable for *const T {}
//! enum Version { Version1, Version2 }
//!
//! fn parse_version(header: &[u8]) -> Result<Version, &'static str> {
-//! if header.len() < 1 {
-//! return Err("invalid header length");
-//! }
-//! match header[0] {
-//! 1 => Ok(Version::Version1),
-//! 2 => Ok(Version::Version2),
-//! _ => Err("invalid version")
+//! match header.get(0) {
+//! None => Err("invalid header length"),
+//! Some(&1) => Ok(Version::Version1),
+//! Some(&2) => Ok(Version::Version2),
+//! Some(_) => Err("invalid version")
//! }
//! }
//!
//! let version = parse_version(&[1, 2, 3, 4]);
//! match version {
-//! Ok(v) => {
-//! println!("working with version: {:?}", v);
-//! }
-//! Err(e) => {
-//! println!("error parsing header: {:?}", e);
-//! }
+//! Ok(v) => println!("working with version: {:?}", v),
+//! Err(e) => println!("error parsing header: {:?}", e),
//! }
//! ```
//!
//! let mut file = File::open_mode(&Path::new("valuable_data.txt"), Open, Write);
//! try!(file.write_line("important message"));
//! drop(file);
-//! return Ok(());
+//! Ok(())
//! }
//! ```
//!
//! if let Err(e) = file.write_line(&format!("age: {}", info.age)) {
//! return Err(e)
//! }
-//! return file.write_line(&format!("rating: {}", info.rating));
+//! file.write_line(&format!("rating: {}", info.rating))
//! }
//! ```
//!
//! try!(file.write_line(&format!("name: {}", info.name)));
//! try!(file.write_line(&format!("age: {}", info.age)));
//! try!(file.write_line(&format!("rating: {}", info.rating)));
-//! return Ok(());
+//! Ok(())
//! }
//! ```
//!
#![doc(primitive = "str")]
use self::OldSearcher::{TwoWay, TwoWayLong};
+use self::pattern::Pattern;
+use self::pattern::{Searcher, ReverseSearcher, DoubleEndedSearcher};
use char::CharExt;
use clone::Clone;
use slice::{self, SliceExt};
use usize;
-pub use self::pattern::Pattern;
-pub use self::pattern::{Searcher, ReverseSearcher, DoubleEndedSearcher, SearchStep};
-
-mod pattern;
-
-macro_rules! delegate_iter {
- (exact $te:ty : $ti:ty) => {
- delegate_iter!{$te : $ti}
- impl<'a> ExactSizeIterator for $ti {
- #[inline]
- fn len(&self) -> usize {
- self.0.len()
- }
- }
- };
- ($te:ty : $ti:ty) => {
- #[stable(feature = "rust1", since = "1.0.0")]
- impl<'a> Iterator for $ti {
- type Item = $te;
-
- #[inline]
- fn next(&mut self) -> Option<$te> {
- self.0.next()
- }
- #[inline]
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.0.size_hint()
- }
- }
- #[stable(feature = "rust1", since = "1.0.0")]
- impl<'a> DoubleEndedIterator for $ti {
- #[inline]
- fn next_back(&mut self) -> Option<$te> {
- self.0.next_back()
- }
- }
- };
- (pattern $te:ty : $ti:ty) => {
- #[stable(feature = "rust1", since = "1.0.0")]
- impl<'a, P: Pattern<'a>> Iterator for $ti {
- type Item = $te;
-
- #[inline]
- fn next(&mut self) -> Option<$te> {
- self.0.next()
- }
- #[inline]
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.0.size_hint()
- }
- }
- #[stable(feature = "rust1", since = "1.0.0")]
- impl<'a, P: Pattern<'a>> DoubleEndedIterator for $ti
- where P::Searcher: DoubleEndedSearcher<'a> {
- #[inline]
- fn next_back(&mut self) -> Option<$te> {
- self.0.next_back()
- }
- }
- };
- (pattern forward $te:ty : $ti:ty) => {
- #[stable(feature = "rust1", since = "1.0.0")]
- impl<'a, P: Pattern<'a>> Iterator for $ti
- where P::Searcher: DoubleEndedSearcher<'a> {
- type Item = $te;
-
- #[inline]
- fn next(&mut self) -> Option<$te> {
- self.0.next()
- }
- #[inline]
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.0.size_hint()
- }
- }
- };
- (pattern reverse $te:ty : $ti:ty) => {
- #[stable(feature = "rust1", since = "1.0.0")]
- impl<'a, P: Pattern<'a>> Iterator for $ti
- where P::Searcher: ReverseSearcher<'a>
- {
- type Item = $te;
-
- #[inline]
- fn next(&mut self) -> Option<$te> {
- self.0.next()
- }
- #[inline]
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.0.size_hint()
- }
- }
- };
-}
+pub mod pattern;
/// A trait to abstract the idea of creating a new instance of a type from a
/// string.
}
}
-/// External iterator for a string's characters and their byte offsets.
-/// Use with the `std::iter` module.
+/// Iterator for a string's characters and their byte offsets.
#[derive(Clone)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct CharIndices<'a> {
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
pub struct Bytes<'a>(Map<slice::Iter<'a, u8>, BytesDeref>);
-delegate_iter!{exact u8 : Bytes<'a>}
-/// A temporary fn new type that ensures that the `Bytes` iterator
-/// is cloneable.
-#[derive(Copy, Clone)]
+/// A nameable, cloneable fn type
+#[derive(Clone)]
struct BytesDeref;
impl<'a> Fn<(&'a u8,)> for BytesDeref {
}
}
-/// An iterator over the substrings of a string, separated by `sep`.
-struct CharSplits<'a, P: Pattern<'a>> {
- /// The slice remaining to be iterated
- start: usize,
- end: usize,
- matcher: P::Searcher,
- /// Whether an empty string at the end is allowed
- allow_trailing_empty: bool,
- finished: bool,
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a> Iterator for Bytes<'a> {
+ type Item = u8;
+
+ #[inline]
+ fn next(&mut self) -> Option<u8> {
+ self.0.next()
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.0.size_hint()
+ }
}
-/// An iterator over the substrings of a string, separated by `sep`,
-/// splitting at most `count` times.
-struct CharSplitsN<'a, P: Pattern<'a>> {
- iter: CharSplits<'a, P>,
- /// The number of items remaining
- count: usize,
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a> DoubleEndedIterator for Bytes<'a> {
+ #[inline]
+ fn next_back(&mut self) -> Option<u8> {
+ self.0.next_back()
+ }
}
-/// An iterator over the substrings of a string, separated by a
-/// pattern, in reverse order.
-struct RCharSplits<'a, P: Pattern<'a>> {
- /// The slice remaining to be iterated
- start: usize,
- end: usize,
- matcher: P::Searcher,
- /// Whether an empty string at the end of iteration is allowed
- allow_final_empty: bool,
- finished: bool,
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a> ExactSizeIterator for Bytes<'a> {
+ #[inline]
+ fn len(&self) -> usize {
+ self.0.len()
+ }
}
-/// An iterator over the substrings of a string, separated by a
-/// pattern, splitting at most `count` times, in reverse order.
-struct RCharSplitsN<'a, P: Pattern<'a>> {
- iter: RCharSplits<'a, P>,
- /// The number of splits remaining
- count: usize,
+/// This macro generates a Clone impl for string pattern API
+/// wrapper types of the form X<'a, P>
+macro_rules! derive_pattern_clone {
+ (clone $t:ident with |$s:ident| $e:expr) => {
+ impl<'a, P: Pattern<'a>> Clone for $t<'a, P>
+ where P::Searcher: Clone
+ {
+ fn clone(&self) -> Self {
+ let $s = self;
+ $e
+ }
+ }
+ }
}
-/// An iterator over the lines of a string, separated by `\n`.
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct Lines<'a> {
- inner: CharSplits<'a, char>,
+/// This macro generates two public iterator structs
+/// wrapping a private internal one that makes use of the `Pattern` API.
+///
+/// For all patterns `P: Pattern<'a>` the following items will be
+/// generated (generics omitted):
+///
+/// struct $forward_iterator($internal_iterator);
+/// struct $reverse_iterator($internal_iterator);
+///
+/// impl Iterator for $forward_iterator
+/// { /* internal ends up calling Searcher::next_match() */ }
+///
+/// impl DoubleEndedIterator for $forward_iterator
+/// where P::Searcher: DoubleEndedSearcher
+/// { /* internal ends up calling Searcher::next_match_back() */ }
+///
+/// impl Iterator for $reverse_iterator
+/// where P::Searcher: ReverseSearcher
+/// { /* internal ends up calling Searcher::next_match_back() */ }
+///
+/// impl DoubleEndedIterator for $reverse_iterator
+/// where P::Searcher: DoubleEndedSearcher
+/// { /* internal ends up calling Searcher::next_match() */ }
+///
+/// The internal one is defined outside the macro, and has almost the same
+/// semantics as a DoubleEndedIterator by delegating to `pattern::Searcher` and
+/// `pattern::ReverseSearcher` for both forward and reverse iteration.
+///
+/// "Almost", because a `Searcher` and a `ReverseSearcher` for a given
+/// `Pattern` might not return the same elements, so actually implementing
+/// `DoubleEndedIterator` for it would be incorrect.
+/// (See the docs in `str::pattern` for more details)
+///
+/// However, the internal struct still represents a single ended iterator from
+/// either end, and depending on pattern is also a valid double ended iterator,
+/// so the two wrapper structs implement `Iterator`
+/// and `DoubleEndedIterator` depending on the concrete pattern type, leading
+/// to the complex impls seen above.
+macro_rules! generate_pattern_iterators {
+ {
+ // Forward iterator
+ forward:
+ $(#[$forward_iterator_attribute:meta])*
+ struct $forward_iterator:ident;
+
+ // Reverse iterator
+ reverse:
+ $(#[$reverse_iterator_attribute:meta])*
+ struct $reverse_iterator:ident;
+
+ // Stability of all generated items
+ stability:
+ $(#[$common_stability_attribute:meta])*
+
+ // Internal almost-iterator that is being delegated to
+ internal:
+ $internal_iterator:ident yielding ($iterty:ty);
+
+        // Kind of delegation - either single ended or double ended
+ delegate $($t:tt)*
+ } => {
+ $(#[$forward_iterator_attribute])*
+ $(#[$common_stability_attribute])*
+ pub struct $forward_iterator<'a, P: Pattern<'a>>($internal_iterator<'a, P>);
+
+ $(#[$common_stability_attribute])*
+ impl<'a, P: Pattern<'a>> Iterator for $forward_iterator<'a, P> {
+ type Item = $iterty;
+
+ #[inline]
+ fn next(&mut self) -> Option<$iterty> {
+ self.0.next()
+ }
+ }
+
+ $(#[$common_stability_attribute])*
+ impl<'a, P: Pattern<'a>> Clone for $forward_iterator<'a, P>
+ where P::Searcher: Clone
+ {
+ fn clone(&self) -> Self {
+ $forward_iterator(self.0.clone())
+ }
+ }
+
+ $(#[$reverse_iterator_attribute])*
+ $(#[$common_stability_attribute])*
+ pub struct $reverse_iterator<'a, P: Pattern<'a>>($internal_iterator<'a, P>);
+
+ $(#[$common_stability_attribute])*
+ impl<'a, P: Pattern<'a>> Iterator for $reverse_iterator<'a, P>
+ where P::Searcher: ReverseSearcher<'a>
+ {
+ type Item = $iterty;
+
+ #[inline]
+ fn next(&mut self) -> Option<$iterty> {
+ self.0.next_back()
+ }
+ }
+
+ $(#[$common_stability_attribute])*
+ impl<'a, P: Pattern<'a>> Clone for $reverse_iterator<'a, P>
+ where P::Searcher: Clone
+ {
+ fn clone(&self) -> Self {
+ $reverse_iterator(self.0.clone())
+ }
+ }
+
+ generate_pattern_iterators!($($t)* with $(#[$common_stability_attribute])*,
+ $forward_iterator,
+ $reverse_iterator, $iterty);
+ };
+ {
+ double ended; with $(#[$common_stability_attribute:meta])*,
+ $forward_iterator:ident,
+ $reverse_iterator:ident, $iterty:ty
+ } => {
+ $(#[$common_stability_attribute])*
+ impl<'a, P: Pattern<'a>> DoubleEndedIterator for $forward_iterator<'a, P>
+ where P::Searcher: DoubleEndedSearcher<'a>
+ {
+ #[inline]
+ fn next_back(&mut self) -> Option<$iterty> {
+ self.0.next_back()
+ }
+ }
+
+ $(#[$common_stability_attribute])*
+ impl<'a, P: Pattern<'a>> DoubleEndedIterator for $reverse_iterator<'a, P>
+ where P::Searcher: DoubleEndedSearcher<'a>
+ {
+ #[inline]
+ fn next_back(&mut self) -> Option<$iterty> {
+ self.0.next()
+ }
+ }
+ };
+ {
+ single ended; with $(#[$common_stability_attribute:meta])*,
+ $forward_iterator:ident,
+ $reverse_iterator:ident, $iterty:ty
+ } => {}
}
-/// An iterator over the lines of a string, separated by either `\n` or (`\r\n`).
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct LinesAny<'a> {
- inner: Map<Lines<'a>, fn(&str) -> &str>,
+derive_pattern_clone!{
+ clone SplitInternal
+ with |s| SplitInternal { matcher: s.matcher.clone(), ..*s }
+}
+struct SplitInternal<'a, P: Pattern<'a>> {
+ start: usize,
+ end: usize,
+ matcher: P::Searcher,
+ allow_trailing_empty: bool,
+ finished: bool,
}
-impl<'a, P: Pattern<'a>> CharSplits<'a, P> {
+impl<'a, P: Pattern<'a>> SplitInternal<'a, P> {
#[inline]
fn get_end(&mut self) -> Option<&'a str> {
if !self.finished && (self.allow_trailing_empty || self.end - self.start > 0) {
None
}
}
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, P: Pattern<'a>> Iterator for CharSplits<'a, P> {
- type Item = &'a str;
#[inline]
fn next(&mut self) -> Option<&'a str> {
None => self.get_end(),
}
}
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, P: Pattern<'a>> DoubleEndedIterator for CharSplits<'a, P>
-where P::Searcher: DoubleEndedSearcher<'a> {
#[inline]
- fn next_back(&mut self) -> Option<&'a str> {
+ fn next_back(&mut self) -> Option<&'a str>
+ where P::Searcher: ReverseSearcher<'a>
+ {
if self.finished { return None }
if !self.allow_trailing_empty {
}
}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, P: Pattern<'a>> Iterator for CharSplitsN<'a, P> {
- type Item = &'a str;
+generate_pattern_iterators! {
+ forward:
+ /// Return type of `str::split()`
+ struct Split;
+ reverse:
+ /// Return type of `str::rsplit()`
+ struct RSplit;
+ stability:
+ #[stable(feature = "rust1", since = "1.0.0")]
+ internal:
+ SplitInternal yielding (&'a str);
+ delegate double ended;
+}
+
+generate_pattern_iterators! {
+ forward:
+ /// Return type of `str::split_terminator()`
+ struct SplitTerminator;
+ reverse:
+ /// Return type of `str::rsplit_terminator()`
+ struct RSplitTerminator;
+ stability:
+ #[stable(feature = "rust1", since = "1.0.0")]
+ internal:
+ SplitInternal yielding (&'a str);
+ delegate double ended;
+}
+
+derive_pattern_clone!{
+ clone SplitNInternal
+ with |s| SplitNInternal { iter: s.iter.clone(), ..*s }
+}
+struct SplitNInternal<'a, P: Pattern<'a>> {
+ iter: SplitInternal<'a, P>,
+ /// The number of splits remaining
+ count: usize,
+}
+impl<'a, P: Pattern<'a>> SplitNInternal<'a, P> {
#[inline]
fn next(&mut self) -> Option<&'a str> {
match self.count {
_ => { self.count -= 1; self.iter.next() }
}
}
-}
-impl<'a, P: Pattern<'a>> RCharSplits<'a, P> {
#[inline]
- fn get_remainder(&mut self) -> Option<&'a str> {
- if !self.finished && (self.allow_final_empty || self.end - self.start > 0) {
- self.finished = true;
- unsafe {
- let string = self.matcher.haystack().slice_unchecked(self.start, self.end);
- Some(string)
- }
- } else {
- None
+ fn next_back(&mut self) -> Option<&'a str>
+ where P::Searcher: ReverseSearcher<'a>
+ {
+ match self.count {
+ 0 => None,
+ 1 => { self.count = 0; self.iter.get_end() }
+ _ => { self.count -= 1; self.iter.next_back() }
}
}
}
+generate_pattern_iterators! {
+ forward:
+ /// Return type of `str::splitn()`
+ struct SplitN;
+ reverse:
+ /// Return type of `str::rsplitn()`
+ struct RSplitN;
+ stability:
+ #[stable(feature = "rust1", since = "1.0.0")]
+ internal:
+ SplitNInternal yielding (&'a str);
+ delegate single ended;
+}
+
+derive_pattern_clone!{
+ clone MatchIndicesInternal
+ with |s| MatchIndicesInternal(s.0.clone())
+}
+struct MatchIndicesInternal<'a, P: Pattern<'a>>(P::Searcher);
+
+impl<'a, P: Pattern<'a>> MatchIndicesInternal<'a, P> {
+ #[inline]
+ fn next(&mut self) -> Option<(usize, usize)> {
+ self.0.next_match()
+ }
+
+ #[inline]
+ fn next_back(&mut self) -> Option<(usize, usize)>
+ where P::Searcher: ReverseSearcher<'a>
+ {
+ self.0.next_match_back()
+ }
+}
+
+generate_pattern_iterators! {
+ forward:
+ /// Return type of `str::match_indices()`
+ struct MatchIndices;
+ reverse:
+ /// Return type of `str::rmatch_indices()`
+ struct RMatchIndices;
+ stability:
+ #[unstable(feature = "core",
+ reason = "type may be removed or have its iterator impl changed")]
+ internal:
+ MatchIndicesInternal yielding ((usize, usize));
+ delegate double ended;
+}
+
+derive_pattern_clone!{
+ clone MatchesInternal
+ with |s| MatchesInternal(s.0.clone())
+}
+struct MatchesInternal<'a, P: Pattern<'a>>(P::Searcher);
+
+impl<'a, P: Pattern<'a>> MatchesInternal<'a, P> {
+ #[inline]
+ fn next(&mut self) -> Option<&'a str> {
+ self.0.next_match().map(|(a, b)| unsafe {
+ // Indices are known to be on utf8 boundaries
+ self.0.haystack().slice_unchecked(a, b)
+ })
+ }
+
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a str>
+ where P::Searcher: ReverseSearcher<'a>
+ {
+ self.0.next_match_back().map(|(a, b)| unsafe {
+ // Indices are known to be on utf8 boundaries
+ self.0.haystack().slice_unchecked(a, b)
+ })
+ }
+}
+
+generate_pattern_iterators! {
+ forward:
+ /// Return type of `str::matches()`
+ struct Matches;
+ reverse:
+ /// Return type of `str::rmatches()`
+ struct RMatches;
+ stability:
+ #[unstable(feature = "core", reason = "type got recently added")]
+ internal:
+ MatchesInternal yielding (&'a str);
+ delegate double ended;
+}
+
+/// Return type of `str::lines()`
+#[stable(feature = "rust1", since = "1.0.0")]
+#[derive(Clone)]
+pub struct Lines<'a>(SplitTerminator<'a, char>);
+
#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, P: Pattern<'a>> Iterator for RCharSplits<'a, P>
- where P::Searcher: ReverseSearcher<'a>
-{
+impl<'a> Iterator for Lines<'a> {
type Item = &'a str;
#[inline]
fn next(&mut self) -> Option<&'a str> {
- if self.finished { return None }
+ self.0.next()
+ }
- let haystack = self.matcher.haystack();
- match self.matcher.next_match_back() {
- Some((a, b)) => unsafe {
- let elt = haystack.slice_unchecked(b, self.end);
- self.end = a;
- Some(elt)
- },
- None => self.get_remainder(),
- }
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.0.size_hint()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a> DoubleEndedIterator for Lines<'a> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a str> {
+ self.0.next_back()
+ }
+}
+
+/// Return type of `str::lines_any()`
+#[stable(feature = "rust1", since = "1.0.0")]
+#[derive(Clone)]
+pub struct LinesAny<'a>(Map<Lines<'a>, LinesAnyMap>);
+
+/// A nameable, cloneable fn type
+#[derive(Clone)]
+struct LinesAnyMap;
+
+impl<'a> Fn<(&'a str,)> for LinesAnyMap {
+ #[inline]
+ extern "rust-call" fn call(&self, (line,): (&'a str,)) -> &'a str {
+ let l = line.len();
+ if l > 0 && line.as_bytes()[l - 1] == b'\r' { &line[0 .. l - 1] }
+ else { line }
+ }
+}
+
+impl<'a> FnMut<(&'a str,)> for LinesAnyMap {
+ #[inline]
+ extern "rust-call" fn call_mut(&mut self, (line,): (&'a str,)) -> &'a str {
+ Fn::call(&*self, (line,))
+ }
+}
+
+impl<'a> FnOnce<(&'a str,)> for LinesAnyMap {
+ type Output = &'a str;
+
+ #[inline]
+ extern "rust-call" fn call_once(self, (line,): (&'a str,)) -> &'a str {
+ Fn::call(&self, (line,))
}
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, P: Pattern<'a>> Iterator for RCharSplitsN<'a, P>
- where P::Searcher: ReverseSearcher<'a>
-{
+impl<'a> Iterator for LinesAny<'a> {
type Item = &'a str;
#[inline]
fn next(&mut self) -> Option<&'a str> {
- match self.count {
- 0 => None,
- 1 => { self.count -= 1; self.iter.get_remainder() }
- _ => { self.count -= 1; self.iter.next() }
- }
+ self.0.next()
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.0.size_hint()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a> DoubleEndedIterator for LinesAny<'a> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a str> {
+ self.0.next_back()
}
}
searcher: OldSearcher
}
-// FIXME: #21637 Prevents a Clone impl
-/// An iterator over the start and end indices of the matches of a
-/// substring within a larger string
-#[unstable(feature = "core", reason = "type may be removed")]
-pub struct MatchIndices<'a, P: Pattern<'a>>(P::Searcher);
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, P: Pattern<'a>> Iterator for MatchIndices<'a, P> {
- type Item = (usize, usize);
-
- #[inline]
- fn next(&mut self) -> Option<(usize, usize)> {
- self.0.next_match()
- }
-}
-
impl<'a, 'b> OldMatchIndices<'a, 'b> {
#[inline]
#[allow(dead_code)]
fn as_slice(&self) -> &str { Str::as_slice(*self) }
}
-/// Return type of `str::split`
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct Split<'a, P: Pattern<'a>>(CharSplits<'a, P>);
-delegate_iter!{pattern &'a str : Split<'a, P>}
-
-/// Return type of `str::split_terminator`
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct SplitTerminator<'a, P: Pattern<'a>>(CharSplits<'a, P>);
-delegate_iter!{pattern &'a str : SplitTerminator<'a, P>}
-
-/// Return type of `str::splitn`
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct SplitN<'a, P: Pattern<'a>>(CharSplitsN<'a, P>);
-delegate_iter!{pattern forward &'a str : SplitN<'a, P>}
-
-/// Return type of `str::rsplit`
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct RSplit<'a, P: Pattern<'a>>(RCharSplits<'a, P>);
-delegate_iter!{pattern reverse &'a str : RSplit<'a, P>}
-
-/// Return type of `str::rsplitn`
-#[stable(feature = "rust1", since = "1.0.0")]
-pub struct RSplitN<'a, P: Pattern<'a>>(RCharSplitsN<'a, P>);
-delegate_iter!{pattern reverse &'a str : RSplitN<'a, P>}
-
/// Methods for string slices
#[allow(missing_docs)]
pub trait StrExt {
fn bytes<'a>(&'a self) -> Bytes<'a>;
fn char_indices<'a>(&'a self) -> CharIndices<'a>;
fn split<'a, P: Pattern<'a>>(&'a self, pat: P) -> Split<'a, P>;
- fn splitn<'a, P: Pattern<'a>>(&'a self, count: usize, pat: P) -> SplitN<'a, P>;
- fn split_terminator<'a, P: Pattern<'a>>(&'a self, pat: P) -> SplitTerminator<'a, P>;
fn rsplit<'a, P: Pattern<'a>>(&'a self, pat: P) -> RSplit<'a, P>
where P::Searcher: ReverseSearcher<'a>;
+ fn splitn<'a, P: Pattern<'a>>(&'a self, count: usize, pat: P) -> SplitN<'a, P>;
fn rsplitn<'a, P: Pattern<'a>>(&'a self, count: usize, pat: P) -> RSplitN<'a, P>
where P::Searcher: ReverseSearcher<'a>;
+ fn split_terminator<'a, P: Pattern<'a>>(&'a self, pat: P) -> SplitTerminator<'a, P>;
+ fn rsplit_terminator<'a, P: Pattern<'a>>(&'a self, pat: P) -> RSplitTerminator<'a, P>
+ where P::Searcher: ReverseSearcher<'a>;
+ fn matches<'a, P: Pattern<'a>>(&'a self, pat: P) -> Matches<'a, P>;
+ fn rmatches<'a, P: Pattern<'a>>(&'a self, pat: P) -> RMatches<'a, P>
+ where P::Searcher: ReverseSearcher<'a>;
fn match_indices<'a, P: Pattern<'a>>(&'a self, pat: P) -> MatchIndices<'a, P>;
+ fn rmatch_indices<'a, P: Pattern<'a>>(&'a self, pat: P) -> RMatchIndices<'a, P>
+ where P::Searcher: ReverseSearcher<'a>;
fn lines<'a>(&'a self) -> Lines<'a>;
fn lines_any<'a>(&'a self) -> LinesAny<'a>;
fn char_len(&self) -> usize;
#[inline]
fn split<'a, P: Pattern<'a>>(&'a self, pat: P) -> Split<'a, P> {
- Split(CharSplits {
+ Split(SplitInternal {
start: 0,
end: self.len(),
matcher: pat.into_searcher(self),
})
}
+ #[inline]
+ fn rsplit<'a, P: Pattern<'a>>(&'a self, pat: P) -> RSplit<'a, P>
+ where P::Searcher: ReverseSearcher<'a>
+ {
+ RSplit(self.split(pat).0)
+ }
+
#[inline]
fn splitn<'a, P: Pattern<'a>>(&'a self, count: usize, pat: P) -> SplitN<'a, P> {
- SplitN(CharSplitsN {
+ SplitN(SplitNInternal {
iter: self.split(pat).0,
count: count,
})
}
+ #[inline]
+ fn rsplitn<'a, P: Pattern<'a>>(&'a self, count: usize, pat: P) -> RSplitN<'a, P>
+ where P::Searcher: ReverseSearcher<'a>
+ {
+ RSplitN(self.splitn(count, pat).0)
+ }
+
#[inline]
fn split_terminator<'a, P: Pattern<'a>>(&'a self, pat: P) -> SplitTerminator<'a, P> {
- SplitTerminator(CharSplits {
+ SplitTerminator(SplitInternal {
allow_trailing_empty: false,
..self.split(pat).0
})
}
#[inline]
- fn rsplit<'a, P: Pattern<'a>>(&'a self, pat: P) -> RSplit<'a, P>
+ fn rsplit_terminator<'a, P: Pattern<'a>>(&'a self, pat: P) -> RSplitTerminator<'a, P>
where P::Searcher: ReverseSearcher<'a>
{
- RSplit(RCharSplits {
- start: 0,
- end: self.len(),
- matcher: pat.into_searcher(self),
- allow_final_empty: true,
- finished: false,
- })
+ RSplitTerminator(self.split_terminator(pat).0)
}
#[inline]
- fn rsplitn<'a, P: Pattern<'a>>(&'a self, count: usize, pat: P) -> RSplitN<'a, P>
+ fn matches<'a, P: Pattern<'a>>(&'a self, pat: P) -> Matches<'a, P> {
+ Matches(MatchesInternal(pat.into_searcher(self)))
+ }
+
+ #[inline]
+ fn rmatches<'a, P: Pattern<'a>>(&'a self, pat: P) -> RMatches<'a, P>
where P::Searcher: ReverseSearcher<'a>
{
- RSplitN(RCharSplitsN {
- iter: self.rsplit(pat).0,
- count: count,
- })
+ RMatches(self.matches(pat).0)
}
#[inline]
fn match_indices<'a, P: Pattern<'a>>(&'a self, pat: P) -> MatchIndices<'a, P> {
- MatchIndices(pat.into_searcher(self))
+ MatchIndices(MatchIndicesInternal(pat.into_searcher(self)))
}
+ #[inline]
+ fn rmatch_indices<'a, P: Pattern<'a>>(&'a self, pat: P) -> RMatchIndices<'a, P>
+ where P::Searcher: ReverseSearcher<'a>
+ {
+ RMatchIndices(self.match_indices(pat).0)
+ }
#[inline]
fn lines(&self) -> Lines {
- Lines { inner: self.split_terminator('\n').0 }
+ Lines(self.split_terminator('\n'))
}
+ #[inline]
fn lines_any(&self) -> LinesAny {
- fn f(line: &str) -> &str {
- let l = line.len();
- if l > 0 && line.as_bytes()[l - 1] == b'\r' { &line[0 .. l - 1] }
- else { line }
- }
-
- let f: fn(&str) -> &str = f; // coerce to fn pointer
- LinesAny { inner: self.lines().map(f) }
+ LinesAny(self.lines().map(LinesAnyMap))
}
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn default() -> &'a str { "" }
}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a> Iterator for Lines<'a> {
- type Item = &'a str;
-
- #[inline]
- fn next(&mut self) -> Option<&'a str> { self.inner.next() }
- #[inline]
- fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a> DoubleEndedIterator for Lines<'a> {
- #[inline]
- fn next_back(&mut self) -> Option<&'a str> { self.inner.next_back() }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a> Iterator for LinesAny<'a> {
- type Item = &'a str;
-
- #[inline]
- fn next(&mut self) -> Option<&'a str> { self.inner.next() }
- #[inline]
- fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() }
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a> DoubleEndedIterator for LinesAny<'a> {
- #[inline]
- fn next_back(&mut self) -> Option<&'a str> { self.inner.next_back() }
-}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+//! The string Pattern API.
+//!
+//! For more details, see the traits `Pattern`, `Searcher`,
+//! `ReverseSearcher` and `DoubleEndedSearcher`.
+
use prelude::*;
// Pattern
/// `"[aa]a"` or `"a[aa]"`, depending from which side it is searched.
pub trait DoubleEndedSearcher<'a>: ReverseSearcher<'a> {}
+/////////////////////////////////////////////////////////////////////////////
// Impl for a CharEq wrapper
+/////////////////////////////////////////////////////////////////////////////
#[doc(hidden)]
trait CharEq {
struct CharEqPattern<C: CharEq>(C);
+#[derive(Clone)]
struct CharEqSearcher<'a, C: CharEq> {
char_eq: C,
haystack: &'a str,
impl<'a, C: CharEq> DoubleEndedSearcher<'a> for CharEqSearcher<'a, C> {}
+/////////////////////////////////////////////////////////////////////////////
// Impl for &str
+/////////////////////////////////////////////////////////////////////////////
// Todo: Optimize the naive implementation here
+/// Associated type for `<&str as Pattern<'a>>::Searcher`.
#[derive(Clone)]
-struct StrSearcher<'a, 'b> {
+pub struct StrSearcher<'a, 'b> {
haystack: &'a str,
needle: &'b str,
start: usize,
end: usize,
- done: bool,
+ state: State,
+}
+
+#[derive(Clone, PartialEq)]
+enum State { Done, NotDone, Reject(usize, usize) }
+impl State {
+ #[inline] fn done(&self) -> bool { *self == State::Done }
+ #[inline] fn take(&mut self) -> State { ::mem::replace(self, State::NotDone) }
}
/// Non-allocating substring search.
needle: self,
start: 0,
end: haystack.len(),
- done: false,
+ state: State::NotDone,
}
}
}
|m: &mut StrSearcher| {
// Forward step for empty needle
let current_start = m.start;
- if !m.done {
+ if !m.state.done() {
m.start = m.haystack.char_range_at(current_start).next;
+ m.state = State::Reject(current_start, m.start);
}
SearchStep::Match(current_start, current_start)
},
|m: &mut StrSearcher| {
// Backward step for empty needle
let current_end = m.end;
- if !m.done {
+ if !m.state.done() {
m.end = m.haystack.char_range_at_reverse(current_end).next;
+ m.state = State::Reject(m.end, current_end);
}
SearchStep::Match(current_end, current_end)
},
where F: FnOnce(&mut StrSearcher) -> SearchStep,
G: FnOnce(&mut StrSearcher) -> SearchStep
{
- if m.done {
+ if m.state.done() {
SearchStep::Done
} else if m.needle.len() == 0 && m.start <= m.end {
// Case for needle == ""
- if m.start == m.end {
- m.done = true;
+ if let State::Reject(a, b) = m.state.take() {
+ SearchStep::Reject(a, b)
+ } else {
+ if m.start == m.end {
+ m.state = State::Done;
+ }
+ empty_needle_step(&mut m)
}
- empty_needle_step(&mut m)
} else if m.start + m.needle.len() <= m.end {
// Case for needle != ""
nonempty_needle_step(&mut m)
} else if m.start < m.end {
// Remaining slice shorter than needle, reject it
- m.done = true;
+ m.state = State::Done;
SearchStep::Reject(m.start, m.end)
} else {
- m.done = true;
+ m.state = State::Done;
SearchStep::Done
}
}
-macro_rules! char_eq_pattern_impl {
- ($wrapper:ty, $wrapper_ident:ident) => {
- fn into_searcher(self, haystack: &'a str) -> $wrapper {
- $wrapper_ident(CharEqPattern(self).into_searcher(haystack))
+/////////////////////////////////////////////////////////////////////////////
+
+macro_rules! pattern_methods {
+ ($t:ty, $pmap:expr, $smap:expr) => {
+ type Searcher = $t;
+
+ #[inline]
+ fn into_searcher(self, haystack: &'a str) -> $t {
+ ($smap)(($pmap)(self).into_searcher(haystack))
}
+
#[inline]
fn is_contained_in(self, haystack: &'a str) -> bool {
- CharEqPattern(self).is_contained_in(haystack)
+ ($pmap)(self).is_contained_in(haystack)
}
+
#[inline]
fn is_prefix_of(self, haystack: &'a str) -> bool {
- CharEqPattern(self).is_prefix_of(haystack)
+ ($pmap)(self).is_prefix_of(haystack)
}
+
#[inline]
fn is_suffix_of(self, haystack: &'a str) -> bool
- where $wrapper: ReverseSearcher<'a>
+ where $t: ReverseSearcher<'a>
{
- CharEqPattern(self).is_suffix_of(haystack)
+ ($pmap)(self).is_suffix_of(haystack)
}
}
}
-// Pattern for char
-
-impl<'a> Pattern<'a> for char {
- type Searcher = CharSearcher<'a>;
- char_eq_pattern_impl!(CharSearcher<'a>, CharSearcher);
+macro_rules! searcher_methods {
+ (forward) => {
+ #[inline]
+ fn haystack(&self) -> &'a str {
+ self.0.haystack()
+ }
+ #[inline]
+ fn next(&mut self) -> SearchStep {
+ self.0.next()
+ }
+ #[inline]
+ fn next_match(&mut self) -> Option<(usize, usize)> {
+ self.0.next_match()
+ }
+ #[inline]
+ fn next_reject(&mut self) -> Option<(usize, usize)> {
+ self.0.next_reject()
+ }
+ };
+ (reverse) => {
+ #[inline]
+ fn next_back(&mut self) -> SearchStep {
+ self.0.next_back()
+ }
+ #[inline]
+ fn next_match_back(&mut self) -> Option<(usize, usize)> {
+ self.0.next_match_back()
+ }
+ #[inline]
+ fn next_reject_back(&mut self) -> Option<(usize, usize)> {
+ self.0.next_reject_back()
+ }
+ }
}
-pub struct CharSearcher<'a>(CharEqSearcher<'a, char>);
+/////////////////////////////////////////////////////////////////////////////
+// Impl for char
+/////////////////////////////////////////////////////////////////////////////
+
+/// Associated type for `<char as Pattern<'a>>::Searcher`.
+#[derive(Clone)]
+pub struct CharSearcher<'a>(<CharEqPattern<char> as Pattern<'a>>::Searcher);
unsafe impl<'a> Searcher<'a> for CharSearcher<'a> {
- #[inline]
- fn haystack(&self) -> &'a str { self.0.haystack() }
- #[inline]
- fn next(&mut self) -> SearchStep { self.0.next() }
+ searcher_methods!(forward);
}
+
unsafe impl<'a> ReverseSearcher<'a> for CharSearcher<'a> {
- #[inline]
- fn next_back(&mut self) -> SearchStep { self.0.next_back() }
+ searcher_methods!(reverse);
}
-impl<'a> DoubleEndedSearcher<'a> for CharSearcher<'a> {}
-// Pattern for &[char]
+impl<'a> DoubleEndedSearcher<'a> for CharSearcher<'a> {}
-impl<'a, 'b> Pattern<'a> for &'b [char] {
- type Searcher = CharSliceSearcher<'a, 'b>;
- char_eq_pattern_impl!(CharSliceSearcher<'a, 'b>, CharSliceSearcher);
+/// Searches for chars that are equal to a given char
+impl<'a> Pattern<'a> for char {
+ pattern_methods!(CharSearcher<'a>, CharEqPattern, CharSearcher);
}
-pub struct CharSliceSearcher<'a, 'b>(CharEqSearcher<'a, &'b [char]>);
+/////////////////////////////////////////////////////////////////////////////
+// Impl for &[char]
+/////////////////////////////////////////////////////////////////////////////
+
+// Todo: Change / Remove due to ambiguity in meaning.
+
+/// Associated type for `<&[char] as Pattern<'a>>::Searcher`.
+#[derive(Clone)]
+pub struct CharSliceSearcher<'a, 'b>(<CharEqPattern<&'b [char]> as Pattern<'a>>::Searcher);
unsafe impl<'a, 'b> Searcher<'a> for CharSliceSearcher<'a, 'b> {
- #[inline]
- fn haystack(&self) -> &'a str { self.0.haystack() }
- #[inline]
- fn next(&mut self) -> SearchStep { self.0.next() }
+ searcher_methods!(forward);
}
+
unsafe impl<'a, 'b> ReverseSearcher<'a> for CharSliceSearcher<'a, 'b> {
- #[inline]
- fn next_back(&mut self) -> SearchStep { self.0.next_back() }
+ searcher_methods!(reverse);
}
-impl<'a, 'b> DoubleEndedSearcher<'a> for CharSliceSearcher<'a, 'b> {}
-// Pattern for predicates
+impl<'a, 'b> DoubleEndedSearcher<'a> for CharSliceSearcher<'a, 'b> {}
-impl<'a, F: FnMut(char) -> bool> Pattern<'a> for F {
- type Searcher = CharPredSearcher<'a, F>;
- char_eq_pattern_impl!(CharPredSearcher<'a, F>, CharPredSearcher);
+/// Searches for chars that are equal to any of the chars in the array
+impl<'a, 'b> Pattern<'a> for &'b [char] {
+ pattern_methods!(CharSliceSearcher<'a, 'b>, CharEqPattern, CharSliceSearcher);
}
-pub struct CharPredSearcher<'a, F: FnMut(char) -> bool>(CharEqSearcher<'a, F>);
+/////////////////////////////////////////////////////////////////////////////
+// Impl for F: FnMut(char) -> bool
+/////////////////////////////////////////////////////////////////////////////
+
+/// Associated type for `<F as Pattern<'a>>::Searcher`.
+#[derive(Clone)]
+pub struct CharPredicateSearcher<'a, F>(<CharEqPattern<F> as Pattern<'a>>::Searcher)
+ where F: FnMut(char) -> bool;
-unsafe impl<'a, F> Searcher<'a> for CharPredSearcher<'a, F>
+unsafe impl<'a, F> Searcher<'a> for CharPredicateSearcher<'a, F>
where F: FnMut(char) -> bool
{
- #[inline]
- fn haystack(&self) -> &'a str { self.0.haystack() }
- #[inline]
- fn next(&mut self) -> SearchStep { self.0.next() }
+ searcher_methods!(forward);
}
-unsafe impl<'a, F> ReverseSearcher<'a> for CharPredSearcher<'a, F>
+
+unsafe impl<'a, F> ReverseSearcher<'a> for CharPredicateSearcher<'a, F>
where F: FnMut(char) -> bool
{
- #[inline]
- fn next_back(&mut self) -> SearchStep { self.0.next_back() }
+ searcher_methods!(reverse);
}
-impl<'a, F> DoubleEndedSearcher<'a> for CharPredSearcher<'a, F>
- where F: FnMut(char) -> bool
-{}
-// Pattern for &&str
+impl<'a, F> DoubleEndedSearcher<'a> for CharPredicateSearcher<'a, F>
+ where F: FnMut(char) -> bool {}
+/// Searches for chars that match the given predicate
+impl<'a, F> Pattern<'a> for F where F: FnMut(char) -> bool {
+ pattern_methods!(CharPredicateSearcher<'a, F>, CharEqPattern, CharPredicateSearcher);
+}
+
+/////////////////////////////////////////////////////////////////////////////
+// Impl for &&str
+/////////////////////////////////////////////////////////////////////////////
+
+/// Delegates to the `&str` impl.
impl<'a, 'b> Pattern<'a> for &'b &'b str {
- type Searcher = <&'b str as Pattern<'a>>::Searcher;
- #[inline]
- fn into_searcher(self, haystack: &'a str)
- -> <&'b str as Pattern<'a>>::Searcher {
- (*self).into_searcher(haystack)
- }
- #[inline]
- fn is_contained_in(self, haystack: &'a str) -> bool {
- (*self).is_contained_in(haystack)
- }
- #[inline]
- fn is_prefix_of(self, haystack: &'a str) -> bool {
- (*self).is_prefix_of(haystack)
- }
- #[inline]
- fn is_suffix_of(self, haystack: &'a str) -> bool {
- (*self).is_suffix_of(haystack)
- }
+ pattern_methods!(StrSearcher<'a, 'b>, |&s| s, |s| s);
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#[test]
-fn test_pattern_deref_forward() {
- let data = "aabcdaa";
- assert!(data.contains("bcd"));
- assert!(data.contains(&"bcd"));
- assert!(data.contains(&"bcd".to_string()));
-}
-
-#[test]
-fn test_empty_match_indices() {
- let data = "aä中!";
- let vec: Vec<_> = data.match_indices("").collect();
- assert_eq!(vec, [(0, 0), (1, 1), (3, 3), (6, 6), (7, 7)]);
-}
-
-#[test]
-fn test_bool_from_str() {
- assert_eq!("true".parse().ok(), Some(true));
- assert_eq!("false".parse().ok(), Some(false));
- assert_eq!("not even a boolean".parse::<bool>().ok(), None);
-}
-
-fn check_contains_all_substrings(s: &str) {
- assert!(s.contains(""));
- for i in 0..s.len() {
- for j in i+1..s.len() + 1 {
- assert!(s.contains(&s[i..j]));
- }
- }
-}
-
-#[test]
-fn strslice_issue_16589() {
- assert!("bananas".contains("nana"));
-
- // prior to the fix for #16589, x.contains("abcdabcd") returned false
- // test all substrings for good measure
- check_contains_all_substrings("012345678901234567890123456789bcdabcdabcd");
-}
-
-#[test]
-fn strslice_issue_16878() {
- assert!(!"1234567ah012345678901ah".contains("hah"));
- assert!(!"00abc01234567890123456789abc".contains("bcabc"));
-}
-
-
-#[test]
-fn test_strslice_contains() {
- let x = "There are moments, Jeeves, when one asks oneself, 'Do trousers matter?'";
- check_contains_all_substrings(x);
-}
-
-#[test]
-fn test_rsplitn_char_iterator() {
- let data = "\nMäry häd ä little lämb\nLittle lämb\n";
-
- let mut split: Vec<&str> = data.rsplitn(4, ' ').collect();
- split.reverse();
- assert_eq!(split, ["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]);
-
- let mut split: Vec<&str> = data.rsplitn(4, |c: char| c == ' ').collect();
- split.reverse();
- assert_eq!(split, ["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]);
-
- // Unicode
- let mut split: Vec<&str> = data.rsplitn(4, 'ä').collect();
- split.reverse();
- assert_eq!(split, ["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]);
-
- let mut split: Vec<&str> = data.rsplitn(4, |c: char| c == 'ä').collect();
- split.reverse();
- assert_eq!(split, ["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]);
-}
-
-#[test]
-fn test_split_char_iterator() {
- let data = "\nMäry häd ä little lämb\nLittle lämb\n";
-
- let split: Vec<&str> = data.split(' ').collect();
- assert_eq!( split, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
-
- let mut rsplit: Vec<&str> = data.split(' ').rev().collect();
- rsplit.reverse();
- assert_eq!(rsplit, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
-
- let split: Vec<&str> = data.split(|c: char| c == ' ').collect();
- assert_eq!( split, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
-
- let mut rsplit: Vec<&str> = data.split(|c: char| c == ' ').rev().collect();
- rsplit.reverse();
- assert_eq!(rsplit, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
-
- // Unicode
- let split: Vec<&str> = data.split('ä').collect();
- assert_eq!( split, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
-
- let mut rsplit: Vec<&str> = data.split('ä').rev().collect();
- rsplit.reverse();
- assert_eq!(rsplit, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
-
- let split: Vec<&str> = data.split(|c: char| c == 'ä').collect();
- assert_eq!( split, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
-
- let mut rsplit: Vec<&str> = data.split(|c: char| c == 'ä').rev().collect();
- rsplit.reverse();
- assert_eq!(rsplit, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
-}
-
-#[test]
-fn test_rev_split_char_iterator_no_trailing() {
- let data = "\nMäry häd ä little lämb\nLittle lämb\n";
-
- let mut split: Vec<&str> = data.split('\n').rev().collect();
- split.reverse();
- assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb", ""]);
-
- let mut split: Vec<&str> = data.split_terminator('\n').rev().collect();
- split.reverse();
- assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb"]);
-}
-
-#[test]
-fn test_utf16_code_units() {
- use unicode::str::Utf16Encoder;
- assert_eq!(Utf16Encoder::new(vec!['é', '\u{1F4A9}'].into_iter()).collect::<Vec<u16>>(),
- [0xE9, 0xD83D, 0xDCA9])
-}
-
-#[test]
-fn starts_with_in_unicode() {
- assert!(!"├── Cargo.toml".starts_with("# "));
-}
-
-#[test]
-fn starts_short_long() {
- assert!(!"".starts_with("##"));
- assert!(!"##".starts_with("####"));
- assert!("####".starts_with("##"));
- assert!(!"##ä".starts_with("####"));
- assert!("####ä".starts_with("##"));
- assert!(!"##".starts_with("####ä"));
- assert!("##ä##".starts_with("##ä"));
-
- assert!("".starts_with(""));
- assert!("ä".starts_with(""));
- assert!("#ä".starts_with(""));
- assert!("##ä".starts_with(""));
- assert!("ä###".starts_with(""));
- assert!("#ä##".starts_with(""));
- assert!("##ä#".starts_with(""));
-}
-
-#[test]
-fn contains_weird_cases() {
- assert!("* \t".contains(' '));
- assert!(!"* \t".contains('?'));
- assert!(!"* \t".contains('\u{1F4A9}'));
-}
-
-#[test]
-fn trim_ws() {
- assert_eq!(" \t a \t ".trim_left_matches(|c: char| c.is_whitespace()),
- "a \t ");
- assert_eq!(" \t a \t ".trim_right_matches(|c: char| c.is_whitespace()),
- " \t a");
- assert_eq!(" \t a \t ".trim_matches(|c: char| c.is_whitespace()),
- "a");
- assert_eq!(" \t \t ".trim_left_matches(|c: char| c.is_whitespace()),
- "");
- assert_eq!(" \t \t ".trim_right_matches(|c: char| c.is_whitespace()),
- "");
- assert_eq!(" \t \t ".trim_matches(|c: char| c.is_whitespace()),
- "");
-}
-
-mod pattern {
- use std::str::Pattern;
- use std::str::{Searcher, ReverseSearcher};
- use std::str::SearchStep::{self, Match, Reject, Done};
-
- macro_rules! make_test {
- ($name:ident, $p:expr, $h:expr, [$($e:expr,)*]) => {
- mod $name {
- use std::str::SearchStep::{Match, Reject};
- use super::{cmp_search_to_vec};
- #[test]
- fn fwd() {
- cmp_search_to_vec(false, $p, $h, vec![$($e),*]);
- }
- #[test]
- fn bwd() {
- cmp_search_to_vec(true, $p, $h, vec![$($e),*]);
- }
- }
- }
- }
-
- fn cmp_search_to_vec<'a, P: Pattern<'a>>(rev: bool, pat: P, haystack: &'a str,
- right: Vec<SearchStep>)
- where P::Searcher: ReverseSearcher<'a>
- {
- let mut searcher = pat.into_searcher(haystack);
- let mut v = vec![];
- loop {
- match if !rev {searcher.next()} else {searcher.next_back()} {
- Match(a, b) => v.push(Match(a, b)),
- Reject(a, b) => v.push(Reject(a, b)),
- Done => break,
- }
- }
- if rev {
- v.reverse();
- }
- assert_eq!(v, right);
- }
-
- make_test!(str_searcher_ascii_haystack, "bb", "abbcbbd", [
- Reject(0, 1),
- Match (1, 3),
- Reject(3, 4),
- Match (4, 6),
- Reject(6, 7),
- ]);
- make_test!(str_searcher_empty_needle_ascii_haystack, "", "abbcbbd", [
- Match(0, 0),
- Match(1, 1),
- Match(2, 2),
- Match(3, 3),
- Match(4, 4),
- Match(5, 5),
- Match(6, 6),
- Match(7, 7),
- ]);
- make_test!(str_searcher_mulibyte_haystack, " ", "├──", [
- Reject(0, 3),
- Reject(3, 6),
- Reject(6, 9),
- ]);
- make_test!(str_searcher_empty_needle_mulibyte_haystack, "", "├──", [
- Match(0, 0),
- Match(3, 3),
- Match(6, 6),
- Match(9, 9),
- ]);
- make_test!(str_searcher_empty_needle_empty_haystack, "", "", [
- Match(0, 0),
- ]);
- make_test!(str_searcher_nonempty_needle_empty_haystack, "├", "", [
- ]);
- make_test!(char_searcher_ascii_haystack, 'b', "abbcbbd", [
- Reject(0, 1),
- Match (1, 2),
- Match (2, 3),
- Reject(3, 4),
- Match (4, 5),
- Match (5, 6),
- Reject(6, 7),
- ]);
- make_test!(char_searcher_mulibyte_haystack, ' ', "├──", [
- Reject(0, 3),
- Reject(3, 6),
- Reject(6, 9),
- ]);
- make_test!(char_searcher_short_haystack, '\u{1F4A9}', "* \t", [
- Reject(0, 1),
- Reject(1, 2),
- Reject(2, 3),
- ]);
-
-}
-
-mod bench {
- macro_rules! make_test_inner {
- ($s:ident, $code:expr, $name:ident, $str:expr) => {
- #[bench]
- fn $name(bencher: &mut Bencher) {
- let mut $s = $str;
- black_box(&mut $s);
- bencher.iter(|| $code);
- }
- }
- }
-
- macro_rules! make_test {
- ($name:ident, $s:ident, $code:expr) => {
- mod $name {
- use test::Bencher;
- use test::black_box;
-
- // Short strings: 65 bytes each
- make_test_inner!($s, $code, short_ascii,
- "Mary had a little lamb, Little lamb Mary had a littl lamb, lamb!");
- make_test_inner!($s, $code, short_mixed,
- "ศไทย中华Việt Nam; Mary had a little lamb, Little lam!");
- make_test_inner!($s, $code, short_pile_of_poo,
- "💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩!");
- make_test_inner!($s, $code, long_lorem_ipsum,"\
-Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem sit amet dolor \
-ultricies condimentum. Praesent iaculis purus elit, ac malesuada quam malesuada in. Duis sed orci \
-eros. Suspendisse sit amet magna mollis, mollis nunc luctus, imperdiet mi. Integer fringilla non \
-sem ut lacinia. Fusce varius tortor a risus porttitor hendrerit. Morbi mauris dui, ultricies nec \
-tempus vel, gravida nec quam.
-
-In est dui, tincidunt sed tempus interdum, adipiscing laoreet ante. Etiam tempor, tellus quis \
-sagittis interdum, nulla purus mattis sem, quis auctor erat odio ac tellus. In nec nunc sit amet \
-diam volutpat molestie at sed ipsum. Vestibulum laoreet consequat vulputate. Integer accumsan \
-lorem ac dignissim placerat. Suspendisse convallis faucibus lorem. Aliquam erat volutpat. In vel \
-eleifend felis. Sed suscipit nulla lorem, sed mollis est sollicitudin et. Nam fermentum egestas \
-interdum. Curabitur ut nisi justo.
-
-Sed sollicitudin ipsum tellus, ut condimentum leo eleifend nec. Cras ut velit ante. Phasellus nec \
-mollis odio. Mauris molestie erat in arcu mattis, at aliquet dolor vehicula. Quisque malesuada \
-lectus sit amet nisi pretium, a condimentum ipsum porta. Morbi at dapibus diam. Praesent egestas \
-est sed risus elementum, eu rutrum metus ultrices. Etiam fermentum consectetur magna, id rutrum \
-felis accumsan a. Aliquam ut pellentesque libero. Sed mi nulla, lobortis eu tortor id, suscipit \
-ultricies neque. Morbi iaculis sit amet risus at iaculis. Praesent eget ligula quis turpis \
-feugiat suscipit vel non arcu. Interdum et malesuada fames ac ante ipsum primis in faucibus. \
-Aliquam sit amet placerat lorem.
-
-Cras a lacus vel ante posuere elementum. Nunc est leo, bibendum ut facilisis vel, bibendum at \
-mauris. Nullam adipiscing diam vel odio ornare, luctus adipiscing mi luctus. Nulla facilisi. \
-Mauris adipiscing bibendum neque, quis adipiscing lectus tempus et. Sed feugiat erat et nisl \
-lobortis pharetra. Donec vitae erat enim. Nullam sit amet felis et quam lacinia tincidunt. Aliquam \
-suscipit dapibus urna. Sed volutpat urna in magna pulvinar volutpat. Phasellus nec tellus ac diam \
-cursus accumsan.
-
-Nam lectus enim, dapibus non nisi tempor, consectetur convallis massa. Maecenas eleifend dictum \
-feugiat. Etiam quis mauris vel risus luctus mattis a a nunc. Nullam orci quam, imperdiet id \
-vehicula in, porttitor ut nibh. Duis sagittis adipiscing nisl vitae congue. Donec mollis risus eu \
-leo suscipit, varius porttitor nulla porta. Pellentesque ut sem nec nisi euismod vehicula. Nulla \
-malesuada sollicitudin quam eu fermentum!");
- }
- }
- }
-
- make_test!(chars_count, s, s.chars().count());
-
- make_test!(contains_bang_str, s, s.contains("!"));
- make_test!(contains_bang_char, s, s.contains('!'));
-
- make_test!(match_indices_a_str, s, s.match_indices("a").count());
-
- make_test!(split_a_str, s, s.split("a").count());
-
- make_test!(trim_ascii_char, s, {
- use std::ascii::AsciiExt;
- s.trim_matches(|c: char| c.is_ascii())
- });
- make_test!(trim_left_ascii_char, s, {
- use std::ascii::AsciiExt;
- s.trim_left_matches(|c: char| c.is_ascii())
- });
- make_test!(trim_right_ascii_char, s, {
- use std::ascii::AsciiExt;
- s.trim_right_matches(|c: char| c.is_ascii())
- });
-
- make_test!(find_underscore_char, s, s.find('_'));
- make_test!(rfind_underscore_char, s, s.rfind('_'));
- make_test!(find_underscore_str, s, s.find("_"));
-
- make_test!(find_zzz_char, s, s.find('\u{1F4A4}'));
- make_test!(rfind_zzz_char, s, s.rfind('\u{1F4A4}'));
- make_test!(find_zzz_str, s, s.find("\u{1F4A4}"));
-
- make_test!(split_space_char, s, s.split(' ').count());
- make_test!(split_terminator_space_char, s, s.split_terminator(' ').count());
-
- make_test!(splitn_space_char, s, s.splitn(10, ' ').count());
- make_test!(rsplitn_space_char, s, s.rsplitn(10, ' ').count());
-
- make_test!(split_space_str, s, s.split(" ").count());
- make_test!(split_ad_str, s, s.split("ad").count());
-}
+// All `str` tests live in libcollectionstest::str
source_name.clone(),
body);
let lo = p.span.lo;
- let body = p.parse_all_token_trees();
+ let body = match p.parse_all_token_trees() {
+ Ok(body) => body,
+ Err(err) => panic!(err),
+ };
let span = mk_sp(lo, p.last_span.hi);
p.abort_if_errors();
macros.push(ast::MacroDef {
let arg_tys = get_struct_fields(intr.clone(), cdata, did.node)
.iter()
.map(|field_ty| {
- arg_names.push(ast::Ident::new(field_ty.name));
+ arg_names.push(field_ty.name);
get_type(cdata, field_ty.id.node, tcx).ty
})
.collect();
rbml_w.wr_tagged_str(tag_paths_data_name, &token::get_name(name));
}
-fn encode_impl_type_basename(rbml_w: &mut Encoder, name: ast::Ident) {
- rbml_w.wr_tagged_str(tag_item_impl_type_basename, &token::get_ident(name));
+fn encode_impl_type_basename(rbml_w: &mut Encoder, name: ast::Name) {
+ rbml_w.wr_tagged_str(tag_item_impl_type_basename, &token::get_name(name));
}
pub fn encode_def_id(rbml_w: &mut Encoder, id: DefId) {
attrs: &[ast::Attribute],
id: NodeId,
path: PathElems,
- name: ast::Ident,
+ name: ast::Name,
vis: ast::Visibility) {
rbml_w.start_tag(tag_items_data_item);
encode_def_id(rbml_w, local_def(id));
encode_family(rbml_w, 'm');
- encode_name(rbml_w, name.name);
+ encode_name(rbml_w, name);
debug!("(encoding info for module) encoding info for module ID {}", id);
// Encode info about all the module children.
fn encode_info_for_struct_ctor(ecx: &EncodeContext,
rbml_w: &mut Encoder,
- name: ast::Ident,
+ name: ast::Name,
ctor_id: NodeId,
index: &mut Vec<entry<i64>>,
struct_id: NodeId) {
encode_def_id(rbml_w, local_def(ctor_id));
encode_family(rbml_w, 'o');
encode_bounds_and_type_for_item(rbml_w, ecx, ctor_id);
- encode_name(rbml_w, name.name);
+ encode_name(rbml_w, name);
ecx.tcx.map.with_path(ctor_id, |path| encode_path(rbml_w, path));
encode_parent_item(rbml_w, local_def(struct_id));
for arg in &decl.inputs {
let tag = tag_method_argument_name;
if let ast::PatIdent(_, ref path1, _) = arg.pat.node {
- let name = token::get_ident(path1.node);
+ let name = token::get_name(path1.node.name);
rbml_w.wr_tagged_bytes(tag, name.as_bytes());
} else {
rbml_w.wr_tagged_bytes(tag, &[]);
&item.attrs,
item.id,
path,
- item.ident,
+ item.ident.name,
item.vis);
}
ast::ItemForeignMod(ref fm) => {
// If this is a tuple-like struct, encode the type of the constructor.
match struct_def.ctor_id {
Some(ctor_id) => {
- encode_info_for_struct_ctor(ecx, rbml_w, item.ident,
+ encode_info_for_struct_ctor(ecx, rbml_w, item.ident.name,
ctor_id, index, def_id.node);
}
None => {}
encode_polarity(rbml_w, polarity);
match ty.node {
ast::TyPath(None, ref path) if path.segments.len() == 1 => {
- let ident = path.segments.last().unwrap().identifier;
- encode_impl_type_basename(rbml_w, ident);
+ let name = path.segments.last().unwrap().identifier.name;
+ encode_impl_type_basename(rbml_w, name);
}
_ => {}
}
&[],
ast::CRATE_NODE_ID,
[].iter().cloned().chain(LinkedPath::empty()),
- syntax::parse::token::special_idents::invalid,
+ syntax::parse::token::special_idents::invalid.name,
ast::Public);
visit::walk_crate(&mut EncodeVisitor {
return op(&st.data[start_pos..end_pos]);
}
-pub fn parse_ident(st: &mut PState, last: char) -> ast::Ident {
- ast::Ident::new(parse_name(st, last))
-}
-
pub fn parse_name(st: &mut PState, last: char) -> ast::Name {
fn is_last(b: char, c: char) -> bool { return c == b; }
parse_name_(st, |a| is_last(last, a) )
}
}
- fn handle_field_access(&mut self, lhs: &ast::Expr, name: &ast::Ident) {
+ fn handle_field_access(&mut self, lhs: &ast::Expr, name: ast::Name) {
match ty::expr_ty_adjusted(self.tcx, lhs).sty {
ty::ty_struct(id, _) => {
let fields = ty::lookup_struct_fields(self.tcx, id);
let field_id = fields.iter()
- .find(|field| field.name == name.name).unwrap().id;
+ .find(|field| field.name == name).unwrap().id;
self.live_symbols.insert(field_id.node);
},
_ => ()
self.lookup_and_handle_method(expr.id, expr.span);
}
ast::ExprField(ref lhs, ref ident) => {
- self.handle_field_access(&**lhs, &ident.node);
+ self.handle_field_access(&**lhs, ident.node.name);
}
ast::ExprTupField(ref lhs, idx) => {
self.handle_tup_field_access(&**lhs, idx.node);
fn warn_dead_code(&mut self,
id: ast::NodeId,
span: codemap::Span,
- ident: ast::Ident,
+ name: ast::Name,
node_type: &str) {
- let name = ident.as_str();
+ let name = name.as_str();
if !name.starts_with("_") {
self.tcx
.sess
impl<'a, 'tcx, 'v> Visitor<'v> for DeadVisitor<'a, 'tcx> {
fn visit_item(&mut self, item: &ast::Item) {
if self.should_warn_about_item(item) {
- self.warn_dead_code(item.id, item.span, item.ident, item.node.descriptive_variant());
+ self.warn_dead_code(
+ item.id,
+ item.span,
+ item.ident.name,
+ item.node.descriptive_variant()
+ );
} else {
match item.node {
ast::ItemEnum(ref enum_def, _) => {
for variant in &enum_def.variants {
if self.should_warn_about_variant(&variant.node) {
self.warn_dead_code(variant.node.id, variant.span,
- variant.node.name, "variant");
+ variant.node.name.name, "variant");
}
}
},
fn visit_foreign_item(&mut self, fi: &ast::ForeignItem) {
if !self.symbol_is_live(fi.id, None) {
- self.warn_dead_code(fi.id, fi.span, fi.ident, fi.node.descriptive_variant());
+ self.warn_dead_code(fi.id, fi.span, fi.ident.name, fi.node.descriptive_variant());
}
visit::walk_foreign_item(self, fi);
}
match fk {
visit::FkMethod(name, _) => {
if !self.symbol_is_live(id, None) {
- self.warn_dead_code(id, span, name, "method");
+ self.warn_dead_code(id, span, name.name, "method");
}
}
_ => ()
fn visit_struct_field(&mut self, field: &ast::StructField) {
if self.should_warn_about_field(&field.node) {
self.warn_dead_code(field.node.id, field.span,
- field.node.ident().unwrap(), "struct field");
+ field.node.ident().unwrap().name, "struct field");
}
visit::walk_struct_field(self, field);
ExchangeHeapLangItem, "exchange_heap", exchange_heap;
OwnedBoxLangItem, "owned_box", owned_box;
- PhantomFnItem, "phantom_fn", phantom_fn;
PhantomDataItem, "phantom_data", phantom_data;
// Deprecated:
#[derive(Copy, Clone, Debug)]
struct LocalInfo {
id: NodeId,
- ident: ast::Ident
+ name: ast::Name
}
#[derive(Copy, Clone, Debug)]
enum VarKind {
- Arg(NodeId, ast::Ident),
+ Arg(NodeId, ast::Name),
Local(LocalInfo),
ImplicitRet,
CleanExit
fn variable_name(&self, var: Variable) -> String {
match self.var_kinds[var.get()] {
- Local(LocalInfo { ident: nm, .. }) | Arg(_, nm) => {
- token::get_ident(nm).to_string()
+ Local(LocalInfo { name, .. }) | Arg(_, name) => {
+ token::get_name(name).to_string()
},
ImplicitRet => "<implicit-ret>".to_string(),
CleanExit => "<clean-exit>".to_string()
&*arg.pat,
|_bm, arg_id, _x, path1| {
debug!("adding argument {}", arg_id);
- let ident = path1.node;
- fn_maps.add_variable(Arg(arg_id, ident));
+ let name = path1.node.name;
+ fn_maps.add_variable(Arg(arg_id, name));
})
};
fn visit_local(ir: &mut IrMaps, local: &ast::Local) {
pat_util::pat_bindings(&ir.tcx.def_map, &*local.pat, |_, p_id, sp, path1| {
debug!("adding local variable {}", p_id);
- let name = path1.node;
+ let name = path1.node.name;
ir.add_live_node_for_node(p_id, VarDefNode(sp));
ir.add_variable(Local(LocalInfo {
id: p_id,
- ident: name
+ name: name
}));
});
visit::walk_local(ir, local);
pat_util::pat_bindings(&ir.tcx.def_map, &**pat, |bm, p_id, sp, path1| {
debug!("adding local variable {} from match with bm {:?}",
p_id, bm);
- let name = path1.node;
+ let name = path1.node.name;
ir.add_live_node_for_node(p_id, VarDefNode(sp));
ir.add_variable(Local(LocalInfo {
id: p_id,
- ident: name
+ name: name
}));
})
}
EarlyScope(_, lifetimes, s) |
LateScope(lifetimes, s) => {
if let Some((_, lifetime_def)) = search_lifetimes(lifetimes, lifetime) {
- self.sess.span_warn(
+ self.sess.span_err(
lifetime.span,
&format!("lifetime name `{}` shadows another \
lifetime name that is already in scope",
lifetime_def.span,
&format!("shadowed lifetime `{}` declared here",
token::get_name(lifetime.name)));
- self.sess.span_note(
- lifetime.span,
- "shadowed lifetimes are deprecated \
- and will become a hard error before 1.0");
return;
}
match predicate {
ty::Predicate::Trait(ref data) => {
// In the case of a trait predicate, we can skip the "self" type.
- Some(data.def_id()) != tcx.lang_items.phantom_fn() &&
- data.0.trait_ref.substs.types.get_slice(TypeSpace)
- .iter()
- .cloned()
- .any(is_self)
+ data.0.trait_ref.substs.types.get_slice(TypeSpace)
+ .iter()
+ .cloned()
+ .any(is_self)
}
ty::Predicate::Projection(..) |
ty::Predicate::TypeOutlives(..) |
ambiguous: false
};
- // Check for the `PhantomFn` trait. This is really just a
- // special annotation that is *always* considered to match, no
- // matter what the type parameters are etc.
- if self.tcx().lang_items.phantom_fn() == Some(obligation.predicate.def_id()) {
- candidates.vec.push(PhantomFnCandidate);
- return Ok(candidates);
- }
-
// Other bounds. Consider both in-scope bounds from fn decl
// and applicable impls. There is a certain set of precedence rules here.
// #18453.
true
}
+ (&ImplCandidate(..), &ObjectCandidate(..)) => {
+ // This means that we are matching an object of type
+ // `Trait` against the trait `Trait`. In that case, we
+ // always prefer to use the object vtable over the
+ // impl. Like a where clause, the impl may or may not
+ // be the one that is used by the object (because the
+ // impl may have additional where-clauses that the
+ // object's source might not meet) -- if it is, using
+ // the vtable is fine. If it is not, using the vtable
+ // is good. A win win!
+ true
+ }
(&DefaultImplCandidate(_), _) => {
// Prefer other candidates over default implementations.
self.tcx().sess.bug(
use collections::enum_set::{EnumSet, CLike};
use std::collections::{HashMap, HashSet};
use syntax::abi;
-use syntax::ast::{CrateNum, DefId, Ident, ItemTrait, LOCAL_CRATE};
+use syntax::ast::{CrateNum, DefId, ItemTrait, LOCAL_CRATE};
use syntax::ast::{MutImmutable, MutMutable, Name, NamedField, NodeId};
use syntax::ast::{StmtExpr, StmtSemi, StructField, UnnamedField, Visibility};
use syntax::ast_util::{self, is_local, lit_is_str, local_def};
variant_info.arg_names.as_ref()
.expect("must have struct enum variant if accessing a named fields")
.iter().zip(variant_info.args.iter())
- .find(|&(ident, _)| ident.name == n)
- .map(|(_ident, arg_t)| arg_t.subst(cx, substs))
+ .find(|&(&name, _)| name == n)
+ .map(|(_name, arg_t)| arg_t.subst(cx, substs))
}
_ => None
}
#[derive(Clone)]
pub struct VariantInfo<'tcx> {
pub args: Vec<Ty<'tcx>>,
- pub arg_names: Option<Vec<ast::Ident>>,
+ pub arg_names: Option<Vec<ast::Name>>,
pub ctor_ty: Option<Ty<'tcx>>,
pub name: ast::Name,
pub id: ast::DefId,
.map(|field| node_id_to_type(cx, field.node.id)).collect();
let arg_names = fields.iter().map(|field| {
match field.node.kind {
- NamedField(ident, _) => ident,
+ NamedField(ident, _) => ident.name,
UnnamedField(..) => cx.sess.bug(
"enum_variants: all fields in struct must have a name")
}
relate_substs(relation, opt_variances, a_subst, b_subst)
}
-fn relate_substs<'a,'tcx,R>(relation: &mut R,
- variances: Option<&ty::ItemVariances>,
- a_subst: &Substs<'tcx>,
- b_subst: &Substs<'tcx>)
- -> RelateResult<'tcx, Substs<'tcx>>
+fn relate_substs<'a,'tcx:'a,R>(relation: &mut R,
+ variances: Option<&ty::ItemVariances>,
+ a_subst: &Substs<'tcx>,
+ b_subst: &Substs<'tcx>)
+ -> RelateResult<'tcx, Substs<'tcx>>
where R: TypeRelation<'a,'tcx>
{
let mut substs = Substs::empty();
Ok(substs)
}
-fn relate_type_params<'a,'tcx,R>(relation: &mut R,
- variances: Option<&[ty::Variance]>,
- a_tys: &[Ty<'tcx>],
- b_tys: &[Ty<'tcx>])
- -> RelateResult<'tcx, Vec<Ty<'tcx>>>
+fn relate_type_params<'a,'tcx:'a,R>(relation: &mut R,
+ variances: Option<&[ty::Variance]>,
+ a_tys: &[Ty<'tcx>],
+ b_tys: &[Ty<'tcx>])
+ -> RelateResult<'tcx, Vec<Ty<'tcx>>>
where R: TypeRelation<'a,'tcx>
{
if a_tys.len() != b_tys.len() {
}
}
-fn relate_arg_vecs<'a,'tcx,R>(relation: &mut R,
- a_args: &[Ty<'tcx>],
- b_args: &[Ty<'tcx>])
- -> RelateResult<'tcx, Vec<Ty<'tcx>>>
+fn relate_arg_vecs<'a,'tcx:'a,R>(relation: &mut R,
+ a_args: &[Ty<'tcx>],
+ b_args: &[Ty<'tcx>])
+ -> RelateResult<'tcx, Vec<Ty<'tcx>>>
where R: TypeRelation<'a,'tcx>
{
if a_args.len() != b_args.len() {
///////////////////////////////////////////////////////////////////////////
// Error handling
-pub fn expected_found<'a,'tcx,R,T>(relation: &mut R,
- a: &T,
- b: &T)
- -> ty::expected_found<T>
+pub fn expected_found<'a,'tcx:'a,R,T>(relation: &mut R,
+ a: &T,
+ b: &T)
+ -> ty::expected_found<T>
where R: TypeRelation<'a,'tcx>, T: Clone
{
expected_found_bool(relation.a_is_expected(), a, b)
if self.opts.treat_err_as_bug {
self.span_bug(sp, msg);
}
- self.diagnostic().span_fatal(sp, msg)
+ panic!(self.diagnostic().span_fatal(sp, msg))
}
pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> ! {
if self.opts.treat_err_as_bug {
self.span_bug(sp, msg);
}
- self.diagnostic().span_fatal_with_code(sp, msg, code)
+ panic!(self.diagnostic().span_fatal_with_code(sp, msg, code))
}
pub fn fatal(&self, msg: &str) -> ! {
if self.opts.treat_err_as_bug {
match *origin_field_name {
mc::NamedField(ast_name) => {
let variant_arg_names = variant_info.arg_names.as_ref().unwrap();
- for variant_arg_ident in variant_arg_names {
- if variant_arg_ident.name == ast_name {
+ for &variant_arg_name in variant_arg_names {
+ if variant_arg_name == ast_name {
continue;
}
- let field_name = mc::NamedField(variant_arg_ident.name);
+ let field_name = mc::NamedField(variant_arg_name);
add_fragment_sibling_local(field_name, Some(variant_info.id));
}
}
fn check_static_method(&mut self,
span: Span,
method_id: ast::DefId,
- name: ast::Ident) {
+ name: ast::Name) {
// If the method is a default method, we need to use the def_id of
// the default implementation.
let method_id = match ty::impl_or_trait_item(self.tcx, method_id) {
ty::TypeTraitItem(_) => method_id,
};
- let string = token::get_ident(name);
+ let string = token::get_name(name);
self.report_error(self.ensure_public(span,
method_id,
None,
}
// Checks that a path is in scope.
- fn check_path(&mut self, span: Span, path_id: ast::NodeId, last: ast::Ident) {
+ fn check_path(&mut self, span: Span, path_id: ast::NodeId, last: ast::Name) {
debug!("privacy - path {}", self.nodestr(path_id));
let path_res = *self.tcx.def_map.borrow().get(&path_id).unwrap();
let ck = |tyname: &str| {
let ck_public = |def: ast::DefId| {
debug!("privacy - ck_public {:?}", def);
- let name = token::get_ident(last);
+ let name = token::get_name(last);
let origdid = path_res.def_id();
self.ensure_public(span,
def,
// Checks that a method is in scope.
fn check_method(&mut self, span: Span, origin: &MethodOrigin,
- ident: ast::Ident) {
+ name: ast::Name) {
match *origin {
MethodStatic(method_id) => {
- self.check_static_method(span, method_id, ident)
+ self.check_static_method(span, method_id, name)
}
MethodStaticClosure(_) => {}
// Trait methods are always all public. The only controlling factor
match pid.node {
ast::PathListIdent { id, name } => {
debug!("privacy - ident item {}", id);
- self.check_path(pid.span, id, name);
+ self.check_path(pid.span, id, name.name);
}
ast::PathListMod { id } => {
debug!("privacy - mod item {}", id);
- let name = prefix.segments.last().unwrap().identifier;
+ let name = prefix.segments.last().unwrap().identifier.name;
self.check_path(pid.span, id, name);
}
}
}
Some(method) => {
debug!("(privacy checking) checking impl method");
- self.check_method(expr.span, &method.origin, ident.node);
+ self.check_method(expr.span, &method.origin, ident.node.name);
}
}
}
}
fn visit_path(&mut self, path: &ast::Path, id: ast::NodeId) {
- self.check_path(path.span, id, path.segments.last().unwrap().identifier);
+ self.check_path(path.span, id, path.segments.last().unwrap().identifier.name);
visit::walk_path(self, path);
}
}
fn process_method(&mut self, sig: &ast::MethodSig,
body: Option<&ast::Block>,
- id: ast::NodeId, ident: ast::Ident,
+ id: ast::NodeId, name: ast::Name,
span: Span) {
if generated_code(span) {
return;
},
};
- let qualname = &format!("{}::{}", qualname, &get_ident(ident));
+ let qualname = &format!("{}::{}", qualname, &token::get_name(name));
// record the decl for this def (if it has one)
let decl_id = ty::trait_item_of_item(&self.analysis.ty_cx,
match trait_item.node {
ast::MethodTraitItem(ref sig, ref body) => {
self.process_method(sig, body.as_ref().map(|x| &**x),
- trait_item.id, trait_item.ident, trait_item.span);
+ trait_item.id, trait_item.ident.name, trait_item.span);
}
ast::TypeTraitItem(..) => {}
}
match impl_item.node {
ast::MethodImplItem(ref sig, ref body) => {
self.process_method(sig, Some(body), impl_item.id,
- impl_item.ident, impl_item.span);
+ impl_item.ident.name, impl_item.span);
}
ast::TypeImplItem(_) |
ast::MacImplItem(_) => {}
use std::iter::AdditiveIterator;
use std::rc::Rc;
use syntax::ast;
-use syntax::ast::{DUMMY_NODE_ID, Ident, NodeId};
+use syntax::ast::{DUMMY_NODE_ID, NodeId};
use syntax::codemap::Span;
use syntax::fold::Folder;
use syntax::ptr::P;
pub ty: Ty<'tcx>,
}
-type BindingsMap<'tcx> = FnvHashMap<Ident, BindingInfo<'tcx>>;
+type BindingsMap<'tcx> = FnvHashMap<ast::Ident, BindingInfo<'tcx>>;
struct ArmData<'p, 'blk, 'tcx: 'blk> {
bodycx: Block<'blk, 'tcx>,
struct Match<'a, 'p: 'a, 'blk: 'a, 'tcx: 'blk> {
pats: Vec<&'p ast::Pat>,
data: &'a ArmData<'p, 'blk, 'tcx>,
- bound_ptrs: Vec<(Ident, ValueRef)>,
+ bound_ptrs: Vec<(ast::Ident, ValueRef)>,
// Thread along renamings done by the check_match::StaticInliner, so we can
// map back to original NodeIds
pat_renaming_map: Option<&'a FnvHashMap<(NodeId, Span), NodeId>>
debug!("binding {} to {}", binding_info.id, bcx.val_to_string(llval));
bcx.fcx.lllocals.borrow_mut().insert(binding_info.id, datum);
- debuginfo::create_match_binding_metadata(bcx, ident, binding_info);
+ debuginfo::create_match_binding_metadata(bcx, ident.name, binding_info);
}
bcx
}
let mut bindings_map = FnvHashMap();
pat_bindings(&tcx.def_map, &*pat, |bm, p_id, span, path1| {
let ident = path1.node;
+ let name = ident.name;
let variable_ty = node_id_type(bcx, p_id);
let llvariable_ty = type_of::type_of(ccx, variable_ty);
let tcx = bcx.tcx();
"__llmatch");
trmode = TrByCopy(alloca_no_lifetime(bcx,
llvariable_ty,
- &bcx.ident(ident)));
+ &bcx.name(name)));
}
ast::BindByValue(_) => {
// in this case, the final type of the variable will be T,
// above
llmatch = alloca_no_lifetime(bcx,
llvariable_ty.ptr_to(),
- &bcx.ident(ident));
+ &bcx.name(name));
trmode = TrByMove;
}
ast::BindByRef(_) => {
llmatch = alloca_no_lifetime(bcx,
llvariable_ty,
- &bcx.ident(ident));
+ &bcx.name(name));
trmode = TrByRef;
}
};
pat_bindings(&tcx.def_map, pat, |_, p_id, _, path1| {
let scope = cleanup::var_scope(tcx, p_id);
bcx = mk_binding_alloca(
- bcx, p_id, &path1.node, scope, (),
+ bcx, p_id, path1.node.name, scope, (),
|(), bcx, llval, ty| { drop_done_fill_mem(bcx, llval, ty); bcx });
});
bcx
Some(ident) => {
let var_scope = cleanup::var_scope(tcx, local.id);
return mk_binding_alloca(
- bcx, pat.id, ident, var_scope, (),
+ bcx, pat.id, ident.name, var_scope, (),
|(), bcx, v, _| expr::trans_into(bcx, &**init_expr,
expr::SaveIn(v)));
}
bcx
} else {
mk_binding_alloca(
- bcx, pat.id, ident, arg_scope, arg,
+ bcx, pat.id, ident.name, arg_scope, arg,
|arg, bcx, llval, _| arg.store_to(bcx, llval))
}
}
fn mk_binding_alloca<'blk, 'tcx, A, F>(bcx: Block<'blk, 'tcx>,
p_id: ast::NodeId,
- ident: &ast::Ident,
+ name: ast::Name,
cleanup_scope: cleanup::ScopeId,
arg: A,
populate: F)
let var_ty = node_id_type(bcx, p_id);
// Allocate memory on stack for the binding.
- let llval = alloc_ty(bcx, var_ty, &bcx.ident(*ident));
+ let llval = alloc_ty(bcx, var_ty, &bcx.name(name));
// Subtle: be sure that we *populate* the memory *before*
// we schedule the cleanup.
// binding will live and place it into the appropriate
// map.
bcx = mk_binding_alloca(
- bcx, pat.id, &path1.node, cleanup_scope, (),
+ bcx, pat.id, path1.node.name, cleanup_scope, (),
|(), bcx, llval, ty| {
match pat_binding_mode {
ast::BindByValue(_) => {
use std::cell::{Cell, RefCell};
use std::result::Result as StdResult;
use std::vec::Vec;
-use syntax::ast::Ident;
use syntax::ast;
use syntax::ast_map::{PathElem, PathName};
use syntax::codemap::{DUMMY_SP, Span};
}
pub fn sess(&self) -> &'blk Session { self.fcx.ccx.sess() }
- pub fn ident(&self, ident: Ident) -> String {
- token::get_ident(ident).to_string()
+ pub fn name(&self, name: ast::Name) -> String {
+ token::get_name(name).to_string()
}
pub fn node_id_to_string(&self, id: ast::NodeId) -> String {
use util::ppaux::Repr;
use syntax::ast;
-use syntax::ast::Ident;
use syntax::ast_util;
use syntax::parse::token::InternedString;
use syntax::parse::token;
pub fn trans_break_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
expr: &ast::Expr,
- opt_label: Option<Ident>,
+ opt_label: Option<ast::Ident>,
exit: usize)
-> Block<'blk, 'tcx> {
let _icx = push_ctxt("trans_break_cont");
pub fn trans_break<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
expr: &ast::Expr,
- label_opt: Option<Ident>)
+ label_opt: Option<ast::Ident>)
-> Block<'blk, 'tcx> {
return trans_break_cont(bcx, expr, label_opt, cleanup::EXIT_BREAK);
}
pub fn trans_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
expr: &ast::Expr,
- label_opt: Option<Ident>)
+ label_opt: Option<ast::Ident>)
-> Block<'blk, 'tcx> {
return trans_break_cont(bcx, expr, label_opt, cleanup::EXIT_LOOP);
}
let var_item = cx.tcx().map.get(node_id);
- let (ident, span) = match var_item {
+ let (name, span) = match var_item {
ast_map::NodeItem(item) => {
match item.node {
- ast::ItemStatic(..) => (item.ident, item.span),
- ast::ItemConst(..) => (item.ident, item.span),
+ ast::ItemStatic(..) => (item.ident.name, item.span),
+ ast::ItemConst(..) => (item.ident.name, item.span),
_ => {
cx.sess()
.span_bug(item.span,
let variable_type = ty::node_id_to_type(cx.tcx(), node_id);
let type_metadata = type_metadata(cx, variable_type, span);
let namespace_node = namespace_for_item(cx, ast_util::local_def(node_id));
- let var_name = token::get_ident(ident).to_string();
+ let var_name = token::get_name(name).to_string();
let linkage_name =
namespace_node.mangled_name_of_contained_item(&var_name[..]);
let var_scope = namespace_node.scope;
let scope_metadata = scope_metadata(bcx.fcx, node_id, span);
declare_local(bcx,
- var_ident.node,
+ var_ident.node.name,
datum.ty,
scope_metadata,
DirectVariable { alloca: datum.val },
let ast_item = cx.tcx().map.find(node_id);
- let variable_ident = match ast_item {
+ let variable_name = match ast_item {
None => {
cx.sess().span_bug(span, "debuginfo::create_captured_var_metadata: node not found");
}
Some(ast_map::NodeLocal(pat)) | Some(ast_map::NodeArg(pat)) => {
match pat.node {
ast::PatIdent(_, ref path1, _) => {
- path1.node
+ path1.node.name
}
_ => {
cx.sess()
};
declare_local(bcx,
- variable_ident,
+ variable_name,
variable_type,
scope_metadata,
variable_access,
///
/// Adds the created metadata nodes directly to the crate's IR.
pub fn create_match_binding_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
- variable_ident: ast::Ident,
+ variable_name: ast::Name,
binding: BindingInfo<'tcx>) {
if bcx.unreachable.get() ||
fn_should_be_ignored(bcx.fcx) ||
};
declare_local(bcx,
- variable_ident,
+ variable_name,
binding.ty,
scope_metadata,
var_access,
};
declare_local(bcx,
- var_ident.node,
+ var_ident.node.name,
datum.ty,
scope_metadata,
DirectVariable { alloca: datum.val },
let fnitem = cx.tcx().map.get(fn_ast_id);
- let (ident, fn_decl, generics, top_level_block, span, has_path) = match fnitem {
+ let (name, fn_decl, generics, top_level_block, span, has_path) = match fnitem {
ast_map::NodeItem(ref item) => {
if contains_nodebug_attribute(&item.attrs) {
return FunctionDebugContext::FunctionWithoutDebugInfo;
match item.node {
ast::ItemFn(ref fn_decl, _, _, ref generics, ref top_level_block) => {
- (item.ident, fn_decl, generics, top_level_block, item.span, true)
+ (item.ident.name, fn_decl, generics, top_level_block, item.span, true)
}
_ => {
cx.sess().span_bug(item.span,
return FunctionDebugContext::FunctionWithoutDebugInfo;
}
- (impl_item.ident,
+ (impl_item.ident.name,
&sig.decl,
&sig.generics,
body,
match expr.node {
ast::ExprClosure(_, ref fn_decl, ref top_level_block) => {
let name = format!("fn{}", token::gensym("fn"));
- let name = token::str_to_ident(&name[..]);
+ let name = token::intern(&name[..]);
(name, fn_decl,
// This is not quite right. It should actually inherit
// the generics of the enclosing function.
return FunctionDebugContext::FunctionWithoutDebugInfo;
}
- (trait_item.ident,
+ (trait_item.ident.name,
&sig.decl,
&sig.generics,
body,
// Get_template_parameters() will append a `<...>` clause to the function
// name if necessary.
- let mut function_name = String::from_str(&token::get_ident(ident));
+ let mut function_name = String::from_str(&token::get_name(name));
let template_parameters = get_template_parameters(cx,
generics,
param_substs,
actual_self_type,
codemap::DUMMY_SP);
- let ident = special_idents::type_self;
+ let name = token::get_name(special_idents::type_self.name);
- let ident = token::get_ident(ident);
- let name = CString::new(ident.as_bytes()).unwrap();
+ let name = CString::new(name.as_bytes()).unwrap();
let param_metadata = unsafe {
llvm::LLVMDIBuilderCreateTemplateTypeParameter(
DIB(cx),
}
fn declare_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
- variable_ident: ast::Ident,
+ variable_name: ast::Name,
variable_type: Ty<'tcx>,
scope_metadata: DIScope,
variable_access: VariableAccess,
let filename = span_start(cx, span).file.name.clone();
let file_metadata = file_metadata(cx, &filename[..]);
- let name = token::get_ident(variable_ident);
+ let name = token::get_name(variable_name);
let loc = span_start(cx, span);
let type_metadata = type_metadata(cx, variable_type, span);
// MemberDescription of the struct's single field.
let sole_struct_member_description = MemberDescription {
name: match non_null_variant.arg_names {
- Some(ref names) => token::get_ident(names[0]).to_string(),
+ Some(ref names) => token::get_name(names[0]).to_string(),
None => "".to_string()
},
llvm_type: non_null_llvm_type,
let mut arg_names: Vec<_> = match variant_info.arg_names {
Some(ref names) => {
names.iter()
- .map(|ident| {
- token::get_ident(*ident).to_string()
- }).collect()
+ .map(|&name| token::get_name(name).to_string())
+ .collect()
}
None => variant_info.args.iter().map(|_| "".to_string()).collect()
};
struct ScopeStackEntry {
scope_metadata: DIScope,
- ident: Option<ast::Ident>
+ name: Option<ast::Name>
}
- let mut scope_stack = vec!(ScopeStackEntry { scope_metadata: fn_metadata,
- ident: None });
+ let mut scope_stack = vec!(ScopeStackEntry { scope_metadata: fn_metadata, name: None });
scope_map.insert(fn_ast_id, fn_metadata);
// Push argument identifiers onto the stack so arguments integrate nicely
for arg in args {
pat_util::pat_bindings(def_map, &*arg.pat, |_, node_id, _, path1| {
scope_stack.push(ScopeStackEntry { scope_metadata: fn_metadata,
- ident: Some(path1.node) });
+ name: Some(path1.node.name) });
scope_map.insert(node_id, fn_metadata);
})
}
loc.col.to_usize() as c_uint)
};
- scope_stack.push(ScopeStackEntry { scope_metadata: scope_metadata,
- ident: None });
+ scope_stack.push(ScopeStackEntry { scope_metadata: scope_metadata, name: None });
inner_walk(cx, scope_stack, scope_map);
// pop artificial scopes
- while scope_stack.last().unwrap().ident.is_some() {
+ while scope_stack.last().unwrap().name.is_some() {
scope_stack.pop();
}
// scope stack and maybe introduce an artificial scope
if pat_util::pat_is_binding(def_map, &*pat) {
- let ident = path1.node;
+ let name = path1.node.name;
// LLVM does not properly generate 'DW_AT_start_scope' fields
// for variable DIEs. For this reason we have to introduce
// variables with the same name will cause the problem.
let need_new_scope = scope_stack
.iter()
- .any(|entry| entry.ident.iter().any(|i| i.name == ident.name));
+ .any(|entry| entry.name == Some(name));
if need_new_scope {
// Create a new lexical scope and push it onto the stack
scope_stack.push(ScopeStackEntry {
scope_metadata: scope_metadata,
- ident: Some(ident)
+ name: Some(name)
});
} else {
let prev_metadata = scope_stack.last().unwrap().scope_metadata;
scope_stack.push(ScopeStackEntry {
scope_metadata: prev_metadata,
- ident: Some(ident)
+ name: Some(name)
});
}
}
ty::with_path(cx.tcx(), def_id, |path| {
// prepend crate name if not already present
let krate = if def_id.krate == ast::LOCAL_CRATE {
- let crate_namespace_ident = token::str_to_ident(crate_root_namespace(cx));
- Some(ast_map::PathMod(crate_namespace_ident.name))
+ let crate_namespace_name = token::intern(crate_root_namespace(cx));
+ Some(ast_map::PathMod(crate_namespace_name))
} else {
None
};
trans_def(bcx, expr, bcx.def(expr.id))
}
ast::ExprField(ref base, ident) => {
- trans_rec_field(bcx, &**base, ident.node)
+ trans_rec_field(bcx, &**base, ident.node.name)
}
ast::ExprTupField(ref base, idx) => {
trans_rec_tup_field(bcx, &**base, idx.node)
/// Translates `base.field`.
fn trans_rec_field<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
base: &ast::Expr,
- field: ast::Ident)
+ field: ast::Name)
-> DatumBlock<'blk, 'tcx, Expr> {
- trans_field(bcx, base, |tcx, field_tys| ty::field_idx_strict(tcx, field.name, field_tys))
+ trans_field(bcx, base, |tcx, field_tys| ty::field_idx_strict(tcx, field, field_tys))
}
/// Translates `base.<idx>`.
self.check_variances_for_type_defn(item, ast_generics);
}
- ast::ItemTrait(_, ref ast_generics, _, ref items) => {
+ ast::ItemTrait(_, _, _, ref items) => {
let trait_predicates =
ty::lookup_predicates(ccx.tcx, local_def(item.id));
- reject_non_type_param_bounds(
- ccx.tcx,
- item.span,
- &trait_predicates);
- self.check_variances(item, ast_generics, &trait_predicates,
- self.tcx().lang_items.phantom_fn());
+ reject_non_type_param_bounds(ccx.tcx, item.span, &trait_predicates);
if ty::trait_has_default_impl(ccx.tcx, local_def(item.id)) {
if !items.is_empty() {
ccx.tcx.sess.span_err(
ast_generics: &ast::Generics)
{
let item_def_id = local_def(item.id);
- let predicates = ty::lookup_predicates(self.tcx(), item_def_id);
- self.check_variances(item,
- ast_generics,
- &predicates,
- self.tcx().lang_items.phantom_data());
- }
-
- fn check_variances(&self,
- item: &ast::Item,
- ast_generics: &ast::Generics,
- ty_predicates: &ty::GenericPredicates<'tcx>,
- suggested_marker_id: Option<ast::DefId>)
- {
- let variance_lang_items = &[
- self.tcx().lang_items.phantom_fn(),
- self.tcx().lang_items.phantom_data(),
- ];
-
- let item_def_id = local_def(item.id);
- let is_lang_item = variance_lang_items.iter().any(|n| *n == Some(item_def_id));
- if is_lang_item {
- return;
- }
-
+ let ty_predicates = ty::lookup_predicates(self.tcx(), item_def_id);
let variances = ty::item_variances(self.tcx(), item_def_id);
let mut constrained_parameters: HashSet<_> =
continue;
}
let span = self.ty_param_span(ast_generics, item, space, index);
- self.report_bivariance(span, param_ty.name, suggested_marker_id);
+ self.report_bivariance(span, param_ty.name);
}
for (space, index, &variance) in variances.regions.iter_enumerated() {
assert_eq!(space, TypeSpace);
let span = ast_generics.lifetimes[index].lifetime.span;
let name = ast_generics.lifetimes[index].lifetime.name;
- self.report_bivariance(span, name, suggested_marker_id);
+ self.report_bivariance(span, name);
}
}
fn report_bivariance(&self,
span: Span,
- param_name: ast::Name,
- suggested_marker_id: Option<ast::DefId>)
+ param_name: ast::Name)
{
self.tcx().sess.span_err(
span,
&format!("parameter `{}` is never used",
param_name.user_string(self.tcx())));
+ let suggested_marker_id = self.tcx().lang_items.phantom_data();
match suggested_marker_id {
Some(def_id) => {
self.tcx().sess.fileline_help(
use syntax::visit;
use syntax::codemap::Span;
use util::nodemap::DefIdMap;
-use util::ppaux::Repr;
+use util::ppaux::{Repr, UserString};
pub fn check(tcx: &ty::ctxt) {
let mut overlap = OverlapChecker { tcx: tcx, default_impls: DefIdMap() };
overlap.check_for_overlapping_impls();
- // this secondary walk specifically checks for impls of defaulted
- // traits, for which additional overlap rules exist
+ // this secondary walk specifically checks for some other cases,
+ // like defaulted traits, for which additional overlap rules exist
visit::walk_crate(&mut overlap, tcx.map.krate());
}
None => { }
}
}
- _ => {}
+ ast::ItemImpl(_, _, _, Some(_), ref self_ty, _) => {
+ let impl_def_id = ast_util::local_def(item.id);
+ let trait_ref = ty::impl_trait_ref(self.tcx, impl_def_id).unwrap();
+ let trait_def_id = trait_ref.def_id;
+ match trait_ref.self_ty().sty {
+ ty::ty_trait(ref data) => {
+ // This is something like impl Trait1 for Trait2. Illegal
+ // if Trait1 is a supertrait of Trait2 or Trait2 is not object safe.
+
+ if !traits::is_object_safe(self.tcx, data.principal_def_id()) {
+ // this just means the self-ty is illegal,
+ // and probably this error should have
+ // been reported elsewhere, but I'm trying to avoid
+ // giving a misleading message below.
+ span_err!(self.tcx.sess, self_ty.span, E0372,
+ "the trait `{}` cannot be made into an object",
+ ty::item_path_str(self.tcx, data.principal_def_id()));
+ } else {
+ let mut supertrait_def_ids =
+ traits::supertrait_def_ids(self.tcx, data.principal_def_id());
+ if supertrait_def_ids.any(|d| d == trait_def_id) {
+ span_err!(self.tcx.sess, item.span, E0371,
+ "the object type `{}` automatically \
+ implements the trait `{}`",
+ trait_ref.self_ty().user_string(self.tcx),
+ ty::item_path_str(self.tcx, trait_def_id));
+ }
+ }
+ }
+ _ => { }
+ }
+ }
+ _ => {
+ }
}
+ visit::walk_item(self, item);
}
}
E0366, // dropck forbid specialization to concrete type or region
E0367, // dropck forbid specialization to predicate not in struct/enum
E0368, // binary operation `<op>=` cannot be applied to types
- E0369 // binary operation `<op>` cannot be applied to types
+ E0369, // binary operation `<op>` cannot be applied to types
+ E0371, // impl Trait for Trait is illegal
+ E0372 // impl Trait for Trait where Trait is not object safe
}
__build_diagnostic_array! { DIAGNOSTICS }
//! defined on type `X`, we only consider the definition of the type `X`
//! and the definitions of any types it references.
//!
-//! We only infer variance for type parameters found on *types*: structs,
-//! enums, and traits. We do not infer variance for type parameters found
-//! on fns or impls. This is because those things are not type definitions
-//! and variance doesn't really make sense in that context.
-//!
-//! It is worth covering what variance means in each case. For structs and
-//! enums, I think it is fairly straightforward. The variance of the type
+//! We only infer variance for type parameters found on *data types*
+//! like structs and enums. In these cases, there is a fairly straightforward
+//! explanation for what variance means. The variance of the type
//! or lifetime parameters defines whether `T<A>` is a subtype of `T<B>`
//! (resp. `T<'a>` and `T<'b>`) based on the relationship of `A` and `B`
-//! (resp. `'a` and `'b`). (FIXME #3598 -- we do not currently make use of
-//! the variances we compute for type parameters.)
+//! (resp. `'a` and `'b`).
+//!
+//! We do not infer variance for type parameters found on traits, fns,
+//! or impls. Variance on trait parameters can indeed make sense
+//! (and we used to compute it) but it is actually rather subtle in
+//! meaning and not that useful in practice, so we removed it. See the
+//! addendum for some details. Variances on fn/impl parameters, on the
+//! other hand, don't make sense because these parameters are
+//! instantiated and then forgotten; they don't persist in types or
+//! compiled byproducts.
+//!
+//! ### The algorithm
+//!
+//! The basic idea is quite straightforward. We iterate over the types
+//! defined and, for each use of a type parameter X, accumulate a
+//! constraint indicating that the variance of X must be valid for the
+//! variance of that use site. We then iteratively refine the variance of
+//! X until all constraints are met. There is *always* a solution, because at
+//! the limit we can declare all type parameters to be invariant and all
+//! constraints will be satisfied.
+//!
+//! As a simple example, consider:
+//!
+//! enum Option<A> { Some(A), None }
+//! enum OptionalFn<B> { Some(|B|), None }
+//! enum OptionalMap<C> { Some(|C| -> C), None }
+//!
+//! Here, we will generate the constraints:
+//!
+//! 1. V(A) <= +
+//! 2. V(B) <= -
+//! 3. V(C) <= +
+//! 4. V(C) <= -
+//!
+//! These indicate that (1) the variance of A must be at most covariant;
+//! (2) the variance of B must be at most contravariant; and (3, 4) the
+//! variance of C must be at most covariant *and* contravariant. All of these
+//! results are based on a variance lattice defined as follows:
+//!
+//! * Top (bivariant)
+//! - +
+//! o Bottom (invariant)
+//!
+//! Based on this lattice, the solution V(A)=+, V(B)=-, V(C)=o is the
+//! optimal solution. Note that there is always a naive solution which
+//! just declares all variables to be invariant.
+//!
+//! You may be wondering why fixed-point iteration is required. The reason
+//! is that the variance of a use site may itself be a function of the
+//! variance of other type parameters. In full generality, our constraints
+//! take the form:
+//!
+//! V(X) <= Term
+//! Term := + | - | * | o | V(X) | Term x Term
+//!
+//! Here the notation V(X) indicates the variance of a type/region
+//! parameter `X` with respect to its defining class. `Term x Term`
+//! represents the "variance transform" as defined in the paper:
+//!
+//! If the variance of a type variable `X` in type expression `E` is `V2`
+//! and the definition-site variance of the [corresponding] type parameter
+//! of a class `C` is `V1`, then the variance of `X` in the type expression
+//! `C<E>` is `V3 = V1.xform(V2)`.
+//!
+//! ### Constraints
+//!
+//! If I have a struct or enum with where clauses:
+//!
+//! struct Foo<T:Bar> { ... }
+//!
+//! you might wonder whether the variance of `T` with respect to `Bar`
+//! affects the variance of `T` with respect to `Foo`. I claim no. The
+//! reason: assume that `T` is invariant w/r/t `Bar` but covariant w/r/t
+//! `Foo`. And then we have a `Foo<X>` that is upcast to `Foo<Y>`, where
+//! `X <: Y`. However, while `X : Bar`, `Y : Bar` does not hold. In that
+//! case, the upcast will be illegal, but not because of a variance
+//! failure, but rather because the target type `Foo<Y>` is itself just
+//! not well-formed. Basically we get to assume well-formedness of all
+//! types involved before considering variance.
+//!
+//! ### Addendum: Variance on traits
//!
-//! ### Variance on traits
+//! As mentioned above, we used to permit variance on traits. This was
+//! computed based on the appearance of trait type parameters in
+//! method signatures and was used to represent the compatibility of
+//! vtables in trait objects (and also "virtual" vtables or dictionary
+//! in trait bounds). One complication was that variance for
+//! associated types is less obvious, since they can be projected out
+//! and put to myriad uses, so it's not clear when it is safe to allow
+//! `X<A>::Bar` to vary (or indeed just what that means). Moreover (as
+//! covered below) all inputs on any trait with an associated type had
+//! to be invariant, limiting the applicability. Finally, the
+//! annotations (`MarkerTrait`, `PhantomFn`) needed to ensure that all
+//! trait type parameters had a variance were confusing and annoying
+//! for little benefit.
//!
-//! The meaning of variance for trait parameters is more subtle and worth
-//! expanding upon. There are in fact two uses of the variance values we
-//! compute.
+//! Just for historical reference, I am going to preserve some text indicating
+//! how one could interpret variance and trait matching.
//!
-//! #### Trait variance and object types
+//! #### Variance and object types
//!
-//! The first is for object types. Just as with structs and enums, we can
-//! decide the subtyping relationship between two object types `&Trait<A>`
-//! and `&Trait<B>` based on the relationship of `A` and `B`. Note that
-//! for object types we ignore the `Self` type parameter -- it is unknown,
-//! and the nature of dynamic dispatch ensures that we will always call a
+//! Just as with structs and enums, we can decide the subtyping
+//! relationship between two object types `&Trait<A>` and `&Trait<B>`
+//! based on the relationship of `A` and `B`. Note that for object
+//! types we ignore the `Self` type parameter -- it is unknown, and
+//! the nature of dynamic dispatch ensures that we will always call a
//! function that is expected the appropriate `Self` type. However, we
-//! must be careful with the other type parameters, or else we could end
-//! up calling a function that is expecting one type but provided another.
+//! must be careful with the other type parameters, or else we could
+//! end up calling a function that is expecting one type but provided
+//! another.
//!
//! To see what I mean, consider a trait like so:
//!
//!
//! These conditions are satisfied and so we are happy.
//!
-//! ### The algorithm
+//! #### Variance and associated types
//!
-//! The basic idea is quite straightforward. We iterate over the types
-//! defined and, for each use of a type parameter X, accumulate a
-//! constraint indicating that the variance of X must be valid for the
-//! variance of that use site. We then iteratively refine the variance of
-//! X until all constraints are met. There is *always* a sol'n, because at
-//! the limit we can declare all type parameters to be invariant and all
-//! constraints will be satisfied.
-//!
-//! As a simple example, consider:
-//!
-//! enum Option<A> { Some(A), None }
-//! enum OptionalFn<B> { Some(|B|), None }
-//! enum OptionalMap<C> { Some(|C| -> C), None }
-//!
-//! Here, we will generate the constraints:
-//!
-//! 1. V(A) <= +
-//! 2. V(B) <= -
-//! 3. V(C) <= +
-//! 4. V(C) <= -
-//!
-//! These indicate that (1) the variance of A must be at most covariant;
-//! (2) the variance of B must be at most contravariant; and (3, 4) the
-//! variance of C must be at most covariant *and* contravariant. All of these
-//! results are based on a variance lattice defined as follows:
-//!
-//! * Top (bivariant)
-//! - +
-//! o Bottom (invariant)
-//!
-//! Based on this lattice, the solution V(A)=+, V(B)=-, V(C)=o is the
-//! optimal solution. Note that there is always a naive solution which
-//! just declares all variables to be invariant.
-//!
-//! You may be wondering why fixed-point iteration is required. The reason
-//! is that the variance of a use site may itself be a function of the
-//! variance of other type parameters. In full generality, our constraints
-//! take the form:
-//!
-//! V(X) <= Term
-//! Term := + | - | * | o | V(X) | Term x Term
-//!
-//! Here the notation V(X) indicates the variance of a type/region
-//! parameter `X` with respect to its defining class. `Term x Term`
-//! represents the "variance transform" as defined in the paper:
-//!
-//! If the variance of a type variable `X` in type expression `E` is `V2`
-//! and the definition-site variance of the [corresponding] type parameter
-//! of a class `C` is `V1`, then the variance of `X` in the type expression
-//! `C<E>` is `V3 = V1.xform(V2)`.
-//!
-//! ### Constraints
-//!
-//! If I have a struct or enum with where clauses:
-//!
-//! struct Foo<T:Bar> { ... }
-//!
-//! you might wonder whether the variance of `T` with respect to `Bar`
-//! affects the variance `T` with respect to `Foo`. I claim no. The
-//! reason: assume that `T` is invariant w/r/t `Bar` but covariant w/r/t
-//! `Foo`. And then we have a `Foo<X>` that is upcast to `Foo<Y>`, where
-//! `X <: Y`. However, while `X : Bar`, `Y : Bar` does not hold. In that
-//! case, the upcast will be illegal, but not because of a variance
-//! failure, but rather because the target type `Foo<Y>` is itself just
-//! not well-formed. Basically we get to assume well-formedness of all
-//! types involved before considering variance.
-//!
-//! ### Associated types
-//!
-//! Any trait with an associated type is invariant with respect to all
-//! of its inputs. To see why this makes sense, consider what
-//! subtyping for a trait reference means:
+//! Traits with associated types -- or at minimum projection
+//! expressions -- must be invariant with respect to all of their
+//! inputs. To see why this makes sense, consider what subtyping for a
+//! trait reference means:
//!
//! <T as Trait> <: <U as Trait>
//!
-//! means that if I know that `T as Trait`,
-//! I also know that `U as
-//! Trait`. Moreover, if you think of it as
-//! dictionary passing style, it means that
-//! a dictionary for `<T as Trait>` is safe
-//! to use where a dictionary for `<U as
-//! Trait>` is expected.
-//!
-//! The problem is that when you can
-//! project types out from `<T as Trait>`,
-//! the relationship to types projected out
-//! of `<U as Trait>` is completely unknown
-//! unless `T==U` (see #21726 for more
-//! details). Making `Trait` invariant
-//! ensures that this is true.
+//! means that if I know that `T as Trait`, I also know that `U as
+//! Trait`. Moreover, if you think of it as dictionary passing style,
+//! it means that a dictionary for `<T as Trait>` is safe to use where
+//! a dictionary for `<U as Trait>` is expected.
//!
-//! *Historical note: we used to preserve this invariant another way,
-//! by tweaking the subtyping rules and requiring that when a type `T`
-//! appeared as part of a projection, that was considered an invariant
-//! location, but this version does away with the need for those
-//! somewhat "special-case-feeling" rules.*
+//! The problem is that when you can project types out from `<T as
+//! Trait>`, the relationship to types projected out of `<U as Trait>`
+//! is completely unknown unless `T==U` (see #21726 for more
+//! details). Making `Trait` invariant ensures that this is true.
//!
//! Another related reason is that if we didn't make traits with
//! associated types invariant, then projection is no longer a
fn lang_items(tcx: &ty::ctxt) -> Vec<(ast::NodeId,Vec<ty::Variance>)> {
let all = vec![
- (tcx.lang_items.phantom_fn(), vec![ty::Contravariant, ty::Covariant]),
(tcx.lang_items.phantom_data(), vec![ty::Covariant]),
(tcx.lang_items.unsafe_cell_type(), vec![ty::Invariant]),
self.add_inferreds_for_item(item.id, false, generics);
}
ast::ItemTrait(_, ref generics, _, _) => {
+ // Note: all inputs for traits are ultimately
+ // constrained to be invariant. See `visit_item` in
+ // the impl for `ConstraintContext` below.
self.add_inferreds_for_item(item.id, true, generics);
visit::walk_item(self, item);
}
ast::ItemTrait(..) => {
let trait_def = ty::lookup_trait_def(tcx, did);
- let predicates = ty::lookup_super_predicates(tcx, did);
- self.add_constraints_from_predicates(&trait_def.generics,
- predicates.predicates.as_slice(),
- self.covariant);
-
- let trait_items = ty::trait_items(tcx, did);
- for trait_item in &*trait_items {
- match *trait_item {
- ty::MethodTraitItem(ref method) => {
- self.add_constraints_from_predicates(
- &method.generics,
- method.predicates.predicates.get_slice(FnSpace),
- self.contravariant);
-
- self.add_constraints_from_sig(
- &method.generics,
- &method.fty.sig,
- self.covariant);
- }
- ty::TypeTraitItem(ref data) => {
- // Any trait with an associated type is
- // invariant with respect to all of its
- // inputs. See length discussion in the comment
- // on this module.
- let projection_ty = ty::mk_projection(tcx,
- trait_def.trait_ref.clone(),
- data.name);
- self.add_constraints_from_ty(&trait_def.generics,
- projection_ty,
- self.invariant);
- }
- }
- }
+ self.add_constraints_from_trait_ref(&trait_def.generics,
+ &trait_def.trait_ref,
+ self.invariant);
}
ast::ItemExternCrate(_) |
}
}
- fn add_constraints_from_predicates(&mut self,
- generics: &ty::Generics<'tcx>,
- predicates: &[ty::Predicate<'tcx>],
- variance: VarianceTermPtr<'a>) {
- debug!("add_constraints_from_generics({})",
- generics.repr(self.tcx()));
-
- for predicate in predicates.iter() {
- match *predicate {
- ty::Predicate::Trait(ty::Binder(ref data)) => {
- self.add_constraints_from_trait_ref(generics, &*data.trait_ref, variance);
- }
-
- ty::Predicate::Equate(ty::Binder(ref data)) => {
- // A == B is only true if A and B are the same
- // types, not subtypes of one another, so this is
- // an invariant position:
- self.add_constraints_from_ty(generics, data.0, self.invariant);
- self.add_constraints_from_ty(generics, data.1, self.invariant);
- }
-
- ty::Predicate::TypeOutlives(ty::Binder(ref data)) => {
- // Why contravariant on both? Let's consider:
- //
- // Under what conditions is `(T:'t) <: (U:'u)`,
- // meaning that `(T:'t) => (U:'u)`. The answer is
- // if `U <: T` or `'u <= 't`. Let's see some examples:
- //
- // (T: 'big) => (T: 'small)
- // where 'small <= 'big
- //
- // (&'small Foo: 't) => (&'big Foo: 't)
- // where 'small <= 'big
- // note that &'big Foo <: &'small Foo
-
- let variance_r = self.xform(variance, self.contravariant);
- self.add_constraints_from_ty(generics, data.0, variance_r);
- self.add_constraints_from_region(generics, data.1, variance_r);
- }
-
- ty::Predicate::RegionOutlives(ty::Binder(ref data)) => {
- // `'a : 'b` is still true if 'a gets bigger
- self.add_constraints_from_region(generics, data.0, variance);
-
- // `'a : 'b` is still true if 'b gets smaller
- let variance_r = self.xform(variance, self.contravariant);
- self.add_constraints_from_region(generics, data.1, variance_r);
- }
-
- ty::Predicate::Projection(ty::Binder(ref data)) => {
- self.add_constraints_from_trait_ref(generics,
- &*data.projection_ty.trait_ref,
- variance);
-
- // as the equality predicate above, a binder is a
- // type equality relation, not a subtyping
- // relation
- self.add_constraints_from_ty(generics, data.ty, self.invariant);
- }
- }
- }
- }
-
/// Adds constraints appropriate for a function with signature
/// `sig` appearing in a context with ambient variance `variance`
fn add_constraints_from_sig(&mut self,
type headerfn = extern "C" fn(*mut hoedown_buffer, *const hoedown_buffer,
libc::c_int, *mut libc::c_void);
+type codespanfn = extern "C" fn(*mut hoedown_buffer, *const hoedown_buffer,
+ *mut libc::c_void);
+
type linkfn = extern "C" fn (*mut hoedown_buffer, *const hoedown_buffer,
*const hoedown_buffer, *const hoedown_buffer,
*mut libc::c_void) -> libc::c_int;
blockhtml: Option<extern "C" fn(*mut hoedown_buffer, *const hoedown_buffer,
*mut libc::c_void)>,
header: Option<headerfn>,
-
other_block_level_callbacks: [libc::size_t; 9],
/* span level callbacks - NULL or return 0 prints the span verbatim */
- other_span_level_callbacks_1: [libc::size_t; 9],
+ autolink: libc::size_t, // unused
+ codespan: Option<codespanfn>,
+ other_span_level_callbacks_1: [libc::size_t; 7],
link: Option<linkfn>,
other_span_level_callbacks_2: [libc::size_t; 5],
// hoedown will add `math` callback here, but we use an old version of it.
}
}
+/// Returns a new string with all consecutive whitespace collapsed into
+/// single spaces.
+///
+/// Any leading or trailing whitespace will be trimmed.
+fn collapse_whitespace(s: &str) -> String {
+ s.split(|c: char| c.is_whitespace()).filter(|s| {
+ !s.is_empty()
+ }).collect::<Vec<_>>().connect(" ")
+}
+
thread_local!(static USED_HEADER_MAP: RefCell<HashMap<String, usize>> = {
RefCell::new(HashMap::new())
});
reset_headers();
+ extern fn codespan(ob: *mut hoedown_buffer, text: *const hoedown_buffer, _: *mut libc::c_void) {
+ let content = if text.is_null() {
+ "".to_string()
+ } else {
+ let bytes = unsafe { (*text).as_bytes() };
+ let s = str::from_utf8(bytes).unwrap();
+ collapse_whitespace(s)
+ };
+
+ let content = format!("<code>{}</code>", Escape(&content));
+ let element = CString::new(content).unwrap();
+ unsafe { hoedown_buffer_puts(ob, element.as_ptr()); }
+ }
+
unsafe {
let ob = hoedown_buffer_new(DEF_OUNIT);
let renderer = hoedown_html_renderer_new(0, 0);
= &mut opaque as *mut _ as *mut libc::c_void;
(*renderer).blockcode = Some(block);
(*renderer).header = Some(header);
+ (*renderer).codespan = Some(codespan);
let document = hoedown_document_new(renderer, HOEDOWN_EXTENSIONS, 16);
hoedown_document_render(document, ob, s.as_ptr(),
#[cfg(test)]
mod tests {
use super::{LangString, Markdown};
- use super::plain_summary_line;
+ use super::{collapse_whitespace, plain_summary_line};
#[test]
fn test_lang_string_parse() {
t("# top header", "top header");
t("## header", "header");
}
+
+ #[test]
+ fn test_collapse_whitespace() {
+ fn t(input: &str, expected: &str) {
+ let actual = collapse_whitespace(input);
+ assert_eq!(actual, expected);
+ }
+
+ t("foo", "foo");
+ t("foo bar baz", "foo bar baz");
+ t(" foo bar", "foo bar");
+ t("\tfoo bar\nbaz", "foo bar baz");
+ t("foo bar \n baz\t\tqux\n", "foo bar baz qux");
+ }
}
if ($active.length) {
document.location.href = $active.find('a').prop('href');
}
+ } else {
+ $active.removeClass('highlighted');
}
});
}
if (crates[i] == window.currentCrate) {
klass += ' current';
}
- var desc = rawSearchIndex[crates[i]].items[0][3];
- div.append($('<a>', {'href': '../' + crates[i] + '/index.html',
- 'title': plainSummaryLine(desc),
- 'class': klass}).text(crates[i]));
+ if (rawSearchIndex[crates[i]].items[0]) {
+ var desc = rawSearchIndex[crates[i]].items[0][3];
+ div.append($('<a>', {'href': '../' + crates[i] + '/index.html',
+ 'title': plainSummaryLine(desc),
+ 'class': klass}).text(crates[i]));
+ }
}
sidebar.append(div);
}
optmulti("", "cfg", "pass a --cfg to rustc", ""),
optmulti("", "extern", "pass an --extern to rustc", "NAME=PATH"),
optmulti("", "plugin-path", "directory to load plugins from", "DIR"),
- optmulti("", "passes", "space separated list of passes to also run, a \
- value of `list` will print available passes",
+ optmulti("", "passes", "list of passes to also run, you might want \
+ to pass it multiple times; a value of `list` \
+ will print available passes",
"PASSES"),
optmulti("", "plugins", "space separated list of plugins to also load",
"PLUGINS"),
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, 'b> From<&'b str> for Box<Error + Send + 'a> {
- fn from(err: &'b str) -> Box<Error + Send + 'a> {
+impl From<String> for Box<Error + Send> {
+ fn from(err: String) -> Box<Error + Send> {
#[derive(Debug)]
struct StringError(String);
}
}
- Box::new(StringError(String::from_str(err)))
+ Box::new(StringError(err))
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, 'b> From<&'b str> for Box<Error + Send + 'a> {
+ fn from(err: &'b str) -> Box<Error + Send + 'a> {
+ From::from(String::from_str(err))
}
}
//! including [`atomic`](sync/atomic/index.html), and [`mpsc`](sync/mpsc/index.html),
//! which contains the channel types for message passing.
//!
-//! Common types of I/O, including files, TCP, UDP, pipes, Unix domain sockets,
-//! timers, and process spawning, are defined in the
-//! [`old_io`](old_io/index.html) module.
+//! Common types of I/O, including files, TCP, UDP, pipes, Unix domain sockets, and
+//! process spawning, are defined in the [`io`](io/index.html) module.
//!
//! Rust's I/O and concurrency depends on a small runtime interface
//! that lives, along with its support code, in mod [`rt`](rt/index.html).
use prelude::v1::*;
#[cfg_attr(any(target_os = "freebsd",
- target_os = "openbsd"),
+ target_os = "openbsd",
+ target_os = "bitrig"),
ignore)]
// under some system, pipe(2) will return a bidrectionnal pipe
#[test]
let name = meta.name();
if !set.insert(name.clone()) {
- diagnostic.span_fatal(meta.span,
- &format!("duplicate meta item `{}`", name));
+ panic!(diagnostic.span_fatal(meta.span,
+ &format!("duplicate meta item `{}`", name)));
}
}
}
sp: RenderSpan, msg: &str, lvl: Level);
}
-/// This structure is used to signify that a task has panicked with a fatal error
-/// from the diagnostics. You can use this with the `Any` trait to figure out
-/// how a rustc task died (if so desired).
+/// Used as a return value to signify a fatal error occurred. (It is also
+/// used as the argument to panic at the moment, but that will eventually
+/// not be true.)
#[derive(Copy, Clone)]
+#[must_use]
pub struct FatalError;
/// Signifies that the compiler died with an explicit call to `.bug`
}
impl SpanHandler {
- pub fn span_fatal(&self, sp: Span, msg: &str) -> ! {
+ pub fn span_fatal(&self, sp: Span, msg: &str) -> FatalError {
self.handler.emit(Some((&self.cm, sp)), msg, Fatal);
- panic!(FatalError);
+ return FatalError;
}
- pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> ! {
+ pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> FatalError {
self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Fatal);
- panic!(FatalError);
+ return FatalError;
}
pub fn span_err(&self, sp: Span, msg: &str) {
self.handler.emit(Some((&self.cm, sp)), msg, Error);
p.token != token::ModSep {
if outputs.len() != 0 {
- p.eat(&token::Comma);
+ panictry!(p.eat(&token::Comma));
}
- let (constraint, _str_style) = p.parse_str();
+ let (constraint, _str_style) = panictry!(p.parse_str());
let span = p.last_span;
- p.expect(&token::OpenDelim(token::Paren));
+ panictry!(p.expect(&token::OpenDelim(token::Paren)));
let out = p.parse_expr();
- p.expect(&token::CloseDelim(token::Paren));
+ panictry!(p.expect(&token::CloseDelim(token::Paren)));
// Expands a read+write operand into two operands.
//
p.token != token::ModSep {
if inputs.len() != 0 {
- p.eat(&token::Comma);
+ panictry!(p.eat(&token::Comma));
}
- let (constraint, _str_style) = p.parse_str();
+ let (constraint, _str_style) = panictry!(p.parse_str());
if constraint.starts_with("=") {
cx.span_err(p.last_span, "input operand constraint contains '='");
cx.span_err(p.last_span, "input operand constraint contains '+'");
}
- p.expect(&token::OpenDelim(token::Paren));
+ panictry!(p.expect(&token::OpenDelim(token::Paren)));
let input = p.parse_expr();
- p.expect(&token::CloseDelim(token::Paren));
+ panictry!(p.expect(&token::CloseDelim(token::Paren)));
inputs.push((constraint, input));
}
p.token != token::ModSep {
if clobs.len() != 0 {
- p.eat(&token::Comma);
+ panictry!(p.eat(&token::Comma));
}
- let (s, _str_style) = p.parse_str();
+ let (s, _str_style) = panictry!(p.parse_str());
if OPTIONS.iter().any(|&opt| s == opt) {
cx.span_warn(p.last_span, "expected a clobber, found an option");
}
}
Options => {
- let (option, _str_style) = p.parse_str();
+ let (option, _str_style) = panictry!(p.parse_str());
if option == "volatile" {
// Indicates that the inline assembly has side effects
}
if p.token == token::Comma {
- p.eat(&token::Comma);
+ panictry!(p.eat(&token::Comma));
}
}
StateNone => ()
match (&p.token, state.next(), state.next().next()) {
(&token::Colon, StateNone, _) |
(&token::ModSep, _, StateNone) => {
- p.bump();
+ panictry!(p.bump());
break 'statement;
}
(&token::Colon, st, _) |
(&token::ModSep, _, st) => {
- p.bump();
+ panictry!(p.bump());
state = st;
}
(&token::Eof, _, _) => break 'statement,
pub fn bt_push(&mut self, ei: ExpnInfo) {
self.recursion_count += 1;
if self.recursion_count > self.ecfg.recursion_limit {
- self.span_fatal(ei.call_site,
+ panic!(self.span_fatal(ei.call_site,
&format!("recursion limit reached while expanding the macro `{}`",
- ei.callee.name));
+ ei.callee.name)));
}
let mut call_site = ei.call_site;
/// value doesn't have to match anything)
pub fn span_fatal(&self, sp: Span, msg: &str) -> ! {
self.print_backtrace();
- self.parse_sess.span_diagnostic.span_fatal(sp, msg);
+ panic!(self.parse_sess.span_diagnostic.span_fatal(sp, msg));
}
/// Emit `msg` attached to `sp`, without immediately stopping
let mut es = Vec::new();
while p.token != token::Eof {
es.push(cx.expander().fold_expr(p.parse_expr()));
- if p.eat(&token::Comma) {
+ if panictry!(p.eat(&token::Comma)){
continue;
}
if p.token != token::Eof {
let mut p = cx.new_parser_from_tts(tts);
let cfg = p.parse_meta_item();
- if !p.eat(&token::Eof) {
+ if !panictry!(p.eat(&token::Eof)){
cx.span_err(sp, "expected 1 cfg-pattern");
return DummyResult::expr(sp);
}
fn expand_crate_str(crate_str: String) -> ast::Crate {
let ps = parse::new_parse_sess();
- let crate_ast = string_to_parser(&ps, crate_str).parse_crate_mod();
+ let crate_ast = panictry!(string_to_parser(&ps, crate_str).parse_crate_mod());
// the cfg argument actually does matter, here...
expand_crate(&ps,test_ecfg(),vec!(),vec!(),crate_ast)
}
let fmtstr = p.parse_expr();
let mut named = false;
while p.token != token::Eof {
- if !p.eat(&token::Comma) {
+ if !panictry!(p.eat(&token::Comma)) {
ecx.span_err(sp, "expected token: `,`");
return None;
}
named = true;
let ident = match p.token {
token::Ident(i, _) => {
- p.bump();
+ panictry!(p.bump());
i
}
_ if named => {
let interned_name = token::get_ident(ident);
let name = &interned_name[..];
- p.expect(&token::Eq);
+ panictry!(p.expect(&token::Eq));
let e = p.parse_expr();
match names.get(name) {
None => {}
p.quote_depth += 1;
let cx_expr = p.parse_expr();
- if !p.eat(&token::Comma) {
- p.fatal("expected token `,`");
+ if !panictry!(p.eat(&token::Comma)) {
+ panic!(p.fatal("expected token `,`"));
}
- let tts = p.parse_all_token_trees();
+ let tts = panictry!(p.parse_all_token_trees());
p.abort_if_errors();
(cx_expr, tts)
while self.p.token != token::Eof {
match self.p.parse_item() {
Some(item) => ret.push(item),
- None => self.p.span_fatal(
+ None => panic!(self.p.span_fatal(
self.p.span,
&format!("expected item, found `{}`",
self.p.this_token_to_string())
- )
+ ))
}
}
Some(ret)
}
Occupied(..) => {
let string = token::get_ident(bind_name);
- p_s.span_diagnostic
+ panic!(p_s.span_diagnostic
.span_fatal(sp,
&format!("duplicated bind name: {}",
- &string))
+ &string)))
}
}
}
match parse(sess, cfg, rdr, &ms[..]) {
Success(m) => m,
Failure(sp, str) => {
- sess.span_diagnostic.span_fatal(sp, &str[..])
+ panic!(sess.span_diagnostic.span_fatal(sp, &str[..]))
}
Error(sp, str) => {
- sess.span_diagnostic.span_fatal(sp, &str[..])
+ panic!(sess.span_diagnostic.span_fatal(sp, &str[..]))
}
}
}
match name {
"tt" => {
p.quote_depth += 1; //but in theory, non-quoted tts might be useful
- let res = token::NtTT(P(p.parse_token_tree()));
+ let res = token::NtTT(P(panictry!(p.parse_token_tree())));
p.quote_depth -= 1;
return res;
}
_ => {}
}
// check at the beginning and the parser checks after each bump
- p.check_unknown_macro_variable();
+ panictry!(p.check_unknown_macro_variable());
match name {
"item" => match p.parse_item() {
Some(i) => token::NtItem(i),
- None => p.fatal("expected an item keyword")
+ None => panic!(p.fatal("expected an item keyword"))
},
- "block" => token::NtBlock(p.parse_block()),
+ "block" => token::NtBlock(panictry!(p.parse_block())),
"stmt" => match p.parse_stmt() {
Some(s) => token::NtStmt(s),
- None => p.fatal("expected a statement")
+ None => panic!(p.fatal("expected a statement"))
},
"pat" => token::NtPat(p.parse_pat()),
"expr" => token::NtExpr(p.parse_expr()),
"ty" => token::NtTy(p.parse_ty()),
// this could be handled like a token, since it is one
"ident" => match p.token {
- token::Ident(sn,b) => { p.bump(); token::NtIdent(box sn,b) }
+ token::Ident(sn,b) => { panictry!(p.bump()); token::NtIdent(box sn,b) }
_ => {
let token_str = pprust::token_to_string(&p.token);
- p.fatal(&format!("expected ident, found {}",
- &token_str[..]))
+ panic!(p.fatal(&format!("expected ident, found {}",
+ &token_str[..])))
}
},
"path" => {
- token::NtPath(box p.parse_path(LifetimeAndTypesWithoutColons))
+ token::NtPath(box panictry!(p.parse_path(LifetimeAndTypesWithoutColons)))
}
"meta" => token::NtMeta(p.parse_meta_item()),
_ => {
- p.span_fatal_help(sp,
+ panic!(p.span_fatal_help(sp,
&format!("invalid fragment specifier `{}`", name),
"valid fragment specifiers are `ident`, `block`, \
`stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \
- and `item`")
+ and `item`"))
}
}
}
fn ensure_complete_parse(&self, allow_semi: bool) {
let mut parser = self.parser.borrow_mut();
if allow_semi && parser.token == token::Semi {
- parser.bump()
+ panictry!(parser.bump())
}
if parser.token != token::Eof {
let token_str = parser.this_token_to_string();
let mut parser = self.parser.borrow_mut();
match parser.token {
token::Eof => break,
- _ => ret.push(parser.parse_impl_item())
+ _ => ret.push(panictry!(parser.parse_impl_item()))
}
}
self.ensure_complete_parse(false);
MatchedNonterminal(NtTT(ref lhs_tt)) => {
let lhs_tt = match **lhs_tt {
TtDelimited(_, ref delim) => &delim.tts[..],
- _ => cx.span_fatal(sp, "malformed macro lhs")
+ _ => panic!(cx.span_fatal(sp, "malformed macro lhs"))
};
match TokenTree::parse(cx, lhs_tt, arg) {
match **tt {
// ignore delimiters
TtDelimited(_, ref delimed) => delimed.tts.clone(),
- _ => cx.span_fatal(sp, "macro rhs must be delimited"),
+ _ => panic!(cx.span_fatal(sp, "macro rhs must be delimited")),
}
},
_ => cx.span_bug(sp, "bad thing in rhs")
imported_from,
rhs);
let mut p = Parser::new(cx.parse_sess(), cx.cfg(), Box::new(trncbr));
- p.check_unknown_macro_variable();
+ panictry!(p.check_unknown_macro_variable());
// Let the context choose how to interpret the result.
// Weird, but useful for X-macros.
return box ParserAnyMacro {
best_fail_spot = sp;
best_fail_msg = (*msg).clone();
},
- Error(sp, ref msg) => cx.span_fatal(sp, &msg[..])
+ Error(sp, ref msg) => panic!(cx.span_fatal(sp, &msg[..]))
}
}
_ => cx.bug("non-matcher found in parsed lhses")
}
}
- cx.span_fatal(best_fail_spot, &best_fail_msg[..]);
+ panic!(cx.span_fatal(best_fail_spot, &best_fail_msg[..]));
}
// Note that macro-by-example's input is also matched against a token tree:
match lockstep_iter_size(&TtSequence(sp, seq.clone()),
r) {
LisUnconstrained => {
- r.sp_diag.span_fatal(
+ panic!(r.sp_diag.span_fatal(
sp.clone(), /* blame macro writer */
"attempted to repeat an expression \
containing no syntax \
- variables matched as repeating at this depth");
+ variables matched as repeating at this depth"));
}
LisContradiction(ref msg) => {
// FIXME #2887 blame macro invoker instead
- r.sp_diag.span_fatal(sp.clone(), &msg[..]);
+ panic!(r.sp_diag.span_fatal(sp.clone(), &msg[..]));
}
LisConstraint(len, _) => {
if len == 0 {
if seq.op == ast::OneOrMore {
// FIXME #2887 blame invoker
- r.sp_diag.span_fatal(sp.clone(),
- "this must repeat at least once");
+ panic!(r.sp_diag.span_fatal(sp.clone(),
+ "this must repeat at least once"));
}
r.stack.last_mut().unwrap().idx += 1;
return ret_val;
}
MatchedSeq(..) => {
- r.sp_diag.span_fatal(
+ panic!(r.sp_diag.span_fatal(
r.cur_span, /* blame the macro writer */
&format!("variable '{:?}' is still repeating at this depth",
- token::get_ident(ident)));
+ token::get_ident(ident))));
}
}
}
extern crate serialize as rustc_serialize; // used by deriving
+// A variant of 'try!' that panics on Err(FatalError). This is used as a
+// crutch on the way towards a non-panic!-prone parser. It should be used
+// for fatal parsing errors; eventually we plan to convert all code using
+// panictry to just use normal try
+macro_rules! panictry {
+ ($e:expr) => ({
+ use std::result::Result::{Ok, Err};
+ use diagnostic::FatalError;
+ match $e {
+ Ok(e) => e,
+ Err(FatalError) => panic!(FatalError)
+ }
+ })
+}
+
pub mod util {
pub mod interner;
#[cfg(test)]
self.span.hi
);
if attr.node.style != ast::AttrOuter {
- self.fatal("expected outer comment");
+ panic!(self.fatal("expected outer comment"));
}
attrs.push(attr);
- self.bump();
+ panictry!(self.bump());
}
_ => break
}
let (span, value, mut style) = match self.token {
token::Pound => {
let lo = self.span.lo;
- self.bump();
+ panictry!(self.bump());
if permit_inner { self.expected_tokens.push(TokenType::Token(token::Not)); }
let style = if self.token == token::Not {
- self.bump();
+ panictry!(self.bump());
if !permit_inner {
let span = self.span;
self.span_err(span,
ast::AttrOuter
};
- self.expect(&token::OpenDelim(token::Bracket));
+ panictry!(self.expect(&token::OpenDelim(token::Bracket)));
let meta_item = self.parse_meta_item();
let hi = self.span.hi;
- self.expect(&token::CloseDelim(token::Bracket));
+ panictry!(self.expect(&token::CloseDelim(token::Bracket)));
(mk_sp(lo, hi), meta_item, style)
}
_ => {
let token_str = self.this_token_to_string();
- self.fatal(&format!("expected `#`, found `{}`", token_str));
+ panic!(self.fatal(&format!("expected `#`, found `{}`", token_str)));
}
};
if permit_inner && self.token == token::Semi {
- self.bump();
+ panictry!(self.bump());
self.span_warn(span, "this inner attribute syntax is deprecated. \
The new syntax is `#![foo]`, with a bang and no semicolon");
style = ast::AttrInner;
lo, hi);
if attr.node.style == ast::AttrInner {
attrs.push(attr);
- self.bump();
+ panictry!(self.bump());
} else {
break;
}
match nt_meta {
Some(meta) => {
- self.bump();
+ panictry!(self.bump());
return meta;
}
None => {}
}
let lo = self.span.lo;
- let ident = self.parse_ident();
+ let ident = panictry!(self.parse_ident());
let name = self.id_to_interned_str(ident);
match self.token {
token::Eq => {
- self.bump();
- let lit = self.parse_lit();
+ panictry!(self.bump());
+ let lit = panictry!(self.parse_lit());
// FIXME #623 Non-string meta items are not serialized correctly;
// just forbid them for now
match lit.node {
/// matches meta_seq = ( COMMASEP(meta_item) )
fn parse_meta_seq(&mut self) -> Vec<P<ast::MetaItem>> {
- self.parse_seq(&token::OpenDelim(token::Paren),
+ panictry!(self.parse_seq(&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
seq_sep_trailing_allowed(token::Comma),
- |p| p.parse_meta_item()).node
+ |p| Ok(p.parse_meta_item()))).node
}
fn parse_optional_meta(&mut self) -> Vec<P<ast::MetaItem>> {
r
}
fn fatal(&self, m: &str) -> ! {
- self.sp_diag.span_fatal(self.cur_span, m);
+ panic!(self.sp_diag.span_fatal(self.cur_span, m));
}
fn err(&self, m: &str) {
self.sp_diag.span_err(self.cur_span, m);
/// Report a fatal lexical error with a given span.
pub fn fatal_span(&self, sp: Span, m: &str) -> ! {
- self.span_diagnostic.span_fatal(sp, m)
+ panic!(self.span_diagnostic.span_fatal(sp, m))
}
/// Report a lexical error with a given span.
use ast;
use codemap::{Span, CodeMap, FileMap};
-use diagnostic::{SpanHandler, mk_span_handler, default_handler, Auto};
+use diagnostic::{SpanHandler, mk_span_handler, default_handler, Auto, FatalError};
use parse::attr::ParserAttr;
use parse::parser::Parser;
use ptr::P;
+
use std::cell::{Cell, RefCell};
use std::fs::File;
use std::io::Read;
use std::rc::Rc;
use std::str;
+pub type PResult<T> = Result<T, FatalError>;
+
#[macro_use]
pub mod parser;
cfg: ast::CrateConfig,
sess: &ParseSess
) -> ast::Crate {
- new_parser_from_file(sess, cfg, input).parse_crate_mod()
+ panictry!(new_parser_from_file(sess, cfg, input).parse_crate_mod())
// why is there no p.abort_if_errors here?
}
cfg,
name,
source);
- maybe_aborted(p.parse_crate_mod(),p)
+ maybe_aborted(panictry!(p.parse_crate_mod()),p)
}
pub fn parse_crate_attrs_from_source_str(name: String,
);
p.quote_depth += 1;
// right now this is re-creating the token trees from ... token trees.
- maybe_aborted(p.parse_all_token_trees(),p)
+ maybe_aborted(panictry!(p.parse_all_token_trees()),p)
}
// Note: keep in sync with `with_hygiene::new_parser_from_source_str`
-> Rc<FileMap> {
let err = |msg: &str| {
match spanopt {
- Some(sp) => sess.span_diagnostic.span_fatal(sp, msg),
+ Some(sp) => panic!(sess.span_diagnostic.span_fatal(sp, msg)),
None => sess.span_diagnostic.handler().fatal(msg),
}
};
let cfg = Vec::new();
let srdr = lexer::StringReader::new(&sess.span_diagnostic, filemap);
let mut p1 = Parser::new(sess, cfg, box srdr);
- p1.parse_all_token_trees()
+ panictry!(p1.parse_all_token_trees())
}
/// Given tts and cfg, produce a parser
cfg: ast::CrateConfig) -> Parser<'a> {
let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, None, tts);
let mut p = Parser::new(sess, cfg, box trdr);
- p.check_unknown_macro_variable();
+ panictry!(p.check_unknown_macro_variable());
p
}
);
p.quote_depth += 1;
// right now this is re-creating the token trees from ... token trees.
- maybe_aborted(p.parse_all_token_trees(),p)
+ maybe_aborted(panictry!(p.parse_all_token_trees()),p)
}
// Note: keep this in sync with `super::new_parser_from_source_str` until
let cfg = Vec::new();
let srdr = make_reader(&sess.span_diagnostic, filemap);
let mut p1 = Parser::new(sess, cfg, box srdr);
- p1.parse_all_token_trees()
+ panictry!(p1.parse_all_token_trees())
}
}
#[test] fn parse_ident_pat () {
let sess = new_parse_sess();
let mut parser = string_to_parser(&sess, "b".to_string());
- assert!(parser.parse_pat()
+ assert!(panictry!(parser.parse_pat_nopanic())
== P(ast::Pat{
id: ast::DUMMY_NODE_ID,
node: ast::PatIdent(ast::BindByValue(ast::MutImmutable),
fn eat_obsolete_ident(&mut self, ident: &str) -> bool {
if self.is_obsolete_ident(ident) {
- self.bump();
+ panictry!(self.bump());
true
} else {
false
use ast::{MutImmutable, MutMutable, Mac_, MacInvocTT, MatchSource};
use ast::{MutTy, BiMul, Mutability};
use ast::{MethodImplItem, NamedField, UnNeg, NoReturn, UnNot};
-use ast::{Pat, PatEnum, PatIdent, PatLit, PatRange, PatRegion, PatStruct};
-use ast::{PatTup, PatBox, PatWild, PatWildMulti, PatWildSingle};
+use ast::{Pat, PatBox, PatEnum, PatIdent, PatLit, PatMac, PatRange, PatRegion};
+use ast::{PatStruct, PatTup, PatVec, PatWild, PatWildMulti, PatWildSingle};
use ast::{PolyTraitRef, QSelf};
use ast::{Return, BiShl, BiShr, Stmt, StmtDecl};
use ast::{StmtExpr, StmtSemi, StmtMac, StructDef, StructField};
use print::pprust;
use ptr::P;
use owned_slice::OwnedSlice;
+use parse::PResult;
+use diagnostic::FatalError;
use std::collections::HashSet;
use std::io::prelude::*;
flags Restrictions: u8 {
const UNRESTRICTED = 0b0000,
const RESTRICTION_STMT_EXPR = 0b0001,
- const RESTRICTION_NO_BAR_OP = 0b0010,
- const RESTRICTION_NO_STRUCT_LITERAL = 0b0100,
+ const RESTRICTION_NO_STRUCT_LITERAL = 0b0010,
}
}
-
type ItemInfo = (Ident, Item_, Option<Vec<Attribute> >);
/// How to parse a path. There are four different kinds of paths, all of which
};
match found {
Some(e) => {
- $p.bump();
- return e;
+ try!($p.bump());
+ return Ok(e);
}
None => ()
}
{
let found = match ($p).token {
token::Interpolated(token::$constructor(_)) => {
- Some(($p).bump_and_get())
+ Some(try!(($p).bump_and_get()))
}
_ => None
};
if let Some(token::Interpolated(token::$constructor(x))) = found {
- return x.clone();
+ return Ok(x.clone());
}
}
);
{
let found = match ($p).token {
token::Interpolated(token::$constructor(_)) => {
- Some(($p).bump_and_get())
+ Some(try!(($p).bump_and_get()))
}
_ => None
};
if let Some(token::Interpolated(token::$constructor(x))) = found {
- return x;
+ return Ok(x);
}
}
);
{
let found = match ($p).token {
token::Interpolated(token::$constructor(_)) => {
- Some(($p).bump_and_get())
+ Some(try!(($p).bump_and_get()))
}
_ => None
};
if let Some(token::Interpolated(token::$constructor(x))) = found {
- return (*x).clone();
+ return Ok((*x).clone());
}
}
);
{
let found = match ($p).token {
token::Interpolated(token::$constructor(_)) => {
- Some(($p).bump_and_get())
+ Some(try!(($p).bump_and_get()))
}
_ => None
};
if let Some(token::Interpolated(token::$constructor(x))) = found {
- return Some((*x).clone());
+ return Ok(Some((*x).clone()));
}
}
);
{
let found = match ($p).token {
token::Interpolated(token::$constructor(_)) => {
- Some(($p).bump_and_get())
+ Some(try!(($p).bump_and_get()))
}
_ => None
};
if let Some(token::Interpolated(token::$constructor(x))) = found {
- return (Vec::new(), x);
+ return Ok((Vec::new(), x));
}
}
)
}
}
+ // Panicking fns (for now!)
+ // This is so that the quote_*!() syntax extensions can continue to work
+ // unchanged while the parser is migrated to returning Result.
+ pub fn parse_expr(&mut self) -> P<Expr> {
+ panictry!(self.parse_expr_nopanic())
+ }
+
+ pub fn parse_item(&mut self) -> Option<P<Item>> {
+ panictry!(self.parse_item_nopanic())
+ }
+
+ pub fn parse_pat(&mut self) -> P<Pat> {
+ panictry!(self.parse_pat_nopanic())
+ }
+
+ pub fn parse_arm(&mut self) -> Arm {
+ panictry!(self.parse_arm_nopanic())
+ }
+
+ pub fn parse_ty(&mut self) -> P<Ty> {
+ panictry!(self.parse_ty_nopanic())
+ }
+
+ pub fn parse_stmt(&mut self) -> Option<P<Stmt>> {
+ panictry!(self.parse_stmt_nopanic())
+ }
+
/// Convert a token to a string using self's reader
pub fn token_to_string(token: &token::Token) -> String {
pprust::token_to_string(token)
Parser::token_to_string(&self.token)
}
- pub fn unexpected_last(&self, t: &token::Token) -> ! {
+ pub fn unexpected_last(&self, t: &token::Token) -> FatalError {
let token_str = Parser::token_to_string(t);
let last_span = self.last_span;
self.span_fatal(last_span, &format!("unexpected token: `{}`",
- token_str));
+ token_str))
}
- pub fn unexpected(&mut self) -> ! {
- self.expect_one_of(&[], &[]);
- unreachable!()
+ pub fn unexpected(&mut self) -> FatalError {
+ match self.expect_one_of(&[], &[]) {
+ Err(e) => e,
+ Ok(_) => unreachable!()
+ }
}
/// Expect and consume the token t. Signal an error if
/// the next token is not t.
- pub fn expect(&mut self, t: &token::Token) {
+ pub fn expect(&mut self, t: &token::Token) -> PResult<()> {
if self.expected_tokens.is_empty() {
if self.token == *t {
- self.bump();
+ self.bump()
} else {
let token_str = Parser::token_to_string(t);
let this_token_str = self.this_token_to_string();
- self.fatal(&format!("expected `{}`, found `{}`",
+ Err(self.fatal(&format!("expected `{}`, found `{}`",
token_str,
- this_token_str))
+ this_token_str)))
}
} else {
- self.expect_one_of(slice::ref_slice(t), &[]);
+ self.expect_one_of(slice::ref_slice(t), &[])
}
}
/// anything. Signal a fatal error if next token is unexpected.
pub fn expect_one_of(&mut self,
edible: &[token::Token],
- inedible: &[token::Token]) {
+ inedible: &[token::Token]) -> PResult<()>{
fn tokens_to_string(tokens: &[TokenType]) -> String {
let mut i = tokens.iter();
// This might be a sign we need a connect method on Iterator.
})
}
if edible.contains(&self.token) {
- self.bump();
+ self.bump()
} else if inedible.contains(&self.token) {
// leave it in the input
+ Ok(())
} else {
let mut expected = edible.iter().map(|x| TokenType::Token(x.clone()))
.collect::<Vec<_>>();
expected.dedup();
let expect = tokens_to_string(&expected[..]);
let actual = self.this_token_to_string();
- self.fatal(
+ Err(self.fatal(
&(if expected.len() > 1 {
(format!("expected one of {}, found `{}`",
expect,
expect,
actual))
})[..]
- )
+ ))
}
}
/// Check for erroneous `ident { }`; if matches, signal error and
/// recover (without consuming any expected input token). Returns
/// true if and only if input was consumed for recovery.
- pub fn check_for_erroneous_unit_struct_expecting(&mut self, expected: &[token::Token]) -> bool {
+ pub fn check_for_erroneous_unit_struct_expecting(&mut self,
+ expected: &[token::Token])
+ -> PResult<bool> {
if self.token == token::OpenDelim(token::Brace)
&& expected.iter().all(|t| *t != token::OpenDelim(token::Brace))
&& self.look_ahead(1, |t| *t == token::CloseDelim(token::Brace)) {
let span = self.span;
self.span_err(span,
"unit-like struct construction is written with no trailing `{ }`");
- self.eat(&token::OpenDelim(token::Brace));
- self.eat(&token::CloseDelim(token::Brace));
- true
+ try!(self.eat(&token::OpenDelim(token::Brace)));
+ try!(self.eat(&token::CloseDelim(token::Brace)));
+ Ok(true)
} else {
- false
+ Ok(false)
}
}
/// Commit to parsing a complete expression `e` expected to be
/// followed by some token from the set edible + inedible. Recover
/// from anticipated input errors, discarding erroneous characters.
- pub fn commit_expr(&mut self, e: &Expr, edible: &[token::Token], inedible: &[token::Token]) {
+ pub fn commit_expr(&mut self, e: &Expr, edible: &[token::Token],
+ inedible: &[token::Token]) -> PResult<()> {
debug!("commit_expr {:?}", e);
if let ExprPath(..) = e.node {
// might be unit-struct construction; check for recoverableinput error.
let mut expected = edible.iter().cloned().collect::<Vec<_>>();
expected.push_all(inedible);
- self.check_for_erroneous_unit_struct_expecting(&expected[..]);
+ try!(self.check_for_erroneous_unit_struct_expecting(&expected[..]));
}
self.expect_one_of(edible, inedible)
}
- pub fn commit_expr_expecting(&mut self, e: &Expr, edible: token::Token) {
+ pub fn commit_expr_expecting(&mut self, e: &Expr, edible: token::Token) -> PResult<()> {
self.commit_expr(e, &[edible], &[])
}
/// Commit to parsing a complete statement `s`, which expects to be
/// followed by some token from the set edible + inedible. Check
/// for recoverable input errors, discarding erroneous characters.
- pub fn commit_stmt(&mut self, edible: &[token::Token], inedible: &[token::Token]) {
+ pub fn commit_stmt(&mut self, edible: &[token::Token],
+ inedible: &[token::Token]) -> PResult<()> {
if self.last_token
.as_ref()
.map_or(false, |t| t.is_ident() || t.is_path()) {
let mut expected = edible.iter().cloned().collect::<Vec<_>>();
expected.push_all(&inedible);
- self.check_for_erroneous_unit_struct_expecting(&expected);
+ try!(self.check_for_erroneous_unit_struct_expecting(&expected));
}
self.expect_one_of(edible, inedible)
}
- pub fn commit_stmt_expecting(&mut self, edible: token::Token) {
+ pub fn commit_stmt_expecting(&mut self, edible: token::Token) -> PResult<()> {
self.commit_stmt(&[edible], &[])
}
- pub fn parse_ident(&mut self) -> ast::Ident {
+ pub fn parse_ident(&mut self) -> PResult<ast::Ident> {
self.check_strict_keywords();
- self.check_reserved_keywords();
+ try!(self.check_reserved_keywords());
match self.token {
token::Ident(i, _) => {
- self.bump();
- i
+ try!(self.bump());
+ Ok(i)
}
token::Interpolated(token::NtIdent(..)) => {
self.bug("ident interpolation not converted to real token");
}
_ => {
let token_str = self.this_token_to_string();
- self.fatal(&format!("expected ident, found `{}`",
- token_str))
+ Err(self.fatal(&format!("expected ident, found `{}`",
+ token_str)))
}
}
}
- pub fn parse_ident_or_self_type(&mut self) -> ast::Ident {
+ pub fn parse_ident_or_self_type(&mut self) -> PResult<ast::Ident> {
if self.is_self_type_ident() {
self.expect_self_type_ident()
} else {
}
}
- pub fn parse_path_list_item(&mut self) -> ast::PathListItem {
+ pub fn parse_path_list_item(&mut self) -> PResult<ast::PathListItem> {
let lo = self.span.lo;
- let node = if self.eat_keyword(keywords::SelfValue) {
+ let node = if try!(self.eat_keyword(keywords::SelfValue)) {
ast::PathListMod { id: ast::DUMMY_NODE_ID }
} else {
- let ident = self.parse_ident();
+ let ident = try!(self.parse_ident());
ast::PathListIdent { name: ident, id: ast::DUMMY_NODE_ID }
};
let hi = self.last_span.hi;
- spanned(lo, hi, node)
+ Ok(spanned(lo, hi, node))
}
/// Check if the next token is `tok`, and return `true` if so.
/// Consume token 'tok' if it exists. Returns true if the given
/// token was present, false otherwise.
- pub fn eat(&mut self, tok: &token::Token) -> bool {
+ pub fn eat(&mut self, tok: &token::Token) -> PResult<bool> {
let is_present = self.check(tok);
- if is_present { self.bump() }
- is_present
+ if is_present { try!(self.bump())}
+ Ok(is_present)
}
pub fn check_keyword(&mut self, kw: keywords::Keyword) -> bool {
/// If the next token is the given keyword, eat it and return
/// true. Otherwise, return false.
- pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> bool {
+ pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> PResult<bool> {
if self.check_keyword(kw) {
- self.bump();
- true
+ try!(self.bump());
+ Ok(true)
} else {
- false
+ Ok(false)
}
}
- pub fn eat_keyword_noexpect(&mut self, kw: keywords::Keyword) -> bool {
+ pub fn eat_keyword_noexpect(&mut self, kw: keywords::Keyword) -> PResult<bool> {
if self.token.is_keyword(kw) {
- self.bump();
- true
+ try!(self.bump());
+ Ok(true)
} else {
- false
+ Ok(false)
}
}
/// If the given word is not a keyword, signal an error.
/// If the next token is not the given word, signal an error.
/// Otherwise, eat it.
- pub fn expect_keyword(&mut self, kw: keywords::Keyword) {
- if !self.eat_keyword(kw) {
- self.expect_one_of(&[], &[]);
+ pub fn expect_keyword(&mut self, kw: keywords::Keyword) -> PResult<()> {
+ if !try!(self.eat_keyword(kw)) {
+ self.expect_one_of(&[], &[])
+ } else {
+ Ok(())
}
}
}
/// Signal an error if the current token is a reserved keyword
- pub fn check_reserved_keywords(&mut self) {
+ pub fn check_reserved_keywords(&mut self) -> PResult<()> {
if self.token.is_reserved_keyword() {
let token_str = self.this_token_to_string();
- self.fatal(&format!("`{}` is a reserved keyword",
- token_str))
+ Err(self.fatal(&format!("`{}` is a reserved keyword",
+ token_str)))
+ } else {
+ Ok(())
}
}
/// Expect and consume an `&`. If `&&` is seen, replace it with a single
/// `&` and continue. If an `&` is not seen, signal an error.
- fn expect_and(&mut self) {
+ fn expect_and(&mut self) -> PResult<()> {
self.expected_tokens.push(TokenType::Token(token::BinOp(token::And)));
match self.token {
token::BinOp(token::And) => self.bump(),
token::AndAnd => {
let span = self.span;
let lo = span.lo + BytePos(1);
- self.replace_token(token::BinOp(token::And), lo, span.hi)
- }
- _ => {
- self.expect_one_of(&[], &[]);
+ Ok(self.replace_token(token::BinOp(token::And), lo, span.hi))
}
+ _ => self.expect_one_of(&[], &[])
}
}
///
/// This is meant to be used when parsing generics on a path to get the
/// starting token.
- fn eat_lt(&mut self) -> bool {
+ fn eat_lt(&mut self) -> PResult<bool> {
self.expected_tokens.push(TokenType::Token(token::Lt));
match self.token {
- token::Lt => { self.bump(); true }
+ token::Lt => { try!(self.bump()); Ok(true) }
token::BinOp(token::Shl) => {
let span = self.span;
let lo = span.lo + BytePos(1);
self.replace_token(token::Lt, lo, span.hi);
- true
+ Ok(true)
}
- _ => false,
+ _ => Ok(false),
}
}
- fn expect_lt(&mut self) {
- if !self.eat_lt() {
- self.expect_one_of(&[], &[]);
+ fn expect_lt(&mut self) -> PResult<()> {
+ if !try!(self.eat_lt()) {
+ self.expect_one_of(&[], &[])
+ } else {
+ Ok(())
}
}
/// Expect and consume a GT. if a >> is seen, replace it
/// with a single > and continue. If a GT is not seen,
/// signal an error.
- pub fn expect_gt(&mut self) {
+ pub fn expect_gt(&mut self) -> PResult<()> {
self.expected_tokens.push(TokenType::Token(token::Gt));
match self.token {
token::Gt => self.bump(),
token::BinOp(token::Shr) => {
let span = self.span;
let lo = span.lo + BytePos(1);
- self.replace_token(token::Gt, lo, span.hi)
+ Ok(self.replace_token(token::Gt, lo, span.hi))
}
token::BinOpEq(token::Shr) => {
let span = self.span;
let lo = span.lo + BytePos(1);
- self.replace_token(token::Ge, lo, span.hi)
+ Ok(self.replace_token(token::Ge, lo, span.hi))
}
token::Ge => {
let span = self.span;
let lo = span.lo + BytePos(1);
- self.replace_token(token::Eq, lo, span.hi)
+ Ok(self.replace_token(token::Eq, lo, span.hi))
}
_ => {
let gt_str = Parser::token_to_string(&token::Gt);
let this_token_str = self.this_token_to_string();
- self.fatal(&format!("expected `{}`, found `{}`",
+ Err(self.fatal(&format!("expected `{}`, found `{}`",
gt_str,
- this_token_str))
+ this_token_str)))
}
}
}
pub fn parse_seq_to_before_gt_or_return<T, F>(&mut self,
sep: Option<token::Token>,
mut f: F)
- -> (OwnedSlice<T>, bool) where
- F: FnMut(&mut Parser) -> Option<T>,
+ -> PResult<(OwnedSlice<T>, bool)> where
+ F: FnMut(&mut Parser) -> PResult<Option<T>>,
{
let mut v = Vec::new();
// This loop works by alternating back and forth between parsing types
}
if i % 2 == 0 {
- match f(self) {
+ match try!(f(self)) {
Some(result) => v.push(result),
- None => return (OwnedSlice::from_vec(v), true)
+ None => return Ok((OwnedSlice::from_vec(v), true))
}
} else {
- sep.as_ref().map(|t| self.expect(t));
+ if let Some(t) = sep.as_ref() {
+ try!(self.expect(t));
+ }
+
}
}
- return (OwnedSlice::from_vec(v), false);
+ return Ok((OwnedSlice::from_vec(v), false));
}
/// Parse a sequence bracketed by '<' and '>', stopping
pub fn parse_seq_to_before_gt<T, F>(&mut self,
sep: Option<token::Token>,
mut f: F)
- -> OwnedSlice<T> where
- F: FnMut(&mut Parser) -> T,
+ -> PResult<OwnedSlice<T>> where
+ F: FnMut(&mut Parser) -> PResult<T>,
{
- let (result, returned) = self.parse_seq_to_before_gt_or_return(sep, |p| Some(f(p)));
+ let (result, returned) = try!(self.parse_seq_to_before_gt_or_return(sep,
+ |p| Ok(Some(try!(f(p))))));
assert!(!returned);
- return result;
+ return Ok(result);
}
pub fn parse_seq_to_gt<T, F>(&mut self,
sep: Option<token::Token>,
f: F)
- -> OwnedSlice<T> where
- F: FnMut(&mut Parser) -> T,
+ -> PResult<OwnedSlice<T>> where
+ F: FnMut(&mut Parser) -> PResult<T>,
{
- let v = self.parse_seq_to_before_gt(sep, f);
- self.expect_gt();
- return v;
+ let v = try!(self.parse_seq_to_before_gt(sep, f));
+ try!(self.expect_gt());
+ return Ok(v);
}
pub fn parse_seq_to_gt_or_return<T, F>(&mut self,
sep: Option<token::Token>,
f: F)
- -> (OwnedSlice<T>, bool) where
- F: FnMut(&mut Parser) -> Option<T>,
+ -> PResult<(OwnedSlice<T>, bool)> where
+ F: FnMut(&mut Parser) -> PResult<Option<T>>,
{
- let (v, returned) = self.parse_seq_to_before_gt_or_return(sep, f);
+ let (v, returned) = try!(self.parse_seq_to_before_gt_or_return(sep, f));
if !returned {
- self.expect_gt();
+ try!(self.expect_gt());
}
- return (v, returned);
+ return Ok((v, returned));
}
/// Parse a sequence, including the closing delimiter. The function
ket: &token::Token,
sep: SeqSep,
f: F)
- -> Vec<T> where
- F: FnMut(&mut Parser) -> T,
+ -> PResult<Vec<T>> where
+ F: FnMut(&mut Parser) -> PResult<T>,
{
- let val = self.parse_seq_to_before_end(ket, sep, f);
- self.bump();
- val
+ let val = try!(self.parse_seq_to_before_end(ket, sep, f));
+ try!(self.bump());
+ Ok(val)
}
/// Parse a sequence, not including the closing delimiter. The function
ket: &token::Token,
sep: SeqSep,
mut f: F)
- -> Vec<T> where
- F: FnMut(&mut Parser) -> T,
+ -> PResult<Vec<T>> where
+ F: FnMut(&mut Parser) -> PResult<T>,
{
let mut first: bool = true;
let mut v = vec!();
match sep.sep {
Some(ref t) => {
if first { first = false; }
- else { self.expect(t); }
+ else { try!(self.expect(t)); }
}
_ => ()
}
if sep.trailing_sep_allowed && self.check(ket) { break; }
- v.push(f(self));
+ v.push(try!(f(self)));
}
- return v;
+ return Ok(v);
}
/// Parse a sequence, including the closing delimiter. The function
ket: &token::Token,
sep: SeqSep,
f: F)
- -> Vec<T> where
- F: FnMut(&mut Parser) -> T,
+ -> PResult<Vec<T>> where
+ F: FnMut(&mut Parser) -> PResult<T>,
{
- self.expect(bra);
- let result = self.parse_seq_to_before_end(ket, sep, f);
- self.bump();
- result
+ try!(self.expect(bra));
+ let result = try!(self.parse_seq_to_before_end(ket, sep, f));
+ try!(self.bump());
+ Ok(result)
}
/// Parse a sequence parameter of enum variant. For consistency purposes,
ket: &token::Token,
sep: SeqSep,
f: F)
- -> Vec<T> where
- F: FnMut(&mut Parser) -> T,
+ -> PResult<Vec<T>> where
+ F: FnMut(&mut Parser) -> PResult<T>,
{
- let result = self.parse_unspanned_seq(bra, ket, sep, f);
+ let result = try!(self.parse_unspanned_seq(bra, ket, sep, f));
if result.is_empty() {
let last_span = self.last_span;
self.span_err(last_span,
"nullary enum variants are written with no trailing `( )`");
}
- result
+ Ok(result)
}
// NB: Do not use this function unless you actually plan to place the
ket: &token::Token,
sep: SeqSep,
f: F)
- -> Spanned<Vec<T>> where
- F: FnMut(&mut Parser) -> T,
+ -> PResult<Spanned<Vec<T>>> where
+ F: FnMut(&mut Parser) -> PResult<T>,
{
let lo = self.span.lo;
- self.expect(bra);
- let result = self.parse_seq_to_before_end(ket, sep, f);
+ try!(self.expect(bra));
+ let result = try!(self.parse_seq_to_before_end(ket, sep, f));
let hi = self.span.hi;
- self.bump();
- spanned(lo, hi, result)
+ try!(self.bump());
+ Ok(spanned(lo, hi, result))
}
/// Advance the parser by one token
- pub fn bump(&mut self) {
+ pub fn bump(&mut self) -> PResult<()> {
self.last_span = self.span;
// Stash token for error recovery (sometimes; clone is not necessarily cheap).
self.last_token = if self.token.is_ident() || self.token.is_path() {
self.tokens_consumed += 1;
self.expected_tokens.clear();
// check after each token
- self.check_unknown_macro_variable();
+ self.check_unknown_macro_variable()
}
/// Advance the parser by one token and return the bumped token.
- pub fn bump_and_get(&mut self) -> token::Token {
+ pub fn bump_and_get(&mut self) -> PResult<token::Token> {
let old_token = mem::replace(&mut self.token, token::Underscore);
- self.bump();
- old_token
+ try!(self.bump());
+ Ok(old_token)
}
/// EFFECT: replace the current token and span with the given one
}
f(&self.buffer[((self.buffer_start + dist - 1) & 3) as usize].tok)
}
- pub fn fatal(&self, m: &str) -> ! {
+ pub fn fatal(&self, m: &str) -> diagnostic::FatalError {
self.sess.span_diagnostic.span_fatal(self.span, m)
}
- pub fn span_fatal(&self, sp: Span, m: &str) -> ! {
+ pub fn span_fatal(&self, sp: Span, m: &str) -> diagnostic::FatalError {
self.sess.span_diagnostic.span_fatal(sp, m)
}
- pub fn span_fatal_help(&self, sp: Span, m: &str, help: &str) -> ! {
+ pub fn span_fatal_help(&self, sp: Span, m: &str, help: &str) -> diagnostic::FatalError {
self.span_err(sp, m);
self.fileline_help(sp, help);
- panic!(diagnostic::FatalError);
+ diagnostic::FatalError
}
pub fn span_note(&self, sp: Span, m: &str) {
self.sess.span_diagnostic.span_note(sp, m)
}
}
- pub fn parse_for_in_type(&mut self) -> Ty_ {
+ pub fn parse_for_in_type(&mut self) -> PResult<Ty_> {
/*
Parses whatever can come after a `for` keyword in a type.
The `for` has already been consumed.
// parse <'lt>
let lo = self.span.lo;
- let lifetime_defs = self.parse_late_bound_lifetime_defs();
+ let lifetime_defs = try!(self.parse_late_bound_lifetime_defs());
// examine next token to decide to do
if self.token_is_bare_fn_keyword() {
self.parse_ty_bare_fn(lifetime_defs)
} else {
let hi = self.span.hi;
- let trait_ref = self.parse_trait_ref();
+ let trait_ref = try!(self.parse_trait_ref());
let poly_trait_ref = ast::PolyTraitRef { bound_lifetimes: lifetime_defs,
trait_ref: trait_ref,
span: mk_sp(lo, hi)};
- let other_bounds = if self.eat(&token::BinOp(token::Plus)) {
- self.parse_ty_param_bounds(BoundParsingMode::Bare)
+ let other_bounds = if try!(self.eat(&token::BinOp(token::Plus))) {
+ try!(self.parse_ty_param_bounds(BoundParsingMode::Bare))
} else {
OwnedSlice::empty()
};
Some(TraitTyParamBound(poly_trait_ref, TraitBoundModifier::None)).into_iter()
.chain(other_bounds.into_vec().into_iter())
.collect();
- ast::TyPolyTraitRef(all_bounds)
+ Ok(ast::TyPolyTraitRef(all_bounds))
}
}
- pub fn parse_ty_path(&mut self) -> Ty_ {
- TyPath(None, self.parse_path(LifetimeAndTypesWithoutColons))
+ pub fn parse_ty_path(&mut self) -> PResult<Ty_> {
+ Ok(TyPath(None, try!(self.parse_path(LifetimeAndTypesWithoutColons))))
}
/// parse a TyBareFn type:
- pub fn parse_ty_bare_fn(&mut self, lifetime_defs: Vec<ast::LifetimeDef>) -> Ty_ {
+ pub fn parse_ty_bare_fn(&mut self, lifetime_defs: Vec<ast::LifetimeDef>) -> PResult<Ty_> {
/*
[unsafe] [extern "ABI"] fn <'lt> (S) -> T
Function Style
*/
- let unsafety = self.parse_unsafety();
- let abi = if self.eat_keyword(keywords::Extern) {
- self.parse_opt_abi().unwrap_or(abi::C)
+ let unsafety = try!(self.parse_unsafety());
+ let abi = if try!(self.eat_keyword(keywords::Extern)) {
+ try!(self.parse_opt_abi()).unwrap_or(abi::C)
} else {
abi::Rust
};
- self.expect_keyword(keywords::Fn);
- let (inputs, variadic) = self.parse_fn_args(false, true);
- let ret_ty = self.parse_ret_ty();
+ try!(self.expect_keyword(keywords::Fn));
+ let (inputs, variadic) = try!(self.parse_fn_args(false, true));
+ let ret_ty = try!(self.parse_ret_ty());
let decl = P(FnDecl {
inputs: inputs,
output: ret_ty,
variadic: variadic
});
- TyBareFn(P(BareFnTy {
+ Ok(TyBareFn(P(BareFnTy {
abi: abi,
unsafety: unsafety,
lifetimes: lifetime_defs,
decl: decl
- }))
+ })))
}
/// Parses an obsolete closure kind (`&:`, `&mut:`, or `:`).
- pub fn parse_obsolete_closure_kind(&mut self) {
+ pub fn parse_obsolete_closure_kind(&mut self) -> PResult<()> {
let lo = self.span.lo;
if
self.check(&token::BinOp(token::And)) &&
self.look_ahead(1, |t| t.is_keyword(keywords::Mut)) &&
self.look_ahead(2, |t| *t == token::Colon)
{
- self.bump();
- self.bump();
- self.bump();
+ try!(self.bump());
+ try!(self.bump());
+ try!(self.bump());
} else if
self.token == token::BinOp(token::And) &&
self.look_ahead(1, |t| *t == token::Colon)
{
- self.bump();
- self.bump();
+ try!(self.bump());
+ try!(self.bump());
} else if
- self.eat(&token::Colon)
+ try!(self.eat(&token::Colon))
{
/* nothing */
} else {
- return;
+ return Ok(());
}
- let span = mk_sp(lo, self.span.hi);
- self.obsolete(span, ObsoleteSyntax::ClosureKind);
+ let span = mk_sp(lo, self.span.hi);
+ self.obsolete(span, ObsoleteSyntax::ClosureKind);
+ Ok(())
}
- pub fn parse_unsafety(&mut self) -> Unsafety {
- if self.eat_keyword(keywords::Unsafe) {
- return Unsafety::Unsafe;
+ pub fn parse_unsafety(&mut self) -> PResult<Unsafety> {
+ if try!(self.eat_keyword(keywords::Unsafe)) {
+ return Ok(Unsafety::Unsafe);
} else {
- return Unsafety::Normal;
+ return Ok(Unsafety::Normal);
}
}
/// Parse the items in a trait declaration
- pub fn parse_trait_items(&mut self) -> Vec<P<TraitItem>> {
+ pub fn parse_trait_items(&mut self) -> PResult<Vec<P<TraitItem>>> {
self.parse_unspanned_seq(
&token::OpenDelim(token::Brace),
&token::CloseDelim(token::Brace),
let lo = p.span.lo;
let mut attrs = p.parse_outer_attributes();
- let (name, node) = if p.eat_keyword(keywords::Type) {
- let TyParam {ident, bounds, default, ..} = p.parse_ty_param();
- p.expect(&token::Semi);
+ let (name, node) = if try!(p.eat_keyword(keywords::Type)) {
+ let TyParam {ident, bounds, default, ..} = try!(p.parse_ty_param());
+ try!(p.expect(&token::Semi));
(ident, TypeTraitItem(bounds, default))
} else {
- let style = p.parse_unsafety();
- let abi = if p.eat_keyword(keywords::Extern) {
- p.parse_opt_abi().unwrap_or(abi::C)
+ let style = try!(p.parse_unsafety());
+ let abi = if try!(p.eat_keyword(keywords::Extern)) {
+ try!(p.parse_opt_abi()).unwrap_or(abi::C)
} else {
abi::Rust
};
- p.expect_keyword(keywords::Fn);
+ try!(p.expect_keyword(keywords::Fn));
- let ident = p.parse_ident();
- let mut generics = p.parse_generics();
+ let ident = try!(p.parse_ident());
+ let mut generics = try!(p.parse_generics());
- let (explicit_self, d) = p.parse_fn_decl_with_self(|p| {
+ let (explicit_self, d) = try!(p.parse_fn_decl_with_self(|p| {
// This is somewhat dubious; We don't want to allow
// argument names to be left off if there is a
// definition...
p.parse_arg_general(false)
- });
+ }));
- generics.where_clause = p.parse_where_clause();
+ generics.where_clause = try!(p.parse_where_clause());
let sig = ast::MethodSig {
unsafety: style,
decl: d,
let body = match p.token {
token::Semi => {
- p.bump();
+ try!(p.bump());
debug!("parse_trait_methods(): parsing required method");
None
}
token::OpenDelim(token::Brace) => {
debug!("parse_trait_methods(): parsing provided method");
let (inner_attrs, body) =
- p.parse_inner_attrs_and_block();
+ try!(p.parse_inner_attrs_and_block());
attrs.push_all(&inner_attrs[..]);
Some(body)
}
_ => {
let token_str = p.this_token_to_string();
- p.fatal(&format!("expected `;` or `{{`, found `{}`",
- token_str)[..])
+ return Err(p.fatal(&format!("expected `;` or `{{`, found `{}`",
+ token_str)[..]))
}
};
(ident, ast::MethodTraitItem(sig, body))
};
- P(TraitItem {
+ Ok(P(TraitItem {
id: ast::DUMMY_NODE_ID,
ident: name,
attrs: attrs,
node: node,
span: mk_sp(lo, p.last_span.hi),
- })
+ }))
})
}
/// Parse a possibly mutable type
- pub fn parse_mt(&mut self) -> MutTy {
- let mutbl = self.parse_mutability();
- let t = self.parse_ty();
- MutTy { ty: t, mutbl: mutbl }
+ pub fn parse_mt(&mut self) -> PResult<MutTy> {
+ let mutbl = try!(self.parse_mutability());
+ let t = try!(self.parse_ty_nopanic());
+ Ok(MutTy { ty: t, mutbl: mutbl })
}
/// Parse optional return type [ -> TY ] in function decl
- pub fn parse_ret_ty(&mut self) -> FunctionRetTy {
- if self.eat(&token::RArrow) {
- if self.eat(&token::Not) {
- NoReturn(self.span)
+ pub fn parse_ret_ty(&mut self) -> PResult<FunctionRetTy> {
+ if try!(self.eat(&token::RArrow)) {
+ if try!(self.eat(&token::Not)) {
+ Ok(NoReturn(self.span))
} else {
- Return(self.parse_ty())
+ Ok(Return(try!(self.parse_ty_nopanic())))
}
} else {
let pos = self.span.lo;
- DefaultReturn(mk_sp(pos, pos))
+ Ok(DefaultReturn(mk_sp(pos, pos)))
}
}
/// Parse a type in a context where `T1+T2` is allowed.
- pub fn parse_ty_sum(&mut self) -> P<Ty> {
+ pub fn parse_ty_sum(&mut self) -> PResult<P<Ty>> {
let lo = self.span.lo;
- let lhs = self.parse_ty();
+ let lhs = try!(self.parse_ty_nopanic());
- if !self.eat(&token::BinOp(token::Plus)) {
- return lhs;
+ if !try!(self.eat(&token::BinOp(token::Plus))) {
+ return Ok(lhs);
}
- let bounds = self.parse_ty_param_bounds(BoundParsingMode::Bare);
+ let bounds = try!(self.parse_ty_param_bounds(BoundParsingMode::Bare));
// In type grammar, `+` is treated like a binary operator,
// and hence both L and R side are required.
let sp = mk_sp(lo, self.last_span.hi);
let sum = ast::TyObjectSum(lhs, bounds);
- P(Ty {id: ast::DUMMY_NODE_ID, node: sum, span: sp})
+ Ok(P(Ty {id: ast::DUMMY_NODE_ID, node: sum, span: sp}))
}
/// Parse a type.
- pub fn parse_ty(&mut self) -> P<Ty> {
+ pub fn parse_ty_nopanic(&mut self) -> PResult<P<Ty>> {
maybe_whole!(no_clone self, NtTy);
let lo = self.span.lo;
let t = if self.check(&token::OpenDelim(token::Paren)) {
- self.bump();
+ try!(self.bump());
// (t) is a parenthesized ty
// (t,) is the type of a tuple with only one field,
let mut ts = vec![];
let mut last_comma = false;
while self.token != token::CloseDelim(token::Paren) {
- ts.push(self.parse_ty_sum());
+ ts.push(try!(self.parse_ty_sum()));
if self.check(&token::Comma) {
last_comma = true;
- self.bump();
+ try!(self.bump());
} else {
last_comma = false;
break;
}
}
- self.expect(&token::CloseDelim(token::Paren));
+ try!(self.expect(&token::CloseDelim(token::Paren)));
if ts.len() == 1 && !last_comma {
TyParen(ts.into_iter().nth(0).unwrap())
} else {
}
} else if self.check(&token::BinOp(token::Star)) {
// STAR POINTER (bare pointer?)
- self.bump();
- TyPtr(self.parse_ptr())
+ try!(self.bump());
+ TyPtr(try!(self.parse_ptr()))
} else if self.check(&token::OpenDelim(token::Bracket)) {
// VECTOR
- self.expect(&token::OpenDelim(token::Bracket));
- let t = self.parse_ty_sum();
+ try!(self.expect(&token::OpenDelim(token::Bracket)));
+ let t = try!(self.parse_ty_sum());
// Parse the `; e` in `[ i32; e ]`
// where `e` is a const expression
- let t = match self.maybe_parse_fixed_length_of_vec() {
+ let t = match try!(self.maybe_parse_fixed_length_of_vec()) {
None => TyVec(t),
Some(suffix) => TyFixedLengthVec(t, suffix)
};
- self.expect(&token::CloseDelim(token::Bracket));
+ try!(self.expect(&token::CloseDelim(token::Bracket)));
t
} else if self.check(&token::BinOp(token::And)) ||
self.token == token::AndAnd {
// BORROWED POINTER
- self.expect_and();
- self.parse_borrowed_pointee()
+ try!(self.expect_and());
+ try!(self.parse_borrowed_pointee())
} else if self.check_keyword(keywords::For) {
- self.parse_for_in_type()
+ try!(self.parse_for_in_type())
} else if self.token_is_bare_fn_keyword() {
// BARE FUNCTION
- self.parse_ty_bare_fn(Vec::new())
- } else if self.eat_keyword_noexpect(keywords::Typeof) {
+ try!(self.parse_ty_bare_fn(Vec::new()))
+ } else if try!(self.eat_keyword_noexpect(keywords::Typeof)) {
// TYPEOF
// In order to not be ambiguous, the type must be surrounded by parens.
- self.expect(&token::OpenDelim(token::Paren));
- let e = self.parse_expr();
- self.expect(&token::CloseDelim(token::Paren));
+ try!(self.expect(&token::OpenDelim(token::Paren)));
+ let e = try!(self.parse_expr_nopanic());
+ try!(self.expect(&token::CloseDelim(token::Paren)));
TyTypeof(e)
- } else if self.eat_lt() {
+ } else if try!(self.eat_lt()) {
// QUALIFIED PATH `<TYPE as TRAIT_REF>::item`
- let self_type = self.parse_ty_sum();
+ let self_type = try!(self.parse_ty_sum());
- let mut path = if self.eat_keyword(keywords::As) {
- self.parse_path(LifetimeAndTypesWithoutColons)
+ let mut path = if try!(self.eat_keyword(keywords::As)) {
+ try!(self.parse_path(LifetimeAndTypesWithoutColons))
} else {
ast::Path {
span: self.span,
position: path.segments.len()
};
- self.expect(&token::Gt);
- self.expect(&token::ModSep);
+ try!(self.expect(&token::Gt));
+ try!(self.expect(&token::ModSep));
path.segments.push(ast::PathSegment {
- identifier: self.parse_ident(),
+ identifier: try!(self.parse_ident()),
parameters: ast::PathParameters::none()
});
self.token.is_ident() ||
self.token.is_path() {
// NAMED TYPE
- self.parse_ty_path()
- } else if self.eat(&token::Underscore) {
+ try!(self.parse_ty_path())
+ } else if try!(self.eat(&token::Underscore)) {
// TYPE TO BE INFERRED
TyInfer
} else {
let this_token_str = self.this_token_to_string();
let msg = format!("expected type, found `{}`", this_token_str);
- self.fatal(&msg[..]);
+ return Err(self.fatal(&msg[..]));
};
let sp = mk_sp(lo, self.last_span.hi);
- P(Ty {id: ast::DUMMY_NODE_ID, node: t, span: sp})
+ Ok(P(Ty {id: ast::DUMMY_NODE_ID, node: t, span: sp}))
}
- pub fn parse_borrowed_pointee(&mut self) -> Ty_ {
+ pub fn parse_borrowed_pointee(&mut self) -> PResult<Ty_> {
// look for `&'lt` or `&'foo ` and interpret `foo` as the region name:
- let opt_lifetime = self.parse_opt_lifetime();
+ let opt_lifetime = try!(self.parse_opt_lifetime());
- let mt = self.parse_mt();
- return TyRptr(opt_lifetime, mt);
+ let mt = try!(self.parse_mt());
+ return Ok(TyRptr(opt_lifetime, mt));
}
- pub fn parse_ptr(&mut self) -> MutTy {
- let mutbl = if self.eat_keyword(keywords::Mut) {
+ pub fn parse_ptr(&mut self) -> PResult<MutTy> {
+ let mutbl = if try!(self.eat_keyword(keywords::Mut)) {
MutMutable
- } else if self.eat_keyword(keywords::Const) {
+ } else if try!(self.eat_keyword(keywords::Const)) {
MutImmutable
} else {
let span = self.last_span;
known as `*const T`");
MutImmutable
};
- let t = self.parse_ty();
- MutTy { ty: t, mutbl: mutbl }
+ let t = try!(self.parse_ty_nopanic());
+ Ok(MutTy { ty: t, mutbl: mutbl })
}
pub fn is_named_argument(&mut self) -> bool {
/// This version of parse arg doesn't necessarily require
/// identifier names.
- pub fn parse_arg_general(&mut self, require_name: bool) -> Arg {
+ pub fn parse_arg_general(&mut self, require_name: bool) -> PResult<Arg> {
let pat = if require_name || self.is_named_argument() {
debug!("parse_arg_general parse_pat (require_name:{})",
require_name);
- let pat = self.parse_pat();
+ let pat = try!(self.parse_pat_nopanic());
- self.expect(&token::Colon);
+ try!(self.expect(&token::Colon));
pat
} else {
debug!("parse_arg_general ident_to_pat");
special_idents::invalid)
};
- let t = self.parse_ty_sum();
+ let t = try!(self.parse_ty_sum());
- Arg {
+ Ok(Arg {
ty: t,
pat: pat,
id: ast::DUMMY_NODE_ID,
- }
+ })
}
/// Parse a single function argument
- pub fn parse_arg(&mut self) -> Arg {
+ pub fn parse_arg(&mut self) -> PResult<Arg> {
self.parse_arg_general(true)
}
/// Parse an argument in a lambda header e.g. |arg, arg|
- pub fn parse_fn_block_arg(&mut self) -> Arg {
- let pat = self.parse_pat();
- let t = if self.eat(&token::Colon) {
- self.parse_ty_sum()
+ pub fn parse_fn_block_arg(&mut self) -> PResult<Arg> {
+ let pat = try!(self.parse_pat_nopanic());
+ let t = if try!(self.eat(&token::Colon)) {
+ try!(self.parse_ty_sum())
} else {
P(Ty {
id: ast::DUMMY_NODE_ID,
span: mk_sp(self.span.lo, self.span.hi),
})
};
- Arg {
+ Ok(Arg {
ty: t,
pat: pat,
id: ast::DUMMY_NODE_ID
- }
+ })
}
- pub fn maybe_parse_fixed_length_of_vec(&mut self) -> Option<P<ast::Expr>> {
+ pub fn maybe_parse_fixed_length_of_vec(&mut self) -> PResult<Option<P<ast::Expr>>> {
if self.check(&token::Semi) {
- self.bump();
- Some(self.parse_expr())
+ try!(self.bump());
+ Ok(Some(try!(self.parse_expr_nopanic())))
} else {
- None
+ Ok(None)
}
}
/// Matches token_lit = LIT_INTEGER | ...
- pub fn lit_from_token(&self, tok: &token::Token) -> Lit_ {
+ pub fn lit_from_token(&self, tok: &token::Token) -> PResult<Lit_> {
match *tok {
token::Interpolated(token::NtExpr(ref v)) => {
match v.node {
- ExprLit(ref lit) => { lit.node.clone() }
- _ => { self.unexpected_last(tok); }
+ ExprLit(ref lit) => { Ok(lit.node.clone()) }
+ _ => { return Err(self.unexpected_last(tok)); }
}
}
token::Literal(lit, suf) => {
self.expect_no_suffix(sp, &*format!("{} literal", lit.short_name()), suf)
}
- out
+ Ok(out)
}
- _ => { self.unexpected_last(tok); }
+ _ => { return Err(self.unexpected_last(tok)); }
}
}
/// Matches lit = true | false | token_lit
- pub fn parse_lit(&mut self) -> Lit {
+ pub fn parse_lit(&mut self) -> PResult<Lit> {
let lo = self.span.lo;
- let lit = if self.eat_keyword(keywords::True) {
+ let lit = if try!(self.eat_keyword(keywords::True)) {
LitBool(true)
- } else if self.eat_keyword(keywords::False) {
+ } else if try!(self.eat_keyword(keywords::False)) {
LitBool(false)
} else {
- let token = self.bump_and_get();
- let lit = self.lit_from_token(&token);
+ let token = try!(self.bump_and_get());
+ let lit = try!(self.lit_from_token(&token));
lit
};
- codemap::Spanned { node: lit, span: mk_sp(lo, self.last_span.hi) }
+ Ok(codemap::Spanned { node: lit, span: mk_sp(lo, self.last_span.hi) })
}
/// matches '-' lit | lit
- pub fn parse_literal_maybe_minus(&mut self) -> P<Expr> {
+ pub fn parse_literal_maybe_minus(&mut self) -> PResult<P<Expr>> {
let minus_lo = self.span.lo;
- let minus_present = self.eat(&token::BinOp(token::Minus));
+ let minus_present = try!(self.eat(&token::BinOp(token::Minus)));
let lo = self.span.lo;
- let literal = P(self.parse_lit());
+ let literal = P(try!(self.parse_lit()));
let hi = self.span.hi;
let expr = self.mk_expr(lo, hi, ExprLit(literal));
if minus_present {
let minus_hi = self.span.hi;
let unary = self.mk_unary(UnNeg, expr);
- self.mk_expr(minus_lo, minus_hi, unary)
+ Ok(self.mk_expr(minus_lo, minus_hi, unary))
} else {
- expr
+ Ok(expr)
}
}
/// mode. The `mode` parameter determines whether lifetimes, types, and/or
/// bounds are permitted and whether `::` must precede type parameter
/// groups.
- pub fn parse_path(&mut self, mode: PathParsingMode) -> ast::Path {
+ pub fn parse_path(&mut self, mode: PathParsingMode) -> PResult<ast::Path> {
// Check for a whole path...
let found = match self.token {
- token::Interpolated(token::NtPath(_)) => Some(self.bump_and_get()),
+ token::Interpolated(token::NtPath(_)) => Some(try!(self.bump_and_get())),
_ => None,
};
if let Some(token::Interpolated(token::NtPath(box path))) = found {
- return path;
+ return Ok(path);
}
let lo = self.span.lo;
- let is_global = self.eat(&token::ModSep);
+ let is_global = try!(self.eat(&token::ModSep));
// Parse any number of segments and bound sets. A segment is an
// identifier followed by an optional lifetime and a set of types.
// A bound set is a set of type parameter bounds.
let segments = match mode {
LifetimeAndTypesWithoutColons => {
- self.parse_path_segments_without_colons()
+ try!(self.parse_path_segments_without_colons())
}
LifetimeAndTypesWithColons => {
- self.parse_path_segments_with_colons()
+ try!(self.parse_path_segments_with_colons())
}
NoTypesAllowed => {
- self.parse_path_segments_without_types()
+ try!(self.parse_path_segments_without_types())
}
};
let span = mk_sp(lo, self.last_span.hi);
// Assemble the result.
- ast::Path {
+ Ok(ast::Path {
span: span,
global: is_global,
segments: segments,
- }
+ })
}
/// Examples:
/// - `a::b<T,U>::c<V,W>`
/// - `a::b<T,U>::c(V) -> W`
/// - `a::b<T,U>::c(V)`
- pub fn parse_path_segments_without_colons(&mut self) -> Vec<ast::PathSegment> {
+ pub fn parse_path_segments_without_colons(&mut self) -> PResult<Vec<ast::PathSegment>> {
let mut segments = Vec::new();
loop {
// First, parse an identifier.
- let identifier = self.parse_ident_or_self_type();
+ let identifier = try!(self.parse_ident_or_self_type());
// Parse types, optionally.
- let parameters = if self.eat_lt() {
- let (lifetimes, types, bindings) = self.parse_generic_values_after_lt();
+ let parameters = if try!(self.eat_lt()) {
+ let (lifetimes, types, bindings) = try!(self.parse_generic_values_after_lt());
ast::AngleBracketedParameters(ast::AngleBracketedParameterData {
lifetimes: lifetimes,
types: OwnedSlice::from_vec(types),
bindings: OwnedSlice::from_vec(bindings),
})
- } else if self.eat(&token::OpenDelim(token::Paren)) {
+ } else if try!(self.eat(&token::OpenDelim(token::Paren))) {
let lo = self.last_span.lo;
- let inputs = self.parse_seq_to_end(
+ let inputs = try!(self.parse_seq_to_end(
&token::CloseDelim(token::Paren),
seq_sep_trailing_allowed(token::Comma),
- |p| p.parse_ty_sum());
+ |p| p.parse_ty_sum()));
- let output_ty = if self.eat(&token::RArrow) {
- Some(self.parse_ty())
+ let output_ty = if try!(self.eat(&token::RArrow)) {
+ Some(try!(self.parse_ty_nopanic()))
} else {
None
};
parameters: parameters });
// Continue only if we see a `::`
- if !self.eat(&token::ModSep) {
- return segments;
+ if !try!(self.eat(&token::ModSep)) {
+ return Ok(segments);
}
}
}
/// Examples:
/// - `a::b::<T,U>::c`
- pub fn parse_path_segments_with_colons(&mut self) -> Vec<ast::PathSegment> {
+ pub fn parse_path_segments_with_colons(&mut self) -> PResult<Vec<ast::PathSegment>> {
let mut segments = Vec::new();
loop {
// First, parse an identifier.
- let identifier = self.parse_ident_or_self_type();
+ let identifier = try!(self.parse_ident_or_self_type());
// If we do not see a `::`, stop.
- if !self.eat(&token::ModSep) {
+ if !try!(self.eat(&token::ModSep)) {
segments.push(ast::PathSegment {
identifier: identifier,
parameters: ast::PathParameters::none()
});
- return segments;
+ return Ok(segments);
}
// Check for a type segment.
- if self.eat_lt() {
+ if try!(self.eat_lt()) {
// Consumed `a::b::<`, go look for types
- let (lifetimes, types, bindings) = self.parse_generic_values_after_lt();
+ let (lifetimes, types, bindings) = try!(self.parse_generic_values_after_lt());
segments.push(ast::PathSegment {
identifier: identifier,
parameters: ast::AngleBracketedParameters(ast::AngleBracketedParameterData {
});
// Consumed `a::b::<T,U>`, check for `::` before proceeding
- if !self.eat(&token::ModSep) {
- return segments;
+ if !try!(self.eat(&token::ModSep)) {
+ return Ok(segments);
}
} else {
// Consumed `a::`, go look for `b`
/// Examples:
/// - `a::b::c`
- pub fn parse_path_segments_without_types(&mut self) -> Vec<ast::PathSegment> {
+ pub fn parse_path_segments_without_types(&mut self) -> PResult<Vec<ast::PathSegment>> {
let mut segments = Vec::new();
loop {
// First, parse an identifier.
- let identifier = self.parse_ident_or_self_type();
+ let identifier = try!(self.parse_ident_or_self_type());
// Assemble and push the result.
segments.push(ast::PathSegment {
});
// If we do not see a `::`, stop.
- if !self.eat(&token::ModSep) {
- return segments;
+ if !try!(self.eat(&token::ModSep)) {
+ return Ok(segments);
}
}
}
/// parses 0 or 1 lifetime
- pub fn parse_opt_lifetime(&mut self) -> Option<ast::Lifetime> {
+ pub fn parse_opt_lifetime(&mut self) -> PResult<Option<ast::Lifetime>> {
match self.token {
token::Lifetime(..) => {
- Some(self.parse_lifetime())
+ Ok(Some(try!(self.parse_lifetime())))
}
_ => {
- None
+ Ok(None)
}
}
}
/// Parses a single lifetime
/// Matches lifetime = LIFETIME
- pub fn parse_lifetime(&mut self) -> ast::Lifetime {
+ pub fn parse_lifetime(&mut self) -> PResult<ast::Lifetime> {
match self.token {
token::Lifetime(i) => {
let span = self.span;
- self.bump();
- return ast::Lifetime {
+ try!(self.bump());
+ return Ok(ast::Lifetime {
id: ast::DUMMY_NODE_ID,
span: span,
name: i.name
- };
+ });
}
_ => {
- self.fatal(&format!("expected a lifetime name"));
+ return Err(self.fatal(&format!("expected a lifetime name")));
}
}
}
/// Parses `lifetime_defs = [ lifetime_defs { ',' lifetime_defs } ]` where `lifetime_def =
/// lifetime [':' lifetimes]`
- pub fn parse_lifetime_defs(&mut self) -> Vec<ast::LifetimeDef> {
+ pub fn parse_lifetime_defs(&mut self) -> PResult<Vec<ast::LifetimeDef>> {
let mut res = Vec::new();
loop {
match self.token {
token::Lifetime(_) => {
- let lifetime = self.parse_lifetime();
+ let lifetime = try!(self.parse_lifetime());
let bounds =
- if self.eat(&token::Colon) {
- self.parse_lifetimes(token::BinOp(token::Plus))
+ if try!(self.eat(&token::Colon) ){
+ try!(self.parse_lifetimes(token::BinOp(token::Plus)))
} else {
Vec::new()
};
}
_ => {
- return res;
+ return Ok(res);
}
}
match self.token {
- token::Comma => { self.bump(); }
- token::Gt => { return res; }
- token::BinOp(token::Shr) => { return res; }
+ token::Comma => { try!(self.bump());}
+ token::Gt => { return Ok(res); }
+ token::BinOp(token::Shr) => { return Ok(res); }
_ => {
let this_token_str = self.this_token_to_string();
let msg = format!("expected `,` or `>` after lifetime \
name, found `{}`",
this_token_str);
- self.fatal(&msg[..]);
+ return Err(self.fatal(&msg[..]));
}
}
}
/// Parses zero or more comma separated lifetimes. Expects each lifetime to be followed by
/// either a comma or `>`. Used when parsing type parameter lists, where we expect something
/// like `<'a, 'b, T>`.
- pub fn parse_lifetimes(&mut self, sep: token::Token) -> Vec<ast::Lifetime> {
+ pub fn parse_lifetimes(&mut self, sep: token::Token) -> PResult<Vec<ast::Lifetime>> {
let mut res = Vec::new();
loop {
match self.token {
token::Lifetime(_) => {
- res.push(self.parse_lifetime());
+ res.push(try!(self.parse_lifetime()));
}
_ => {
- return res;
+ return Ok(res);
}
}
if self.token != sep {
- return res;
+ return Ok(res);
}
- self.bump();
+ try!(self.bump());
}
}
/// Parse mutability declaration (mut/const/imm)
- pub fn parse_mutability(&mut self) -> Mutability {
- if self.eat_keyword(keywords::Mut) {
- MutMutable
+ pub fn parse_mutability(&mut self) -> PResult<Mutability> {
+ if try!(self.eat_keyword(keywords::Mut) ){
+ Ok(MutMutable)
} else {
- MutImmutable
+ Ok(MutImmutable)
}
}
/// Parse ident COLON expr
- pub fn parse_field(&mut self) -> Field {
+ pub fn parse_field(&mut self) -> PResult<Field> {
let lo = self.span.lo;
- let i = self.parse_ident();
+ let i = try!(self.parse_ident());
let hi = self.last_span.hi;
- self.expect(&token::Colon);
- let e = self.parse_expr();
- ast::Field {
+ try!(self.expect(&token::Colon));
+ let e = try!(self.parse_expr_nopanic());
+ Ok(ast::Field {
ident: spanned(lo, hi, i),
span: mk_sp(lo, e.span.hi),
expr: e,
- }
+ })
}
pub fn mk_expr(&mut self, lo: BytePos, hi: BytePos, node: Expr_) -> P<Expr> {
})
}
- fn expect_open_delim(&mut self) -> token::DelimToken {
+ fn expect_open_delim(&mut self) -> PResult<token::DelimToken> {
self.expected_tokens.push(TokenType::Token(token::Gt));
match self.token {
token::OpenDelim(delim) => {
- self.bump();
- delim
+ try!(self.bump());
+ Ok(delim)
},
- _ => self.fatal("expected open delimiter"),
+ _ => Err(self.fatal("expected open delimiter")),
}
}
/// At the bottom (top?) of the precedence hierarchy,
/// parse things like parenthesized exprs,
/// macros, return, etc.
- pub fn parse_bottom_expr(&mut self) -> P<Expr> {
+ pub fn parse_bottom_expr(&mut self) -> PResult<P<Expr>> {
maybe_whole_expr!(self);
let lo = self.span.lo;
// Note: when adding new syntax here, don't forget to adjust Token::can_begin_expr().
match self.token {
token::OpenDelim(token::Paren) => {
- self.bump();
+ try!(self.bump());
// (e) is parenthesized e
// (e,) is a tuple with only one field, e
let mut es = vec![];
let mut trailing_comma = false;
while self.token != token::CloseDelim(token::Paren) {
- es.push(self.parse_expr());
- self.commit_expr(&**es.last().unwrap(), &[],
- &[token::Comma, token::CloseDelim(token::Paren)]);
+ es.push(try!(self.parse_expr_nopanic()));
+ try!(self.commit_expr(&**es.last().unwrap(), &[],
+ &[token::Comma, token::CloseDelim(token::Paren)]));
if self.check(&token::Comma) {
trailing_comma = true;
- self.bump();
+ try!(self.bump());
} else {
trailing_comma = false;
break;
}
}
- self.bump();
+ try!(self.bump());
hi = self.span.hi;
return if es.len() == 1 && !trailing_comma {
- self.mk_expr(lo, hi, ExprParen(es.into_iter().nth(0).unwrap()))
+ Ok(self.mk_expr(lo, hi, ExprParen(es.into_iter().nth(0).unwrap())))
} else {
- self.mk_expr(lo, hi, ExprTup(es))
+ Ok(self.mk_expr(lo, hi, ExprTup(es)))
}
},
token::OpenDelim(token::Brace) => {
name: token::SELF_KEYWORD_NAME,
ctxt: _
}, token::Plain) => {
- self.bump();
+ try!(self.bump());
let path = ast_util::ident_to_path(mk_sp(lo, hi), id);
ex = ExprPath(None, path);
hi = self.last_span.hi;
}
token::OpenDelim(token::Bracket) => {
- self.bump();
+ try!(self.bump());
if self.check(&token::CloseDelim(token::Bracket)) {
// Empty vector.
- self.bump();
+ try!(self.bump());
ex = ExprVec(Vec::new());
} else {
// Nonempty vector.
- let first_expr = self.parse_expr();
+ let first_expr = try!(self.parse_expr_nopanic());
if self.check(&token::Semi) {
// Repeating vector syntax: [ 0; 512 ]
- self.bump();
- let count = self.parse_expr();
- self.expect(&token::CloseDelim(token::Bracket));
+ try!(self.bump());
+ let count = try!(self.parse_expr_nopanic());
+ try!(self.expect(&token::CloseDelim(token::Bracket)));
ex = ExprRepeat(first_expr, count);
} else if self.check(&token::Comma) {
// Vector with two or more elements.
- self.bump();
- let remaining_exprs = self.parse_seq_to_end(
+ try!(self.bump());
+ let remaining_exprs = try!(self.parse_seq_to_end(
&token::CloseDelim(token::Bracket),
seq_sep_trailing_allowed(token::Comma),
- |p| p.parse_expr()
- );
+ |p| Ok(try!(p.parse_expr_nopanic()))
+ ));
let mut exprs = vec!(first_expr);
exprs.extend(remaining_exprs.into_iter());
ex = ExprVec(exprs);
} else {
// Vector with one element.
- self.expect(&token::CloseDelim(token::Bracket));
+ try!(self.expect(&token::CloseDelim(token::Bracket)));
ex = ExprVec(vec!(first_expr));
}
}
hi = self.last_span.hi;
}
_ => {
- if self.eat_lt() {
+ if try!(self.eat_lt()){
// QUALIFIED PATH `<TYPE as TRAIT_REF>::item::<'a, T>`
- let self_type = self.parse_ty_sum();
- let mut path = if self.eat_keyword(keywords::As) {
- self.parse_path(LifetimeAndTypesWithoutColons)
+ let self_type = try!(self.parse_ty_sum());
+ let mut path = if try!(self.eat_keyword(keywords::As) ){
+ try!(self.parse_path(LifetimeAndTypesWithoutColons))
} else {
ast::Path {
span: self.span,
ty: self_type,
position: path.segments.len()
};
- self.expect(&token::Gt);
- self.expect(&token::ModSep);
+ try!(self.expect(&token::Gt));
+ try!(self.expect(&token::ModSep));
- let item_name = self.parse_ident();
- let parameters = if self.eat(&token::ModSep) {
- self.expect_lt();
+ let item_name = try!(self.parse_ident());
+ let parameters = if try!(self.eat(&token::ModSep) ){
+ try!(self.expect_lt());
// Consumed `item::<`, go look for types
let (lifetimes, types, bindings) =
- self.parse_generic_values_after_lt();
+ try!(self.parse_generic_values_after_lt());
ast::AngleBracketedParameters(ast::AngleBracketedParameterData {
lifetimes: lifetimes,
types: OwnedSlice::from_vec(types),
path.span.hi = self.last_span.hi;
let hi = self.span.hi;
- return self.mk_expr(lo, hi, ExprPath(Some(qself), path));
+ return Ok(self.mk_expr(lo, hi, ExprPath(Some(qself), path)));
}
- if self.eat_keyword(keywords::Move) {
+ if try!(self.eat_keyword(keywords::Move) ){
return self.parse_lambda_expr(CaptureByValue);
}
- if self.eat_keyword(keywords::If) {
+ if try!(self.eat_keyword(keywords::If)) {
return self.parse_if_expr();
}
- if self.eat_keyword(keywords::For) {
+ if try!(self.eat_keyword(keywords::For) ){
return self.parse_for_expr(None);
}
- if self.eat_keyword(keywords::While) {
+ if try!(self.eat_keyword(keywords::While) ){
return self.parse_while_expr(None);
}
if self.token.is_lifetime() {
let lifetime = self.get_lifetime();
- self.bump();
- self.expect(&token::Colon);
- if self.eat_keyword(keywords::While) {
+ try!(self.bump());
+ try!(self.expect(&token::Colon));
+ if try!(self.eat_keyword(keywords::While) ){
return self.parse_while_expr(Some(lifetime))
}
- if self.eat_keyword(keywords::For) {
+ if try!(self.eat_keyword(keywords::For) ){
return self.parse_for_expr(Some(lifetime))
}
- if self.eat_keyword(keywords::Loop) {
+ if try!(self.eat_keyword(keywords::Loop) ){
return self.parse_loop_expr(Some(lifetime))
}
- self.fatal("expected `while`, `for`, or `loop` after a label")
+ return Err(self.fatal("expected `while`, `for`, or `loop` after a label"))
}
- if self.eat_keyword(keywords::Loop) {
+ if try!(self.eat_keyword(keywords::Loop) ){
return self.parse_loop_expr(None);
}
- if self.eat_keyword(keywords::Continue) {
+ if try!(self.eat_keyword(keywords::Continue) ){
let lo = self.span.lo;
let ex = if self.token.is_lifetime() {
let lifetime = self.get_lifetime();
- self.bump();
+ try!(self.bump());
ExprAgain(Some(lifetime))
} else {
ExprAgain(None)
};
let hi = self.span.hi;
- return self.mk_expr(lo, hi, ex);
+ return Ok(self.mk_expr(lo, hi, ex));
}
- if self.eat_keyword(keywords::Match) {
+ if try!(self.eat_keyword(keywords::Match) ){
return self.parse_match_expr();
}
- if self.eat_keyword(keywords::Unsafe) {
+ if try!(self.eat_keyword(keywords::Unsafe) ){
return self.parse_block_expr(
lo,
UnsafeBlock(ast::UserProvided));
}
- if self.eat_keyword(keywords::Return) {
+ if try!(self.eat_keyword(keywords::Return) ){
// RETURN expression
if self.token.can_begin_expr() {
- let e = self.parse_expr();
+ let e = try!(self.parse_expr_nopanic());
hi = e.span.hi;
ex = ExprRet(Some(e));
} else {
ex = ExprRet(None);
}
- } else if self.eat_keyword(keywords::Break) {
+ } else if try!(self.eat_keyword(keywords::Break) ){
// BREAK expression
if self.token.is_lifetime() {
let lifetime = self.get_lifetime();
- self.bump();
+ try!(self.bump());
ex = ExprBreak(Some(lifetime));
} else {
ex = ExprBreak(None);
!self.check_keyword(keywords::True) &&
!self.check_keyword(keywords::False) {
let pth =
- self.parse_path(LifetimeAndTypesWithColons);
+ try!(self.parse_path(LifetimeAndTypesWithColons));
// `!`, as an operator, is prefix, so we know this isn't that
if self.check(&token::Not) {
// MACRO INVOCATION expression
- self.bump();
+ try!(self.bump());
- let delim = self.expect_open_delim();
- let tts = self.parse_seq_to_end(
+ let delim = try!(self.expect_open_delim());
+ let tts = try!(self.parse_seq_to_end(
&token::CloseDelim(delim),
seq_sep_none(),
- |p| p.parse_token_tree());
+ |p| p.parse_token_tree()));
let hi = self.span.hi;
- return self.mk_mac_expr(lo,
+ return Ok(self.mk_mac_expr(lo,
hi,
MacInvocTT(pth,
tts,
- EMPTY_CTXT));
+ EMPTY_CTXT)));
}
if self.check(&token::OpenDelim(token::Brace)) {
// This is a struct literal, unless we're prohibited
// from parsing struct literals here.
if !self.restrictions.contains(RESTRICTION_NO_STRUCT_LITERAL) {
// It's a struct literal.
- self.bump();
+ try!(self.bump());
let mut fields = Vec::new();
let mut base = None;
while self.token != token::CloseDelim(token::Brace) {
- if self.eat(&token::DotDot) {
- base = Some(self.parse_expr());
+ if try!(self.eat(&token::DotDot) ){
+ base = Some(try!(self.parse_expr_nopanic()));
break;
}
- fields.push(self.parse_field());
- self.commit_expr(&*fields.last().unwrap().expr,
+ fields.push(try!(self.parse_field()));
+ try!(self.commit_expr(&*fields.last().unwrap().expr,
&[token::Comma],
- &[token::CloseDelim(token::Brace)]);
+ &[token::CloseDelim(token::Brace)]));
}
if fields.len() == 0 && base.is_none() {
}
hi = self.span.hi;
- self.expect(&token::CloseDelim(token::Brace));
+ try!(self.expect(&token::CloseDelim(token::Brace)));
ex = ExprStruct(pth, fields, base);
- return self.mk_expr(lo, hi, ex);
+ return Ok(self.mk_expr(lo, hi, ex));
}
}
ex = ExprPath(None, pth);
} else {
// other literal expression
- let lit = self.parse_lit();
+ let lit = try!(self.parse_lit());
hi = lit.span.hi;
ex = ExprLit(P(lit));
}
}
}
- return self.mk_expr(lo, hi, ex);
+ return Ok(self.mk_expr(lo, hi, ex));
}
/// Parse a block or unsafe block
pub fn parse_block_expr(&mut self, lo: BytePos, blk_mode: BlockCheckMode)
- -> P<Expr> {
- self.expect(&token::OpenDelim(token::Brace));
- let blk = self.parse_block_tail(lo, blk_mode);
- return self.mk_expr(blk.span.lo, blk.span.hi, ExprBlock(blk));
+ -> PResult<P<Expr>> {
+ try!(self.expect(&token::OpenDelim(token::Brace)));
+ let blk = try!(self.parse_block_tail(lo, blk_mode));
+ return Ok(self.mk_expr(blk.span.lo, blk.span.hi, ExprBlock(blk)));
}
/// parse a.b or a(13) or a[4] or just a
- pub fn parse_dot_or_call_expr(&mut self) -> P<Expr> {
- let b = self.parse_bottom_expr();
+ pub fn parse_dot_or_call_expr(&mut self) -> PResult<P<Expr>> {
+ let b = try!(self.parse_bottom_expr());
self.parse_dot_or_call_expr_with(b)
}
- pub fn parse_dot_or_call_expr_with(&mut self, e0: P<Expr>) -> P<Expr> {
+ pub fn parse_dot_or_call_expr_with(&mut self, e0: P<Expr>) -> PResult<P<Expr>> {
let mut e = e0;
let lo = e.span.lo;
let mut hi;
loop {
// expr.f
- if self.eat(&token::Dot) {
+ if try!(self.eat(&token::Dot) ){
match self.token {
token::Ident(i, _) => {
let dot = self.last_span.hi;
hi = self.span.hi;
- self.bump();
- let (_, tys, bindings) = if self.eat(&token::ModSep) {
- self.expect_lt();
- self.parse_generic_values_after_lt()
+ try!(self.bump());
+ let (_, tys, bindings) = if try!(self.eat(&token::ModSep) ){
+ try!(self.expect_lt());
+ try!(self.parse_generic_values_after_lt())
} else {
(Vec::new(), Vec::new(), Vec::new())
};
// expr.f() method call
match self.token {
token::OpenDelim(token::Paren) => {
- let mut es = self.parse_unspanned_seq(
+ let mut es = try!(self.parse_unspanned_seq(
&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
seq_sep_trailing_allowed(token::Comma),
- |p| p.parse_expr()
- );
+ |p| Ok(try!(p.parse_expr_nopanic()))
+ ));
hi = self.last_span.hi;
es.insert(0, e);
let dot = self.last_span.hi;
hi = self.span.hi;
- self.bump();
+ try!(self.bump());
let index = n.as_str().parse::<usize>().ok();
match index {
}
}
token::Literal(token::Float(n), _suf) => {
- self.bump();
+ try!(self.bump());
let last_span = self.last_span;
let fstr = n.as_str();
self.span_err(last_span,
self.abort_if_errors();
}
- _ => self.unexpected()
+ _ => return Err(self.unexpected())
}
continue;
}
match self.token {
// expr(...)
token::OpenDelim(token::Paren) => {
- let es = self.parse_unspanned_seq(
+ let es = try!(self.parse_unspanned_seq(
&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
seq_sep_trailing_allowed(token::Comma),
- |p| p.parse_expr()
- );
+ |p| Ok(try!(p.parse_expr_nopanic()))
+ ));
hi = self.last_span.hi;
let nd = self.mk_call(e, es);
// expr[...]
// Could be either an index expression or a slicing expression.
token::OpenDelim(token::Bracket) => {
- self.bump();
-
- let ix = self.parse_expr();
+ try!(self.bump());
+ let ix = try!(self.parse_expr_nopanic());
hi = self.span.hi;
- self.commit_expr_expecting(&*ix, token::CloseDelim(token::Bracket));
+ try!(self.commit_expr_expecting(&*ix, token::CloseDelim(token::Bracket)));
let index = self.mk_index(e, ix);
e = self.mk_expr(lo, hi, index)
}
- _ => return e
+ _ => return Ok(e)
}
}
- return e;
+ return Ok(e);
}
// Parse unquoted tokens after a `$` in a token tree
- fn parse_unquoted(&mut self) -> TokenTree {
+ fn parse_unquoted(&mut self) -> PResult<TokenTree> {
let mut sp = self.span;
let (name, namep) = match self.token {
token::Dollar => {
- self.bump();
+ try!(self.bump());
if self.token == token::OpenDelim(token::Paren) {
- let Spanned { node: seq, span: seq_span } = self.parse_seq(
+ let Spanned { node: seq, span: seq_span } = try!(self.parse_seq(
&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
seq_sep_none(),
|p| p.parse_token_tree()
- );
- let (sep, repeat) = self.parse_sep_and_kleene_op();
+ ));
+ let (sep, repeat) = try!(self.parse_sep_and_kleene_op());
let name_num = macro_parser::count_names(&seq);
- return TtSequence(mk_sp(sp.lo, seq_span.hi),
+ return Ok(TtSequence(mk_sp(sp.lo, seq_span.hi),
Rc::new(SequenceRepetition {
tts: seq,
separator: sep,
op: repeat,
num_captures: name_num
- }));
+ })));
} else if self.token.is_keyword_allow_following_colon(keywords::Crate) {
- self.bump();
- return TtToken(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar));
+ try!(self.bump());
+ return Ok(TtToken(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar)));
} else {
sp = mk_sp(sp.lo, self.span.hi);
let namep = match self.token { token::Ident(_, p) => p, _ => token::Plain };
- let name = self.parse_ident();
+ let name = try!(self.parse_ident());
(name, namep)
}
}
token::SubstNt(name, namep) => {
- self.bump();
+ try!(self.bump());
(name, namep)
}
_ => unreachable!()
if self.token == token::Colon && self.look_ahead(1, |t| t.is_ident() &&
!t.is_strict_keyword() &&
!t.is_reserved_keyword()) {
- self.bump();
+ try!(self.bump());
sp = mk_sp(sp.lo, self.span.hi);
let kindp = match self.token { token::Ident(_, p) => p, _ => token::Plain };
- let nt_kind = self.parse_ident();
- TtToken(sp, MatchNt(name, nt_kind, namep, kindp))
+ let nt_kind = try!(self.parse_ident());
+ Ok(TtToken(sp, MatchNt(name, nt_kind, namep, kindp)))
} else {
- TtToken(sp, SubstNt(name, namep))
+ Ok(TtToken(sp, SubstNt(name, namep)))
}
}
- pub fn check_unknown_macro_variable(&mut self) {
+ pub fn check_unknown_macro_variable(&mut self) -> PResult<()> {
if self.quote_depth == 0 {
match self.token {
token::SubstNt(name, _) =>
- self.fatal(&format!("unknown macro variable `{}`",
- token::get_ident(name))),
+ return Err(self.fatal(&format!("unknown macro variable `{}`",
+ token::get_ident(name)))),
_ => {}
}
}
+ Ok(())
}
/// Parse an optional separator followed by a Kleene-style
/// repetition token (+ or *).
- pub fn parse_sep_and_kleene_op(&mut self) -> (Option<token::Token>, ast::KleeneOp) {
- fn parse_kleene_op(parser: &mut Parser) -> Option<ast::KleeneOp> {
+ pub fn parse_sep_and_kleene_op(&mut self) -> PResult<(Option<token::Token>, ast::KleeneOp)> {
+ fn parse_kleene_op(parser: &mut Parser) -> PResult<Option<ast::KleeneOp>> {
match parser.token {
token::BinOp(token::Star) => {
- parser.bump();
- Some(ast::ZeroOrMore)
+ try!(parser.bump());
+ Ok(Some(ast::ZeroOrMore))
},
token::BinOp(token::Plus) => {
- parser.bump();
- Some(ast::OneOrMore)
+ try!(parser.bump());
+ Ok(Some(ast::OneOrMore))
},
- _ => None
+ _ => Ok(None)
}
};
- match parse_kleene_op(self) {
- Some(kleene_op) => return (None, kleene_op),
+ match try!(parse_kleene_op(self)) {
+ Some(kleene_op) => return Ok((None, kleene_op)),
None => {}
}
- let separator = self.bump_and_get();
- match parse_kleene_op(self) {
- Some(zerok) => (Some(separator), zerok),
- None => self.fatal("expected `*` or `+`")
+ let separator = try!(self.bump_and_get());
+ match try!(parse_kleene_op(self)) {
+ Some(zerok) => Ok((Some(separator), zerok)),
+ None => return Err(self.fatal("expected `*` or `+`"))
}
}
/// parse a single token tree from the input.
- pub fn parse_token_tree(&mut self) -> TokenTree {
+ pub fn parse_token_tree(&mut self) -> PResult<TokenTree> {
// FIXME #6994: currently, this is too eager. It
// parses token trees but also identifies TtSequence's
// and token::SubstNt's; it's too early to know yet
// not an EOF, and not the desired right-delimiter (if
// it were, parse_seq_to_before_end would have prevented
// reaching this point.
- fn parse_non_delim_tt_tok(p: &mut Parser) -> TokenTree {
+ fn parse_non_delim_tt_tok(p: &mut Parser) -> PResult<TokenTree> {
maybe_whole!(deref p, NtTT);
match p.token {
token::CloseDelim(_) => {
Some(&sp) => p.span_note(sp, "unclosed delimiter"),
};
let token_str = p.this_token_to_string();
- p.fatal(&format!("incorrect close delimiter: `{}`",
- token_str))
+ Err(p.fatal(&format!("incorrect close delimiter: `{}`",
+ token_str)))
},
/* we ought to allow different depths of unquotation */
token::Dollar | token::SubstNt(..) if p.quote_depth > 0 => {
p.parse_unquoted()
}
_ => {
- TtToken(p.span, p.bump_and_get())
+ Ok(TtToken(p.span, try!(p.bump_and_get())))
}
}
}
}
// There shouldn't really be a span, but it's easier for the test runner
// if we give it one
- self.fatal("this file contains an un-closed delimiter ");
+ return Err(self.fatal("this file contains an un-closed delimiter "));
},
token::OpenDelim(delim) => {
// The span for beginning of the delimited section
// Parse the open delimiter.
self.open_braces.push(self.span);
let open_span = self.span;
- self.bump();
+ try!(self.bump());
// Parse the token trees within the delimiters
- let tts = self.parse_seq_to_before_end(
+ let tts = try!(self.parse_seq_to_before_end(
&token::CloseDelim(delim),
seq_sep_none(),
|p| p.parse_token_tree()
- );
+ ));
// Parse the close delimiter.
let close_span = self.span;
- self.bump();
+ try!(self.bump());
self.open_braces.pop().unwrap();
// Expand to cover the entire delimited token tree
let span = Span { hi: close_span.hi, ..pre_span };
- TtDelimited(span, Rc::new(Delimited {
+ Ok(TtDelimited(span, Rc::new(Delimited {
delim: delim,
open_span: open_span,
tts: tts,
close_span: close_span,
- }))
+ })))
},
_ => parse_non_delim_tt_tok(self),
}
// parse a stream of tokens into a list of TokenTree's,
// up to EOF.
- pub fn parse_all_token_trees(&mut self) -> Vec<TokenTree> {
+ pub fn parse_all_token_trees(&mut self) -> PResult<Vec<TokenTree>> {
let mut tts = Vec::new();
while self.token != token::Eof {
- tts.push(self.parse_token_tree());
+ tts.push(try!(self.parse_token_tree()));
}
- tts
+ Ok(tts)
}
/// Parse a prefix-operator expr
- pub fn parse_prefix_expr(&mut self) -> P<Expr> {
+ pub fn parse_prefix_expr(&mut self) -> PResult<P<Expr>> {
let lo = self.span.lo;
let hi;
let ex;
match self.token {
token::Not => {
- self.bump();
- let e = self.parse_prefix_expr();
+ try!(self.bump());
+ let e = try!(self.parse_prefix_expr());
hi = e.span.hi;
ex = self.mk_unary(UnNot, e);
}
token::BinOp(token::Minus) => {
- self.bump();
- let e = self.parse_prefix_expr();
+ try!(self.bump());
+ let e = try!(self.parse_prefix_expr());
hi = e.span.hi;
ex = self.mk_unary(UnNeg, e);
}
token::BinOp(token::Star) => {
- self.bump();
- let e = self.parse_prefix_expr();
+ try!(self.bump());
+ let e = try!(self.parse_prefix_expr());
hi = e.span.hi;
ex = self.mk_unary(UnDeref, e);
}
token::BinOp(token::And) | token::AndAnd => {
- self.expect_and();
- let m = self.parse_mutability();
- let e = self.parse_prefix_expr();
+ try!(self.expect_and());
+ let m = try!(self.parse_mutability());
+ let e = try!(self.parse_prefix_expr());
hi = e.span.hi;
ex = ExprAddrOf(m, e);
}
let lo = self.span.lo;
- self.bump();
+ try!(self.bump());
// Check for a place: `box(PLACE) EXPR`.
- if self.eat(&token::OpenDelim(token::Paren)) {
+ if try!(self.eat(&token::OpenDelim(token::Paren)) ){
// Support `box() EXPR` as the default.
- if !self.eat(&token::CloseDelim(token::Paren)) {
- let place = self.parse_expr();
- self.expect(&token::CloseDelim(token::Paren));
+ if !try!(self.eat(&token::CloseDelim(token::Paren)) ){
+ let place = try!(self.parse_expr_nopanic());
+ try!(self.expect(&token::CloseDelim(token::Paren)));
// Give a suggestion to use `box()` when a parenthesised expression is used
if !self.token.can_begin_expr() {
let span = self.span;
"perhaps you meant `box() (foo)` instead?");
self.abort_if_errors();
}
- let subexpression = self.parse_prefix_expr();
+ let subexpression = try!(self.parse_prefix_expr());
hi = subexpression.span.hi;
ex = ExprBox(Some(place), subexpression);
- return self.mk_expr(lo, hi, ex);
+ return Ok(self.mk_expr(lo, hi, ex));
}
}
// Otherwise, we use the unique pointer default.
- let subexpression = self.parse_prefix_expr();
+ let subexpression = try!(self.parse_prefix_expr());
hi = subexpression.span.hi;
// FIXME (pnkfelix): After working out kinks with box
// desugaring, should be `ExprBox(None, subexpression)`
}
_ => return self.parse_dot_or_call_expr()
}
- return self.mk_expr(lo, hi, ex);
+ return Ok(self.mk_expr(lo, hi, ex));
}
/// Parse an expression of binops
- pub fn parse_binops(&mut self) -> P<Expr> {
- let prefix_expr = self.parse_prefix_expr();
+ pub fn parse_binops(&mut self) -> PResult<P<Expr>> {
+ let prefix_expr = try!(self.parse_prefix_expr());
self.parse_more_binops(prefix_expr, 0)
}
/// Parse an expression of binops of at least min_prec precedence
- pub fn parse_more_binops(&mut self, lhs: P<Expr>, min_prec: usize) -> P<Expr> {
- if self.expr_is_complete(&*lhs) { return lhs; }
-
- // Prevent dynamic borrow errors later on by limiting the
- // scope of the borrows.
- if self.token == token::BinOp(token::Or) &&
- self.restrictions.contains(RESTRICTION_NO_BAR_OP) {
- return lhs;
- }
+ pub fn parse_more_binops(&mut self, lhs: P<Expr>, min_prec: usize) -> PResult<P<Expr>> {
+ if self.expr_is_complete(&*lhs) { return Ok(lhs); }
self.expected_tokens.push(TokenType::Operator);
}
let cur_prec = operator_prec(cur_op);
if cur_prec >= min_prec {
- self.bump();
- let expr = self.parse_prefix_expr();
- let rhs = self.parse_more_binops(expr, cur_prec + 1);
+ try!(self.bump());
+ let expr = try!(self.parse_prefix_expr());
+ let rhs = try!(self.parse_more_binops(expr, cur_prec + 1));
let lhs_span = lhs.span;
let rhs_span = rhs.span;
let binary = self.mk_binary(codemap::respan(cur_op_span, cur_op), lhs, rhs);
let bin = self.mk_expr(lhs_span.lo, rhs_span.hi, binary);
self.parse_more_binops(bin, min_prec)
} else {
- lhs
+ Ok(lhs)
}
}
None => {
- if AS_PREC >= min_prec && self.eat_keyword_noexpect(keywords::As) {
- let rhs = self.parse_ty();
+ if AS_PREC >= min_prec && try!(self.eat_keyword_noexpect(keywords::As) ){
+ let rhs = try!(self.parse_ty_nopanic());
let _as = self.mk_expr(lhs.span.lo,
rhs.span.hi,
ExprCast(lhs, rhs));
self.parse_more_binops(_as, min_prec)
} else {
- lhs
+ Ok(lhs)
}
}
}
/// Parse an assignment expression....
/// actually, this seems to be the main entry point for
/// parsing an arbitrary expression.
- pub fn parse_assign_expr(&mut self) -> P<Expr> {
+ pub fn parse_assign_expr(&mut self) -> PResult<P<Expr>> {
match self.token {
token::DotDot => {
// prefix-form of range notation '..expr'
// (much lower than other prefix expressions) to be consistent
// with the postfix-form 'expr..'
let lo = self.span.lo;
- self.bump();
+ try!(self.bump());
let opt_end = if self.is_at_start_of_range_notation_rhs() {
- let end = self.parse_binops();
+ let end = try!(self.parse_binops());
Some(end)
} else {
None
};
let hi = self.span.hi;
let ex = self.mk_range(None, opt_end);
- self.mk_expr(lo, hi, ex)
+ Ok(self.mk_expr(lo, hi, ex))
}
_ => {
- let lhs = self.parse_binops();
+ let lhs = try!(self.parse_binops());
self.parse_assign_expr_with(lhs)
}
}
}
- pub fn parse_assign_expr_with(&mut self, lhs: P<Expr>) -> P<Expr> {
+ pub fn parse_assign_expr_with(&mut self, lhs: P<Expr>) -> PResult<P<Expr>> {
let restrictions = self.restrictions & RESTRICTION_NO_STRUCT_LITERAL;
let op_span = self.span;
match self.token {
token::Eq => {
- self.bump();
- let rhs = self.parse_expr_res(restrictions);
- self.mk_expr(lhs.span.lo, rhs.span.hi, ExprAssign(lhs, rhs))
+ try!(self.bump());
+ let rhs = try!(self.parse_expr_res(restrictions));
+ Ok(self.mk_expr(lhs.span.lo, rhs.span.hi, ExprAssign(lhs, rhs)))
}
token::BinOpEq(op) => {
- self.bump();
- let rhs = self.parse_expr_res(restrictions);
+ try!(self.bump());
+ let rhs = try!(self.parse_expr_res(restrictions));
let aop = match op {
token::Plus => BiAdd,
token::Minus => BiSub,
let rhs_span = rhs.span;
let span = lhs.span;
let assign_op = self.mk_assign_op(codemap::respan(op_span, aop), lhs, rhs);
- self.mk_expr(span.lo, rhs_span.hi, assign_op)
+ Ok(self.mk_expr(span.lo, rhs_span.hi, assign_op))
}
// A range expression, either `expr..expr` or `expr..`.
token::DotDot => {
- self.bump();
+ try!(self.bump());
let opt_end = if self.is_at_start_of_range_notation_rhs() {
- let end = self.parse_binops();
+ let end = try!(self.parse_binops());
Some(end)
} else {
None
let lo = lhs.span.lo;
let hi = self.span.hi;
let range = self.mk_range(Some(lhs), opt_end);
- return self.mk_expr(lo, hi, range);
+ return Ok(self.mk_expr(lo, hi, range));
}
_ => {
- lhs
+ Ok(lhs)
}
}
}
}
/// Parse an 'if' or 'if let' expression ('if' token already eaten)
- pub fn parse_if_expr(&mut self) -> P<Expr> {
+ pub fn parse_if_expr(&mut self) -> PResult<P<Expr>> {
if self.check_keyword(keywords::Let) {
return self.parse_if_let_expr();
}
let lo = self.last_span.lo;
- let cond = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL);
- let thn = self.parse_block();
+ let cond = try!(self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL));
+ let thn = try!(self.parse_block());
let mut els: Option<P<Expr>> = None;
let mut hi = thn.span.hi;
- if self.eat_keyword(keywords::Else) {
- let elexpr = self.parse_else_expr();
+ if try!(self.eat_keyword(keywords::Else) ){
+ let elexpr = try!(self.parse_else_expr());
hi = elexpr.span.hi;
els = Some(elexpr);
}
- self.mk_expr(lo, hi, ExprIf(cond, thn, els))
+ Ok(self.mk_expr(lo, hi, ExprIf(cond, thn, els)))
}
/// Parse an 'if let' expression ('if' token already eaten)
- pub fn parse_if_let_expr(&mut self) -> P<Expr> {
+ pub fn parse_if_let_expr(&mut self) -> PResult<P<Expr>> {
let lo = self.last_span.lo;
- self.expect_keyword(keywords::Let);
- let pat = self.parse_pat();
- self.expect(&token::Eq);
- let expr = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL);
- let thn = self.parse_block();
- let (hi, els) = if self.eat_keyword(keywords::Else) {
- let expr = self.parse_else_expr();
+ try!(self.expect_keyword(keywords::Let));
+ let pat = try!(self.parse_pat_nopanic());
+ try!(self.expect(&token::Eq));
+ let expr = try!(self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL));
+ let thn = try!(self.parse_block());
+ let (hi, els) = if try!(self.eat_keyword(keywords::Else) ){
+ let expr = try!(self.parse_else_expr());
(expr.span.hi, Some(expr))
} else {
(thn.span.hi, None)
};
- self.mk_expr(lo, hi, ExprIfLet(pat, expr, thn, els))
+ Ok(self.mk_expr(lo, hi, ExprIfLet(pat, expr, thn, els)))
}
// `|args| expr`
pub fn parse_lambda_expr(&mut self, capture_clause: CaptureClause)
- -> P<Expr>
+ -> PResult<P<Expr>>
{
let lo = self.span.lo;
- let decl = self.parse_fn_block_decl();
+ let decl = try!(self.parse_fn_block_decl());
let body = match decl.output {
DefaultReturn(_) => {
// If no explicit return type is given, parse any
// expr and wrap it up in a dummy block:
- let body_expr = self.parse_expr();
+ let body_expr = try!(self.parse_expr_nopanic());
P(ast::Block {
id: ast::DUMMY_NODE_ID,
stmts: vec![],
_ => {
// If an explicit return type is given, require a
// block to appear (RFC 968).
- self.parse_block()
+ try!(self.parse_block())
}
};
- self.mk_expr(
+ Ok(self.mk_expr(
lo,
body.span.hi,
- ExprClosure(capture_clause, decl, body))
+ ExprClosure(capture_clause, decl, body)))
}
- pub fn parse_else_expr(&mut self) -> P<Expr> {
- if self.eat_keyword(keywords::If) {
+ pub fn parse_else_expr(&mut self) -> PResult<P<Expr>> {
+ if try!(self.eat_keyword(keywords::If) ){
return self.parse_if_expr();
} else {
- let blk = self.parse_block();
- return self.mk_expr(blk.span.lo, blk.span.hi, ExprBlock(blk));
+ let blk = try!(self.parse_block());
+ return Ok(self.mk_expr(blk.span.lo, blk.span.hi, ExprBlock(blk)));
}
}
/// Parse a 'for' .. 'in' expression ('for' token already eaten)
- pub fn parse_for_expr(&mut self, opt_ident: Option<ast::Ident>) -> P<Expr> {
+ pub fn parse_for_expr(&mut self, opt_ident: Option<ast::Ident>) -> PResult<P<Expr>> {
// Parse: `for <src_pat> in <src_expr> <src_loop_block>`
let lo = self.last_span.lo;
- let pat = self.parse_pat();
- self.expect_keyword(keywords::In);
- let expr = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL);
- let loop_block = self.parse_block();
+ let pat = try!(self.parse_pat_nopanic());
+ try!(self.expect_keyword(keywords::In));
+ let expr = try!(self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL));
+ let loop_block = try!(self.parse_block());
let hi = self.span.hi;
- self.mk_expr(lo, hi, ExprForLoop(pat, expr, loop_block, opt_ident))
+ Ok(self.mk_expr(lo, hi, ExprForLoop(pat, expr, loop_block, opt_ident)))
}
/// Parse a 'while' or 'while let' expression ('while' token already eaten)
- pub fn parse_while_expr(&mut self, opt_ident: Option<ast::Ident>) -> P<Expr> {
+ pub fn parse_while_expr(&mut self, opt_ident: Option<ast::Ident>) -> PResult<P<Expr>> {
if self.token.is_keyword(keywords::Let) {
return self.parse_while_let_expr(opt_ident);
}
let lo = self.last_span.lo;
- let cond = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL);
- let body = self.parse_block();
+ let cond = try!(self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL));
+ let body = try!(self.parse_block());
let hi = body.span.hi;
- return self.mk_expr(lo, hi, ExprWhile(cond, body, opt_ident));
+ return Ok(self.mk_expr(lo, hi, ExprWhile(cond, body, opt_ident)));
}
/// Parse a 'while let' expression ('while' token already eaten)
- pub fn parse_while_let_expr(&mut self, opt_ident: Option<ast::Ident>) -> P<Expr> {
+ pub fn parse_while_let_expr(&mut self, opt_ident: Option<ast::Ident>) -> PResult<P<Expr>> {
let lo = self.last_span.lo;
- self.expect_keyword(keywords::Let);
- let pat = self.parse_pat();
- self.expect(&token::Eq);
- let expr = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL);
- let body = self.parse_block();
+ try!(self.expect_keyword(keywords::Let));
+ let pat = try!(self.parse_pat_nopanic());
+ try!(self.expect(&token::Eq));
+ let expr = try!(self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL));
+ let body = try!(self.parse_block());
let hi = body.span.hi;
- return self.mk_expr(lo, hi, ExprWhileLet(pat, expr, body, opt_ident));
+ return Ok(self.mk_expr(lo, hi, ExprWhileLet(pat, expr, body, opt_ident)));
}
- pub fn parse_loop_expr(&mut self, opt_ident: Option<ast::Ident>) -> P<Expr> {
+ pub fn parse_loop_expr(&mut self, opt_ident: Option<ast::Ident>) -> PResult<P<Expr>> {
let lo = self.last_span.lo;
- let body = self.parse_block();
+ let body = try!(self.parse_block());
let hi = body.span.hi;
- self.mk_expr(lo, hi, ExprLoop(body, opt_ident))
+ Ok(self.mk_expr(lo, hi, ExprLoop(body, opt_ident)))
}
- fn parse_match_expr(&mut self) -> P<Expr> {
+ fn parse_match_expr(&mut self) -> PResult<P<Expr>> {
let lo = self.last_span.lo;
- let discriminant = self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL);
- self.commit_expr_expecting(&*discriminant, token::OpenDelim(token::Brace));
+ let discriminant = try!(self.parse_expr_res(RESTRICTION_NO_STRUCT_LITERAL));
+ try!(self.commit_expr_expecting(&*discriminant, token::OpenDelim(token::Brace)));
let mut arms: Vec<Arm> = Vec::new();
while self.token != token::CloseDelim(token::Brace) {
- arms.push(self.parse_arm());
+ arms.push(try!(self.parse_arm_nopanic()));
}
let hi = self.span.hi;
- self.bump();
- return self.mk_expr(lo, hi, ExprMatch(discriminant, arms, MatchSource::Normal));
+ try!(self.bump());
+ return Ok(self.mk_expr(lo, hi, ExprMatch(discriminant, arms, MatchSource::Normal)));
}
- pub fn parse_arm(&mut self) -> Arm {
+ pub fn parse_arm_nopanic(&mut self) -> PResult<Arm> {
let attrs = self.parse_outer_attributes();
- let pats = self.parse_pats();
+ let pats = try!(self.parse_pats());
let mut guard = None;
- if self.eat_keyword(keywords::If) {
- guard = Some(self.parse_expr());
+ if try!(self.eat_keyword(keywords::If)) {
+ guard = Some(try!(self.parse_expr_nopanic()));
}
- self.expect(&token::FatArrow);
- let expr = self.parse_expr_res(RESTRICTION_STMT_EXPR);
+ try!(self.expect(&token::FatArrow));
+ let expr = try!(self.parse_expr_res(RESTRICTION_STMT_EXPR));
let require_comma =
!classify::expr_is_simple_block(&*expr)
&& self.token != token::CloseDelim(token::Brace);
if require_comma {
- self.commit_expr(&*expr, &[token::Comma], &[token::CloseDelim(token::Brace)]);
+ try!(self.commit_expr(&*expr, &[token::Comma], &[token::CloseDelim(token::Brace)]));
} else {
- self.eat(&token::Comma);
+ try!(self.eat(&token::Comma));
}
- ast::Arm {
+ Ok(ast::Arm {
attrs: attrs,
pats: pats,
guard: guard,
body: expr,
- }
+ })
}
/// Parse an expression
- pub fn parse_expr(&mut self) -> P<Expr> {
+ pub fn parse_expr_nopanic(&mut self) -> PResult<P<Expr>> {
return self.parse_expr_res(UNRESTRICTED);
}
/// Parse an expression, subject to the given restrictions
- pub fn parse_expr_res(&mut self, r: Restrictions) -> P<Expr> {
+ pub fn parse_expr_res(&mut self, r: Restrictions) -> PResult<P<Expr>> {
let old = self.restrictions;
self.restrictions = r;
- let e = self.parse_assign_expr();
+ let e = try!(self.parse_assign_expr());
self.restrictions = old;
- return e;
+ return Ok(e);
}
/// Parse the RHS of a local variable declaration (e.g. '= 14;')
- fn parse_initializer(&mut self) -> Option<P<Expr>> {
+ fn parse_initializer(&mut self) -> PResult<Option<P<Expr>>> {
if self.check(&token::Eq) {
- self.bump();
- Some(self.parse_expr())
+ try!(self.bump());
+ Ok(Some(try!(self.parse_expr_nopanic())))
} else {
- None
+ Ok(None)
}
}
/// Parse patterns, separated by '|' s
- fn parse_pats(&mut self) -> Vec<P<Pat>> {
+ fn parse_pats(&mut self) -> PResult<Vec<P<Pat>>> {
let mut pats = Vec::new();
loop {
- pats.push(self.parse_pat());
- if self.check(&token::BinOp(token::Or)) { self.bump(); }
- else { return pats; }
+ pats.push(try!(self.parse_pat_nopanic()));
+ if self.check(&token::BinOp(token::Or)) { try!(self.bump()); }
+ else { return Ok(pats); }
};
}
+ fn parse_pat_tuple_elements(&mut self) -> PResult<Vec<P<Pat>>> {
+ let mut fields = vec![];
+ if !self.check(&token::CloseDelim(token::Paren)) {
+ fields.push(try!(self.parse_pat_nopanic()));
+ if self.look_ahead(1, |t| *t != token::CloseDelim(token::Paren)) {
+ while try!(self.eat(&token::Comma)) &&
+ !self.check(&token::CloseDelim(token::Paren)) {
+ fields.push(try!(self.parse_pat_nopanic()));
+ }
+ }
+ if fields.len() == 1 {
+ try!(self.expect(&token::Comma));
+ }
+ }
+ Ok(fields)
+ }
+
fn parse_pat_vec_elements(
&mut self,
- ) -> (Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>) {
+ ) -> PResult<(Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>)> {
let mut before = Vec::new();
let mut slice = None;
let mut after = Vec::new();
if first {
first = false;
} else {
- self.expect(&token::Comma);
+ try!(self.expect(&token::Comma));
if self.token == token::CloseDelim(token::Bracket)
&& (before_slice || after.len() != 0) {
if before_slice {
if self.check(&token::DotDot) {
- self.bump();
+ try!(self.bump());
if self.check(&token::Comma) ||
self.check(&token::CloseDelim(token::Bracket)) {
}
}
- let subpat = self.parse_pat();
+ let subpat = try!(self.parse_pat_nopanic());
if before_slice && self.check(&token::DotDot) {
- self.bump();
+ try!(self.bump());
slice = Some(subpat);
before_slice = false;
} else if before_slice {
}
}
- (before, slice, after)
+ Ok((before, slice, after))
}
/// Parse the fields of a struct-like pattern
- fn parse_pat_fields(&mut self) -> (Vec<codemap::Spanned<ast::FieldPat>> , bool) {
+ fn parse_pat_fields(&mut self) -> PResult<(Vec<codemap::Spanned<ast::FieldPat>> , bool)> {
let mut fields = Vec::new();
let mut etc = false;
let mut first = true;
if first {
first = false;
} else {
- self.expect(&token::Comma);
+ try!(self.expect(&token::Comma));
// accept trailing commas
if self.check(&token::CloseDelim(token::Brace)) { break }
}
let hi;
if self.check(&token::DotDot) {
- self.bump();
+ try!(self.bump());
if self.token != token::CloseDelim(token::Brace) {
let token_str = self.this_token_to_string();
- self.fatal(&format!("expected `{}`, found `{}`", "}",
- token_str))
+ return Err(self.fatal(&format!("expected `{}`, found `{}`", "}",
+ token_str)))
}
etc = true;
break;
// Check if a colon exists one ahead. This means we're parsing a fieldname.
let (subpat, fieldname, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) {
// Parsing a pattern of the form "fieldname: pat"
- let fieldname = self.parse_ident();
- self.bump();
- let pat = self.parse_pat();
+ let fieldname = try!(self.parse_ident());
+ try!(self.bump());
+ let pat = try!(self.parse_pat_nopanic());
hi = pat.span.hi;
(pat, fieldname, false)
} else {
// Parsing a pattern of the form "(box) (ref) (mut) fieldname"
- let is_box = self.eat_keyword(keywords::Box);
+ let is_box = try!(self.eat_keyword(keywords::Box));
let boxed_span_lo = self.span.lo;
- let is_ref = self.eat_keyword(keywords::Ref);
- let is_mut = self.eat_keyword(keywords::Mut);
- let fieldname = self.parse_ident();
+ let is_ref = try!(self.eat_keyword(keywords::Ref));
+ let is_mut = try!(self.eat_keyword(keywords::Mut));
+ let fieldname = try!(self.parse_ident());
hi = self.last_span.hi;
let bind_type = match (is_ref, is_mut) {
pat: subpat,
is_shorthand: is_shorthand }});
}
- return (fields, etc);
+ return Ok((fields, etc));
+ }
+
+ fn parse_pat_range_end(&mut self) -> PResult<P<Expr>> {
+ if self.is_path_start() {
+ let lo = self.span.lo;
+ let path = try!(self.parse_path(LifetimeAndTypesWithColons));
+ let hi = self.last_span.hi;
+ Ok(self.mk_expr(lo, hi, ExprPath(None, path)))
+ } else {
+ self.parse_literal_maybe_minus()
+ }
+ }
+
+ fn is_path_start(&self) -> bool {
+ (self.token == token::ModSep || self.token.is_ident() || self.token.is_path())
+ && !self.token.is_keyword(keywords::True) && !self.token.is_keyword(keywords::False)
}
/// Parse a pattern.
- pub fn parse_pat(&mut self) -> P<Pat> {
+ pub fn parse_pat_nopanic(&mut self) -> PResult<P<Pat>> {
maybe_whole!(self, NtPat);
let lo = self.span.lo;
- let mut hi;
let pat;
match self.token {
- // parse _
token::Underscore => {
- self.bump();
+ // Parse _
+ try!(self.bump());
pat = PatWild(PatWildSingle);
- hi = self.last_span.hi;
- return P(ast::Pat {
- id: ast::DUMMY_NODE_ID,
- node: pat,
- span: mk_sp(lo, hi)
- })
}
token::BinOp(token::And) | token::AndAnd => {
- // parse &pat and &mut pat
- let lo = self.span.lo;
- self.expect_and();
- let mutability = if self.eat_keyword(keywords::Mut) {
- ast::MutMutable
- } else {
- ast::MutImmutable
- };
- let sub = self.parse_pat();
- pat = PatRegion(sub, mutability);
- hi = self.last_span.hi;
- return P(ast::Pat {
- id: ast::DUMMY_NODE_ID,
- node: pat,
- span: mk_sp(lo, hi)
- })
+ // Parse &pat / &mut pat
+ try!(self.expect_and());
+ let mutbl = try!(self.parse_mutability());
+ let subpat = try!(self.parse_pat_nopanic());
+ pat = PatRegion(subpat, mutbl);
}
token::OpenDelim(token::Paren) => {
- // parse (pat,pat,pat,...) as tuple
- self.bump();
- if self.check(&token::CloseDelim(token::Paren)) {
- self.bump();
- pat = PatTup(vec![]);
- } else {
- let mut fields = vec!(self.parse_pat());
- if self.look_ahead(1, |t| *t != token::CloseDelim(token::Paren)) {
- while self.check(&token::Comma) {
- self.bump();
- if self.check(&token::CloseDelim(token::Paren)) { break; }
- fields.push(self.parse_pat());
- }
- }
- if fields.len() == 1 { self.expect(&token::Comma); }
- self.expect(&token::CloseDelim(token::Paren));
- pat = PatTup(fields);
- }
- hi = self.last_span.hi;
- return P(ast::Pat {
- id: ast::DUMMY_NODE_ID,
- node: pat,
- span: mk_sp(lo, hi)
- })
+ // Parse (pat,pat,pat,...) as tuple pattern
+ try!(self.bump());
+ let fields = try!(self.parse_pat_tuple_elements());
+ try!(self.expect(&token::CloseDelim(token::Paren)));
+ pat = PatTup(fields);
}
token::OpenDelim(token::Bracket) => {
- // parse [pat,pat,...] as vector pattern
- self.bump();
- let (before, slice, after) =
- self.parse_pat_vec_elements();
-
- self.expect(&token::CloseDelim(token::Bracket));
- pat = ast::PatVec(before, slice, after);
- hi = self.last_span.hi;
- return P(ast::Pat {
- id: ast::DUMMY_NODE_ID,
- node: pat,
- span: mk_sp(lo, hi)
- })
+ // Parse [pat,pat,...] as vector pattern
+ try!(self.bump());
+ let (before, slice, after) = try!(self.parse_pat_vec_elements());
+ try!(self.expect(&token::CloseDelim(token::Bracket)));
+ pat = PatVec(before, slice, after);
}
- _ => {}
- }
- // at this point, token != _, ~, &, &&, (, [
-
- if (!(self.token.is_ident() || self.token.is_path())
- && self.token != token::ModSep)
- || self.token.is_keyword(keywords::True)
- || self.token.is_keyword(keywords::False) {
- // Parse an expression pattern or exp ... exp.
- //
- // These expressions are limited to literals (possibly
- // preceded by unary-minus) or identifiers.
- let val = self.parse_literal_maybe_minus();
- if (self.check(&token::DotDotDot)) &&
- self.look_ahead(1, |t| {
- *t != token::Comma && *t != token::CloseDelim(token::Bracket)
- }) {
- self.bump();
- let end = if self.token.is_ident() || self.token.is_path() {
- let path = self.parse_path(LifetimeAndTypesWithColons);
- let hi = self.span.hi;
- self.mk_expr(lo, hi, ExprPath(None, path))
- } else {
- self.parse_literal_maybe_minus()
- };
- pat = PatRange(val, end);
- } else {
- pat = PatLit(val);
- }
- } else if self.eat_keyword(keywords::Mut) {
- pat = self.parse_pat_ident(BindByValue(MutMutable));
- } else if self.eat_keyword(keywords::Ref) {
- // parse ref pat
- let mutbl = self.parse_mutability();
- pat = self.parse_pat_ident(BindByRef(mutbl));
- } else if self.eat_keyword(keywords::Box) {
- // `box PAT`
- //
- // FIXME(#13910): Rename to `PatBox` and extend to full DST
- // support.
- let sub = self.parse_pat();
- pat = PatBox(sub);
- hi = self.last_span.hi;
- return P(ast::Pat {
- id: ast::DUMMY_NODE_ID,
- node: pat,
- span: mk_sp(lo, hi)
- })
- } else {
- let can_be_enum_or_struct = self.look_ahead(1, |t| {
- match *t {
- token::OpenDelim(_) | token::Lt | token::ModSep => true,
- _ => false,
- }
- });
-
- if self.look_ahead(1, |t| *t == token::DotDotDot) &&
- self.look_ahead(2, |t| {
- *t != token::Comma && *t != token::CloseDelim(token::Bracket)
- }) {
- let start = self.parse_expr_res(RESTRICTION_NO_BAR_OP);
- self.eat(&token::DotDotDot);
- let end = self.parse_expr_res(RESTRICTION_NO_BAR_OP);
- pat = PatRange(start, end);
- } else if self.token.is_plain_ident() && !can_be_enum_or_struct {
- let id = self.parse_ident();
- let id_span = self.last_span;
- let pth1 = codemap::Spanned{span:id_span, node: id};
- if self.eat(&token::Not) {
- // macro invocation
- let delim = self.expect_open_delim();
- let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
- seq_sep_none(),
- |p| p.parse_token_tree());
-
- let mac = MacInvocTT(ident_to_path(id_span,id), tts, EMPTY_CTXT);
- pat = ast::PatMac(codemap::Spanned {node: mac, span: self.span});
- } else {
- let sub = if self.eat(&token::At) {
- // parse foo @ pat
- Some(self.parse_pat())
+ _ => {
+ // At this point, token != _, &, &&, (, [
+ if try!(self.eat_keyword(keywords::Mut)) {
+ // Parse mut ident @ pat
+ pat = try!(self.parse_pat_ident(BindByValue(MutMutable)));
+ } else if try!(self.eat_keyword(keywords::Ref)) {
+ // Parse ref ident @ pat / ref mut ident @ pat
+ let mutbl = try!(self.parse_mutability());
+ pat = try!(self.parse_pat_ident(BindByRef(mutbl)));
+ } else if try!(self.eat_keyword(keywords::Box)) {
+ // Parse box pat
+ let subpat = try!(self.parse_pat_nopanic());
+ pat = PatBox(subpat);
+ } else if self.is_path_start() {
+ // Parse pattern starting with a path
+ if self.token.is_plain_ident() && self.look_ahead(1, |t| *t != token::DotDotDot &&
+ *t != token::OpenDelim(token::Brace) &&
+ *t != token::OpenDelim(token::Paren) &&
+ // Contrary to its definition, a plain ident can be followed by :: in macros
+ *t != token::ModSep) {
+ // Plain idents have some extra abilities here compared to general paths
+ if self.look_ahead(1, |t| *t == token::Not) {
+ // Parse macro invocation
+ let ident = try!(self.parse_ident());
+ let ident_span = self.last_span;
+ let path = ident_to_path(ident_span, ident);
+ try!(self.bump());
+ let delim = try!(self.expect_open_delim());
+ let tts = try!(self.parse_seq_to_end(&token::CloseDelim(delim),
+ seq_sep_none(), |p| p.parse_token_tree()));
+ let mac = MacInvocTT(path, tts, EMPTY_CTXT);
+ pat = PatMac(codemap::Spanned {node: mac, span: self.span});
} else {
- // or just foo
- None
- };
- pat = PatIdent(BindByValue(MutImmutable), pth1, sub);
- }
- } else if self.look_ahead(1, |t| *t == token::Lt) {
- self.bump();
- self.unexpected()
- } else {
- // parse an enum pat
- let enum_path = self.parse_path(LifetimeAndTypesWithColons);
- match self.token {
- token::OpenDelim(token::Brace) => {
- self.bump();
- let (fields, etc) =
- self.parse_pat_fields();
- self.bump();
- pat = PatStruct(enum_path, fields, etc);
+ // Parse ident @ pat
+ // This can give false positives and parse nullary enums,
+ // they are dealt with later in resolve
+ pat = try!(self.parse_pat_ident(BindByValue(MutImmutable)));
}
- token::DotDotDot => {
+ } else {
+ // Parse as a general path
+ let path = try!(self.parse_path(LifetimeAndTypesWithColons));
+ match self.token {
+ token::DotDotDot => {
+ // Parse range
let hi = self.last_span.hi;
- let start = self.mk_expr(lo, hi, ExprPath(None, enum_path));
- self.eat(&token::DotDotDot);
- let end = if self.token.is_ident() || self.token.is_path() {
- let path = self.parse_path(LifetimeAndTypesWithColons);
- let hi = self.span.hi;
- self.mk_expr(lo, hi, ExprPath(None, path))
+ let begin = self.mk_expr(lo, hi, ExprPath(None, path));
+ try!(self.bump());
+ let end = try!(self.parse_pat_range_end());
+ pat = PatRange(begin, end);
+ }
+ token::OpenDelim(token::Brace) => {
+ // Parse struct pattern
+ try!(self.bump());
+ let (fields, etc) = try!(self.parse_pat_fields());
+ try!(self.bump());
+ pat = PatStruct(path, fields, etc);
+ }
+ token::OpenDelim(token::Paren) => {
+ // Parse tuple struct or enum pattern
+ if self.look_ahead(1, |t| *t == token::DotDot) {
+ // This is a "top constructor only" pat
+ try!(self.bump());
+ try!(self.bump());
+ try!(self.expect(&token::CloseDelim(token::Paren)));
+ pat = PatEnum(path, None);
} else {
- self.parse_literal_maybe_minus()
- };
- pat = PatRange(start, end);
- }
- _ => {
- let mut args: Vec<P<Pat>> = Vec::new();
- match self.token {
- token::OpenDelim(token::Paren) => {
- let is_dotdot = self.look_ahead(1, |t| {
- match *t {
- token::DotDot => true,
- _ => false,
- }
- });
- if is_dotdot {
- // This is a "top constructor only" pat
- self.bump();
- self.bump();
- self.expect(&token::CloseDelim(token::Paren));
- pat = PatEnum(enum_path, None);
- } else {
- args = self.parse_enum_variant_seq(
+ let args = try!(self.parse_enum_variant_seq(
&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
seq_sep_trailing_allowed(token::Comma),
- |p| p.parse_pat()
- );
- pat = PatEnum(enum_path, Some(args));
- }
- },
- _ => {
- if !enum_path.global &&
- enum_path.segments.len() == 1 &&
- enum_path.segments[0].parameters.is_empty()
- {
- // NB: If enum_path is a single identifier,
- // this should not be reachable due to special
- // handling further above.
- //
- // However, previously a PatIdent got emitted
- // here, so we preserve the branch just in case.
- //
- // A rewrite of the logic in this function
- // would probably make this obvious.
- self.span_bug(enum_path.span,
- "ident only path should have been covered already");
- } else {
- pat = PatEnum(enum_path, Some(args));
- }
- }
+ |p| p.parse_pat_nopanic()));
+ pat = PatEnum(path, Some(args));
}
+ }
+ _ => {
+ // Parse nullary enum
+ pat = PatEnum(path, Some(vec![]));
+ }
}
}
+ } else {
+ // Try to parse everything else as literal with optional minus
+ let begin = try!(self.parse_literal_maybe_minus());
+ if try!(self.eat(&token::DotDotDot)) {
+ let end = try!(self.parse_pat_range_end());
+ pat = PatRange(begin, end);
+ } else {
+ pat = PatLit(begin);
+ }
}
+ }
}
- hi = self.last_span.hi;
- P(ast::Pat {
+
+ let hi = self.last_span.hi;
+ Ok(P(ast::Pat {
id: ast::DUMMY_NODE_ID,
node: pat,
span: mk_sp(lo, hi),
- })
+ }))
}
/// Parse ident or ident @ pat
/// error message when parsing mistakes like ref foo(a,b)
fn parse_pat_ident(&mut self,
binding_mode: ast::BindingMode)
- -> ast::Pat_ {
+ -> PResult<ast::Pat_> {
if !self.token.is_plain_ident() {
let span = self.span;
let tok_str = self.this_token_to_string();
- self.span_fatal(span,
- &format!("expected identifier, found `{}`", tok_str));
+ return Err(self.span_fatal(span,
+ &format!("expected identifier, found `{}`", tok_str)))
}
- let ident = self.parse_ident();
+ let ident = try!(self.parse_ident());
let last_span = self.last_span;
let name = codemap::Spanned{span: last_span, node: ident};
- let sub = if self.eat(&token::At) {
- Some(self.parse_pat())
+ let sub = if try!(self.eat(&token::At)) {
+ Some(try!(self.parse_pat_nopanic()))
} else {
None
};
// will direct us over to parse_enum_variant()
if self.token == token::OpenDelim(token::Paren) {
let last_span = self.last_span;
- self.span_fatal(
+ return Err(self.span_fatal(
last_span,
- "expected identifier, found enum pattern");
+ "expected identifier, found enum pattern"))
}
- PatIdent(binding_mode, name, sub)
+ Ok(PatIdent(binding_mode, name, sub))
}
/// Parse a local variable declaration
- fn parse_local(&mut self) -> P<Local> {
+ fn parse_local(&mut self) -> PResult<P<Local>> {
let lo = self.span.lo;
- let pat = self.parse_pat();
+ let pat = try!(self.parse_pat_nopanic());
let mut ty = None;
- if self.eat(&token::Colon) {
- ty = Some(self.parse_ty_sum());
+ if try!(self.eat(&token::Colon)) {
+ ty = Some(try!(self.parse_ty_sum()));
}
- let init = self.parse_initializer();
- P(ast::Local {
+ let init = try!(self.parse_initializer());
+ Ok(P(ast::Local {
ty: ty,
pat: pat,
init: init,
id: ast::DUMMY_NODE_ID,
span: mk_sp(lo, self.last_span.hi),
source: LocalLet,
- })
+ }))
}
/// Parse a "let" stmt
- fn parse_let(&mut self) -> P<Decl> {
+ fn parse_let(&mut self) -> PResult<P<Decl>> {
let lo = self.span.lo;
- let local = self.parse_local();
- P(spanned(lo, self.last_span.hi, DeclLocal(local)))
+ let local = try!(self.parse_local());
+ Ok(P(spanned(lo, self.last_span.hi, DeclLocal(local))))
}
/// Parse a structure field
fn parse_name_and_ty(&mut self, pr: Visibility,
- attrs: Vec<Attribute> ) -> StructField {
+ attrs: Vec<Attribute> ) -> PResult<StructField> {
let lo = self.span.lo;
if !self.token.is_plain_ident() {
- self.fatal("expected ident");
+ return Err(self.fatal("expected ident"));
}
- let name = self.parse_ident();
- self.expect(&token::Colon);
- let ty = self.parse_ty_sum();
- spanned(lo, self.last_span.hi, ast::StructField_ {
+ let name = try!(self.parse_ident());
+ try!(self.expect(&token::Colon));
+ let ty = try!(self.parse_ty_sum());
+ Ok(spanned(lo, self.last_span.hi, ast::StructField_ {
kind: NamedField(name, pr),
id: ast::DUMMY_NODE_ID,
ty: ty,
attrs: attrs,
- })
+ }))
}
/// Emit an expected item after attributes error.
}
/// Parse a statement. may include decl.
- pub fn parse_stmt(&mut self) -> Option<P<Stmt>> {
- self.parse_stmt_().map(P)
+ pub fn parse_stmt_nopanic(&mut self) -> PResult<Option<P<Stmt>>> {
+ Ok(try!(self.parse_stmt_()).map(P))
}
- fn parse_stmt_(&mut self) -> Option<Stmt> {
+ fn parse_stmt_(&mut self) -> PResult<Option<Stmt>> {
maybe_whole!(Some deref self, NtStmt);
fn check_expected_item(p: &mut Parser, attrs: &[Attribute]) {
let lo = self.span.lo;
let attrs = self.parse_outer_attributes();
- Some(if self.check_keyword(keywords::Let) {
+ Ok(Some(if self.check_keyword(keywords::Let) {
check_expected_item(self, &attrs);
- self.expect_keyword(keywords::Let);
- let decl = self.parse_let();
+ try!(self.expect_keyword(keywords::Let));
+ let decl = try!(self.parse_let());
spanned(lo, decl.span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID))
} else if self.token.is_ident()
&& !self.token.is_any_keyword()
// Potential trouble: if we allow macros with paths instead of
// idents, we'd need to look ahead past the whole path here...
- let pth = self.parse_path(NoTypesAllowed);
- self.bump();
+ let pth = try!(self.parse_path(NoTypesAllowed));
+ try!(self.bump());
let id = match self.token {
token::OpenDelim(_) => token::special_idents::invalid, // no special identifier
- _ => self.parse_ident(),
+ _ => try!(self.parse_ident()),
};
// check that we're pointing at delimiters (need to check
""
};
let tok_str = self.this_token_to_string();
- self.fatal(&format!("expected {}`(` or `{{`, found `{}`",
+ return Err(self.fatal(&format!("expected {}`(` or `{{`, found `{}`",
ident_str,
- tok_str))
+ tok_str)))
},
};
- let tts = self.parse_unspanned_seq(
+ let tts = try!(self.parse_unspanned_seq(
&token::OpenDelim(delim),
&token::CloseDelim(delim),
seq_sep_none(),
|p| p.parse_token_tree()
- );
+ ));
let hi = self.span.hi;
let style = if delim == token::Brace {
//
// Require a semicolon or braces.
if style != MacStmtWithBraces {
- if !self.eat(&token::Semi) {
+ if !try!(self.eat(&token::Semi)) {
let last_span = self.last_span;
self.span_err(last_span,
"macros that expand to items must \
ast::DUMMY_NODE_ID))
}
} else {
- match self.parse_item_(attrs, false) {
+ match try!(self.parse_item_(attrs, false)) {
Some(i) => {
let hi = i.span.hi;
let decl = P(spanned(lo, hi, DeclItem(i)));
None => {
// Do not attempt to parse an expression if we're done here.
if self.token == token::Semi {
- self.bump();
- return None;
+ try!(self.bump());
+ return Ok(None);
}
if self.token == token::CloseDelim(token::Brace) {
- return None;
+ return Ok(None);
}
// Remainder are line-expr stmts.
- let e = self.parse_expr_res(RESTRICTION_STMT_EXPR);
+ let e = try!(self.parse_expr_res(RESTRICTION_STMT_EXPR));
spanned(lo, e.span.hi, StmtExpr(e, ast::DUMMY_NODE_ID))
}
}
- })
+ }))
}
/// Is this expression a successfully-parsed statement?
}
/// Parse a block. No inner attrs are allowed.
- pub fn parse_block(&mut self) -> P<Block> {
+ pub fn parse_block(&mut self) -> PResult<P<Block>> {
maybe_whole!(no_clone self, NtBlock);
let lo = self.span.lo;
- if !self.eat(&token::OpenDelim(token::Brace)) {
+ if !try!(self.eat(&token::OpenDelim(token::Brace))) {
let sp = self.span;
let tok = self.this_token_to_string();
- self.span_fatal_help(sp,
+ return Err(self.span_fatal_help(sp,
&format!("expected `{{`, found `{}`", tok),
- "place this code inside a block");
+ "place this code inside a block"));
}
self.parse_block_tail(lo, DefaultBlock)
}
/// Parse a block. Inner attrs are allowed.
- fn parse_inner_attrs_and_block(&mut self) -> (Vec<Attribute>, P<Block>) {
+ fn parse_inner_attrs_and_block(&mut self) -> PResult<(Vec<Attribute>, P<Block>)> {
maybe_whole!(pair_empty self, NtBlock);
let lo = self.span.lo;
- self.expect(&token::OpenDelim(token::Brace));
- (self.parse_inner_attributes(),
- self.parse_block_tail(lo, DefaultBlock))
+ try!(self.expect(&token::OpenDelim(token::Brace)));
+ Ok((self.parse_inner_attributes(),
+ try!(self.parse_block_tail(lo, DefaultBlock))))
}
/// Parse the rest of a block expression or function body
/// Precondition: already parsed the '{'.
- fn parse_block_tail(&mut self, lo: BytePos, s: BlockCheckMode) -> P<Block> {
+ fn parse_block_tail(&mut self, lo: BytePos, s: BlockCheckMode) -> PResult<P<Block>> {
let mut stmts = vec![];
let mut expr = None;
- while !self.eat(&token::CloseDelim(token::Brace)) {
- let Spanned {node, span} = if let Some(s) = self.parse_stmt_() {
+ while !try!(self.eat(&token::CloseDelim(token::Brace))) {
+ let Spanned {node, span} = if let Some(s) = try!(self.parse_stmt_()) {
s
} else {
// Found only `;` or `}`.
};
match node {
StmtExpr(e, _) => {
- self.handle_expression_like_statement(e, span, &mut stmts, &mut expr);
+ try!(self.handle_expression_like_statement(e, span, &mut stmts, &mut expr));
}
StmtMac(mac, MacStmtWithoutBraces) => {
// statement macro without braces; might be an
node: StmtMac(mac, MacStmtWithSemicolon),
span: span,
}));
- self.bump();
+ try!(self.bump());
}
_ => {
let e = self.mk_mac_expr(span.lo, span.hi,
mac.and_then(|m| m.node));
- let e = self.parse_dot_or_call_expr_with(e);
- let e = self.parse_more_binops(e, 0);
- let e = self.parse_assign_expr_with(e);
- self.handle_expression_like_statement(
+ let e = try!(self.parse_dot_or_call_expr_with(e));
+ let e = try!(self.parse_more_binops(e, 0));
+ let e = try!(self.parse_assign_expr_with(e));
+ try!(self.handle_expression_like_statement(
e,
span,
&mut stmts,
- &mut expr);
+ &mut expr));
}
}
}
node: StmtMac(m, MacStmtWithSemicolon),
span: span,
}));
- self.bump();
+ try!(self.bump());
}
token::CloseDelim(token::Brace) => {
// if a block ends in `m!(arg)` without
}
_ => { // all other kinds of statements:
if classify::stmt_ends_with_semi(&node) {
- self.commit_stmt_expecting(token::Semi);
+ try!(self.commit_stmt_expecting(token::Semi));
}
stmts.push(P(Spanned {
}
}
- P(ast::Block {
+ Ok(P(ast::Block {
stmts: stmts,
expr: expr,
id: ast::DUMMY_NODE_ID,
rules: s,
span: mk_sp(lo, self.last_span.hi),
- })
+ }))
}
fn handle_expression_like_statement(
e: P<Expr>,
span: Span,
stmts: &mut Vec<P<Stmt>>,
- last_block_expr: &mut Option<P<Expr>>) {
+ last_block_expr: &mut Option<P<Expr>>) -> PResult<()> {
// expression without semicolon
if classify::expr_requires_semi_to_be_stmt(&*e) {
// Just check for errors and recover; do not eat semicolon yet.
- self.commit_stmt(&[],
- &[token::Semi, token::CloseDelim(token::Brace)]);
+ try!(self.commit_stmt(&[],
+ &[token::Semi, token::CloseDelim(token::Brace)]));
}
match self.token {
token::Semi => {
- self.bump();
+ try!(self.bump());
let span_with_semi = Span {
lo: span.lo,
hi: self.last_span.hi,
}));
}
}
+ Ok(())
}
// Parses a sequence of bounds if a `:` is found,
// otherwise returns empty list.
fn parse_colon_then_ty_param_bounds(&mut self,
mode: BoundParsingMode)
- -> OwnedSlice<TyParamBound>
+ -> PResult<OwnedSlice<TyParamBound>>
{
- if !self.eat(&token::Colon) {
- OwnedSlice::empty()
+ if !try!(self.eat(&token::Colon)) {
+ Ok(OwnedSlice::empty())
} else {
self.parse_ty_param_bounds(mode)
}
// and bound = 'region | trait_ref
fn parse_ty_param_bounds(&mut self,
mode: BoundParsingMode)
- -> OwnedSlice<TyParamBound>
+ -> PResult<OwnedSlice<TyParamBound>>
{
let mut result = vec!();
loop {
let question_span = self.span;
- let ate_question = self.eat(&token::Question);
+ let ate_question = try!(self.eat(&token::Question));
match self.token {
token::Lifetime(lifetime) => {
if ate_question {
span: self.span,
name: lifetime.name
}));
- self.bump();
+ try!(self.bump());
}
token::ModSep | token::Ident(..) => {
- let poly_trait_ref = self.parse_poly_trait_ref();
+ let poly_trait_ref = try!(self.parse_poly_trait_ref());
let modifier = if ate_question {
if mode == BoundParsingMode::Modified {
TraitBoundModifier::Maybe
_ => break,
}
- if !self.eat(&token::BinOp(token::Plus)) {
+ if !try!(self.eat(&token::BinOp(token::Plus))) {
break;
}
}
- return OwnedSlice::from_vec(result);
+ return Ok(OwnedSlice::from_vec(result));
}
/// Matches typaram = IDENT (`?` unbound)? optbounds ( EQ ty )?
- fn parse_ty_param(&mut self) -> TyParam {
+ fn parse_ty_param(&mut self) -> PResult<TyParam> {
let span = self.span;
- let ident = self.parse_ident();
+ let ident = try!(self.parse_ident());
- let bounds = self.parse_colon_then_ty_param_bounds(BoundParsingMode::Modified);
+ let bounds = try!(self.parse_colon_then_ty_param_bounds(BoundParsingMode::Modified));
let default = if self.check(&token::Eq) {
- self.bump();
- Some(self.parse_ty_sum())
+ try!(self.bump());
+ Some(try!(self.parse_ty_sum()))
} else {
None
};
- TyParam {
+ Ok(TyParam {
ident: ident,
id: ast::DUMMY_NODE_ID,
bounds: bounds,
default: default,
span: span,
- }
+ })
}
/// Parse a set of optional generic type parameter declarations. Where
/// matches generics = ( ) | ( < > ) | ( < typaramseq ( , )? > ) | ( < lifetimes ( , )? > )
/// | ( < lifetimes , typaramseq ( , )? > )
/// where typaramseq = ( typaram ) | ( typaram , typaramseq )
- pub fn parse_generics(&mut self) -> ast::Generics {
- if self.eat(&token::Lt) {
- let lifetime_defs = self.parse_lifetime_defs();
+ pub fn parse_generics(&mut self) -> PResult<ast::Generics> {
+ if try!(self.eat(&token::Lt)) {
+ let lifetime_defs = try!(self.parse_lifetime_defs());
let mut seen_default = false;
- let ty_params = self.parse_seq_to_gt(Some(token::Comma), |p| {
- p.forbid_lifetime();
- let ty_param = p.parse_ty_param();
+ let ty_params = try!(self.parse_seq_to_gt(Some(token::Comma), |p| {
+ try!(p.forbid_lifetime());
+ let ty_param = try!(p.parse_ty_param());
if ty_param.default.is_some() {
seen_default = true;
} else if seen_default {
p.span_err(last_span,
"type parameters with a default must be trailing");
}
- ty_param
- });
- ast::Generics {
+ Ok(ty_param)
+ }));
+ Ok(ast::Generics {
lifetimes: lifetime_defs,
ty_params: ty_params,
where_clause: WhereClause {
id: ast::DUMMY_NODE_ID,
predicates: Vec::new(),
}
- }
+ })
} else {
- ast_util::empty_generics()
+ Ok(ast_util::empty_generics())
}
}
- fn parse_generic_values_after_lt(&mut self)
- -> (Vec<ast::Lifetime>, Vec<P<Ty>>, Vec<P<TypeBinding>>) {
- let lifetimes = self.parse_lifetimes(token::Comma);
+ fn parse_generic_values_after_lt(&mut self) -> PResult<(Vec<ast::Lifetime>,
+ Vec<P<Ty>>,
+ Vec<P<TypeBinding>>)> {
+ let lifetimes = try!(self.parse_lifetimes(token::Comma));
// First parse types.
- let (types, returned) = self.parse_seq_to_gt_or_return(
+ let (types, returned) = try!(self.parse_seq_to_gt_or_return(
Some(token::Comma),
|p| {
- p.forbid_lifetime();
+ try!(p.forbid_lifetime());
if p.look_ahead(1, |t| t == &token::Eq) {
- None
+ Ok(None)
} else {
- Some(p.parse_ty_sum())
+ Ok(Some(try!(p.parse_ty_sum())))
}
}
- );
+ ));
// If we found the `>`, don't continue.
if !returned {
- return (lifetimes, types.into_vec(), Vec::new());
+ return Ok((lifetimes, types.into_vec(), Vec::new()));
}
// Then parse type bindings.
- let bindings = self.parse_seq_to_gt(
+ let bindings = try!(self.parse_seq_to_gt(
Some(token::Comma),
|p| {
- p.forbid_lifetime();
+ try!(p.forbid_lifetime());
let lo = p.span.lo;
- let ident = p.parse_ident();
- let found_eq = p.eat(&token::Eq);
+ let ident = try!(p.parse_ident());
+ let found_eq = try!(p.eat(&token::Eq));
if !found_eq {
let span = p.span;
p.span_warn(span, "whoops, no =?");
}
- let ty = p.parse_ty();
+ let ty = try!(p.parse_ty_nopanic());
let hi = p.span.hi;
let span = mk_sp(lo, hi);
- return P(TypeBinding{id: ast::DUMMY_NODE_ID,
+ return Ok(P(TypeBinding{id: ast::DUMMY_NODE_ID,
ident: ident,
ty: ty,
span: span,
- });
+ }));
}
- );
- (lifetimes, types.into_vec(), bindings.into_vec())
+ ));
+ Ok((lifetimes, types.into_vec(), bindings.into_vec()))
}
- fn forbid_lifetime(&mut self) {
+ fn forbid_lifetime(&mut self) -> PResult<()> {
if self.token.is_lifetime() {
let span = self.span;
- self.span_fatal(span, "lifetime parameters must be declared \
- prior to type parameters");
+ return Err(self.span_fatal(span, "lifetime parameters must be declared \
+ prior to type parameters"))
}
+ Ok(())
}
/// Parses an optional `where` clause and places it in `generics`.
/// ```
/// where T : Trait<U, V> + 'b, 'a : 'b
/// ```
- fn parse_where_clause(&mut self) -> ast::WhereClause {
+ fn parse_where_clause(&mut self) -> PResult<ast::WhereClause> {
let mut where_clause = WhereClause {
id: ast::DUMMY_NODE_ID,
predicates: Vec::new(),
};
- if !self.eat_keyword(keywords::Where) {
- return where_clause;
+ if !try!(self.eat_keyword(keywords::Where)) {
+ return Ok(where_clause);
}
let mut parsed_something = false;
token::Lifetime(..) => {
let bounded_lifetime =
- self.parse_lifetime();
+ try!(self.parse_lifetime());
- self.eat(&token::Colon);
+ try!(self.eat(&token::Colon));
let bounds =
- self.parse_lifetimes(token::BinOp(token::Plus));
+ try!(self.parse_lifetimes(token::BinOp(token::Plus)));
let hi = self.span.hi;
let span = mk_sp(lo, hi);
}
_ => {
- let bound_lifetimes = if self.eat_keyword(keywords::For) {
+ let bound_lifetimes = if try!(self.eat_keyword(keywords::For) ){
// Higher ranked constraint.
- self.expect(&token::Lt);
- let lifetime_defs = self.parse_lifetime_defs();
- self.expect_gt();
+ try!(self.expect(&token::Lt));
+ let lifetime_defs = try!(self.parse_lifetime_defs());
+ try!(self.expect_gt());
lifetime_defs
} else {
vec![]
};
- let bounded_ty = self.parse_ty();
+ let bounded_ty = try!(self.parse_ty_nopanic());
- if self.eat(&token::Colon) {
- let bounds = self.parse_ty_param_bounds(BoundParsingMode::Bare);
+ if try!(self.eat(&token::Colon) ){
+ let bounds = try!(self.parse_ty_param_bounds(BoundParsingMode::Bare));
let hi = self.span.hi;
let span = mk_sp(lo, hi);
}));
parsed_something = true;
- } else if self.eat(&token::Eq) {
- // let ty = self.parse_ty();
+ } else if try!(self.eat(&token::Eq) ){
+ // let ty = try!(self.parse_ty_nopanic());
let hi = self.span.hi;
let span = mk_sp(lo, hi);
// where_clause.predicates.push(
}
};
- if !self.eat(&token::Comma) {
+ if !try!(self.eat(&token::Comma) ){
break
}
}
in it");
}
- where_clause
+ Ok(where_clause)
}
fn parse_fn_args(&mut self, named_args: bool, allow_variadic: bool)
- -> (Vec<Arg> , bool) {
+ -> PResult<(Vec<Arg> , bool)> {
let sp = self.span;
let mut args: Vec<Option<Arg>> =
- self.parse_unspanned_seq(
+ try!(self.parse_unspanned_seq(
&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
seq_sep_trailing_allowed(token::Comma),
|p| {
if p.token == token::DotDotDot {
- p.bump();
+ try!(p.bump());
if allow_variadic {
if p.token != token::CloseDelim(token::Paren) {
let span = p.span;
- p.span_fatal(span,
- "`...` must be last in argument list for variadic function");
+ return Err(p.span_fatal(span,
+ "`...` must be last in argument list for variadic function"))
}
} else {
let span = p.span;
- p.span_fatal(span,
- "only foreign functions are allowed to be variadic");
+ return Err(p.span_fatal(span,
+ "only foreign functions are allowed to be variadic"))
}
- None
+ Ok(None)
} else {
- Some(p.parse_arg_general(named_args))
+ Ok(Some(try!(p.parse_arg_general(named_args))))
}
}
- );
+ ));
let variadic = match args.pop() {
Some(None) => true,
let args = args.into_iter().map(|x| x.unwrap()).collect();
- (args, variadic)
+ Ok((args, variadic))
}
/// Parse the argument list and result type of a function declaration
- pub fn parse_fn_decl(&mut self, allow_variadic: bool) -> P<FnDecl> {
+ pub fn parse_fn_decl(&mut self, allow_variadic: bool) -> PResult<P<FnDecl>> {
- let (args, variadic) = self.parse_fn_args(true, allow_variadic);
- let ret_ty = self.parse_ret_ty();
+ let (args, variadic) = try!(self.parse_fn_args(true, allow_variadic));
+ let ret_ty = try!(self.parse_ret_ty());
- P(FnDecl {
+ Ok(P(FnDecl {
inputs: args,
output: ret_ty,
variadic: variadic
- })
+ }))
}
fn is_self_ident(&mut self) -> bool {
}
}
- fn expect_self_ident(&mut self) -> ast::Ident {
+ fn expect_self_ident(&mut self) -> PResult<ast::Ident> {
match self.token {
token::Ident(id, token::Plain) if id.name == special_idents::self_.name => {
- self.bump();
- id
+ try!(self.bump());
+ Ok(id)
},
_ => {
let token_str = self.this_token_to_string();
- self.fatal(&format!("expected `self`, found `{}`",
- token_str))
+ return Err(self.fatal(&format!("expected `self`, found `{}`",
+ token_str)))
}
}
}
}
}
- fn expect_self_type_ident(&mut self) -> ast::Ident {
+ fn expect_self_type_ident(&mut self) -> PResult<ast::Ident> {
match self.token {
token::Ident(id, token::Plain) if id.name == special_idents::type_self.name => {
- self.bump();
- id
+ try!(self.bump());
+ Ok(id)
},
_ => {
let token_str = self.this_token_to_string();
- self.fatal(&format!("expected `Self`, found `{}`",
- token_str))
+ Err(self.fatal(&format!("expected `Self`, found `{}`",
+ token_str)))
}
}
}
/// Parse the argument list and result type of a function
/// that may have a self type.
- fn parse_fn_decl_with_self<F>(&mut self, parse_arg_fn: F) -> (ExplicitSelf, P<FnDecl>) where
- F: FnMut(&mut Parser) -> Arg,
+ fn parse_fn_decl_with_self<F>(&mut self,
+ parse_arg_fn: F) -> PResult<(ExplicitSelf, P<FnDecl>)> where
+ F: FnMut(&mut Parser) -> PResult<Arg>,
{
fn maybe_parse_borrowed_explicit_self(this: &mut Parser)
- -> ast::ExplicitSelf_ {
+ -> PResult<ast::ExplicitSelf_> {
// The following things are possible to see here:
//
// fn(&mut self)
// We already know that the current token is `&`.
if this.look_ahead(1, |t| t.is_keyword(keywords::SelfValue)) {
- this.bump();
- SelfRegion(None, MutImmutable, this.expect_self_ident())
+ try!(this.bump());
+ Ok(SelfRegion(None, MutImmutable, try!(this.expect_self_ident())))
} else if this.look_ahead(1, |t| t.is_mutability()) &&
this.look_ahead(2, |t| t.is_keyword(keywords::SelfValue)) {
- this.bump();
- let mutability = this.parse_mutability();
- SelfRegion(None, mutability, this.expect_self_ident())
+ try!(this.bump());
+ let mutability = try!(this.parse_mutability());
+ Ok(SelfRegion(None, mutability, try!(this.expect_self_ident())))
} else if this.look_ahead(1, |t| t.is_lifetime()) &&
this.look_ahead(2, |t| t.is_keyword(keywords::SelfValue)) {
- this.bump();
- let lifetime = this.parse_lifetime();
- SelfRegion(Some(lifetime), MutImmutable, this.expect_self_ident())
+ try!(this.bump());
+ let lifetime = try!(this.parse_lifetime());
+ Ok(SelfRegion(Some(lifetime), MutImmutable, try!(this.expect_self_ident())))
} else if this.look_ahead(1, |t| t.is_lifetime()) &&
this.look_ahead(2, |t| t.is_mutability()) &&
this.look_ahead(3, |t| t.is_keyword(keywords::SelfValue)) {
- this.bump();
- let lifetime = this.parse_lifetime();
- let mutability = this.parse_mutability();
- SelfRegion(Some(lifetime), mutability, this.expect_self_ident())
+ try!(this.bump());
+ let lifetime = try!(this.parse_lifetime());
+ let mutability = try!(this.parse_mutability());
+ Ok(SelfRegion(Some(lifetime), mutability, try!(this.expect_self_ident())))
} else {
- SelfStatic
+ Ok(SelfStatic)
}
}
- self.expect(&token::OpenDelim(token::Paren));
+ try!(self.expect(&token::OpenDelim(token::Paren)));
// A bit of complexity and lookahead is needed here in order to be
// backwards compatible.
let mut mutbl_self = MutImmutable;
let explicit_self = match self.token {
token::BinOp(token::And) => {
- let eself = maybe_parse_borrowed_explicit_self(self);
+ let eself = try!(maybe_parse_borrowed_explicit_self(self));
self_ident_lo = self.last_span.lo;
self_ident_hi = self.last_span.hi;
eself
token::BinOp(token::Star) => {
// Possibly "*self" or "*mut self" -- not supported. Try to avoid
// emitting cryptic "unexpected token" errors.
- self.bump();
+ try!(self.bump());
let _mutability = if self.token.is_mutability() {
- self.parse_mutability()
+ try!(self.parse_mutability())
} else {
MutImmutable
};
if self.is_self_ident() {
let span = self.span;
self.span_err(span, "cannot pass self by unsafe pointer");
- self.bump();
+ try!(self.bump());
}
// error case, making bogus self ident:
SelfValue(special_idents::self_)
}
token::Ident(..) => {
if self.is_self_ident() {
- let self_ident = self.expect_self_ident();
+ let self_ident = try!(self.expect_self_ident());
// Determine whether this is the fully explicit form, `self:
// TYPE`.
- if self.eat(&token::Colon) {
- SelfExplicit(self.parse_ty_sum(), self_ident)
+ if try!(self.eat(&token::Colon) ){
+ SelfExplicit(try!(self.parse_ty_sum()), self_ident)
} else {
SelfValue(self_ident)
}
} else if self.token.is_mutability() &&
self.look_ahead(1, |t| t.is_keyword(keywords::SelfValue)) {
- mutbl_self = self.parse_mutability();
- let self_ident = self.expect_self_ident();
+ mutbl_self = try!(self.parse_mutability());
+ let self_ident = try!(self.expect_self_ident());
// Determine whether this is the fully explicit form,
// `self: TYPE`.
- if self.eat(&token::Colon) {
- SelfExplicit(self.parse_ty_sum(), self_ident)
+ if try!(self.eat(&token::Colon) ){
+ SelfExplicit(try!(self.parse_ty_sum()), self_ident)
} else {
SelfValue(self_ident)
}
// If we parsed a self type, expect a comma before the argument list.
match self.token {
token::Comma => {
- self.bump();
+ try!(self.bump());
let sep = seq_sep_trailing_allowed(token::Comma);
- let mut fn_inputs = self.parse_seq_to_before_end(
+ let mut fn_inputs = try!(self.parse_seq_to_before_end(
&token::CloseDelim(token::Paren),
sep,
parse_arg_fn
- );
+ ));
fn_inputs.insert(0, Arg::new_self(explicit_self_sp, mutbl_self, $self_id));
fn_inputs
}
}
_ => {
let token_str = self.this_token_to_string();
- self.fatal(&format!("expected `,` or `)`, found `{}`",
- token_str))
+ return Err(self.fatal(&format!("expected `,` or `)`, found `{}`",
+ token_str)))
}
}
}
let fn_inputs = match explicit_self {
SelfStatic => {
let sep = seq_sep_trailing_allowed(token::Comma);
- self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn)
+ try!(self.parse_seq_to_before_end(&token::CloseDelim(token::Paren),
+ sep, parse_arg_fn))
}
SelfValue(id) => parse_remaining_arguments!(id),
SelfRegion(_,_,id) => parse_remaining_arguments!(id),
};
- self.expect(&token::CloseDelim(token::Paren));
+ try!(self.expect(&token::CloseDelim(token::Paren)));
let hi = self.span.hi;
- let ret_ty = self.parse_ret_ty();
+ let ret_ty = try!(self.parse_ret_ty());
let fn_decl = P(FnDecl {
inputs: fn_inputs,
variadic: false
});
- (spanned(lo, hi, explicit_self), fn_decl)
+ Ok((spanned(lo, hi, explicit_self), fn_decl))
}
// parse the |arg, arg| header on a lambda
- fn parse_fn_block_decl(&mut self) -> P<FnDecl> {
+ fn parse_fn_block_decl(&mut self) -> PResult<P<FnDecl>> {
let inputs_captures = {
- if self.eat(&token::OrOr) {
+ if try!(self.eat(&token::OrOr) ){
Vec::new()
} else {
- self.expect(&token::BinOp(token::Or));
- self.parse_obsolete_closure_kind();
- let args = self.parse_seq_to_before_end(
+ try!(self.expect(&token::BinOp(token::Or)));
+ try!(self.parse_obsolete_closure_kind());
+ let args = try!(self.parse_seq_to_before_end(
&token::BinOp(token::Or),
seq_sep_trailing_allowed(token::Comma),
|p| p.parse_fn_block_arg()
- );
- self.bump();
+ ));
+ try!(self.bump());
args
}
};
- let output = self.parse_ret_ty();
+ let output = try!(self.parse_ret_ty());
- P(FnDecl {
+ Ok(P(FnDecl {
inputs: inputs_captures,
output: output,
variadic: false
- })
+ }))
}
/// Parse the name and optional generic types of a function header.
- fn parse_fn_header(&mut self) -> (Ident, ast::Generics) {
- let id = self.parse_ident();
- let generics = self.parse_generics();
- (id, generics)
+ fn parse_fn_header(&mut self) -> PResult<(Ident, ast::Generics)> {
+ let id = try!(self.parse_ident());
+ let generics = try!(self.parse_generics());
+ Ok((id, generics))
}
fn mk_item(&mut self, lo: BytePos, hi: BytePos, ident: Ident,
}
/// Parse an item-position function declaration.
- fn parse_item_fn(&mut self, unsafety: Unsafety, abi: abi::Abi) -> ItemInfo {
- let (ident, mut generics) = self.parse_fn_header();
- let decl = self.parse_fn_decl(false);
- generics.where_clause = self.parse_where_clause();
- let (inner_attrs, body) = self.parse_inner_attrs_and_block();
- (ident, ItemFn(decl, unsafety, abi, generics, body), Some(inner_attrs))
+ fn parse_item_fn(&mut self, unsafety: Unsafety, abi: abi::Abi) -> PResult<ItemInfo> {
+ let (ident, mut generics) = try!(self.parse_fn_header());
+ let decl = try!(self.parse_fn_decl(false));
+ generics.where_clause = try!(self.parse_where_clause());
+ let (inner_attrs, body) = try!(self.parse_inner_attrs_and_block());
+ Ok((ident, ItemFn(decl, unsafety, abi, generics, body), Some(inner_attrs)))
}
/// Parse an impl item.
- pub fn parse_impl_item(&mut self) -> P<ImplItem> {
+ pub fn parse_impl_item(&mut self) -> PResult<P<ImplItem>> {
let lo = self.span.lo;
let mut attrs = self.parse_outer_attributes();
- let vis = self.parse_visibility();
- let (name, node) = if self.eat_keyword(keywords::Type) {
- let name = self.parse_ident();
- self.expect(&token::Eq);
- let typ = self.parse_ty_sum();
- self.expect(&token::Semi);
+ let vis = try!(self.parse_visibility());
+ let (name, node) = if try!(self.eat_keyword(keywords::Type)) {
+ let name = try!(self.parse_ident());
+ try!(self.expect(&token::Eq));
+ let typ = try!(self.parse_ty_sum());
+ try!(self.expect(&token::Semi));
(name, TypeImplItem(typ))
} else {
- let (name, inner_attrs, node) = self.parse_impl_method(vis);
+ let (name, inner_attrs, node) = try!(self.parse_impl_method(vis));
attrs.extend(inner_attrs.into_iter());
(name, node)
};
- P(ImplItem {
+ Ok(P(ImplItem {
id: ast::DUMMY_NODE_ID,
span: mk_sp(lo, self.last_span.hi),
ident: name,
vis: vis,
attrs: attrs,
node: node
- })
+ }))
}
fn complain_if_pub_macro(&mut self, visa: Visibility, span: Span) {
/// Parse a method or a macro invocation in a trait impl.
fn parse_impl_method(&mut self, vis: Visibility)
- -> (Ident, Vec<ast::Attribute>, ast::ImplItem_) {
+ -> PResult<(Ident, Vec<ast::Attribute>, ast::ImplItem_)> {
// code copied from parse_macro_use_or_failure... abstraction!
if !self.token.is_any_keyword()
&& self.look_ahead(1, |t| *t == token::Not)
let last_span = self.last_span;
self.complain_if_pub_macro(vis, last_span);
- let pth = self.parse_path(NoTypesAllowed);
- self.expect(&token::Not);
+ let pth = try!(self.parse_path(NoTypesAllowed));
+ try!(self.expect(&token::Not));
// eat a matched-delimiter token tree:
- let delim = self.expect_open_delim();
- let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
+ let delim = try!(self.expect_open_delim());
+ let tts = try!(self.parse_seq_to_end(&token::CloseDelim(delim),
seq_sep_none(),
- |p| p.parse_token_tree());
+ |p| p.parse_token_tree()));
let m_ = ast::MacInvocTT(pth, tts, EMPTY_CTXT);
let m: ast::Mac = codemap::Spanned { node: m_,
span: mk_sp(self.span.lo,
self.span.hi) };
if delim != token::Brace {
- self.expect(&token::Semi)
+ try!(self.expect(&token::Semi))
}
- (token::special_idents::invalid, vec![], ast::MacImplItem(m))
+ Ok((token::special_idents::invalid, vec![], ast::MacImplItem(m)))
} else {
- let unsafety = self.parse_unsafety();
- let abi = if self.eat_keyword(keywords::Extern) {
- self.parse_opt_abi().unwrap_or(abi::C)
+ let unsafety = try!(self.parse_unsafety());
+ let abi = if try!(self.eat_keyword(keywords::Extern)) {
+ try!(self.parse_opt_abi()).unwrap_or(abi::C)
} else {
abi::Rust
};
- self.expect_keyword(keywords::Fn);
- let ident = self.parse_ident();
- let mut generics = self.parse_generics();
- let (explicit_self, decl) = self.parse_fn_decl_with_self(|p| {
+ try!(self.expect_keyword(keywords::Fn));
+ let ident = try!(self.parse_ident());
+ let mut generics = try!(self.parse_generics());
+ let (explicit_self, decl) = try!(self.parse_fn_decl_with_self(|p| {
p.parse_arg()
- });
- generics.where_clause = self.parse_where_clause();
- let (inner_attrs, body) = self.parse_inner_attrs_and_block();
- (ident, inner_attrs, MethodImplItem(ast::MethodSig {
+ }));
+ generics.where_clause = try!(self.parse_where_clause());
+ let (inner_attrs, body) = try!(self.parse_inner_attrs_and_block());
+ Ok((ident, inner_attrs, MethodImplItem(ast::MethodSig {
generics: generics,
abi: abi,
explicit_self: explicit_self,
unsafety: unsafety,
decl: decl
- }, body))
+ }, body)))
}
}
/// Parse trait Foo { ... }
- fn parse_item_trait(&mut self, unsafety: Unsafety) -> ItemInfo {
+ fn parse_item_trait(&mut self, unsafety: Unsafety) -> PResult<ItemInfo> {
- let ident = self.parse_ident();
- let mut tps = self.parse_generics();
+ let ident = try!(self.parse_ident());
+ let mut tps = try!(self.parse_generics());
// Parse supertrait bounds.
- let bounds = self.parse_colon_then_ty_param_bounds(BoundParsingMode::Bare);
+ let bounds = try!(self.parse_colon_then_ty_param_bounds(BoundParsingMode::Bare));
- tps.where_clause = self.parse_where_clause();
+ tps.where_clause = try!(self.parse_where_clause());
- let meths = self.parse_trait_items();
- (ident, ItemTrait(unsafety, tps, bounds, meths), None)
+ let meths = try!(self.parse_trait_items());
+ Ok((ident, ItemTrait(unsafety, tps, bounds, meths), None))
}
/// Parses items implementations variants
/// impl<T> Foo { ... }
/// impl<T> ToString for &'static T { ... }
/// impl Send for .. {}
- fn parse_item_impl(&mut self, unsafety: ast::Unsafety) -> ItemInfo {
+ fn parse_item_impl(&mut self, unsafety: ast::Unsafety) -> PResult<ItemInfo> {
let impl_span = self.span;
// First, parse type parameters if necessary.
- let mut generics = self.parse_generics();
+ let mut generics = try!(self.parse_generics());
// Special case: if the next identifier that follows is '(', don't
// allow this to be parsed as a trait.
let could_be_trait = self.token != token::OpenDelim(token::Paren);
let neg_span = self.span;
- let polarity = if self.eat(&token::Not) {
+ let polarity = if try!(self.eat(&token::Not) ){
ast::ImplPolarity::Negative
} else {
ast::ImplPolarity::Positive
};
// Parse the trait.
- let mut ty = self.parse_ty_sum();
+ let mut ty = try!(self.parse_ty_sum());
// Parse traits, if necessary.
- let opt_trait = if could_be_trait && self.eat_keyword(keywords::For) {
+ let opt_trait = if could_be_trait && try!(self.eat_keyword(keywords::For) ){
// New-style trait. Reinterpret the type as a trait.
match ty.node {
TyPath(None, ref path) => {
None
};
- if self.eat(&token::DotDot) {
+ if try!(self.eat(&token::DotDot) ){
if generics.is_parameterized() {
self.span_err(impl_span, "default trait implementations are not \
allowed to have genercis");
}
- self.expect(&token::OpenDelim(token::Brace));
- self.expect(&token::CloseDelim(token::Brace));
- (ast_util::impl_pretty_name(&opt_trait, None),
- ItemDefaultImpl(unsafety, opt_trait.unwrap()), None)
+ try!(self.expect(&token::OpenDelim(token::Brace)));
+ try!(self.expect(&token::CloseDelim(token::Brace)));
+ Ok((ast_util::impl_pretty_name(&opt_trait, None),
+ ItemDefaultImpl(unsafety, opt_trait.unwrap()), None))
} else {
if opt_trait.is_some() {
- ty = self.parse_ty_sum();
+ ty = try!(self.parse_ty_sum());
}
- generics.where_clause = self.parse_where_clause();
+ generics.where_clause = try!(self.parse_where_clause());
- self.expect(&token::OpenDelim(token::Brace));
+ try!(self.expect(&token::OpenDelim(token::Brace)));
let attrs = self.parse_inner_attributes();
let mut impl_items = vec![];
- while !self.eat(&token::CloseDelim(token::Brace)) {
- impl_items.push(self.parse_impl_item());
+ while !try!(self.eat(&token::CloseDelim(token::Brace))) {
+ impl_items.push(try!(self.parse_impl_item()));
}
- (ast_util::impl_pretty_name(&opt_trait, Some(&*ty)),
+ Ok((ast_util::impl_pretty_name(&opt_trait, Some(&*ty)),
ItemImpl(unsafety, polarity, generics, opt_trait, ty, impl_items),
- Some(attrs))
+ Some(attrs)))
}
}
/// Parse a::B<String,i32>
- fn parse_trait_ref(&mut self) -> TraitRef {
- ast::TraitRef {
- path: self.parse_path(LifetimeAndTypesWithoutColons),
+ fn parse_trait_ref(&mut self) -> PResult<TraitRef> {
+ Ok(ast::TraitRef {
+ path: try!(self.parse_path(LifetimeAndTypesWithoutColons)),
ref_id: ast::DUMMY_NODE_ID,
- }
+ })
}
- fn parse_late_bound_lifetime_defs(&mut self) -> Vec<ast::LifetimeDef> {
- if self.eat_keyword(keywords::For) {
- self.expect(&token::Lt);
- let lifetime_defs = self.parse_lifetime_defs();
- self.expect_gt();
- lifetime_defs
+ fn parse_late_bound_lifetime_defs(&mut self) -> PResult<Vec<ast::LifetimeDef>> {
+ if try!(self.eat_keyword(keywords::For) ){
+ try!(self.expect(&token::Lt));
+ let lifetime_defs = try!(self.parse_lifetime_defs());
+ try!(self.expect_gt());
+ Ok(lifetime_defs)
} else {
- Vec::new()
+ Ok(Vec::new())
}
}
/// Parse for<'l> a::B<String,i32>
- fn parse_poly_trait_ref(&mut self) -> PolyTraitRef {
+ fn parse_poly_trait_ref(&mut self) -> PResult<PolyTraitRef> {
let lo = self.span.lo;
- let lifetime_defs = self.parse_late_bound_lifetime_defs();
+ let lifetime_defs = try!(self.parse_late_bound_lifetime_defs());
- ast::PolyTraitRef {
+ Ok(ast::PolyTraitRef {
bound_lifetimes: lifetime_defs,
- trait_ref: self.parse_trait_ref(),
+ trait_ref: try!(self.parse_trait_ref()),
span: mk_sp(lo, self.last_span.hi),
- }
+ })
}
/// Parse struct Foo { ... }
- fn parse_item_struct(&mut self) -> ItemInfo {
- let class_name = self.parse_ident();
- let mut generics = self.parse_generics();
+ fn parse_item_struct(&mut self) -> PResult<ItemInfo> {
+ let class_name = try!(self.parse_ident());
+ let mut generics = try!(self.parse_generics());
- if self.eat(&token::Colon) {
- let ty = self.parse_ty_sum();
+ if try!(self.eat(&token::Colon) ){
+ let ty = try!(self.parse_ty_sum());
self.span_err(ty.span, "`virtual` structs have been removed from the language");
}
// struct.
let (fields, ctor_id) = if self.token.is_keyword(keywords::Where) {
- generics.where_clause = self.parse_where_clause();
- if self.eat(&token::Semi) {
+ generics.where_clause = try!(self.parse_where_clause());
+ if try!(self.eat(&token::Semi)) {
// If we see a: `struct Foo<T> where T: Copy;` style decl.
(Vec::new(), Some(ast::DUMMY_NODE_ID))
} else {
// If we see: `struct Foo<T> where T: Copy { ... }`
- (self.parse_record_struct_body(&class_name), None)
+ (try!(self.parse_record_struct_body(&class_name)), None)
}
// No `where` so: `struct Foo<T>;`
- } else if self.eat(&token::Semi) {
+ } else if try!(self.eat(&token::Semi) ){
(Vec::new(), Some(ast::DUMMY_NODE_ID))
// Record-style struct definition
} else if self.token == token::OpenDelim(token::Brace) {
- let fields = self.parse_record_struct_body(&class_name);
+ let fields = try!(self.parse_record_struct_body(&class_name));
(fields, None)
// Tuple-style struct definition with optional where-clause.
} else {
- let fields = self.parse_tuple_struct_body(&class_name, &mut generics);
+ let fields = try!(self.parse_tuple_struct_body(&class_name, &mut generics));
(fields, Some(ast::DUMMY_NODE_ID))
};
- (class_name,
+ Ok((class_name,
ItemStruct(P(ast::StructDef {
fields: fields,
ctor_id: ctor_id,
}), generics),
- None)
+ None))
}
- pub fn parse_record_struct_body(&mut self, class_name: &ast::Ident) -> Vec<StructField> {
+ pub fn parse_record_struct_body(&mut self,
+ class_name: &ast::Ident) -> PResult<Vec<StructField>> {
let mut fields = Vec::new();
- if self.eat(&token::OpenDelim(token::Brace)) {
+ if try!(self.eat(&token::OpenDelim(token::Brace)) ){
while self.token != token::CloseDelim(token::Brace) {
- fields.push(self.parse_struct_decl_field(true));
+ fields.push(try!(self.parse_struct_decl_field(true)));
}
if fields.len() == 0 {
- self.fatal(&format!("unit-like struct definition should be \
+ return Err(self.fatal(&format!("unit-like struct definition should be \
written as `struct {};`",
- token::get_ident(class_name.clone())));
+ token::get_ident(class_name.clone()))));
}
- self.bump();
+ try!(self.bump());
} else {
let token_str = self.this_token_to_string();
- self.fatal(&format!("expected `where`, or `{}` after struct \
+ return Err(self.fatal(&format!("expected `where`, or `{}` after struct \
name, found `{}`", "{",
- token_str));
+ token_str)));
}
- fields
+ Ok(fields)
}
pub fn parse_tuple_struct_body(&mut self,
class_name: &ast::Ident,
generics: &mut ast::Generics)
- -> Vec<StructField> {
+ -> PResult<Vec<StructField>> {
// This is the case where we find `struct Foo<T>(T) where T: Copy;`
if self.check(&token::OpenDelim(token::Paren)) {
- let fields = self.parse_unspanned_seq(
+ let fields = try!(self.parse_unspanned_seq(
&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
seq_sep_trailing_allowed(token::Comma),
let attrs = p.parse_outer_attributes();
let lo = p.span.lo;
let struct_field_ = ast::StructField_ {
- kind: UnnamedField(p.parse_visibility()),
+ kind: UnnamedField(try!(p.parse_visibility())),
id: ast::DUMMY_NODE_ID,
- ty: p.parse_ty_sum(),
+ ty: try!(p.parse_ty_sum()),
attrs: attrs,
};
- spanned(lo, p.span.hi, struct_field_)
- });
+ Ok(spanned(lo, p.span.hi, struct_field_))
+ }));
if fields.len() == 0 {
- self.fatal(&format!("unit-like struct definition should be \
+ return Err(self.fatal(&format!("unit-like struct definition should be \
written as `struct {};`",
- token::get_ident(class_name.clone())));
+ token::get_ident(class_name.clone()))));
}
- generics.where_clause = self.parse_where_clause();
- self.expect(&token::Semi);
- fields
+ generics.where_clause = try!(self.parse_where_clause());
+ try!(self.expect(&token::Semi));
+ Ok(fields)
// This is the case where we just see struct Foo<T> where T: Copy;
} else if self.token.is_keyword(keywords::Where) {
- generics.where_clause = self.parse_where_clause();
- self.expect(&token::Semi);
- Vec::new()
+ generics.where_clause = try!(self.parse_where_clause());
+ try!(self.expect(&token::Semi));
+ Ok(Vec::new())
// This case is where we see: `struct Foo<T>;`
} else {
let token_str = self.this_token_to_string();
- self.fatal(&format!("expected `where`, `{}`, `(`, or `;` after struct \
- name, found `{}`", "{", token_str));
+ Err(self.fatal(&format!("expected `where`, `{}`, `(`, or `;` after struct \
+ name, found `{}`", "{", token_str)))
}
}
pub fn parse_single_struct_field(&mut self,
vis: Visibility,
attrs: Vec<Attribute> )
- -> StructField {
- let a_var = self.parse_name_and_ty(vis, attrs);
+ -> PResult<StructField> {
+ let a_var = try!(self.parse_name_and_ty(vis, attrs));
match self.token {
token::Comma => {
- self.bump();
+ try!(self.bump());
}
token::CloseDelim(token::Brace) => {}
_ => {
let span = self.span;
let token_str = self.this_token_to_string();
- self.span_fatal_help(span,
+ return Err(self.span_fatal_help(span,
&format!("expected `,`, or `}}`, found `{}`",
token_str),
- "struct fields should be separated by commas")
+ "struct fields should be separated by commas"))
}
}
- a_var
+ Ok(a_var)
}
/// Parse an element of a struct definition
- fn parse_struct_decl_field(&mut self, allow_pub: bool) -> StructField {
+ fn parse_struct_decl_field(&mut self, allow_pub: bool) -> PResult<StructField> {
let attrs = self.parse_outer_attributes();
- if self.eat_keyword(keywords::Pub) {
+ if try!(self.eat_keyword(keywords::Pub) ){
if !allow_pub {
let span = self.last_span;
self.span_err(span, "`pub` is not allowed here");
}
/// Parse visibility: PUB, PRIV, or nothing
- fn parse_visibility(&mut self) -> Visibility {
- if self.eat_keyword(keywords::Pub) { Public }
- else { Inherited }
+ fn parse_visibility(&mut self) -> PResult<Visibility> {
+ if try!(self.eat_keyword(keywords::Pub)) { Ok(Public) }
+ else { Ok(Inherited) }
}
/// Given a termination token, parse all of the items in a module
- fn parse_mod_items(&mut self, term: &token::Token, inner_lo: BytePos) -> Mod {
+ fn parse_mod_items(&mut self, term: &token::Token, inner_lo: BytePos) -> PResult<Mod> {
let mut items = vec![];
- while let Some(item) = self.parse_item() {
+ while let Some(item) = try!(self.parse_item_nopanic()) {
items.push(item);
}
- if !self.eat(term) {
+ if !try!(self.eat(term)) {
let token_str = self.this_token_to_string();
- self.fatal(&format!("expected item, found `{}`", token_str))
+ return Err(self.fatal(&format!("expected item, found `{}`", token_str)));
}
- ast::Mod {
+ Ok(ast::Mod {
inner: mk_sp(inner_lo, self.span.lo),
items: items
- }
+ })
}
- fn parse_item_const(&mut self, m: Option<Mutability>) -> ItemInfo {
- let id = self.parse_ident();
- self.expect(&token::Colon);
- let ty = self.parse_ty_sum();
- self.expect(&token::Eq);
- let e = self.parse_expr();
- self.commit_expr_expecting(&*e, token::Semi);
+ fn parse_item_const(&mut self, m: Option<Mutability>) -> PResult<ItemInfo> {
+ let id = try!(self.parse_ident());
+ try!(self.expect(&token::Colon));
+ let ty = try!(self.parse_ty_sum());
+ try!(self.expect(&token::Eq));
+ let e = try!(self.parse_expr_nopanic());
+ try!(self.commit_expr_expecting(&*e, token::Semi));
let item = match m {
Some(m) => ItemStatic(ty, m, e),
None => ItemConst(ty, e),
};
- (id, item, None)
+ Ok((id, item, None))
}
/// Parse a `mod <foo> { ... }` or `mod <foo>;` item
- fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> ItemInfo {
+ fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<ItemInfo> {
let id_span = self.span;
- let id = self.parse_ident();
+ let id = try!(self.parse_ident());
if self.check(&token::Semi) {
- self.bump();
+ try!(self.bump());
// This mod is in an external file. Let's go get it!
- let (m, attrs) = self.eval_src_mod(id, outer_attrs, id_span);
- (id, m, Some(attrs))
+ let (m, attrs) = try!(self.eval_src_mod(id, outer_attrs, id_span));
+ Ok((id, m, Some(attrs)))
} else {
self.push_mod_path(id, outer_attrs);
- self.expect(&token::OpenDelim(token::Brace));
+ try!(self.expect(&token::OpenDelim(token::Brace)));
let mod_inner_lo = self.span.lo;
let old_owns_directory = self.owns_directory;
self.owns_directory = true;
let attrs = self.parse_inner_attributes();
- let m = self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo);
+ let m = try!(self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo));
self.owns_directory = old_owns_directory;
self.pop_mod_path();
- (id, ItemMod(m), Some(attrs))
+ Ok((id, ItemMod(m), Some(attrs)))
}
}
id: ast::Ident,
outer_attrs: &[ast::Attribute],
id_sp: Span)
- -> (ast::Item_, Vec<ast::Attribute> ) {
+ -> PResult<(ast::Item_, Vec<ast::Attribute> )> {
let mut prefix = PathBuf::from(&self.sess.span_diagnostic.cm
.span_to_filename(self.span));
prefix.pop();
(true, false) => (default_path, false),
(false, true) => (secondary_path, true),
(false, false) => {
- self.span_fatal_help(id_sp,
+ return Err(self.span_fatal_help(id_sp,
&format!("file not found for module `{}`",
mod_name),
&format!("name the file either {} or {} inside \
the directory {:?}",
default_path_str,
secondary_path_str,
- dir_path.display()));
+ dir_path.display())));
}
(true, true) => {
- self.span_fatal_help(
+ return Err(self.span_fatal_help(
id_sp,
&format!("file for module `{}` found at both {} \
and {}",
mod_name,
default_path_str,
secondary_path_str),
- "delete or rename one of them to remove the ambiguity");
+ "delete or rename one of them to remove the ambiguity"));
}
}
}
path: PathBuf,
owns_directory: bool,
name: String,
- id_sp: Span) -> (ast::Item_, Vec<ast::Attribute> ) {
+ id_sp: Span) -> PResult<(ast::Item_, Vec<ast::Attribute> )> {
let mut included_mod_stack = self.sess.included_mod_stack.borrow_mut();
match included_mod_stack.iter().position(|p| *p == path) {
Some(i) => {
err.push_str(" -> ");
}
err.push_str(&path.to_string_lossy());
- self.span_fatal(id_sp, &err[..]);
+ return Err(self.span_fatal(id_sp, &err[..]));
}
None => ()
}
id_sp);
let mod_inner_lo = p0.span.lo;
let mod_attrs = p0.parse_inner_attributes();
- let m0 = p0.parse_mod_items(&token::Eof, mod_inner_lo);
+ let m0 = try!(p0.parse_mod_items(&token::Eof, mod_inner_lo));
self.sess.included_mod_stack.borrow_mut().pop();
- (ast::ItemMod(m0), mod_attrs)
+ Ok((ast::ItemMod(m0), mod_attrs))
}
/// Parse a function declaration from a foreign module
fn parse_item_foreign_fn(&mut self, vis: ast::Visibility,
- attrs: Vec<Attribute>) -> P<ForeignItem> {
+ attrs: Vec<Attribute>) -> PResult<P<ForeignItem>> {
let lo = self.span.lo;
- self.expect_keyword(keywords::Fn);
+ try!(self.expect_keyword(keywords::Fn));
- let (ident, mut generics) = self.parse_fn_header();
- let decl = self.parse_fn_decl(true);
- generics.where_clause = self.parse_where_clause();
+ let (ident, mut generics) = try!(self.parse_fn_header());
+ let decl = try!(self.parse_fn_decl(true));
+ generics.where_clause = try!(self.parse_where_clause());
let hi = self.span.hi;
- self.expect(&token::Semi);
- P(ast::ForeignItem {
+ try!(self.expect(&token::Semi));
+ Ok(P(ast::ForeignItem {
ident: ident,
attrs: attrs,
node: ForeignItemFn(decl, generics),
id: ast::DUMMY_NODE_ID,
span: mk_sp(lo, hi),
vis: vis
- })
+ }))
}
/// Parse a static item from a foreign module
fn parse_item_foreign_static(&mut self, vis: ast::Visibility,
- attrs: Vec<Attribute>) -> P<ForeignItem> {
+ attrs: Vec<Attribute>) -> PResult<P<ForeignItem>> {
let lo = self.span.lo;
- self.expect_keyword(keywords::Static);
- let mutbl = self.eat_keyword(keywords::Mut);
+ try!(self.expect_keyword(keywords::Static));
+ let mutbl = try!(self.eat_keyword(keywords::Mut));
- let ident = self.parse_ident();
- self.expect(&token::Colon);
- let ty = self.parse_ty_sum();
+ let ident = try!(self.parse_ident());
+ try!(self.expect(&token::Colon));
+ let ty = try!(self.parse_ty_sum());
let hi = self.span.hi;
- self.expect(&token::Semi);
- P(ForeignItem {
+ try!(self.expect(&token::Semi));
+ Ok(P(ForeignItem {
ident: ident,
attrs: attrs,
node: ForeignItemStatic(ty, mutbl),
id: ast::DUMMY_NODE_ID,
span: mk_sp(lo, hi),
vis: vis
- })
+ }))
}
/// Parse extern crate links
lo: BytePos,
visibility: Visibility,
attrs: Vec<Attribute>)
- -> P<Item> {
+ -> PResult<P<Item>> {
- let crate_name = self.parse_ident();
- let (maybe_path, ident) = if self.eat_keyword(keywords::As) {
- (Some(crate_name.name), self.parse_ident())
+ let crate_name = try!(self.parse_ident());
+ let (maybe_path, ident) = if try!(self.eat_keyword(keywords::As)) {
+ (Some(crate_name.name), try!(self.parse_ident()))
} else {
(None, crate_name)
};
- self.expect(&token::Semi);
+ try!(self.expect(&token::Semi));
let last_span = self.last_span;
- self.mk_item(lo,
+ Ok(self.mk_item(lo,
last_span.hi,
ident,
ItemExternCrate(maybe_path),
visibility,
- attrs)
+ attrs))
}
/// Parse `extern` for foreign ABIs
opt_abi: Option<abi::Abi>,
visibility: Visibility,
mut attrs: Vec<Attribute>)
- -> P<Item> {
- self.expect(&token::OpenDelim(token::Brace));
+ -> PResult<P<Item>> {
+ try!(self.expect(&token::OpenDelim(token::Brace)));
let abi = opt_abi.unwrap_or(abi::C);
attrs.extend(self.parse_inner_attributes().into_iter());
let mut foreign_items = vec![];
- while let Some(item) = self.parse_foreign_item() {
+ while let Some(item) = try!(self.parse_foreign_item()) {
foreign_items.push(item);
}
- self.expect(&token::CloseDelim(token::Brace));
+ try!(self.expect(&token::CloseDelim(token::Brace)));
let last_span = self.last_span;
let m = ast::ForeignMod {
abi: abi,
items: foreign_items
};
- self.mk_item(lo,
+ Ok(self.mk_item(lo,
last_span.hi,
special_idents::invalid,
ItemForeignMod(m),
visibility,
- attrs)
+ attrs))
}
/// Parse type Foo = Bar;
- fn parse_item_type(&mut self) -> ItemInfo {
- let ident = self.parse_ident();
- let mut tps = self.parse_generics();
- tps.where_clause = self.parse_where_clause();
- self.expect(&token::Eq);
- let ty = self.parse_ty_sum();
- self.expect(&token::Semi);
- (ident, ItemTy(ty, tps), None)
+ fn parse_item_type(&mut self) -> PResult<ItemInfo> {
+ let ident = try!(self.parse_ident());
+ let mut tps = try!(self.parse_generics());
+ tps.where_clause = try!(self.parse_where_clause());
+ try!(self.expect(&token::Eq));
+ let ty = try!(self.parse_ty_sum());
+ try!(self.expect(&token::Semi));
+ Ok((ident, ItemTy(ty, tps), None))
}
/// Parse a structure-like enum variant definition
/// this should probably be renamed or refactored...
- fn parse_struct_def(&mut self) -> P<StructDef> {
+ fn parse_struct_def(&mut self) -> PResult<P<StructDef>> {
let mut fields: Vec<StructField> = Vec::new();
while self.token != token::CloseDelim(token::Brace) {
- fields.push(self.parse_struct_decl_field(false));
+ fields.push(try!(self.parse_struct_decl_field(false)));
}
- self.bump();
+ try!(self.bump());
- P(StructDef {
+ Ok(P(StructDef {
fields: fields,
ctor_id: None,
- })
+ }))
}
/// Parse the part of an "enum" decl following the '{'
- fn parse_enum_def(&mut self, _generics: &ast::Generics) -> EnumDef {
+ fn parse_enum_def(&mut self, _generics: &ast::Generics) -> PResult<EnumDef> {
let mut variants = Vec::new();
let mut all_nullary = true;
let mut any_disr = None;
let variant_attrs = self.parse_outer_attributes();
let vlo = self.span.lo;
- let vis = self.parse_visibility();
+ let vis = try!(self.parse_visibility());
let ident;
let kind;
let mut args = Vec::new();
let mut disr_expr = None;
- ident = self.parse_ident();
- if self.eat(&token::OpenDelim(token::Brace)) {
+ ident = try!(self.parse_ident());
+ if try!(self.eat(&token::OpenDelim(token::Brace))) {
// Parse a struct variant.
all_nullary = false;
let start_span = self.span;
- let struct_def = self.parse_struct_def();
+ let struct_def = try!(self.parse_struct_def());
if struct_def.fields.len() == 0 {
self.span_err(start_span,
&format!("unit-like struct variant should be written \
kind = StructVariantKind(struct_def);
} else if self.check(&token::OpenDelim(token::Paren)) {
all_nullary = false;
- let arg_tys = self.parse_enum_variant_seq(
+ let arg_tys = try!(self.parse_enum_variant_seq(
&token::OpenDelim(token::Paren),
&token::CloseDelim(token::Paren),
seq_sep_trailing_allowed(token::Comma),
|p| p.parse_ty_sum()
- );
+ ));
for ty in arg_tys {
args.push(ast::VariantArg {
ty: ty,
});
}
kind = TupleVariantKind(args);
- } else if self.eat(&token::Eq) {
- disr_expr = Some(self.parse_expr());
+ } else if try!(self.eat(&token::Eq)) {
+ disr_expr = Some(try!(self.parse_expr_nopanic()));
any_disr = disr_expr.as_ref().map(|expr| expr.span);
kind = TupleVariantKind(args);
} else {
};
variants.push(P(spanned(vlo, self.last_span.hi, vr)));
- if !self.eat(&token::Comma) { break; }
+ if !try!(self.eat(&token::Comma)) { break; }
}
- self.expect(&token::CloseDelim(token::Brace));
+ try!(self.expect(&token::CloseDelim(token::Brace)));
match any_disr {
Some(disr_span) if !all_nullary =>
self.span_err(disr_span,
_ => ()
}
- ast::EnumDef { variants: variants }
+ Ok(ast::EnumDef { variants: variants })
}
/// Parse an "enum" declaration
- fn parse_item_enum(&mut self) -> ItemInfo {
- let id = self.parse_ident();
- let mut generics = self.parse_generics();
- generics.where_clause = self.parse_where_clause();
- self.expect(&token::OpenDelim(token::Brace));
+ fn parse_item_enum(&mut self) -> PResult<ItemInfo> {
+ let id = try!(self.parse_ident());
+ let mut generics = try!(self.parse_generics());
+ generics.where_clause = try!(self.parse_where_clause());
+ try!(self.expect(&token::OpenDelim(token::Brace)));
- let enum_definition = self.parse_enum_def(&generics);
- (id, ItemEnum(enum_definition, generics), None)
+ let enum_definition = try!(self.parse_enum_def(&generics));
+ Ok((id, ItemEnum(enum_definition, generics), None))
}
/// Parses a string as an ABI spec on an extern type or module. Consumes
/// the `extern` keyword, if one is found.
- fn parse_opt_abi(&mut self) -> Option<abi::Abi> {
+ fn parse_opt_abi(&mut self) -> PResult<Option<abi::Abi>> {
match self.token {
token::Literal(token::Str_(s), suf) | token::Literal(token::StrRaw(s, _), suf) => {
let sp = self.span;
self.expect_no_suffix(sp, "ABI spec", suf);
- self.bump();
+ try!(self.bump());
let the_string = s.as_str();
match abi::lookup(the_string) {
- Some(abi) => Some(abi),
+ Some(abi) => Ok(Some(abi)),
None => {
let last_span = self.last_span;
self.span_err(
found `{}`",
abi::all_names().connect(", "),
the_string));
- None
+ Ok(None)
}
}
}
- _ => None,
+ _ => Ok(None),
}
}
/// NB: this function no longer parses the items inside an
/// extern crate.
fn parse_item_(&mut self, attrs: Vec<Attribute>,
- macros_allowed: bool) -> Option<P<Item>> {
+ macros_allowed: bool) -> PResult<Option<P<Item>>> {
let nt_item = match self.token {
token::Interpolated(token::NtItem(ref item)) => {
Some((**item).clone())
};
match nt_item {
Some(mut item) => {
- self.bump();
+ try!(self.bump());
let mut attrs = attrs;
mem::swap(&mut item.attrs, &mut attrs);
item.attrs.extend(attrs.into_iter());
- return Some(P(item));
+ return Ok(Some(P(item)));
}
None => {}
}
let lo = self.span.lo;
- let visibility = self.parse_visibility();
+ let visibility = try!(self.parse_visibility());
- if self.eat_keyword(keywords::Use) {
+ if try!(self.eat_keyword(keywords::Use)) {
// USE ITEM
- let item_ = ItemUse(self.parse_view_path());
- self.expect(&token::Semi);
+ let item_ = ItemUse(try!(self.parse_view_path()));
+ try!(self.expect(&token::Semi));
let last_span = self.last_span;
let item = self.mk_item(lo,
item_,
visibility,
attrs);
- return Some(item);
+ return Ok(Some(item));
}
- if self.eat_keyword(keywords::Extern) {
- if self.eat_keyword(keywords::Crate) {
- return Some(self.parse_item_extern_crate(lo, visibility, attrs));
+ if try!(self.eat_keyword(keywords::Extern)) {
+ if try!(self.eat_keyword(keywords::Crate)) {
+ return Ok(Some(try!(self.parse_item_extern_crate(lo, visibility, attrs))));
}
- let opt_abi = self.parse_opt_abi();
+ let opt_abi = try!(self.parse_opt_abi());
- if self.eat_keyword(keywords::Fn) {
+ if try!(self.eat_keyword(keywords::Fn)) {
// EXTERN FUNCTION ITEM
let abi = opt_abi.unwrap_or(abi::C);
let (ident, item_, extra_attrs) =
- self.parse_item_fn(Unsafety::Normal, abi);
+ try!(self.parse_item_fn(Unsafety::Normal, abi));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
item_,
visibility,
maybe_append(attrs, extra_attrs));
- return Some(item);
+ return Ok(Some(item));
} else if self.check(&token::OpenDelim(token::Brace)) {
- return Some(self.parse_item_foreign_mod(lo, opt_abi, visibility, attrs));
+ return Ok(Some(try!(self.parse_item_foreign_mod(lo, opt_abi, visibility, attrs))));
}
let span = self.span;
let token_str = self.this_token_to_string();
- self.span_fatal(span,
+ return Err(self.span_fatal(span,
&format!("expected `{}` or `fn`, found `{}`", "{",
- token_str));
+ token_str)))
}
- if self.eat_keyword_noexpect(keywords::Virtual) {
+ if try!(self.eat_keyword_noexpect(keywords::Virtual)) {
let span = self.span;
self.span_err(span, "`virtual` structs have been removed from the language");
}
- if self.eat_keyword(keywords::Static) {
+ if try!(self.eat_keyword(keywords::Static)) {
// STATIC ITEM
- let m = if self.eat_keyword(keywords::Mut) {MutMutable} else {MutImmutable};
- let (ident, item_, extra_attrs) = self.parse_item_const(Some(m));
+ let m = if try!(self.eat_keyword(keywords::Mut)) {MutMutable} else {MutImmutable};
+ let (ident, item_, extra_attrs) = try!(self.parse_item_const(Some(m)));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
item_,
visibility,
maybe_append(attrs, extra_attrs));
- return Some(item);
+ return Ok(Some(item));
}
- if self.eat_keyword(keywords::Const) {
+ if try!(self.eat_keyword(keywords::Const)) {
// CONST ITEM
- if self.eat_keyword(keywords::Mut) {
+ if try!(self.eat_keyword(keywords::Mut)) {
let last_span = self.last_span;
self.span_err(last_span, "const globals cannot be mutable");
self.fileline_help(last_span, "did you mean to declare a static?");
}
- let (ident, item_, extra_attrs) = self.parse_item_const(None);
+ let (ident, item_, extra_attrs) = try!(self.parse_item_const(None));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
item_,
visibility,
maybe_append(attrs, extra_attrs));
- return Some(item);
+ return Ok(Some(item));
}
if self.check_keyword(keywords::Unsafe) &&
self.look_ahead(1, |t| t.is_keyword(keywords::Trait))
{
// UNSAFE TRAIT ITEM
- self.expect_keyword(keywords::Unsafe);
- self.expect_keyword(keywords::Trait);
+ try!(self.expect_keyword(keywords::Unsafe));
+ try!(self.expect_keyword(keywords::Trait));
let (ident, item_, extra_attrs) =
- self.parse_item_trait(ast::Unsafety::Unsafe);
+ try!(self.parse_item_trait(ast::Unsafety::Unsafe));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
item_,
visibility,
maybe_append(attrs, extra_attrs));
- return Some(item);
+ return Ok(Some(item));
}
if self.check_keyword(keywords::Unsafe) &&
self.look_ahead(1, |t| t.is_keyword(keywords::Impl))
{
// IMPL ITEM
- self.expect_keyword(keywords::Unsafe);
- self.expect_keyword(keywords::Impl);
- let (ident, item_, extra_attrs) = self.parse_item_impl(ast::Unsafety::Unsafe);
+ try!(self.expect_keyword(keywords::Unsafe));
+ try!(self.expect_keyword(keywords::Impl));
+ let (ident, item_, extra_attrs) = try!(self.parse_item_impl(ast::Unsafety::Unsafe));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
item_,
visibility,
maybe_append(attrs, extra_attrs));
- return Some(item);
+ return Ok(Some(item));
}
if self.check_keyword(keywords::Fn) {
// FUNCTION ITEM
- self.bump();
+ try!(self.bump());
let (ident, item_, extra_attrs) =
- self.parse_item_fn(Unsafety::Normal, abi::Rust);
+ try!(self.parse_item_fn(Unsafety::Normal, abi::Rust));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
item_,
visibility,
maybe_append(attrs, extra_attrs));
- return Some(item);
+ return Ok(Some(item));
}
if self.check_keyword(keywords::Unsafe)
&& self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace)) {
// UNSAFE FUNCTION ITEM
- self.bump();
- let abi = if self.eat_keyword(keywords::Extern) {
- self.parse_opt_abi().unwrap_or(abi::C)
+ try!(self.bump());
+ let abi = if try!(self.eat_keyword(keywords::Extern)) {
+ try!(self.parse_opt_abi()).unwrap_or(abi::C)
} else {
abi::Rust
};
- self.expect_keyword(keywords::Fn);
+ try!(self.expect_keyword(keywords::Fn));
let (ident, item_, extra_attrs) =
- self.parse_item_fn(Unsafety::Unsafe, abi);
+ try!(self.parse_item_fn(Unsafety::Unsafe, abi));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
item_,
visibility,
maybe_append(attrs, extra_attrs));
- return Some(item);
+ return Ok(Some(item));
}
- if self.eat_keyword(keywords::Mod) {
+ if try!(self.eat_keyword(keywords::Mod)) {
// MODULE ITEM
let (ident, item_, extra_attrs) =
- self.parse_item_mod(&attrs[..]);
+ try!(self.parse_item_mod(&attrs[..]));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
item_,
visibility,
maybe_append(attrs, extra_attrs));
- return Some(item);
+ return Ok(Some(item));
}
- if self.eat_keyword(keywords::Type) {
+ if try!(self.eat_keyword(keywords::Type)) {
// TYPE ITEM
- let (ident, item_, extra_attrs) = self.parse_item_type();
+ let (ident, item_, extra_attrs) = try!(self.parse_item_type());
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
item_,
visibility,
maybe_append(attrs, extra_attrs));
- return Some(item);
+ return Ok(Some(item));
}
- if self.eat_keyword(keywords::Enum) {
+ if try!(self.eat_keyword(keywords::Enum)) {
// ENUM ITEM
- let (ident, item_, extra_attrs) = self.parse_item_enum();
+ let (ident, item_, extra_attrs) = try!(self.parse_item_enum());
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
item_,
visibility,
maybe_append(attrs, extra_attrs));
- return Some(item);
+ return Ok(Some(item));
}
- if self.eat_keyword(keywords::Trait) {
+ if try!(self.eat_keyword(keywords::Trait)) {
// TRAIT ITEM
let (ident, item_, extra_attrs) =
- self.parse_item_trait(ast::Unsafety::Normal);
+ try!(self.parse_item_trait(ast::Unsafety::Normal));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
item_,
visibility,
maybe_append(attrs, extra_attrs));
- return Some(item);
+ return Ok(Some(item));
}
- if self.eat_keyword(keywords::Impl) {
+ if try!(self.eat_keyword(keywords::Impl)) {
// IMPL ITEM
- let (ident, item_, extra_attrs) = self.parse_item_impl(ast::Unsafety::Normal);
+ let (ident, item_, extra_attrs) = try!(self.parse_item_impl(ast::Unsafety::Normal));
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
item_,
visibility,
maybe_append(attrs, extra_attrs));
- return Some(item);
+ return Ok(Some(item));
}
- if self.eat_keyword(keywords::Struct) {
+ if try!(self.eat_keyword(keywords::Struct)) {
// STRUCT ITEM
- let (ident, item_, extra_attrs) = self.parse_item_struct();
+ let (ident, item_, extra_attrs) = try!(self.parse_item_struct());
let last_span = self.last_span;
let item = self.mk_item(lo,
last_span.hi,
item_,
visibility,
maybe_append(attrs, extra_attrs));
- return Some(item);
+ return Ok(Some(item));
}
self.parse_macro_use_or_failure(attrs,macros_allowed,lo,visibility)
}
/// Parse a foreign item.
- fn parse_foreign_item(&mut self) -> Option<P<ForeignItem>> {
+ fn parse_foreign_item(&mut self) -> PResult<Option<P<ForeignItem>>> {
let lo = self.span.lo;
let attrs = self.parse_outer_attributes();
- let visibility = self.parse_visibility();
+ let visibility = try!(self.parse_visibility());
if self.check_keyword(keywords::Static) {
// FOREIGN STATIC ITEM
- return Some(self.parse_item_foreign_static(visibility, attrs));
+ return Ok(Some(try!(self.parse_item_foreign_static(visibility, attrs))));
}
if self.check_keyword(keywords::Fn) || self.check_keyword(keywords::Unsafe) {
// FOREIGN FUNCTION ITEM
- return Some(self.parse_item_foreign_fn(visibility, attrs));
+ return Ok(Some(try!(self.parse_item_foreign_fn(visibility, attrs))));
}
// FIXME #5668: this will occur for a macro invocation:
- match self.parse_macro_use_or_failure(attrs, true, lo, visibility) {
+ match try!(self.parse_macro_use_or_failure(attrs, true, lo, visibility)) {
Some(item) => {
- self.span_fatal(item.span, "macros cannot expand to foreign items");
+ return Err(self.span_fatal(item.span, "macros cannot expand to foreign items"));
}
- None => None
+ None => Ok(None)
}
}
macros_allowed: bool,
lo: BytePos,
visibility: Visibility
- ) -> Option<P<Item>> {
+ ) -> PResult<Option<P<Item>>> {
if macros_allowed && !self.token.is_any_keyword()
&& self.look_ahead(1, |t| *t == token::Not)
&& (self.look_ahead(2, |t| t.is_plain_ident())
self.complain_if_pub_macro(visibility, last_span);
// item macro.
- let pth = self.parse_path(NoTypesAllowed);
- self.expect(&token::Not);
+ let pth = try!(self.parse_path(NoTypesAllowed));
+ try!(self.expect(&token::Not));
// a 'special' identifier (like what `macro_rules!` uses)
// is optional. We should eventually unify invoc syntax
// and remove this.
let id = if self.token.is_plain_ident() {
- self.parse_ident()
+ try!(self.parse_ident())
} else {
token::special_idents::invalid // no special identifier
};
// eat a matched-delimiter token tree:
- let delim = self.expect_open_delim();
- let tts = self.parse_seq_to_end(&token::CloseDelim(delim),
+ let delim = try!(self.expect_open_delim());
+ let tts = try!(self.parse_seq_to_end(&token::CloseDelim(delim),
seq_sep_none(),
- |p| p.parse_token_tree());
+ |p| p.parse_token_tree()));
// single-variant-enum... :
let m = ast::MacInvocTT(pth, tts, EMPTY_CTXT);
let m: ast::Mac = codemap::Spanned { node: m,
self.span.hi) };
if delim != token::Brace {
- if !self.eat(&token::Semi) {
+ if !try!(self.eat(&token::Semi)) {
let last_span = self.last_span;
self.span_err(last_span,
"macros that expand to items must either \
item_,
visibility,
attrs);
- return Some(item);
+ return Ok(Some(item));
}
// FAILURE TO PARSE ITEM
Inherited => {}
Public => {
let last_span = self.last_span;
- self.span_fatal(last_span, "unmatched visibility `pub`");
+ return Err(self.span_fatal(last_span, "unmatched visibility `pub`"));
}
}
if !attrs.is_empty() {
self.expected_item_err(&attrs);
}
- None
+ Ok(None)
}
- pub fn parse_item(&mut self) -> Option<P<Item>> {
+ pub fn parse_item_nopanic(&mut self) -> PResult<Option<P<Item>>> {
let attrs = self.parse_outer_attributes();
self.parse_item_(attrs, true)
}
+
/// Matches view_path : MOD? non_global_path as IDENT
/// | MOD? non_global_path MOD_SEP LBRACE RBRACE
/// | MOD? non_global_path MOD_SEP LBRACE ident_seq RBRACE
/// | MOD? non_global_path MOD_SEP STAR
/// | MOD? non_global_path
- fn parse_view_path(&mut self) -> P<ViewPath> {
+ fn parse_view_path(&mut self) -> PResult<P<ViewPath>> {
let lo = self.span.lo;
// Allow a leading :: because the paths are absolute either way.
// This occurs with "use $crate::..." in macros.
- self.eat(&token::ModSep);
+ try!(self.eat(&token::ModSep));
if self.check(&token::OpenDelim(token::Brace)) {
// use {foo,bar}
- let idents = self.parse_unspanned_seq(
+ let idents = try!(self.parse_unspanned_seq(
&token::OpenDelim(token::Brace),
&token::CloseDelim(token::Brace),
seq_sep_trailing_allowed(token::Comma),
- |p| p.parse_path_list_item());
+ |p| p.parse_path_list_item()));
let path = ast::Path {
span: mk_sp(lo, self.span.hi),
global: false,
segments: Vec::new()
};
- return P(spanned(lo, self.span.hi, ViewPathList(path, idents)));
+ return Ok(P(spanned(lo, self.span.hi, ViewPathList(path, idents))));
}
- let first_ident = self.parse_ident();
+ let first_ident = try!(self.parse_ident());
let mut path = vec!(first_ident);
if let token::ModSep = self.token {
// foo::bar or foo::{a,b,c} or foo::*
while self.check(&token::ModSep) {
- self.bump();
+ try!(self.bump());
match self.token {
token::Ident(..) => {
- let ident = self.parse_ident();
+ let ident = try!(self.parse_ident());
path.push(ident);
}
// foo::bar::{a,b,c}
token::OpenDelim(token::Brace) => {
- let idents = self.parse_unspanned_seq(
+ let idents = try!(self.parse_unspanned_seq(
&token::OpenDelim(token::Brace),
&token::CloseDelim(token::Brace),
seq_sep_trailing_allowed(token::Comma),
|p| p.parse_path_list_item()
- );
+ ));
let path = ast::Path {
span: mk_sp(lo, self.span.hi),
global: false,
}
}).collect()
};
- return P(spanned(lo, self.span.hi, ViewPathList(path, idents)));
+ return Ok(P(spanned(lo, self.span.hi, ViewPathList(path, idents))));
}
// foo::bar::*
token::BinOp(token::Star) => {
- self.bump();
+ try!(self.bump());
let path = ast::Path {
span: mk_sp(lo, self.span.hi),
global: false,
}
}).collect()
};
- return P(spanned(lo, self.span.hi, ViewPathGlob(path)));
+ return Ok(P(spanned(lo, self.span.hi, ViewPathGlob(path))));
}
// fall-through for case foo::bar::;
}
}).collect()
};
- if self.eat_keyword(keywords::As) {
- rename_to = self.parse_ident()
+ if try!(self.eat_keyword(keywords::As)) {
+ rename_to = try!(self.parse_ident())
}
- P(spanned(lo, self.last_span.hi, ViewPathSimple(rename_to, path)))
+ Ok(P(spanned(lo, self.last_span.hi, ViewPathSimple(rename_to, path))))
}
/// Parses a source module as a crate. This is the main
/// entry point for the parser.
- pub fn parse_crate_mod(&mut self) -> Crate {
+ pub fn parse_crate_mod(&mut self) -> PResult<Crate> {
let lo = self.span.lo;
- ast::Crate {
+ Ok(ast::Crate {
attrs: self.parse_inner_attributes(),
- module: self.parse_mod_items(&token::Eof, lo),
+ module: try!(self.parse_mod_items(&token::Eof, lo)),
config: self.cfg.clone(),
span: mk_sp(lo, self.span.lo),
exported_macros: Vec::new(),
- }
+ })
}
pub fn parse_optional_str(&mut self)
- -> Option<(InternedString, ast::StrStyle, Option<ast::Name>)> {
+ -> PResult<Option<(InternedString,
+ ast::StrStyle,
+ Option<ast::Name>)>> {
let ret = match self.token {
token::Literal(token::Str_(s), suf) => {
(self.id_to_interned_str(s.ident()), ast::CookedStr, suf)
token::Literal(token::StrRaw(s, n), suf) => {
(self.id_to_interned_str(s.ident()), ast::RawStr(n), suf)
}
- _ => return None
+ _ => return Ok(None)
};
- self.bump();
- Some(ret)
+ try!(self.bump());
+ Ok(Some(ret))
}
- pub fn parse_str(&mut self) -> (InternedString, StrStyle) {
- match self.parse_optional_str() {
+ pub fn parse_str(&mut self) -> PResult<(InternedString, StrStyle)> {
+ match try!(self.parse_optional_str()) {
Some((s, style, suf)) => {
let sp = self.last_span;
self.expect_no_suffix(sp, "str literal", suf);
- (s, style)
+ Ok((s, style))
}
- _ => self.fatal("expected string literal")
+ _ => Err(self.fatal("expected string literal"))
}
}
}
match i.node {
ast::ItemFn(_, ast::Unsafety::Unsafe, _, _, _) => {
let diag = self.cx.span_diagnostic;
- diag.span_fatal(i.span, "unsafe functions cannot be used for tests");
+ panic!(diag.span_fatal(i.span, "unsafe functions cannot be used for tests"));
}
_ => {
debug!("this is a test function");
/// Parse a string, return a crate.
pub fn string_to_crate (source_str : String) -> ast::Crate {
with_error_checking_parse(source_str, |p| {
- p.parse_crate_mod()
+ panictry!(p.parse_crate_mod())
})
}
-Subproject commit 49cc7f6fef12bdd77a0f8b182d9a64c371cb17c8
+Subproject commit ebc6b04c29591108d3f28e724b4b9b74cd1232e6
#![crate_type = "rlib"]
#![feature(fundamental)]
-use std::marker::MarkerTrait;
-
-pub trait MyCopy : MarkerTrait { }
+pub trait MyCopy { }
impl MyCopy for i32 { }
pub struct MyStruct<T>(T);
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-pub trait TheTrait<T> : ::std::marker::PhantomFn<T> {
+pub trait TheTrait<T> {
fn the_fn(&self);
}
#![no_std]
#![feature(lang_items)]
-#[lang="phantom_fn"]
-pub trait PhantomFn<A:?Sized,R:?Sized=()> { }
-impl<A:?Sized, R:?Sized, U:?Sized> PhantomFn<A,R> for U { }
-
#[lang="sized"]
-pub trait Sized : PhantomFn<Self> {}
+pub trait Sized { }
#[lang="panic"]
fn panic(_: &(&'static str, &'static str, usize)) -> ! { loop {} }
extern fn eh_personality() {}
#[lang="copy"]
-pub trait Copy : PhantomFn<Self> {
+pub trait Copy {
// Empty.
}
#![feature(no_std)]
#![no_std]
-#[lang="phantom_fn"]
-pub trait PhantomFn<A:?Sized,R:?Sized=()> { }
-impl<A:?Sized, R:?Sized, U:?Sized> PhantomFn<A,R> for U { }
-
#[lang="sized"]
-pub trait Sized : PhantomFn<Self> {
+pub trait Sized {
// Empty.
}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that we give suitable error messages when the user attempts to
+// impl a trait `Trait` for its own object type.
+
+trait Foo { fn dummy(&self) { } }
+trait Bar: Foo { }
+trait Baz: Bar { }
+
+// Subtraits of Baz are not legal:
+impl Foo for Baz { } //~ ERROR E0371
+impl Bar for Baz { } //~ ERROR E0371
+impl Baz for Baz { } //~ ERROR E0371
+
+// But other random traits are:
+trait Other { }
+impl Other for Baz { } // OK, Bar not a subtrait of Baz
+
+// If the trait is not object-safe, we give a more tailored message
+// because we're such schnuckels:
+trait NotObjectSafe { fn eq(&self, other: Self); }
+impl NotObjectSafe for NotObjectSafe { } //~ ERROR E0372
+
+fn main() { }
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that two distinct impls which match subtypes of one another
-// yield coherence errors (or not) depending on the variance.
-
-trait Contravariant {
- fn foo(&self) { }
-}
-
-impl Contravariant for for<'a,'b> fn(&'a u8, &'b u8) {
- //~^ ERROR E0119
-}
-
-impl Contravariant for for<'a> fn(&'a u8, &'a u8) {
-}
-
-///////////////////////////////////////////////////////////////////////////
-
-trait Covariant {
- fn foo(&self) { }
-}
-
-impl Covariant for for<'a,'b> fn(&'a u8, &'b u8) {
- //~^ ERROR E0119
-}
-
-impl Covariant for for<'a> fn(&'a u8, &'a u8) {
-}
-
-///////////////////////////////////////////////////////////////////////////
-
-trait Invariant {
- fn foo(&self) -> Self { }
-}
-
-impl Invariant for for<'a,'b> fn(&'a u8, &'b u8) {
-}
-
-impl Invariant for for<'a> fn(&'a u8, &'a u8) {
-}
-
-fn main() { }
extern crate coherence_copy_like_lib as lib;
-use std::marker::MarkerTrait;
-
struct MyType { x: i32 }
-trait MyTrait : MarkerTrait { }
+trait MyTrait { }
impl<T: lib::MyCopy> MyTrait for T { }
// `MyFundamentalStruct` is declared fundamental, so we can test that
extern crate coherence_copy_like_lib as lib;
-use std::marker::MarkerTrait;
-
struct MyType { x: i32 }
-trait MyTrait : MarkerTrait { }
+trait MyTrait { }
impl<T: lib::MyCopy> MyTrait for T { }
// `MyFundamentalStruct` is declared fundamental, so we can test that
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use std::marker::PhantomFn;
-
-trait FromStructReader<'a> : PhantomFn<(Self,&'a ())> { }
+trait FromStructReader<'a> { }
trait ResponseHook {
fn get<'a, T: FromStructReader<'a>>(&'a self);
}
#![feature(lang_items, start, no_std)]
#![no_std]
-#[lang="phantom_fn"]
-trait PhantomFn<A:?Sized,R:?Sized=()> { }
-impl<A:?Sized, R:?Sized, U:?Sized> PhantomFn<A,R> for U { }
-
#[lang = "sized"]
-trait Sized : PhantomFn<Self> {}
+trait Sized { }
#[start]
fn main(_: isize, _: *const *const u8) -> isize {
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-fn main() {
- match 42 {
- x < 7 => (),
- //~^ error: unexpected token: `<`
- _ => ()
- }
-}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-fn a(B<) {}
- //~^ error: unexpected token: `<`
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-struct Foo<T>(T, T);
-
-impl<T> Foo<T> {
- fn foo(&self) {
- match *self {
- Foo<T>(x, y) => {
- //~^ error: unexpected token: `<`
- println!("Goodbye, World!")
- }
- }
- }
-}
fn foo<'a>() {
let t: S<&'a isize> = S(marker::PhantomData);
let a = &t as &Gettable<&'a isize>;
- //~^ ERROR cannot infer
+ //~^ ERROR does not fulfill
}
fn foo2<'a>() {
#![allow(dead_code)]
#![deny(unsafe_code)]
-use std::marker::PhantomFn;
-
struct Bar;
struct Bar2;
struct Bar3;
#[allow(unsafe_code)]
mod allowed_unsafe {
- use std::marker::PhantomFn;
fn allowed() { unsafe {} }
unsafe fn also_allowed() {}
- unsafe trait AllowedUnsafe : PhantomFn<Self> {}
+ unsafe trait AllowedUnsafe { }
unsafe impl AllowedUnsafe for super::Bar {}
}
}
unsafe fn baz() {} //~ ERROR: declaration of an `unsafe` function
-unsafe trait Foo : PhantomFn<Self> {} //~ ERROR: declaration of an `unsafe` trait
+unsafe trait Foo {} //~ ERROR: declaration of an `unsafe` trait
unsafe impl Foo for Bar {} //~ ERROR: implementation of an `unsafe` trait
trait Baz {
#![feature(rustc_attrs)]
#![allow(dead_code)]
-use std::marker::PhantomFn;
-
-trait Baz : PhantomFn<Self> {
+trait Baz {
}
-trait Bar<T> : PhantomFn<(Self, T)> {
+trait Bar<T> {
}
fn make_bar<T:Bar<u32>>(t: &T) -> &Bar<u32> {
#![allow(unused)]
-use std::marker;
-
#[rustc_on_unimplemented = "test error `{Self}` with `{Bar}` `{Baz}` `{Quux}`"]
trait Foo<Bar, Baz, Quux>
- : marker::PhantomFn<(Self,Bar,Baz,Quux)>
{}
#[rustc_on_unimplemented="a collection of type `{Self}` cannot be built from an iterator over elements of type `{A}`"]
#[rustc_on_unimplemented] //~ ERROR this attribute must have a value
trait BadAnnotation1
- : marker::MarkerTrait
{}
#[rustc_on_unimplemented = "Unimplemented trait error on `{Self}` with params `<{A},{B},{C}>`"]
//~^ ERROR there is no type parameter C on trait BadAnnotation2
trait BadAnnotation2<A,B>
- : marker::PhantomFn<(Self,A,B)>
{}
#[rustc_on_unimplemented = "Unimplemented trait error on `{Self}` with params `<{A},{B},{}>`"]
//~^ only named substitution parameters are allowed
trait BadAnnotation3<A,B>
- : marker::PhantomFn<(Self,A,B)>
{}
pub fn main() {
#![feature(on_unimplemented)]
-use std::marker;
-
#[rustc_on_unimplemented = "test error `{Self}` with `{Bar}` `{Baz}` `{Quux}`"]
trait Foo<Bar, Baz, Quux>
- : marker::PhantomFn<(Self,Bar,Baz,Quux)>
{}
fn foobar<U: Clone, T: Foo<u8, U, u32>>() -> T {
#![feature(lang_items, start, no_std)]
#![no_std] // makes debugging this test *a lot* easier (during resolve)
-#[lang="phantom_fn"]
-pub trait PhantomFn<A:?Sized,R:?Sized=()> { }
-impl<A:?Sized, R:?Sized, U:?Sized> PhantomFn<A,R> for U { }
-
#[lang="sized"]
-pub trait Sized : PhantomFn<Self> {}
+pub trait Sized {}
#[lang="copy"]
-pub trait Copy : PhantomFn<Self> {}
+pub trait Copy {}
mod bar {
// shouldn't bring in too much
#![feature(lang_items, start, no_std)]
#![no_std] // makes debugging this test *a lot* easier (during resolve)
-#[lang="phantom_fn"]
-pub trait PhantomFn<A:?Sized,R:?Sized=()> { }
-impl<A:?Sized, R:?Sized, U:?Sized> PhantomFn<A,R> for U { }
-
-#[lang = "sized"] pub trait Sized : PhantomFn<Self> {}
-#[lang="copy"] pub trait Copy : PhantomFn<Self> {}
+#[lang = "sized"] pub trait Sized {}
+#[lang="copy"] pub trait Copy {}
// Test to make sure that private items imported through globs remain private
// when they're used.
#![allow(dead_code)]
-use std::marker::PhantomFn;
-
///////////////////////////////////////////////////////////////////////////
-pub trait TheTrait: PhantomFn<Self, Self> {
+pub trait TheTrait {
type TheAssocType;
}
#![allow(dead_code)]
#![feature(rustc_attrs)]
-use std::marker::PhantomFn;
-
///////////////////////////////////////////////////////////////////////////
-pub trait TheTrait<'b> : PhantomFn<&'b Self,Self> {
+pub trait TheTrait<'b> {
type TheAssocType;
}
#![allow(dead_code)]
-use std::marker::PhantomFn;
-
///////////////////////////////////////////////////////////////////////////
-pub trait TheTrait: PhantomFn<Self, Self> {
+pub trait TheTrait {
type TheAssocType;
}
#![allow(dead_code)]
#![feature(rustc_attrs)]
-use std::marker::PhantomFn;
-
///////////////////////////////////////////////////////////////////////////
-pub trait TheTrait: PhantomFn<Self, Self> {
+pub trait TheTrait {
type TheAssocType;
}
#![feature(box_syntax)]
#![allow(warnings)]
-use std::marker::PhantomFn;
-
-trait A<T> : PhantomFn<(Self,T)> { }
+trait A<T> { }
struct B<'a, T>(&'a (A<T>+'a));
-trait X : ::std::marker::MarkerTrait {}
+trait X { }
impl<'a, T> X for B<'a, T> {}
#![feature(box_syntax)]
-use std::marker::PhantomFn;
-
-trait A<T> : PhantomFn<(Self,T)> { }
+trait A<T> { }
struct B<'a, T>(&'a (A<T>+'a));
-trait X : PhantomFn<Self> {}
+trait X { }
impl<'a, T> X for B<'a, T> {}
fn g<'a, T: 'static>(v: Box<A<T>+'a>) -> Box<X+'static> {
#![feature(box_syntax)]
#![allow(warnings)]
-use std::marker::PhantomFn;
-
-trait A<T> : PhantomFn<(Self,T)> {}
+trait A<T> { }
struct B<'a, T>(&'a (A<T>+'a));
-trait X : PhantomFn<Self> {}
+trait X { }
impl<'a, T> X for B<'a, T> {}
-fn h<'a, T, U>(v: Box<A<U>+'static>) -> Box<X+'static> {
+fn h<'a, T, U:'static>(v: Box<A<U>+'static>) -> Box<X+'static> {
box B(&*v) as Box<X> //~ ERROR `*v` does not live long enough
}
#![feature(box_syntax)]
-use std::marker::PhantomFn;
-
-trait A<T> : PhantomFn<(Self,T)> {}
+trait A<T> { }
struct B<'a, T>(&'a (A<T>+'a));
-trait X : PhantomFn<Self> {}
+trait X { }
impl<'a, T> X for B<'a, T> {}
fn i<'a, T, U>(v: Box<A<U>+'a>) -> Box<X+'static> {
#![feature(lang_items, no_std)]
#![no_std]
-#[lang="phantom_fn"]
-pub trait PhantomFn<T:?Sized> { }
-impl<T:?Sized, U:?Sized> PhantomFn<T> for U { }
-
-#[lang="sized"] pub trait Sized : PhantomFn<Self> {}
+#[lang="sized"] pub trait Sized { }
// error-pattern:requires `start` lang_item
impl<'a> Foo<'a> {
//~^ NOTE shadowed lifetime `'a` declared here
fn shadow_in_method<'a>(&'a self) -> &'a isize {
- //~^ WARNING lifetime name `'a` shadows another lifetime name that is already in scope
- //~| NOTE deprecated
+ //~^ ERROR lifetime name `'a` shadows another lifetime name that is already in scope
self.0
}
fn shadow_in_type<'b>(&'b self) -> &'b isize {
//~^ NOTE shadowed lifetime `'b` declared here
let x: for<'b> fn(&'b isize) = panic!();
- //~^ WARNING lifetime name `'b` shadows another lifetime name that is already in scope
- //~| NOTE deprecated
+ //~^ ERROR lifetime name `'b` shadows another lifetime name that is already in scope
self.0
}
}
fn main() {
- // intentional error that occurs after `resolve_lifetime` runs,
- // just to ensure that this test fails to compile; when shadowed
- // lifetimes become either an error or a proper lint, this will
- // not be needed.
- let x: isize = 3_usize; //~ ERROR mismatched types
}
use std::marker;
-trait A : marker::PhantomFn<Self> {
-}
+trait A { }
trait B: A {}
}
trait IteratorUtil<A>
- : ::std::marker::PhantomFn<(),A>
{
fn zip<B, U: Iterator<U>>(self, other: U) -> ZipIterator<Self, U>;
}
#![feature(unboxed_closures)]
#![allow(dead_code)]
-use std::marker::PhantomFn;
-
trait Foo<T> {
type Output;
fn dummy(&self, t: T, u: Self::Output);
}
-trait Eq<X: ?Sized> : PhantomFn<(Self,X)> { }
+trait Eq<X: ?Sized> { }
impl<X: ?Sized> Eq<X> for X { }
fn eq<A: ?Sized,B: ?Sized +Eq<A>>() { }
fn dummy(&self, t: T);
}
-trait Eq<X: ?Sized> : marker::PhantomFn<(Self, X)> { }
+trait Eq<X: ?Sized> { }
impl<X: ?Sized> Eq<X> for X { }
fn eq<A: ?Sized,B: ?Sized +Eq<A>>() { }
#![allow(dead_code)]
+// Test that even when `T` is only used in contravariant position, it
+// is treated as invariant.
+
trait Get<T> : 'static {
fn get(&self, t: T);
}
-> Box<Get<&'max i32>>
where 'max : 'min
{
- v
+ // Previously OK:
+ v //~ ERROR mismatched types
}
fn main() { }
#![allow(dead_code)]
+// Test that even when `T` is only used in contravariant position, it
+// is treated as invariant.
+
trait Get<T> {
fn get(&self, t: T);
}
fn get_max_from_min<'min, 'max, G>()
where 'max : 'min, G : Get<&'min i32>
{
- impls_get::<G,&'max i32>()
+ // Previously OK, but now an error because traits are invariant:
+
+ impls_get::<G,&'max i32>() //~ ERROR mismatched types
}
fn impls_get<G,T>() where G : Get<T> { }
#![allow(dead_code)]
+// Test that even when `Self` is only used in contravariant position, it
+// is treated as invariant.
+
trait Get {
fn get(&self);
}
fn get_max_from_min<'min, 'max, G>()
where 'max : 'min, G : 'max, &'min G : Get
{
- impls_get::<&'max G>();
+ // Previously OK, but now error because traits are invariant with
+ // respect to all inputs.
+
+ impls_get::<&'max G>(); //~ ERROR mismatched types
}
fn impls_get<G>() where G : Get { }
#![allow(dead_code)]
+// Test that even when `T` is only used in covariant position, it
+// is treated as invariant.
+
trait Get<T> : 'static {
fn get(&self) -> T;
}
-> Box<Get<&'min i32>>
where 'max : 'min
{
- v
+ // Previously OK, now an error as traits are invariant.
+ v //~ ERROR mismatched types
}
fn get_max_from_min<'min, 'max, G>(v: Box<Get<&'min i32>>)
#![allow(dead_code)]
+// Test that even when `T` is only used in covariant position, it
+// is treated as invariant.
+
trait Get<T> {
fn get(&self) -> T;
}
fn get_min_from_max<'min, 'max, G>()
where 'max : 'min, G : Get<&'max i32>
{
- impls_get::<G,&'min i32>()
+ // Previously OK, now an error as traits are invariant.
+ impls_get::<G,&'min i32>() //~ ERROR mismatched types
}
fn get_max_from_min<'min, 'max, G>()
#![allow(dead_code)]
+// Test that even when `Self` is only used in covariant position, it
+// is treated as invariant.
+
trait Get {
fn get() -> Self;
}
fn get_min_from_max<'min, 'max, G>()
where 'max : 'min, G : 'max, &'max G : Get
{
- impls_get::<&'min G>();
+ // Previously OK, now an error as traits are invariant.
+ impls_get::<&'min G>(); //~ ERROR mismatched types
}
fn get_max_from_min<'min, 'max, G>()
#![feature(rustc_attrs)]
#[rustc_variance]
-trait Foo: 'static { //~ ERROR types=[[];[-];[]]
+trait Foo: 'static { //~ ERROR types=[[];[o];[]]
}
#[rustc_variance]
-trait Bar<T> { //~ ERROR types=[[+];[-];[]]
+trait Bar<T> { //~ ERROR types=[[o];[o];[]]
fn do_it(&self)
where T: 'static;
}
field: &'a [i32]
}
-trait Trait<'a, 'd> { //~ ERROR parameter `'d` is never used
+trait Trait<'a, 'd> { // OK on traits
fn method(&'a self);
}
// influence variance.
#[rustc_variance]
-trait Getter<T> { //~ ERROR types=[[+];[-];[]]
+trait Getter<T> { //~ ERROR types=[[o];[o];[]]
fn get(&self) -> T;
}
#[rustc_variance]
-trait Setter<T> { //~ ERROR types=[[-];[-];[]]
+trait Setter<T> { //~ ERROR types=[[o];[o];[]]
fn get(&self, T);
}
}
#[rustc_variance]
-trait TestTrait<U,T:Setter<U>> { //~ ERROR types=[[-, +];[-];[]]
+trait TestTrait<U,T:Setter<U>> { //~ ERROR types=[[o, o];[o];[]]
fn getter(&self, u: U) -> T;
}
#[rustc_variance]
-trait TestTrait2<U> : Getter<U> { //~ ERROR types=[[+];[-];[]]
+trait TestTrait2<U> : Getter<U> { //~ ERROR types=[[o];[o];[]]
}
#[rustc_variance]
-trait TestTrait3<U> { //~ ERROR types=[[-];[-];[]]
+trait TestTrait3<U> { //~ ERROR types=[[o];[o];[]]
fn getter<T:Getter<U>>(&self);
}
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// Issue #5781. Tests that subtyping is handled properly in trait matching.
+// pretty-expanded FIXME #23616
-trait Make<'a> {
- fn make(x: &'a mut isize) -> Self;
+#![allow(dead_code)]
+
+// Get<T> is covariant in T
+trait Get<T> {
+ fn get(&self) -> T;
+}
+
+struct Cloner<T:Clone> {
+ t: T
}
-impl<'a> Make<'a> for &'a mut isize {
- fn make(x: &'a mut isize) -> &'a mut isize {
- x
+impl<T:Clone> Get<T> for Cloner<T> {
+ fn get(&self) -> T {
+ self.t.clone()
}
}
-fn f() -> &'static mut isize {
- let mut x = 1;
- let y: &'static mut isize = Make::make(&mut x); //~ ERROR `x` does not live long enough
- y
+fn get<'a, G>(get: &G) -> i32
+ where G : Get<&'a i32>
+{
+ // This fails to type-check because, without variance, we can't
+ // use `G : Get<&'a i32>` as evidence that `G : Get<&'b i32>`,
+ // even if `'a : 'b`.
+ pick(get, &22) //~ ERROR cannot infer
}
-fn main() {}
+fn pick<'b, G>(get: &'b G, if_odd: &'b i32) -> i32
+ where G : Get<&'b i32>
+{
+ let v = *get.get();
+ if v % 2 != 0 { v } else { *if_odd }
+}
+
+fn main() {
+ let x = Cloner { t: &23 };
+ let y = get(&x);
+ assert_eq!(y, 23);
+}
}
#[rustc_variance]
-trait Getter<A> { //~ ERROR types=[[+];[-];[]]
+trait Getter<A> { //~ ERROR types=[[o];[o];[]]
fn get(&self) -> A;
}
#[rustc_variance]
-trait Setter<A> { //~ ERROR types=[[-];[o];[]]
+trait Setter<A> { //~ ERROR types=[[o];[o];[]]
fn set(&mut self, a: A);
}
}
#[rustc_variance]
-trait GetterInTypeBound<A> { //~ ERROR types=[[-];[-];[]]
+trait GetterInTypeBound<A> { //~ ERROR types=[[o];[o];[]]
// Here, the use of `A` in the method bound *does* affect
// variance. Think of it as if the method requested a dictionary
// for `T:Getter<A>`. Since this dictionary is an input, it is
}
#[rustc_variance]
-trait SetterInTypeBound<A> { //~ ERROR types=[[+];[-];[]]
+trait SetterInTypeBound<A> { //~ ERROR types=[[o];[o];[]]
fn do_it<T:Setter<A>>(&self);
}
#[rustc_variance]
-struct TestObject<A, R> { //~ ERROR types=[[-, +];[];[]]
+struct TestObject<A, R> { //~ ERROR types=[[o, o];[];[]]
n: Box<Setter<A>+Send>,
m: Box<Getter<R>+Send>,
}
struct SomeStruct<'a> { x: u32 } //~ ERROR parameter `'a` is never used
enum SomeEnum<'a> { Nothing } //~ ERROR parameter `'a` is never used
-trait SomeTrait<'a> { fn foo(&self); } //~ ERROR parameter `'a` is never used
+trait SomeTrait<'a> { fn foo(&self); } // OK on traits.
fn main() {}
//~^ ERROR parameter `A` is never used
//~| HELP PhantomData
-trait SomeTrait<A> { fn foo(&self); }
-//~^ ERROR parameter `A` is never used
-//~| HELP PhantomFn
-
// Here T might *appear* used, but in fact it isn't.
enum ListCell<T> {
//~^ ERROR parameter `T` is never used
// older versions of GDB too. A more extensive test can be found in
// gdb-pretty-struct-and-enums.rs
+// ignore-bitrig
// ignore-windows failing on win32 bot
// ignore-freebsd: gdb package too new
// ignore-tidy-linelength
// except according to those terms.
fn main() {
- let caller<F> = |f: F| //~ ERROR unexpected token: `<`
+ let caller<F> = |f: F| //~ ERROR expected one of `:`, `;`, `=`, or `@`, found `<`
where F: Fn() -> i32
{
let x = f();
}
fn bar() {
- let Foo<Vec<u8>> //~ ERROR unexpected token: `<`
+ let Foo<Vec<u8>> //~ ERROR expected one of `:`, `;`, `=`, or `@`, found `<`
}
fn main() {}
// ignore-windows
// ignore-freebsd
// ignore-openbsd
+// ignore-bitrig
#[path = "../compile-fail"]
mod foo; //~ ERROR: a directory
fn main() {
let a = Vec::new();
match a {
- [1, tail.., tail..] => {}, //~ ERROR: expected one of `!`, `,`, or `@`, found `..`
+ [1, tail.., tail..] => {}, //~ ERROR: expected one of `,` or `@`, found `..`
_ => ()
}
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-fn foo(x) { //~ ERROR expected one of `!`, `:`, or `@`, found `)`
+fn foo(x) { //~ ERROR expected one of `:` or `@`, found `)`
}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ match 42 {
+ x < 7 => (),
+ //~^ error: expected one of `=>`, `@`, `if`, or `|`, found `<`
+ _ => ()
+ }
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn a(B<) {}
+ //~^ error: expected one of `:` or `@`, found `<`
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct Foo<T>(T, T);
+
+impl<T> Foo<T> {
+ fn foo(&self) {
+ match *self {
+ Foo<T>(x, y) => {
+ //~^ error: expected one of `=>`, `@`, `if`, or `|`, found `<`
+ println!("Goodbye, World!")
+ }
+ }
+ }
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+enum BtNode {
+ Node(u32,Box<BtNode>,Box<BtNode>),
+ Leaf(u32),
+}
+
+fn main() {
+ let y = match x {
+ Foo<T>::A(value) => value, //~ error: expected one of `=>`, `@`, `if`, or `|`, found `<`
+ Foo<T>::B => 7,
+ };
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ let v[0] = v[1]; //~ error: expected one of `:`, `;`, `=`, or `@`, found `[`
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ let Test(&desc[..]) = x; //~ error: expected one of `,` or `@`, found `[`
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ for thing(x[]) {} //~ error: expected one of `,` or `@`, found `[`
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Parsing of range patterns
+
+fn main() {
+ let macropus!() ... 11 = 12; //~ error: expected one of `:`, `;`, or `=`, found `...`
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Parsing of range patterns
+
+fn main() {
+ let 10 ... makropulos!() = 12; //~ error: expected one of `::`, `:`, `;`, or `=`, found `!`
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Parsing of range patterns
+
+fn main() {
+ let 10 ... 10 + 3 = 12; //~ expected one of `:`, `;`, or `=`, found `+`
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Parsing of range patterns
+
+fn main() {
+ let 10 - 3 ... 10 = 8; //~ error: expected one of `...`, `:`, `;`, or `=`, found `-`
+}
fn removed_moves() {
let mut x = 0;
let y <- x;
- //~^ ERROR expected one of `!`, `:`, `;`, `=`, or `@`, found `<-`
+ //~^ ERROR expected one of `:`, `;`, `=`, or `@`, found `<-`
}
fn main() {
match Foo {
- x: 3 //~ ERROR expected one of `!`, `=>`, `@`, `if`, or `|`, found `:`
+ x: 3 //~ ERROR expected one of `=>`, `@`, `if`, or `|`, found `:`
} {
Foo {
x: x
ifneq ($(shell uname),FreeBSD)
all:
$(RUSTC) foo.rs
- $(CC) bar.c -lfoo -o $(call RUN_BINFILE,bar) $(EXTRAFLAGS) -lstdc++
+ $(CC) bar.c -lfoo -o $(call RUN_BINFILE,bar) $(EXTRAFLAGS) $(EXTRACXXFLAGS)
$(call RUN,bar)
rm $(call STATICLIB,foo*)
$(call RUN,bar)
# is compiled with LTO, it shouldn't strip the symbol from `foo`, and that's the
# only way that `foo.c` will successfully compile.
+ifeq ($(UNAME),Bitrig)
+ EXTRACFLAGS := -lc $(EXTRACFLAGS) $(EXTRACXXFLAGS)
+endif
+
all:
$(RUSTC) foo.rs --crate-type=rlib
$(RUSTC) bar.rs --crate-type=staticlib -C lto -L. -o $(TMPDIR)/libbar.a
all:
$(RUSTC) foo.rs -C lto
- $(CC) bar.c -lfoo -o $(call RUN_BINFILE,bar) $(EXTRACFLAGS) -lstdc++
+ $(CC) bar.c -lfoo -o $(call RUN_BINFILE,bar) $(EXTRACFLAGS) $(EXTRACXXFLAGS)
$(call RUN,bar)
-include ../tools.mk
+
ifndef IS_WINDOWS
-ifneq ($(UNAME),OpenBSD)
+
+SKIP_OS := 'OpenBSD Bitrig'
+ifneq ($(UNAME),$(findstring $(UNAME),$(SKIP_OS)))
+
all:
$(RUSTC) -O --emit asm attr.rs
! grep -q morestack $(TMPDIR)/attr.s
$(RUSTC) -O --emit asm -C no-stack-check flag.rs
! grep -q morestack $(TMPDIR)/flag.s
else
-# On OpenBSD, morestack isn't used as the segmented stacks are disabled
+# On Bitrig/OpenBSD, morestack isn't used as the segmented stacks are disabled
all:
endif
+
else
# On Windows we use __chkstk and it only appears in functions with large allocations,
# so this test wouldn't be reliable.
}
#[lang = "sized"]
-pub trait Sized : PhantomFn<Self> {}
+pub trait Sized { }
#[lang = "copy"]
-pub trait Copy : PhantomFn<Self> {}
-
-#[lang="phantom_fn"]
-pub trait PhantomFn<A:?Sized,R:?Sized=()> { }
-impl<A:?Sized, R:?Sized, U:?Sized> PhantomFn<A,R> for U { }
+pub trait Copy { }
mod core {
pub mod marker {
#![feature(lang_items, no_std)]
#![no_std]
-#[lang="phantom_fn"]
-trait PhantomFn<A:?Sized,R:?Sized=()> { }
-impl<A:?Sized, R:?Sized, U:?Sized> PhantomFn<A,R> for U { }
-
#[lang="copy"]
-trait Copy : PhantomFn<Self> { }
+trait Copy { }
#[lang="sized"]
-trait Sized : PhantomFn<Self> { }
+trait Sized { }
#[lang="start"]
fn start(_main: *const u8, _argc: isize, _argv: *const *const u8) -> isize { 0 }
ifdef IS_WINDOWS
EXTRACFLAGS := -lws2_32 -luserenv
else
-ifeq ($(shell uname),Darwin)
+ifeq ($(UNAME),Darwin)
else
-ifeq ($(shell uname),FreeBSD)
+ifeq ($(UNAME),FreeBSD)
EXTRACFLAGS := -lm -lpthread -lgcc_s
else
-ifeq ($(shell uname),OpenBSD)
+ifeq ($(UNAME),Bitrig)
+ EXTRACFLAGS := -lm -lpthread
+ EXTRACXXFLAGS := -lc++ -lc++abi
+else
+ifeq ($(UNAME),OpenBSD)
EXTRACFLAGS := -lm -lpthread
else
EXTRACFLAGS := -lm -lrt -ldl -lpthread
+ EXTRACXXFLAGS := -lstdc++
+endif
endif
endif
endif
-include ../tools.mk
-ifneq ($(findstring BSD,$(UNAME)),BSD)
+SKIP_OS := 'FreeBSD OpenBSD Bitrig'
+
+ifneq ($(UNAME),$(findstring $(UNAME),$(SKIP_OS)))
+
HOST := $(shell $(RUSTC) -vV | grep 'host:' | sed 's/host: //')
ifeq ($(findstring i686,$(HOST)),i686)
TARGET := $(subst i686,x86_64,$(HOST))
$(RUSTC) bar.rs -C extra-filename=-targ --target $(TARGET)
$(RUSTC) baz.rs --extern a=$(TMPDIR)/liba-targ.rlib --target $(TARGET)
else
-# FreeBSD & OpenBSD support only x86_64 architecture for now
+# FreeBSD, OpenBSD, and Bitrig support only x86_64 architecture for now
all:
endif
use syntax::ext::base::ExtCtxt;
use syntax::ptr::P;
+use syntax::parse::PResult;
fn syntax_extension(cx: &ExtCtxt) {
let e_toks : Vec<syntax::ast::TokenTree> = quote_tokens!(cx, 1 + 2);
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that two distinct impls which match subtypes of one another
+// yield coherence errors (or not) depending on the variance.
+
+trait Contravariant {
+ fn foo(&self) { }
+}
+
+impl Contravariant for for<'a,'b> fn(&'a u8, &'b u8) {
+}
+
+impl Contravariant for for<'a> fn(&'a u8, &'a u8) {
+}
+
+///////////////////////////////////////////////////////////////////////////
+
+trait Covariant {
+ fn foo(&self) { }
+}
+
+impl Covariant for for<'a,'b> fn(&'a u8, &'b u8) {
+}
+
+impl Covariant for for<'a> fn(&'a u8, &'a u8) {
+}
+
+///////////////////////////////////////////////////////////////////////////
+
+trait Invariant {
+ fn foo(&self) { }
+}
+
+impl Invariant for for<'a,'b> fn(&'a u8, &'b u8) {
+}
+
+impl Invariant for for<'a> fn(&'a u8, &'a u8) {
+}
+
+fn main() { }
extern crate coherence_copy_like_lib as lib;
-use std::marker::MarkerTrait;
-
struct MyType { x: i32 }
-trait MyTrait : MarkerTrait { }
+trait MyTrait { }
impl<T: lib::MyCopy> MyTrait for T { }
impl MyTrait for MyType { }
impl<'a> MyTrait for &'a MyType { }
// pretty-expanded FIXME #23616
-use std::marker::PhantomFn;
-
-trait Chromosome<X: Chromosome<i32>> : PhantomFn<(Self,X)> {
+trait Chromosome<X: Chromosome<i32>> {
}
fn main() { }
// pretty-expanded FIXME #23616
-use std::marker::{PhantomData, PhantomFn};
+use std::marker::PhantomData;
pub struct Handle<T, I>(T, I);
pub type RawBufferHandle<D: Device> = Handle<<D as Device>::Buffer, String>;
-pub trait Device: PhantomFn<Self> {
+pub trait Device {
type Buffer;
}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Parsing patterns with paths with type parameters (issue #22544)
+
+use std::default::Default;
+
+#[derive(Default)]
+pub struct Foo<T>(T, T);
+
+impl<T: ::std::fmt::Display> Foo<T> {
+ fn foo(&self) {
+ match *self {
+ Foo::<T>(ref x, ref y) => println!("Goodbye, World! {} {}", x, y)
+ }
+ }
+}
+
+trait Tr {
+ type U;
+}
+
+impl<T> Tr for Foo<T> {
+ type U = T;
+}
+
+struct Wrapper<T> {
+ value: T
+}
+
+fn main() {
+ let Foo::<i32>(a, b) = Default::default();
+
+ let f = Foo(2,3);
+ f.foo();
+
+ let w = Wrapper { value: Foo(10u8, 11u8) };
+ match w {
+ Wrapper::<Foo<u8>> { value: Foo(10, 11) } => {},
+ ::Wrapper::<<Foo<_> as Tr>::U> { value: Foo::<u8>(11, 16) } => { panic!() },
+ _ => { panic!() }
+ }
+
+ if let None::<u8> = Some(8) {
+ panic!();
+ }
+}
impl<'a, K: PartialEq + std::fmt::Debug, V:Clone> Index<&'a K> for AssociationList<K,V> {
type Output = V;
- fn index<'a>(&'a self, index: &K) -> &'a V {
+ fn index(&self, index: &K) -> &V {
for pair in &self.pairs {
if pair.key == *index {
return &pair.value
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Parsing of range patterns
+
+const NUM1: i32 = 10;
+
+mod m {
+ pub const NUM2: i32 = 16;
+}
+
+fn main() {
+ if let NUM1 ... m::NUM2 = 10 {} else { panic!() }
+ if let ::NUM1 ... ::m::NUM2 = 11 {} else { panic!() }
+ if let -13 ... -10 = 12 { panic!() } else {}
+}
fn read(&mut self) -> Value<'v>;
}
-pub trait Decodable<'v, D: Decoder<'v>>
- : marker::PhantomFn<(), &'v isize>
-{
+pub trait Decodable<'v, D: Decoder<'v>> {
fn decode(d: &mut D) -> Self;
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+// ignore-bitrig
// compile-flags: -C codegen-units=3
// aux-build:sepcomp_cci_lib.rs
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+// ignore-bitrig
// compile-flags: -C codegen-units=3
// aux-build:sepcomp-extern-lib.rs
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+// ignore-bitrig
// compile-flags: -C codegen-units=3
// Test references to items that haven't been translated yet.
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+// ignore-bitrig
// compile-flags: -C codegen-units=3
// Test basic separate compilation functionality. The functions should be able
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+// ignore-bitrig
// compile-flags: -C codegen-units=3
// Test references to static items across compilation units.
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+// ignore-bitrig
// compile-flags: -C codegen-units=3
// Test unwinding through multiple compilation units.
// ignore-linux see joyent/libuv#1189
// ignore-android needs extra network permissions
// ignore-openbsd system ulimit (Too many open files)
+// ignore-bitrig system ulimit (Too many open files)
// exec-env:RUST_LOG=debug
#![feature(rustc_private, libc, old_io, io, std_misc)]
#![allow(unknown_features)]
#![feature(box_syntax)]
-use std::marker::MarkerTrait;
-
trait Get {
fn get(&self) -> Self;
}
-trait MyCopy : MarkerTrait { fn copy(&self) -> Self; }
+trait MyCopy { fn copy(&self) -> Self; }
impl MyCopy for u16 { fn copy(&self) -> Self { *self } }
impl MyCopy for u32 { fn copy(&self) -> Self { *self } }
impl MyCopy for i32 { fn copy(&self) -> Self { *self } }
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that we are able to compile the case where both a blanket impl
+// and the object type itself supply the required trait obligation.
+// In this case, the blanket impl for `Foo` applies to any type,
+// including `Bar`, but the object type `Bar` also implicitly supplies
+// this context.
+
+trait Foo { fn dummy(&self) { } }
+
+trait Bar: Foo { }
+
+impl<T:?Sized> Foo for T { }
+
+fn want_foo<B:?Sized+Foo>() { }
+
+fn main() {
+ want_foo::<Bar>();
+}
// pretty-expanded FIXME #23616
-use std::marker::{PhantomData, PhantomFn};
+use std::marker::PhantomData;
-trait T1 : PhantomFn<Self> { }
-pub trait T2 : PhantomFn<Self> { }
-trait T3<X: T1> : T2 + PhantomFn<X> { }
-trait T4<X: ?Sized> : PhantomFn<(Self,X)> {}
-trait T5<X: ?Sized, Y> : PhantomFn<(Self,X,Y)> {}
-trait T6<Y, X: ?Sized> : PhantomFn<(Self,X,Y)> {}
-trait T7<X: ?Sized, Y: ?Sized> : PhantomFn<(Self,X,Y)> {}
-trait T8<X: ?Sized+T2> : PhantomFn<(Self,X)> {}
-trait T9<X: T2 + ?Sized> : PhantomFn<(Self,X)> {}
+trait T1 { }
+pub trait T2 { }
+trait T3<X: T1> : T2 { }
+trait T4<X: ?Sized> { }
+trait T5<X: ?Sized, Y> { }
+trait T6<Y, X: ?Sized> { }
+trait T7<X: ?Sized, Y: ?Sized> { }
+trait T8<X: ?Sized+T2> { }
+trait T9<X: T2 + ?Sized> { }
struct S1<X: ?Sized>(PhantomData<X>);
enum E<X: ?Sized> { E1(PhantomData<X>) }
impl <X: ?Sized> T1 for S1<X> {}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// pretty-expanded FIXME #23616
-
-#![allow(dead_code)]
-
-// Get<T> is covariant in T
-trait Get<T> {
- fn get(&self) -> T;
-}
-
-struct Cloner<T:Clone> {
- t: T
-}
-
-impl<T:Clone> Get<T> for Cloner<T> {
- fn get(&self) -> T {
- self.t.clone()
- }
-}
-
-fn get<'a, G>(get: &G) -> i32
- where G : Get<&'a i32>
-{
- // This call only type checks if we can use `G : Get<&'a i32>` as
- // evidence that `G : Get<&'b i32>` where `'a : 'b`.
- pick(get, &22)
-}
-
-fn pick<'b, G>(get: &'b G, if_odd: &'b i32) -> i32
- where G : Get<&'b i32>
-{
- let v = *get.get();
- if v % 2 != 0 { v } else { *if_odd }
-}
-
-fn main() {
- let x = Cloner { t: &23 };
- let y = get(&x);
- assert_eq!(y, 23);
-}
// pretty-expanded FIXME #23616
-use std::marker::PhantomFn;
-
static mut COUNT: u32 = 1;
-trait Bar<'a>
- : PhantomFn<&'a ()>
-{
+trait Bar<'a> {
fn bar(&self);
}
trait Baz<'a>
- : PhantomFn<&'a ()>
{
fn baz(&self);
}