use rustc::dep_graph::DepGraph;
use rustc::hir::map as hir_map;
use rustc::session::config::Options as SessionOptions;
use rustc::session::config::{Input, OutputFilenames};
use rustc::session::{self, DiagnosticOutput, Session};
use rustc::ty::steal::Steal;
use rustc::ty::{self, GlobalCtxt, Resolutions, TyCtxt};
use rustc::util::common::ErrorReported;
use rustc_codegen_utils::codegen_backend::CodegenBackend;
use rustc_data_structures::declare_box_region_type;
use rustc_data_structures::sync::{Lock, Lrc};
use rustc_driver;
use rustc_errors::DiagnosticBuilder;
use rustc_incremental::DepGraphFuture;
use rustc_interface::interface;
use rustc_interface::interface::BoxedResolver;
use rustc_interface::util::get_codegen_backend;
use rustc_interface::{util, Config};
use rustc_metadata::cstore::CStore;
use std::any::Any;
use std::cell::RefCell;
use std::collections::HashSet;
use std::mem;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::sync::mpsc;
use std::sync::Arc;
use syntax::ast;
use syntax::ast::DUMMY_NODE_ID;
use syntax::ast::{
Arg, Block, BlockCheckMode, Expr, ForeignItem, ImplItem, Item, ItemKind, NodeId, Pat, Stmt, Ty,
UnsafeSource,
};
use syntax::ext::base::NamedSyntaxExtension;
use syntax::ext::hygiene::SyntaxContext;
use syntax::feature_gate::AttributeType;
use syntax::parse::parser::Parser;
use syntax::parse::token::Token;
use syntax::parse::{self, PResult};
use syntax::ptr::P;
use syntax::source_map::SourceMap;
use syntax::source_map::{FileLoader, RealFileLoader};
use syntax::symbol::{keywords, Symbol};
use syntax::tokenstream::TokenTree;
use syntax_pos::FileName;
use syntax_pos::Span;
use crate::ast_manip::remove_paren;
use crate::command::{RefactorState, Registry};
use crate::file_io::{ArcFileIO, FileIO};
use crate::util::Lone;
use crate::RefactorCtxt;
/// Compilation phase at which a refactoring command can run.  Variants are
/// ordered (via the derived `PartialOrd`/`Ord`) from least to most compiler
/// state available; see the `RefactorCtxt::new_phase_*` constructors below
/// for exactly what each phase provides.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub enum Phase {
    /// Session and crate store only (cf. `RefactorCtxt::new_phase_1`).
    Phase1,
    /// HIR map additionally available (cf. `RefactorCtxt::new_phase_2`).
    Phase2,
    /// Fully analyzed `TyCtxt` additionally available
    /// (cf. `RefactorCtxt::new_phase_3`).
    Phase3,
}
impl<'a, 'tcx: 'a> RefactorCtxt<'a, 'tcx> {
    /// Build a phase-1 context: only the session and crate store are
    /// available; no HIR map and no type context.
    pub fn new_phase_1(sess: &'a Session, cstore: &'a CStore) -> RefactorCtxt<'a, 'tcx> {
        Self::new(sess, cstore, None, None)
    }

    /// Build a phase-2 context: like phase 1, plus the HIR map.
    pub fn new_phase_2(
        sess: &'a Session,
        cstore: &'a CStore,
        map: &'a hir_map::Map<'tcx>,
    ) -> RefactorCtxt<'a, 'tcx> {
        Self::new(sess, cstore, Some(map), None)
    }

    /// Build a phase-3 context: like phase 2, plus the fully analyzed
    /// `TyCtxt`.
    pub fn new_phase_3(
        sess: &'a Session,
        cstore: &'a CStore,
        map: &'a hir_map::Map<'tcx>,
        tcx: TyCtxt<'a, 'tcx, 'tcx>,
    ) -> RefactorCtxt<'a, 'tcx> {
        Self::new(sess, cstore, Some(map), Some(tcx))
    }
}
/// Infer a sysroot from the invoked binary's path (`args[0]`) when the
/// session options don't already carry one.
///
/// If `args[0]` is an absolute path such as `$SYSROOT/bin/rustc`, the sysroot
/// is taken to be its grandparent directory.  Options that already have a
/// sysroot, or an empty/relative `args[0]`, pass through unchanged.
fn maybe_set_sysroot(mut sopts: SessionOptions, args: &[String]) -> SessionOptions {
    if sopts.maybe_sysroot.is_none() {
        // `args[0]` is conventionally the path the binary was invoked as.
        if let Some(arg0) = args.first() {
            let p = Path::new(arg0);
            if p.is_absolute() {
                // `.../bin/rustc` -> strip the file name and the `bin`
                // directory to reach the sysroot; leave `None` if the path is
                // too shallow.
                sopts.maybe_sysroot = p.parent().and_then(Path::parent).map(Path::to_owned);
            }
        }
    }
    sopts
}
/// Manually duplicate an `interface::Config`.
///
/// Presumably `Config` (and its `Input` field) lacks a `Clone` impl — TODO
/// confirm against the pinned rustc.  Every clonable field is copied; the two
/// that aren't are reset: `file_loader` to `None` and `diagnostic_output` to
/// `DiagnosticOutput::Default`.
pub fn clone_config(config: &interface::Config) -> interface::Config {
    // Rebuild `Input` variant by variant.
    let input = match &config.input {
        Input::File(path) => Input::File(path.clone()),
        Input::Str { name, input } => {
            let name = name.clone();
            let input = input.clone();
            Input::Str { name, input }
        }
    };
    interface::Config {
        input,
        opts: config.opts.clone(),
        crate_cfg: config.crate_cfg.clone(),
        input_path: config.input_path.clone(),
        output_file: config.output_file.clone(),
        output_dir: config.output_dir.clone(),
        file_loader: None,
        diagnostic_output: DiagnosticOutput::Default,
        stderr: config.stderr.clone(),
        crate_name: config.crate_name.clone(),
        lint_caps: config.lint_caps.clone(),
    }
}
/// Build an `interface::Config` from rustc-style command-line arguments.
///
/// Parses the options with the normal rustc driver machinery, infers a
/// sysroot from `args[0]` when none is given, and requires exactly one free
/// (input-file) argument.
///
/// # Panics
///
/// Panics if the arguments fail rustc's option parsing or if there is not
/// exactly one input file.
pub fn create_config(args: &[String]) -> interface::Config {
    let matches = rustc_driver::handle_options(args).expect("rustc arg parsing failed");
    let (sopts, cfg) = session::config::build_session_options_and_crate_config(&matches);
    let sopts = maybe_set_sysroot(sopts, args);
    let output_dir = matches.opt_str("out-dir").map(PathBuf::from);
    let output_file = matches.opt_str("o").map(PathBuf::from);
    assert!(matches.free.len() == 1, "expected exactly one input file");
    // Build the path once and share it between `input` and `input_path`,
    // instead of the previous `as_ref().unwrap().clone()` round-trip.
    let in_path = PathBuf::from(&matches.free[0]);
    let input = Input::File(in_path.clone());
    interface::Config {
        opts: sopts,
        crate_cfg: cfg,
        input,
        input_path: Some(in_path),
        output_file,
        output_dir,
        file_loader: None,
        diagnostic_output: DiagnosticOutput::Default,
        stderr: None,
        crate_name: None,
        lint_caps: Default::default(),
    }
}
/// Run closure `f` against a rustc `Compiler` built from `config`, optionally
/// installing a custom `file_loader`.
///
/// Incremental compilation is unconditionally disabled first; presumably the
/// refactoring machinery can't cope with incremental state — TODO confirm.
pub fn run_compiler<F, R>(
    mut config: interface::Config,
    file_loader: Option<Box<dyn FileLoader + Send + Sync>>,
    f: F,
) -> R
where
    F: FnOnce(&interface::Compiler) -> R,
    R: Send,
{
    config.opts.incremental = None;
    config.file_loader = file_loader;
    // Install the global/thread-local state the compiler internals expect
    // before entering `rustc_interface`.  The nesting order (syntax globals,
    // then `GCX_PTR`, then rustc thread-locals) is load-bearing; `GCX_PTR` is
    // seeded with a placeholder value of 0.
    syntax::with_globals(move || {
        ty::tls::GCX_PTR.set(&Lock::new(0), || {
            ty::tls::with_thread_locals(|| {
                interface::run_compiler_in_existing_thread_pool(config, f)
            })
        })
    })
}
/// Run closure `f` with a fresh `RefactorState` built from `config`, the
/// command registry `cmd_reg`, the file I/O handler, and the initial set of
/// `marks`.
///
/// Incremental compilation is unconditionally disabled, and the same
/// global/thread-local compiler state as in `run_compiler` is installed
/// before the state is constructed.
pub fn run_refactoring<F, R>(
    mut config: interface::Config,
    cmd_reg: Registry,
    file_io: Arc<FileIO + Sync + Send>,
    marks: HashSet<(NodeId, Symbol)>,
    f: F,
) -> R
where
    F: FnOnce(RefactorState) -> R,
    R: Send,
{
    config.opts.incremental = None;
    // Nesting order matters: syntax globals, then `GCX_PTR` (seeded with a
    // placeholder 0), then the rustc thread-locals.
    syntax::with_globals(move || {
        ty::tls::GCX_PTR.set(&Lock::new(0), || {
            ty::tls::with_thread_locals(|| {
                let state = RefactorState::new(config, cmd_reg, file_io, marks);
                f(state)
            })
        })
    })
}
/// Local mirror of `rustc_interface::interface::Compiler`, whose fields are
/// private.
///
/// NOTE(review): `make_compiler` below transmutes a value of this type into
/// `interface::Compiler`, so the field types and their declaration order here
/// must match that struct exactly for the pinned rustc version.  Do not add,
/// remove, or reorder fields without checking rustc's definition.
#[allow(dead_code)]
pub struct Compiler {
    pub sess: Lrc<Session>,
    pub codegen_backend: Lrc<Box<dyn CodegenBackend>>,
    source_map: Lrc<SourceMap>,
    input: Input,
    input_path: Option<PathBuf>,
    output_dir: Option<PathBuf>,
    output_file: Option<PathBuf>,
    queries: Queries,
    pub cstore: Lrc<CStore>,
    crate_name: Option<String>,
}
/// Mirror of rustc_interface's private `Queries` struct, needed only so that
/// the local `Compiler` above has the same layout as `interface::Compiler`
/// for the transmute in `make_compiler`.  Keep the fields and their order in
/// sync with the pinned rustc version.
#[allow(dead_code)]
#[derive(Default)]
struct Queries {
    dep_graph_future: Query<Option<DepGraphFuture>>,
    parse: Query<ast::Crate>,
    crate_name: Query<String>,
    register_plugins: Query<(ast::Crate, PluginInfo)>,
    expansion: Query<(ast::Crate, Rc<Option<RefCell<BoxedResolver>>>)>,
    dep_graph: Query<DepGraph>,
    lower_to_hir: Query<(Steal<hir_map::Forest>, ExpansionResult)>,
    prepare_outputs: Query<OutputFilenames>,
    codegen_channel: Query<(
        Steal<mpsc::Sender<Box<dyn Any + Send>>>,
        Steal<mpsc::Receiver<Box<dyn Any + Send>>>,
    )>,
    global_ctxt: Query<BoxedGlobalCtxt>,
    ongoing_codegen: Query<Box<dyn Any>>,
    link: Query<()>,
}
/// Mirror of rustc_interface's private `Query` cell: a lazily-filled slot
/// holding either a computed result or a recorded error.  Present only for
/// layout compatibility (see `Queries` and the transmute in `make_compiler`).
#[allow(dead_code)]
struct Query<T> {
    result: RefCell<Option<Result<T, ErrorReported>>>,
}

// `Default` is implemented by hand because `#[derive(Default)]` would impose
// an unnecessary `T: Default` bound; an empty slot needs no `T` value.
impl<T> Default for Query<T> {
    fn default() -> Self {
        Query {
            result: RefCell::new(None),
        }
    }
}
/// Mirror of rustc_interface's private `PluginInfo` (syntax extensions and
/// plugin-registered attributes).  Present only so `Queries` — and hence the
/// local `Compiler` — keeps the same layout as the real compiler structs.
#[allow(dead_code)]
struct PluginInfo {
    syntax_exts: Vec<NamedSyntaxExtension>,
    attributes: Vec<(String, AttributeType)>,
}
/// Mirror of rustc_interface's expansion output (HIR definitions plus name
/// resolutions).  Present only for layout compatibility with the real
/// `Queries` struct; see the transmute in `make_compiler`.
struct ExpansionResult {
    pub defs: Steal<hir_map::Definitions>,
    pub resolutions: Steal<Resolutions>,
}
// Mirror of rustc_interface's `BoxedGlobalCtxt` region type, declared with the
// same macro so that `Queries::global_ctxt` has the same layout as the real
// compiler's field (required by the transmute in `make_compiler`).
declare_box_region_type!(
    pub BoxedGlobalCtxt,
    for('gcx),
    (&'gcx GlobalCtxt<'gcx>) -> ((), ())
);
/// Construct an `interface::Compiler` by hand from a `Config` and a file I/O
/// handler, bypassing `rustc_interface`'s normal (closure-scoped) entry point.
pub fn make_compiler(config: &Config, file_io: Arc<FileIO + Sync + Send>) -> interface::Compiler {
    // Work on a copy of the config, routing all file reads through `file_io`.
    let mut config = clone_config(config);
    config.file_loader = Some(Box::new(ArcFileIO(file_io)));
    let (sess, codegen_backend, source_map) = util::create_session(
        config.opts,
        config.crate_cfg,
        config.diagnostic_output,
        config.file_loader,
        config.input_path.clone(),
        config.lint_caps,
    );
    // Register a one-byte dummy file in the source map; presumably so that
    // synthetic spans always resolve to a real file — TODO confirm.
    source_map.new_source_file(FileName::Custom("<dummy>".to_string()), " ".to_string());
    let cstore = Lrc::new(CStore::new(codegen_backend.metadata_loader()));
    let compiler = Compiler {
        sess,
        codegen_backend,
        source_map,
        cstore,
        input: config.input,
        input_path: config.input_path,
        output_dir: config.output_dir,
        output_file: config.output_file,
        queries: Default::default(),
        crate_name: config.crate_name,
    };
    // SAFETY(review): reinterprets our local `Compiler` as the private
    // `interface::Compiler`.  Sound only if the two structs have identical
    // layout for the pinned rustc version; any field change on either side is
    // undefined behavior.  TODO: verify layouts stay in sync on rustc bumps.
    let compiler = unsafe { mem::transmute(compiler) };
    compiler
}
pub fn build_session_from_args(
args: &[String],
file_loader: Option<Box<FileLoader + Sync + Send>>,
) -> Session {
let matches = rustc_driver::handle_options(args).expect("rustc arg parsing failed");
let (sopts, _cfg) = session::config::build_session_options_and_crate_config(&matches);
let sopts = maybe_set_sysroot(sopts, args);
assert!(matches.free.len() == 1, "expected exactly one input file");
let in_path = Some(Path::new(&matches.free[0]).to_owned());
let (session, _cstore, _codegen_backend) = build_session(sopts, in_path, file_loader);
session
}
/// Assemble a `Session`, its crate store, and a codegen backend from
/// pre-parsed session options.
///
/// When `file_loader` is `None`, reads go straight to the real filesystem.
fn build_session(
    sopts: SessionOptions,
    in_path: Option<PathBuf>,
    file_loader: Option<Box<FileLoader + Sync + Send>>,
) -> (Session, CStore, Box<CodegenBackend>) {
    let registry = rustc_interface::util::diagnostics_registry();
    let loader = file_loader.unwrap_or_else(|| Box::new(RealFileLoader));
    let source_map = Rc::new(SourceMap::with_file_loader(
        loader,
        sopts.file_path_mapping(),
    ));
    // Register a one-byte dummy file in the source map; presumably so that
    // synthetic spans always resolve to a real file — TODO confirm.
    source_map.new_source_file(FileName::Custom("<dummy>".to_string()), " ".to_string());
    let sess = session::build_session_with_source_map(
        sopts,
        in_path,
        registry,
        source_map,
        DiagnosticOutput::Default,
        Default::default(),
    );
    // The backend must exist first: the crate store borrows its metadata loader.
    let backend = get_codegen_backend(&sess);
    let cstore = CStore::new(backend.metadata_loader());
    (sess, cstore, backend)
}
/// Create a `Parser` over `src`, attributed to an anonymous in-memory file.
fn make_parser<'a>(sess: &'a Session, src: &str) -> Parser<'a> {
    let name = FileName::anon_source_code(src);
    parse::new_parser_from_source_str(&sess.parse_sess, name, src.to_owned())
}
/// Emit the pending diagnostic, then panic with a message naming `what` —
/// the kind of construct that failed to parse (e.g. "expr", "stmts").
/// Used as the common error path for the `parse_*` helpers below.
pub fn emit_and_panic(mut db: DiagnosticBuilder, what: &str) -> ! {
    db.emit();
    panic!("error parsing {}", what);
}
/// Parse `src` as an expression, stripping superfluous parens from the
/// result.  On failure the diagnostic is emitted and the process panics.
pub fn parse_expr(sess: &Session, src: &str) -> P<Expr> {
    let mut parser = make_parser(sess, src);
    parser
        .parse_expr()
        .map(|mut expr| {
            remove_paren(&mut expr);
            expr
        })
        .unwrap_or_else(|db| emit_and_panic(db, "expr"))
}
/// Parse `src` as a pattern, stripping superfluous parens from the result.
/// On failure the diagnostic is emitted and the process panics.
pub fn parse_pat(sess: &Session, src: &str) -> P<Pat> {
    let mut parser = make_parser(sess, src);
    parser
        .parse_pat(None)
        .map(|mut pat| {
            remove_paren(&mut pat);
            pat
        })
        .unwrap_or_else(|db| emit_and_panic(db, "pat"))
}
/// Parse `src` as a type, stripping superfluous parens from the result.
/// On failure the diagnostic is emitted and the process panics.
pub fn parse_ty(sess: &Session, src: &str) -> P<Ty> {
    let mut parser = make_parser(sess, src);
    parser
        .parse_ty()
        .map(|mut ty| {
            remove_paren(&mut ty);
            ty
        })
        .unwrap_or_else(|db| emit_and_panic(db, "ty"))
}
/// Parse `src` as a sequence of statements, stripping superfluous parens from
/// each.  On failure the diagnostic is emitted and the process panics.
pub fn parse_stmts(sess: &Session, src: &str) -> Vec<Stmt> {
    // Wrap the statements in braces so the block parser accepts them.
    let wrapped = format!("{{ {} }}", src);
    let mut parser = make_parser(sess, &wrapped);
    let block = match parser.parse_block() {
        Ok(blk) => blk,
        Err(db) => emit_and_panic(db, "stmts"),
    };
    let mut stmts = Vec::new();
    for mut stmt in block.into_inner().stmts {
        remove_paren(&mut stmt);
        stmts.push(stmt.lone());
    }
    stmts
}
/// Parse `src` as a sequence of items, stripping superfluous parens from
/// each.  On failure the diagnostic is emitted and the process panics.
pub fn parse_items(sess: &Session, src: &str) -> Vec<P<Item>> {
    let mut parser = make_parser(sess, src);
    let mut items = Vec::new();
    // `parse_item` yields `Ok(None)` once the input is exhausted.
    while let Some(mut item) = parser
        .parse_item()
        .unwrap_or_else(|db| emit_and_panic(db, "items"))
    {
        remove_paren(&mut item);
        items.push(item.lone());
    }
    items
}
/// Parse `src` as a sequence of impl items (methods, assoc consts, etc.).
/// On failure the diagnostic is emitted and the process panics.
pub fn parse_impl_items(sess: &Session, src: &str) -> Vec<ImplItem> {
    // Wrap the items in a dummy `impl !` so the item parser accepts them.
    let wrapped = format!("impl ! {{ {} }}", src);
    let mut parser = make_parser(sess, &wrapped);
    let item = match parser.parse_item() {
        Ok(it) => it.expect("expected to find an item"),
        Err(db) => emit_and_panic(db, "impl items"),
    };
    if let ItemKind::Impl(_, _, _, _, _, _, items) = item.into_inner().node {
        items
    } else {
        panic!("expected to find an impl item");
    }
}
/// Parse `src` as a sequence of foreign items (extern-block declarations).
/// On failure the diagnostic is emitted and the process panics.
pub fn parse_foreign_items(sess: &Session, src: &str) -> Vec<ForeignItem> {
    // Wrap the declarations in an `extern` block so the item parser accepts
    // them.
    let wrapped = format!("extern {{ {} }}", src);
    let mut parser = make_parser(sess, &wrapped);
    let item = match parser.parse_item() {
        Ok(it) => it.expect("expected to find an item"),
        Err(db) => emit_and_panic(db, "foreign items"),
    };
    if let ItemKind::ForeignMod(fm) = item.into_inner().node {
        fm.items
    } else {
        panic!("expected to find a foreignmod item");
    }
}
/// Parse `src` as a block.  A leading `unsafe` keyword is consumed up front
/// and recorded on the parsed block's check mode.  On failure the diagnostic
/// is emitted and the process panics.
pub fn parse_block(sess: &Session, src: &str) -> P<Block> {
    let mut parser = make_parser(sess, src);
    // Eat an optional leading `unsafe` before handing off to the block
    // parser, remembering which rules the result should carry.
    let rules = if parser.eat_keyword(keywords::Unsafe) {
        BlockCheckMode::Unsafe(UnsafeSource::UserProvided)
    } else {
        BlockCheckMode::Default
    };
    parser
        .parse_block()
        .map(|mut block| {
            remove_paren(&mut block);
            block.rules = rules;
            block
        })
        .unwrap_or_else(|db| emit_and_panic(db, "block"))
}
/// Parse a single `pat: ty` function argument from `p`, assigning it a dummy
/// node id.  Returns the parser's error on any malformed piece.
fn parse_arg_inner<'a>(p: &mut Parser<'a>) -> PResult<'a, Arg> {
    let parsed_pat = p.parse_pat(None)?;
    p.expect(&Token::Colon)?;
    let parsed_ty = p.parse_ty()?;
    Ok(Arg {
        pat: parsed_pat,
        ty: parsed_ty,
        id: DUMMY_NODE_ID,
    })
}
/// Parse `src` as a single function argument (`pat: ty`), stripping
/// superfluous parens.  On failure the diagnostic is emitted and the process
/// panics.
pub fn parse_arg(sess: &Session, src: &str) -> Arg {
    let mut parser = make_parser(sess, src);
    parse_arg_inner(&mut parser)
        .map(|mut arg| {
            remove_paren(&mut arg);
            arg
        })
        .unwrap_or_else(|db| emit_and_panic(db, "arg"))
}
/// Run an arbitrary parsing closure `f` over a parser for `src`.  On failure
/// the diagnostic is emitted and the process panics.
pub fn run_parser<F, R>(sess: &Session, src: &str, f: F) -> R
where
    F: for<'a> FnOnce(&mut Parser<'a>) -> PResult<'a, R>,
{
    let mut parser = make_parser(sess, src);
    f(&mut parser).unwrap_or_else(|db| emit_and_panic(db, "src"))
}
/// Run an arbitrary parsing closure `f` over a parser for the token trees
/// `tts`.  On failure the diagnostic is emitted and the process panics.
pub fn run_parser_tts<F, R>(sess: &Session, tts: Vec<TokenTree>, f: F) -> R
where
    F: for<'a> FnOnce(&mut Parser<'a>) -> PResult<'a, R>,
{
    let mut parser = parse::new_parser_from_tts(&sess.parse_sess, tts);
    f(&mut parser).unwrap_or_else(|db| emit_and_panic(db, "tts"))
}
/// Run an arbitrary parsing closure `f` over a parser for `src`, returning
/// `None` on parse failure.  The failure diagnostic is cancelled rather than
/// emitted, so this never prints an error.
pub fn try_run_parser<F, R>(sess: &Session, src: &str, f: F) -> Option<R>
where
    F: for<'a> FnOnce(&mut Parser<'a>) -> PResult<'a, R>,
{
    let mut parser = make_parser(sess, src);
    f(&mut parser).map_err(|mut db| db.cancel()).ok()
}
/// Run an arbitrary parsing closure `f` over a parser for the token trees
/// `tts`, returning `None` on parse failure.  The failure diagnostic is
/// cancelled rather than emitted, so this never prints an error.
pub fn try_run_parser_tts<F, R>(sess: &Session, tts: Vec<TokenTree>, f: F) -> Option<R>
where
    F: for<'a> FnOnce(&mut Parser<'a>) -> PResult<'a, R>,
{
    let mut parser = parse::new_parser_from_tts(&sess.parse_sess, tts);
    f(&mut parser).map_err(|mut db| db.cancel()).ok()
}
/// Fabricate a `Span` covering the full text `s`, backed by a fresh anonymous
/// source file registered in `cm`.  The span carries an empty syntax context.
pub fn make_span_for_text(cm: &SourceMap, s: &str) -> Span {
    let file = cm.new_source_file(FileName::anon_source_code(s), s.to_string());
    Span::new(file.start_pos, file.end_pos, SyntaxContext::empty())
}