// cranelift_isle/compile.rs
1//! Compilation process, from AST to Sema to Sequences of Insts.
2
3use std::path::Path;
4use std::sync::Arc;
5
6use crate::ast::Def;
7use crate::error::Errors;
8use crate::files::Files;
9use crate::{ast, codegen, overlap, sema};
10
11/// Compile the given AST definitions into Rust source code.
12pub fn compile(
13    files: Arc<Files>,
14    defs: &[ast::Def],
15    options: &codegen::CodegenOptions,
16) -> Result<String, Errors> {
17    let mut type_env = match sema::TypeEnv::from_ast(defs) {
18        Ok(type_env) => type_env,
19        Err(errs) => return Err(Errors::new(errs, files)),
20    };
21    let term_env = match sema::TermEnv::from_ast(&mut type_env, defs, true) {
22        Ok(term_env) => term_env,
23        Err(errs) => return Err(Errors::new(errs, files)),
24    };
25    let terms = match overlap::check(&term_env) {
26        Ok(terms) => terms,
27        Err(errs) => return Err(Errors::new(errs, files)),
28    };
29
30    Ok(codegen::codegen(
31        files, &type_env, &term_env, &terms, options,
32    ))
33}
34
35/// Compile the given files into Rust source code.
36pub fn from_files<P: AsRef<Path>>(
37    inputs: impl IntoIterator<Item = P>,
38    options: &codegen::CodegenOptions,
39) -> Result<String, Errors> {
40    let files = match Files::from_paths(inputs) {
41        Ok(files) => files,
42        Err((path, err)) => {
43            return Err(Errors::from_io(
44                err,
45                format!("cannot read file {}", path.display()),
46            ))
47        }
48    };
49
50    let files = Arc::new(files);
51
52    let mut defs = Vec::new();
53    for (file, src) in files.file_texts.iter().enumerate() {
54        let lexer = match crate::lexer::Lexer::new(file, src) {
55            Ok(lexer) => lexer,
56            Err(err) => return Err(Errors::new(vec![err], files)),
57        };
58
59        match crate::parser::parse(lexer) {
60            Ok(mut ds) => defs.append(&mut ds),
61            Err(err) => return Err(Errors::new(vec![err], files)),
62        }
63    }
64
65    compile(files, &defs, options)
66}
67
68/// Construct the ISLE type and term environments for further analysis
69/// (i.e., verification), without going all the way through codegen.
70pub fn create_envs(
71    inputs: Vec<std::path::PathBuf>,
72) -> Result<(sema::TypeEnv, sema::TermEnv, Vec<Def>), Errors> {
73    let files = match Files::from_paths(inputs) {
74        Ok(files) => files,
75        Err((path, err)) => {
76            return Err(Errors::from_io(
77                err,
78                format!("cannot read file {}", path.display()),
79            ))
80        }
81    };
82    let files = Arc::new(files);
83    let mut defs = Vec::new();
84    for (file, src) in files.file_texts.iter().enumerate() {
85        let lexer = match crate::lexer::Lexer::new(file, src) {
86            Ok(lexer) => lexer,
87            Err(err) => return Err(Errors::new(vec![err], files)),
88        };
89
90        match crate::parser::parse(lexer) {
91            Ok(mut ds) => defs.append(&mut ds),
92            Err(err) => return Err(Errors::new(vec![err], files)),
93        }
94    }
95    let mut type_env = match sema::TypeEnv::from_ast(&defs) {
96        Ok(type_env) => type_env,
97        Err(errs) => return Err(Errors::new(errs, files)),
98    };
99    // We want to allow annotations on terms with internal extractors,
100    // so we avoid expanding them within the sema rules.
101    let term_env = match sema::TermEnv::from_ast(&mut type_env, &defs, false) {
102        Ok(term_env) => term_env,
103        Err(errs) => return Err(Errors::new(errs, files)),
104    };
105    Ok((type_env, term_env, defs))
106}