-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathlib.rs
109 lines (91 loc) · 4.03 KB
/
lib.rs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
#![warn(clippy::pedantic, clippy::nursery)]
#![allow(clippy::missing_errors_doc, clippy::must_use_candidate)]
#![allow(
clippy::cast_possible_wrap,
clippy::missing_panics_doc,
clippy::use_self,
clippy::module_name_repetitions,
clippy::vec_init_then_push,
clippy::large_enum_variant
)]
pub mod analyze;
pub mod common;
pub mod error;
pub mod generate;
pub mod parse;
pub mod preprocess;
pub mod tokenize;
use crate::generate::generate::Generator;
use crate::tokenize::debug_infos::FileInfo;
use analyze::analyze::Analyzer;
use analyze::analyze::ConvProgram;
use error::CompileError;
use parse::parse::Parser;
use parse::parse::Program;
use preprocess::preprocess::Preprocessor;
use preprocess::preprocessor_streams::{PreprocessorTokenContainerStream, PreprocessorTokenStream};
use preprocess::srccursor::SrcCursor;
use std::io::BufWriter;
use std::rc::Rc;
use tokenize::tokenize::Token as TokenizeToken;
use tokenize::tokenize::TokenKind as TokenizeTokenKind;
use tokenize::tokenize::TokenStream;
use tokenize::tokenize::Tokenizer;
/// Runs the full compilation pipeline — preprocess, tokenize, parse, analyze,
/// generate — on the given C source and returns the emitted assembly text.
///
/// The source is compiled as if it were a file named `"src.c"`; include
/// directives are resolved relative to the `"include"` directory.
///
/// # Errors
/// Returns a [`CompileError`] if any stage of the pipeline fails.
pub fn preprocess_and_compile(mut input: String) -> Result<String, CompileError> {
    use std::io::Write;
    // Ensure the final line is newline-terminated before lexing.
    input.push('\n');
    let file_name = "src.c".to_string();
    let main_file_info = Rc::new(FileInfo::new(file_name.clone(), input.clone()));
    let mut preprocessor = Preprocessor::new(main_file_info.clone(), "include");
    let tokens = preprocessor.preprocess(&mut SrcCursor::new(main_file_info), None)?;
    let stream = PreprocessorTokenStream::new(tokens.into_iter());
    let container_stream = PreprocessorTokenContainerStream::new(stream.collect());
    let mut tokenizer = Tokenizer::new(container_stream);
    let file_info = Rc::new(FileInfo::new(file_name, input));
    let tokens = tokenizer.tokenize(&file_info)?;
    let mut token_stream = TokenStream::new(tokens.into_iter());
    let mut parser = Parser::new();
    let program = parser.parse_program(&mut token_stream)?;
    let mut analyzer = Analyzer::new();
    let converted_program = analyzer.traverse_program(program)?;
    let mut buf_writer = BufWriter::new(Vec::new());
    let mut generator = Generator::new();
    generator.gen_head(&mut buf_writer, converted_program)?;
    buf_writer.flush()?;
    // into_inner cannot fail here: the writer was explicitly flushed above and
    // the underlying Vec<u8> never errors on write.
    let bytes = buf_writer
        .into_inner()
        .expect("flushed BufWriter over Vec<u8> must yield its buffer");
    // NOTE(review): assumes the generator emits only UTF-8 text — holds for
    // ASCII assembly; confirm if the generator ever writes raw bytes.
    let asm = String::from_utf8(bytes).expect("generated assembly must be valid UTF-8");
    Ok(asm)
}
/// Runs only the preprocessing stage on the given C source (as `"src.c"`,
/// with includes resolved under `"include"`) and returns the resulting
/// source text reassembled from the preprocessed character stream.
///
/// # Errors
/// Returns a [`CompileError`] if preprocessing fails.
pub fn preprocessed_source(mut input: String) -> Result<String, CompileError> {
    // Guarantee a trailing newline before handing the source to the cursor.
    input.push('\n');
    let file_info = Rc::new(FileInfo::new("src.c".to_string(), input.clone()));
    let mut pp = Preprocessor::new(file_info.clone(), "include");
    let pp_tokens = pp.preprocess(&mut SrcCursor::new(file_info), None)?;
    let token_stream = PreprocessorTokenStream::new(pp_tokens.into_iter());
    let chars = PreprocessorTokenContainerStream::new(token_stream.collect());
    // Each container item carries positional info alongside the character;
    // keep only the characters and join them into the preprocessed source.
    Ok(chars.map(|(_, ch)| ch).collect())
}
/// Preprocesses and tokenizes the given C source (as `"src.c"`, with includes
/// resolved under `"include"`), returning the raw token list.
///
/// # Errors
/// Returns a [`CompileError`] if preprocessing or tokenization fails.
pub fn tokens(mut input: String) -> Result<Vec<TokenizeToken<TokenizeTokenKind>>, CompileError> {
    // Guarantee a trailing newline before lexing.
    input.push('\n');
    let name = "src.c".to_string();
    let pp_file_info = Rc::new(FileInfo::new(name.clone(), input.clone()));
    let mut pp = Preprocessor::new(pp_file_info.clone(), "include");
    let pp_tokens = pp.preprocess(&mut SrcCursor::new(pp_file_info), None)?;
    let container = PreprocessorTokenContainerStream::new(
        PreprocessorTokenStream::new(pp_tokens.into_iter()).collect(),
    );
    // A second FileInfo over the same name/source backs the tokenizer's
    // debug/position information.
    let tokenize_file_info = Rc::new(FileInfo::new(name, input));
    Tokenizer::new(container).tokenize(&tokenize_file_info)
}
/// Tokenizes the given C source and parses it into the untyped AST.
///
/// # Errors
/// Returns a [`CompileError`] if any stage up to and including parsing fails.
pub fn parsed_ast(input: String) -> Result<Program, CompileError> {
    let mut stream = TokenStream::new(tokens(input)?.into_iter());
    Parser::new().parse_program(&mut stream)
}
/// Parses the given C source and runs semantic analysis over the AST,
/// returning the converted (analyzed) program.
///
/// # Errors
/// Returns a [`CompileError`] if parsing or analysis fails.
pub fn converted_ast(input: String) -> Result<ConvProgram, CompileError> {
    let program = parsed_ast(input)?;
    Analyzer::new().traverse_program(program)
}