author     bnewbold <bnewbold@robocracy.org>  2016-04-20 00:19:36 -0400
committer  bnewbold <bnewbold@robocracy.org>  2016-04-20 00:19:36 -0400
commit     c92f6f75ac38ede49e6b466da9c2be8da37e1bb9 (patch)
tree       72715e169b4b7f90f8a545ede562cf85d2614974 /minimal.rs
parent     e57b363b6cc9be3c4e3cd3cb04e37a29368ec532 (diff)
rust: flesh out parse function
Diffstat (limited to 'minimal.rs')
-rw-r--r--  minimal.rs | 48
1 file changed, 40 insertions(+), 8 deletions(-)
diff --git a/minimal.rs b/minimal.rs
index ee7c8f1..01e64c4 100644
--- a/minimal.rs
+++ b/minimal.rs
@@ -80,13 +80,43 @@ fn scheme_tokenize<'a>(raw_str: &'a str) -> Result<Vec<&'a str>, &'static str> {
     return Ok(ret);
 }
 
-fn scheme_parse_num(s: &String) -> Result<f64, &'static str> {
-    let num = 0.;
-    return Ok(num);
+fn scheme_parse_token(token: &str) -> Result<SchemeExpr, &'static str> {
+    // XXX: implement me
+    return Ok(SchemeExpr::SchemeNull);
 }
 
-fn scheme_parse<'a>(tokens: &Vec<&'a str>) -> Result<(SchemeExpr<'a>, usize), &'static str> {
-    return Ok((SchemeExpr::SchemeNull, 0));
+fn scheme_parse<'a>(tokens: &Vec<&'a str>, depth: u32) -> Result<(SchemeExpr<'a>, usize), &'static str> {
+    let mut ret = Vec::<SchemeExpr>::new();
+    let mut i: usize = 0;
+    if tokens.len() == 0 {
+        return Ok((SchemeExpr::SchemeNull, 0));
+    } else if tokens.len() == 1 {
+        let expr = try!(scheme_parse_token(tokens[0]));
+        return Ok((expr, 1));
+    }
+    while i < tokens.len() {
+        match tokens[i] {
+            "(" => {
+                let (expr, skip) = try!(scheme_parse(&tokens[i+1..].to_vec(), depth+1));
+                ret.push(expr);
+                i += skip;},
+            ")" => {
+                if depth == 0 {
+                    return Err("missing an open bracket");
+                }
+                return Ok((SchemeExpr::SchemeList(ret), i+1));},
+            token => {
+                let expr = try!(scheme_parse_token(token));
+                ret.push(expr);
+            }
+        }
+        i += 1;
+    }
+    if depth > 0 {
+        return Err("missing a close bracket");
+    }
+    let rlen = ret.len();
+    return Ok((SchemeExpr::SchemeList(ret), rlen));
 }
 
 fn scheme_eval<'a>(ast: &SchemeExpr) -> Result<SchemeExpr<'a>, &'static str> {
@@ -126,13 +156,15 @@ fn main() {
         }
         let tokens = match scheme_tokenize(&raw_input) {
             Ok(tokens) => {
-                println!("Tokens: {}", tokens.join(", "));
+                println!("Tokens: {}", tokens.join(", ")); // debug
                 tokens},
             Err(e) => {
                 println!("couldn't tokenize: {}", e);
                 continue}};
-        let ast = match scheme_parse(&tokens) {
-            Ok((ast, _)) => ast,
+        let ast = match scheme_parse(&tokens, 0) {
+            Ok((ast, _)) => {
+                println!("AST: {}", scheme_repr(&ast).unwrap());
+                ast},
             Err(e) => {
                 println!("couldn't parse: {}", e);
                 continue}};
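
Note: scheme_parse_token is still a stub in this patch (the "XXX: implement me" marker), so every atom currently parses to SchemeExpr::SchemeNull. Below is a minimal, self-contained sketch of the kind of token classification the stub leaves open; the stand-in enum and the SchemeNum and SchemeSymbol variants are assumptions for illustration, since this diff only shows the SchemeNull and SchemeList variants of the real SchemeExpr.

// Sketch only: stand-in enum so the example compiles on its own; the real
// SchemeExpr in spectrum may name its variants differently.
#[derive(Debug)]
enum SchemeExpr<'a> {
    SchemeNull,
    SchemeNum(f64),
    SchemeSymbol(&'a str),
}

// One plausible token-level parse: anything that reads as an f64 becomes a
// number, everything else becomes a bare symbol borrowed from the input.
fn scheme_parse_token(token: &str) -> Result<SchemeExpr, &'static str> {
    if token.is_empty() {
        return Ok(SchemeExpr::SchemeNull);
    }
    if let Ok(num) = token.parse::<f64>() {
        return Ok(SchemeExpr::SchemeNum(num));
    }
    Ok(SchemeExpr::SchemeSymbol(token))
}

fn main() {
    println!("{:?}", scheme_parse_token("42").unwrap());     // SchemeNum(42.0)
    println!("{:?}", scheme_parse_token("lambda").unwrap()); // SchemeSymbol("lambda")
}

With a token parser along these lines, the recursive scheme_parse above would turn a token stream like ["(", "+", "1", "2", ")"] into a SchemeList of atoms, using depth to catch unbalanced brackets.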