Commit d849129
wip
hlorenzi committed Dec 31, 2023
1 parent 4c543f5
Showing 19 changed files with 514 additions and 147 deletions.
81 changes: 37 additions & 44 deletions src/asm/matcher/mod.rs
@@ -596,53 +596,40 @@ fn match_with_expr<'tokens>(
     match_so_far: &mut InstructionMatch)
     -> WorkingMatches<'tokens>
 {
-    if walker.is_at_partial()
-    {
-        match walker.maybe_expect_partial_usize()
-        {
-            None =>
-            {
-                return vec![];
-            }
-            Some(value) =>
-            {
-                let expr = expr::Value::make_integer(value)
-                    .make_literal();
-
-                match_so_far.args.push(InstructionArgument {
-                    kind: InstructionArgumentKind::Expr(expr),
-                    tokens: Vec::new(),
-                });
-            }
-        }
-    }
-    else
-    {
-        let token_start = walker.get_current_token_index();
-
-        let maybe_expr = parse_with_lookahead(
-            &rule.pattern,
-            at_pattern_part,
-            walker,
-            |walker| expr::parse_optional(walker));
-
-        let token_end = walker.get_current_token_index();
-
-        let expr = {
-            match maybe_expr
-            {
-                Some(expr) => expr,
-                None => return vec![],
-            }
-        };
-
-        match_so_far.args.push(InstructionArgument {
-            kind: InstructionArgumentKind::Expr(expr),
-            tokens: walker.get_cloned_tokens_by_index(
-                token_start,
-                token_end),
-        });
-    }
+    walker.reinterpret_next_tokens(&mut diagn::Report::new()).unwrap();
+
+    let token_start = walker.get_current_token_index();
+
+    let maybe_expr = parse_with_lookahead(
+        &rule.pattern,
+        at_pattern_part,
+        walker,
+        |walker| expr::parse_optional(walker));
+
+    dbg!(&maybe_expr);
+
+    let token_end = walker.get_current_token_index();
+
+    let expr = {
+        match maybe_expr
+        {
+            Some(expr) => expr,
+            None => return vec![],
+        }
+    };
+
+    let tokens = walker.get_cloned_tokens_by_index(
+        token_start,
+        token_end);
+
+    dbg!(&tokens);
+
+    match_so_far.args.push(InstructionArgument {
+        kind: InstructionArgumentKind::Expr(expr),
+        tokens,
+    });
+
+    dbg!(&match_so_far.args);
 
     match_with_rule(
         defs,
@@ -664,6 +651,8 @@ fn match_with_nested_ruledef<'tokens>(
     match_so_far: &mut InstructionMatch)
     -> WorkingMatches<'tokens>
 {
+    walker.reinterpret_next_tokens(&mut diagn::Report::new()).unwrap();
+
     let token_start = walker.get_current_token_index();
 
     let nested_matches = parse_with_lookahead(
@@ -687,11 +676,15 @@
 
     let mut match_so_far = match_so_far.clone();
 
+    let tokens = walker.get_cloned_tokens_by_index(
+        token_start,
+        walker.get_current_token_index());
+
+    dbg!(&tokens);
+
     match_so_far.args.push(InstructionArgument {
         kind: InstructionArgumentKind::Nested(nested_match.0),
-        tokens: walker.get_cloned_tokens_by_index(
-            token_start,
-            walker.get_current_token_index()),
+        tokens,
     });
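Note: both argument paths above now funnel through parse_with_lookahead before committing tokens. Below is a rough standalone sketch of that technique, with hypothetical signatures (the real helper also takes the rule pattern and the current pattern part to bound the lookahead, and its Walker is the crate's TokenWalker): run the sub-parse on a cloned cursor and adopt its position only on success, so a failed match leaves the walker untouched.

// Rough standalone sketch of lookahead parsing. Walker and
// parse_with_lookahead here are illustrative stand-ins, not the crate's API.
#[derive(Clone)]
struct Walker {
    index: usize,
}

fn parse_with_lookahead<T>(
    walker: &mut Walker,
    parse: impl Fn(&mut Walker) -> Option<T>)
    -> Option<T>
{
    // Speculate on a clone, so failure has no side effects on `walker`.
    let mut speculative = walker.clone();
    let result = parse(&mut speculative)?;

    // Commit: adopt the speculative cursor position.
    *walker = speculative;
    Some(result)
}

fn main() {
    let mut walker = Walker { index: 0 };

    // A toy sub-parse that consumes three tokens and succeeds.
    let parsed = parse_with_lookahead(&mut walker, |w| {
        w.index += 3;
        Some("expr")
    });

    assert_eq!(parsed, Some("expr"));
    assert_eq!(walker.index, 3);
}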
2 changes: 1 addition & 1 deletion src/asm/parser/directive_ruledef.rs
@@ -61,7 +61,7 @@ pub fn parse(
     -> Result<AstDirectiveRuledef, ()>
 {
     let tk_name = walker.maybe_expect(syntax::TokenKind::Identifier);
-    let name = tk_name.map(|tk| tk.excerpt.clone().unwrap());
+    let name = tk_name.clone().map(|tk| tk.excerpt.clone().unwrap());
     let name_span = tk_name
         .map(|tk| tk.span)
         .unwrap_or_else(|| header_span);
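The added .clone() compensates for tk_name presumably now being an owned Option<Token> rather than a reference: Option::map takes self by value, so the first map would move tk_name and the later name_span computation could no longer use it. A minimal sketch of the issue, using a hypothetical Token stand-in and a plain tuple in place of the crate's span type:

// Hypothetical Token stand-in; only the two fields used here.
#[derive(Clone)]
struct Token {
    excerpt: Option<String>,
    span: (u32, u32),
}

fn main() {
    let tk_name: Option<Token> = Some(Token {
        excerpt: Some("ruledef_name".to_string()),
        span: (0, 12),
    });

    // Without the clone, this `.map` would move `tk_name`...
    let name = tk_name.clone().map(|tk| tk.excerpt.clone().unwrap());

    // ...and this second use would then fail to compile.
    let name_span = tk_name.map(|tk| tk.span).unwrap_or((0, 0));

    println!("{:?} {:?}", name, name_span);
}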
52 changes: 13 additions & 39 deletions src/asm/parser/mod.rs
@@ -4,36 +4,24 @@ use crate::*;
 mod directive;
 
 mod directive_addr;
-pub use directive_addr::{
-    AstDirectiveAddr,
-};
+pub use directive_addr::AstDirectiveAddr;
 
 mod directive_align;
-pub use directive_align::{
-    AstDirectiveAlign,
-};
+pub use directive_align::AstDirectiveAlign;
 
 mod directive_bank;
-pub use directive_bank::{
-    AstDirectiveBank,
-};
+pub use directive_bank::AstDirectiveBank;
 
 mod directive_bankdef;
-pub use directive_bankdef::{
-    AstDirectiveBankdef,
-};
+pub use directive_bankdef::AstDirectiveBankdef;
 
 mod directive_bits;
-pub use directive_bits::{
-    AstDirectiveBits,
-};
+pub use directive_bits::AstDirectiveBits;
 
 mod directive_const;
 
 mod directive_data;
-pub use directive_data::{
-    AstDirectiveData,
-};
+pub use directive_data::AstDirectiveData;
 
 mod directive_fn;
 pub use directive_fn::{
@@ -42,34 +30,22 @@ pub use directive_fn::{
 };
 
 mod directive_if;
-pub use directive_if::{
-    AstDirectiveIf,
-};
+pub use directive_if::AstDirectiveIf;
 
 mod directive_include;
-pub use directive_include::{
-    AstDirectiveInclude,
-};
+pub use directive_include::AstDirectiveInclude;
 
 mod directive_labelalign;
-pub use directive_labelalign::{
-    AstDirectiveLabelAlign,
-};
+pub use directive_labelalign::AstDirectiveLabelAlign;
 
 mod directive_noemit;
-pub use directive_noemit::{
-    AstDirectiveNoEmit,
-};
+pub use directive_noemit::AstDirectiveNoEmit;
 
 mod directive_once;
-pub use directive_once::{
-    AstDirectiveOnce,
-};
+pub use directive_once::AstDirectiveOnce;
 
 mod directive_res;
-pub use directive_res::{
-    AstDirectiveRes,
-};
+pub use directive_res::AstDirectiveRes;
 
 mod directive_ruledef;
 pub use directive_ruledef::{
@@ -87,9 +63,7 @@ pub use fields::{
 };
 
 mod instruction;
-pub use instruction::{
-    AstInstruction,
-};
+pub use instruction::AstInstruction;
 
 mod symbol;
 pub use symbol::{
12 changes: 6 additions & 6 deletions src/asm/resolver/eval_asm.rs
@@ -152,19 +152,19 @@ pub fn eval_asm(
 }
 
 
-struct AsmSubstitution<'a>
+struct AsmSubstitution
 {
     pub start: usize,
     pub end: usize,
-    pub name: &'a str,
+    pub name: String,
     pub span: diagn::Span,
 }
 
 
 fn parse_substitutions<'tokens>(
     report: &mut diagn::Report,
     tokens: &'tokens [syntax::Token])
-    -> Result<Vec<AsmSubstitution<'tokens>>, ()>
+    -> Result<Vec<AsmSubstitution>, ()>
 {
     let mut substs = Vec::new();
 
@@ -180,7 +180,7 @@ fn parse_substitutions<'tokens>(
             report,
             syntax::TokenKind::Identifier)?;
 
-        let name = tk_name.excerpt.as_ref().unwrap();
+        let name = tk_name.clone().excerpt.unwrap();
         let span = tk_name.span;
 
         walker.expect(
@@ -208,7 +208,7 @@
 
 fn perform_substitutions<'tokens>(
     tokens: &'tokens [syntax::Token],
-    substs: &Vec<AsmSubstitution<'tokens>>,
+    substs: &Vec<AsmSubstitution>,
     info: &mut expr::EvalAsmBlockQuery)
     -> Result<Vec<syntax::Token>, ()>
 {
@@ -225,7 +225,7 @@ fn perform_substitutions<'tokens>(
     }
 
     let token_subst = {
-        match info.eval_ctx.get_token_subst(subst.name)
+        match info.eval_ctx.get_token_subst(&subst.name)
         {
             Some(t) => t,
             None =>
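The common thread in this file: AsmSubstitution now owns its name (String instead of &'a str), which removes the <'a> and <'tokens> lifetime parameters at the cost of one allocation per substitution. A minimal sketch, using hypothetical stand-in types, of what ownership buys, namely that the parsed substitutions can outlive the buffer they were parsed from:

// Hypothetical stand-ins: a plain tuple replaces diagn::Span, and string
// slices replace the crate's token excerpts.
struct AsmSubstitution {
    start: usize,
    end: usize,
    name: String,          // was: name: &'a str
    span: (usize, usize),
}

fn parse_substitutions(names: &[&str]) -> Vec<AsmSubstitution> {
    // The returned Vec no longer borrows from `names`.
    names
        .iter()
        .enumerate()
        .map(|(i, name)| AsmSubstitution {
            start: i,
            end: i + 1,
            name: name.to_string(),
            span: (0, 0),
        })
        .collect()
}

fn main() {
    let substs = {
        let tokens = vec!["x".to_string(), "y".to_string()];
        let names: Vec<&str> = tokens.iter().map(|s| s.as_str()).collect();
        parse_substitutions(&names)
        // `tokens` drops here; `substs` stays valid because names are owned.
    };
    for s in &substs {
        println!("{} [{}..{}] span {:?}", s.name, s.start, s.end, s.span);
    }
}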
13 changes: 13 additions & 0 deletions src/diagn/span.rs
@@ -8,6 +8,8 @@ pub type SpanIndex = u32;
 pub struct Span
 {
     pub file_handle: util::FileServerHandle,
+
+    /// Represents byte indices (not UTF-8 char indices)
     location: (SpanIndex, SpanIndex),
 }
 
@@ -45,6 +47,17 @@ impl Span
 
         Some(self.location)
     }
+
+
+    pub fn length(&self) -> usize
+    {
+        if self.location.0 == SpanIndex::MAX
+        {
+            return 0;
+        }
+
+        (self.location.1 - self.location.0) as usize
+    }
 
 
     pub fn before(&self) -> Span
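The new length() returns the span's extent in bytes (per the new doc comment on location) and guards the case where location.0 is SpanIndex::MAX, which from this guard looks like a sentinel for spans with no real source location. A standalone sketch of just that logic (types mirrored here for illustration; the real struct also carries a file handle):

// Mirrored types; (SpanIndex::MAX, _) is treated as "no location",
// an interpretation inferred from the guard, not stated in the diff.
type SpanIndex = u32;

struct Span {
    // Byte indices into the source, per the new doc comment.
    location: (SpanIndex, SpanIndex),
}

impl Span {
    fn length(&self) -> usize {
        if self.location.0 == SpanIndex::MAX {
            return 0;
        }
        (self.location.1 - self.location.0) as usize
    }
}

fn main() {
    assert_eq!(Span { location: (3, 10) }.length(), 7);
    assert_eq!(Span { location: (SpanIndex::MAX, SpanIndex::MAX) }.length(), 0);
}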
8 changes: 5 additions & 3 deletions src/syntax/mod.rs
@@ -3,13 +3,15 @@ pub use self::token::{
     Token,
     TokenKind,
     tokenize,
+    decide_next_token,
     is_whitespace,
 };
 
 mod token_walker;
-pub use self::token_walker::{
-    TokenWalker,
-};
+pub use self::token_walker::TokenWalker;
+
+mod walker;
+pub use self::walker::Walker;
 
 mod excerpt;
 pub use self::excerpt::{
23 changes: 15 additions & 8 deletions src/syntax/token.rs
@@ -263,14 +263,7 @@ pub fn tokenize(
         let remaining = &src.get(index..).unwrap();
 
         // Decide what the next token's kind and length are.
-        let (kind, length) =
-            check_for_whitespace(remaining).unwrap_or_else(||
-            check_for_comment   (remaining).unwrap_or_else(||
-            check_for_number    (remaining).unwrap_or_else(||
-            check_for_identifier(remaining).unwrap_or_else(||
-            check_for_special   (remaining).unwrap_or_else(||
-            check_for_string    (remaining).unwrap_or_else(||
-            (TokenKind::Error, 1)))))));
+        let (kind, length) = decide_next_token(&remaining);
 
         let span = diagn::Span::new(
             src_file_handle,
@@ -315,6 +308,20 @@ pub fn tokenize(
 }
 
 
+pub fn decide_next_token(
+    src: &str)
+    -> (TokenKind, usize)
+{
+    check_for_whitespace(src).unwrap_or_else(||
+    check_for_comment   (src).unwrap_or_else(||
+    check_for_number    (src).unwrap_or_else(||
+    check_for_identifier(src).unwrap_or_else(||
+    check_for_special   (src).unwrap_or_else(||
+    check_for_string    (src).unwrap_or_else(||
+    (TokenKind::Error, 1)))))))
+}
+
+
 #[derive(Clone)]
 struct CharWalker<'a>
 {
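decide_next_token is the old tokenize decision chain factored out and exported (see the src/syntax/mod.rs change above), which suggests callers can now retokenize a slice of source on the fly, e.g. the reinterpret_next_tokens calls in the matcher. A self-contained toy version with simplified checkers (the real ones also handle comments, strings, and multi-character operators), showing the always-makes-progress contract where the fallback (TokenKind::Error, 1) guarantees the caller advances by at least one byte:

// Toy reimplementation: each checker returns Option<(TokenKind, usize)>,
// and the chain falls through to (TokenKind::Error, 1).
#[derive(Debug)]
enum TokenKind { Whitespace, Number, Identifier, Error }

fn check_for_whitespace(src: &str) -> Option<(TokenKind, usize)> {
    let len: usize = src.chars()
        .take_while(|c| c.is_whitespace())
        .map(char::len_utf8)
        .sum();
    if len > 0 { Some((TokenKind::Whitespace, len)) } else { None }
}

fn check_for_number(src: &str) -> Option<(TokenKind, usize)> {
    let len: usize = src.chars()
        .take_while(|c| c.is_ascii_digit())
        .map(char::len_utf8)
        .sum();
    if len > 0 { Some((TokenKind::Number, len)) } else { None }
}

fn check_for_identifier(src: &str) -> Option<(TokenKind, usize)> {
    let len: usize = src.chars()
        .take_while(|c| c.is_alphanumeric() || *c == '_')
        .map(char::len_utf8)
        .sum();
    if len > 0 { Some((TokenKind::Identifier, len)) } else { None }
}

fn decide_next_token(src: &str) -> (TokenKind, usize) {
    check_for_whitespace(src).unwrap_or_else(||
    check_for_number    (src).unwrap_or_else(||
    check_for_identifier(src).unwrap_or_else(||
    (TokenKind::Error, 1))))
}

fn main() {
    // Walk a string token by token, always advancing by the returned length.
    let src = "abc 123 !";
    let mut index = 0;
    while index < src.len() {
        let (kind, length) = decide_next_token(&src[index..]);
        println!("{:?}: {:?}", kind, &src[index..index + length]);
        index += length;
    }
}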