Skip to content

Commit

Permalink
Add IDE highlighting in string literals
Browse files — browse the repository at this point in the history
  • Loading branch information
Minigugus committed Feb 11, 2024
1 parent 2092174 commit 45f141a
Show file tree
Hide file tree
Showing 3 changed files with 15 additions and 13 deletions.
5 changes: 5 additions & 0 deletions src/bytecode.rs
Original file line number Diff line number Diff line change
Expand Up @@ -541,6 +541,8 @@ fn it_works() -> Result<()> {
#[test]
fn it_prints_functions_in_rust() -> Result<()> {
use crate::lexer::Token;

//language=rust
let mut tokens = Token::parse_ascii(r#"
pub fn life(mut a: i64) -> i64 {
40 + 2 * if a > 40 { (1) + { (4) } } else { 0 }
Expand Down Expand Up @@ -568,6 +570,8 @@ pub fn life(mut a: i64) -> i64 {
#[test]
fn it_runs_recursive_fibonacci() -> Result<()> {
use crate::lexer::Token;

//language=rust
let mut tokens = Token::parse_ascii(r#"
pub fn fib(n: i64) -> i64 {
if 2 > n {
Expand Down Expand Up @@ -600,6 +604,7 @@ pub fn fib(n: i64) -> i64 {
fn it_runs_fn_with_composed_types() -> Result<()> {
use crate::lexer::Token;

//language=rust
let mut tokens = Token::parse_ascii(r#"
fn new_point(x: i64, y: i64) -> Point {
Point {
Expand Down
8 changes: 3 additions & 5 deletions src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -433,8 +433,6 @@ pub fn parse_low_expression<'a>(narrowed: bool, tokens: &mut Vec<Token<'a>>) ->
}
}

// struct A { b: () }

pub fn parse_middle_low_expression<'a>(narrowed: bool, tokens: &mut Vec<Token<'a>>) -> Result<Expression<'a>> {
let mut prev = parse_low_expression(narrowed, tokens)?;
loop {
Expand Down Expand Up @@ -598,7 +596,7 @@ fn it_tokenize_struct_with_keywords_as_identifiers() -> Result<()> {

#[test]
fn it_tokenize_enum() -> Result<()> {
let mut tokens = Token::parse_ascii(r#"pub enum TokenKind {
let mut tokens = /*language=rust*/Token::parse_ascii(r#"pub enum TokenKind {
Equal,
Unexpected { character: char }
}"#)?;
Expand Down Expand Up @@ -634,7 +632,7 @@ fn it_tokenize_enum() -> Result<()> {

#[test]
fn it_tokenize_block_expression() -> Result<()> {
let mut tokens = Token::parse_ascii(r#"{
let mut tokens = /*language=rust*/Token::parse_ascii(r#"{
2 + --3 + 5
}"#)?;

Expand All @@ -661,7 +659,7 @@ fn it_tokenize_block_expression() -> Result<()> {

#[test]
fn it_tokenize_function_declaration() -> Result<()> {
let mut tokens = Token::parse_ascii(r#"pub fn ten(mut a: u32) {
let mut tokens = /*language=rust*/Token::parse_ascii(r#"pub fn ten(mut a: u32) {
2 + 3 + 5
}"#)?;

Expand Down
15 changes: 7 additions & 8 deletions src/transformer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -397,7 +397,7 @@ impl JavaModule {
fn it_generates_java() -> Result<(), Cow<'static, str>> {
use crate::lexer::Token;

let module = Module::parse_tokens("my_first_module", Token::parse_ascii(r#"
let module = Module::parse_tokens("my_first_module", /*language=rust*/Token::parse_ascii(r#"
fn new_point(x: i16, y: i16) -> Point {
Point {
x,
Expand Down Expand Up @@ -431,7 +431,7 @@ enum Shape {

assert_eq!(
java.resolve("my_first_module.Shape").map(ToString::to_string),
Some(r#"package my_first_module;
/*language=java*/Some(r#"package my_first_module;
sealed interface Shape {}
Expand All @@ -442,7 +442,7 @@ public record Rect(

assert_eq!(
java.resolve("my_first_module.Size").map(ToString::to_string),
Some(r#"package my_first_module;
/*language=java*/Some(r#"package my_first_module;
public record Size(
short width,
Expand All @@ -452,7 +452,7 @@ public record Size(

assert_eq!(
java.resolve("my_first_module.Point").map(ToString::to_string),
Some(r#"package my_first_module;
/*language=java*/Some(r#"package my_first_module;
record Point(
short x,
Expand All @@ -462,7 +462,7 @@ record Point(

assert_eq!(
java.resolve("my_first_module.Rectangle").map(ToString::to_string),
Some(r#"package my_first_module;
/*language=java*/Some(r#"package my_first_module;
record Rectangle(
my_first_module.Point origin,
Expand Down Expand Up @@ -532,10 +532,9 @@ record Rectangle(

#[test]
fn it_transform_enum() -> Result<(), Cow<'static, str>> {
use crate::lexer::TokenKind::*;

// tokenize
let tokens = Token::parse_ascii(r#"pub enum Price {
let tokens = /*language=rust*/Token::parse_ascii(r#"pub enum Price {
Limit,
Market,
StopLimit { stop_price: f64, },
Expand All @@ -557,7 +556,7 @@ fn it_transform_enum() -> Result<(), Cow<'static, str>> {

assert_eq!(
java.resolve("skull_test_transform_enum.Price").map(ToString::to_string),
Some(r#"package skull_test_transform_enum;
/*language=java*/Some(r#"package skull_test_transform_enum;
public sealed interface Price {}
Expand Down

0 comments on commit 45f141a

Please sign in to comment.