Skip to content

Commit 9a530e1

Browse files
seandewar authored and tjdevries committed
refactor: get rid of nightly rust dependency
Closes: #28
1 parent e0292d1 commit 9a530e1

2 files changed

Lines changed: 14 additions & 19 deletions

File tree

crates/vim9-gen/src/lib.rs

Lines changed: 2 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -1,5 +1,3 @@
1-
#![feature(iter_intersperse)]
2-
31
use std::{collections::HashMap, fmt::Write as _, path::Path};
42

53
use lexer::Lexer;
@@ -1020,8 +1018,8 @@ fn identifier_list(state: &mut State, unpacked: &UnpackIdentifier) -> String {
10201018
unpacked
10211019
.identifiers
10221020
.iter()
1023-
.map(|i| i.gen(state))
1024-
.intersperse(", ".to_string())
1021+
.flat_map(|i| [i.gen(state), ", ".to_string()])
1022+
.take((2 * unpacked.identifiers.len()).saturating_sub(1))
10251023
.collect()
10261024
}
10271025

crates/vim9-lexer/src/lib.rs

Lines changed: 12 additions & 15 deletions
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,3 @@
1-
#![feature(let_chains)]
21
#![allow(unreachable_code)]
32

43
use std::{
@@ -46,8 +45,6 @@ pub enum TokenText<'a> {
4645
Empty,
4746
}
4847

49-
50-
5148
impl<'a> From<TokenText<'a>> for String {
5249
fn from(val: TokenText<'a>) -> Self {
5350
match val {
@@ -436,7 +433,7 @@ impl Lexer {
436433
self.read_char();
437434

438435
// read the rest of the number
439-
while let Some(ch) = self.ch() && ch.is_numeric() {
436+
while self.ch().is_some_and(|ch| ch.is_numeric()) {
440437
self.read_char();
441438
}
442439

@@ -470,7 +467,7 @@ impl Lexer {
470467
F: Fn(&char) -> bool,
471468
{
472469
self.read_char();
473-
if let Some(ch) = self.ch() && ch == &until {
470+
if self.ch().is_some_and(|&ch| ch == until) {
474471
return Ok(Token {
475472
kind: passed,
476473
text: TokenText::Slice(self.chars[self.position()..self.position()].into()),
@@ -480,7 +477,7 @@ impl Lexer {
480477

481478
let position = self.position();
482479

483-
while let Some(ch) = self.ch() && ch != &until {
480+
while let Some(ch) = self.ch().filter(|&&ch| ch != until) {
484481
if fail(ch) {
485482
return Ok(Token {
486483
kind: failed,
@@ -538,7 +535,7 @@ impl Lexer {
538535
F: Fn(&char) -> bool,
539536
{
540537
self.read_char();
541-
if let Some(ch) = self.ch() && ch == &until {
538+
if self.ch().is_some_and(|&ch| ch == until) {
542539
return Ok(Some(Token {
543540
kind,
544541
text: TokenText::Slice(self.chars[self.position()..self.position()].into()),
@@ -548,7 +545,7 @@ impl Lexer {
548545

549546
let position = self.position();
550547

551-
while let Some(ch) = self.ch() && ch != &until {
548+
while let Some(ch) = self.ch().filter(|&&ch| ch != until) {
552549
if fail(ch) {
553550
return Ok(None);
554551
}
@@ -575,7 +572,7 @@ impl Lexer {
575572

576573
fn read_identifier(&self) -> Result<Token> {
577574
let position = self.position();
578-
while let Some(ch) = self.ch() && is_identifier(*ch) {
575+
while self.ch().is_some_and(|&ch| is_identifier(ch)) {
579576
self.read_char();
580577
}
581578

@@ -632,9 +629,9 @@ impl Lexer {
632629
return;
633630
}
634631

635-
while let Some(&ch) = self.ch() && ch.is_ascii_whitespace() {
632+
while let Some(&ch) = self.ch().filter(|&&ch| ch.is_ascii_whitespace()) {
636633
if ch == '\n' {
637-
return
634+
return;
638635
}
639636

640637
self.read_char();
@@ -913,7 +910,7 @@ impl Lexer {
913910
self.read_char();
914911

915912
let position = self.position();
916-
while let Some(&ch) = self.ch() && is_identifier(ch) {
913+
while self.ch().is_some_and(|&ch| is_identifier(ch)) {
917914
self.read_char();
918915
}
919916

@@ -967,9 +964,9 @@ impl Lexer {
967964
F: Fn(char) -> bool,
968965
{
969966
let mut n = 1;
970-
while let Some(peeked) = self.peek_n(n) && *peeked != '\n' {
971-
if f(*peeked) {
972-
return true
967+
while let Some(&peeked) = self.peek_n(n).filter(|&&peeked| peeked != '\n') {
968+
if f(peeked) {
969+
return true;
973970
}
974971

975972
n += 1;

0 commit comments

Comments
 (0)