Skip to content

Instantly share code, notes, and snippets.

@CATboardBETA
Created Jun 6, 2022
Embed
What would you like to do?
qiish-parse/src/lib.rs
// Copyright (c) 2022 The Quantii Contributors
//
// This file is part of Quantii.
//
// Quantii is free software: you can redistribute
// it and/or modify it under the terms of the GNU
// Lesser General Public License as published by
// the Free Software Foundation, either version 3
// of the License, or (at your option) any later
// version.
//
// Quantii is distributed in the hope that it
// will be useful, but WITHOUT ANY WARRANTY;
// without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR
// PURPOSE. See the GNU Lesser General Public
// License for more details.
//
// You should have received a copy of the GNU
// Lesser General Public License along with
// Quantii. If not, see <https://www.gnu.org/licenses/>.
//! Core parser for Quantii Shell (Qiish).
// section clippy
#![warn(
clippy::all,
clippy::restriction,
clippy::pedantic,
clippy::nursery,
clippy::cargo
)]
#![allow(clippy::implicit_return)]
#![allow(clippy::missing_inline_in_public_items)]
#![allow(clippy::print_stdout)]
#![allow(clippy::blanket_clippy_restriction_lints)]
#![allow(clippy::unwrap_used)]
#![allow(clippy::let_underscore_drop)]
#![allow(clippy::indexing_slicing)]
#![allow(clippy::inline_always)]
#![allow(clippy::unwrap_in_result)]
#![allow(clippy::as_conversions)]
#![allow(clippy::integer_arithmetic)]
#![allow(clippy::cast_possible_truncation)]
#![allow(clippy::panic)]
#![allow(dead_code)]
#![allow(clippy::unseparated_literal_suffix)]
use qiish_lex::{Lexer, Token, TokenType};
/// The core parser for Qiish.
///
/// Wraps a `Lexer` and, while draining its token stream, tracks the
/// nesting depth of every delimiter kind in a dedicated stack.
struct Parser {
    /// The lexer whose output stream this parser consumes.
    lexer: Lexer,
    /// The outputted stream of parsed tokens.
    output: ParsedTokenStream,
    /// Nesting stack for curly braces `{` / `}`.
    brace_stack: BraceStack,
    /// Nesting stack for square brackets `[` / `]`.
    bracket_stack: BracketStack,
    /// Nesting stack for parentheses `(` / `)`.
    paren_stack: ParenthesisStack,
    /// Nesting stack for double square brackets `[[` / `]]`.
    double_bracket_stack: DoubleBracketStack,
    /// Nesting stack for double parentheses `((` / `))`.
    double_paren_stack: DoubleParenthesisStack,
}
/// A token, in a syntactic context, that can be parsed further.
#[derive(Debug, Clone)]
struct ParsedToken {
    /// The token itself.
    base_token: Token,
    /// The token on the left, in case of `base_token` being an operator.
    ///
    /// NOTE(review): a `&'static ParsedToken` can only be produced from a
    /// true `static` or a leaked allocation (`Box::leak`) — confirm this is
    /// intended; `Option<Box<ParsedToken>>` may be a better fit.
    left_operand: Option<&'static ParsedToken>,
    /// The token on the right, in case of `base_token` being an operator.
    /// Same `'static` caveat as `left_operand`.
    right_operand: Option<&'static ParsedToken>,
}
/// A stream of syntactic tokens.
///
/// Can only be used as a `mut`, since reading (`next`) advances the
/// internal cursor.
#[derive(Debug)]
struct ParsedTokenStream {
    /// Backing array of the tokens.
    tokens: Vec<ParsedToken>,
    /// Cursor: index of the next token `next()` will return.
    index: u64,
}
impl ParsedTokenStream {
    /// Creates an empty token stream with the cursor at position 0.
    pub const fn new() -> Self {
        Self {
            tokens: Vec::new(),
            index: 0,
        }
    }

    /// Appends `token` to the end of the stream.
    pub fn push(&mut self, token: ParsedToken) {
        self.tokens.push(token);
    }

    /// Consumes and returns the token under the cursor, advancing it.
    ///
    /// Panics if the stream is exhausted; guard with `has_next`.
    pub fn next(&mut self) -> ParsedToken {
        let position = self.index as usize;
        self.index += 1;
        self.tokens[position].clone()
    }

    /// Whether any unconsumed tokens remain.
    pub fn has_next(&self) -> bool {
        (self.tokens.len() as u64) > self.index
    }

    /// Returns the token `index` positions past the cursor without
    /// consuming anything. Panics if that position is out of bounds.
    pub fn lookahead(&self, index: u64) -> ParsedToken {
        let target = self.index + index;
        self.tokens[target as usize].clone()
    }

    /// Sets the left operand of the token at absolute position `index`.
    pub fn change_left_operand(&mut self, index: usize, left_op_tk: &'static ParsedToken) {
        self.tokens[index].left_operand = Some(left_op_tk);
    }

    /// Sets the right operand of the token at absolute position `index`.
    pub fn change_right_operand(&mut self, index: usize, right_op_tk: &'static ParsedToken) {
        self.tokens[index].right_operand = Some(right_op_tk);
    }
}
/// A stack of braces, brackets, parentheses, double brackets, and/or
/// double parentheses.
///
/// Implementations track a signed depth counter; the depth may go
/// negative if `pop` is called more often than `push` (unbalanced input).
trait Stack {
    /// Push a new stack layer (enter one nesting level).
    fn push(&mut self);
    /// Pop a stack layer (leave one nesting level).
    fn pop(&mut self);
    /// Get the current stack depth.
    fn peek(&self) -> i64;
}
/// Stack of curly braces `{` / `}`.
struct BraceStack {
    /// Backing vector of brace tokens.
    ///
    /// NOTE(review): never read or written by the `Stack` impl below —
    /// confirm whether it is still needed.
    stack: Vec<Token>,
    /// Current brace nesting depth.
    brace_layer: i64,
}

impl Stack for BraceStack {
    /// Report the current brace depth.
    fn peek(&self) -> i64 {
        self.brace_layer
    }

    /// Enter one brace level.
    fn push(&mut self) {
        self.brace_layer += 1;
    }

    /// Leave one brace level; may go negative on unbalanced input.
    fn pop(&mut self) {
        self.brace_layer -= 1;
    }
}
/// Stack of square brackets `[` / `]`.
struct BracketStack {
    /// Backing vector of bracket tokens.
    ///
    /// NOTE(review): never read or written by the `Stack` impl below —
    /// confirm whether it is still needed.
    stack: Vec<Token>,
    /// Current bracket nesting depth.
    bracket_layer: i64,
}

impl Stack for BracketStack {
    /// Report the current bracket depth.
    fn peek(&self) -> i64 {
        self.bracket_layer
    }

    /// Enter one bracket level.
    fn push(&mut self) {
        self.bracket_layer += 1;
    }

    /// Leave one bracket level; may go negative on unbalanced input.
    fn pop(&mut self) {
        self.bracket_layer -= 1;
    }
}
/// Stack of parentheses `(` / `)`.
struct ParenthesisStack {
    /// Backing vector of parenthesis tokens.
    ///
    /// NOTE(review): never read or written by the `Stack` impl below —
    /// confirm whether it is still needed.
    stack: Vec<Token>,
    /// Current parenthesis nesting depth.
    parenthesis_layer: i64,
}

impl Stack for ParenthesisStack {
    /// Report the current parenthesis depth.
    fn peek(&self) -> i64 {
        self.parenthesis_layer
    }

    /// Enter one parenthesis level.
    fn push(&mut self) {
        self.parenthesis_layer += 1;
    }

    /// Leave one parenthesis level; may go negative on unbalanced input.
    fn pop(&mut self) {
        self.parenthesis_layer -= 1;
    }
}
/// Stack of double square brackets `[[` / `]]`.
struct DoubleBracketStack {
    /// Backing vector of double-bracket tokens.
    ///
    /// NOTE(review): never read or written by the `Stack` impl below —
    /// confirm whether it is still needed.
    stack: Vec<Token>,
    /// Current double-bracket nesting depth.
    double_bracket_layer: i64,
}

impl Stack for DoubleBracketStack {
    /// Report the current double-bracket depth.
    fn peek(&self) -> i64 {
        self.double_bracket_layer
    }

    /// Enter one double-bracket level.
    fn push(&mut self) {
        self.double_bracket_layer += 1;
    }

    /// Leave one double-bracket level; may go negative on unbalanced input.
    fn pop(&mut self) {
        self.double_bracket_layer -= 1;
    }
}
/// Stack of double parentheses `((` / `))`.
struct DoubleParenthesisStack {
    /// Backing vector of double-parenthesis tokens.
    ///
    /// NOTE(review): never read or written by the `Stack` impl below —
    /// confirm whether it is still needed.
    stack: Vec<Token>,
    /// Current double-parenthesis nesting depth.
    double_parenthesis_layer: i64,
}

impl Stack for DoubleParenthesisStack {
    /// Report the current double-parenthesis depth.
    fn peek(&self) -> i64 {
        self.double_parenthesis_layer
    }

    /// Enter one double-parenthesis level.
    fn push(&mut self) {
        self.double_parenthesis_layer += 1;
    }

    /// Leave one double-parenthesis level; may go negative on unbalanced input.
    fn pop(&mut self) {
        self.double_parenthesis_layer -= 1;
    }
}
impl Parser {
    /// Creates a new parser over `lexer`, with an empty output stream and
    /// every delimiter stack at depth zero.
    pub const fn new(lexer: Lexer) -> Self {
        Self {
            lexer,
            output: ParsedTokenStream::new(),
            brace_stack: BraceStack {
                stack: Vec::new(),
                brace_layer: 0,
            },
            bracket_stack: BracketStack {
                stack: Vec::new(),
                bracket_layer: 0,
            },
            paren_stack: ParenthesisStack {
                stack: Vec::new(),
                parenthesis_layer: 0,
            },
            double_bracket_stack: DoubleBracketStack {
                stack: Vec::new(),
                double_bracket_layer: 0,
            },
            double_paren_stack: DoubleParenthesisStack {
                stack: Vec::new(),
                double_parenthesis_layer: 0,
            },
        }
    }

    /// Drains the lexer's token stream, updating the delimiter stacks as
    /// opening and closing tokens are encountered. All other token kinds
    /// are currently ignored.
    pub fn parse(&mut self) {
        while self.lexer.output.has_next() {
            let token: &Token = self.lexer.output.next();
            match token.get_type() {
                // Opening delimiters: enter one nesting level.
                TokenType::LeftCurlyBrace => self.brace_stack.push(),
                TokenType::LeftSquareBracket => self.bracket_stack.push(),
                TokenType::LeftParen => self.paren_stack.push(),
                TokenType::LeftDoubleSquareBracket => self.double_bracket_stack.push(),
                TokenType::LeftDoubleParen => self.double_paren_stack.push(),
                // Closing delimiters: leave one nesting level.
                TokenType::RightCurlyBrace => self.brace_stack.pop(),
                TokenType::RightSquareBracket => self.bracket_stack.pop(),
                TokenType::RightParen => self.paren_stack.pop(),
                TokenType::RightDoubleSquareBracket => self.double_bracket_stack.pop(),
                TokenType::RightDoubleParen => self.double_paren_stack.pop(),
                // Reached end of file.
                TokenType::EOF => {}
                // Redirections, pipes, shell keywords, and plain text are
                // not handled yet.
                TokenType::Redir
                | TokenType::RedirAppend
                | TokenType::ForceRedir
                | TokenType::Pipe
                | TokenType::Case
                | TokenType::Coproc
                | TokenType::Do
                | TokenType::Done
                | TokenType::Elif
                | TokenType::Else
                | TokenType::Esac
                | TokenType::Fi
                | TokenType::For
                | TokenType::Function
                | TokenType::If
                | TokenType::In
                | TokenType::Select
                | TokenType::Then
                | TokenType::Time
                | TokenType::Until
                | TokenType::While
                | TokenType::Text => {}
            }
        }
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment