Initial commit

This commit is contained in:
2026-02-23 08:51:37 -05:00
commit 757a132930
25 changed files with 1780 additions and 0 deletions

11
Cargo.toml Normal file
View File

@@ -0,0 +1,11 @@
[workspace]
resolver = "2"
members = [
"aurac_lexer",
"aurac_parser",
"aurac_typechecker",
"aurac_codegen",
"aurac",
# Future components (not yet created):
# "aura_core", "aura_std"
]

40
README.md Normal file
View File

@@ -0,0 +1,40 @@
# 🚀 Aura Programming Language
**Aura** to eksperymentalny, ultrawydajny język programowania stworzony z myślą o wyzwaniach inżynierii oprogramowania 2026 roku. Łączy w sobie bezwzględne bezpieczeństwo pamięci znane z Rusta, matematyczne dowody poprawności z Haskella oraz minimalistyczną, czytelną składnię inspirowaną Pythonem.
Kompilator Aury (`aurac`) został w całości napisany w Ruście i kompiluje kod źródłowy bezpośrednio do **LLVM IR** (Intermediate Representation), zapewniając wydajność na poziomie C/C++ przy zerowym narzucie w czasie działania programu (Zero-Cost Abstractions).
---
## ✨ Główne założenia (Filozofia Języka)
Aura rozwiązuje największy problem współczesnego IT: **brak zaufania do kodu**. Zamiast polegać na testach jednostkowych i Garbage Collectorze, Aura przenosi cały ciężar weryfikacji na etap kompilacji.
### 1. Niewidzialny Borrow Checker (Elided Affine Typing)
Koniec z wyciekami pamięci, wskaźnikami `null` i skomplikowanymi adnotacjami czasu życia (`<'a>`). Aura używa analizy przepływu danych (Dataflow Analysis), aby automatycznie śledzić "własność" (Ownership) zmiennych. Gdy przekazujesz zmienną do funkcji, zostaje ona skonsumowana. Próba jej ponownego użycia (Use-After-Move) kończy się natychmiastowym błędem kompilacji.
### 2. Typy Semantyczne (Refinement Types) i Silnik Wnioskowania
Typ `f32` to tylko informacja o rozmiarze w pamięci. W Aurze typy niosą ze sobą matematyczne gwarancje!
Dzięki wbudowanemu weryfikatorowi (Symbolic Verifier), kompilator na etapie budowania programu udowadnia, że operacje są logicznie poprawne.
Zamiast pisać `if czas < 0`, definiujesz typ: `type PositiveTime = f32{t | t > 0.0}`. Jeśli kompilator nie jest w stanie matematycznie udowodnić, że zmienna spełnia ten warunek, program się nie skompiluje.
### 3. Zorientowanie na Dane (Data-Oriented Design)
Aura odrzuca tradycyjne programowanie obiektowe (OOP) na rzecz czystych transformacji danych. Skupia się na lokalności pamięci podręcznej (Cache Locality), co czyni ją idealną do pisania silników gier, systemów wbudowanych i oprogramowania HFT (High-Frequency Trading).
---
## 💻 Przykłady Kodu
### Symulacja Fizyki z Gwarancją Matematyczną
```aura
// Typ ograniczony: Czas musi być zawsze dodatni!
type PositiveTime = f32{t | t > 0.0}
pure fn calculate_new_position(initial_x: f32, v: f32, dt: PositiveTime) -> f32:
// Mnożenie: Zmienne 'v' i 'dt' zostają skonsumowane przez Borrow Checkera
let displacement = v * dt
// Dodawanie: 'initial_x' i 'displacement' zostają skonsumowane
let new_x = initial_x + displacement
return new_x

16
ai_kernel.aura Normal file
View File

@@ -0,0 +1,16 @@
// Mathematical rules for memory, encoded as refinement types:
// A matrix dimension must be greater than zero.
type ValidDim = i32{d | d > 0}
// A GPU thread index (thread_id) must never be negative.
type SafeIndex = i32{i | i >= 0}
// GPU kernel: a pure function — no global state, no data races.
gpu fn matmul_step(weight: f32, activation: f32, current_sum: f32) -> f32:
// The invisible borrow checker consumes 'weight' and 'activation'.
let product = weight * activation
// 'current_sum' and 'product' are consumed here.
let new_sum = current_sum + product
return new_sum

10
aurac/Cargo.toml Normal file
View File

@@ -0,0 +1,10 @@
[package]
name = "aurac"
version = "0.1.0"
edition = "2021"
[dependencies]
aurac_lexer = { path = "../aurac_lexer" }
aurac_parser = { path = "../aurac_parser" }
aurac_typechecker = { path = "../aurac_typechecker" }
aurac_codegen = { path = "../aurac_codegen" }

66
aurac/src/main.rs Normal file
View File

@@ -0,0 +1,66 @@
use std::env;
use std::fs;
use std::process;
use aurac_lexer::lexer::Lexer;
use aurac_lexer::token::TokenKind;
use aurac_parser::parser::Parser;
use aurac_parser::ast::{Program, Decl};
use aurac_typechecker::checker::TypeChecker;
use aurac_codegen::ir_gen::IrGenerator;
/// Compiler driver: file → lexer → parser → typechecker → LLVM IR.
/// Prints the IR to stdout and also writes it to `output.ll`.
/// Exits with status 1 on any usage, I/O, syntax, or type error.
fn main() {
    // Exactly one positional argument is required: the Aura source path.
    let args: Vec<String> = env::args().collect();
    if args.len() < 2 {
        eprintln!("Usage: aurac <file.aura>");
        process::exit(1);
    }
    let file_path = &args[1];
    let source_code = match fs::read_to_string(file_path) {
        Ok(src) => src,
        Err(err) => {
            eprintln!("Error reading file '{}': {}", file_path, err);
            process::exit(1);
        }
    };

    // Phase 1: lexing — drain the lexer until (and including) Eof.
    let mut lexer = Lexer::new(&source_code);
    let mut tokens = Vec::new();
    loop {
        let tok = lexer.next_token();
        if tok.kind == TokenKind::Eof {
            tokens.push(tok);
            break;
        }
        tokens.push(tok);
    }

    // Phase 2: parsing — the driver currently expects a single fn declaration.
    let mut parser = Parser::new(&tokens);
    let fn_decl = match parser.parse_fn_decl() {
        Ok(decl) => decl,
        Err(err) => {
            eprintln!("Syntax Error: {}", err);
            process::exit(1);
        }
    };
    let program = Program {
        decls: vec![Decl::Fn(fn_decl)],
    };

    // Phase 3: semantic analysis / type checking.
    let mut checker = TypeChecker::new();
    if let Err(err) = checker.check_program(&program) {
        eprintln!("Type Error: {}", err);
        process::exit(1);
    }

    // Phase 4: code generation (textual LLVM IR).
    let mut generator = IrGenerator::new();
    let ir_code = generator.generate_program(&program);

    // Phase 5: output — echo to stdout and persist to output.ll.
    println!("{}", ir_code);
    if let Err(err) = fs::write("output.ll", &ir_code) {
        eprintln!("Error writing 'output.ll': {}", err);
        process::exit(1);
    }
}

7
aurac_codegen/Cargo.toml Normal file
View File

@@ -0,0 +1,7 @@
[package]
name = "aurac_codegen"
version = "0.1.0"
edition = "2021"
[dependencies]
aurac_parser = { path = "../aurac_parser" }

141
aurac_codegen/src/ir_gen.rs Normal file
View File

@@ -0,0 +1,141 @@
use aurac_parser::ast::{Program, Decl, FnDecl, Block, Stmt, Expr, BinaryOp, TypeExpr};
use std::collections::HashMap;
/// Emits textual LLVM IR for a parsed (and type-checked) Aura program.
pub struct IrGenerator {
/// Accumulated LLVM IR text for the whole module.
pub output: String,
/// Counter for unnamed SSA temporaries (`%0`, `%1`, ...); reset per function.
pub tmp_counter: usize,
/// Maps let-bound names to the register/value string that holds them.
pub env: HashMap<String, String>,
}
impl IrGenerator {
/// Creates a generator with empty output and a fresh temporary counter.
pub fn new() -> Self {
Self {
output: String::new(),
tmp_counter: 0,
env: HashMap::new(),
}
}
/// Emits IR for every `Fn` declaration (other decl kinds are skipped)
/// and returns the accumulated module text.
pub fn generate_program(&mut self, program: &Program) -> String {
for decl in &program.decls {
if let Decl::Fn(fn_decl) = decl {
self.generate_fn(fn_decl);
}
}
self.output.clone()
}
/// Maps an Aura type name to an LLVM IR type string.
/// NOTE(review): all integer widths collapse to `i32` and `f64` maps to
/// `float`, which is lossy; `PositiveTime` is a hard-coded alias here
/// rather than being resolved through the typechecker — confirm both are
/// intentional bootstrap shortcuts.
fn map_type(aura_type: &str) -> &'static str {
match aura_type {
"f32" | "f64" | "PositiveTime" => "float",
"i32" | "i64" | "u32" | "u64" | "i8" | "i16" | "u8" | "u16" => "i32",
"bool" => "i1",
_ => "unknown_type",
}
}
/// Emits one function definition. GPU functions get the `ptx_kernel`
/// calling convention plus an `attributes #0` group after the body.
fn generate_fn(&mut self, decl: &FnDecl) {
// Per-function state: fresh value environment and temp numbering.
self.env.clear();
self.tmp_counter = 0;
let ret_type_str = match &decl.return_type {
TypeExpr::BaseType(bt) => bt.clone(),
_ => "f32".to_string(), // fallback
};
let llvm_ret_type = Self::map_type(&ret_type_str);
if decl.is_gpu {
self.output.push_str(&format!("define ptx_kernel {} @{}(", llvm_ret_type, decl.name));
} else {
self.output.push_str(&format!("define {} @{}(", llvm_ret_type, decl.name));
}
for (i, param) in decl.params.iter().enumerate() {
let param_type = match &param.ty {
TypeExpr::BaseType(bt) => bt.clone(),
_ => "f32".to_string(),
};
let llvm_param_type = Self::map_type(&param_type);
self.output.push_str(&format!("{} %{}", llvm_param_type, param.name));
// Comma-separate all but the last parameter.
if i < decl.params.len() - 1 {
self.output.push_str(", ");
}
}
if decl.is_gpu {
self.output.push_str(") #0 {\nentry:\n");
} else {
self.output.push_str(") {\nentry:\n");
}
self.generate_block(&decl.body, &ret_type_str);
self.output.push_str("}\n\n");
if decl.is_gpu {
self.output.push_str("attributes #0 = { \"target-cpu\"=\"sm_70\" \"target-features\"=\"+ptx60\" }\n\n");
}
}
/// Emits every statement of a block in order.
fn generate_block(&mut self, block: &Block, expected_ret_type: &str) {
for stmt in &block.statements {
self.generate_stmt(stmt, expected_ret_type);
}
}
/// Emits one statement. `fn_ret_type` is the Aura-level return type used
/// to pick float vs integer instructions for nested expressions.
fn generate_stmt(&mut self, stmt: &Stmt, fn_ret_type: &str) {
match stmt {
Stmt::Return(expr) => {
let val_reg = self.generate_expr(expr, fn_ret_type);
let llvm_type = Self::map_type(fn_ret_type);
self.output.push_str(&format!(" ret {} {}\n", llvm_type, val_reg));
}
Stmt::ExprStmt(expr) => {
// Value discarded; emitted only for its instructions.
self.generate_expr(expr, fn_ret_type);
}
Stmt::LetBind(name, expr) => {
// All test vars are f32 mathematically in this scenario
self.env.insert(name.clone(), val_reg);
}
}
}
/// Emits instructions for an expression and returns the value string
/// (a register like `%2`, a parameter like `%a`, or a literal).
fn generate_expr(&mut self, expr: &Expr, expected_type: &str) -> String {
match expr {
Expr::Identifier(name) => {
// Let-bound names resolve through `env`; anything else is
// assumed to be a function parameter register.
self.env.get(name).cloned().unwrap_or_else(|| format!("%{}", name))
}
Expr::Literal(val) => val.clone(),
Expr::Binary(left, op, right) => {
let left_val = self.generate_expr(left, expected_type);
let right_val = self.generate_expr(right, expected_type);
let is_float = expected_type == "f32" || expected_type == "f64" || expected_type == "PositiveTime";
let llvm_type = Self::map_type(expected_type);
let res_reg = format!("%{}", self.tmp_counter);
self.tmp_counter += 1;
// NOTE(review): comparison results are `i1`, but callers (e.g.
// `ret`) assume the operand type — a compare used as a return
// value would emit ill-typed IR. Confirm comparisons are only
// used inside refinement predicates for now.
let instruction = match op {
BinaryOp::Add => if is_float { "fadd" } else { "add" },
BinaryOp::Sub => if is_float { "fsub" } else { "sub" },
BinaryOp::Mul => if is_float { "fmul" } else { "mul" },
BinaryOp::Div => if is_float { "fdiv" } else { "sdiv" },
BinaryOp::Gt => if is_float { "fcmp ogt" } else { "icmp sgt" },
BinaryOp::Lt => if is_float { "fcmp olt" } else { "icmp slt" },
BinaryOp::Eq => if is_float { "fcmp oeq" } else { "icmp eq" },
};
self.output.push_str(&format!(
" {} = {} {} {}, {}\n",
res_reg, instruction, llvm_type, left_val, right_val
));
res_reg
}
}
}
}

37
aurac_codegen/src/lib.rs Normal file
View File

@@ -0,0 +1,37 @@
pub mod ir_gen;
#[cfg(test)]
mod tests {
    use super::ir_gen::IrGenerator;
    use aurac_parser::ast::{Program, Decl, FnDecl, Param, TypeExpr, Block, Stmt, Expr, BinaryOp};

    /// Hand-builds the AST for `pure fn add(a: f32, b: f32) -> f32: return a + b`
    /// and checks the emitted IR for the expected define/fadd/ret lines.
    #[test]
    fn test_generate_add_fn() {
        let f32_ty = || TypeExpr::BaseType("f32".to_string());
        let body = Block {
            statements: vec![Stmt::Return(Expr::Binary(
                Box::new(Expr::Identifier("a".to_string())),
                BinaryOp::Add,
                Box::new(Expr::Identifier("b".to_string())),
            ))],
        };
        let add_fn = FnDecl {
            is_pure: true,
            is_gpu: false,
            name: "add".to_string(),
            params: vec![
                Param { name: "a".to_string(), ty: f32_ty() },
                Param { name: "b".to_string(), ty: f32_ty() },
            ],
            return_type: f32_ty(),
            body,
        };
        let program = Program { decls: vec![Decl::Fn(add_fn)] };

        let mut generator = IrGenerator::new();
        let ir = generator.generate_program(&program);

        assert!(ir.contains("define float @add(float %a, float %b)"));
        assert!(ir.contains("fadd float %a, %b"));
        assert!(ir.contains("ret float %0"));
    }
}

6
aurac_lexer/Cargo.toml Normal file
View File

@@ -0,0 +1,6 @@
[package]
name = "aurac_lexer"
version = "0.1.0"
edition = "2021"
[dependencies]

203
aurac_lexer/src/lexer.rs Normal file
View File

@@ -0,0 +1,203 @@
use crate::token::{Span, Token, TokenKind};
use std::str::CharIndices;
/// Hand-written lexer producing zero-copy tokens that borrow from `input`.
/// Tracks leading spaces per line to emit synthetic Indent/Dedent tokens.
pub struct Lexer<'a> {
/// Full source text; identifier/number tokens are slices of this.
input: &'a str,
/// Iterator over (byte offset, char) pairs of `input`.
chars: CharIndices<'a>,
/// One-character lookahead: the not-yet-consumed character, if any.
current: Option<(usize, char)>,
/// 1-based line of the lookahead character.
line: usize,
/// 1-based column of the lookahead character.
column: usize,
/// Stack of active indentation widths; the base level 0 is always first.
indent_stack: Vec<usize>,
/// Dedent tokens still owed to the caller after a multi-level outdent.
pending_dedents: usize,
/// Set once Eof has been produced.
emitted_eof: bool,
}
impl<'a> Lexer<'a> {
pub fn new(input: &'a str) -> Self {
let mut chars = input.char_indices();
let current = chars.next();
Self {
input,
chars,
current,
line: 1,
column: 1,
indent_stack: vec![0], // base level indentation
pending_dedents: 0,
emitted_eof: false,
}
}
fn advance(&mut self) -> Option<(usize, char)> {
let result = self.current;
if let Some((_, c)) = result {
if c == '\n' {
self.line += 1;
self.column = 1;
} else {
self.column += 1;
}
}
self.current = self.chars.next();
result
}
fn peek(&self) -> Option<(usize, char)> {
self.current
}
pub fn next_token(&mut self) -> Token<'a> {
if self.pending_dedents > 0 {
self.pending_dedents -= 1;
return Token::new(
TokenKind::Dedent,
Span { line: self.line, column: 1, offset: self.current.map(|(o,_)| o).unwrap_or(self.input.len()), len: 0 }
);
}
if let Some((offset, c)) = self.current {
// Indentation mapping at the start of lines
if self.column == 1 && c != '\n' && c != '\r' {
let mut spaces = 0;
while let Some((_, pc)) = self.current {
if pc == ' ' {
spaces += 1;
self.advance();
} else {
break;
}
}
if self.current.map_or(false, |(_, c)| c == '\n' || c == '\r') {
// Empty/whitespace-only line: proceed to standard token matching
// which will hit the '\n' matcher below.
} else {
let current_indent = *self.indent_stack.last().unwrap_or(&0);
if spaces > current_indent {
self.indent_stack.push(spaces);
return Token::new(
TokenKind::Indent,
Span { line: self.line, column: 1, offset, len: spaces }
);
} else if spaces < current_indent {
let mut dedents = 0;
while let Some(&last) = self.indent_stack.last() {
if last > spaces {
self.indent_stack.pop();
dedents += 1;
} else {
break;
}
}
if dedents > 0 {
self.pending_dedents = dedents - 1;
return Token::new(
TokenKind::Dedent,
Span { line: self.line, column: 1, offset, len: spaces }
);
}
}
}
}
// Normal matching path
let (start_offset, c) = self.advance().unwrap();
let start_col = self.column - 1;
match c {
' ' | '\r' => self.next_token(),
'\n' => Token::new(TokenKind::Newline, Span { line: self.line - 1, column: start_col, offset: start_offset, len: 1 }),
':' => {
if self.peek().map(|(_, pc)| pc) == Some(':') {
let _ = self.advance();
Token::new(TokenKind::DoubleColon, Span { line: self.line, column: start_col, offset: start_offset, len: 2 })
} else {
Token::new(TokenKind::Colon, Span { line: self.line, column: start_col, offset: start_offset, len: 1 })
}
}
'-' => {
if self.peek().map(|(_, pc)| pc) == Some('>') {
let _ = self.advance();
Token::new(TokenKind::Arrow, Span { line: self.line, column: start_col, offset: start_offset, len: 2 })
} else {
Token::new(TokenKind::Minus, Span { line: self.line, column: start_col, offset: start_offset, len: 1 })
}
}
'+' => Token::new(TokenKind::Plus, Span { line: self.line, column: start_col, offset: start_offset, len: 1 }),
'*' => Token::new(TokenKind::Star, Span { line: self.line, column: start_col, offset: start_offset, len: 1 }),
'/' => Token::new(TokenKind::Slash, Span { line: self.line, column: start_col, offset: start_offset, len: 1 }),
'(' => Token::new(TokenKind::OpenParen, Span { line: self.line, column: start_col, offset: start_offset, len: 1 }),
')' => Token::new(TokenKind::CloseParen, Span { line: self.line, column: start_col, offset: start_offset, len: 1 }),
'{' => Token::new(TokenKind::OpenBrace, Span { line: self.line, column: start_col, offset: start_offset, len: 1 }),
'}' => Token::new(TokenKind::CloseBrace, Span { line: self.line, column: start_col, offset: start_offset, len: 1 }),
',' => Token::new(TokenKind::Comma, Span { line: self.line, column: start_col, offset: start_offset, len: 1 }),
'=' => {
if self.peek().map(|(_, pc)| pc) == Some('=') {
let _ = self.advance();
Token::new(TokenKind::EqualEqual, Span { line: self.line, column: start_col, offset: start_offset, len: 2 })
} else {
Token::new(TokenKind::Equal, Span { line: self.line, column: start_col, offset: start_offset, len: 1 })
}
}
'|' => Token::new(TokenKind::Pipe, Span { line: self.line, column: start_col, offset: start_offset, len: 1 }),
'>' => Token::new(TokenKind::Greater, Span { line: self.line, column: start_col, offset: start_offset, len: 1 }),
'<' => Token::new(TokenKind::Less, Span { line: self.line, column: start_col, offset: start_offset, len: 1 }),
_ if c.is_alphabetic() => {
while let Some((_, pc)) = self.peek() {
if pc.is_alphanumeric() || pc == '_' {
self.advance();
} else {
break;
}
}
let end_offset = self.current.map(|(o, _)| o).unwrap_or(self.input.len());
let ident_str = &self.input[start_offset..end_offset];
let kind = match ident_str {
"struct" => TokenKind::Struct,
"fn" => TokenKind::Fn,
"pure" => TokenKind::Pure,
"actor" => TokenKind::Actor,
"let" => TokenKind::Let,
"if" => TokenKind::If,
"else" => TokenKind::Else,
"match" => TokenKind::Match,
"return" => TokenKind::Return,
"type" => TokenKind::Type,
"gpu" => TokenKind::Gpu,
"i8" | "i16" | "i32" | "i64" | "u8" | "u16" | "u32" | "u64" | "f32" | "f64" | "bool" | "str" => TokenKind::BaseType(ident_str),
_ => TokenKind::Ident(ident_str),
};
Token::new(kind, Span { line: self.line, column: start_col, offset: start_offset, len: end_offset - start_offset })
}
_ if c.is_ascii_digit() => {
while let Some((_, pc)) = self.peek() {
if pc.is_ascii_digit() || pc == '.' {
self.advance();
} else {
break;
}
}
let end_offset = self.current.map(|(o, _)| o).unwrap_or(self.input.len());
let num_str = &self.input[start_offset..end_offset];
Token::new(TokenKind::Number(num_str), Span { line: self.line, column: start_col, offset: start_offset, len: end_offset - start_offset })
}
_ => Token::new(TokenKind::Error(c), Span { line: self.line, column: start_col, offset: start_offset, len: c.len_utf8() }),
}
} else {
if self.indent_stack.len() > 1 {
self.indent_stack.pop();
Token::new(TokenKind::Dedent, Span { line: self.line, column: self.column, offset: self.input.len(), len: 0 })
} else if !self.emitted_eof {
self.emitted_eof = true;
Token::new(TokenKind::Eof, Span { line: self.line, column: self.column, offset: self.input.len(), len: 0 })
} else {
Token::new(TokenKind::Eof, Span { line: self.line, column: self.column, offset: self.input.len(), len: 0 })
}
}
}
}

43
aurac_lexer/src/lib.rs Normal file
View File

@@ -0,0 +1,43 @@
pub mod lexer;
pub mod token;
#[cfg(test)]
mod tests {
    use super::lexer::Lexer;
    use super::token::TokenKind::*;

    /// Drives the lexer over a small struct declaration and verifies the
    /// full token stream, including synthetic Indent/Dedent/Eof tokens.
    #[test]
    fn test_struct_indentation() {
        let input = "struct Position:\n x: f32\n y: f32\n";
        let expected_tokens = vec![
            Struct,
            Ident("Position"),
            Colon,
            Newline,
            Indent,
            Ident("x"),
            Colon,
            BaseType("f32"),
            Newline,
            Ident("y"),
            Colon,
            BaseType("f32"),
            Newline,
            Dedent,
            Eof,
        ];

        let mut lexer = Lexer::new(input);
        let mut actual_tokens = Vec::new();
        loop {
            let kind = lexer.next_token().kind;
            let done = kind == Eof;
            actual_tokens.push(kind);
            if done {
                break;
            }
        }
        assert_eq!(actual_tokens, expected_tokens);
    }
}

45
aurac_lexer/src/token.rs Normal file
View File

@@ -0,0 +1,45 @@
/// Byte/line/column location of a token within the source text.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Span {
/// 1-based line number.
pub line: usize,
/// 1-based column number.
pub column: usize,
/// Byte offset of the token's first character.
pub offset: usize,
/// Length in bytes.
pub len: usize,
}
/// All token kinds produced by the lexer.
#[derive(Debug, Clone, PartialEq)]
pub enum TokenKind<'a> {
// Keywords
Struct, Fn, Pure, Actor, Let, If, Else, Match, Return, Type, Gpu,
// Identifiers and Literals bound to the input lifetime ('a) for zero-copy
Ident(&'a str),
Number(&'a str),
StringLit(&'a str),
// Base Types
BaseType(&'a str),
// Symbols & Operators
Colon, DoubleColon, Comma, Arrow, Equal, Pipe,
Plus, Minus, Star, Slash,
OpenParen, CloseParen, OpenBrace, CloseBrace, OpenAngle, CloseAngle,
Greater, Less, EqualEqual,
// Significant Whitespace
Indent, Dedent, Newline,
Eof,
// Unrecognized character (lexer never panics on bad input).
Error(char),
}
/// A token: its kind plus its source location.
#[derive(Debug, Clone, PartialEq)]
pub struct Token<'a> {
pub kind: TokenKind<'a>,
pub span: Span,
}
impl<'a> Token<'a> {
/// Bundles a kind and span into a token.
pub fn new(kind: TokenKind<'a>, span: Span) -> Self {
Self { kind, span }
}
}

7
aurac_parser/Cargo.toml Normal file
View File

@@ -0,0 +1,7 @@
[package]
name = "aurac_parser"
version = "0.1.0"
edition = "2021"
[dependencies]
aurac_lexer = { path = "../aurac_lexer" }

75
aurac_parser/src/ast.rs Normal file
View File

@@ -0,0 +1,75 @@
/// Root of the AST: an ordered list of top-level declarations.
#[derive(Debug, Clone, PartialEq)]
pub struct Program {
pub decls: Vec<Decl>,
}
/// A top-level declaration.
#[derive(Debug, Clone, PartialEq)]
pub enum Decl {
Struct(StructDecl),
Fn(FnDecl),
// `type Name = TypeExpr` alias.
TypeAlias(String, TypeExpr),
}
/// `struct Name:` followed by an indented field list.
#[derive(Debug, Clone, PartialEq)]
pub struct StructDecl {
pub name: String,
pub fields: Vec<FieldDecl>,
}
/// A single `name: type` field inside a struct.
#[derive(Debug, Clone, PartialEq)]
pub struct FieldDecl {
pub name: String,
pub ty: TypeExpr,
}
/// A function declaration, optionally marked `pure` and/or `gpu`.
#[derive(Debug, Clone, PartialEq)]
pub struct FnDecl {
pub is_pure: bool,
pub is_gpu: bool,
pub name: String,
pub params: Vec<Param>,
pub return_type: TypeExpr,
pub body: Block,
}
/// A `name: type` function parameter.
#[derive(Debug, Clone, PartialEq)]
pub struct Param {
pub name: String,
pub ty: TypeExpr,
}
/// An indented sequence of statements.
#[derive(Debug, Clone, PartialEq)]
pub struct Block {
pub statements: Vec<Stmt>,
}
/// A statement inside a block.
#[derive(Debug, Clone, PartialEq)]
pub enum Stmt {
Return(Expr),
ExprStmt(Expr),
// `let name = expr`.
LetBind(String, Expr),
}
/// An expression. Literals are kept as raw source text.
#[derive(Debug, Clone, PartialEq)]
pub enum Expr {
Binary(Box<Expr>, BinaryOp, Box<Expr>),
Literal(String),
Identifier(String),
}
/// Binary operators (arithmetic and comparisons).
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum BinaryOp {
Add,
Sub,
Mul,
Div,
Gt,
Lt,
Eq,
}
/// A type expression: a plain base-type name, or a refinement
/// `Base{binder | predicate}` (e.g. `f32{t | t > 0.0}`).
#[derive(Debug, Clone, PartialEq)]
pub enum TypeExpr {
BaseType(String),
// (base type, binder variable name, predicate expression)
Refined(Box<TypeExpr>, String, Box<Expr>),
}

87
aurac_parser/src/lib.rs Normal file
View File

@@ -0,0 +1,87 @@
pub mod ast;
pub mod parser;
#[cfg(test)]
mod tests {
    use aurac_lexer::lexer::Lexer;
    use aurac_lexer::token::{Token, TokenKind};
    use super::parser::Parser;
    use super::ast::{StructDecl, FieldDecl, TypeExpr, FnDecl, Param, Block, Stmt, Expr, BinaryOp};

    /// Lexes `src` to completion, returning the token stream including Eof
    /// (mimics the real compiler pipeline feeding a contiguous slice).
    fn lex_all(src: &str) -> Vec<Token<'_>> {
        let mut lexer = Lexer::new(src);
        let mut tokens = Vec::new();
        loop {
            let tok = lexer.next_token();
            let done = tok.kind == TokenKind::Eof;
            tokens.push(tok);
            if done {
                break;
            }
        }
        tokens
    }

    /// A `pure fn` with a single return statement parses into the expected AST.
    #[test]
    fn test_parse_pure_fn() {
        let tokens = lex_all("pure fn add(a: f32, b: f32) -> f32:\n return a + b\n");
        let mut parser = Parser::new(&tokens);
        let fn_decl = parser.parse_fn_decl().expect("Failed to parse pure fn decl");

        let expected = FnDecl {
            is_pure: true,
            is_gpu: false,
            name: "add".to_string(),
            params: vec![
                Param { name: "a".to_string(), ty: TypeExpr::BaseType("f32".to_string()) },
                Param { name: "b".to_string(), ty: TypeExpr::BaseType("f32".to_string()) },
            ],
            return_type: TypeExpr::BaseType("f32".to_string()),
            body: Block {
                statements: vec![
                    Stmt::Return(Expr::Binary(
                        Box::new(Expr::Identifier("a".to_string())),
                        BinaryOp::Add,
                        Box::new(Expr::Identifier("b".to_string())),
                    ))
                ]
            }
        };
        assert_eq!(fn_decl, expected);
    }

    /// A two-field struct declaration parses into the expected AST.
    #[test]
    fn test_parse_struct_decl() {
        let tokens = lex_all("struct Position:\n x: f32\n y: f32\n");
        let mut parser = Parser::new(&tokens);
        let struct_decl = parser.parse_struct_decl().expect("Failed to parse struct decl");

        let expected = StructDecl {
            name: "Position".to_string(),
            fields: vec![
                FieldDecl {
                    name: "x".to_string(),
                    ty: TypeExpr::BaseType("f32".to_string()),
                },
                FieldDecl {
                    name: "y".to_string(),
                    ty: TypeExpr::BaseType("f32".to_string()),
                },
            ],
        };
        assert_eq!(struct_decl, expected);
    }
}

278
aurac_parser/src/parser.rs Normal file
View File

@@ -0,0 +1,278 @@
use aurac_lexer::token::{Token, TokenKind};
use crate::ast::*;
/// Recursive-descent parser over a borrowed token slice.
pub struct Parser<'a> {
/// Token stream produced by the lexer (includes Newline/Indent/Dedent/Eof).
tokens: &'a [Token<'a>],
/// Index of the next token to consume.
current: usize,
}
impl<'a> Parser<'a> {
pub fn new(tokens: &'a [Token<'a>]) -> Self {
Self { tokens, current: 0 }
}
fn peek(&self) -> Option<&Token<'a>> {
self.tokens.get(self.current)
}
fn advance(&mut self) -> Option<&Token<'a>> {
let curr = self.peek();
self.current += 1;
curr
}
fn expect(&mut self, expected: TokenKind<'a>) -> Result<(), String> {
let peeked = self.peek();
match peeked {
Some(curr) if curr.kind == expected => {
self.advance();
Ok(())
},
Some(curr) => Err(format!("Expected {:?}, found {:?}", expected, curr.kind)),
None => Err(format!("Expected {:?}, found EOF", expected)),
}
}
pub fn parse_struct_decl(&mut self) -> Result<StructDecl, String> {
self.expect(TokenKind::Struct)?;
let name = match self.advance() {
Some(Token { kind: TokenKind::Ident(id), .. }) => id.to_string(),
Some(t) => return Err(format!("Expected identifier after struct, found {:?}", t.kind)),
None => return Err("Expected identifier after struct, found EOF".to_string()),
};
self.expect(TokenKind::Colon)?;
self.expect(TokenKind::Newline)?;
self.expect(TokenKind::Indent)?;
let mut fields = Vec::new();
loop {
// Ignore leading blank lines inside struct bodies
while let Some(Token { kind: TokenKind::Newline, .. }) = self.peek() {
self.advance();
}
match self.peek() {
Some(Token { kind: TokenKind::Dedent, .. }) => {
self.advance();
break;
}
Some(Token { kind: TokenKind::Ident(_), .. }) => {
let field_name = if let Token { kind: TokenKind::Ident(id), .. } = self.advance().unwrap() {
id.to_string()
} else {
unreachable!()
};
self.expect(TokenKind::Colon)?;
let ty = self.parse_type_expr()?;
fields.push(FieldDecl { name: field_name, ty });
// Fields must be terminated by Newline or immediately ended with Dedent
match self.peek() {
Some(Token { kind: TokenKind::Newline, .. }) => {
self.advance();
},
Some(Token { kind: TokenKind::Dedent, .. }) => (),
Some(t) => return Err(format!("Expected Newline or Dedent after field, found {:?}", t.kind)),
None => return Err("Unexpected EOF in struct fields".to_string()),
}
}
Some(t) => return Err(format!("Expected field declaration or Dedent, found {:?}", t.kind)),
None => return Err("Unexpected EOF parsing struct".to_string()),
}
}
Ok(StructDecl { name, fields })
}
pub fn parse_fn_decl(&mut self) -> Result<FnDecl, String> {
let mut is_pure = false;
let mut is_gpu = false;
if let Some(Token { kind: TokenKind::Pure, .. }) = self.peek() {
self.advance();
is_pure = true;
} else if let Some(Token { kind: TokenKind::Gpu, .. }) = self.peek() {
self.advance();
is_gpu = true;
is_pure = true; // GPU kernels are inherently pure
}
self.expect(TokenKind::Fn)?;
let name = match self.advance() {
Some(Token { kind: TokenKind::Ident(id), .. }) => id.to_string(),
Some(t) => return Err(format!("Expected identifier after fn, found {:?}", t.kind)),
None => return Err("Expected identifier after fn, found EOF".to_string()),
};
self.expect(TokenKind::OpenParen)?;
let mut params = Vec::new();
while let Some(tok) = self.peek() {
if tok.kind == TokenKind::CloseParen {
break;
}
let param_name = match self.advance() {
Some(Token { kind: TokenKind::Ident(id), .. }) => id.to_string(),
Some(t) => return Err(format!("Expected parameter name, found {:?}", t.kind)),
None => return Err("Expected parameter name, found EOF".to_string()),
};
self.expect(TokenKind::Colon)?;
let ty = self.parse_type_expr()?;
params.push(Param { name: param_name, ty });
if let Some(Token { kind: TokenKind::Comma, .. }) = self.peek() {
self.advance();
} else {
break;
}
}
self.expect(TokenKind::CloseParen)?;
self.expect(TokenKind::Arrow)?;
let return_type = self.parse_type_expr()?;
self.expect(TokenKind::Colon)?;
self.expect(TokenKind::Newline)?;
let body = self.parse_block()?;
Ok(FnDecl {
is_pure,
is_gpu,
name,
params,
return_type,
body,
})
}
fn parse_block(&mut self) -> Result<Block, String> {
self.expect(TokenKind::Indent)?;
let mut statements = Vec::new();
loop {
while let Some(Token { kind: TokenKind::Newline, .. }) = self.peek() {
self.advance();
}
match self.peek() {
Some(Token { kind: TokenKind::Dedent, .. }) => {
self.advance();
break;
}
Some(_) => {
statements.push(self.parse_stmt()?);
match self.peek() {
Some(Token { kind: TokenKind::Newline, .. }) => {
self.advance();
},
Some(Token { kind: TokenKind::Dedent, .. }) => (),
Some(t) => return Err(format!("Expected Newline or Dedent after statement, found {:?}", t.kind)),
None => return Err("Unexpected EOF in block".to_string()),
}
}
None => return Err("Unexpected EOF parsing block".to_string()),
}
}
Ok(Block { statements })
}
fn parse_stmt(&mut self) -> Result<Stmt, String> {
if let Some(Token { kind: TokenKind::Return, .. }) = self.peek() {
self.advance();
let expr = self.parse_expr()?;
self.expect(TokenKind::Newline)?;
return Ok(Stmt::Return(expr));
}
if let Some(Token { kind: TokenKind::Let, .. }) = self.peek() {
self.advance();
let name = match self.advance() {
Some(Token { kind: TokenKind::Ident(id), .. }) => id.to_string(),
Some(t) => return Err(format!("Expected identifier after 'let', found {:?}", t.kind)),
None => return Err("Expected identifier after 'let', found EOF".to_string()),
};
self.expect(TokenKind::Equal)?;
let expr = self.parse_expr()?;
self.expect(TokenKind::Newline)?;
return Ok(Stmt::LetBind(name, expr));
}
let expr = self.parse_expr()?;
Ok(Stmt::ExprStmt(expr))
}
fn parse_expr(&mut self) -> Result<Expr, String> {
let mut left = match self.advance() {
Some(Token { kind: TokenKind::Ident(id), .. }) => Expr::Identifier(id.to_string()),
Some(Token { kind: TokenKind::Number(num), .. }) => Expr::Literal(num.to_string()),
Some(t) => return Err(format!("Expected expression, found {:?}", t.kind)),
None => return Err("Expected expression, found EOF".to_string()),
};
if let Some(tok) = self.peek() {
let op = match tok.kind {
TokenKind::Plus => Some(BinaryOp::Add),
TokenKind::Minus => Some(BinaryOp::Sub),
TokenKind::Star => Some(BinaryOp::Mul),
TokenKind::Slash => Some(BinaryOp::Div),
TokenKind::Greater => Some(BinaryOp::Gt),
TokenKind::Less => Some(BinaryOp::Lt),
TokenKind::EqualEqual => Some(BinaryOp::Eq),
_ => None,
};
if let Some(bin_op) = op {
self.advance();
let right = self.parse_expr()?;
left = Expr::Binary(Box::new(left), bin_op, Box::new(right));
}
}
Ok(left)
}
pub fn parse_type_alias(&mut self) -> Result<Decl, String> {
self.expect(TokenKind::Type)?;
let name = match self.advance() {
Some(Token { kind: TokenKind::Ident(id), .. }) => id.to_string(),
Some(t) => return Err(format!("Expected identifier for type alias, found {:?}", t.kind)),
None => return Err("Expected identifier, found EOF".to_string()),
};
self.expect(TokenKind::Equal)?;
let ty = self.parse_type_expr()?;
self.expect(TokenKind::Newline)?;
Ok(Decl::TypeAlias(name, ty))
}
fn parse_type_expr(&mut self) -> Result<TypeExpr, String> {
let base_ty = match self.advance() {
Some(Token { kind: TokenKind::BaseType(bt), .. }) => TypeExpr::BaseType(bt.to_string()),
Some(Token { kind: TokenKind::Ident(id), .. }) => TypeExpr::BaseType(id.to_string()),
Some(t) => return Err(format!("Expected type, found {:?}", t.kind)),
None => return Err("Expected type, found EOF".to_string()),
};
if let Some(Token { kind: TokenKind::OpenBrace, .. }) = self.peek() {
self.advance();
let var_name = match self.advance() {
Some(Token { kind: TokenKind::Ident(id), .. }) => id.to_string(),
_ => return Err("Expected variable identifier in refinement clause".to_string()),
};
self.expect(TokenKind::Pipe)?;
let constraint = self.parse_expr()?;
self.expect(TokenKind::CloseBrace)?;
Ok(TypeExpr::Refined(Box::new(base_ty), var_name, Box::new(constraint)))
} else {
Ok(base_ty)
}
}
}

View File

@@ -0,0 +1,7 @@
[package]
name = "aurac_typechecker"
version = "0.1.0"
edition = "2021"
[dependencies]
aurac_parser = { path = "../aurac_parser" }

View File

@@ -0,0 +1,222 @@
use std::collections::HashSet;
use std::collections::HashMap;
use aurac_parser::ast::{Program, Decl, StructDecl, TypeExpr, FnDecl, Block, Stmt, Expr, BinaryOp};
use crate::env::{SymbolTable, FunctionSignature, OwnershipState};
use crate::symbolic::SymbolicEngine;
/// Semantic analyzer: registers declarations, resolves types/aliases, and
/// validates struct fields and function bodies.
pub struct TypeChecker {
// Symbol table holding registered types, function signatures, aliases,
// and local variable scopes.
env: SymbolTable,
}
impl TypeChecker {
/// Constructs a checker with a fresh, empty symbol table.
pub fn new() -> Self {
    Self { env: SymbolTable::new() }
}
pub fn check_program(&mut self, program: &Program) -> Result<(), String> {
// First Pass: Register global declarations
for decl in &program.decls {
match decl {
Decl::TypeAlias(name, ty) => {
self.env.register_type(name.clone());
self.env.type_aliases.insert(name.clone(), ty.clone());
}
Decl::Struct(struct_decl) => {
if !self.env.register_type(struct_decl.name.clone()) {
return Err(format!("Duplicate type definition: {}", struct_decl.name));
}
}
Decl::Fn(fn_decl) => {
let mut param_types = Vec::new();
for param in &fn_decl.params {
match &param.ty {
TypeExpr::BaseType(bt) => param_types.push(bt.clone()),
}
}
let return_type = match &fn_decl.return_type {
TypeExpr::BaseType(bt) => bt.clone(),
};
let sig = FunctionSignature {
param_types,
return_type,
};
if !self.env.register_function(fn_decl.name.clone(), sig) {
return Err(format!("Duplicate function definition: {}", fn_decl.name));
}
}
}
}
// Second Pass: Validate bodies
for decl in &program.decls {
match decl {
Decl::TypeAlias(_, _) => {}
Decl::Struct(struct_decl) => {
self.check_struct_decl(struct_decl)?;
}
Decl::Fn(fn_decl) => {
self.check_fn_decl(fn_decl)?;
}
}
}
Ok(())
}
/// Validates one struct declaration: rejects duplicate field names and
/// fields whose (base) type has not been registered.
fn check_struct_decl(&mut self, decl: &StructDecl) -> Result<(), String> {
    let mut seen_fields = HashSet::new();
    for field in &decl.fields {
        if !seen_fields.insert(field.name.clone()) {
            return Err(format!("Duplicate field '{}' in struct '{}'", field.name, decl.name));
        }
        // FIX: the original matched only `TypeExpr::BaseType` — a
        // non-exhaustive match over the two-variant `TypeExpr` enum, which
        // does not compile (E0004). Refinement clauses are stripped so a
        // `T{v | pred}` field is checked against base type `T`.
        let mut ty = &field.ty;
        let base_type = loop {
            match ty {
                TypeExpr::BaseType(bt) => break bt,
                TypeExpr::Refined(inner, _, _) => ty = inner,
            }
        };
        if !self.env.is_type_defined(base_type) {
            return Err(format!(
                "Unknown type '{}' used in field '{}' of struct '{}'",
                base_type, field.name, decl.name
            ));
        }
    }
    Ok(())
}
fn resolve_type_expr(&self, ty: &TypeExpr) -> (String, Option<Expr>) {
match ty {
TypeExpr::BaseType(bt) => {
if let Some(alias_ty) = self.env.type_aliases.get(bt) {
self.resolve_type_expr(alias_ty)
} else {
(bt.clone(), None)
}
}
TypeExpr::Refined(base, _, expr) => {
let (bt, _) = self.resolve_type_expr(base);
(bt, Some(*expr.clone()))
}
}
}
fn check_fn_decl(&mut self, decl: &FnDecl) -> Result<(), String> {
self.env.enter_scope();
let mut param_constraints = HashMap::new();
for param in &decl.params {
let (ty_str, constraint_expr) = self.resolve_type_expr(&param.ty);
if decl.is_gpu {
// GPU kernel constraint enforcement bootstrapped layer
match ty_str.as_str() {
"f32" | "f64" | "i32" | "u32" | "bool" => {},
_ => return Err(format!("GPU Kernel Error: Type '{}' is not a valid parallel primitive for parameter '{}'", ty_str, param.name)),
}
}
if !self.env.is_type_defined(&ty_str) {
return Err(format!("Unknown type '{}' in parameter '{}'", ty_str, param.name));
}
if !self.env.define_local(param.name.clone(), ty_str) {
return Err(format!("Duplicate parameter name: {}", param.name));
}
if let Some(expr) = constraint_expr {
param_constraints.insert(param.name.clone(), expr);
}
}
let (expected_return_type, return_constraint) = self.resolve_type_expr(&decl.return_type);
if !self.env.is_type_defined(&expected_return_type) {
return Err(format!("Unknown return type '{}' for function '{}'", expected_return_type, decl.name));
}
self.check_block(&decl.body, &expected_return_type, return_constraint.as_ref(), &param_constraints)?;
self.env.exit_scope();
Ok(())
}
fn check_block(&mut self, block: &Block, expected_return: &str, return_constraint: Option<&Expr>, param_constraints: &HashMap<String, Expr>) -> Result<(), String> {
for stmt in &block.statements {
self.check_stmt(stmt, expected_return, return_constraint, param_constraints)?;
}
Ok(())
}
fn check_stmt(&mut self, stmt: &Stmt, expected_return: &str, return_constraint: Option<&Expr>, param_constraints: &HashMap<String, Expr>) -> Result<(), String> {
match stmt {
Stmt::Return(expr) => {
let actual_type = self.evaluate_expr_type(expr)?;
if actual_type != expected_return {
return Err(format!("Type mismatch: expected return type '{}', but found '{}'", expected_return, actual_type));
}
if return_constraint.is_some() {
let symbolic = SymbolicEngine::new();
if let Err(e) = symbolic.prove_constraint(expr, param_constraints) {
return Err(format!("Proof Error: {}", e));
}
}
Ok(())
}
Stmt::ExprStmt(expr) => {
self.evaluate_expr_type(expr)?;
Ok(())
}
Stmt::LetBind(name, expr) => {
let actual_type = self.evaluate_expr_type(expr)?;
if !self.env.define_local(name.clone(), actual_type) {
return Err(format!("Variable already defined in this scope: {}", name));
}
Ok(())
}
}
}
fn evaluate_expr_type(&mut self, expr: &Expr) -> Result<String, String> {
match expr {
Expr::Identifier(name) => {
let ty = self.env.resolve_local(name).ok_or_else(|| format!("Undefined variable: {}", name))?;
if let Some(state) = self.env.get_ownership(name) {
if state == OwnershipState::Moved {
return Err(format!("Ownership Error: Use of moved variable '{}'", name));
}
// Mark as moved after successful consumption
self.env.mark_moved(name)?;
}
Ok(ty)
}
Expr::Literal(val) => {
if val.contains('.') {
Ok("f32".to_string())
} else {
Ok("i32".to_string())
}
}
Expr::Binary(left, _op, right) => {
let left_type = self.evaluate_expr_type(left)?;
let right_type = self.evaluate_expr_type(right)?;
if left_type != right_type {
return Err(format!("Type mismatch in binary operation: '{}' vs '{}'", left_type, right_type));
}
Ok(left_type)
}
}
}
}

View File

@@ -0,0 +1,113 @@
use std::collections::{HashSet, HashMap};
use aurac_parser::ast::TypeExpr;
#[derive(Debug, Clone, PartialEq)]
/// Affine-ownership state tracked per local binding.
pub enum OwnershipState {
    // Declared but not yet initialized. NOTE(review): nothing in this
    // file constructs this state yet (`define_local` binds as `Owned`).
    Uninitialized,
    // The binding currently owns its value and may be consumed once.
    Owned,
    // The value has been consumed; further reads are ownership errors.
    Moved,
}
#[derive(Debug, Clone)]
/// Registered signature of a top-level function, stored as type names.
pub struct FunctionSignature {
    // One type name per parameter, in declaration order.
    pub param_types: Vec<String>,
    // Type name of the return value.
    pub return_type: String,
}
#[derive(Debug, Clone)]
/// Compile-time symbol environment: known types, alias definitions,
/// function signatures, and a stack of lexical scopes for locals.
pub struct SymbolTable {
    // Every name accepted as a type: builtins plus registered structs/aliases.
    pub defined_types: HashSet<String>,
    // Alias name -> the aliased type expression (possibly refined).
    pub type_aliases: HashMap<String, TypeExpr>,
    // Function name -> registered signature.
    pub functions: HashMap<String, FunctionSignature>,
    // Innermost scope is last; each scope maps a local name to its
    // type name and current ownership state.
    pub scopes: Vec<HashMap<String, (String, OwnershipState)>>,
}
impl SymbolTable {
pub fn new() -> Self {
let mut defined_types = HashSet::new();
let builtin_types = vec![
"i8", "i16", "i32", "i64",
"u8", "u16", "u32", "u64",
"f32", "f64",
"bool", "str"
];
for ty in builtin_types {
defined_types.insert(ty.to_string());
}
Self {
defined_types,
type_aliases: HashMap::new(),
functions: HashMap::new(),
scopes: vec![HashMap::new()], // Global scope is scopes[0]
}
}
pub fn is_type_defined(&self, name: &str) -> bool {
self.defined_types.contains(name)
}
pub fn register_type(&mut self, name: String) -> bool {
self.defined_types.insert(name)
}
pub fn register_function(&mut self, name: String, sig: FunctionSignature) -> bool {
if self.functions.contains_key(&name) {
false
} else {
self.functions.insert(name, sig);
true
}
}
pub fn enter_scope(&mut self) {
self.scopes.push(HashMap::new());
}
pub fn exit_scope(&mut self) {
if self.scopes.len() > 1 {
self.scopes.pop();
}
}
pub fn define_local(&mut self, name: String, ty: String) -> bool {
if let Some(scope) = self.scopes.last_mut() {
if scope.contains_key(&name) {
return false;
}
scope.insert(name, (ty, OwnershipState::Owned));
true
} else {
false
}
}
pub fn resolve_local(&self, name: &str) -> Option<String> {
for scope in self.scopes.iter().rev() {
if let Some((ty, _)) = scope.get(name) {
return Some(ty.clone());
}
}
None
}
pub fn get_ownership(&self, name: &str) -> Option<OwnershipState> {
for scope in self.scopes.iter().rev() {
if let Some((_, state)) = scope.get(name) {
return Some(state.clone());
}
}
None
}
pub fn mark_moved(&mut self, name: &str) -> Result<(), String> {
for scope in self.scopes.iter_mut().rev() {
if let Some(entry) = scope.get_mut(name) {
entry.1 = OwnershipState::Moved;
return Ok(());
}
}
Err(format!("Variable not found: {}", name))
}
}

View File

@@ -0,0 +1,285 @@
pub mod env;
pub mod checker;
#[cfg(test)]
mod tests {
    use super::checker::TypeChecker;
    // BUGFIX: `FnDecl`, `Param`, `Block`, `Stmt` and `Expr` were used
    // unqualified below but never imported, so this module did not
    // compile. `BinaryOp` is imported too instead of being spelled out.
    use aurac_parser::ast::{
        BinaryOp, Block, Decl, Expr, FieldDecl, FnDecl, Param, Program, Stmt, StructDecl, TypeExpr,
    };

    /// A struct with distinct fields of builtin types passes.
    #[test]
    fn test_valid_struct() {
        let program = Program {
            decls: vec![Decl::Struct(StructDecl {
                name: "Position".to_string(),
                fields: vec![
                    FieldDecl { name: "x".to_string(), ty: TypeExpr::BaseType("f32".to_string()) },
                    FieldDecl { name: "y".to_string(), ty: TypeExpr::BaseType("f32".to_string()) },
                ],
            })],
        };
        let mut checker = TypeChecker::new();
        let result = checker.check_program(&program);
        assert!(result.is_ok());
    }

    /// Two fields with the same name are rejected.
    #[test]
    fn test_invalid_struct_duplicate_fields() {
        let program = Program {
            decls: vec![Decl::Struct(StructDecl {
                name: "DuplicateFieldStruct".to_string(),
                fields: vec![
                    FieldDecl { name: "x".to_string(), ty: TypeExpr::BaseType("f32".to_string()) },
                    FieldDecl { name: "x".to_string(), ty: TypeExpr::BaseType("f32".to_string()) },
                ],
            })],
        };
        let mut checker = TypeChecker::new();
        let result = checker.check_program(&program);
        assert_eq!(
            result,
            Err("Duplicate field 'x' in struct 'DuplicateFieldStruct'".to_string())
        );
    }

    /// A field referencing an unregistered type name is rejected.
    #[test]
    fn test_invalid_struct_unknown_type() {
        let program = Program {
            decls: vec![Decl::Struct(StructDecl {
                name: "UnknownTypeStruct".to_string(),
                fields: vec![
                    FieldDecl { name: "val".to_string(), ty: TypeExpr::BaseType("float32_t".to_string()) },
                ],
            })],
        };
        let mut checker = TypeChecker::new();
        let result = checker.check_program(&program);
        assert_eq!(
            result,
            Err("Unknown type 'float32_t' used in field 'val' of struct 'UnknownTypeStruct'".to_string())
        );
    }

    /// Registering the same struct name twice is rejected.
    #[test]
    fn test_duplicate_struct_declaration() {
        let program = Program {
            decls: vec![
                Decl::Struct(StructDecl {
                    name: "Data".to_string(),
                    fields: vec![
                        FieldDecl { name: "a".to_string(), ty: TypeExpr::BaseType("i32".to_string()) },
                    ],
                }),
                Decl::Struct(StructDecl {
                    name: "Data".to_string(),
                    fields: vec![
                        FieldDecl { name: "b".to_string(), ty: TypeExpr::BaseType("f32".to_string()) },
                    ],
                }),
            ],
        };
        let mut checker = TypeChecker::new();
        let result = checker.check_program(&program);
        assert_eq!(result, Err("Duplicate type definition: Data".to_string()));
    }

    /// `fn add(a: f32, b: f32) -> f32 { return a + b }` type-checks.
    #[test]
    fn test_valid_fn() {
        let program = Program {
            decls: vec![Decl::Fn(FnDecl {
                is_pure: true,
                is_gpu: false,
                name: "add".to_string(),
                params: vec![
                    Param { name: "a".to_string(), ty: TypeExpr::BaseType("f32".to_string()) },
                    Param { name: "b".to_string(), ty: TypeExpr::BaseType("f32".to_string()) },
                ],
                return_type: TypeExpr::BaseType("f32".to_string()),
                body: Block {
                    statements: vec![Stmt::Return(Expr::Binary(
                        Box::new(Expr::Identifier("a".to_string())),
                        BinaryOp::Add,
                        Box::new(Expr::Identifier("b".to_string())),
                    ))],
                },
            })],
        };
        let mut checker = TypeChecker::new();
        let result = checker.check_program(&program);
        assert!(result.is_ok());
    }

    /// Returning an f32 expression from a function declared `-> i32` fails.
    #[test]
    fn test_invalid_return_type() {
        let program = Program {
            decls: vec![Decl::Fn(FnDecl {
                is_pure: true,
                is_gpu: false,
                name: "add".to_string(),
                params: vec![
                    Param { name: "a".to_string(), ty: TypeExpr::BaseType("f32".to_string()) },
                    Param { name: "b".to_string(), ty: TypeExpr::BaseType("f32".to_string()) },
                ],
                return_type: TypeExpr::BaseType("i32".to_string()), // deliberate mismatch
                body: Block {
                    statements: vec![Stmt::Return(Expr::Binary(
                        Box::new(Expr::Identifier("a".to_string())),
                        BinaryOp::Add,
                        Box::new(Expr::Identifier("b".to_string())),
                    ))],
                },
            })],
        };
        let mut checker = TypeChecker::new();
        let result = checker.check_program(&program);
        assert_eq!(
            result,
            Err("Type mismatch: expected return type 'i32', but found 'f32'".to_string())
        );
    }

    /// Referencing an identifier that was never bound fails.
    #[test]
    fn test_undefined_variable() {
        let program = Program {
            decls: vec![Decl::Fn(FnDecl {
                is_pure: true,
                is_gpu: false,
                name: "add".to_string(),
                params: vec![
                    Param { name: "a".to_string(), ty: TypeExpr::BaseType("f32".to_string()) },
                ],
                return_type: TypeExpr::BaseType("f32".to_string()),
                body: Block {
                    statements: vec![Stmt::Return(Expr::Binary(
                        Box::new(Expr::Identifier("a".to_string())),
                        BinaryOp::Add,
                        Box::new(Expr::Identifier("c".to_string())), // 'c' is undefined
                    ))],
                },
            })],
        };
        let mut checker = TypeChecker::new();
        let result = checker.check_program(&program);
        assert_eq!(result, Err("Undefined variable: c".to_string()));
    }

    /// Positive + Positive is provably positive, so the refined return
    /// type is accepted.
    #[test]
    fn test_refinement_proof_success() {
        let program = Program {
            decls: vec![
                Decl::TypeAlias("Positive".to_string(), TypeExpr::Refined(
                    Box::new(TypeExpr::BaseType("f32".to_string())),
                    "v".to_string(),
                    Box::new(Expr::Binary(
                        Box::new(Expr::Identifier("v".to_string())),
                        BinaryOp::Gt,
                        Box::new(Expr::Literal("0.0".to_string())),
                    ))
                )),
                Decl::Fn(FnDecl {
                    is_pure: true,
                    is_gpu: false, // BUGFIX: required field was missing
                    name: "add".to_string(),
                    params: vec![
                        Param { name: "a".to_string(), ty: TypeExpr::BaseType("Positive".to_string()) },
                        Param { name: "b".to_string(), ty: TypeExpr::BaseType("Positive".to_string()) },
                    ],
                    return_type: TypeExpr::BaseType("Positive".to_string()),
                    body: Block {
                        statements: vec![Stmt::Return(Expr::Binary(
                            Box::new(Expr::Identifier("a".to_string())),
                            BinaryOp::Add,
                            Box::new(Expr::Identifier("b".to_string())),
                        ))],
                    },
                })
            ]
        };
        let mut checker = TypeChecker::new();
        let result = checker.check_program(&program);
        assert!(result.is_ok());
    }

    /// Positive - Positive may be non-positive, so the proof fails.
    #[test]
    fn test_refinement_proof_failure() {
        let program = Program {
            decls: vec![
                Decl::TypeAlias("Positive".to_string(), TypeExpr::Refined(
                    Box::new(TypeExpr::BaseType("f32".to_string())),
                    "v".to_string(),
                    Box::new(Expr::Binary(
                        Box::new(Expr::Identifier("v".to_string())),
                        BinaryOp::Gt,
                        Box::new(Expr::Literal("0.0".to_string())),
                    ))
                )),
                Decl::Fn(FnDecl {
                    is_pure: true,
                    is_gpu: false, // BUGFIX: required field was missing
                    name: "sub".to_string(),
                    params: vec![
                        Param { name: "a".to_string(), ty: TypeExpr::BaseType("Positive".to_string()) },
                        Param { name: "b".to_string(), ty: TypeExpr::BaseType("Positive".to_string()) },
                    ],
                    return_type: TypeExpr::BaseType("Positive".to_string()),
                    body: Block {
                        statements: vec![Stmt::Return(Expr::Binary(
                            Box::new(Expr::Identifier("a".to_string())),
                            BinaryOp::Sub,
                            Box::new(Expr::Identifier("b".to_string())),
                        ))],
                    },
                })
            ]
        };
        let mut checker = TypeChecker::new();
        let result = checker.check_program(&program);
        assert_eq!(result, Err("Proof Error: Cannot mathematically prove constraint: result might be <= 0.0".to_string()));
    }

    /// Reading a variable after it was consumed by a prior binding is an
    /// ownership error.
    #[test]
    fn test_ownership_use_after_move() {
        let program = Program {
            decls: vec![
                Decl::Fn(FnDecl {
                    is_pure: true,
                    is_gpu: false, // BUGFIX: required field was missing
                    name: "process".to_string(),
                    params: vec![
                        Param { name: "d".to_string(), ty: TypeExpr::BaseType("f32".to_string()) },
                    ],
                    return_type: TypeExpr::BaseType("f32".to_string()),
                    body: Block {
                        statements: vec![Stmt::Return(Expr::Identifier("d".to_string()))],
                    },
                }),
                Decl::Fn(FnDecl {
                    is_pure: true,
                    is_gpu: false, // BUGFIX: required field was missing
                    name: "main".to_string(),
                    params: vec![],
                    return_type: TypeExpr::BaseType("f32".to_string()),
                    body: Block {
                        statements: vec![
                            // let a = 10.0
                            Stmt::LetBind("a".to_string(), Expr::Literal("10.0".to_string())),
                            // let b = a  => consumes 'a'
                            Stmt::LetBind("b".to_string(), Expr::Identifier("a".to_string())),
                            // let c = a  => use-after-move!
                            Stmt::LetBind("c".to_string(), Expr::Identifier("a".to_string())),
                            Stmt::Return(Expr::Identifier("b".to_string())),
                        ],
                    },
                }),
            ]
        };
        let mut checker = TypeChecker::new();
        let result = checker.check_program(&program);
        assert_eq!(result, Err("Ownership Error: Use of moved variable 'a'".to_string()));
    }
}

View File

@@ -0,0 +1,45 @@
use aurac_parser::ast::{Expr, BinaryOp};
use std::collections::HashMap;
/// Minimal symbolic reasoning engine used to discharge refinement
/// constraints at compile time.
pub struct SymbolicEngine;
impl SymbolicEngine {
    /// Constructs the (stateless) engine.
    pub fn new() -> Self {
        Self
    }

    /// Attempts to prove that `expr` satisfies the active refinement,
    /// given the refinement constraints known for each parameter.
    ///
    /// Only one proof rule is implemented: the sum of two strictly
    /// positive operands is strictly positive. Everything else fails.
    pub fn prove_constraint(&self, expr: &Expr, constraints: &HashMap<String, Expr>) -> Result<(), String> {
        let provable = match expr {
            Expr::Binary(lhs, BinaryOp::Add, rhs) => {
                self.is_positive(lhs, constraints) && self.is_positive(rhs, constraints)
            }
            // Any other binary operator is beyond the prover's reach.
            Expr::Binary(_, _, _) => false,
            // A bare expression is provable only if it is itself known
            // to be positive (e.g. a constrained parameter).
            other => self.is_positive(other, constraints),
        };
        if provable {
            Ok(())
        } else {
            Err("Cannot mathematically prove constraint: result might be <= 0.0".to_string())
        }
    }

    /// True when `expr` is an identifier whose recorded constraint has
    /// the shape `<var> > 0.0`, i.e. its type guarantees strict positivity.
    fn is_positive(&self, expr: &Expr, constraints: &HashMap<String, Expr>) -> bool {
        let Expr::Identifier(id) = expr else {
            return false;
        };
        let Some(Expr::Binary(_, BinaryOp::Gt, bound)) = constraints.get(id) else {
            return false;
        };
        matches!(&**bound, Expr::Literal(lit) if lit == "0.0")
    }
}

13
crash_math.aura Normal file
View File

@@ -0,0 +1,13 @@
// Negative-test fixture for the refinement (proof) checker: `simulate`
// demands a strictly positive timestep, but `main` feeds it a negative
// literal, so compilation of this file is EXPECTED to fail.
type PositiveTime = f32{t | t > 0.0}
pure fn simulate(dt: PositiveTime) -> f32:
// CRASH: We are subtracting a value from a constrained positive type,
// which could result in a negative number, violating the return type if we expected a PositiveTime.
// For this test, let's just pass an invalid type.
return dt
pure fn main() -> f32:
// CRASH: Trying to pass a negative literal to a PositiveTime constraint!
let invalid_time = -5.0
let result = simulate(invalid_time)
return result

5
crash_memory.aura Normal file
View File

@@ -0,0 +1,5 @@
// Negative-test fixture for the affine ownership checker: `distance` is
// consumed by the first binding and then read again, so compilation of
// this file is EXPECTED to fail with a use-after-move error.
pure fn calculate_speed(distance: f32, time: f32) -> f32:
let temp_dist = distance
// CRASH: 'distance' was just moved to 'temp_dist'. We cannot use it again!
let speed = distance / time
return speed

12
main.c Normal file
View File

@@ -0,0 +1,12 @@
#include <stdio.h>
extern float calculate_new_position(float initial_x, float v, float dt);
/* Host-side driver: links against the Aura-compiled physics kernel and
 * prints the position reached from a fixed start, velocity and timestep. */
int main(void) {
    const float start = 100.0f;    /* initial position x0          */
    const float velocity = 50.0f;  /* constant velocity            */
    const float step = 2.0f;       /* integration timestep         */

    printf("Aura Physics Simulation Result: new_x = %f\n",
           calculate_new_position(start, velocity, step));
    return 0;
}

6
physics.aura Normal file
View File

@@ -0,0 +1,6 @@
// Demo module: a refined PositiveTime alias plus a pure kinematics
// function; `main.c` calls `calculate_new_position` through the C ABI.
type PositiveTime = f32{t | t > 0.0}
pure fn calculate_new_position(initial_x: f32, v: f32, dt: PositiveTime) -> f32:
let displacement = v * dt
let new_x = initial_x + displacement
return new_x