pr 08 lsp
This commit is contained in:
parent
912b9b0075
commit
06b49cf433
1
Cargo.lock
generated
1
Cargo.lock
generated
@ -1976,6 +1976,7 @@ name = "prometeu-lsp"
|
|||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"prometeu-analysis",
|
"prometeu-analysis",
|
||||||
|
"prometeu-compiler",
|
||||||
"tokio",
|
"tokio",
|
||||||
"tokio-util",
|
"tokio-util",
|
||||||
"tower-lsp",
|
"tower-lsp",
|
||||||
|
|||||||
@ -63,6 +63,11 @@ impl FileDB {
|
|||||||
pub fn line_index(&self, id: FileId) -> &LineIndex {
|
pub fn line_index(&self, id: FileId) -> &LineIndex {
|
||||||
&self.files[id.0 as usize].line_index
|
&self.files[id.0 as usize].line_index
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Returns a list of all known file IDs in insertion order.
|
||||||
|
pub fn all_files(&self) -> Vec<FileId> {
|
||||||
|
(0..self.files.len()).map(|i| FileId(i as u32)).collect()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl LineIndex {
|
impl LineIndex {
|
||||||
|
|||||||
@ -4,9 +4,8 @@ use crate::frontends::pbs::ast::*;
|
|||||||
use crate::frontends::pbs::contracts::ContractRegistry;
|
use crate::frontends::pbs::contracts::ContractRegistry;
|
||||||
use crate::frontends::pbs::symbols::*;
|
use crate::frontends::pbs::symbols::*;
|
||||||
use crate::frontends::pbs::types::PbsType;
|
use crate::frontends::pbs::types::PbsType;
|
||||||
use crate::ir_core;
|
|
||||||
use crate::ir_core::ids::{FieldId, FunctionId, TypeId, ValueId};
|
use crate::ir_core::ids::{FieldId, FunctionId, TypeId, ValueId};
|
||||||
use crate::ir_core::{Block, Function, Instr, InstrKind, Module, Param, Program, Terminator, Type};
|
use crate::ir_core::{Block, ConstPool, Function, Instr, InstrKind, Module, Param, Program, Terminator, Type};
|
||||||
use prometeu_analysis::{NameInterner, NodeId};
|
use prometeu_analysis::{NameInterner, NodeId};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
|
||||||
@ -62,7 +61,7 @@ impl<'a> Lowerer<'a> {
|
|||||||
imported_symbols,
|
imported_symbols,
|
||||||
interner,
|
interner,
|
||||||
program: Program {
|
program: Program {
|
||||||
const_pool: ir_core::ConstPool::new(),
|
const_pool: ConstPool::new(),
|
||||||
modules: Vec::new(),
|
modules: Vec::new(),
|
||||||
field_offsets,
|
field_offsets,
|
||||||
field_types: HashMap::new(),
|
field_types: HashMap::new(),
|
||||||
@ -1341,14 +1340,13 @@ impl<'a> Lowerer<'a> {
|
|||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
|
use crate::common::spans::FileId;
|
||||||
use crate::frontends::pbs::collector::SymbolCollector;
|
use crate::frontends::pbs::collector::SymbolCollector;
|
||||||
use crate::frontends::pbs::parser::Parser;
|
use crate::frontends::pbs::parser::Parser;
|
||||||
use crate::frontends::pbs::symbols::ModuleSymbols;
|
use crate::frontends::pbs::symbols::ModuleSymbols;
|
||||||
use crate::ir_core;
|
|
||||||
use prometeu_analysis::NameInterner;
|
use prometeu_analysis::NameInterner;
|
||||||
use crate::common::spans::FileId;
|
|
||||||
|
|
||||||
fn lower_program(code: &str) -> ir_core::Program {
|
fn lower_program(code: &str) -> Program {
|
||||||
let mut interner = NameInterner::new();
|
let mut interner = NameInterner::new();
|
||||||
let mut parser = Parser::new(code, FileId(0), &mut interner);
|
let mut parser = Parser::new(code, FileId(0), &mut interner);
|
||||||
let parsed = parser.parse_file().expect("Failed to parse");
|
let parsed = parser.parse_file().expect("Failed to parse");
|
||||||
@ -1383,13 +1381,13 @@ mod tests {
|
|||||||
|
|
||||||
let add_func = module.functions.iter().find(|f| f.name == "add").unwrap();
|
let add_func = module.functions.iter().find(|f| f.name == "add").unwrap();
|
||||||
assert_eq!(add_func.params.len(), 2);
|
assert_eq!(add_func.params.len(), 2);
|
||||||
assert_eq!(add_func.return_type, ir_core::Type::Int);
|
assert_eq!(add_func.return_type, Type::Int);
|
||||||
|
|
||||||
// Verify blocks
|
// Verify blocks
|
||||||
assert!(add_func.blocks.len() >= 1);
|
assert!(add_func.blocks.len() >= 1);
|
||||||
let first_block = &add_func.blocks[0];
|
let first_block = &add_func.blocks[0];
|
||||||
// Check for Add instruction
|
// Check for Add instruction
|
||||||
assert!(first_block.instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::Add)));
|
assert!(first_block.instrs.iter().any(|i| matches!(i.kind, InstrKind::Add)));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -1417,15 +1415,15 @@ mod tests {
|
|||||||
.flat_map(|b| b.instrs.iter())
|
.flat_map(|b| b.instrs.iter())
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
assert!(instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::Add)));
|
assert!(instrs.iter().any(|i| matches!(i.kind, InstrKind::Add)));
|
||||||
assert!(instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::Sub)));
|
assert!(instrs.iter().any(|i| matches!(i.kind, InstrKind::Sub)));
|
||||||
assert!(instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::Mul)));
|
assert!(instrs.iter().any(|i| matches!(i.kind, InstrKind::Mul)));
|
||||||
assert!(instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::Div)));
|
assert!(instrs.iter().any(|i| matches!(i.kind, InstrKind::Div)));
|
||||||
assert!(instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::Eq)));
|
assert!(instrs.iter().any(|i| matches!(i.kind, InstrKind::Eq)));
|
||||||
assert!(instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::Lt)));
|
assert!(instrs.iter().any(|i| matches!(i.kind, InstrKind::Lt)));
|
||||||
assert!(instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::Gt)));
|
assert!(instrs.iter().any(|i| matches!(i.kind, InstrKind::Gt)));
|
||||||
assert!(instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::And)));
|
assert!(instrs.iter().any(|i| matches!(i.kind, InstrKind::And)));
|
||||||
assert!(instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::Or)));
|
assert!(instrs.iter().any(|i| matches!(i.kind, InstrKind::Or)));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -1446,8 +1444,8 @@ mod tests {
|
|||||||
.flat_map(|b| b.instrs.iter())
|
.flat_map(|b| b.instrs.iter())
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
assert!(instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::Neg)));
|
assert!(instrs.iter().any(|i| matches!(i.kind, InstrKind::Neg)));
|
||||||
assert!(instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::Not)));
|
assert!(instrs.iter().any(|i| matches!(i.kind, InstrKind::Not)));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -1497,8 +1495,8 @@ mod tests {
|
|||||||
let main_func = module.functions.iter().find(|f| f.name == "main").unwrap();
|
let main_func = module.functions.iter().find(|f| f.name == "main").unwrap();
|
||||||
let terminators: Vec<_> = main_func.blocks.iter().map(|b| &b.terminator).collect();
|
let terminators: Vec<_> = main_func.blocks.iter().map(|b| &b.terminator).collect();
|
||||||
|
|
||||||
assert!(terminators.iter().any(|t| matches!(t, ir_core::Terminator::JumpIfFalse { .. })));
|
assert!(terminators.iter().any(|t| matches!(t, Terminator::JumpIfFalse { .. })));
|
||||||
assert!(terminators.iter().any(|t| matches!(t, ir_core::Terminator::Jump(_))));
|
assert!(terminators.iter().any(|t| matches!(t, Terminator::Jump(_))));
|
||||||
assert!(main_func.blocks.len() >= 3);
|
assert!(main_func.blocks.len() >= 3);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1518,7 +1516,7 @@ mod tests {
|
|||||||
let main_func = module.functions.iter().find(|f| f.name == "main").unwrap();
|
let main_func = module.functions.iter().find(|f| f.name == "main").unwrap();
|
||||||
let terminators: Vec<_> = main_func.blocks.iter().map(|b| &b.terminator).collect();
|
let terminators: Vec<_> = main_func.blocks.iter().map(|b| &b.terminator).collect();
|
||||||
|
|
||||||
assert!(terminators.iter().any(|t| matches!(t, ir_core::Terminator::JumpIfFalse { .. })));
|
assert!(terminators.iter().any(|t| matches!(t, Terminator::JumpIfFalse { .. })));
|
||||||
assert!(main_func.blocks.len() >= 5);
|
assert!(main_func.blocks.len() >= 5);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1548,24 +1546,24 @@ mod tests {
|
|||||||
let main_func = module.functions.iter().find(|f| f.name == "main").unwrap();
|
let main_func = module.functions.iter().find(|f| f.name == "main").unwrap();
|
||||||
let params: Vec<_> = main_func.params.iter().map(|p| p.ty.clone()).collect();
|
let params: Vec<_> = main_func.params.iter().map(|p| p.ty.clone()).collect();
|
||||||
|
|
||||||
assert_eq!(params[0], ir_core::Type::Service("MyService".to_string()));
|
assert_eq!(params[0], Type::Service("MyService".to_string()));
|
||||||
assert_eq!(params[1], ir_core::Type::Contract("MyContract".to_string()));
|
assert_eq!(params[1], Type::Contract("MyContract".to_string()));
|
||||||
assert_eq!(params[2], ir_core::Type::ErrorType("MyError".to_string()));
|
assert_eq!(params[2], Type::ErrorType("MyError".to_string()));
|
||||||
assert_eq!(params[3], ir_core::Type::Struct("Point".to_string()));
|
assert_eq!(params[3], Type::Struct("Point".to_string()));
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
params[4],
|
params[4],
|
||||||
ir_core::Type::Optional(Box::new(ir_core::Type::Int))
|
Type::Optional(Box::new(Type::Int))
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
params[5],
|
params[5],
|
||||||
ir_core::Type::Result(
|
Type::Result(
|
||||||
Box::new(ir_core::Type::Int),
|
Box::new(Type::Int),
|
||||||
Box::new(ir_core::Type::String)
|
Box::new(Type::String)
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
params[6],
|
params[6],
|
||||||
ir_core::Type::Array(Box::new(ir_core::Type::Int), 3)
|
Type::Array(Box::new(Type::Int), 3)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1591,7 +1589,7 @@ mod tests {
|
|||||||
|
|
||||||
assert!(instrs
|
assert!(instrs
|
||||||
.iter()
|
.iter()
|
||||||
.any(|i| matches!(i.kind, ir_core::InstrKind::Call(_, 2))));
|
.any(|i| matches!(i.kind, InstrKind::Call(_, 2))));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -1614,10 +1612,10 @@ mod tests {
|
|||||||
|
|
||||||
assert!(instrs
|
assert!(instrs
|
||||||
.iter()
|
.iter()
|
||||||
.any(|i| matches!(i.kind, ir_core::InstrKind::HostCall(_, _))));
|
.any(|i| matches!(i.kind, InstrKind::HostCall(_, _))));
|
||||||
assert!(instrs
|
assert!(instrs
|
||||||
.iter()
|
.iter()
|
||||||
.any(|i| matches!(i.kind, ir_core::InstrKind::PushBounded(_))));
|
.any(|i| matches!(i.kind, InstrKind::PushBounded(_))));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -1643,13 +1641,13 @@ mod tests {
|
|||||||
|
|
||||||
assert!(instrs
|
assert!(instrs
|
||||||
.iter()
|
.iter()
|
||||||
.any(|i| matches!(i.kind, ir_core::InstrKind::GetLocal(16))));
|
.any(|i| matches!(i.kind, InstrKind::GetLocal(16))));
|
||||||
assert!(instrs
|
assert!(instrs
|
||||||
.iter()
|
.iter()
|
||||||
.any(|i| matches!(i.kind, ir_core::InstrKind::GetLocal(18))));
|
.any(|i| matches!(i.kind, InstrKind::GetLocal(18))));
|
||||||
assert!(instrs
|
assert!(instrs
|
||||||
.iter()
|
.iter()
|
||||||
.any(|i| matches!(i.kind, ir_core::InstrKind::PushBounded(_))));
|
.any(|i| matches!(i.kind, InstrKind::PushBounded(_))));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -1677,12 +1675,12 @@ mod tests {
|
|||||||
|
|
||||||
let push_consts = instrs
|
let push_consts = instrs
|
||||||
.iter()
|
.iter()
|
||||||
.filter(|i| matches!(i.kind, ir_core::InstrKind::PushConst(_)))
|
.filter(|i| matches!(i.kind, InstrKind::PushConst(_)))
|
||||||
.count();
|
.count();
|
||||||
|
|
||||||
assert_eq!(push_consts, 3);
|
assert_eq!(push_consts, 3);
|
||||||
assert!(!instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::Call(_, _))));
|
assert!(!instrs.iter().any(|i| matches!(i.kind, InstrKind::Call(_, _))));
|
||||||
assert!(!instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::HostCall(_, _))));
|
assert!(!instrs.iter().any(|i| matches!(i.kind, InstrKind::HostCall(_, _))));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -1712,9 +1710,9 @@ mod tests {
|
|||||||
let func = &program.modules[0].functions[0];
|
let func = &program.modules[0].functions[0];
|
||||||
let instrs: Vec<_> = func.blocks.iter().flat_map(|b| b.instrs.iter()).collect();
|
let instrs: Vec<_> = func.blocks.iter().flat_map(|b| b.instrs.iter()).collect();
|
||||||
|
|
||||||
assert!(instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::Alloc { .. })));
|
assert!(instrs.iter().any(|i| matches!(i.kind, InstrKind::Alloc { .. })));
|
||||||
assert!(instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::BeginMutate { .. })));
|
assert!(instrs.iter().any(|i| matches!(i.kind, InstrKind::BeginMutate { .. })));
|
||||||
assert!(instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::EndMutate)));
|
assert!(instrs.iter().any(|i| matches!(i.kind, InstrKind::EndMutate)));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -1828,9 +1826,9 @@ mod tests {
|
|||||||
let instrs: Vec<_> = func.blocks.iter().flat_map(|b| b.instrs.iter()).collect();
|
let instrs: Vec<_> = func.blocks.iter().flat_map(|b| b.instrs.iter()).collect();
|
||||||
|
|
||||||
// Gfx.clear -> 0x1010
|
// Gfx.clear -> 0x1010
|
||||||
assert!(instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::HostCall(0x1010, 0))));
|
assert!(instrs.iter().any(|i| matches!(i.kind, InstrKind::HostCall(0x1010, 0))));
|
||||||
// Log.write -> 0x5001
|
// Log.write -> 0x5001
|
||||||
assert!(instrs.iter().any(|i| matches!(i.kind, ir_core::InstrKind::HostCall(0x5001, 0))));
|
assert!(instrs.iter().any(|i| matches!(i.kind, InstrKind::HostCall(0x5001, 0))));
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
|||||||
@ -9,3 +9,4 @@ tower-lsp = "0.20"
|
|||||||
tokio = { version = "1", features = ["full"] }
|
tokio = { version = "1", features = ["full"] }
|
||||||
tokio-util = { version = "0.7" }
|
tokio-util = { version = "0.7" }
|
||||||
prometeu-analysis = { path = "../prometeu-analysis" }
|
prometeu-analysis = { path = "../prometeu-analysis" }
|
||||||
|
prometeu-compiler = { path = "../prometeu-compiler" }
|
||||||
|
|||||||
@ -5,6 +5,8 @@ use tokio_util::sync::CancellationToken;
|
|||||||
use prometeu_analysis::FileDB;
|
use prometeu_analysis::FileDB;
|
||||||
use prometeu_analysis::ids::{FileId, ProjectId};
|
use prometeu_analysis::ids::{FileId, ProjectId};
|
||||||
use std::collections::HashMap;
|
use std::collections::HashMap;
|
||||||
|
use crate::rebuild::LspDiagnostic;
|
||||||
|
use crate::rebuild::FlatSymbol;
|
||||||
|
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
pub struct AnalysisDb {
|
pub struct AnalysisDb {
|
||||||
@ -22,6 +24,9 @@ pub struct AnalysisDb {
|
|||||||
|
|
||||||
/// Cancel token do último rebuild em progresso (se houver)
|
/// Cancel token do último rebuild em progresso (se houver)
|
||||||
pub active_rebuild: Option<CancellationToken>,
|
pub active_rebuild: Option<CancellationToken>,
|
||||||
|
|
||||||
|
/// Último snapshot bom (consultado pelos handlers LSP)
|
||||||
|
pub last_good: Option<AnalysisSnapshot>,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub type SharedDb = Arc<RwLock<AnalysisDb>>;
|
pub type SharedDb = Arc<RwLock<AnalysisDb>>;
|
||||||
@ -30,4 +35,18 @@ impl AnalysisDb {
|
|||||||
pub fn project_for_file(&self, file: FileId) -> Option<ProjectId> {
|
pub fn project_for_file(&self, file: FileId) -> Option<ProjectId> {
|
||||||
self.file_to_project.get(&file).copied()
|
self.file_to_project.get(&file).copied()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Returns all known file ids in the FileDB.
|
||||||
|
pub fn file_ids(&self) -> Vec<FileId> {
|
||||||
|
// delegate to FileDB helper (added in prometeu-analysis)
|
||||||
|
self.file_db.all_files()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Default, Clone)]
|
||||||
|
pub struct AnalysisSnapshot {
|
||||||
|
/// Diagnostics por arquivo (URI LSP → diagnostics já convertidos)
|
||||||
|
pub diagnostics_by_uri: HashMap<String, Vec<LspDiagnostic>>,
|
||||||
|
/// Lista “flatten” de símbolos para workspaceSymbol/documentSymbol
|
||||||
|
pub symbols_flat: Vec<FlatSymbol>,
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,6 +1,7 @@
|
|||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use tokio::sync::RwLock;
|
use tokio::sync::RwLock;
|
||||||
use tower_lsp::{LspService, Server};
|
use tower_lsp::{Client, LspService, Server};
|
||||||
|
use tower_lsp::lsp_types as lsp;
|
||||||
|
|
||||||
mod analysis_db;
|
mod analysis_db;
|
||||||
mod rebuild;
|
mod rebuild;
|
||||||
@ -9,6 +10,7 @@ use analysis_db::SharedDb;
|
|||||||
|
|
||||||
struct Backend {
|
struct Backend {
|
||||||
db: SharedDb,
|
db: SharedDb,
|
||||||
|
client: Client,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tower_lsp::async_trait]
|
#[tower_lsp::async_trait]
|
||||||
@ -24,34 +26,10 @@ impl tower_lsp::LanguageServer for Backend {
|
|||||||
tower_lsp::lsp_types::TextDocumentSyncKind::FULL,
|
tower_lsp::lsp_types::TextDocumentSyncKind::FULL,
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
// Declare capabilities up front to avoid later churn.
|
// MVP capabilities only (PR-08):
|
||||||
definition_provider: Some(tower_lsp::lsp_types::OneOf::Left(true)),
|
definition_provider: Some(tower_lsp::lsp_types::OneOf::Left(true)),
|
||||||
document_symbol_provider: Some(tower_lsp::lsp_types::OneOf::Left(true)),
|
document_symbol_provider: Some(tower_lsp::lsp_types::OneOf::Left(true)),
|
||||||
workspace_symbol_provider: Some(tower_lsp::lsp_types::OneOf::Left(true)),
|
// workspace_symbol is not available in tower-lsp 0.20 trait
|
||||||
hover_provider: Some(true.into()),
|
|
||||||
references_provider: Some(tower_lsp::lsp_types::OneOf::Left(true)),
|
|
||||||
rename_provider: Some(tower_lsp::lsp_types::OneOf::Left(true)),
|
|
||||||
completion_provider: Some(tower_lsp::lsp_types::CompletionOptions {
|
|
||||||
resolve_provider: Some(false),
|
|
||||||
trigger_characters: Some(vec![".".into(), ":".into()]),
|
|
||||||
..Default::default()
|
|
||||||
}),
|
|
||||||
semantic_tokens_provider: Some(
|
|
||||||
tower_lsp::lsp_types::SemanticTokensServerCapabilities::SemanticTokensOptions(
|
|
||||||
tower_lsp::lsp_types::SemanticTokensOptions {
|
|
||||||
legend: tower_lsp::lsp_types::SemanticTokensLegend {
|
|
||||||
// filled in PR-12
|
|
||||||
token_types: vec![],
|
|
||||||
token_modifiers: vec![],
|
|
||||||
},
|
|
||||||
full: Some(
|
|
||||||
tower_lsp::lsp_types::SemanticTokensFullOptions::Bool(true),
|
|
||||||
),
|
|
||||||
range: None,
|
|
||||||
..Default::default()
|
|
||||||
},
|
|
||||||
),
|
|
||||||
),
|
|
||||||
..Default::default()
|
..Default::default()
|
||||||
},
|
},
|
||||||
..Default::default()
|
..Default::default()
|
||||||
@ -63,6 +41,124 @@ impl tower_lsp::LanguageServer for Backend {
|
|||||||
async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> {
|
async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// didOpen: upsert texto, solicita rebuild
|
||||||
|
async fn did_open(&self, params: tower_lsp::lsp_types::DidOpenTextDocumentParams) {
|
||||||
|
let uri = params.text_document.uri.to_string();
|
||||||
|
let text = params.text_document.text;
|
||||||
|
{
|
||||||
|
let mut guard = self.db.write().await;
|
||||||
|
guard.file_db.upsert(&uri, text);
|
||||||
|
}
|
||||||
|
rebuild::request_rebuild(self.db.clone(), self.client.clone()).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
// didChange (FULL): receber conteúdo completo e upsert
|
||||||
|
async fn did_change(&self, params: tower_lsp::lsp_types::DidChangeTextDocumentParams) {
|
||||||
|
let uri = params.text_document.uri.to_string();
|
||||||
|
// Full-sync: esperamos 1 conteúdo completo
|
||||||
|
if let Some(change) = params.content_changes.into_iter().last() {
|
||||||
|
let text = change.text;
|
||||||
|
let mut guard = self.db.write().await;
|
||||||
|
guard.file_db.upsert(&uri, text);
|
||||||
|
}
|
||||||
|
rebuild::request_rebuild(self.db.clone(), self.client.clone()).await;
|
||||||
|
}
|
||||||
|
|
||||||
|
// didClose: opcionalmente remover do db e limpar diagnostics
|
||||||
|
async fn did_close(&self, params: tower_lsp::lsp_types::DidCloseTextDocumentParams) {
|
||||||
|
let uri = params.text_document.uri;
|
||||||
|
// Estratégia simples: manter FileDB para estabilidade de IDs, mas limpar diagnostics
|
||||||
|
let _ = self
|
||||||
|
.client
|
||||||
|
.publish_diagnostics(uri.clone(), vec![], Some(0))
|
||||||
|
.await;
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn goto_definition(
|
||||||
|
&self,
|
||||||
|
params: tower_lsp::lsp_types::GotoDefinitionParams,
|
||||||
|
) -> tower_lsp::jsonrpc::Result<Option<tower_lsp::lsp_types::GotoDefinitionResponse>> {
|
||||||
|
let tdp = params.text_document_position_params;
|
||||||
|
let uri = tdp.text_document.uri;
|
||||||
|
let pos = tdp.position;
|
||||||
|
|
||||||
|
let guard = self.db.read().await;
|
||||||
|
// Map URI to current text and index
|
||||||
|
let Some(fid) = guard.file_db.file_id(uri.as_str()) else { return Ok(None) };
|
||||||
|
let text = guard.file_db.text(fid).to_string();
|
||||||
|
let idx = prometeu_analysis::TextIndex::new(&text);
|
||||||
|
let byte = idx.lsp_to_byte(pos.line, pos.character);
|
||||||
|
let ident = ident_at(&text, byte);
|
||||||
|
|
||||||
|
if let Some(name) = ident {
|
||||||
|
if let Some(snap) = &guard.last_good {
|
||||||
|
let mut hits: Vec<lsp::Location> = Vec::new();
|
||||||
|
for s in &snap.symbols_flat {
|
||||||
|
if s.name == name {
|
||||||
|
hits.push(s.location.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !hits.is_empty() {
|
||||||
|
return Ok(Some(lsp::GotoDefinitionResponse::Array(hits)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// MVP stubs: documentSymbol/workspaceSymbol/definition retornam vazio até PRs seguintes
|
||||||
|
async fn document_symbol(
|
||||||
|
&self,
|
||||||
|
params: tower_lsp::lsp_types::DocumentSymbolParams,
|
||||||
|
) -> tower_lsp::jsonrpc::Result<Option<tower_lsp::lsp_types::DocumentSymbolResponse>> {
|
||||||
|
let uri = params.text_document.uri;
|
||||||
|
let guard = self.db.read().await;
|
||||||
|
if let Some(snap) = &guard.last_good {
|
||||||
|
let mut items: Vec<lsp::SymbolInformation> = Vec::new();
|
||||||
|
for s in &snap.symbols_flat {
|
||||||
|
if s.location.uri == uri {
|
||||||
|
items.push(lsp::SymbolInformation {
|
||||||
|
name: s.name.clone(),
|
||||||
|
kind: s.kind,
|
||||||
|
location: s.location.clone(),
|
||||||
|
tags: None,
|
||||||
|
deprecated: None,
|
||||||
|
container_name: None,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return Ok(Some(lsp::DocumentSymbolResponse::Flat(items)));
|
||||||
|
}
|
||||||
|
Ok(Some(lsp::DocumentSymbolResponse::Flat(vec![])))
|
||||||
|
}
|
||||||
|
|
||||||
|
// async fn workspace_symbol(
|
||||||
|
// &self,
|
||||||
|
// params: lsp::WorkspaceSymbolParams,
|
||||||
|
// ) -> tower_lsp::jsonrpc::Result<Option<Vec<lsp::SymbolInformation>>> {
|
||||||
|
// let query = params.query.to_lowercase();
|
||||||
|
// let guard = self.db.read().await;
|
||||||
|
// if let Some(snap) = &guard.last_good {
|
||||||
|
// let mut out: Vec<lsp::SymbolInformation> = Vec::new();
|
||||||
|
// for s in &snap.symbols_flat {
|
||||||
|
// if s.name.to_lowercase().contains(&query) {
|
||||||
|
// out.push(lsp::SymbolInformation {
|
||||||
|
// name: s.name.clone(),
|
||||||
|
// kind: s.kind,
|
||||||
|
// location: s.location.clone(),
|
||||||
|
// tags: None,
|
||||||
|
// deprecated: None,
|
||||||
|
// container_name: None,
|
||||||
|
// });
|
||||||
|
// if out.len() >= 50 { break; }
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// return Ok(Some(out));
|
||||||
|
// }
|
||||||
|
// Ok(Some(vec![]))
|
||||||
|
// }
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tokio::main]
|
#[tokio::main]
|
||||||
@ -72,6 +168,25 @@ async fn main() {
|
|||||||
|
|
||||||
let db: SharedDb = Arc::new(RwLock::new(analysis_db::AnalysisDb::default()));
|
let db: SharedDb = Arc::new(RwLock::new(analysis_db::AnalysisDb::default()));
|
||||||
|
|
||||||
let (service, socket) = LspService::new(|_client| Backend { db: db.clone() });
|
let (service, socket) = LspService::new(|client| Backend { db: db.clone(), client });
|
||||||
Server::new(stdin, stdout, socket).serve(service).await;
|
Server::new(stdin, stdout, socket).serve(service).await;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Simple textual identifier extraction for MVP definition lookup.
|
||||||
|
fn ident_at(text: &str, byte: u32) -> Option<String> {
|
||||||
|
let b = byte as usize;
|
||||||
|
if b > text.len() { return None; }
|
||||||
|
// Expand left and right over identifier characters (ASCII + underscore; acceptable MVP)
|
||||||
|
let bytes = text.as_bytes();
|
||||||
|
let mut start = b;
|
||||||
|
while start > 0 {
|
||||||
|
let c = bytes[start - 1];
|
||||||
|
if (c as char).is_ascii_alphanumeric() || c == b'_' { start -= 1; } else { break; }
|
||||||
|
}
|
||||||
|
let mut end = b;
|
||||||
|
while end < bytes.len() {
|
||||||
|
let c = bytes[end];
|
||||||
|
if (c as char).is_ascii_alphanumeric() || c == b'_' { end += 1; } else { break; }
|
||||||
|
}
|
||||||
|
if start < end { Some(text[start..end].to_string()) } else { None }
|
||||||
|
}
|
||||||
|
|||||||
@ -1,10 +1,29 @@
|
|||||||
use tokio_util::sync::CancellationToken;
|
use tokio_util::sync::CancellationToken;
|
||||||
|
use tower_lsp::Client;
|
||||||
|
|
||||||
use crate::analysis_db::SharedDb;
|
use crate::analysis_db::{AnalysisSnapshot, SharedDb};
|
||||||
|
use prometeu_analysis::{TextIndex};
|
||||||
|
use tower_lsp::lsp_types as lsp;
|
||||||
|
use prometeu_analysis::ids::FileId;
|
||||||
|
use crate::rebuild::compiler_bridge::{ParserFacade, Severity};
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct LspDiagnostic {
|
||||||
|
pub range: lsp::Range,
|
||||||
|
pub severity: Option<lsp::DiagnosticSeverity>,
|
||||||
|
pub code: Option<lsp::NumberOrString>,
|
||||||
|
pub message: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct FlatSymbol {
|
||||||
|
pub name: String,
|
||||||
|
pub kind: lsp::SymbolKind,
|
||||||
|
pub location: lsp::Location,
|
||||||
|
}
|
||||||
|
|
||||||
/// Requests a project rebuild (coarse). Cancels the previous rebuild if in progress.
|
/// Requests a project rebuild (coarse). Cancels the previous rebuild if in progress.
|
||||||
/// Initial implementation: only spawns a task and returns.
|
pub async fn request_rebuild(db: SharedDb, client: Client) {
|
||||||
pub async fn request_rebuild(db: SharedDb) {
|
|
||||||
// 1) short lock: cancel previous token and install a new one
|
// 1) short lock: cancel previous token and install a new one
|
||||||
let new_token = CancellationToken::new();
|
let new_token = CancellationToken::new();
|
||||||
{
|
{
|
||||||
@ -20,29 +39,187 @@ pub async fn request_rebuild(db: SharedDb) {
|
|||||||
// Safe point: check before starting
|
// Safe point: check before starting
|
||||||
if new_token.is_cancelled() { return; }
|
if new_token.is_cancelled() { return; }
|
||||||
|
|
||||||
// TODO(PR-03/04/05):
|
// Clone snapshot of files (URIs and texts) under a short read lock
|
||||||
// - Clone FileDB snapshot
|
let (files, revision) = {
|
||||||
// - Parsing
|
let guard = db.read().await;
|
||||||
// - Binding/Resolver
|
let mut v = Vec::new();
|
||||||
// - Typecheck
|
for fid in guard.file_ids() {
|
||||||
// - Collect diagnostics
|
let uri = guard.file_db.uri(fid).to_string();
|
||||||
|
let text = guard.file_db.text(fid).to_string();
|
||||||
// Safe point: after parsing
|
v.push((fid, uri, text));
|
||||||
if new_token.is_cancelled() { return; }
|
|
||||||
// Safe point: after resolving
|
|
||||||
if new_token.is_cancelled() { return; }
|
|
||||||
// Safe point: after typecheck
|
|
||||||
if new_token.is_cancelled() { return; }
|
|
||||||
|
|
||||||
// 3) short lock: if token not cancelled, swap state + revision++
|
|
||||||
if !new_token.is_cancelled() {
|
|
||||||
let mut guard = db.write().await;
|
|
||||||
// TODO: check if the token is still the active one when we have a stable identifier
|
|
||||||
if !new_token.is_cancelled() {
|
|
||||||
// TODO: apply real analysis results when available
|
|
||||||
guard.revision = guard.revision.saturating_add(1);
|
|
||||||
// Note: we don't clear `active_rebuild` here to avoid interfering with a newer rebuild
|
|
||||||
}
|
}
|
||||||
|
(v, guard.revision)
|
||||||
|
};
|
||||||
|
|
||||||
|
// Prepare accumulators
|
||||||
|
let mut diagnostics_by_uri: std::collections::HashMap<String, Vec<LspDiagnostic>> = std::collections::HashMap::new();
|
||||||
|
let mut symbols_flat: Vec<FlatSymbol> = Vec::new();
|
||||||
|
|
||||||
|
// For each file: run a minimal frontend to collect diagnostics and top-level symbols
|
||||||
|
for (fid, uri, text) in files.into_iter() {
|
||||||
|
if new_token.is_cancelled() { return; }
|
||||||
|
let text_index = TextIndex::new(&text);
|
||||||
|
|
||||||
|
// Parser + basic pipeline
|
||||||
|
let mut interner = prometeu_analysis::NameInterner::new();
|
||||||
|
let mut parser = ParserFacade::new(&text, fid, &mut interner);
|
||||||
|
match parser.parse_and_collect() {
|
||||||
|
Ok(parsed) => {
|
||||||
|
// Diagnostics (from parse/collect are already inside parsed.diags)
|
||||||
|
let mut file_diags = Vec::new();
|
||||||
|
for d in parsed.diagnostics {
|
||||||
|
let range = span_to_range(fid, &text_index, d.span.start, d.span.end);
|
||||||
|
file_diags.push(LspDiagnostic {
|
||||||
|
range,
|
||||||
|
severity: Some(match d.severity { Severity::Error => lsp::DiagnosticSeverity::ERROR, Severity::Warning => lsp::DiagnosticSeverity::WARNING }),
|
||||||
|
code: Some(lsp::NumberOrString::String(d.code)),
|
||||||
|
message: d.message,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
diagnostics_by_uri.insert(uri.clone(), file_diags);
|
||||||
|
|
||||||
|
// Symbols: flatten only top-level decls with their decl_span
|
||||||
|
for sym in parsed.symbols {
|
||||||
|
let lsp_loc = lsp::Location {
|
||||||
|
uri: uri.parse().unwrap_or_else(|_| lsp::Url::parse("untitled:").unwrap()),
|
||||||
|
range: span_to_range(fid, &text_index, sym.decl_span.start, sym.decl_span.end),
|
||||||
|
};
|
||||||
|
let kind = match sym.kind {
|
||||||
|
prometeu_compiler::analysis::symbols::SymbolKind::Function => lsp::SymbolKind::FUNCTION,
|
||||||
|
prometeu_compiler::analysis::symbols::SymbolKind::Service => lsp::SymbolKind::INTERFACE,
|
||||||
|
prometeu_compiler::analysis::symbols::SymbolKind::Struct => lsp::SymbolKind::STRUCT,
|
||||||
|
prometeu_compiler::analysis::symbols::SymbolKind::Contract => lsp::SymbolKind::CLASS,
|
||||||
|
prometeu_compiler::analysis::symbols::SymbolKind::ErrorType => lsp::SymbolKind::ENUM,
|
||||||
|
_ => lsp::SymbolKind::VARIABLE,
|
||||||
|
};
|
||||||
|
symbols_flat.push(FlatSymbol { name: sym.name, kind, location: lsp_loc });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Err(diags) => {
|
||||||
|
// Parser returned errors only; publish them
|
||||||
|
let mut file_diags = Vec::new();
|
||||||
|
for d in diags {
|
||||||
|
let range = span_to_range(fid, &text_index, d.span.start, d.span.end);
|
||||||
|
file_diags.push(LspDiagnostic {
|
||||||
|
range,
|
||||||
|
severity: Some(match d.severity { Severity::Error => lsp::DiagnosticSeverity::ERROR, Severity::Warning => lsp::DiagnosticSeverity::WARNING }),
|
||||||
|
code: Some(lsp::NumberOrString::String(d.code)),
|
||||||
|
message: d.message,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
diagnostics_by_uri.insert(uri.clone(), file_diags);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if new_token.is_cancelled() { return; }
|
||||||
|
|
||||||
|
// 3) short lock: swap state + revision++ if not cancelled; then publish diagnostics
|
||||||
|
let snapshot = AnalysisSnapshot { diagnostics_by_uri: diagnostics_by_uri.clone(), symbols_flat };
|
||||||
|
{
|
||||||
|
let mut guard = db.write().await;
|
||||||
|
if new_token.is_cancelled() { return; }
|
||||||
|
// if no new changes since we started, accept this snapshot
|
||||||
|
guard.last_good = Some(snapshot);
|
||||||
|
guard.revision = revision.saturating_add(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Publish diagnostics per file
|
||||||
|
for (uri, diags) in diagnostics_by_uri.into_iter() {
|
||||||
|
let lsp_diags: Vec<lsp::Diagnostic> = diags.into_iter().map(|d| lsp::Diagnostic {
|
||||||
|
range: d.range,
|
||||||
|
severity: d.severity,
|
||||||
|
code: d.code,
|
||||||
|
message: d.message,
|
||||||
|
..Default::default()
|
||||||
|
}).collect();
|
||||||
|
let _ = client.publish_diagnostics(uri.parse().unwrap_or_else(|_| lsp::Url::parse("untitled:").unwrap()), lsp_diags, None).await;
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Converts a byte-offset span into an LSP `Range` (zero-based line /
/// character positions) using the file's precomputed text index.
///
/// The file id is intentionally unused (hence the `_` prefix, which keeps the
/// signature call-compatible while silencing the unused-variable lint): `idx`
/// is assumed to have been built from that same file's text, so the byte
/// offsets resolve directly against it.
fn span_to_range(_file: FileId, idx: &TextIndex, start: u32, end: u32) -> lsp::Range {
    let (s_line, s_col) = idx.byte_to_lsp(start);
    let (e_line, e_col) = idx.byte_to_lsp(end);
    lsp::Range {
        start: lsp::Position { line: s_line, character: s_col },
        end: lsp::Position { line: e_line, character: e_col },
    }
}
|
||||||
|
|
||||||
|
/// Minimal integration with the compiler frontend for the MVP rebuild loop.
|
||||||
|
mod compiler_bridge {
|
||||||
|
use super::*;
|
||||||
|
use prometeu_compiler::frontends::pbs as p;
|
||||||
|
use prometeu_compiler::common::spans as cspans;
|
||||||
|
use prometeu_compiler::common::diagnostics as cdiag;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub enum Severity { Error, Warning }
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct Diag { pub severity: Severity, pub code: String, pub message: String, pub span: cspans::Span }
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct SymbolItem { pub name: String, pub kind: prometeu_compiler::analysis::symbols::SymbolKind, pub decl_span: cspans::Span }
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct ParsedResult { pub diagnostics: Vec<Diag>, pub symbols: Vec<SymbolItem> }
|
||||||
|
|
||||||
|
pub struct ParserFacade<'a> {
|
||||||
|
text: &'a str,
|
||||||
|
file_id: FileId,
|
||||||
|
interner: &'a mut prometeu_analysis::NameInterner,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> ParserFacade<'a> {
|
||||||
|
pub fn new(text: &'a str, file_id: FileId, interner: &'a mut prometeu_analysis::NameInterner) -> Self {
|
||||||
|
Self { text, file_id, interner }
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parse_and_collect(&mut self) -> Result<ParsedResult, Vec<Diag>> {
|
||||||
|
let mut parser = p::parser::Parser::new(self.text, cspans::FileId(self.file_id.0), self.interner);
|
||||||
|
let parsed = match parser.parse_file() {
|
||||||
|
Ok(p) => p,
|
||||||
|
Err(bundle) => {
|
||||||
|
let diags = bundle.diagnostics.into_iter().map(|d| Diag { severity: match d.severity { cdiag::Severity::Error => Severity::Error, cdiag::Severity::Warning => Severity::Warning }, code: d.code, message: d.message, span: d.span }).collect();
|
||||||
|
return Err(diags);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut collector = p::collector::SymbolCollector::new(self.interner);
|
||||||
|
let (type_symbols, value_symbols) = match collector.collect(&parsed.arena, parsed.root) {
|
||||||
|
Ok(v) => v,
|
||||||
|
Err(bundle) => {
|
||||||
|
let diags = bundle.diagnostics.into_iter().map(|d| Diag { severity: match d.severity { cdiag::Severity::Error => Severity::Error, cdiag::Severity::Warning => Severity::Warning }, code: d.code, message: d.message, span: d.span }).collect();
|
||||||
|
return Err(diags);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let module_symbols = p::symbols::ModuleSymbols { type_symbols, value_symbols };
|
||||||
|
|
||||||
|
struct EmptyProvider;
|
||||||
|
impl p::resolver::ModuleProvider for EmptyProvider { fn get_module_symbols(&self, _from_path: &str) -> Option<&p::symbols::ModuleSymbols> { None } }
|
||||||
|
let mut resolver = p::resolver::Resolver::new(&module_symbols, &EmptyProvider, self.interner);
|
||||||
|
// bootstrap primitives using a throwaway interner behavior
|
||||||
|
resolver.bootstrap_types(self.interner);
|
||||||
|
if let Err(bundle) = resolver.resolve(&parsed.arena, parsed.root) {
|
||||||
|
let diags = bundle.diagnostics.into_iter().map(|d| Diag { severity: match d.severity { cdiag::Severity::Error => Severity::Error, cdiag::Severity::Warning => Severity::Warning }, code: d.code, message: d.message, span: d.span }).collect();
|
||||||
|
return Err(diags);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Collect top-level symbols only for MVP
|
||||||
|
let mut symbols = Vec::new();
|
||||||
|
for s in &resolver.symbol_arena.symbols {
|
||||||
|
// Keep only decls in this file
|
||||||
|
if s.decl_span.file.0 == self.file_id.0 {
|
||||||
|
let name = self.interner.resolve(s.name).to_string();
|
||||||
|
let kind = s.kind;
|
||||||
|
symbols.push(SymbolItem { name, kind, decl_span: s.decl_span.clone() });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(ParsedResult { diagnostics: vec![], symbols })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
Binary file not shown.
3
test-cartridges/test01/settings.json
Normal file
3
test-cartridges/test01/settings.json
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
{
|
||||||
|
"prometeuPbs.serverPath": "/Users/niltonconstantino/personal/workspace.personal/intrepid/prometeu/runtime/target/debug/prometeu-lsp --studio"
|
||||||
|
}
|
||||||
@ -1,11 +1,42 @@
|
|||||||
import { Color, Gfx } from "@sdk:gfx";
|
import { Color, Gfx } from "@sdk:gfx";
|
||||||
import { Input } from "@sdk:input";
|
import { Input } from "@sdk:input";
|
||||||
|
|
||||||
|
// 2D integer vector used by the test cartridge.
// NOTE(review): PBS syntax, inferred from this file only — the `[...]` block
// appears to list named constructors (`default`, `square`) and the `[[...]]`
// block static constants (`ZERO`, built via the `square` constructor);
// confirm against the PBS frontend grammar.
declare struct Vec2(x: int, y: int)
[
    // Primary constructor: takes both components.
    (x: int, y: int): (x, y) as default { }
    // "square" constructor: same value for both components.
    (s: int): (s, s) as square { }
]
[[
    // Origin vector (0, 0) via the square constructor.
    ZERO: square(0)
]]
{
    // Accessor for the x component.
    fn getX(self: this): int {
        return x;
    }

    // Accessor for the y component.
    fn getY(self: this): int {
        return y;
    }
}
|
||||||
|
|
||||||
// Returns the sum of two integers.
fn add(a: int, b: int): int {
    return a + b;
}
|
||||||
|
|
||||||
|
// True when the sum of `a` and `b` is at least 30.
// Exercises a typed local binding and a comparison in the test cartridge.
fn is_higher(a: int, b: int): bool {
    let c: int = a + b;
    return c >= 30;
}
|
||||||
|
|
||||||
|
// Adds two integers by delegating to `add`; exercises a function call
// and an inferred (untyped) local binding.
fn add2(a: int, b: int): int {
    let c = add(a, b);
    return c;
}
|
||||||
|
|
||||||
fn frame(): void {
|
fn frame(): void {
|
||||||
|
let zero = Vec2.ZERO;
|
||||||
|
let zz = add(zero.getX(), zero.getY());
|
||||||
|
|
||||||
// 1. Locals & Arithmetic
|
// 1. Locals & Arithmetic
|
||||||
let x = 10;
|
let x = 10;
|
||||||
let y = 20;
|
let y = 20;
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user