1mod bundle;
6mod commit;
7mod convergence;
8mod init;
9mod planning;
10mod repair;
11mod solo;
12mod verification;
13
14use crate::agent::{ActuatorAgent, Agent, ArchitectAgent, SpeculatorAgent, VerifierAgent};
15use crate::context_retriever::ContextRetriever;
16use crate::lsp::LspClient;
17use crate::test_runner::{self, PythonTestRunner, TestResults};
18use crate::tools::{AgentTools, ToolCall};
19use crate::types::{AgentContext, EnergyComponents, ModelTier, NodeState, SRBNNode, TaskPlan};
20use anyhow::{Context, Result};
21use perspt_core::types::{
22 EscalationCategory, EscalationReport, NodeClass, ProvisionalBranch, ProvisionalBranchState,
23 RewriteAction, RewriteRecord, SheafValidationResult, SheafValidatorClass, WorkspaceState,
24};
25use petgraph::graph::{DiGraph, NodeIndex};
26use petgraph::visit::{EdgeRef, Topo, Walker};
27use std::collections::HashMap;
28use std::path::PathBuf;
29use std::sync::atomic::{AtomicBool, Ordering};
30use std::sync::Arc;
31use std::time::Instant;
32
/// Edge payload for the task graph: labels *why* one node depends on another.
#[derive(Debug, Clone)]
pub struct Dependency {
    /// Free-form edge label; mirrors the `edge_type` string persisted in the
    /// ledger (see `rehydrate_session`) rather than a closed enum.
    pub kind: String,
}
39
/// Outcome of a user (or automatic) approval request for a pending agent action.
#[derive(Debug, Clone)]
pub enum ApprovalResult {
    /// The action may proceed unchanged.
    Approved,
    /// The action may proceed, carrying the user-edited value.
    ApprovedWithEdit(String),
    /// The action was refused — also returned on session abort and on
    /// correction requests (see `await_approval_for_node`).
    Rejected,
}
50
/// Terminal result of executing a single graph node.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum NodeOutcome {
    /// The node finished successfully.
    Completed,
    /// The node could not be completed and was escalated.
    Escalated,
}
59
/// Central orchestrator: owns the SRBN task graph, the agent roster, tool
/// access, budget tracking, and the Merkle ledger used for persistence.
pub struct SRBNOrchestrator {
    /// Directed task graph; edges carry `Dependency { kind }` labels.
    pub graph: DiGraph<SRBNNode, Dependency>,
    /// Lookup from `node_id` to its index in `graph`; maintained by `add_node`.
    node_indices: HashMap<String, NodeIndex>,
    /// Shared per-session execution context (working dir, session id, plugins, ...).
    pub context: AgentContext,
    /// When true, approval requests are granted without user interaction.
    pub auto_approve: bool,
    /// Running LSP clients keyed by string — presumably language/plugin name; TODO confirm.
    lsp_clients: HashMap<String, LspClient>,
    /// Agent roster, constructed as [Architect, Actuator, Verifier, Speculator].
    agents: Vec<Box<dyn Agent>>,
    /// File/tool access layer shared by the agents.
    tools: AgentTools,
    /// Most recently written file, if any — not read in this section of the file.
    last_written_file: Option<PathBuf>,
    /// Version counter for the last written file — presumably used for LSP
    /// document sync; TODO confirm against the write path.
    file_version: i32,
    /// Shared LLM provider handle used by all agents.
    provider: std::sync::Arc<perspt_core::llm_provider::GenAIProvider>,
    /// Resolved (non-optional) model name for the Architect tier.
    architect_model: String,
    /// Resolved model name for the Actuator tier.
    actuator_model: String,
    /// Resolved model name for the Verifier tier.
    verifier_model: String,
    /// Resolved model name for the Speculator tier.
    speculator_model: String,
    /// Optional fallback model per tier — fallback semantics not visible here.
    architect_fallback_model: Option<String>,
    actuator_fallback_model: Option<String>,
    verifier_fallback_model: Option<String>,
    speculator_fallback_model: Option<String>,
    /// TUI event channel; `None` when running headless.
    event_sender: Option<perspt_core::events::channel::EventSender>,
    /// TUI action channel; `None` implies approvals are auto-granted
    /// (see `await_approval_for_node`).
    action_receiver: Option<perspt_core::events::channel::ActionReceiver>,
    /// Ledger recording sessions, steps, budgets, and review outcomes.
    pub ledger: crate::ledger::MerkleLedger,
    /// Human-readable description of the most recent tool failure, if any.
    pub last_tool_failure: Option<String>,
    /// Provenance of the most recent context retrieval — not read in this section.
    last_context_provenance: Option<perspt_core::types::ContextProvenance>,
    /// Formatted context string from the most recent retrieval.
    last_formatted_context: String,
    /// Result of the most recent verification pass, if any.
    last_verification_result: Option<perspt_core::types::VerificationResult>,
    /// Most recently applied artifact bundle, if any.
    last_applied_bundle: Option<perspt_core::types::ArtifactBundle>,
    /// Footprint of the most recent repair attempt, if any.
    last_repair_footprint: Option<perspt_core::RepairFootprint>,
    /// Children blocked on unsealed Interface parents; populated during rehydration.
    blocked_dependencies: Vec<perspt_core::types::BlockedDependency>,
    /// Session budget (steps / revisions / cost) with usage counters.
    budget: perspt_core::types::BudgetEnvelope,
    /// Planning policy controlling charter limits and architect involvement.
    pub planning_policy: perspt_core::PlanningPolicy,
    /// Stability threshold (default 0.1) — semantics defined where consumed; TODO confirm.
    pub stability_epsilon: f32,
    /// Energy-function weight alpha (default 1.0) — consumed elsewhere.
    pub energy_alpha: f32,
    /// Energy-function weight beta (default 0.5) — consumed elsewhere.
    pub energy_beta: f32,
    /// Energy-function weight gamma (default 2.0) — consumed elsewhere.
    pub energy_gamma: f32,
    /// Shared cancellation flag; set on user Abort, polled between nodes.
    abort_requested: Arc<AtomicBool>,
}
133
/// Current wall-clock time as whole seconds since the Unix epoch.
///
/// Returns 0 instead of panicking when the system clock reports a time
/// before the epoch (`duration_since` errors on a backwards clock); the
/// original `.unwrap()` would abort the whole orchestrator in that case.
fn epoch_seconds() -> i64 {
    use std::time::{SystemTime, UNIX_EPOCH};
    SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map(|d| d.as_secs() as i64)
        .unwrap_or(0)
}
142
143fn detect_stub_content(path: &std::path::Path, plugin_hint: &str) -> Option<String> {
152 let content = std::fs::read_to_string(path).ok()?;
153
154 let lang = if !plugin_hint.is_empty() && plugin_hint != "unknown" {
156 plugin_hint.to_ascii_lowercase()
157 } else {
158 path.extension()
159 .and_then(|e| e.to_str())
160 .map(|e| match e {
161 "rs" => "rust",
162 "py" => "python",
163 "js" | "jsx" | "ts" | "tsx" | "mjs" | "cjs" => "javascript",
164 _ => "",
165 })
166 .unwrap_or("")
167 .to_string()
168 };
169
170 let universal_patterns = [
172 "// stub",
173 "# stub",
174 "// placeholder",
175 "# placeholder",
176 "// will be replaced",
177 "# will be replaced",
178 "/* todo */",
179 ];
180
181 let lang_patterns: &[&str] = match lang.as_str() {
183 "rust" => &["todo!()", "unimplemented!()"],
184 "python" => &["raise NotImplementedError", "raise NotImplementedError()"],
185 "javascript" | "typescript" => &[
186 "throw new Error(\"not implemented\")",
187 "throw new Error('not implemented')",
188 "throw new Error(\"TODO\")",
189 "throw new Error('TODO')",
190 ],
191 _ => &[],
192 };
193
194 let content_lower = content.to_ascii_lowercase();
195
196 let mut matched_pattern = None;
198 for pat in &universal_patterns {
199 if content_lower.contains(pat) {
200 matched_pattern = Some(*pat);
201 break;
202 }
203 }
204 if matched_pattern.is_none() {
205 for pat in lang_patterns {
206 if content.contains(pat) {
207 matched_pattern = Some(*pat);
208 break;
209 }
210 }
211 }
212
213 if matched_pattern.is_none() && lang == "python" {
215 let trimmed_lines: Vec<&str> = content
216 .lines()
217 .map(|l| l.trim())
218 .filter(|l| !l.is_empty() && !l.starts_with('#'))
219 .collect();
220 let body_only: Vec<&&str> = trimmed_lines
221 .iter()
222 .filter(|l| {
223 !l.starts_with("def ")
224 && !l.starts_with("class ")
225 && !l.starts_with("import ")
226 && !l.starts_with("from ")
227 })
228 .collect();
229 if body_only.len() <= 2 && body_only.iter().all(|l| **l == "pass" || **l == "...") {
230 matched_pattern = Some("only pass/... body");
231 }
232 }
233
234 let pattern = matched_pattern?;
235
236 let real_lines = count_real_code_lines(&content, &lang);
238 if real_lines >= 5 {
239 return None;
242 }
243
244 Some(format!(
245 "found '{}' with only {} line(s) of real code",
246 pattern, real_lines
247 ))
248}
249
/// Counts lines that look like real code for the given language: non-empty
/// lines that are neither comments nor import/module boilerplate.
///
/// Known languages (`rust`, `python`, `javascript`/`typescript`) also skip
/// their import forms; any other language only skips common comment prefixes.
fn count_real_code_lines(content: &str, lang: &str) -> usize {
    let mut real = 0usize;
    for raw in content.lines() {
        let t = raw.trim();
        if t.is_empty() {
            continue;
        }
        // True when the line is comment-like or import-like for `lang`.
        let boilerplate = match lang {
            "rust" => {
                t.starts_with("//")
                    || t.starts_with("/*")
                    || t.starts_with('*')
                    || t.starts_with("use ")
                    || t.starts_with("extern ")
                    || t.starts_with("mod ")
            }
            "python" => {
                t.starts_with('#')
                    || t.starts_with("\"\"\"")
                    || t.starts_with("'''")
                    || t.starts_with("import ")
                    || t.starts_with("from ")
            }
            "javascript" | "typescript" => {
                t.starts_with("//")
                    || t.starts_with("/*")
                    || t.starts_with('*')
                    || t.starts_with("import ")
                    || t.starts_with("require(")
                    || (t.starts_with("const ") && t.contains("require("))
            }
            _ => t.starts_with("//") || t.starts_with('#') || t.starts_with("/*"),
        };
        if !boilerplate {
            real += 1;
        }
    }
    real
}
314
315impl SRBNOrchestrator {
    /// Creates an orchestrator with default models for all four agent tiers.
    ///
    /// Equivalent to [`Self::new_with_models`] with every model override and
    /// fallback set to `None`.
    pub fn new(working_dir: PathBuf, auto_approve: bool) -> Self {
        Self::new_with_models(
            working_dir,
            auto_approve,
            None, // architect_model
            None, // actuator_model
            None, // verifier_model
            None, // speculator_model
            None, // architect_fallback_model
            None, // actuator_fallback_model
            None, // verifier_fallback_model
            None, // speculator_fallback_model
        )
    }
331
    /// Creates an orchestrator with optional per-tier model overrides.
    ///
    /// A `None` primary model falls back to that tier's
    /// `ModelTier::*.default_model()`. Fallback models are stored as-is;
    /// their escalation semantics are not visible in this file section.
    #[allow(clippy::too_many_arguments)]
    pub fn new_with_models(
        working_dir: PathBuf,
        auto_approve: bool,
        architect_model: Option<String>,
        actuator_model: Option<String>,
        verifier_model: Option<String>,
        speculator_model: Option<String>,
        architect_fallback_model: Option<String>,
        actuator_fallback_model: Option<String>,
        verifier_fallback_model: Option<String>,
        speculator_fallback_model: Option<String>,
    ) -> Self {
        let context = AgentContext {
            working_dir: working_dir.clone(),
            auto_approve,
            ..Default::default()
        };

        // NOTE(review): the fallback branch re-invokes the very constructor
        // that just failed and then panics via `expect`, so the "using
        // default" log text is misleading — consider propagating the error
        // or supplying a genuine default provider instead.
        let provider = std::sync::Arc::new(
            perspt_core::llm_provider::GenAIProvider::new().unwrap_or_else(|e| {
                log::warn!("Failed to create GenAIProvider: {}, using default", e);
                perspt_core::llm_provider::GenAIProvider::new().expect("GenAI must initialize")
            }),
        );

        // Second argument is the inverse of auto_approve — presumably a
        // "require confirmation" flag; TODO confirm against AgentTools::new.
        let tools = AgentTools::new(working_dir.clone(), !auto_approve);

        // Resolve the model names we store on the struct; the Option values
        // are handed to the agents unchanged below.
        let stored_architect_model = architect_model
            .clone()
            .unwrap_or_else(|| ModelTier::Architect.default_model().to_string());
        let stored_actuator_model = actuator_model
            .clone()
            .unwrap_or_else(|| ModelTier::Actuator.default_model().to_string());
        let stored_verifier_model = verifier_model
            .clone()
            .unwrap_or_else(|| ModelTier::Verifier.default_model().to_string());
        let stored_speculator_model = speculator_model
            .clone()
            .unwrap_or_else(|| ModelTier::Speculator.default_model().to_string());

        Self {
            graph: DiGraph::new(),
            node_indices: HashMap::new(),
            context,
            auto_approve,
            lsp_clients: HashMap::new(),
            // Roster order matters to any code that indexes `agents`.
            agents: vec![
                Box::new(ArchitectAgent::new(provider.clone(), architect_model)),
                Box::new(ActuatorAgent::new(provider.clone(), actuator_model)),
                Box::new(VerifierAgent::new(provider.clone(), verifier_model)),
                Box::new(SpeculatorAgent::new(provider.clone(), speculator_model)),
            ],
            tools,
            last_written_file: None,
            file_version: 0,
            provider,
            architect_model: stored_architect_model,
            actuator_model: stored_actuator_model,
            verifier_model: stored_verifier_model,
            speculator_model: stored_speculator_model,
            architect_fallback_model,
            actuator_fallback_model,
            verifier_fallback_model,
            speculator_fallback_model,
            event_sender: None,
            action_receiver: None,
            // Tests get an in-memory ledger so they never touch disk state.
            #[cfg(test)]
            ledger: crate::ledger::MerkleLedger::in_memory().expect("Failed to create test ledger"),
            #[cfg(not(test))]
            ledger: crate::ledger::MerkleLedger::new().expect("Failed to create ledger"),
            last_tool_failure: None,
            last_context_provenance: None,
            last_formatted_context: String::new(),
            last_verification_result: None,
            last_applied_bundle: None,
            last_repair_footprint: None,
            blocked_dependencies: Vec::new(),
            // "pending" placeholder id; replaced when a real session starts.
            budget: perspt_core::types::BudgetEnvelope::new("pending"),
            planning_policy: perspt_core::PlanningPolicy::default(),
            stability_epsilon: 0.1,
            energy_alpha: 1.0,
            energy_beta: 0.5,
            energy_gamma: 2.0,
            abort_requested: Arc::new(AtomicBool::new(false)),
        }
    }
424
    /// Test-only constructor: auto-approve enabled, default models, no
    /// fallbacks, and an in-memory ledger so tests never touch disk state.
    #[cfg(test)]
    pub fn new_for_testing(working_dir: PathBuf) -> Self {
        let context = AgentContext {
            working_dir: working_dir.clone(),
            auto_approve: true,
            ..Default::default()
        };

        // Same questionable retry-then-panic fallback as new_with_models;
        // acceptable in tests where a missing provider should fail loudly.
        let provider = std::sync::Arc::new(
            perspt_core::llm_provider::GenAIProvider::new().unwrap_or_else(|e| {
                log::warn!("Failed to create GenAIProvider: {}, using default", e);
                perspt_core::llm_provider::GenAIProvider::new().expect("GenAI must initialize")
            }),
        );

        // `false` here — presumably "no confirmation required"; TODO confirm
        // against AgentTools::new (new_with_models passes !auto_approve).
        let tools = AgentTools::new(working_dir.clone(), false);

        Self {
            graph: DiGraph::new(),
            node_indices: HashMap::new(),
            context,
            auto_approve: true,
            lsp_clients: HashMap::new(),
            agents: vec![
                Box::new(ArchitectAgent::new(provider.clone(), None)),
                Box::new(ActuatorAgent::new(provider.clone(), None)),
                Box::new(VerifierAgent::new(provider.clone(), None)),
                Box::new(SpeculatorAgent::new(provider.clone(), None)),
            ],
            tools,
            last_written_file: None,
            file_version: 0,
            provider,
            architect_model: ModelTier::Architect.default_model().to_string(),
            actuator_model: ModelTier::Actuator.default_model().to_string(),
            verifier_model: ModelTier::Verifier.default_model().to_string(),
            speculator_model: ModelTier::Speculator.default_model().to_string(),
            architect_fallback_model: None,
            actuator_fallback_model: None,
            verifier_fallback_model: None,
            speculator_fallback_model: None,
            event_sender: None,
            action_receiver: None,
            ledger: crate::ledger::MerkleLedger::in_memory().expect("Failed to create test ledger"),
            last_tool_failure: None,
            last_context_provenance: None,
            last_formatted_context: String::new(),
            last_verification_result: None,
            last_applied_bundle: None,
            last_repair_footprint: None,
            blocked_dependencies: Vec::new(),
            budget: perspt_core::types::BudgetEnvelope::new("test"),
            planning_policy: perspt_core::PlanningPolicy::default(),
            stability_epsilon: 0.1,
            energy_alpha: 1.0,
            energy_beta: 0.5,
            energy_gamma: 2.0,
            abort_requested: Arc::new(AtomicBool::new(false)),
        }
    }
486
487 pub fn add_node(&mut self, node: SRBNNode) -> NodeIndex {
489 let node_id = node.node_id.clone();
490 let idx = self.graph.add_node(node);
491 self.node_indices.insert(node_id, idx);
492 idx
493 }
494
495 pub fn connect_tui(
497 &mut self,
498 event_sender: perspt_core::events::channel::EventSender,
499 action_receiver: perspt_core::events::channel::ActionReceiver,
500 ) {
501 self.tools.set_event_sender(event_sender.clone());
502 self.event_sender = Some(event_sender);
503 self.action_receiver = Some(action_receiver);
504 }
505
506 pub fn abort_flag(&self) -> Arc<AtomicBool> {
508 self.abort_requested.clone()
509 }
510
    /// True once the shared abort flag has been set (e.g. by a user Abort).
    fn is_abort_requested(&self) -> bool {
        // Relaxed suffices: the flag only gates control flow and carries no
        // associated data requiring ordering guarantees.
        self.abort_requested.load(Ordering::Relaxed)
    }
515
516 fn finalize_session(&mut self, result: &Result<perspt_core::SessionOutcome>) {
518 let status = if self.is_abort_requested() {
519 "ABORTED"
520 } else {
521 match result {
522 Ok(perspt_core::SessionOutcome::Success) => "COMPLETED",
523 Ok(perspt_core::SessionOutcome::PartialSuccess) => "PARTIAL",
524 Ok(perspt_core::SessionOutcome::Failed) | Err(_) => "FAILED",
525 }
526 };
527 if let Err(e) = self.ledger.end_session(status) {
528 log::error!("Failed to finalize session as {}: {}", status, e);
529 }
530 }
531
532 pub fn set_budget(
537 &mut self,
538 max_steps: Option<u32>,
539 max_revisions: Option<u32>,
540 max_cost_usd: Option<f64>,
541 ) {
542 self.budget.max_steps = max_steps;
543 self.budget.max_revisions = max_revisions;
544 self.budget.max_cost_usd = max_cost_usd;
545 }
546
    /// Rebuilds in-memory state for a previously persisted session so it can
    /// be resumed: restores the budget envelope, reconstructs the node graph
    /// and its edges, recomputes seal-blocked dependencies, and validates
    /// context provenance for every non-terminal node.
    ///
    /// # Errors
    /// Fails if the snapshot cannot be loaded or contains no nodes.
    pub fn rehydrate_session(
        &mut self,
        session_id: &str,
    ) -> Result<crate::ledger::SessionSnapshot> {
        // Install a provisional session record; task/status are corrected
        // after the snapshot is loaded below.
        self.context.session_id = session_id.to_string();
        self.ledger.current_session = Some(crate::ledger::SessionRecordLegacy {
            session_id: session_id.to_string(),
            task: String::new(),
            started_at: epoch_seconds(),
            ended_at: None,
            status: "RESUMING".to_string(),
            total_nodes: 0,
            completed_nodes: 0,
        });

        let snapshot = self.ledger.load_session_snapshot()?;

        // Restore the persisted budget envelope, if one exists.
        if let Ok(Some(row)) = self.ledger.get_budget_envelope() {
            self.budget = perspt_core::types::BudgetEnvelope {
                session_id: row.session_id,
                max_steps: row.max_steps.map(|v| v as u32),
                steps_used: row.steps_used as u32,
                max_revisions: row.max_revisions.map(|v| v as u32),
                revisions_used: row.revisions_used as u32,
                max_cost_usd: row.max_cost_usd,
                cost_used_usd: row.cost_used_usd,
            };
            log::info!(
                "Restored budget envelope: steps {}/{:?}, revisions {}/{:?}, cost ${:.2}/{:?}",
                self.budget.steps_used,
                self.budget.max_steps,
                self.budget.revisions_used,
                self.budget.max_revisions,
                self.budget.cost_used_usd,
                self.budget.max_cost_usd,
            );
        }

        if snapshot.node_details.is_empty() {
            anyhow::bail!(
                "Session {} has no persisted nodes — cannot resume",
                session_id
            );
        }

        // Warn about edges whose endpoints are missing from the snapshot;
        // they are implicitly dropped by the node_map lookups further down.
        let node_ids: std::collections::HashSet<&str> = snapshot
            .node_details
            .iter()
            .map(|d| d.record.node_id.as_str())
            .collect();
        let orphaned_edges = snapshot
            .graph_edges
            .iter()
            .filter(|e| {
                !node_ids.contains(e.parent_node_id.as_str())
                    || !node_ids.contains(e.child_node_id.as_str())
            })
            .count();
        if orphaned_edges > 0 {
            log::warn!(
                "Session {} has {} orphaned edge(s) referencing unknown nodes — \
                 edges will be dropped during resume",
                session_id,
                orphaned_edges
            );
            self.emit_log(format!(
                "⚠️ Resume: dropping {} orphaned graph edge(s)",
                orphaned_edges
            ));
        }

        let mut node_map: HashMap<String, NodeIndex> = HashMap::new();

        // Phase 1: reconstruct every node from its persisted record.
        for detail in &snapshot.node_details {
            let rec = &detail.record;

            let state = parse_node_state(&rec.state);
            let node_class = rec
                .node_class
                .as_deref()
                .map(parse_node_class)
                .unwrap_or_default();

            // Tier is rebuilt as Actuator unconditionally — the original
            // tier is presumably not persisted; TODO confirm.
            let mut node = SRBNNode::new(
                rec.node_id.clone(),
                rec.goal.clone().unwrap_or_default(),
                ModelTier::Actuator,
            );
            node.state = state;
            node.node_class = node_class;
            node.owner_plugin = rec.owner_plugin.clone().unwrap_or_default();
            node.parent_id = rec.parent_id.clone();
            // Children were serialized as a JSON string array; malformed
            // JSON silently yields an empty list.
            node.children = rec
                .children
                .as_deref()
                .and_then(|s| serde_json::from_str::<Vec<String>>(s).ok())
                .unwrap_or_default();
            node.monitor.attempt_count = rec.attempt_count as usize;

            // Only the latest energy reading is carried over.
            if let Some(last_energy) = detail.energy_history.last() {
                node.monitor.energy_history.push(last_energy.v_total);
            }

            // Restore the latest interface seal, but only if it is a valid
            // 32-byte hash; anything else is ignored.
            if let Some(seal) = detail.interface_seals.last() {
                if seal.seal_hash.len() == 32 {
                    let mut hash = [0u8; 32];
                    hash.copy_from_slice(&seal.seal_hash);
                    node.interface_seal_hash = Some(hash);
                }
            }

            let idx = self.add_node(node);
            node_map.insert(rec.node_id.clone(), idx);
        }

        // Phase 2: re-add edges; orphaned endpoints fail the lookup and the
        // edge is skipped (as warned above).
        for edge in &snapshot.graph_edges {
            if let (Some(&from_idx), Some(&to_idx)) = (
                node_map.get(&edge.parent_node_id),
                node_map.get(&edge.child_node_id),
            ) {
                self.graph.add_edge(
                    from_idx,
                    to_idx,
                    Dependency {
                        kind: edge.edge_type.clone(),
                    },
                );
            }
        }

        // Phase 3: any child whose Interface parent is non-terminal and has
        // no seal is recorded as blocked.
        for (child_id, &child_idx) in &node_map {
            let parents: Vec<NodeIndex> = self
                .graph
                .neighbors_directed(child_idx, petgraph::Direction::Incoming)
                .collect();

            for parent_idx in parents {
                let parent = &self.graph[parent_idx];
                if parent.node_class == NodeClass::Interface
                    && parent.interface_seal_hash.is_none()
                    && !parent.state.is_terminal()
                {
                    self.blocked_dependencies
                        .push(perspt_core::types::BlockedDependency {
                            child_node_id: child_id.clone(),
                            parent_node_id: parent.node_id.clone(),
                            required_seal_paths: Vec::new(),
                            blocked_at: epoch_seconds(),
                        });
                }
            }
        }

        let terminal = snapshot
            .node_details
            .iter()
            .filter(|d| {
                let s = parse_node_state(&d.record.state);
                s.is_terminal()
            })
            .count();
        let resumable = snapshot.node_details.len() - terminal;

        log::info!(
            "Rehydrated session {}: {} nodes ({} terminal, {} resumable), {} edges",
            session_id,
            snapshot.node_details.len(),
            terminal,
            resumable,
            snapshot.graph_edges.len()
        );

        // Promote the provisional session record to RUNNING with real counts.
        if let Some(ref mut sess) = self.ledger.current_session {
            sess.total_nodes = snapshot.node_details.len();
            sess.completed_nodes = terminal;
            sess.status = "RUNNING".to_string();
        }

        // Phase 4: provenance validation for non-terminal nodes — warn (and
        // emit an event) when files referenced by persisted context vanished.
        for detail in &snapshot.node_details {
            let state = parse_node_state(&detail.record.state);
            if state.is_terminal() {
                continue;
            }

            if let Some(ref prov) = detail.context_provenance {
                let retriever = ContextRetriever::new(self.context.working_dir.clone());
                let drift = retriever.validate_provenance_record(prov);
                if !drift.is_empty() {
                    log::warn!(
                        "Provenance drift for node '{}': {} file(s) missing: {}",
                        detail.record.node_id,
                        drift.len(),
                        drift.join(", ")
                    );
                    self.emit_log(format!(
                        "⚠️ Provenance drift: node '{}' has {} missing file(s)",
                        detail.record.node_id,
                        drift.len()
                    ));
                    self.emit_event(perspt_core::AgentEvent::ProvenanceDrift {
                        node_id: detail.record.node_id.clone(),
                        missing_files: drift,
                        reason: "Files referenced in persisted context no longer exist".to_string(),
                    });
                }
            }
        }

        Ok(snapshot)
    }
785
786 pub async fn run_resumed(&mut self) -> Result<()> {
793 let result = self.run_resumed_inner().await;
794 self.finalize_session(&result);
795 result.map(|_| ())
796 }
797
    /// Executes the rehydrated graph in topological order, skipping terminal
    /// nodes and seal-blocked nodes, and honoring abort requests and budget
    /// exhaustion between nodes.
    ///
    /// Returns `Success` only when nothing escalated and every node is either
    /// freshly executed or already terminal; `PartialSuccess` when at least
    /// one node completed; `Failed` otherwise.
    async fn run_resumed_inner(&mut self) -> Result<perspt_core::SessionOutcome> {
        let topo = Topo::new(&self.graph);
        let indices: Vec<_> = topo.iter(&self.graph).collect();
        let total_nodes = indices.len();
        let mut executed = 0;
        let mut escalated: usize = 0;

        // Up-front census for the differential-resume summary. Note:
        // check_seal_prerequisites returning true means "blocked" here.
        let terminal_count = indices
            .iter()
            .filter(|i| self.graph[**i].state.is_terminal())
            .count();
        let blocked_count = indices
            .iter()
            .filter(|i| !self.graph[**i].state.is_terminal() && self.check_seal_prerequisites(**i))
            .count();
        let resumable_count = total_nodes - terminal_count - blocked_count;
        self.emit_log(format!(
            "📊 Differential resume: {} total, {} skipped (terminal), {} blocked (seal), {} to execute",
            total_nodes, terminal_count, blocked_count, resumable_count
        ));

        for (i, idx) in indices.iter().enumerate() {
            // Abort and budget checks happen between nodes, never mid-node.
            if self.is_abort_requested() {
                self.emit_log("⚠️ Session aborted — stopping resumed execution".to_string());
                break;
            }

            if self.budget.any_exhausted() {
                let node_id = self.graph[*idx].node_id.clone();
                self.emit_log(format!(
                    "⛔ Budget exhausted — skipping node '{}' and remaining nodes",
                    node_id
                ));
                self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
                    node_id,
                    status: perspt_core::NodeStatus::Escalated,
                });
                break;
            }

            let node = &self.graph[*idx];

            // Already-finished nodes are skipped silently (debug log only).
            if node.state.is_terminal() {
                log::debug!("Skipping terminal node {} ({:?})", node.node_id, node.state);
                continue;
            }

            // Seal-blocked nodes are skipped but NOT escalated — they simply
            // don't count toward `executed`, so Success becomes unreachable.
            if self.check_seal_prerequisites(*idx) {
                log::warn!(
                    "Node {} blocked on seal prerequisite — skipping",
                    self.graph[*idx].node_id
                );
                continue;
            }

            // Re-borrow after the mutable-self calls above.
            let node = &self.graph[*idx];
            self.emit_log(format!(
                "📝 [resume {}/{}] {}",
                i + 1,
                total_nodes,
                node.goal
            ));
            self.emit_event(perspt_core::AgentEvent::NodeSelected {
                node_id: node.node_id.clone(),
                goal: node.goal.clone(),
                node_class: node.node_class.to_string(),
            });
            self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
                node_id: node.node_id.clone(),
                status: perspt_core::NodeStatus::Running,
            });

            match self.execute_node(*idx).await {
                Ok(NodeOutcome::Completed) => {
                    executed += 1;
                    self.budget.record_step();

                    // Persist budget usage eagerly so a crash mid-session
                    // does not lose step accounting.
                    if let Err(e) = self.ledger.upsert_budget_envelope(&self.budget) {
                        log::warn!("Failed to persist budget envelope: {}", e);
                    }

                    if let Some(node) = self.graph.node_weight(*idx) {
                        self.emit_event(perspt_core::AgentEvent::NodeCompleted {
                            node_id: node.node_id.clone(),
                            goal: node.goal.clone(),
                        });
                    }
                }
                // Escalation still consumes a budget step.
                Ok(NodeOutcome::Escalated) => {
                    escalated += 1;
                    self.budget.record_step();
                    continue;
                }
                // Hard errors mark the node Escalated and move on — they do
                // not abort the remaining nodes.
                Err(e) => {
                    escalated += 1;
                    let node_id = self.graph[*idx].node_id.clone();
                    log::error!("Node {} failed on resume: {}", node_id, e);
                    self.emit_log(format!("❌ Node {} failed: {}", node_id, e));
                    self.graph[*idx].state = NodeState::Escalated;
                    self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
                        node_id,
                        status: perspt_core::NodeStatus::Escalated,
                    });
                    continue;
                }
            }
        }

        log::info!(
            "Resumed execution completed: {} of {} nodes executed",
            executed,
            total_nodes
        );

        let outcome = if escalated == 0 && executed + terminal_count >= total_nodes {
            perspt_core::SessionOutcome::Success
        } else if executed > 0 {
            perspt_core::SessionOutcome::PartialSuccess
        } else {
            perspt_core::SessionOutcome::Failed
        };
        self.emit_event(perspt_core::AgentEvent::Complete {
            success: outcome == perspt_core::SessionOutcome::Success,
            message: format!(
                "Resumed: {}/{} completed, {} escalated",
                executed, total_nodes, escalated
            ),
        });
        Ok(outcome)
    }
937
938 fn emit_event(&self, event: perspt_core::AgentEvent) {
940 if let Some(ref sender) = self.event_sender {
941 let _ = sender.send(event);
942 }
943 }
944
    /// Convenience wrapper: emits `msg` as a `Log` event to the TUI (if any).
    fn emit_log(&self, msg: impl Into<String>) {
        self.emit_event(perspt_core::AgentEvent::Log(msg.into()));
    }
949
950 fn record_step_quietly(
952 &self,
953 node_id: &str,
954 step: &str,
955 outcome: &str,
956 energy: Option<&perspt_core::types::EnergyComponents>,
957 attempt_count: i32,
958 duration_ms: i32,
959 ) {
960 let record = perspt_store::SrbnStepRecord {
961 session_id: self.context.session_id.clone(),
962 node_id: node_id.to_string(),
963 step: step.to_string(),
964 outcome: outcome.to_string(),
965 energy_json: energy.and_then(|e| serde_json::to_string(e).ok()),
966 parse_state: None,
967 retry_classification: None,
968 attempt_count,
969 duration_ms,
970 };
971 if let Err(e) = self.ledger.record_step(&record) {
972 log::warn!("Failed to record step '{}' for {}: {}", step, node_id, e);
973 }
974 }
975
    /// Requests approval for an action that is not tied to a specific review
    /// node; thin wrapper over [`Self::await_approval_for_node`] with
    /// `review_node_id = None`.
    async fn await_approval(
        &mut self,
        action_type: perspt_core::ActionType,
        description: String,
        diff: Option<String>,
    ) -> ApprovalResult {
        self.await_approval_for_node(action_type, description, diff, None)
            .await
    }
988
    /// Requests user approval for an action, blocking on the TUI action
    /// channel until a response matching this request's id arrives.
    ///
    /// Auto-approves (and records the decision) when `auto_approve` is set or
    /// no action channel is connected. Reject, correction-request, and abort
    /// all collapse to `ApprovalResult::Rejected`; abort additionally sets
    /// the shared abort flag. Every decision is persisted via
    /// `persist_review_decision` when a `review_node_id` is given.
    async fn await_approval_for_node(
        &mut self,
        action_type: perspt_core::ActionType,
        description: String,
        diff: Option<String>,
        review_node_id: Option<&str>,
    ) -> ApprovalResult {
        // Fast path 1: global auto-approve.
        if self.auto_approve {
            if let Some(nid) = review_node_id {
                self.persist_review_decision(nid, "auto_approved", None);
            }
            return ApprovalResult::Approved;
        }

        // Fast path 2: headless (no TUI channel) — treated as auto-approve.
        if self.action_receiver.is_none() {
            if let Some(nid) = review_node_id {
                self.persist_review_decision(nid, "auto_approved", None);
            }
            return ApprovalResult::Approved;
        }

        // Unique id so stale responses from other requests can be ignored.
        let request_id = uuid::Uuid::new_v4().to_string();

        self.emit_event(perspt_core::AgentEvent::ApprovalRequest {
            request_id: request_id.clone(),
            node_id: review_node_id.unwrap_or("current").to_string(),
            action_type,
            description,
            diff,
        });

        // Drain actions until one matches our request id (or Abort, which
        // matches unconditionally); unrelated actions are skipped.
        if let Some(ref mut receiver) = self.action_receiver {
            while let Some(action) = receiver.recv().await {
                match action {
                    perspt_core::AgentAction::Approve { request_id: rid } if rid == request_id => {
                        self.emit_log("✓ Approved by user");
                        if let Some(nid) = review_node_id {
                            self.persist_review_decision(nid, "approved", None);
                        }
                        return ApprovalResult::Approved;
                    }
                    perspt_core::AgentAction::ApproveWithEdit {
                        request_id: rid,
                        edited_value,
                    } if rid == request_id => {
                        self.emit_log(format!("✓ Approved with edit: {}", edited_value));
                        if let Some(nid) = review_node_id {
                            self.persist_review_decision(nid, "approved_with_edit", None);
                        }
                        return ApprovalResult::ApprovedWithEdit(edited_value);
                    }
                    perspt_core::AgentAction::Reject {
                        request_id: rid,
                        reason,
                    } if rid == request_id => {
                        let msg = reason.unwrap_or_else(|| "User rejected".to_string());
                        self.emit_log(format!("✗ Rejected: {}", msg));
                        if let Some(nid) = review_node_id {
                            self.persist_review_decision(nid, "rejected", Some(&msg));
                        }
                        return ApprovalResult::Rejected;
                    }
                    perspt_core::AgentAction::RequestCorrection {
                        request_id: rid,
                        feedback,
                    } if rid == request_id => {
                        // Correction requests surface as Rejected; the
                        // feedback is only persisted, not returned.
                        self.emit_log(format!("🔄 Correction requested: {}", feedback));
                        if let Some(nid) = review_node_id {
                            self.persist_review_decision(
                                nid,
                                "correction_requested",
                                Some(&feedback),
                            );
                        }
                        return ApprovalResult::Rejected;
                    }
                    perspt_core::AgentAction::Abort => {
                        self.emit_log("⚠️ Session aborted by user");
                        self.abort_requested.store(true, Ordering::Relaxed);
                        if let Some(nid) = review_node_id {
                            self.persist_review_decision(nid, "aborted", None);
                        }
                        return ApprovalResult::Rejected;
                    }
                    _ => {
                        continue;
                    }
                }
            }
        }

        // Channel closed without a decision — fail safe by rejecting.
        ApprovalResult::Rejected
    }
1089
1090 fn persist_review_decision(&self, node_id: &str, outcome: &str, note: Option<&str>) {
1092 let degraded = self.last_verification_result.as_ref().map(|vr| vr.degraded);
1093 if let Err(e) = self
1094 .ledger
1095 .record_review_outcome(node_id, outcome, note, None, degraded, None)
1096 {
1097 log::warn!("Failed to persist review decision for {}: {}", node_id, e);
1098 }
1099 }
1100
1101 pub fn add_dependency(&mut self, from_id: &str, to_id: &str, kind: &str) -> Result<()> {
1103 let from_idx = self
1104 .node_indices
1105 .get(from_id)
1106 .context(format!("Node not found: {}", from_id))?;
1107 let to_idx = self
1108 .node_indices
1109 .get(to_id)
1110 .context(format!("Node not found: {}", to_id))?;
1111
1112 self.graph.add_edge(
1113 *from_idx,
1114 *to_idx,
1115 Dependency {
1116 kind: kind.to_string(),
1117 },
1118 );
1119 Ok(())
1120 }
1121
1122 pub async fn run(&mut self, task: String) -> Result<()> {
1124 log::info!("Starting SRBN execution for task: {}", task);
1125 self.emit_log(format!("🚀 Starting task: {}", task));
1126
1127 let session_id = uuid::Uuid::new_v4().to_string();
1129 self.context.session_id = session_id.clone();
1130 self.ledger.start_session(
1131 &session_id,
1132 &task,
1133 &self.context.working_dir.to_string_lossy(),
1134 )?;
1135
1136 let result = self.run_orchestration(task).await;
1138 self.finalize_session(&result);
1139 result.map(|_| ())
1140 }
1141
1142 async fn run_orchestration(&mut self, task: String) -> Result<perspt_core::SessionOutcome> {
1144 if self.context.log_llm {
1145 self.emit_log("📝 LLM request logging enabled".to_string());
1146 }
1147
1148 let execution_mode = self.detect_execution_mode(&task);
1150 self.context.execution_mode = execution_mode;
1151 self.emit_log(format!("🎯 Execution mode: {}", execution_mode));
1152
1153 if execution_mode == perspt_core::types::ExecutionMode::Solo {
1154 log::info!("Using Solo Mode for explicit single-file task");
1156 self.emit_log("⚡ Solo Mode: Single-file execution".to_string());
1157 return self
1158 .run_solo_mode(task)
1159 .await
1160 .map(|()| perspt_core::SessionOutcome::Success);
1161 }
1162
1163 let workspace_state = self.classify_workspace(&task);
1165 self.context.workspace_state = workspace_state.clone();
1166 self.emit_log(format!("📋 Workspace: {}", workspace_state));
1167
1168 if let WorkspaceState::ExistingProject { ref plugins } = workspace_state {
1171 self.context.active_plugins = plugins.clone();
1172 self.emit_log(format!("🔌 Detected plugins: {}", plugins.join(", ")));
1173 self.emit_plugin_readiness();
1174 }
1175
1176 self.step_init_project(&task).await?;
1178
1179 if !matches!(workspace_state, WorkspaceState::ExistingProject { .. }) {
1182 self.redetect_plugins_after_init();
1183 }
1184
1185 self.check_verifier_readiness_gate();
1189
1190 {
1193 let plugin_refs: Vec<String> = self.context.active_plugins.clone();
1194 let refs: Vec<&str> = plugin_refs.iter().map(|s| s.as_str()).collect();
1195 if !refs.is_empty() {
1196 self.emit_log("🔍 Starting language servers...".to_string());
1197 if let Err(e) = self.start_lsp_for_plugins(&refs).await {
1198 log::warn!("Failed to start LSP: {}", e);
1199 self.emit_log("⚠️ Continuing without LSP".to_string());
1200 } else {
1201 self.emit_log("✅ Language servers ready".to_string());
1202 }
1203 }
1204 }
1205
1206 if self.planning_policy == perspt_core::PlanningPolicy::default() {
1210 self.planning_policy = match &self.context.workspace_state {
1211 WorkspaceState::Greenfield { .. } => perspt_core::PlanningPolicy::GreenfieldBuild,
1212 WorkspaceState::ExistingProject { .. } => {
1213 perspt_core::PlanningPolicy::FeatureIncrement
1214 }
1215 WorkspaceState::Ambiguous => perspt_core::PlanningPolicy::FeatureIncrement,
1216 };
1217 }
1218
1219 if self.ledger.get_feature_charter().ok().flatten().is_none() {
1223 let mut charter = perspt_core::FeatureCharter::new(&self.context.session_id, &task);
1224 match self.planning_policy {
1225 perspt_core::PlanningPolicy::LocalEdit => {
1226 charter.max_modules = Some(1);
1227 charter.max_files = Some(5);
1228 charter.max_revisions = Some(3);
1229 }
1230 perspt_core::PlanningPolicy::FeatureIncrement => {
1231 charter.max_modules = Some(10);
1232 charter.max_files = Some(30);
1233 charter.max_revisions = Some(5);
1234 }
1235 perspt_core::PlanningPolicy::LargeFeature
1236 | perspt_core::PlanningPolicy::GreenfieldBuild
1237 | perspt_core::PlanningPolicy::ArchitecturalRevision => {
1238 charter.max_modules = Some(25);
1239 charter.max_files = Some(80);
1240 charter.max_revisions = Some(10);
1241 }
1242 }
1243 if let Some(ref lang) = self.context.active_plugins.first() {
1244 charter.language_constraint = Some(lang.to_string());
1245 }
1246 if let Err(e) = self.ledger.record_feature_charter(&charter) {
1247 log::warn!("Failed to persist default FeatureCharter: {}", e);
1248 } else {
1249 log::info!(
1250 "Registered default FeatureCharter (max_modules={:?}, max_files={:?})",
1251 charter.max_modules,
1252 charter.max_files
1253 );
1254 }
1255 }
1256
1257 if self.planning_policy.needs_architect() {
1260 self.step_sheafify(task).await?;
1261 } else {
1262 self.emit_log("📐 LocalEdit policy — skipping architect, single-node plan".to_string());
1263 self.create_deterministic_fallback_graph(&task)?;
1264 }
1265
1266 self.emit_log(format!("📐 Planning policy: {:?}", self.planning_policy));
1268
1269 let node_count = self.graph.node_count();
1271 self.emit_event(perspt_core::AgentEvent::PlanReady {
1272 nodes: node_count,
1273 plugins: self.context.active_plugins.clone(),
1274 execution_mode: execution_mode.to_string(),
1275 });
1276
1277 for node_id in self.node_indices.keys() {
1279 if let Some(idx) = self.node_indices.get(node_id) {
1280 if let Some(node) = self.graph.node_weight(*idx) {
1281 self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
1282 node_id: node.node_id.clone(),
1283 status: perspt_core::NodeStatus::Pending,
1284 });
1285 }
1286 }
1287 }
1288
1289 let topo = Topo::new(&self.graph);
1291 let indices: Vec<_> = topo.iter(&self.graph).collect();
1292 let total_nodes = indices.len();
1293 let mut completed_count: usize = 0;
1294 let mut escalated_count: usize = 0;
1295
1296 for (i, idx) in indices.iter().enumerate() {
1297 if self.is_abort_requested() {
1299 self.emit_log("⚠️ Session aborted — stopping execution".to_string());
1300 break;
1301 }
1302
1303 if self.budget.any_exhausted() {
1305 let node_id = self.graph[*idx].node_id.clone();
1306 self.emit_log(format!(
1307 "⛔ Budget exhausted — skipping node '{}' and remaining nodes",
1308 node_id
1309 ));
1310 self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
1311 node_id,
1312 status: perspt_core::NodeStatus::Escalated,
1313 });
1314 break;
1315 }
1316
1317 if self.check_seal_prerequisites(*idx) {
1322 log::warn!(
1323 "Node {} blocked on seal prerequisite — skipping in this iteration",
1324 self.graph[*idx].node_id
1325 );
1326 continue;
1327 }
1328
1329 if let Some(node) = self.graph.node_weight(*idx) {
1331 self.emit_log(format!("📝 [{}/{}] {}", i + 1, total_nodes, node.goal));
1332 self.emit_event(perspt_core::AgentEvent::NodeSelected {
1333 node_id: node.node_id.clone(),
1334 goal: node.goal.clone(),
1335 node_class: node.node_class.to_string(),
1336 });
1337 self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
1338 node_id: node.node_id.clone(),
1339 status: perspt_core::NodeStatus::Running,
1340 });
1341 }
1342
1343 match self.execute_node(*idx).await {
1344 Ok(NodeOutcome::Completed) => {
1345 completed_count += 1;
1346
1347 self.budget.record_step();
1349
1350 self.emit_event(perspt_core::AgentEvent::BudgetUpdated {
1352 steps_used: self.budget.steps_used,
1353 max_steps: self.budget.max_steps,
1354 cost_used_usd: self.budget.cost_used_usd,
1355 max_cost_usd: self.budget.max_cost_usd,
1356 revisions_used: self.budget.revisions_used,
1357 max_revisions: self.budget.max_revisions,
1358 });
1359
1360 if let Err(e) = self.ledger.upsert_budget_envelope(&self.budget) {
1362 log::warn!("Failed to persist budget envelope: {}", e);
1363 }
1364
1365 if let Some(node) = self.graph.node_weight(*idx) {
1367 self.emit_event(perspt_core::AgentEvent::NodeCompleted {
1368 node_id: node.node_id.clone(),
1369 goal: node.goal.clone(),
1370 });
1371 }
1372 }
1373 Ok(NodeOutcome::Escalated) => {
1374 escalated_count += 1;
1375 self.budget.record_step();
1376
1377 if let Some(node) = self.graph.node_weight(*idx) {
1379 self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
1380 node_id: node.node_id.clone(),
1381 status: perspt_core::NodeStatus::Escalated,
1382 });
1383 }
1384 continue;
1385 }
1386 Err(e) => {
1387 escalated_count += 1;
1388 let node_id = self.graph[*idx].node_id.clone();
1389 eprintln!("[SRBN-DIAG] Node {} failed: {:#}", node_id, e);
1390 log::error!("Node {} failed: {}", node_id, e);
1391 self.emit_log(format!("❌ Node {} failed: {}", node_id, e));
1392
1393 if let Some(bid) = self.graph[*idx].provisional_branch_id.clone() {
1398 self.flush_provisional_branch(&bid, &node_id);
1399 }
1400 self.flush_descendant_branches(*idx);
1401
1402 self.graph[*idx].state = NodeState::Escalated;
1403 self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
1404 node_id: node_id.clone(),
1405 status: perspt_core::NodeStatus::Escalated,
1406 });
1407 continue;
1409 }
1410 }
1411 }
1412
1413 log::info!("SRBN execution completed");
1414
1415 if let Err(e) = crate::tools::cleanup_session_sandboxes(
1417 &self.context.working_dir,
1418 &self.context.session_id,
1419 ) {
1420 log::warn!("Failed to clean up session sandboxes: {}", e);
1421 }
1422
1423 let outcome = if escalated_count == 0 && completed_count >= total_nodes {
1427 perspt_core::SessionOutcome::Success
1428 } else if completed_count > 0 {
1429 perspt_core::SessionOutcome::PartialSuccess
1430 } else {
1431 perspt_core::SessionOutcome::Failed
1432 };
1433 self.emit_event(perspt_core::AgentEvent::Complete {
1434 success: outcome == perspt_core::SessionOutcome::Success,
1435 message: format!(
1436 "{}/{} nodes completed, {} escalated",
1437 completed_count, total_nodes, escalated_count
1438 ),
1439 });
1440 Ok(outcome)
1441 }
1442
    /// Drives the full lifecycle of a single SRBN node:
    /// speculate → verify → converge (with at most one sheaf-pre-check retry)
    /// → sheaf-validate → commit → merge provisional branch.
    ///
    /// Returns `Ok(NodeOutcome::Completed)` on success,
    /// `Ok(NodeOutcome::Escalated)` when convergence fails or the structural
    /// pre-check keeps failing after its single retry, and `Err` when a
    /// pipeline step itself errors out.
    async fn execute_node(&mut self, idx: NodeIndex) -> Result<NodeOutcome> {
        let node = &self.graph[idx];
        log::info!("Executing node: {} ({})", node.node_id, node.goal);

        // Isolate this node's writes in a provisional branch / sandbox.
        // May be None if branch creation is skipped (see maybe_create_provisional_branch).
        let branch_id = self.maybe_create_provisional_branch(idx);

        self.graph[idx].state = NodeState::Coding;
        self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
            node_id: self.graph[idx].node_id.clone(),
            status: perspt_core::NodeStatus::Coding,
        });

        // Step timings go through record_step_quietly so that ledger
        // persistence failures never interrupt node execution.
        let speculate_start = std::time::Instant::now();
        self.step_speculate(idx).await?;
        self.record_step_quietly(
            &self.graph[idx].node_id.clone(),
            "speculate",
            "ok",
            None,
            0,
            speculate_start.elapsed().as_millis() as i32,
        );

        let verify_start = std::time::Instant::now();
        let mut energy = self.step_verify(idx).await?;
        self.record_step_quietly(
            &self.graph[idx].node_id.clone(),
            "verify",
            "ok",
            Some(&energy),
            0,
            verify_start.elapsed().as_millis() as i32,
        );

        // Convergence loop. A failed structural (sheaf) pre-check of the
        // node's declared output targets triggers at most ONE extra
        // verify+converge pass; sheaf_pre_check_retries tracks that budget.
        let mut sheaf_pre_check_retries = 0u32;
        let mut converge_start;
        loop {
            converge_start = std::time::Instant::now();
            if !self.step_converge(idx, energy.clone()).await? {
                // --- Non-convergence: classify, persist, flush, maybe repair ---
                self.record_step_quietly(
                    &self.graph[idx].node_id.clone(),
                    "converge",
                    "escalated",
                    Some(&energy),
                    self.graph[idx].monitor.attempt_count as i32,
                    converge_start.elapsed().as_millis() as i32,
                );
                let category = self.classify_non_convergence(idx);
                let action = self.choose_repair_action(idx, &category);

                // Build an escalation report snapshot for the ledger/UI.
                let node = &self.graph[idx];
                let report = EscalationReport {
                    node_id: node.node_id.clone(),
                    session_id: self.context.session_id.clone(),
                    category,
                    action: action.clone(),
                    energy_snapshot: EnergyComponents {
                        v_syn: node.monitor.current_energy(),
                        ..Default::default()
                    },
                    stage_outcomes: self
                        .last_verification_result
                        .as_ref()
                        .map(|vr| vr.stage_outcomes.clone())
                        .unwrap_or_default(),
                    evidence: self.build_escalation_evidence(idx),
                    affected_node_ids: self.affected_dependents(idx),
                    timestamp: epoch_seconds(),
                };

                if let Err(e) = self.ledger.record_escalation_report(&report) {
                    log::warn!("Failed to persist escalation report: {}", e);
                }

                // Persist the last applied bundle (taken, so it is consumed)
                // as forensic evidence for the escalation.
                if let Some(bundle) = self.last_applied_bundle.take() {
                    if let Err(e) = self
                        .ledger
                        .record_artifact_bundle(&self.graph[idx].node_id, &bundle)
                    {
                        log::warn!(
                            "Failed to persist artifact bundle on escalation for {}: {}",
                            self.graph[idx].node_id,
                            e
                        );
                    }
                }

                self.emit_event(perspt_core::AgentEvent::EscalationClassified {
                    node_id: report.node_id.clone(),
                    category: report.category.to_string(),
                    action: report.action.to_string(),
                });

                // Discard this node's provisional work and that of its
                // descendants before attempting any repair.
                let node_id_for_flush = self.graph[idx].node_id.clone();
                if let Some(ref bid) = branch_id {
                    self.flush_provisional_branch(bid, &node_id_for_flush);
                }
                self.flush_descendant_branches(idx);

                let applied = self.apply_repair_action(idx, &action).await;

                // Only mark Escalated when no automatic repair was applied;
                // either way this node's outcome is Escalated for the caller.
                if !applied {
                    self.graph[idx].state = NodeState::Escalated;
                    self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
                        node_id: self.graph[idx].node_id.clone(),
                        status: perspt_core::NodeStatus::Escalated,
                    });
                    log::warn!(
                        "Node {} escalated to user: {} → {}",
                        self.graph[idx].node_id,
                        category,
                        action
                    );
                }

                return Ok(NodeOutcome::Escalated);
            }

            // Converged. Run the cheap structural pre-check once; a failure
            // feeds the evidence back via last_test_output, re-verifies with
            // a v_sheaf penalty, and loops for one more convergence attempt.
            if sheaf_pre_check_retries < 1 {
                if let Some(evidence) = self.sheaf_pre_check(idx) {
                    sheaf_pre_check_retries += 1;
                    log::warn!(
                        "Sheaf pre-check failed for {}, retrying convergence: {}",
                        self.graph[idx].node_id,
                        evidence
                    );
                    self.emit_log(format!("⚠️ Sheaf pre-check: {}", evidence));
                    self.context.last_test_output = Some(format!(
                        "Structural pre-check failure: {}\nEnsure all declared output files are generated correctly.",
                        evidence
                    ));
                    energy = self.step_verify(idx).await?;
                    // Penalize the sheaf component so convergence accounts
                    // for the structural failure.
                    energy.v_sheaf += 2.0;
                    continue;
                }
            }
            break;
        }
        // If the retry was consumed, re-run the pre-check; a second failure
        // escalates the node instead of looping again.
        if sheaf_pre_check_retries > 0 {
            if let Some(evidence) = self.sheaf_pre_check(idx) {
                log::warn!(
                    "Sheaf pre-check still failing for {} after retry, escalating: {}",
                    self.graph[idx].node_id,
                    evidence
                );
                self.emit_log(format!(
                    "❌ Sheaf pre-check failed after retry: {}",
                    evidence
                ));
                self.graph[idx].state = NodeState::Escalated;
                self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
                    node_id: self.graph[idx].node_id.clone(),
                    status: perspt_core::NodeStatus::Escalated,
                });
                let node_id_for_flush = self.graph[idx].node_id.clone();
                if let Some(ref bid) = branch_id {
                    self.flush_provisional_branch(bid, &node_id_for_flush);
                }
                self.flush_descendant_branches(idx);
                return Ok(NodeOutcome::Escalated);
            }
        }

        // Successful convergence: record it, then validate and commit.
        self.record_step_quietly(
            &self.graph[idx].node_id.clone(),
            "converge",
            "ok",
            Some(&energy),
            self.graph[idx].monitor.attempt_count as i32,
            converge_start.elapsed().as_millis() as i32,
        );

        let sheaf_start = std::time::Instant::now();
        self.step_sheaf_validate(idx).await?;
        self.record_step_quietly(
            &self.graph[idx].node_id.clone(),
            "sheaf_validate",
            "ok",
            None,
            0,
            sheaf_start.elapsed().as_millis() as i32,
        );

        let commit_start = std::time::Instant::now();
        self.step_commit(idx).await?;
        self.record_step_quietly(
            &self.graph[idx].node_id.clone(),
            "commit",
            "ok",
            None,
            0,
            commit_start.elapsed().as_millis() as i32,
        );

        // Fold the provisional branch's work back into the main workspace.
        if let Some(ref bid) = branch_id {
            self.merge_provisional_branch(bid, idx);
        }

        Ok(NodeOutcome::Completed)
    }
1672
1673 async fn step_speculate(&mut self, idx: NodeIndex) -> Result<()> {
1675 log::info!("Step 3: Speculation - Generating implementation");
1676
1677 let retriever = ContextRetriever::new(self.effective_working_dir(idx))
1681 .with_max_file_bytes(8 * 1024)
1682 .with_max_context_bytes(100 * 1024); let node = &self.graph[idx];
1685 let mut restriction_map =
1686 retriever.build_restriction_map(node, &self.context.ownership_manifest);
1687
1688 self.inject_sealed_interfaces(idx, &mut restriction_map);
1693
1694 let node = &self.graph[idx];
1695 let context_package = retriever.assemble_context_package(node, &restriction_map);
1696 let formatted_context = retriever.format_context_package(&context_package);
1697
1698 let node = &self.graph[idx];
1701 let missing_owned: Vec<String> = restriction_map
1702 .owned_files
1703 .iter()
1704 .filter(|f| {
1705 !context_package.included_files.contains_key(*f)
1707 && !node
1708 .output_targets
1709 .iter()
1710 .any(|ot| ot.to_string_lossy() == **f)
1711 })
1712 .cloned()
1713 .collect();
1714
1715 if context_package.budget_exceeded || !missing_owned.is_empty() {
1716 let reason = if context_package.budget_exceeded && !missing_owned.is_empty() {
1717 format!(
1718 "Budget exceeded and {} owned file(s) missing",
1719 missing_owned.len()
1720 )
1721 } else if context_package.budget_exceeded {
1722 "Context budget exceeded; some files replaced with structural digests".to_string()
1723 } else {
1724 format!(
1725 "{} owned file(s) could not be read: {}",
1726 missing_owned.len(),
1727 missing_owned.join(", ")
1728 )
1729 };
1730
1731 log::warn!("Context degraded for node '{}': {}", node.node_id, reason);
1732 self.emit_log(format!("⚠️ Context degraded: {}", reason));
1733 self.emit_event(perspt_core::AgentEvent::ContextDegraded {
1734 node_id: node.node_id.clone(),
1735 budget_exceeded: context_package.budget_exceeded,
1736 missing_owned_files: missing_owned.clone(),
1737 included_file_count: context_package.included_files.len(),
1738 total_bytes: context_package.total_bytes,
1739 reason: reason.clone(),
1740 });
1741
1742 if !missing_owned.is_empty() {
1745 self.emit_event(perspt_core::AgentEvent::ContextBlocked {
1746 node_id: node.node_id.clone(),
1747 missing_owned_files: missing_owned,
1748 reason: reason.clone(),
1749 });
1750 self.graph[idx].state = NodeState::Escalated;
1751 self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
1752 node_id: self.graph[idx].node_id.clone(),
1753 status: perspt_core::NodeStatus::Escalated,
1754 });
1755 let err_msg = format!(
1756 "Context blocked for node '{}': {}. Node escalated.",
1757 self.graph[idx].node_id, reason
1758 );
1759 eprintln!("[SRBN-DIAG] {}", err_msg);
1760 return Err(anyhow::anyhow!(err_msg));
1761 }
1762 }
1763
1764 {
1767 let node = &self.graph[idx];
1768 let prose_only_deps = self.check_structural_dependencies(node, &restriction_map);
1769 if !prose_only_deps.is_empty() {
1770 for (dep_node_id, dep_reason) in &prose_only_deps {
1771 self.emit_event(perspt_core::AgentEvent::StructuralDependencyMissing {
1772 node_id: node.node_id.clone(),
1773 dependency_node_id: dep_node_id.clone(),
1774 reason: dep_reason.clone(),
1775 });
1776 }
1777 let dep_names: Vec<&str> =
1778 prose_only_deps.iter().map(|(id, _)| id.as_str()).collect();
1779 let block_reason = format!(
1780 "Required structural dependencies lack machine-verifiable digests (only prose summaries): [{}]",
1781 dep_names.join(", ")
1782 );
1783 eprintln!(
1784 "[SRBN-DIAG] Structural dependency check failed for '{}': {}",
1785 self.graph[idx].node_id, block_reason
1786 );
1787 self.emit_log(format!("🚫 {}", block_reason));
1788 self.graph[idx].state = NodeState::Escalated;
1789 self.emit_event(perspt_core::AgentEvent::TaskStatusChanged {
1790 node_id: self.graph[idx].node_id.clone(),
1791 status: perspt_core::NodeStatus::Escalated,
1792 });
1793 return Err(anyhow::anyhow!(
1794 "Structural dependency check failed for node '{}': {}",
1795 self.graph[idx].node_id,
1796 block_reason
1797 ));
1798 }
1799 }
1800
1801 self.last_context_provenance = Some(context_package.provenance());
1803 self.last_formatted_context = formatted_context.clone();
1805
1806 let speculator_hints = if self.planning_policy.needs_speculator() {
1811 let node_id = self.graph[idx].node_id.clone();
1812 let node_goal = self.graph[idx].goal.clone();
1813 let child_goals: Vec<String> = self
1814 .graph
1815 .edges(idx)
1816 .filter_map(|edge| {
1817 let child = &self.graph[edge.target()];
1818 if child.state == NodeState::TaskQueued {
1819 Some(format!("- {}: {}", child.node_id, child.goal))
1820 } else {
1821 None
1822 }
1823 })
1824 .collect();
1825
1826 if !child_goals.is_empty() {
1827 let ev = perspt_core::types::PromptEvidence {
1828 node_goal: Some(node_goal.clone()),
1829 context_files: vec![node_id.clone()],
1830 output_files: child_goals.clone(),
1831 ..Default::default()
1832 };
1833 let speculator_prompt = crate::prompt_compiler::compile(
1834 perspt_core::types::PromptIntent::SpeculatorLookahead,
1835 &ev,
1836 )
1837 .text;
1838
1839 log::debug!(
1840 "Speculator lookahead for node {} using model {}",
1841 node_id,
1842 self.speculator_model
1843 );
1844 self.call_llm_with_logging(
1845 &self.speculator_model.clone(),
1846 &speculator_prompt,
1847 Some(&node_id),
1848 )
1849 .await
1850 .unwrap_or_else(|e| {
1851 log::warn!(
1852 "Speculator lookahead failed ({}), proceeding without hints",
1853 e
1854 );
1855 String::new()
1856 })
1857 } else {
1858 String::new()
1859 }
1860 } else {
1861 String::new()
1862 };
1863
1864 let actuator = &self.agents[1];
1865 let node = &self.graph[idx];
1866 let node_id = node.node_id.clone();
1867
1868 let base_prompt = actuator.build_prompt(node, &self.context);
1870 let mut prompt = if formatted_context.is_empty() {
1871 base_prompt
1872 } else {
1873 format!(
1874 "{}\n\n## Node Context (PSP-5 Restriction Map)\n\n{}",
1875 base_prompt, formatted_context
1876 )
1877 };
1878
1879 if !speculator_hints.is_empty() {
1880 prompt = format!(
1881 "{}\n\n## Speculator Lookahead Hints\n\n{}",
1882 prompt, speculator_hints
1883 );
1884 }
1885
1886 let wd = self.effective_working_dir(idx);
1889 if let Ok(tree) = crate::tools::list_sandbox_files(&wd) {
1890 if !tree.is_empty() {
1891 prompt = format!(
1892 "{}\n\n## Current Project Tree\n\n```\n{}\n```",
1893 prompt,
1894 tree.join("\n")
1895 );
1896 }
1897 }
1898
1899 let model = actuator.model().to_string();
1900
1901 let response = self
1902 .call_llm_with_logging(&model, &prompt, Some(&node_id))
1903 .await?;
1904
1905 let message = crate::types::AgentMessage::new(crate::types::ModelTier::Actuator, response);
1906 let content = &message.content;
1907
1908 if let Some(command) = self.extract_command_from_response(content) {
1910 log::info!("Extracted command: {}", command);
1911 self.emit_log(format!("🔧 Command proposed: {}", command));
1912
1913 let node_id = self.graph[idx].node_id.clone();
1915 let approval_result = self
1916 .await_approval_for_node(
1917 perspt_core::ActionType::Command {
1918 command: command.clone(),
1919 },
1920 format!("Execute shell command: {}", command),
1921 None,
1922 Some(&node_id),
1923 )
1924 .await;
1925
1926 if !matches!(
1927 approval_result,
1928 ApprovalResult::Approved | ApprovalResult::ApprovedWithEdit(_)
1929 ) {
1930 self.emit_log("⏭️ Command skipped (not approved)");
1931 return Ok(());
1932 }
1933
1934 let mut args = HashMap::new();
1936 args.insert("command".to_string(), command.clone());
1937
1938 let call = ToolCall {
1939 name: "run_command".to_string(),
1940 arguments: args,
1941 };
1942
1943 let result = self.tools.execute(&call).await;
1944 if result.success {
1945 log::info!("✓ Command succeeded: {}", command);
1946 self.emit_log(format!("✅ Command succeeded: {}", command));
1947 self.emit_log(result.output);
1948 } else {
1949 log::warn!("Command failed: {:?}", result.error);
1950 self.emit_log(format!("❌ Command failed: {:?}", result.error));
1951 }
1952 }
1953 else {
1955 let (bundle_opt, parse_state, record_opt) =
1956 self.parse_artifact_bundle_typed(content, &node_id, 0);
1957
1958 if let Some(ref record) = record_opt {
1959 log::info!(
1960 "PSP-7 initial gen: parse_state={}, accepted={}",
1961 record.parse_state,
1962 record.accepted
1963 );
1964 }
1965
1966 match parse_state {
1967 perspt_core::types::ParseResultState::StrictJsonOk
1968 | perspt_core::types::ParseResultState::TolerantRecoveryOk => {
1969 let bundle = bundle_opt.expect("Accepted parse must yield a bundle");
1970 let affected_files: Vec<String> = bundle
1971 .affected_paths()
1972 .into_iter()
1973 .map(ToString::to_string)
1974 .collect();
1975 log::info!(
1976 "Parsed artifact bundle for node {} ({}): {} artifacts, {} commands",
1977 node_id,
1978 parse_state,
1979 bundle.artifacts.len(),
1980 bundle.commands.len()
1981 );
1982 self.emit_log(format!(
1983 "📝 Bundle proposed: {} artifact(s) across {} file(s)",
1984 bundle.artifacts.len(),
1985 affected_files.len()
1986 ));
1987
1988 let approval_result = self
1989 .await_approval_for_node(
1990 perspt_core::ActionType::BundleWrite {
1991 node_id: node_id.clone(),
1992 files: affected_files.clone(),
1993 },
1994 format!("Apply bundle touching: {}", affected_files.join(", ")),
1995 serde_json::to_string_pretty(&bundle).ok(),
1996 Some(&node_id),
1997 )
1998 .await;
1999
2000 if !matches!(
2001 approval_result,
2002 ApprovalResult::Approved | ApprovalResult::ApprovedWithEdit(_)
2003 ) {
2004 self.emit_log("⏭️ Bundle application skipped (not approved)");
2005 return Ok(());
2006 }
2007
2008 let node_class = self.graph[idx].node_class;
2009 match self
2010 .apply_bundle_transactionally(&bundle, &node_id, node_class)
2011 .await
2012 {
2013 Ok(()) => {
2014 self.last_tool_failure = None;
2015 self.last_applied_bundle = Some(bundle.clone());
2016 }
2017 Err(e) => return Err(e),
2018 }
2019
2020 let effective_commands = self
2022 .last_applied_bundle
2023 .as_ref()
2024 .map(|b| b.commands.clone())
2025 .unwrap_or_default();
2026 if !effective_commands.is_empty() {
2027 self.emit_log(format!(
2028 "🔧 Executing {} bundle command(s)...",
2029 effective_commands.len()
2030 ));
2031 let work_dir = self.effective_working_dir(idx);
2032 let is_python = self.graph[idx].owner_plugin == "python";
2033 for raw_command in &effective_commands {
2034 let command = if is_python {
2035 Self::normalize_command_to_uv(raw_command)
2036 } else {
2037 raw_command.clone()
2038 };
2039
2040 let cmd_approval = self
2041 .await_approval_for_node(
2042 perspt_core::ActionType::Command {
2043 command: command.clone(),
2044 },
2045 format!("Execute bundle command: {}", command),
2046 None,
2047 Some(&node_id),
2048 )
2049 .await;
2050
2051 if !matches!(
2052 cmd_approval,
2053 ApprovalResult::Approved | ApprovalResult::ApprovedWithEdit(_)
2054 ) {
2055 self.emit_log(format!(
2056 "⏭️ Bundle command skipped (not approved): {}",
2057 command
2058 ));
2059 continue;
2060 }
2061
2062 let mut args = HashMap::new();
2063 args.insert("command".to_string(), command.clone());
2064 args.insert(
2065 "working_dir".to_string(),
2066 work_dir.to_string_lossy().to_string(),
2067 );
2068
2069 let call = ToolCall {
2070 name: "run_command".to_string(),
2071 arguments: args,
2072 };
2073
2074 let result = self.tools.execute(&call).await;
2075 if result.success {
2076 log::info!("✓ Bundle command succeeded: {}", command);
2077 self.emit_log(format!("✅ {}", command));
2078 if !result.output.is_empty() {
2079 let truncated: String =
2080 result.output.chars().take(500).collect();
2081 self.emit_log(truncated);
2082 }
2083 } else {
2084 let err_msg = result.error.unwrap_or_else(|| result.output.clone());
2085 log::warn!("Bundle command failed: {} — {}", command, err_msg);
2086 self.emit_log(format!(
2087 "❌ Command failed: {} — {}",
2088 command, err_msg
2089 ));
2090 self.last_tool_failure = Some(format!(
2091 "Bundle command '{}' failed: {}",
2092 command, err_msg
2093 ));
2094 }
2095 }
2096
2097 if is_python {
2098 log::info!("Running uv sync --dev after bundle commands...");
2099 let sync_result = tokio::process::Command::new("uv")
2100 .args(["sync", "--dev"])
2101 .current_dir(&work_dir)
2102 .stdout(std::process::Stdio::piped())
2103 .stderr(std::process::Stdio::piped())
2104 .output()
2105 .await;
2106 match sync_result {
2107 Ok(output) if output.status.success() => {
2108 self.emit_log("🐍 uv sync --dev completed".to_string());
2109 }
2110 Ok(output) => {
2111 let stderr = String::from_utf8_lossy(&output.stderr);
2112 log::warn!("uv sync --dev failed: {}", stderr);
2113 }
2114 Err(e) => {
2115 log::warn!("Failed to run uv sync --dev: {}", e);
2116 }
2117 }
2118 }
2119 }
2120 }
2121
2122 perspt_core::types::ParseResultState::SemanticallyRejected => {
2123 log::warn!(
2125 "Bundle for '{}' semantically rejected, retrying with retarget prompt",
2126 node_id
2127 );
2128 self.emit_log(format!(
2129 "🔄 Bundle for '{}' targeted wrong files — retrying...",
2130 node_id
2131 ));
2132
2133 let raw_paths: Vec<String> =
2134 perspt_core::normalize::extract_file_markers(content)
2135 .iter()
2136 .filter_map(|m| m.path.clone())
2137 .collect();
2138 let expected: Vec<String> = self.graph[idx]
2139 .output_targets
2140 .iter()
2141 .map(|p| p.to_string_lossy().to_string())
2142 .collect();
2143 let ev = perspt_core::types::PromptEvidence {
2144 output_files: expected.clone(),
2145 existing_file_contents: vec![(raw_paths.join(", "), prompt.clone())],
2146 ..Default::default()
2147 };
2148 let retry_prompt = crate::prompt_compiler::compile(
2149 perspt_core::types::PromptIntent::BundleRetarget,
2150 &ev,
2151 )
2152 .text;
2153
2154 let retry_response = self
2155 .call_llm_with_logging(&model, &retry_prompt, Some(&node_id))
2156 .await?;
2157
2158 let (retry_bundle_opt, retry_state, _) =
2159 self.parse_artifact_bundle_typed(&retry_response, &node_id, 1);
2160
2161 if let Some(retry_bundle) = retry_bundle_opt {
2162 let node_class = self.graph[idx].node_class;
2163 self.apply_bundle_transactionally(&retry_bundle, &node_id, node_class)
2164 .await?;
2165 self.last_tool_failure = None;
2166 self.last_applied_bundle = Some(retry_bundle);
2167 } else {
2168 return Err(anyhow::anyhow!(
2169 "Retry for '{}' did not produce a valid bundle ({})",
2170 node_id,
2171 retry_state
2172 ));
2173 }
2174 }
2175
2176 _ => {
2177 log::debug!(
2179 "No artifact bundle found in response ({}), response length: {}",
2180 parse_state,
2181 content.len()
2182 );
2183 self.emit_log("ℹ️ No file changes detected in response".to_string());
2184 }
2185 }
2186 }
2187
2188 self.context.history.push(message);
2189 Ok(())
2190 }
2191
2192 fn extract_command_from_response(&self, content: &str) -> Option<String> {
2195 for line in content.lines() {
2196 let trimmed = line.trim();
2197 if trimmed.starts_with("[COMMAND]") {
2198 return Some(trimmed.trim_start_matches("[COMMAND]").trim().to_string());
2199 }
2200 if trimmed.starts_with("$ ") || trimmed.starts_with("➜ ") {
2202 return Some(
2203 trimmed
2204 .trim_start_matches("$ ")
2205 .trim_start_matches("➜ ")
2206 .trim()
2207 .to_string(),
2208 );
2209 }
2210 }
2211 None
2212 }
2213
2214 pub fn session_id(&self) -> &str {
2220 &self.context.session_id
2221 }
2222
2223 pub fn node_count(&self) -> usize {
2225 self.graph.node_count()
2226 }
2227
2228 pub async fn start_lsp_for_plugins(&mut self, plugin_names: &[&str]) -> Result<()> {
2233 let registry = perspt_core::plugin::PluginRegistry::new();
2234
2235 for &name in plugin_names {
2236 if self.lsp_clients.contains_key(name) {
2237 log::debug!("LSP client already running for {}", name);
2238 continue;
2239 }
2240
2241 let plugin = match registry.get(name) {
2242 Some(p) => p,
2243 None => {
2244 log::warn!("No plugin found for '{}', skipping LSP startup", name);
2245 continue;
2246 }
2247 };
2248
2249 let profile = plugin.verifier_profile();
2250 let lsp_config = match profile.lsp.effective_config() {
2251 Some(cfg) => cfg.clone(),
2252 None => {
2253 log::warn!(
2254 "No available LSP for {} (primary and fallback unavailable)",
2255 name
2256 );
2257 continue;
2258 }
2259 };
2260
2261 log::info!(
2262 "Starting LSP for {}: {} {:?}",
2263 name,
2264 lsp_config.server_binary,
2265 lsp_config.args
2266 );
2267
2268 let mut client = LspClient::from_config(&lsp_config);
2269 match client
2270 .start_with_config(&lsp_config, &self.context.working_dir)
2271 .await
2272 {
2273 Ok(()) => {
2274 log::info!("{} LSP started successfully", name);
2275 self.lsp_clients.insert(name.to_string(), client);
2276 }
2277 Err(e) => {
2278 log::warn!(
2279 "Failed to start {} LSP: {} (continuing without it)",
2280 name,
2281 e
2282 );
2283 }
2284 }
2285 }
2286
2287 Ok(())
2288 }
2289
2290 fn lsp_key_for_file(&self, path: &str) -> Option<String> {
2295 let registry = perspt_core::plugin::PluginRegistry::new();
2296
2297 for plugin in registry.all() {
2299 if plugin.owns_file(path) {
2300 let name = plugin.name().to_string();
2301 if self.lsp_clients.contains_key(&name) {
2302 return Some(name);
2303 }
2304 }
2305 }
2306
2307 self.lsp_clients.keys().next().cloned()
2309 }
2310
2311 fn sandbox_dir_for_node(&self, idx: NodeIndex) -> Option<std::path::PathBuf> {
2322 let branch_id = self.graph[idx].provisional_branch_id.as_ref()?;
2323 let sandbox_path = self
2324 .context
2325 .working_dir
2326 .join(".perspt")
2327 .join("sandboxes")
2328 .join(&self.context.session_id)
2329 .join(branch_id);
2330 if sandbox_path.exists() {
2331 Some(sandbox_path)
2332 } else {
2333 None
2334 }
2335 }
2336
2337 fn sheaf_pre_check(&self, idx: NodeIndex) -> Option<String> {
2344 let node = &self.graph[idx];
2345 if node.output_targets.is_empty() {
2346 return None;
2347 }
2348
2349 let work_dir = self.effective_working_dir(idx);
2350 let mut issues = Vec::new();
2351
2352 for path in &node.output_targets {
2353 let full = work_dir.join(path);
2354 match std::fs::metadata(&full) {
2355 Ok(m) if m.len() == 0 => {
2356 issues.push(format!("empty: {}", path.display()));
2357 }
2358 Err(_) => {
2359 issues.push(format!("missing: {}", path.display()));
2360 }
2361 Ok(_) => {
2362 if let Some(reason) = detect_stub_content(&full, &node.owner_plugin) {
2364 issues.push(format!("stub content in {}: {}", path.display(), reason));
2365 }
2366 }
2367 }
2368 }
2369
2370 if issues.is_empty() {
2371 None
2372 } else {
2373 Some(format!("Output target issues: {}", issues.join(", ")))
2374 }
2375 }
2376
2377 fn effective_working_dir(&self, idx: NodeIndex) -> std::path::PathBuf {
2380 self.sandbox_dir_for_node(idx)
2381 .unwrap_or_else(|| self.context.working_dir.clone())
2382 }
2383
    /// Creates a provisional branch (and its on-disk sandbox) for the node at
    /// `idx`, records it in the ledger together with lineage edges to every
    /// parent node, and emits a `BranchCreated` event.
    ///
    /// Despite the `maybe_` name, this implementation always creates and
    /// returns a branch id; only the sandbox materialization is best-effort.
    /// Ledger and sandbox failures are logged and tolerated.
    fn maybe_create_provisional_branch(&mut self, idx: NodeIndex) -> Option<String> {
        let parents: Vec<NodeIndex> = self
            .graph
            .neighbors_directed(idx, petgraph::Direction::Incoming)
            .collect();

        let node = &self.graph[idx];
        let node_id = node.node_id.clone();
        let session_id = self.context.session_id.clone();

        // A root node (no parents) is parented to the synthetic "root";
        // otherwise the first incoming neighbor is used as the branch parent.
        let parent_node_id = if parents.is_empty() {
            "root".to_string()
        } else {
            self.graph[parents[0]].node_id.clone()
        };

        // UUID suffix keeps branch ids unique across retries of the same node.
        let branch_id = format!("branch_{}_{}", node_id, uuid::Uuid::new_v4());
        let branch = ProvisionalBranch::new(
            branch_id.clone(),
            session_id.clone(),
            node_id.clone(),
            parent_node_id.clone(),
        );

        if let Err(e) = self.ledger.record_provisional_branch(&branch) {
            log::warn!("Failed to record provisional branch: {}", e);
        }

        // One lineage record per parent; Interface-class parents impose a
        // seal dependency on this branch.
        for pidx in &parents {
            let parent_id = self.graph[*pidx].node_id.clone();
            let depends_on_seal = self.graph[*pidx].node_class == NodeClass::Interface;
            let lineage = perspt_core::types::BranchLineage {
                lineage_id: format!("lin_{}_{}", branch_id, parent_id),
                parent_branch_id: parent_id,
                child_branch_id: branch_id.clone(),
                depends_on_seal,
            };
            if let Err(e) = self.ledger.record_branch_lineage(&lineage) {
                log::warn!("Failed to record branch lineage: {}", e);
            }
        }

        self.graph[idx].provisional_branch_id = Some(branch_id.clone());

        // Best-effort sandbox: created and seeded, but failures only log.
        match crate::tools::create_sandbox(&self.context.working_dir, &session_id, &branch_id) {
            Ok(sandbox_path) => {
                log::debug!("Sandbox created at {}", sandbox_path.display());

                // Seed plugin manifests (e.g. build files) so tooling works
                // inside the sandbox.
                let plugin_refs: Vec<&str> = self
                    .context
                    .active_plugins
                    .iter()
                    .map(|s| s.as_str())
                    .collect();
                if let Err(e) = crate::tools::seed_sandbox_manifests(
                    &self.context.working_dir,
                    &sandbox_path,
                    &plugin_refs,
                ) {
                    log::warn!("Failed to seed sandbox manifests: {}", e);
                }

                // Seed the node's own output targets (if they already exist,
                // e.g. when revising); absence is expected and only debug-logged.
                let node = &self.graph[idx];
                for target in &node.output_targets {
                    if let Some(rel) = target.to_str() {
                        if let Err(e) = crate::tools::copy_to_sandbox(
                            &self.context.working_dir,
                            &sandbox_path,
                            rel,
                        ) {
                            log::debug!("Could not seed sandbox with {}: {}", rel, e);
                        }
                    }
                }
                // Walk ALL transitive ancestors (DFS via an explicit stack,
                // `visited` guards against diamond-shaped dependencies) and
                // copy their output targets in, so the node sees every file
                // its dependency chain produced.
                let mut ancestor_queue: Vec<NodeIndex> = parents.clone();
                let mut visited = std::collections::HashSet::new();
                while let Some(ancestor_idx) = ancestor_queue.pop() {
                    if !visited.insert(ancestor_idx) {
                        continue;
                    }
                    for target in &self.graph[ancestor_idx].output_targets {
                        if let Some(rel) = target.to_str() {
                            if let Err(e) = crate::tools::copy_to_sandbox(
                                &self.context.working_dir,
                                &sandbox_path,
                                rel,
                            ) {
                                log::debug!(
                                    "Could not seed sandbox with ancestor file {}: {}",
                                    rel,
                                    e
                                );
                            }
                        }
                    }
                    for grandparent in self
                        .graph
                        .neighbors_directed(ancestor_idx, petgraph::Direction::Incoming)
                    {
                        ancestor_queue.push(grandparent);
                    }
                }
            }
            Err(e) => {
                log::warn!("Failed to create sandbox for branch {}: {}", branch_id, e);
            }
        }

        self.emit_event(perspt_core::AgentEvent::BranchCreated {
            branch_id: branch_id.clone(),
            node_id,
            parent_node_id,
        });
        log::info!("Created provisional branch {} for node", branch_id);

        Some(branch_id)
    }
2522
2523 fn merge_provisional_branch(&mut self, branch_id: &str, idx: NodeIndex) {
2525 let node_id = self.graph[idx].node_id.clone();
2526 if let Err(e) = self
2527 .ledger
2528 .update_branch_state(branch_id, &ProvisionalBranchState::Merged.to_string())
2529 {
2530 log::warn!("Failed to merge branch {}: {}", branch_id, e);
2531 }
2532
2533 let sandbox_path = self
2535 .context
2536 .working_dir
2537 .join(".perspt")
2538 .join("sandboxes")
2539 .join(&self.context.session_id)
2540 .join(branch_id);
2541 if let Err(e) = crate::tools::cleanup_sandbox(&sandbox_path) {
2542 log::warn!(
2543 "Failed to cleanup sandbox for merged branch {}: {}",
2544 branch_id,
2545 e
2546 );
2547 }
2548
2549 self.emit_event(perspt_core::AgentEvent::BranchMerged {
2550 branch_id: branch_id.to_string(),
2551 node_id,
2552 });
2553 log::info!("Merged provisional branch {}", branch_id);
2554 }
2555
2556 fn flush_provisional_branch(&mut self, branch_id: &str, node_id: &str) {
2558 if let Err(e) = self
2559 .ledger
2560 .update_branch_state(branch_id, &ProvisionalBranchState::Flushed.to_string())
2561 {
2562 log::warn!("Failed to flush branch {}: {}", branch_id, e);
2563 }
2564
2565 let sandbox_path = self
2567 .context
2568 .working_dir
2569 .join(".perspt")
2570 .join("sandboxes")
2571 .join(&self.context.session_id)
2572 .join(branch_id);
2573 if let Err(e) = crate::tools::cleanup_sandbox(&sandbox_path) {
2574 log::warn!(
2575 "Failed to cleanup sandbox for flushed branch {}: {}",
2576 branch_id,
2577 e
2578 );
2579 }
2580
2581 log::info!(
2582 "Flushed provisional branch {} for node {}",
2583 branch_id,
2584 node_id
2585 );
2586 }
2587
    /// Flush the provisional branch of every descendant of the node at `idx`,
    /// typically because that node failed verification or convergence.
    ///
    /// Each flushed branch is torn down via `flush_provisional_branch`, a
    /// `BranchFlushRecord` is written to the ledger, and one `BranchFlushed`
    /// event is emitted. Descendants without a branch are skipped; when
    /// nothing was flushed, nothing is recorded or emitted.
    fn flush_descendant_branches(&mut self, idx: NodeIndex) {
        let parent_node_id = self.graph[idx].node_id.clone();
        let session_id = self.context.session_id.clone();

        let descendant_indices = self.collect_descendants(idx);

        let mut flushed_branch_ids = Vec::new();
        let mut requeue_node_ids = Vec::new();

        for desc_idx in &descendant_indices {
            let desc_node = &self.graph[*desc_idx];
            if let Some(ref bid) = desc_node.provisional_branch_id {
                // Clone ids before calling flush_provisional_branch: it needs
                // &mut self, which conflicts with the borrow of desc_node.
                let bid_clone = bid.clone();
                let nid_clone = desc_node.node_id.clone();
                self.flush_provisional_branch(&bid_clone, &nid_clone);
                flushed_branch_ids.push(bid_clone);
                requeue_node_ids.push(nid_clone);
            }
        }

        if flushed_branch_ids.is_empty() {
            return;
        }

        let flush_record = perspt_core::types::BranchFlushRecord::new(
            &session_id,
            &parent_node_id,
            flushed_branch_ids.clone(),
            requeue_node_ids.clone(),
            format!(
                "Parent node {} failed verification/convergence",
                parent_node_id
            ),
        );
        if let Err(e) = self.ledger.record_branch_flush(&flush_record) {
            log::warn!("Failed to record branch flush: {}", e);
        }

        self.emit_event(perspt_core::AgentEvent::BranchFlushed {
            parent_node_id: parent_node_id.clone(),
            flushed_branch_ids,
            reason: format!("Parent {} failed", parent_node_id),
        });

        log::info!(
            "Flushed {} descendant branches for parent {}; {} nodes eligible for requeue",
            flush_record.flushed_branch_ids.len(),
            parent_node_id,
            requeue_node_ids.len(),
        );
    }
2647
2648 fn collect_descendants(&self, idx: NodeIndex) -> Vec<NodeIndex> {
2651 let mut descendants = Vec::new();
2652 let mut stack = vec![idx];
2653 let mut visited = std::collections::HashSet::new();
2654 visited.insert(idx);
2655
2656 while let Some(current) = stack.pop() {
2657 for child in self
2658 .graph
2659 .neighbors_directed(current, petgraph::Direction::Outgoing)
2660 {
2661 if visited.insert(child) {
2662 descendants.push(child);
2663 stack.push(child);
2664 }
2665 }
2666 }
2667 descendants
2668 }
2669
    /// Seal the output artifacts of an Interface node.
    ///
    /// For each output target a structural digest is computed and recorded as
    /// an `InterfaceSealRecord`; on success the node is stamped with the seal
    /// hash and an `InterfaceSealed` event is emitted. Non-Interface nodes
    /// and nodes whose targets all fail digesting are left untouched.
    fn emit_interface_seals(&mut self, idx: NodeIndex) {
        let node = &self.graph[idx];
        if node.node_class != NodeClass::Interface {
            return;
        }

        let node_id = node.node_id.clone();
        let session_id = self.context.session_id.clone();
        let output_targets: Vec<_> = node.output_targets.clone();
        let mut sealed_paths = Vec::new();
        let mut seal_hash = [0u8; 32];

        let retriever = ContextRetriever::new(self.context.working_dir.clone());

        for target in &output_targets {
            let path_str = target.to_string_lossy().to_string();
            match retriever.compute_structural_digest(
                &path_str,
                perspt_core::types::ArtifactKind::InterfaceSeal,
                &node_id,
            ) {
                Ok(digest) => {
                    let seal = perspt_core::types::InterfaceSealRecord::from_digest(
                        &session_id,
                        &node_id,
                        &digest,
                    );
                    // NOTE(review): with multiple output targets, seal_hash is
                    // overwritten each iteration, so the node-level hash below
                    // reflects only the last sealed target — confirm intended.
                    seal_hash = seal.seal_hash;
                    sealed_paths.push(path_str);

                    if let Err(e) = self.ledger.record_interface_seal(&seal) {
                        log::warn!("Failed to record interface seal: {}", e);
                    }
                }
                Err(e) => {
                    // Target may not exist yet; skip quietly.
                    log::debug!("Skipping seal for {}: {}", path_str, e);
                }
            }
        }

        if !sealed_paths.is_empty() {
            self.graph[idx].interface_seal_hash = Some(seal_hash);

            self.emit_event(perspt_core::AgentEvent::InterfaceSealed {
                node_id: node_id.clone(),
                sealed_paths: sealed_paths.clone(),
                // Hex-encode the 32-byte hash for the event payload.
                seal_hash: seal_hash
                    .iter()
                    .map(|b| format!("{:02x}", b))
                    .collect::<String>(),
            });
            log::info!(
                "Sealed {} interface artifact(s) for node {}",
                sealed_paths.len(),
                node_id
            );
        }
    }
2734
2735 fn unblock_dependents(&mut self, idx: NodeIndex) {
2737 let node_id = self.graph[idx].node_id.clone();
2738
2739 let (unblocked, remaining): (Vec<_>, Vec<_>) = self
2741 .blocked_dependencies
2742 .drain(..)
2743 .partition(|dep| dep.parent_node_id == node_id);
2744
2745 self.blocked_dependencies = remaining;
2746
2747 for dep in unblocked {
2748 self.emit_event(perspt_core::AgentEvent::DependentUnblocked {
2749 child_node_id: dep.child_node_id.clone(),
2750 parent_node_id: node_id.clone(),
2751 });
2752 log::info!(
2753 "Unblocked dependent {} (parent {} sealed)",
2754 dep.child_node_id,
2755 node_id
2756 );
2757 }
2758 }
2759
    /// Check whether the node at `idx` must wait for an Interface parent that
    /// has not yet produced a seal.
    ///
    /// If such a parent exists, the dependency is recorded in
    /// `blocked_dependencies` and `true` (blocked) is returned; only the
    /// first blocking parent is recorded per call. Returns `false` when no
    /// unsealed Interface parent exists.
    fn check_seal_prerequisites(&mut self, idx: NodeIndex) -> bool {
        let parents: Vec<NodeIndex> = self
            .graph
            .neighbors_directed(idx, petgraph::Direction::Incoming)
            .collect();

        for pidx in parents {
            let parent = &self.graph[pidx];
            // Blocking requires all three: Interface class, no seal yet, and
            // the parent has not already completed.
            if parent.node_class == NodeClass::Interface
                && parent.interface_seal_hash.is_none()
                && parent.state != NodeState::Completed
            {
                let child_node_id = self.graph[idx].node_id.clone();
                let parent_node_id = parent.node_id.clone();
                let sealed_paths: Vec<String> = parent
                    .output_targets
                    .iter()
                    .map(|p| p.to_string_lossy().to_string())
                    .collect();

                let dep = perspt_core::types::BlockedDependency::new(
                    &child_node_id,
                    &parent_node_id,
                    sealed_paths,
                );
                self.blocked_dependencies.push(dep);

                log::info!(
                    "Node {} blocked: waiting on interface seal from {}",
                    child_node_id,
                    parent_node_id
                );
                return true;
            }
        }
        false
    }
2800
2801 fn check_structural_dependencies(
2807 &self,
2808 node: &SRBNNode,
2809 restriction_map: &perspt_core::types::RestrictionMap,
2810 ) -> Vec<(String, String)> {
2811 use perspt_core::types::{ArtifactKind, NodeClass};
2812
2813 let mut prose_only = Vec::new();
2814
2815 if node.node_class != NodeClass::Implementation {
2817 return prose_only;
2818 }
2819
2820 let idx = match self.node_indices.get(&node.node_id) {
2822 Some(i) => *i,
2823 None => return prose_only,
2824 };
2825
2826 let parents: Vec<NodeIndex> = self
2827 .graph
2828 .neighbors_directed(idx, petgraph::Direction::Incoming)
2829 .collect();
2830
2831 for pidx in parents {
2832 let parent = &self.graph[pidx];
2833 if parent.node_class != NodeClass::Interface {
2834 continue;
2835 }
2836
2837 let has_structural = restriction_map.structural_digests.iter().any(|d| {
2839 d.source_node_id == parent.node_id
2840 && matches!(
2841 d.artifact_kind,
2842 ArtifactKind::Signature
2843 | ArtifactKind::Schema
2844 | ArtifactKind::InterfaceSeal
2845 )
2846 });
2847
2848 if !has_structural {
2849 prose_only.push((
2850 parent.node_id.clone(),
2851 format!(
2852 "Interface node '{}' has no Signature/Schema/InterfaceSeal digest in the restriction map",
2853 parent.node_id
2854 ),
2855 ));
2856 }
2857 }
2858
2859 prose_only
2860 }
2861
    /// Inject structural digests for every sealed Interface parent of the
    /// node at `idx` into its restriction map.
    ///
    /// For each seal found in the ledger, the corresponding prose-level
    /// `sealed_interfaces` entry is removed and replaced by an
    /// `InterfaceSeal` structural digest. Parents without a seal hash and
    /// failed ledger queries are skipped (logged at debug level).
    fn inject_sealed_interfaces(
        &self,
        idx: NodeIndex,
        restriction_map: &mut perspt_core::types::RestrictionMap,
    ) {
        let parents: Vec<NodeIndex> = self
            .graph
            .neighbors_directed(idx, petgraph::Direction::Incoming)
            .collect();

        for pidx in parents {
            let parent = &self.graph[pidx];
            // Only parents that actually sealed an interface contribute.
            if parent.interface_seal_hash.is_none() {
                continue;
            }

            let parent_node_id = &parent.node_id;

            let seals = match self.ledger.get_interface_seals(parent_node_id) {
                Ok(rows) => rows,
                Err(e) => {
                    log::debug!("Could not query seals for {}: {}", parent_node_id, e);
                    continue;
                }
            };

            for seal in seals {
                // Drop the plain sealed-path entry; the structural digest
                // pushed below supersedes it.
                restriction_map
                    .sealed_interfaces
                    .retain(|p| *p != seal.sealed_path);

                // Copy at most 32 bytes of the stored hash; shorter hashes
                // are right-padded with zeros.
                let mut hash = [0u8; 32];
                let len = seal.seal_hash.len().min(32);
                hash[..len].copy_from_slice(&seal.seal_hash[..len]);

                let digest = perspt_core::types::StructuralDigest {
                    digest_id: format!("seal_{}_{}", seal.node_id, seal.sealed_path),
                    source_node_id: seal.node_id.clone(),
                    source_path: seal.sealed_path.clone(),
                    artifact_kind: perspt_core::types::ArtifactKind::InterfaceSeal,
                    hash,
                    version: seal.version as u32,
                };
                restriction_map.structural_digests.push(digest);

                log::debug!(
                    "Injected sealed digest for {} from parent {}",
                    seal.sealed_path,
                    parent_node_id,
                );
            }
        }
    }
2925}
2926
/// Parse a persisted node-state string back into a [`NodeState`].
///
/// Delegates to `NodeState::from_display_str`, so it accepts exactly the
/// strings produced by the state's display form.
fn parse_node_state(s: &str) -> NodeState {
    NodeState::from_display_str(s)
}
2931
/// Parse a persisted node-class string back into a [`NodeClass`].
///
/// Unrecognized strings fall back to `NodeClass::default()` rather than
/// erroring, so loading records written by other versions cannot fail here.
fn parse_node_class(s: &str) -> NodeClass {
    match s {
        "Interface" => NodeClass::Interface,
        "Implementation" => NodeClass::Implementation,
        "Integration" => NodeClass::Integration,
        _ => NodeClass::default(),
    }
}
2941
2942#[cfg(test)]
2943mod tests {
2944 use super::verification::verification_stages_for_node;
2945 use super::*;
2946 use std::path::PathBuf;
2947
    /// A freshly constructed orchestrator starts with an empty graph.
    #[tokio::test]
    async fn test_orchestrator_creation() {
        let orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        assert_eq!(orch.node_count(), 0);
    }

    /// Nodes can be added and wired together with a dependency edge.
    #[tokio::test]
    async fn test_add_nodes() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));

        let node1 = SRBNNode::new(
            "node1".to_string(),
            "Test task 1".to_string(),
            ModelTier::Architect,
        );
        let node2 = SRBNNode::new(
            "node2".to_string(),
            "Test task 2".to_string(),
            ModelTier::Actuator,
        );

        orch.add_node(node1);
        orch.add_node(node2);
        orch.add_dependency("node1", "node2", "depends_on").unwrap();

        assert_eq!(orch.node_count(), 2);
    }
2975 #[tokio::test]
2976 async fn test_lsp_key_for_file_resolves_by_plugin() {
2977 let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
2978 orch.lsp_clients.insert(
2980 "rust".to_string(),
2981 crate::lsp::LspClient::new("rust-analyzer"),
2982 );
2983 orch.lsp_clients
2984 .insert("python".to_string(), crate::lsp::LspClient::new("pylsp"));
2985
2986 assert_eq!(
2988 orch.lsp_key_for_file("src/main.rs"),
2989 Some("rust".to_string())
2990 );
2991 assert_eq!(orch.lsp_key_for_file("app.py"), Some("python".to_string()));
2993 let key = orch.lsp_key_for_file("data.csv");
2995 assert!(key.is_some()); }
2997
    /// Splitting a node replaces it with `<id>__split_N` children.
    #[tokio::test]
    async fn test_split_node_creates_children() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let mut node = SRBNNode::new("parent".into(), "Do everything".into(), ModelTier::Actuator);
        node.output_targets = vec![PathBuf::from("a.rs"), PathBuf::from("b.rs")];
        orch.add_node(node);

        let idx = orch.node_indices["parent"];
        let applied = orch.split_node(idx, &["handle a.rs".into(), "handle b.rs".into()]);
        assert!(!applied.is_empty());
        // The original node is removed and replaced by its children.
        assert!(!orch.node_indices.contains_key("parent"));
        assert!(orch.node_indices.contains_key("parent__split_0"));
        assert!(orch.node_indices.contains_key("parent__split_1"));
    }

    /// Splitting with no child goals must leave the graph untouched.
    #[tokio::test]
    async fn test_split_node_empty_children_is_noop() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let node = SRBNNode::new("n".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(node);
        let idx = orch.node_indices["n"];
        let applied = orch.split_node(idx, &[]);
        assert!(applied.is_empty());
    }
3029
    /// Inserting an interface node after `a` adds an `a__iface` node.
    #[tokio::test]
    async fn test_insert_interface_node() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let n1 = SRBNNode::new("a".into(), "source".into(), ModelTier::Actuator);
        let n2 = SRBNNode::new("b".into(), "dest".into(), ModelTier::Actuator);
        orch.add_node(n1);
        orch.add_node(n2);
        orch.add_dependency("a", "b", "data_flow").unwrap();

        let idx_a = orch.node_indices["a"];
        let applied = orch.insert_interface_node(idx_a, "API boundary");
        assert!(applied.is_some());
        assert!(orch.node_indices.contains_key("a__iface"));
        assert_eq!(orch.node_count(), 3);
    }

    /// Replanning resets listed nodes to `TaskQueued` and the triggering
    /// node to `Retry`.
    #[tokio::test]
    async fn test_replan_subgraph_resets_nodes() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let mut n1 = SRBNNode::new("trigger".into(), "g1".into(), ModelTier::Actuator);
        n1.state = NodeState::Coding;
        let mut n2 = SRBNNode::new("dep".into(), "g2".into(), ModelTier::Actuator);
        n2.state = NodeState::Completed;
        orch.add_node(n1);
        orch.add_node(n2);

        let trigger_idx = orch.node_indices["trigger"];
        let applied = orch.replan_subgraph(trigger_idx, &["dep".into()]);
        assert!(applied);

        let dep_idx = orch.node_indices["dep"];
        assert_eq!(orch.graph[dep_idx].state, NodeState::TaskQueued);
        assert_eq!(orch.graph[trigger_idx].state, NodeState::Retry);
    }
3065
    /// Validator selection must always include dependency-graph consistency.
    #[tokio::test]
    async fn test_select_validators_always_includes_dependency_graph() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let node = SRBNNode::new("n".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(node);
        let idx = orch.node_indices["n"];

        let validators = orch.select_validators(idx);
        assert!(validators.contains(&SheafValidatorClass::DependencyGraphConsistency));
    }

    /// Interface nodes additionally get export/import consistency checks.
    #[tokio::test]
    async fn test_select_validators_interface_node() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let mut node = SRBNNode::new("iface".into(), "g".into(), ModelTier::Actuator);
        node.node_class = perspt_core::types::NodeClass::Interface;
        orch.add_node(node);
        let idx = orch.node_indices["iface"];

        let validators = orch.select_validators(idx);
        assert!(validators.contains(&SheafValidatorClass::ExportImportConsistency));
    }

    /// An acyclic two-node graph passes the dependency-graph validator with
    /// zero sheaf-energy contribution.
    #[tokio::test]
    async fn test_run_sheaf_validator_dependency_graph_no_cycles() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let n1 = SRBNNode::new("a".into(), "g".into(), ModelTier::Actuator);
        let n2 = SRBNNode::new("b".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(n1);
        orch.add_node(n2);
        orch.add_dependency("a", "b", "dep").unwrap();

        let idx = orch.node_indices["a"];
        let result = orch.run_sheaf_validator(idx, SheafValidatorClass::DependencyGraphConsistency);
        assert!(result.passed);
        assert_eq!(result.v_sheaf_contribution, 0.0);
    }

    /// Without any extra signals, non-convergence defaults to
    /// `ImplementationError`.
    #[tokio::test]
    async fn test_classify_non_convergence_default() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let node = SRBNNode::new("n".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(node);
        let idx = orch.node_indices["n"];

        let category = orch.classify_non_convergence(idx);
        assert_eq!(category, EscalationCategory::ImplementationError);
    }
3115
    /// Direct children of a node are reported as its affected dependents.
    #[tokio::test]
    async fn test_affected_dependents() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let n1 = SRBNNode::new("root".into(), "g".into(), ModelTier::Actuator);
        let n2 = SRBNNode::new("child1".into(), "g".into(), ModelTier::Actuator);
        let n3 = SRBNNode::new("child2".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(n1);
        orch.add_node(n2);
        orch.add_node(n3);
        orch.add_dependency("root", "child1", "dep").unwrap();
        orch.add_dependency("root", "child2", "dep").unwrap();

        let idx = orch.node_indices["root"];
        let deps = orch.affected_dependents(idx);
        assert_eq!(deps.len(), 2);
        assert!(deps.contains(&"child1".to_string()));
        assert!(deps.contains(&"child2".to_string()));
    }
3134
    /// A root node (no parents) still gets a provisional branch, and the
    /// branch id is stored on the node.
    #[tokio::test]
    async fn test_maybe_create_provisional_branch_root_node() {
        let temp_dir =
            std::env::temp_dir().join(format!("perspt_root_branch_{}", uuid::Uuid::new_v4()));
        std::fs::create_dir_all(&temp_dir).unwrap();

        let mut orch = SRBNOrchestrator::new_for_testing(temp_dir.clone());
        orch.context.session_id = "test_session".into();
        let node = SRBNNode::new("root".into(), "root goal".into(), ModelTier::Actuator);
        orch.add_node(node);

        let idx = orch.node_indices["root"];
        let branch = orch.maybe_create_provisional_branch(idx);
        assert!(branch.is_some());
        assert!(orch.graph[idx].provisional_branch_id.is_some());

        let _ = std::fs::remove_dir_all(&temp_dir);
    }

    /// A child node with a parent also gets a provisional branch.
    #[tokio::test]
    async fn test_maybe_create_provisional_branch_child_node() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_phase6"));
        orch.context.session_id = "test_session".into();
        let parent = SRBNNode::new("parent".into(), "parent goal".into(), ModelTier::Actuator);
        let child = SRBNNode::new("child".into(), "child goal".into(), ModelTier::Actuator);
        orch.add_node(parent);
        orch.add_node(child);
        orch.add_dependency("parent", "child", "dep").unwrap();

        let idx = orch.node_indices["child"];
        let branch = orch.maybe_create_provisional_branch(idx);
        assert!(branch.is_some());
        assert!(orch.graph[idx].provisional_branch_id.is_some());
    }
3174
3175 #[tokio::test]
3176 async fn test_collect_descendants() {
3177 let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
3178 let n1 = SRBNNode::new("a".into(), "g".into(), ModelTier::Actuator);
3179 let n2 = SRBNNode::new("b".into(), "g".into(), ModelTier::Actuator);
3180 let n3 = SRBNNode::new("c".into(), "g".into(), ModelTier::Actuator);
3181 let n4 = SRBNNode::new("d".into(), "g".into(), ModelTier::Actuator);
3182 orch.add_node(n1);
3183 orch.add_node(n2);
3184 orch.add_node(n3);
3185 orch.add_node(n4);
3186 orch.add_dependency("a", "b", "dep").unwrap();
3187 orch.add_dependency("b", "c", "dep").unwrap();
3188 orch.add_dependency("a", "d", "dep").unwrap();
3189
3190 let idx_a = orch.node_indices["a"];
3191 let descendants = orch.collect_descendants(idx_a);
3192 assert_eq!(descendants.len(), 3); }
3194
    /// A plain (non-Interface) parent never blocks its child.
    #[tokio::test]
    async fn test_check_seal_prerequisites_no_interface_parent() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let parent = SRBNNode::new("parent".into(), "g".into(), ModelTier::Actuator);
        let child = SRBNNode::new("child".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(parent);
        orch.add_node(child);
        orch.add_dependency("parent", "child", "dep").unwrap();

        let idx = orch.node_indices["child"];
        assert!(!orch.check_seal_prerequisites(idx));
        assert!(orch.blocked_dependencies.is_empty());
    }

    /// An unsealed Interface parent blocks the child and records the
    /// dependency.
    #[tokio::test]
    async fn test_check_seal_prerequisites_unsealed_interface() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let mut parent = SRBNNode::new("iface".into(), "g".into(), ModelTier::Actuator);
        parent.node_class = perspt_core::types::NodeClass::Interface;
        let child = SRBNNode::new("impl".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(parent);
        orch.add_node(child);
        orch.add_dependency("iface", "impl", "dep").unwrap();

        let idx = orch.node_indices["impl"];
        assert!(orch.check_seal_prerequisites(idx));
        assert_eq!(orch.blocked_dependencies.len(), 1);
        assert_eq!(orch.blocked_dependencies[0].parent_node_id, "iface");
    }
3226
3227 #[tokio::test]
3228 async fn test_check_seal_prerequisites_sealed_interface() {
3229 let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
3230 let mut parent = SRBNNode::new("iface".into(), "g".into(), ModelTier::Actuator);
3231 parent.node_class = perspt_core::types::NodeClass::Interface;
3232 parent.interface_seal_hash = Some([1u8; 32]); let child = SRBNNode::new("impl".into(), "g".into(), ModelTier::Actuator);
3234 orch.add_node(parent);
3235 orch.add_node(child);
3236 orch.add_dependency("iface", "impl", "dep").unwrap();
3237
3238 let idx = orch.node_indices["impl"];
3239 assert!(!orch.check_seal_prerequisites(idx));
3241 assert!(orch.blocked_dependencies.is_empty());
3242 }
3243
    /// Unblocking a parent clears its entries from `blocked_dependencies`.
    #[tokio::test]
    async fn test_unblock_dependents() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
        let parent = SRBNNode::new("parent".into(), "g".into(), ModelTier::Actuator);
        let child = SRBNNode::new("child".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(parent);
        orch.add_node(child);

        orch.blocked_dependencies
            .push(perspt_core::types::BlockedDependency::new(
                "child",
                "parent",
                vec!["src/api.rs".into()],
            ));
        assert_eq!(orch.blocked_dependencies.len(), 1);

        let idx = orch.node_indices["parent"];
        orch.unblock_dependents(idx);
        assert!(orch.blocked_dependencies.is_empty());
    }

    /// Flushing a failed parent's descendants must not panic; children with
    /// branches are flushed, branch-less descendants are skipped.
    #[tokio::test]
    async fn test_flush_descendant_branches() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_phase6_flush"));
        orch.context.session_id = "test_session".into();

        let parent = SRBNNode::new("parent".into(), "g".into(), ModelTier::Actuator);
        let mut child1 = SRBNNode::new("child1".into(), "g".into(), ModelTier::Actuator);
        child1.provisional_branch_id = Some("branch_c1".into());
        let mut child2 = SRBNNode::new("child2".into(), "g".into(), ModelTier::Actuator);
        child2.provisional_branch_id = Some("branch_c2".into());
        let grandchild = SRBNNode::new("grandchild".into(), "g".into(), ModelTier::Actuator);
        orch.add_node(parent);
        orch.add_node(child1);
        orch.add_node(child2);
        orch.add_node(grandchild);
        orch.add_dependency("parent", "child1", "dep").unwrap();
        orch.add_dependency("parent", "child2", "dep").unwrap();
        orch.add_dependency("child1", "grandchild", "dep").unwrap();

        let idx = orch.node_indices["parent"];
        orch.flush_descendant_branches(idx);
    }
3290
3291 #[tokio::test]
3296 async fn test_effective_working_dir_no_branch() {
3297 let orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/test/workspace"));
3298 let mut orch = orch;
3300 let node = SRBNNode::new("n1".into(), "goal".into(), ModelTier::Actuator);
3301 orch.add_node(node);
3302 let idx = orch.node_indices["n1"];
3303 assert_eq!(
3305 orch.effective_working_dir(idx),
3306 PathBuf::from("/test/workspace")
3307 );
3308 }
3309
3310 #[tokio::test]
3311 async fn test_sandbox_dir_for_node_none_without_branch() {
3312 let orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/test/workspace"));
3313 let mut orch = orch;
3314 let node = SRBNNode::new("n1".into(), "goal".into(), ModelTier::Actuator);
3315 orch.add_node(node);
3316 let idx = orch.node_indices["n1"];
3317 assert!(orch.sandbox_dir_for_node(idx).is_none());
3318 }
3319
3320 #[tokio::test]
3321 async fn test_rewrite_churn_guardrail() {
3322 let orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_churn"));
3323 let mut orch = orch;
3324 let node = SRBNNode::new("node_a".into(), "goal".into(), ModelTier::Actuator);
3325 orch.add_node(node);
3326 let count = orch.count_lineage_rewrites("node_a");
3328 assert_eq!(count, 0);
3329 }
3330
    /// Resuming a run where every node is already terminal should return Ok
    /// without doing any work.
    #[tokio::test]
    async fn test_run_resumed_skips_terminal_nodes() {
        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_resume"));

        let mut n1 = SRBNNode::new("done".into(), "completed".into(), ModelTier::Actuator);
        n1.state = NodeState::Completed;
        let mut n2 = SRBNNode::new("failed".into(), "failed".into(), ModelTier::Actuator);
        n2.state = NodeState::Failed;
        orch.add_node(n1);
        orch.add_node(n2);

        let result = orch.run_resumed().await;
        assert!(result.is_ok());
    }

    /// Persisting a review decision must not panic, even without a real
    /// ledger-backed session.
    #[tokio::test]
    async fn test_persist_review_decision_no_panic() {
        let orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_review"));
        orch.persist_review_decision("node_x", "approved", None);
    }
3354
    /// An Implementation child whose Interface parent has no structural
    /// digest in the restriction map is flagged as prose-only.
    #[tokio::test]
    async fn test_check_structural_dependencies_blocks_prose_only() {
        use perspt_core::types::{NodeClass, RestrictionMap};

        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_struct_dep"));

        let mut parent = SRBNNode::new("iface_1".into(), "Define API".into(), ModelTier::Architect);
        parent.node_class = NodeClass::Interface;

        let mut child = SRBNNode::new("impl_1".into(), "Implement API".into(), ModelTier::Actuator);
        child.node_class = NodeClass::Implementation;

        let parent_idx = orch.add_node(parent);
        let child_idx = orch.add_node(child.clone());
        orch.graph
            .add_edge(parent_idx, child_idx, Dependency { kind: "dep".into() });

        // Empty restriction map: no structural digest for the parent.
        let rmap = RestrictionMap::for_node("impl_1");
        let gaps = orch.check_structural_dependencies(&child, &rmap);

        assert_eq!(gaps.len(), 1);
        assert_eq!(gaps[0].0, "iface_1");
        assert!(gaps[0].1.contains("no Signature/Schema/InterfaceSeal"));
    }

    /// Once a Signature digest from the Interface parent is present, the
    /// check reports no gaps.
    #[tokio::test]
    async fn test_check_structural_dependencies_passes_with_digest() {
        use perspt_core::types::{ArtifactKind, NodeClass, RestrictionMap, StructuralDigest};

        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_struct_ok"));

        let mut parent = SRBNNode::new("iface_2".into(), "Define API".into(), ModelTier::Architect);
        parent.node_class = NodeClass::Interface;

        let mut child = SRBNNode::new("impl_2".into(), "Implement API".into(), ModelTier::Actuator);
        child.node_class = NodeClass::Implementation;

        let parent_idx = orch.add_node(parent);
        let child_idx = orch.add_node(child.clone());
        orch.graph
            .add_edge(parent_idx, child_idx, Dependency { kind: "dep".into() });

        let mut rmap = RestrictionMap::for_node("impl_2");
        rmap.structural_digests.push(StructuralDigest::from_content(
            "iface_2",
            "api.rs",
            ArtifactKind::Signature,
            b"fn do_thing(x: i32) -> bool;",
        ));

        let gaps = orch.check_structural_dependencies(&child, &rmap);
        assert!(gaps.is_empty(), "Expected no gaps when digest present");
    }

    /// The structural-dependency check applies only to Implementation nodes.
    #[tokio::test]
    async fn test_check_structural_dependencies_skips_non_implementation() {
        use perspt_core::types::{NodeClass, RestrictionMap};

        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_struct_skip"));

        let mut node = SRBNNode::new("integ_1".into(), "Wire modules".into(), ModelTier::Actuator);
        node.node_class = NodeClass::Integration;
        orch.add_node(node.clone());

        let rmap = RestrictionMap::for_node("integ_1");
        let gaps = orch.check_structural_dependencies(&node, &rmap);
        assert!(gaps.is_empty(), "Integration nodes should skip the check");
    }
3432
    /// Default models per tier should not collapse to one model.
    #[tokio::test]
    async fn test_tier_default_models_are_differentiated() {
        let arch = ModelTier::Architect.default_model();
        let act = ModelTier::Actuator.default_model();
        let spec = ModelTier::Speculator.default_model();

        assert_ne!(arch, act, "Architect and Actuator defaults should differ");
        assert_ne!(spec, arch, "Speculator should differ from Architect");
    }

    /// Explicitly configured models for all four tiers are stored as given.
    #[tokio::test]
    async fn test_orchestrator_stores_all_four_tier_models() {
        let orch = SRBNOrchestrator::new_with_models(
            PathBuf::from("/tmp/test_tiers"),
            false,
            Some("arch-model".into()),
            Some("act-model".into()),
            Some("ver-model".into()),
            Some("spec-model".into()),
            None,
            None,
            None,
            None,
        );
        assert_eq!(orch.architect_model, "arch-model");
        assert_eq!(orch.actuator_model, "act-model");
        assert_eq!(orch.verifier_model, "ver-model");
        assert_eq!(orch.speculator_model, "spec-model");
    }

    /// Without explicit configuration, each tier falls back to its default.
    #[tokio::test]
    async fn test_orchestrator_default_tier_models() {
        let orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_tier_defaults"));
        assert_eq!(orch.architect_model, ModelTier::Architect.default_model());
        assert_eq!(orch.actuator_model, ModelTier::Actuator.default_model());
        assert_eq!(orch.verifier_model, ModelTier::Verifier.default_model());
        assert_eq!(orch.speculator_model, ModelTier::Speculator.default_model());
    }
3478
    /// Two planned tasks claiming the same output file must be rejected, and
    /// the error must name the offending file.
    #[tokio::test]
    async fn test_create_nodes_rejects_duplicate_output_files() {
        use perspt_core::types::PlannedTask;

        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_dup_outputs"));

        let plan = TaskPlan {
            tasks: vec![
                PlannedTask {
                    id: "task_1".into(),
                    goal: "Create math".into(),
                    output_files: vec!["src/math.py".into(), "tests/test_math.py".into()],
                    ..PlannedTask::new("task_1", "Create math")
                },
                PlannedTask {
                    id: "task_2".into(),
                    goal: "Create tests".into(),
                    // Overlaps with task_1's second output on purpose.
                    output_files: vec!["tests/test_math.py".into()],
                    ..PlannedTask::new("task_2", "Create tests")
                },
            ],
        };

        let result = orch.create_nodes_from_plan(&plan);
        assert!(result.is_err(), "Should reject duplicate output_files");
        let err = result.unwrap_err().to_string();
        assert!(
            err.contains("tests/test_math.py"),
            "Error should mention the duplicate file: {}",
            err
        );
    }

    /// A plan with disjoint output files creates one node per task.
    #[tokio::test]
    async fn test_create_nodes_accepts_unique_output_files() {
        use perspt_core::types::PlannedTask;

        let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_unique_outputs"));

        let plan = TaskPlan {
            tasks: vec![
                PlannedTask {
                    id: "task_1".into(),
                    goal: "Create math".into(),
                    output_files: vec!["src/math.py".into()],
                    ..PlannedTask::new("task_1", "Create math")
                },
                PlannedTask {
                    id: "test_1".into(),
                    goal: "Test math".into(),
                    output_files: vec!["tests/test_math.py".into()],
                    dependencies: vec!["task_1".into()],
                    ..PlannedTask::new("test_1", "Test math")
                },
            ],
        };

        let result = orch.create_nodes_from_plan(&plan);
        assert!(result.is_ok(), "Should accept unique output_files");
        assert_eq!(orch.graph.node_count(), 2);
    }
3540
3541 #[tokio::test]
3542 async fn test_ownership_manifest_built_with_majority_plugin_vote() {
3543 use perspt_core::types::PlannedTask;
3544
3545 let mut orch = SRBNOrchestrator::new_for_testing(PathBuf::from("/tmp/test_plugin_vote"));
3546
3547 let plan = TaskPlan {
3548 tasks: vec![PlannedTask {
3549 id: "task_1".into(),
3550 goal: "Create Python module".into(),
3551 output_files: vec![
3552 "src/main.py".into(),
3553 "src/helper.py".into(),
3554 "src/__init__.py".into(),
3555 ],
3556 ..PlannedTask::new("task_1", "Create Python module")
3557 }],
3558 };
3559
3560 orch.create_nodes_from_plan(&plan).unwrap();
3561
3562 assert_eq!(orch.context.ownership_manifest.len(), 3);
3564 let idx = orch.node_indices["task_1"];
3566 assert_eq!(orch.graph[idx].owner_plugin, "python");
3567 }
3568
3569 #[tokio::test]
3570 async fn test_apply_bundle_strips_paths_outside_node_output_targets() {
3571 use perspt_core::types::{ArtifactBundle, ArtifactOperation, PlannedTask};
3572
3573 let temp_dir = std::env::temp_dir().join(format!(
3574 "perspt_bundle_target_guard_{}",
3575 uuid::Uuid::new_v4()
3576 ));
3577 std::fs::create_dir_all(temp_dir.join("src")).unwrap();
3578
3579 let mut orch = SRBNOrchestrator::new_for_testing(temp_dir.clone());
3580 let plan = TaskPlan {
3581 tasks: vec![
3582 PlannedTask {
3583 id: "validate_module".into(),
3584 goal: "Create validation module".into(),
3585 output_files: vec!["src/validate.rs".into()],
3586 ..PlannedTask::new("validate_module", "Create validation module")
3587 },
3588 PlannedTask {
3589 id: "lib_module".into(),
3590 goal: "Export validation module".into(),
3591 output_files: vec!["src/lib.rs".into()],
3592 dependencies: vec!["validate_module".into()],
3593 ..PlannedTask::new("lib_module", "Export validation module")
3594 },
3595 ],
3596 };
3597
3598 orch.create_nodes_from_plan(&plan).unwrap();
3599
3600 let bundle = ArtifactBundle {
3601 artifacts: vec![
3602 ArtifactOperation::Write {
3603 path: "src/validate.rs".into(),
3604 content: "pub fn ok() {}".into(),
3605 },
3606 ArtifactOperation::Write {
3607 path: "src/lib.rs".into(),
3608 content: "pub mod validate;".into(),
3609 },
3610 ],
3611 commands: vec![],
3612 };
3613
3614 orch.apply_bundle_transactionally(
3617 &bundle,
3618 "validate_module",
3619 perspt_core::types::NodeClass::Implementation,
3620 )
3621 .await
3622 .expect("Should apply valid artifacts after stripping undeclared paths");
3623
3624 assert!(temp_dir.join("src/validate.rs").exists());
3626 assert!(!temp_dir.join("src/lib.rs").exists());
3628 }
3629
3630 #[tokio::test]
3631 async fn test_apply_bundle_keeps_legal_support_file() {
3632 use perspt_core::types::{ArtifactBundle, ArtifactOperation, PlannedTask};
3633
3634 let temp_dir = std::env::temp_dir().join(format!(
3635 "perspt_bundle_support_file_{}",
3636 uuid::Uuid::new_v4()
3637 ));
3638 std::fs::create_dir_all(temp_dir.join("src")).unwrap();
3639
3640 let mut orch = SRBNOrchestrator::new_for_testing(temp_dir.clone());
3641 let plan = TaskPlan {
3642 tasks: vec![PlannedTask {
3643 id: "main_module".into(),
3644 goal: "Create Rust main".into(),
3645 output_files: vec!["src/main.rs".into()],
3646 ..PlannedTask::new("main_module", "Create Rust main")
3647 }],
3648 };
3649 orch.create_nodes_from_plan(&plan).unwrap();
3650
3651 let bundle = ArtifactBundle {
3652 artifacts: vec![
3653 ArtifactOperation::Write {
3654 path: "src/main.rs".into(),
3655 content: "fn main() {}".into(),
3656 },
3657 ArtifactOperation::Write {
3658 path: "build.rs".into(),
3659 content: "fn main() {}".into(),
3660 },
3661 ],
3662 commands: vec![],
3663 };
3664
3665 orch.apply_bundle_transactionally(
3666 &bundle,
3667 "main_module",
3668 perspt_core::types::NodeClass::Implementation,
3669 )
3670 .await
3671 .expect("legal support files should survive semantic filtering");
3672
3673 assert!(temp_dir.join("src/main.rs").exists());
3674 assert!(temp_dir.join("build.rs").exists());
3675 let _ = std::fs::remove_dir_all(&temp_dir);
3676 }
3677
3678 #[tokio::test]
3679 async fn test_apply_bundle_denies_root_manifest_mutation() {
3680 use perspt_core::types::{ArtifactBundle, ArtifactOperation, PlannedTask};
3681
3682 let temp_dir = std::env::temp_dir().join(format!(
3683 "perspt_bundle_manifest_policy_{}",
3684 uuid::Uuid::new_v4()
3685 ));
3686 std::fs::create_dir_all(temp_dir.join("src")).unwrap();
3687
3688 let mut orch = SRBNOrchestrator::new_for_testing(temp_dir.clone());
3689 let plan = TaskPlan {
3690 tasks: vec![PlannedTask {
3691 id: "main_module".into(),
3692 goal: "Create Rust main".into(),
3693 output_files: vec!["src/main.rs".into()],
3694 ..PlannedTask::new("main_module", "Create Rust main")
3695 }],
3696 };
3697 orch.create_nodes_from_plan(&plan).unwrap();
3698
3699 let bundle = ArtifactBundle {
3700 artifacts: vec![
3701 ArtifactOperation::Write {
3702 path: "src/main.rs".into(),
3703 content: "fn main() {}".into(),
3704 },
3705 ArtifactOperation::Write {
3706 path: "Cargo.toml".into(),
3707 content: "[package]\nname = \"bad\"\n".into(),
3708 },
3709 ],
3710 commands: vec![],
3711 };
3712
3713 orch.apply_bundle_transactionally(
3714 &bundle,
3715 "main_module",
3716 perspt_core::types::NodeClass::Implementation,
3717 )
3718 .await
3719 .expect("declared artifact should still apply after denied manifest is stripped");
3720
3721 assert!(temp_dir.join("src/main.rs").exists());
3722 assert!(!temp_dir.join("Cargo.toml").exists());
3723 let _ = std::fs::remove_dir_all(&temp_dir);
3724 }
3725
3726 #[tokio::test]
3727 async fn test_apply_bundle_writes_into_branch_sandbox() {
3728 use perspt_core::types::{ArtifactBundle, ArtifactOperation, PlannedTask};
3729
3730 let temp_dir = std::env::temp_dir().join(format!(
3731 "perspt_branch_sandbox_write_{}",
3732 uuid::Uuid::new_v4()
3733 ));
3734 std::fs::create_dir_all(temp_dir.join("src")).unwrap();
3735 std::fs::write(temp_dir.join("src/lib.rs"), "pub fn old() {}\n").unwrap();
3736
3737 let mut orch = SRBNOrchestrator::new_for_testing(temp_dir.clone());
3738 orch.context.session_id = uuid::Uuid::new_v4().to_string();
3739
3740 let plan = TaskPlan {
3741 tasks: vec![
3742 PlannedTask {
3743 id: "parent".into(),
3744 goal: "Parent node".into(),
3745 output_files: vec!["src/lib.rs".into()],
3746 ..PlannedTask::new("parent", "Parent node")
3747 },
3748 PlannedTask {
3749 id: "child".into(),
3750 goal: "Child node".into(),
3751 context_files: vec!["src/lib.rs".into()],
3752 output_files: vec!["src/child.rs".into()],
3753 dependencies: vec!["parent".into()],
3754 ..PlannedTask::new("child", "Child node")
3755 },
3756 ],
3757 };
3758
3759 orch.create_nodes_from_plan(&plan).unwrap();
3760 let child_idx = orch.node_indices["child"];
3761 let branch_id = orch.maybe_create_provisional_branch(child_idx).unwrap();
3762 let sandbox_dir = orch.sandbox_dir_for_node(child_idx).unwrap();
3763
3764 let bundle = ArtifactBundle {
3765 artifacts: vec![ArtifactOperation::Write {
3766 path: "src/child.rs".into(),
3767 content: "pub fn child() {}\n".into(),
3768 }],
3769 commands: vec![],
3770 };
3771
3772 orch.apply_bundle_transactionally(
3773 &bundle,
3774 "child",
3775 perspt_core::types::NodeClass::Implementation,
3776 )
3777 .await
3778 .unwrap();
3779
3780 assert!(sandbox_dir.join("src/child.rs").exists());
3781 assert!(!temp_dir.join("src/child.rs").exists());
3782
3783 orch.merge_provisional_branch(&branch_id, child_idx);
3784 }
3785
3786 #[test]
3787 fn test_verification_stages_for_node_classes() {
3788 use perspt_core::plugin::VerifierStage;
3789
3790 let interface_node =
3792 SRBNNode::new("iface".into(), "Define trait".into(), ModelTier::Actuator);
3793 let mut interface_node = interface_node;
3795 interface_node.node_class = perspt_core::types::NodeClass::Interface;
3796 let stages = verification_stages_for_node(&interface_node);
3797 assert_eq!(stages, vec![VerifierStage::SyntaxCheck]);
3798
3799 let mut implementation_node = SRBNNode::new(
3801 "impl".into(),
3802 "Implement feature".into(),
3803 ModelTier::Actuator,
3804 );
3805 implementation_node.node_class = perspt_core::types::NodeClass::Implementation;
3806 let stages = verification_stages_for_node(&implementation_node);
3807 assert_eq!(
3808 stages,
3809 vec![VerifierStage::SyntaxCheck, VerifierStage::Build]
3810 );
3811
3812 implementation_node
3814 .contract
3815 .weighted_tests
3816 .push(perspt_core::types::WeightedTest {
3817 test_name: "test_feature".into(),
3818 criticality: perspt_core::types::Criticality::High,
3819 });
3820 let stages = verification_stages_for_node(&implementation_node);
3821 assert_eq!(
3822 stages,
3823 vec![
3824 VerifierStage::SyntaxCheck,
3825 VerifierStage::Build,
3826 VerifierStage::Test
3827 ]
3828 );
3829
3830 let mut integration_node =
3832 SRBNNode::new("test".into(), "Verify feature".into(), ModelTier::Actuator);
3833 integration_node.node_class = perspt_core::types::NodeClass::Integration;
3834 integration_node
3835 .contract
3836 .weighted_tests
3837 .push(perspt_core::types::WeightedTest {
3838 test_name: "test_feature".into(),
3839 criticality: perspt_core::types::Criticality::High,
3840 });
3841 let stages = verification_stages_for_node(&integration_node);
3842 assert_eq!(
3843 stages,
3844 vec![
3845 VerifierStage::SyntaxCheck,
3846 VerifierStage::Build,
3847 VerifierStage::Test,
3848 VerifierStage::Lint,
3849 ]
3850 );
3851 }
3852
3853 #[tokio::test]
3858 async fn test_classify_workspace_empty_dir() {
3859 let temp = tempfile::tempdir().unwrap();
3860 let orch = SRBNOrchestrator::new_for_testing(temp.path().to_path_buf());
3861 let state = orch.classify_workspace("build a web app");
3862 assert!(matches!(state, WorkspaceState::Greenfield { .. }));
3864 }
3865
3866 #[tokio::test]
3867 async fn test_classify_workspace_empty_dir_no_lang() {
3868 let temp = tempfile::tempdir().unwrap();
3869 let orch = SRBNOrchestrator::new_for_testing(temp.path().to_path_buf());
3870 let state = orch.classify_workspace("do something");
3871 match state {
3873 WorkspaceState::Greenfield { inferred_lang } => assert!(inferred_lang.is_none()),
3874 _ => panic!("expected Greenfield, got {:?}", state),
3875 }
3876 }
3877
3878 #[tokio::test]
3879 async fn test_classify_workspace_existing_rust_project() {
3880 let temp = tempfile::tempdir().unwrap();
3881 std::fs::write(
3883 temp.path().join("Cargo.toml"),
3884 "[package]\nname = \"test\"\nversion = \"0.1.0\"",
3885 )
3886 .unwrap();
3887 let orch = SRBNOrchestrator::new_for_testing(temp.path().to_path_buf());
3888 let state = orch.classify_workspace("add a feature");
3889 match state {
3890 WorkspaceState::ExistingProject { plugins } => {
3891 assert!(plugins.contains(&"rust".to_string()));
3892 }
3893 _ => panic!("expected ExistingProject, got {:?}", state),
3894 }
3895 }
3896
3897 #[tokio::test]
3898 async fn test_classify_workspace_existing_python_project() {
3899 let temp = tempfile::tempdir().unwrap();
3900 std::fs::write(
3901 temp.path().join("pyproject.toml"),
3902 "[project]\nname = \"test\"",
3903 )
3904 .unwrap();
3905 let orch = SRBNOrchestrator::new_for_testing(temp.path().to_path_buf());
3906 let state = orch.classify_workspace("add a feature");
3907 match state {
3908 WorkspaceState::ExistingProject { plugins } => {
3909 assert!(plugins.contains(&"python".to_string()));
3910 }
3911 _ => panic!("expected ExistingProject, got {:?}", state),
3912 }
3913 }
3914
3915 #[tokio::test]
3916 async fn test_classify_workspace_existing_js_project() {
3917 let temp = tempfile::tempdir().unwrap();
3918 std::fs::write(temp.path().join("package.json"), "{}").unwrap();
3919 let orch = SRBNOrchestrator::new_for_testing(temp.path().to_path_buf());
3920 let state = orch.classify_workspace("add auth");
3921 match state {
3922 WorkspaceState::ExistingProject { plugins } => {
3923 assert!(plugins.contains(&"javascript".to_string()));
3924 }
3925 _ => panic!("expected ExistingProject, got {:?}", state),
3926 }
3927 }
3928
3929 #[tokio::test]
3930 async fn test_classify_workspace_ambiguous_with_misc_files() {
3931 let temp = tempfile::tempdir().unwrap();
3932 std::fs::write(temp.path().join("notes.txt"), "hello").unwrap();
3934 std::fs::write(temp.path().join("data.csv"), "a,b,c").unwrap();
3935 let orch = SRBNOrchestrator::new_for_testing(temp.path().to_path_buf());
3936 let state = orch.classify_workspace("do something");
3937 assert!(matches!(state, WorkspaceState::Ambiguous));
3938 }
3939
3940 #[tokio::test]
3941 async fn test_classify_workspace_greenfield_with_rust_task() {
3942 let temp = tempfile::tempdir().unwrap();
3943 let orch = SRBNOrchestrator::new_for_testing(temp.path().to_path_buf());
3944 let state = orch.classify_workspace("create a rust CLI tool");
3945 match state {
3946 WorkspaceState::Greenfield { inferred_lang } => {
3947 assert_eq!(inferred_lang, Some("rust".to_string()));
3948 }
3949 _ => panic!("expected Greenfield, got {:?}", state),
3950 }
3951 }
3952
3953 #[tokio::test]
3954 async fn test_classify_workspace_greenfield_with_python_task() {
3955 let temp = tempfile::tempdir().unwrap();
3956 let orch = SRBNOrchestrator::new_for_testing(temp.path().to_path_buf());
3957 let state = orch.classify_workspace("build a python flask API");
3958 match state {
3959 WorkspaceState::Greenfield { inferred_lang } => {
3960 assert_eq!(inferred_lang, Some("python".to_string()));
3961 }
3962 _ => panic!("expected Greenfield, got {:?}", state),
3963 }
3964 }
3965
3966 #[tokio::test]
3971 async fn test_check_prerequisites_returns_true_when_tools_available() {
3972 let orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
3973 let registry = perspt_core::plugin::PluginRegistry::new();
3974 if let Some(plugin) = registry.get("rust") {
3976 let result = orch.check_tool_prerequisites(plugin);
3977 let _ = result;
3980 }
3981 }
3982
3983 #[test]
3984 fn test_required_binaries_rust_includes_cargo() {
3985 let registry = perspt_core::plugin::PluginRegistry::new();
3986 let plugin = registry.get("rust").unwrap();
3987 let bins = plugin.required_binaries();
3988 assert!(bins.iter().any(|(name, _, _)| *name == "cargo"));
3989 assert!(bins.iter().any(|(name, _, _)| *name == "rustc"));
3990 }
3991
3992 #[test]
3993 fn test_required_binaries_python_includes_uv() {
3994 let registry = perspt_core::plugin::PluginRegistry::new();
3995 let plugin = registry.get("python").unwrap();
3996 let bins = plugin.required_binaries();
3997 assert!(bins.iter().any(|(name, _, _)| *name == "uv"));
3998 assert!(bins.iter().any(|(name, _, _)| *name == "python3"));
3999 }
4000
4001 #[test]
4002 fn test_required_binaries_js_includes_node() {
4003 let registry = perspt_core::plugin::PluginRegistry::new();
4004 let plugin = registry.get("javascript").unwrap();
4005 let bins = plugin.required_binaries();
4006 assert!(bins.iter().any(|(name, _, _)| *name == "node"));
4007 assert!(bins.iter().any(|(name, _, _)| *name == "npm"));
4008 }
4009
4010 #[tokio::test]
4015 async fn test_fallback_defaults_to_none_without_explicit_config() {
4016 let orch = SRBNOrchestrator::new_for_testing(PathBuf::from("."));
4017 assert!(orch.architect_fallback_model.is_none());
4018 assert!(orch.actuator_fallback_model.is_none());
4019 assert!(orch.verifier_fallback_model.is_none());
4020 assert!(orch.speculator_fallback_model.is_none());
4021 }
4022
4023 #[tokio::test]
4024 async fn test_explicit_fallback_stored_correctly() {
4025 let orch = SRBNOrchestrator::new_with_models(
4026 PathBuf::from("/tmp/test_fallback"),
4027 false,
4028 None,
4029 None,
4030 None,
4031 None,
4032 Some("gpt-4o".into()),
4033 Some("gpt-4o-mini".into()),
4034 Some("gpt-4o".into()),
4035 Some("gpt-4o-mini".into()),
4036 );
4037 assert_eq!(orch.architect_fallback_model, Some("gpt-4o".to_string()));
4038 assert_eq!(
4039 orch.actuator_fallback_model,
4040 Some("gpt-4o-mini".to_string())
4041 );
4042 assert_eq!(orch.verifier_fallback_model, Some("gpt-4o".to_string()));
4043 assert_eq!(
4044 orch.speculator_fallback_model,
4045 Some("gpt-4o-mini".to_string())
4046 );
4047 }
4048
4049 #[tokio::test]
4050 async fn test_per_tier_models_independent() {
4051 let orch = SRBNOrchestrator::new_with_models(
4052 PathBuf::from("/tmp/test_tiers_independent"),
4053 false,
4054 Some("arch".into()),
4055 Some("act".into()),
4056 Some("ver".into()),
4057 Some("spec".into()),
4058 None,
4059 None,
4060 None,
4061 None,
4062 );
4063 assert_ne!(orch.architect_model, orch.actuator_model);
4065 assert_ne!(orch.verifier_model, orch.speculator_model);
4066 }
4067
4068 #[test]
4073 fn test_extract_missing_python_modules_basic() {
4074 let output = r#"
4075FAILED tests/test_core.py::TestPipeline::test_run - ModuleNotFoundError: No module named 'httpx'
4076E ModuleNotFoundError: No module named 'pydantic'
4077ImportError: No module named 'pyarrow'
4078"#;
4079 let mut missing = SRBNOrchestrator::extract_missing_python_modules(output);
4080 missing.sort();
4081 assert_eq!(missing, vec!["httpx", "pyarrow", "pydantic"]);
4082 }
4083
4084 #[test]
4085 fn test_extract_missing_python_modules_subpackage() {
4086 let output = "ModuleNotFoundError: No module named 'foo.bar.baz'";
4087 let missing = SRBNOrchestrator::extract_missing_python_modules(output);
4088 assert_eq!(missing, vec!["foo"]);
4089 }
4090
4091 #[test]
4092 fn test_extract_missing_python_modules_stdlib_filtered() {
4093 let output = r#"
4094ModuleNotFoundError: No module named 'numpy'
4095ModuleNotFoundError: No module named 'os'
4096ModuleNotFoundError: No module named 'json'
4097"#;
4098 let missing = SRBNOrchestrator::extract_missing_python_modules(output);
4099 assert_eq!(missing, vec!["numpy"]);
4100 }
4101
4102 #[test]
4103 fn test_extract_missing_python_modules_empty() {
4104 let output = "All tests passed!\n3 passed in 0.5s";
4105 let missing = SRBNOrchestrator::extract_missing_python_modules(output);
4106 assert!(missing.is_empty());
4107 }
4108
4109 #[test]
4110 fn test_python_import_to_package_mapping() {
4111 assert_eq!(SRBNOrchestrator::python_import_to_package("PIL"), "pillow");
4112 assert_eq!(SRBNOrchestrator::python_import_to_package("yaml"), "pyyaml");
4113 assert_eq!(
4114 SRBNOrchestrator::python_import_to_package("cv2"),
4115 "opencv-python"
4116 );
4117 assert_eq!(
4118 SRBNOrchestrator::python_import_to_package("sklearn"),
4119 "scikit-learn"
4120 );
4121 assert_eq!(
4122 SRBNOrchestrator::python_import_to_package("bs4"),
4123 "beautifulsoup4"
4124 );
4125 assert_eq!(SRBNOrchestrator::python_import_to_package("httpx"), "httpx");
4127 assert_eq!(
4128 SRBNOrchestrator::python_import_to_package("fastapi"),
4129 "fastapi"
4130 );
4131 }
4132
4133 #[test]
4134 fn test_normalize_command_to_uv_pip_install() {
4135 assert_eq!(
4136 SRBNOrchestrator::normalize_command_to_uv("pip install httpx"),
4137 "uv add httpx"
4138 );
4139 assert_eq!(
4140 SRBNOrchestrator::normalize_command_to_uv("pip3 install httpx pydantic"),
4141 "uv add httpx pydantic"
4142 );
4143 assert_eq!(
4144 SRBNOrchestrator::normalize_command_to_uv("python -m pip install requests"),
4145 "uv add requests"
4146 );
4147 assert_eq!(
4148 SRBNOrchestrator::normalize_command_to_uv("python3 -m pip install flask"),
4149 "uv add flask"
4150 );
4151 }
4152
4153 #[test]
4154 fn test_normalize_command_to_uv_requirements_file() {
4155 assert_eq!(
4156 SRBNOrchestrator::normalize_command_to_uv("pip install -r requirements.txt"),
4157 "uv pip install -r requirements.txt"
4158 );
4159 }
4160
4161 #[test]
4162 fn test_normalize_command_to_uv_passthrough() {
4163 assert_eq!(
4165 SRBNOrchestrator::normalize_command_to_uv("uv add httpx"),
4166 "uv add httpx"
4167 );
4168 assert_eq!(
4170 SRBNOrchestrator::normalize_command_to_uv("cargo add serde"),
4171 "cargo add serde"
4172 );
4173 assert_eq!(
4174 SRBNOrchestrator::normalize_command_to_uv("npm install lodash"),
4175 "npm install lodash"
4176 );
4177 }
4178
4179 #[test]
4180 fn test_extract_commands_from_correction_rust_plugin_policy() {
4181 let response = r#"Here's the fix:
4182Commands:
4183```
4184uv add httpx
4185cargo add serde
4186pip install numpy
4187```
4188File: main.rs
4189```rust
4190use serde;
4191```"#;
4192 let commands = SRBNOrchestrator::extract_commands_from_correction(response, "rust");
4194 assert!(
4195 commands.contains(&"cargo add serde".to_string()),
4196 "{:?}",
4197 commands
4198 );
4199 assert!(
4200 !commands.contains(&"uv add httpx".to_string()),
4201 "Rust plugin should deny uv commands: {:?}",
4202 commands
4203 );
4204 assert!(
4205 !commands.contains(&"pip install numpy".to_string()),
4206 "Rust plugin should deny pip commands: {:?}",
4207 commands
4208 );
4209 }
4210
4211 #[test]
4212 fn test_extract_commands_from_correction_python_plugin_policy() {
4213 let response = r#"Commands:
4214```
4215uv add httpx
4216cargo add serde
4217pip install numpy
4218```"#;
4219 let commands = SRBNOrchestrator::extract_commands_from_correction(response, "python");
4221 assert!(
4222 commands.contains(&"uv add httpx".to_string()),
4223 "{:?}",
4224 commands
4225 );
4226 assert!(
4227 commands.contains(&"pip install numpy".to_string()),
4228 "{:?}",
4229 commands
4230 );
4231 assert!(
4232 !commands.contains(&"cargo add serde".to_string()),
4233 "Python plugin should deny cargo commands: {:?}",
4234 commands
4235 );
4236 }
4237
4238 #[test]
4239 fn test_typed_parse_pipeline_multiple_files() {
4240 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
4241 let content = r#"Here are the files:
4242
4243File: src/etl_pipeline/core.py
4244```python
4245def run_pipeline():
4246 pass
4247```
4248
4249File: src/etl_pipeline/validator.py
4250```python
4251def validate(data):
4252 return True
4253```
4254
4255File: tests/test_core.py
4256```python
4257from etl_pipeline.core import run_pipeline
4258
4259def test_run():
4260 run_pipeline()
4261```
4262"#;
4263 let (bundle_opt, state, _) = orch.parse_artifact_bundle_typed(content, "test", 0);
4264 assert!(state.is_ok(), "Expected successful parse, got {}", state);
4265 let bundle = bundle_opt.unwrap();
4266 assert_eq!(bundle.artifacts.len(), 3, "Expected 3 artifacts");
4267 assert_eq!(bundle.artifacts[0].path(), "src/etl_pipeline/core.py");
4268 assert_eq!(bundle.artifacts[1].path(), "src/etl_pipeline/validator.py");
4269 assert_eq!(bundle.artifacts[2].path(), "tests/test_core.py");
4270 }
4271
4272 #[test]
4273 fn test_typed_parse_pipeline_single_file() {
4274 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
4275 let content = r#"File: main.py
4276```python
4277print("hello")
4278```"#;
4279 let (bundle_opt, state, _) = orch.parse_artifact_bundle_typed(content, "test", 0);
4280 assert!(state.is_ok());
4281 let bundle = bundle_opt.unwrap();
4282 assert_eq!(bundle.artifacts.len(), 1);
4283 assert_eq!(bundle.artifacts[0].path(), "main.py");
4284 }
4285
4286 #[test]
4287 fn test_typed_parse_pipeline_mixed_file_and_diff() {
4288 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
4289 let content = r#"File: new_module.py
4290```python
4291def new_fn():
4292 pass
4293```
4294
4295Diff: existing.py
4296```diff
4297--- existing.py
4298+++ existing.py
4299@@ -1 +1,2 @@
4300+import new_module
4301 def old_fn():
4302```"#;
4303 let (bundle_opt, state, _) = orch.parse_artifact_bundle_typed(content, "test", 0);
4304 assert!(state.is_ok());
4305 let bundle = bundle_opt.unwrap();
4306 assert_eq!(bundle.artifacts.len(), 2);
4307 assert_eq!(bundle.artifacts[0].path(), "new_module.py");
4308 assert!(
4309 bundle.artifacts[0].is_write(),
4310 "new_module.py should be a write"
4311 );
4312 assert_eq!(bundle.artifacts[1].path(), "existing.py");
4313 assert!(
4314 bundle.artifacts[1].is_diff(),
4315 "existing.py should be a diff"
4316 );
4317 }
4318
4319 #[test]
4320 fn test_typed_parse_pipeline_legacy_multi_file() {
4321 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
4322 let content = r#"File: core.py
4323```python
4324def core():
4325 pass
4326```
4327
4328File: utils.py
4329```python
4330def util():
4331 pass
4332```"#;
4333 let (bundle_opt, state, _) = orch.parse_artifact_bundle_typed(content, "test", 0);
4334 assert!(state.is_ok(), "Should parse multi-file response");
4335 let bundle = bundle_opt.unwrap();
4336 assert_eq!(bundle.artifacts.len(), 2, "Should have 2 artifacts");
4337 assert_eq!(bundle.artifacts[0].path(), "core.py");
4338 assert_eq!(bundle.artifacts[1].path(), "utils.py");
4339 }
4340
4341 #[test]
4346 fn test_typed_parse_pipeline_structured_json() {
4347 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
4348 let content = r#"Here is the output:
4349```json
4350{
4351 "artifacts": [
4352 {"operation": "write", "path": "src/main.py", "content": "print('hello')"},
4353 {"operation": "diff", "path": "src/lib.py", "patch": "--- a\n+++ b\n@@ -1 +1 @@\n-old\n+new"}
4354 ],
4355 "commands": ["uv add requests"]
4356}
4357```"#;
4358 let (bundle_opt, state, _) = orch.parse_artifact_bundle_typed(content, "test", 0);
4359 assert!(state.is_ok(), "Should parse structured JSON bundle");
4360 let bundle = bundle_opt.unwrap();
4361 assert_eq!(bundle.artifacts.len(), 2);
4362 assert!(bundle.artifacts[0].is_write());
4363 assert_eq!(bundle.artifacts[0].path(), "src/main.py");
4364 assert!(bundle.artifacts[1].is_diff());
4365 assert_eq!(bundle.artifacts[1].path(), "src/lib.py");
4366 assert_eq!(bundle.commands, vec!["uv add requests"]);
4367 }
4368
4369 #[test]
4370 fn test_typed_parse_pipeline_schema_invalid_classified() {
4371 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
4372 let content = r#"```json
4373{"foo":"bar"}
4374```"#;
4375 let (bundle_opt, state, record_opt) = orch.parse_artifact_bundle_typed(content, "test", 1);
4376 assert!(bundle_opt.is_none());
4377 assert!(matches!(
4378 state,
4379 perspt_core::types::ParseResultState::SchemaInvalid
4380 ));
4381 let record = record_opt.expect("schema failure should be recorded");
4382 assert!(matches!(
4383 record.retry_classification,
4384 Some(perspt_core::types::RetryClassification::MalformedRetry)
4385 ));
4386 }
4387
4388 #[test]
4389 fn test_typed_parse_pipeline_semantic_rejection_classified() {
4390 use perspt_core::types::PlannedTask;
4391
4392 let mut orch = SRBNOrchestrator::new_for_testing(std::path::PathBuf::from("/tmp/test"));
4393 let plan = TaskPlan {
4394 tasks: vec![PlannedTask {
4395 id: "parser".into(),
4396 goal: "Create parser".into(),
4397 output_files: vec!["src/parser.rs".into()],
4398 ..PlannedTask::new("parser", "Create parser")
4399 }],
4400 };
4401 orch.create_nodes_from_plan(&plan).unwrap();
4402
4403 let content = r#"```json
4404{
4405 "artifacts": [
4406 {"operation": "write", "path": "src/wrong.rs", "content": "pub fn wrong() {}"}
4407 ],
4408 "commands": []
4409}
4410```"#;
4411 let (bundle_opt, state, record_opt) =
4412 orch.parse_artifact_bundle_typed(content, "parser", 1);
4413 assert!(bundle_opt.is_none());
4414 assert!(matches!(
4415 state,
4416 perspt_core::types::ParseResultState::SemanticallyRejected
4417 ));
4418 let record = record_opt.expect("semantic rejection should be recorded");
4419 assert!(matches!(
4420 record.retry_classification,
4421 Some(perspt_core::types::RetryClassification::Retarget)
4422 ));
4423 }
4424
4425 #[test]
4426 fn test_typed_parse_pipeline_json_empty_path_rejected() {
4427 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
4428 let content = r#"```json
4429{
4430 "artifacts": [
4431 {"operation": "write", "path": "", "content": "bad"}
4432 ],
4433 "commands": []
4434}
4435```"#;
4436 let (bundle_opt, state, _) = orch.parse_artifact_bundle_typed(content, "test", 0);
4437 assert!(
4438 bundle_opt.is_none(),
4439 "Invalid bundle with empty path should be rejected"
4440 );
4441 assert!(
4442 !state.is_ok(),
4443 "Parse state should not be Ok for invalid bundle: {}",
4444 state
4445 );
4446 }
4447
4448 #[test]
4449 fn test_typed_parse_pipeline_json_absolute_path_rejected() {
4450 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
4451 let content = r#"```json
4452{
4453 "artifacts": [
4454 {"operation": "write", "path": "/etc/passwd", "content": "bad"}
4455 ],
4456 "commands": []
4457}
4458```"#;
4459 let (bundle_opt, state, _) = orch.parse_artifact_bundle_typed(content, "test", 0);
4460 assert!(
4461 bundle_opt.is_none(),
4462 "Invalid bundle with absolute path should be rejected"
4463 );
4464 assert!(
4465 !state.is_ok(),
4466 "Parse state should not be Ok for path traversal: {}",
4467 state
4468 );
4469 }
4470
4471 #[test]
4472 fn test_typed_parse_pipeline_returns_no_payload_for_garbage() {
4473 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
4474 let content = "This is just a plain text response with no code blocks at all.";
4475 let (bundle_opt, state, _) = orch.parse_artifact_bundle_typed(content, "test", 0);
4476 assert!(bundle_opt.is_none());
4477 assert!(
4478 matches!(
4479 state,
4480 perspt_core::types::ParseResultState::NoStructuredPayload
4481 ),
4482 "Expected NoStructuredPayload, got {}",
4483 state
4484 );
4485 }
4486
4487 #[tokio::test]
4488 async fn test_effective_working_dir_with_sandbox() {
4489 let temp_dir = std::env::temp_dir().join(format!(
4492 "perspt_eff_workdir_sandbox_{}",
4493 uuid::Uuid::new_v4()
4494 ));
4495 std::fs::create_dir_all(&temp_dir).unwrap();
4496
4497 let mut orch = SRBNOrchestrator::new_for_testing(temp_dir.clone());
4498 orch.context.session_id = "test_session".into();
4499
4500 let parent = SRBNNode::new("root".into(), "root goal".into(), ModelTier::Actuator);
4501 let child = SRBNNode::new("child".into(), "child goal".into(), ModelTier::Actuator);
4502 orch.add_node(parent);
4503 orch.add_node(child);
4504 orch.add_dependency("root", "child", "dep").unwrap();
4505
4506 let child_idx = orch.node_indices["child"];
4507 let branch_id = orch.maybe_create_provisional_branch(child_idx).unwrap();
4508
4509 let sandbox_path = temp_dir
4510 .join(".perspt")
4511 .join("sandboxes")
4512 .join("test_session")
4513 .join(&branch_id);
4514 assert!(sandbox_path.exists(), "Sandbox should have been created");
4515
4516 let eff = orch.effective_working_dir(child_idx);
4518 assert_eq!(eff, sandbox_path);
4519
4520 let _ = std::fs::remove_dir_all(&temp_dir);
4522 }
4523
4524 #[tokio::test]
4525 async fn test_sandbox_dir_for_node_returns_path_when_exists() {
4526 let temp_dir = std::env::temp_dir().join(format!(
4527 "perspt_sandbox_dir_exists_{}",
4528 uuid::Uuid::new_v4()
4529 ));
4530 std::fs::create_dir_all(&temp_dir).unwrap();
4531
4532 let mut orch = SRBNOrchestrator::new_for_testing(temp_dir.clone());
4533 orch.context.session_id = "sess".into();
4534
4535 let parent = SRBNNode::new("p".into(), "g".into(), ModelTier::Actuator);
4536 let child = SRBNNode::new("c".into(), "g".into(), ModelTier::Actuator);
4537 orch.add_node(parent);
4538 orch.add_node(child);
4539 orch.add_dependency("p", "c", "dep").unwrap();
4540
4541 let child_idx = orch.node_indices["c"];
4542 let branch_id = orch.maybe_create_provisional_branch(child_idx).unwrap();
4543
4544 let sandbox = orch.sandbox_dir_for_node(child_idx);
4545 assert!(sandbox.is_some());
4546 let sandbox_path = sandbox.unwrap();
4547 assert!(sandbox_path.ends_with(&branch_id));
4548
4549 let _ = std::fs::remove_dir_all(&temp_dir);
4550 }
4551
4552 #[tokio::test]
4553 async fn test_root_node_bypasses_sandbox() {
4554 let temp_dir =
4557 std::env::temp_dir().join(format!("perspt_root_bypass_{}", uuid::Uuid::new_v4()));
4558 std::fs::create_dir_all(&temp_dir).unwrap();
4559
4560 let mut orch = SRBNOrchestrator::new_for_testing(temp_dir.clone());
4561
4562 let root = SRBNNode::new("root".into(), "root goal".into(), ModelTier::Actuator);
4563 orch.add_node(root);
4564
4565 let root_idx = orch.node_indices["root"];
4566 let branch = orch.maybe_create_provisional_branch(root_idx);
4568 assert!(
4569 branch.is_some(),
4570 "Root node should now get a provisional branch for sandbox isolation"
4571 );
4572
4573 let wd = orch.effective_working_dir(root_idx);
4575 assert_ne!(wd, temp_dir, "Root should use sandbox, not raw workspace");
4576 assert!(wd.to_string_lossy().contains("sandboxes"));
4577
4578 let _ = std::fs::remove_dir_all(&temp_dir);
4579 }
4580
4581 #[tokio::test]
4582 async fn test_step_commit_copies_sandbox_to_workspace() {
4583 use perspt_core::types::{ArtifactBundle, ArtifactOperation, PlannedTask};
4586
4587 let temp_dir =
4588 std::env::temp_dir().join(format!("perspt_commit_copy_{}", uuid::Uuid::new_v4()));
4589 std::fs::create_dir_all(temp_dir.join("src")).unwrap();
4590
4591 let mut orch = SRBNOrchestrator::new_for_testing(temp_dir.clone());
4592 orch.context.session_id = uuid::Uuid::new_v4().to_string();
4593
4594 let plan = TaskPlan {
4595 tasks: vec![
4596 PlannedTask {
4597 id: "parent".into(),
4598 goal: "Parent".into(),
4599 output_files: vec!["src/parent.rs".into()],
4600 ..PlannedTask::new("parent", "Parent")
4601 },
4602 PlannedTask {
4603 id: "child".into(),
4604 goal: "Child".into(),
4605 output_files: vec!["src/child.rs".into()],
4606 dependencies: vec!["parent".into()],
4607 ..PlannedTask::new("child", "Child")
4608 },
4609 ],
4610 };
4611 orch.create_nodes_from_plan(&plan).unwrap();
4612
4613 let child_idx = orch.node_indices["child"];
4614 let _branch_id = orch.maybe_create_provisional_branch(child_idx).unwrap();
4615
4616 let bundle = ArtifactBundle {
4618 artifacts: vec![ArtifactOperation::Write {
4619 path: "src/child.rs".into(),
4620 content: "pub fn child_fn() {}\n".into(),
4621 }],
4622 commands: vec![],
4623 };
4624 orch.apply_bundle_transactionally(
4625 &bundle,
4626 "child",
4627 perspt_core::types::NodeClass::Implementation,
4628 )
4629 .await
4630 .unwrap();
4631
4632 let sandbox = orch.sandbox_dir_for_node(child_idx).unwrap();
4634 assert!(sandbox.join("src/child.rs").exists());
4635 assert!(!temp_dir.join("src/child.rs").exists());
4636
4637 let child_idx = orch.node_indices["child"];
4639 let _ = orch.step_commit(child_idx).await;
4640
4641 assert!(
4643 temp_dir.join("src/child.rs").exists(),
4644 "step_commit should copy sandbox files to workspace"
4645 );
4646 let content = std::fs::read_to_string(temp_dir.join("src/child.rs")).unwrap();
4647 assert_eq!(content, "pub fn child_fn() {}\n");
4648
4649 let _ = std::fs::remove_dir_all(&temp_dir);
4650 }
4651
4652 #[test]
4653 fn test_typed_parse_pipeline_json_path_traversal_rejected() {
4654 let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
4655 let content = r#"```json
4656{
4657 "artifacts": [
4658 {"operation": "write", "path": "../../../etc/shadow", "content": "bad"}
4659 ],
4660 "commands": []
4661}
4662```"#;
4663 let (bundle_opt, state, _) = orch.parse_artifact_bundle_typed(content, "test", 0);
4664 assert!(
4665 bundle_opt.is_none(),
4666 "Invalid bundle with path traversal should be rejected"
4667 );
4668 assert!(
4669 !state.is_ok(),
4670 "Parse state should not be Ok for path traversal: {}",
4671 state
4672 );
4673 }
4674
4675 #[test]
4678 fn test_dependency_expectations_threaded_to_nodes() {
4679 use perspt_core::types::{DependencyExpectation, PlannedTask, TaskPlan};
4680
4681 let mut plan = TaskPlan::new();
4682 let mut t1 = PlannedTask::new("t1", "Create server module");
4683 t1.output_files = vec!["src/server.py".to_string()];
4684 t1.dependency_expectations = DependencyExpectation {
4685 required_packages: vec!["flask".to_string(), "pydantic".to_string()],
4686 setup_commands: vec![],
4687 min_toolchain_version: Some("3.11".to_string()),
4688 };
4689 plan.tasks.push(t1);
4690
4691 let mut orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
4692 orch.create_nodes_from_plan(&plan).unwrap();
4693
4694 let idx = orch.node_indices["t1"];
4696 let node = &orch.graph[idx];
4697 assert_eq!(node.dependency_expectations.required_packages.len(), 2);
4698 assert_eq!(node.dependency_expectations.required_packages[0], "flask");
4699 assert_eq!(
4700 node.dependency_expectations
4701 .min_toolchain_version
4702 .as_deref(),
4703 Some("3.11")
4704 );
4705 }
4706
    #[test]
    fn test_verifier_readiness_gate_no_plugins() {
        // Smoke test: with a freshly constructed orchestrator (no language
        // plugins registered), the verifier readiness gate must be callable
        // without panicking. There is nothing to assert — the test passes by
        // returning normally.
        let orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
        orch.check_verifier_readiness_gate();
    }
4713
4714 #[test]
4715 fn test_architect_prompt_includes_dependency_expectations() {
4716 let ev = perspt_core::types::PromptEvidence {
4717 user_goal: Some("Build a web server".to_string()),
4718 project_summary: Some("empty project".to_string()),
4719 working_dir: Some("/tmp".to_string()),
4720 ..Default::default()
4721 };
4722 let prompt = crate::prompt_compiler::compile(
4723 perspt_core::types::PromptIntent::ArchitectExisting,
4724 &ev,
4725 )
4726 .text;
4727 assert!(
4728 prompt.contains("dependency_expectations"),
4729 "Architect prompt must include dependency_expectations in the JSON schema"
4730 );
4731 assert!(
4732 prompt.contains("required_packages"),
4733 "Architect prompt must mention required_packages"
4734 );
4735 assert!(
4736 prompt.contains("min_toolchain_version"),
4737 "Architect prompt must mention min_toolchain_version"
4738 );
4739 }
4740
4741 #[test]
4744 fn test_budget_gate_stops_execution_when_exhausted() {
4745 let mut orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
4746 orch.set_budget(Some(0), None, None);
4748 assert!(
4749 orch.budget.any_exhausted(),
4750 "Budget with max_steps=0 should be immediately exhausted"
4751 );
4752 }
4753
4754 #[test]
4755 fn test_budget_step_recording() {
4756 let mut budget = perspt_core::types::BudgetEnvelope::new("test-session");
4757 budget.max_steps = Some(3);
4758 assert!(!budget.any_exhausted());
4759 budget.record_step();
4760 budget.record_step();
4761 assert!(!budget.any_exhausted());
4762 budget.record_step();
4763 assert!(budget.steps_exhausted());
4764 assert!(budget.any_exhausted());
4765 }
4766
4767 #[test]
4768 fn test_set_budget_configures_envelope() {
4769 let mut orch = SRBNOrchestrator::new(std::path::PathBuf::from("/tmp/test"), false);
4770 orch.set_budget(Some(10), Some(5), Some(2.50));
4771 assert_eq!(orch.budget.max_steps, Some(10));
4772 assert_eq!(orch.budget.max_revisions, Some(5));
4773 assert_eq!(orch.budget.max_cost_usd, Some(2.50));
4774 assert!(!orch.budget.any_exhausted());
4775 }
4776
4777 #[test]
4778 fn test_node_outcome_equality() {
4779 assert_eq!(NodeOutcome::Completed, NodeOutcome::Completed);
4780 assert_eq!(NodeOutcome::Escalated, NodeOutcome::Escalated);
4781 assert_ne!(NodeOutcome::Completed, NodeOutcome::Escalated);
4782 }
4783
4784 #[test]
4785 fn test_session_outcome_from_counts() {
4786 fn derive_outcome(
4789 completed: usize,
4790 escalated: usize,
4791 total: usize,
4792 ) -> perspt_core::SessionOutcome {
4793 if escalated == 0 && completed >= total {
4794 perspt_core::SessionOutcome::Success
4795 } else if completed > 0 {
4796 perspt_core::SessionOutcome::PartialSuccess
4797 } else {
4798 perspt_core::SessionOutcome::Failed
4799 }
4800 }
4801
4802 assert_eq!(
4804 derive_outcome(3, 0, 3),
4805 perspt_core::SessionOutcome::Success,
4806 );
4807 assert_eq!(
4809 derive_outcome(2, 1, 3),
4810 perspt_core::SessionOutcome::PartialSuccess,
4811 );
4812 assert_eq!(derive_outcome(0, 3, 3), perspt_core::SessionOutcome::Failed,);
4814 assert_eq!(
4816 derive_outcome(5, 0, 20),
4817 perspt_core::SessionOutcome::PartialSuccess,
4818 );
4819 assert_eq!(
4821 derive_outcome(0, 0, 20),
4822 perspt_core::SessionOutcome::Failed,
4823 );
4824 }
4825
4826 #[test]
4827 fn test_resumed_outcome_from_counts() {
4828 fn derive_resumed_outcome(
4831 executed: usize,
4832 escalated: usize,
4833 terminal_count: usize,
4834 total: usize,
4835 ) -> perspt_core::SessionOutcome {
4836 if escalated == 0 && executed + terminal_count >= total {
4837 perspt_core::SessionOutcome::Success
4838 } else if executed > 0 {
4839 perspt_core::SessionOutcome::PartialSuccess
4840 } else {
4841 perspt_core::SessionOutcome::Failed
4842 }
4843 }
4844
4845 assert_eq!(
4847 derive_resumed_outcome(3, 0, 2, 5),
4848 perspt_core::SessionOutcome::Success,
4849 );
4850 assert_eq!(
4852 derive_resumed_outcome(2, 1, 2, 5),
4853 perspt_core::SessionOutcome::PartialSuccess,
4854 );
4855 assert_eq!(
4857 derive_resumed_outcome(1, 0, 2, 5),
4858 perspt_core::SessionOutcome::PartialSuccess,
4859 );
4860 assert_eq!(
4862 derive_resumed_outcome(0, 0, 5, 5),
4863 perspt_core::SessionOutcome::Success,
4864 );
4865 assert_eq!(
4867 derive_resumed_outcome(0, 0, 2, 5),
4868 perspt_core::SessionOutcome::Failed,
4869 );
4870 }
4871
4872 #[test]
4873 fn test_sheaf_pre_check_stub_escalates_after_retry() {
4874 let dir = tempfile::tempdir().unwrap();
4875 let stub_path = dir.path().join("stub.rs");
4876 std::fs::write(&stub_path, "fn main() {\n todo!()\n}\n").unwrap();
4877
4878 let (mut orch, idx) = orch_with_node(dir.path().to_path_buf());
4879 orch.graph[idx]
4880 .output_targets
4881 .push(std::path::PathBuf::from("stub.rs"));
4882 orch.graph[idx].owner_plugin = "rust".to_string();
4883
4884 let first = orch.sheaf_pre_check(idx);
4886 assert!(first.is_some(), "First pre-check should detect stub");
4887
4888 let second = orch.sheaf_pre_check(idx);
4891 assert!(
4892 second.is_some(),
4893 "Final guard should still detect stub after retry"
4894 );
4895 }
4896
4897 fn orch_with_node(
4899 working_dir: std::path::PathBuf,
4900 ) -> (SRBNOrchestrator, petgraph::graph::NodeIndex) {
4901 let mut orch = SRBNOrchestrator::new(working_dir, false);
4902 let node = SRBNNode::new(
4903 "test-node".to_string(),
4904 "test goal".to_string(),
4905 perspt_core::ModelTier::Actuator,
4906 );
4907 let idx = orch.add_node(node);
4908 (orch, idx)
4909 }
4910
    #[test]
    fn test_sheaf_pre_check_passes_when_no_outputs() {
        // A node with no registered output targets has nothing to validate,
        // so the pre-check must report no finding (None).
        let (orch, idx) = orch_with_node(std::path::PathBuf::from("/tmp/test"));
        assert!(orch.sheaf_pre_check(idx).is_none());
    }
4916
4917 #[test]
4918 fn test_sheaf_pre_check_detects_missing_files() {
4919 let (mut orch, idx) = orch_with_node(std::path::PathBuf::from("/tmp/test"));
4920 orch.graph[idx]
4921 .output_targets
4922 .push(std::path::PathBuf::from("nonexistent_file_xyz.rs"));
4923 let result = orch.sheaf_pre_check(idx);
4924 assert!(result.is_some());
4925 assert!(result.unwrap().contains("missing"));
4926 }
4927
4928 #[test]
4929 fn test_sheaf_pre_check_detects_empty_files() {
4930 let dir = tempfile::tempdir().unwrap();
4931 std::fs::File::create(dir.path().join("empty.rs")).unwrap();
4932
4933 let (mut orch, idx) = orch_with_node(dir.path().to_path_buf());
4934 orch.graph[idx]
4935 .output_targets
4936 .push(std::path::PathBuf::from("empty.rs"));
4937 let result = orch.sheaf_pre_check(idx);
4938 assert!(result.is_some());
4939 assert!(result.unwrap().contains("empty"));
4940 }
4941
4942 #[test]
4943 fn test_sheaf_pre_check_passes_for_valid_files() {
4944 let dir = tempfile::tempdir().unwrap();
4945 std::fs::write(dir.path().join("main.rs"), "fn main() {}").unwrap();
4946
4947 let (mut orch, idx) = orch_with_node(dir.path().to_path_buf());
4948 orch.graph[idx]
4949 .output_targets
4950 .push(std::path::PathBuf::from("main.rs"));
4951 assert!(orch.sheaf_pre_check(idx).is_none());
4952 }
4953
4954 #[test]
4955 fn test_v_boot_energy_from_degraded_sensors() {
4956 use perspt_core::types::{
4957 EnergyComponents, SensorStatus, StageOutcome, VerificationResult,
4958 };
4959
4960 let vr = VerificationResult {
4962 syntax_ok: true,
4963 build_ok: true,
4964 tests_ok: true,
4965 lint_ok: true,
4966 diagnostics_count: 0,
4967 tests_passed: 5,
4968 tests_failed: 0,
4969 summary: String::new(),
4970 raw_output: None,
4971 degraded: true,
4972 degraded_reason: Some("test sensor fallback".into()),
4973 stage_outcomes: vec![
4974 StageOutcome {
4975 stage: "syntax_check".into(),
4976 passed: true,
4977 sensor_status: SensorStatus::Available,
4978 output: None,
4979 },
4980 StageOutcome {
4981 stage: "build".into(),
4982 passed: true,
4983 sensor_status: SensorStatus::Fallback {
4984 actual: "cargo check".into(),
4985 reason: "primary not found".into(),
4986 },
4987 output: None,
4988 },
4989 StageOutcome {
4990 stage: "test".into(),
4991 passed: true,
4992 sensor_status: SensorStatus::Unavailable {
4993 reason: "no test runner".into(),
4994 },
4995 output: None,
4996 },
4997 ],
4998 };
4999
5000 let mut energy = EnergyComponents::default();
5002 for so in &vr.stage_outcomes {
5003 match &so.sensor_status {
5004 SensorStatus::Unavailable { .. } => energy.v_boot += 3.0,
5005 SensorStatus::Fallback { .. } => energy.v_boot += 1.0,
5006 SensorStatus::Available => {}
5007 }
5008 }
5009 assert!(
5011 (energy.v_boot - 4.0).abs() < f32::EPSILON,
5012 "Expected V_boot=4.0, got {}",
5013 energy.v_boot
5014 );
5015 }
5016
5017 #[test]
5020 fn test_detect_stub_rust_todo() {
5021 let dir = tempfile::tempdir().unwrap();
5022 let path = dir.path().join("lib.rs");
5023 std::fs::write(&path, "fn main() {\n todo!()\n}\n").unwrap();
5024 let result = detect_stub_content(&path, "rust");
5025 assert!(result.is_some(), "Should detect todo!() stub");
5026 assert!(result.unwrap().contains("todo!()"));
5027 }
5028
5029 #[test]
5030 fn test_detect_stub_rust_unimplemented() {
5031 let dir = tempfile::tempdir().unwrap();
5032 let path = dir.path().join("lib.rs");
5033 std::fs::write(&path, "fn run() {\n unimplemented!()\n}\n").unwrap();
5034 let result = detect_stub_content(&path, "rust");
5035 assert!(result.is_some(), "Should detect unimplemented!() stub");
5036 }
5037
5038 #[test]
5039 fn test_detect_stub_rust_real_code_not_flagged() {
5040 let dir = tempfile::tempdir().unwrap();
5041 let path = dir.path().join("lib.rs");
5042 let real_code = r#"
5043use std::collections::HashMap;
5044
5045fn add(a: i32, b: i32) -> i32 {
5046 a + b
5047}
5048
5049fn multiply(a: i32, b: i32) -> i32 {
5050 a * b
5051}
5052
5053fn compute(data: &[i32]) -> i32 {
5054 data.iter().sum()
5055}
5056
5057fn transform(input: &str) -> String {
5058 input.to_uppercase()
5059}
5060
5061fn process() {
5062 let x = add(1, 2);
5063 let y = multiply(x, 3);
5064 println!("{}", y);
5065 // todo!() in a comment should not trigger
5066}
5067"#;
5068 std::fs::write(&path, real_code).unwrap();
5069 let result = detect_stub_content(&path, "rust");
5070 assert!(
5071 result.is_none(),
5072 "Real code with comment-only todo should not be flagged"
5073 );
5074 }
5075
5076 #[test]
5077 fn test_detect_stub_rust_real_code_with_one_todo_branch() {
5078 let dir = tempfile::tempdir().unwrap();
5079 let path = dir.path().join("lib.rs");
5080 let code = r#"
5081fn add(a: i32, b: i32) -> i32 { a + b }
5082fn sub(a: i32, b: i32) -> i32 { a - b }
5083fn mul(a: i32, b: i32) -> i32 { a * b }
5084fn div(a: i32, b: i32) -> i32 { a / b }
5085fn modulo(a: i32, b: i32) -> i32 { todo!() }
5086"#;
5087 std::fs::write(&path, code).unwrap();
5088 let result = detect_stub_content(&path, "rust");
5089 assert!(
5090 result.is_none(),
5091 "File with 5+ real lines and one todo!() should NOT be flagged"
5092 );
5093 }
5094
5095 #[test]
5096 fn test_detect_stub_python_pass_body() {
5097 let dir = tempfile::tempdir().unwrap();
5098 let path = dir.path().join("main.py");
5099 std::fs::write(&path, "def run():\n pass\n").unwrap();
5100 let result = detect_stub_content(&path, "python");
5101 assert!(result.is_some(), "Should detect pass-only Python function");
5102 }
5103
5104 #[test]
5105 fn test_detect_stub_python_not_implemented() {
5106 let dir = tempfile::tempdir().unwrap();
5107 let path = dir.path().join("main.py");
5108 std::fs::write(&path, "def run():\n raise NotImplementedError()\n").unwrap();
5109 let result = detect_stub_content(&path, "python");
5110 assert!(result.is_some(), "Should detect NotImplementedError stub");
5111 }
5112
5113 #[test]
5114 fn test_detect_stub_python_ellipsis_body() {
5115 let dir = tempfile::tempdir().unwrap();
5116 let path = dir.path().join("main.py");
5117 std::fs::write(&path, "def run():\n ...\n").unwrap();
5118 let result = detect_stub_content(&path, "python");
5119 assert!(
5120 result.is_some(),
5121 "Should detect ellipsis-only Python function"
5122 );
5123 }
5124
5125 #[test]
5126 fn test_detect_stub_python_real_code_not_flagged() {
5127 let dir = tempfile::tempdir().unwrap();
5128 let path = dir.path().join("main.py");
5129 let code = "import os\n\ndef run():\n data = os.listdir('.')\n filtered = [f for f in data if f.endswith('.py')]\n for f in filtered:\n print(f)\n return filtered\n";
5130 std::fs::write(&path, code).unwrap();
5131 let result = detect_stub_content(&path, "python");
5132 assert!(result.is_none(), "Real Python code should not be flagged");
5133 }
5134
5135 #[test]
5136 fn test_detect_stub_js_throw_not_implemented() {
5137 let dir = tempfile::tempdir().unwrap();
5138 let path = dir.path().join("index.js");
5139 std::fs::write(
5140 &path,
5141 "function run() {\n throw new Error(\"not implemented\");\n}\n",
5142 )
5143 .unwrap();
5144 let result = detect_stub_content(&path, "javascript");
5145 assert!(
5146 result.is_some(),
5147 "Should detect JS throw not-implemented stub"
5148 );
5149 }
5150
5151 #[test]
5152 fn test_detect_stub_universal_comment() {
5153 let dir = tempfile::tempdir().unwrap();
5154 let path = dir.path().join("lib.rs");
5155 std::fs::write(&path, "// stub — will be replaced by agent\n").unwrap();
5156 let result = detect_stub_content(&path, "rust");
5157 assert!(result.is_some(), "Should detect universal stub comment");
5158 }
5159
5160 #[test]
5161 fn test_detect_stub_extension_fallback() {
5162 let dir = tempfile::tempdir().unwrap();
5163 let path = dir.path().join("main.py");
5164 std::fs::write(&path, "# placeholder\ndef run():\n pass\n").unwrap();
5165 let result = detect_stub_content(&path, "unknown");
5167 assert!(
5168 result.is_some(),
5169 "Should detect stub via extension fallback"
5170 );
5171 }
5172
5173 #[test]
5174 fn test_detect_stub_empty_file_returns_none() {
5175 let dir = tempfile::tempdir().unwrap();
5176 let path = dir.path().join("empty.rs");
5177 std::fs::write(&path, "").unwrap();
5178 let result = detect_stub_content(&path, "rust");
5181 assert!(result.is_none(), "Empty file has no stub pattern to match");
5182 }
5183}