1use super::*;
4use std::collections::hash_map::DefaultHasher;
5use std::hash::{Hash, Hasher};
6
7impl SRBNOrchestrator {
    /// Parse an LLM response into a typed [`ArtifactBundle`], trying strict
    /// JSON extraction first and falling back to tolerant file-marker recovery.
    ///
    /// Returns `(bundle, parse_state, attempt_record)`:
    /// - `bundle` is `Some` only when parsing AND semantic validation accepted
    ///   at least one artifact,
    /// - `parse_state` classifies how (or whether) parsing succeeded,
    /// - `attempt_record` is a telemetry record describing this attempt.
    pub fn parse_artifact_bundle_typed(
        &self,
        content: &str,
        node_id: &str,
        attempt: u32,
    ) -> (
        Option<perspt_core::types::ArtifactBundle>,
        perspt_core::types::ParseResultState,
        Option<perspt_core::types::CorrectionAttemptRecord>,
    ) {
        use perspt_core::types::{
            ArtifactBundle, ArtifactOperation, CorrectionAttemptRecord, ParseResultState,
            RetryClassification,
        };

        // Fingerprint of the raw response, used to correlate attempt records.
        // DefaultHasher is sufficient here: diagnostic identity, not crypto.
        let response_fingerprint = {
            let mut hasher = DefaultHasher::new();
            content.hash(&mut hasher);
            format!("{:016x}", hasher.finish())
        };
        let response_length = content.len();
        // Unix timestamp; a clock before the epoch degrades to 0 instead of panicking.
        let created_at = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs() as i64;

        // Map a terminal parse state (plus optional rejection text) to the
        // retry strategy the orchestrator should use next.
        let classify_retry = |state: ParseResultState, rejection: Option<&str>| match state {
            ParseResultState::StrictJsonOk | ParseResultState::TolerantRecoveryOk => None,
            ParseResultState::NoStructuredPayload
            | ParseResultState::SchemaInvalid
            | ParseResultState::EmptyBundle => Some(RetryClassification::MalformedRetry),
            ParseResultState::SemanticallyRejected => {
                // Keyword sniffing on the lowercased rejection text:
                // targeting problems → Retarget, support-file problems →
                // SupportFileViolation, anything else → Replan.
                let reason = rejection.unwrap_or_default().to_ascii_lowercase();
                if reason.contains("all artifacts rejected")
                    || reason.contains("undeclared")
                    || reason.contains("target")
                {
                    Some(RetryClassification::Retarget)
                } else if reason.contains("support") {
                    Some(RetryClassification::SupportFileViolation)
                } else {
                    Some(RetryClassification::Replan)
                }
            }
        };

        // Assemble the telemetry record for this attempt, deriving the retry
        // classification from the final state and its rejection reason.
        let build_record = |state: ParseResultState, accepted: bool, rejection: Option<String>| {
            let retry_classification = classify_retry(state, rejection.as_deref());
            CorrectionAttemptRecord {
                attempt,
                parse_state: state,
                retry_classification,
                response_fingerprint: response_fingerprint.clone(),
                response_length,
                energy_after: None,
                accepted,
                rejection_reason: rejection,
                created_at,
            }
        };

        // --- Strict path: extract a JSON payload and deserialize it. ---
        match perspt_core::normalize::extract_json(content) {
            Ok(output) => {
                let bundle = match serde_json::from_str::<ArtifactBundle>(&output.json_body) {
                    Ok(bundle) => {
                        log::info!(
                            "Parsed ArtifactBundle via normalization ({})",
                            output.method
                        );
                        bundle
                    }
                    Err(e) => {
                        // JSON was found, but it does not match the bundle schema.
                        let record = build_record(
                            ParseResultState::SchemaInvalid,
                            false,
                            Some(format!(
                                "JSON extracted via {} but bundle schema deserialization failed: {}",
                                output.method, e
                            )),
                        );
                        return (None, ParseResultState::SchemaInvalid, Some(record));
                    }
                };

                if bundle.validate().is_ok() {
                    // Normalize artifact paths; ops with unusable paths are dropped.
                    let bundle = self.normalize_bundle_paths(bundle);

                    if bundle.artifacts.is_empty() {
                        let record = build_record(
                            ParseResultState::EmptyBundle,
                            false,
                            Some("Bundle is empty after path normalization".to_string()),
                        );
                        return (None, ParseResultState::EmptyBundle, Some(record));
                    }

                    // Filter against the node's declared targets / plugin policy.
                    match self.semantic_validate_bundle(&bundle, node_id) {
                        Ok(filtered) => {
                            if filtered.artifacts.is_empty() {
                                let record = build_record(
                                    ParseResultState::SemanticallyRejected,
                                    false,
                                    Some(
                                        "All artifacts rejected by semantic validation".to_string(),
                                    ),
                                );
                                return (
                                    None,
                                    ParseResultState::SemanticallyRejected,
                                    Some(record),
                                );
                            }
                            let record = build_record(ParseResultState::StrictJsonOk, true, None);
                            return (Some(filtered), ParseResultState::StrictJsonOk, Some(record));
                        }
                        Err(reason) => {
                            let record = build_record(
                                ParseResultState::SemanticallyRejected,
                                false,
                                Some(reason),
                            );
                            return (None, ParseResultState::SemanticallyRejected, Some(record));
                        }
                    }
                } else {
                    // Deserialized fine but failed the bundle's own invariants.
                    log::warn!("JSON bundle found but failed schema validation");
                    let record = build_record(
                        ParseResultState::SchemaInvalid,
                        false,
                        Some("JSON parsed but bundle schema validation failed".to_string()),
                    );
                    return (None, ParseResultState::SchemaInvalid, Some(record));
                }
            }
            Err(e) => {
                // Not fatal: fall through to tolerant file-marker recovery below.
                log::debug!("Normalization could not extract ArtifactBundle JSON: {}", e);
            }
        }

        // --- Tolerant path: recover artifacts from file/diff markers. ---
        let markers = perspt_core::normalize::extract_file_markers(content);
        if !markers.is_empty() {
            let artifacts: Vec<ArtifactOperation> = markers
                .into_iter()
                .filter_map(|m| {
                    // Only markers with a path and non-empty content become ops.
                    let path = m.path?;
                    if m.content.is_empty() {
                        return None;
                    }
                    if m.is_diff {
                        Some(ArtifactOperation::Diff {
                            path,
                            patch: m.content,
                        })
                    } else {
                        Some(ArtifactOperation::Write {
                            path,
                            content: m.content,
                        })
                    }
                })
                .collect();

            if artifacts.is_empty() {
                let record = build_record(
                    ParseResultState::NoStructuredPayload,
                    false,
                    Some("File markers found but no named artifacts extracted".to_string()),
                );
                return (None, ParseResultState::NoStructuredPayload, Some(record));
            }

            // Marker recovery never yields commands, only file operations.
            let bundle = ArtifactBundle {
                artifacts,
                commands: vec![],
            };
            // NOTE(review): unlike the strict path, emptiness is not re-checked
            // after normalization here; an all-dropped bundle would surface as
            // SemanticallyRejected below — confirm that is intended.
            let bundle = self.normalize_bundle_paths(bundle);

            log::info!(
                "Tolerant recovery extracted {}-artifact bundle via file markers",
                bundle.len()
            );

            match self.semantic_validate_bundle(&bundle, node_id) {
                Ok(filtered) => {
                    if filtered.artifacts.is_empty() {
                        let record = build_record(
                            ParseResultState::SemanticallyRejected,
                            false,
                            Some("All artifacts rejected by semantic validation".to_string()),
                        );
                        return (None, ParseResultState::SemanticallyRejected, Some(record));
                    }
                    let record = build_record(ParseResultState::TolerantRecoveryOk, true, None);
                    return (
                        Some(filtered),
                        ParseResultState::TolerantRecoveryOk,
                        Some(record),
                    );
                }
                Err(reason) => {
                    let record =
                        build_record(ParseResultState::SemanticallyRejected, false, Some(reason));
                    return (None, ParseResultState::SemanticallyRejected, Some(record));
                }
            }
        }

        // Neither JSON nor file markers produced anything usable.
        let record = build_record(
            ParseResultState::NoStructuredPayload,
            false,
            Some("No JSON bundle or file markers found in response".to_string()),
        );
        (None, ParseResultState::NoStructuredPayload, Some(record))
    }
243
244 fn normalize_bundle_paths(
246 &self,
247 mut bundle: perspt_core::types::ArtifactBundle,
248 ) -> perspt_core::types::ArtifactBundle {
249 bundle.artifacts = bundle
250 .artifacts
251 .into_iter()
252 .filter_map(|op| match op {
253 perspt_core::types::ArtifactOperation::Write { path, content } => {
254 match perspt_core::path::normalize_artifact_path(&path) {
255 Ok(normalized) => Some(perspt_core::types::ArtifactOperation::Write {
256 path: normalized,
257 content,
258 }),
259 Err(e) => {
260 log::warn!("Dropping write artifact with bad path '{}': {}", path, e);
261 None
262 }
263 }
264 }
265 perspt_core::types::ArtifactOperation::Diff { path, patch } => {
266 match perspt_core::path::normalize_artifact_path(&path) {
267 Ok(normalized) => Some(perspt_core::types::ArtifactOperation::Diff {
268 path: normalized,
269 patch,
270 }),
271 Err(e) => {
272 log::warn!("Dropping diff artifact with bad path '{}': {}", path, e);
273 None
274 }
275 }
276 }
277 perspt_core::types::ArtifactOperation::Delete { path } => {
278 match perspt_core::path::normalize_artifact_path(&path) {
279 Ok(normalized) => {
280 Some(perspt_core::types::ArtifactOperation::Delete { path: normalized })
281 }
282 Err(e) => {
283 log::warn!("Dropping delete artifact with bad path '{}': {}", path, e);
284 None
285 }
286 }
287 }
288 perspt_core::types::ArtifactOperation::Move { from, to } => {
289 let from_norm = perspt_core::path::normalize_artifact_path(&from);
290 let to_norm = perspt_core::path::normalize_artifact_path(&to);
291 match (from_norm, to_norm) {
292 (Ok(f), Ok(t)) => {
293 Some(perspt_core::types::ArtifactOperation::Move { from: f, to: t })
294 }
295 _ => {
296 log::warn!("Dropping move artifact with bad paths '{}'→'{}'", from, to);
297 None
298 }
299 }
300 }
301 })
302 .collect();
303 bundle
304 }
305
306 fn semantic_validate_bundle(
314 &self,
315 bundle: &perspt_core::types::ArtifactBundle,
316 node_id: &str,
317 ) -> Result<perspt_core::types::ArtifactBundle, String> {
318 let allowed_paths = self.allowed_bundle_paths(node_id);
319
320 if allowed_paths.is_empty() {
322 return Ok(bundle.clone());
323 }
324
325 let registry = perspt_core::plugin::PluginRegistry::new();
327 let plugin_name = self
328 .node_indices
329 .get(node_id)
330 .map(|idx| self.graph[*idx].owner_plugin.as_str())
331 .unwrap_or("");
332 let plugin = registry.get(plugin_name);
333 let legal_support: std::collections::HashSet<String> = plugin
334 .map(|p| {
335 p.legal_support_files()
336 .iter()
337 .map(|s| s.to_string())
338 .collect()
339 })
340 .unwrap_or_default();
341
342 let (kept, dropped): (Vec<_>, Vec<_>) = bundle.artifacts.iter().cloned().partition(|a| {
343 let normalized = perspt_core::path::normalize_artifact_path(a.path())
344 .unwrap_or_else(|_| a.path().to_string());
345
346 if let Some(plugin) = plugin {
347 if Self::is_manifest_path(&normalized)
348 && plugin.manifest_mutation_policy(&normalized)
349 == perspt_core::types::ManifestMutationPolicy::Deny
350 {
351 log::warn!(
352 "Rejecting manifest mutation '{}' by plugin policy for '{}'",
353 normalized,
354 plugin_name
355 );
356 return false;
357 }
358 }
359
360 if allowed_paths.contains(&normalized) {
362 return true;
363 }
364
365 let filename = std::path::Path::new(&normalized)
367 .file_name()
368 .map(|f| f.to_string_lossy().to_string())
369 .unwrap_or_default();
370 if legal_support.contains(&filename) {
371 log::info!(
372 "Accepting support file '{}' via plugin legal_support_files",
373 normalized
374 );
375 return true;
376 }
377
378 false
379 });
380
381 if !dropped.is_empty() {
382 let dropped_paths: Vec<String> = dropped.iter().map(|a| a.path().to_string()).collect();
383 log::warn!(
384 "Semantic validation stripped {} artifact(s) from node '{}': {}",
385 dropped.len(),
386 node_id,
387 dropped_paths.join(", ")
388 );
389 }
390
391 let mut validated_commands = Vec::new();
393 for cmd in &bundle.commands {
394 let decision = self
395 .node_indices
396 .get(node_id)
397 .and_then(|idx| {
398 let plugin_name = &self.graph[*idx].owner_plugin;
399 let registry = perspt_core::plugin::PluginRegistry::new();
400 registry
401 .get(plugin_name)
402 .map(|p| p.dependency_command_policy(cmd))
403 })
404 .unwrap_or(perspt_core::types::CommandPolicyDecision::Allow);
405
406 match decision {
407 perspt_core::types::CommandPolicyDecision::Allow => {
408 validated_commands.push(cmd.clone());
409 }
410 perspt_core::types::CommandPolicyDecision::RequireApproval => {
411 log::info!("Command '{}' requires approval — including with flag", cmd);
412 validated_commands.push(cmd.clone());
413 }
414 perspt_core::types::CommandPolicyDecision::Deny => {
415 log::warn!("Command '{}' denied by plugin policy", cmd);
416 }
417 }
418 }
419
420 Ok(perspt_core::types::ArtifactBundle {
421 artifacts: kept,
422 commands: validated_commands,
423 })
424 }
425
    /// Apply a bundle's file operations for `node_id`: each artifact becomes a
    /// tool call (write/diff/delete/move), executed in order. The first tool
    /// failure aborts with `Err`; note that already-applied operations are NOT
    /// rolled back, so "transactionally" is best-effort.
    pub async fn apply_bundle_transactionally(
        &mut self,
        bundle: &perspt_core::types::ArtifactBundle,
        node_id: &str,
        node_class: perspt_core::types::NodeClass,
    ) -> Result<()> {
        let idx =
            self.node_indices.get(node_id).copied().ok_or_else(|| {
                anyhow::anyhow!("Unknown node '{}' for bundle application", node_id)
            })?;
        let node_workdir = self.effective_working_dir(idx);

        // Structural validation is a hard failure.
        bundle.validate().map_err(|e| {
            eprintln!(
                "[SRBN-DIAG] Bundle validation failed for '{}': {}",
                node_id, e
            );
            anyhow::anyhow!(e)
        })?;

        // Re-run semantic filtering defensively (callers may pass unfiltered bundles).
        let filtered = self
            .semantic_validate_bundle(bundle, node_id)
            .map_err(|reason| {
                anyhow::anyhow!(
                    "Bundle semantic validation failed for '{}': {}",
                    node_id,
                    reason
                )
            })?;

        // Everything stripped: report which paths were attempted vs. declared,
        // and fail so the caller can retarget.
        if filtered.artifacts.is_empty() && !bundle.artifacts.is_empty() {
            let dropped_paths: Vec<String> = bundle
                .artifacts
                .iter()
                .map(|a| a.path().to_string())
                .collect();
            eprintln!(
                "[SRBN-DIAG] All artifacts stripped for '{}': {:?}",
                node_id, dropped_paths
            );
            log::warn!(
                "All artifacts stripped for node '{}' — skipping bundle application. \
                Dropped paths: {}",
                node_id,
                dropped_paths.join(", ")
            );
            self.emit_log(format!(
                "⚠️ All artifacts for '{}' targeted undeclared paths — bundle skipped. \
                The actuator's output_files don't match the plan.",
                node_id
            ));
            return Err(anyhow::anyhow!(
                "All {} artifact(s) targeted undeclared paths for node '{}': [{}]. \
                Expected paths: {:?}",
                bundle.artifacts.len(),
                node_id,
                dropped_paths.join(", "),
                self.node_indices
                    .get(node_id)
                    .map(|idx| self.graph[*idx]
                        .output_targets
                        .iter()
                        .map(|p| p.to_string_lossy().to_string())
                        .collect::<Vec<_>>())
                    .unwrap_or_default()
            ));
        }
        let bundle = filtered;

        // Ownership violations warn but do not abort.
        if let Err(e) = self
            .context
            .ownership_manifest
            .validate_bundle(&bundle, node_id, node_class)
        {
            log::warn!("Ownership validation warning for node '{}': {}", node_id, e);
            self.emit_log(format!("⚠️ Ownership warning: {}", e));
        }

        // Resolve the owning plugin name for ownership assignment later;
        // an empty/missing plugin is recorded as "unknown".
        let owner_plugin = self
            .node_indices
            .get(node_id)
            .and_then(|idx| {
                let plugin = &self.graph[*idx].owner_plugin;
                if plugin.is_empty() {
                    None
                } else {
                    Some(plugin.clone())
                }
            })
            .unwrap_or_else(|| "unknown".to_string());

        // Per-category bookkeeping for the BundleApplied event.
        // NOTE(review): files_deleted is accumulated but not carried on the
        // BundleApplied event below — confirm whether the event should include it.
        let mut files_created: Vec<String> = Vec::new();
        let mut files_modified: Vec<String> = Vec::new();
        let mut files_deleted: Vec<String> = Vec::new();

        for op in &bundle.artifacts {
            let mut args = HashMap::new();
            // Artifact paths are relative; resolve under the node's workdir.
            let resolved_path = node_workdir.join(op.path());
            args.insert(
                "path".to_string(),
                resolved_path.to_string_lossy().to_string(),
            );

            // Translate the operation into the corresponding tool invocation.
            let call = match op {
                perspt_core::types::ArtifactOperation::Write { content, .. } => {
                    args.insert("content".to_string(), content.clone());
                    ToolCall {
                        name: "write_file".to_string(),
                        arguments: args,
                    }
                }
                perspt_core::types::ArtifactOperation::Diff { patch, .. } => {
                    args.insert("diff".to_string(), patch.clone());
                    ToolCall {
                        name: "apply_diff".to_string(),
                        arguments: args,
                    }
                }
                perspt_core::types::ArtifactOperation::Delete { path } => {
                    // Destructive ops get an extra policy gate; a blocked op is
                    // skipped, not fatal.
                    if let Err(e) = perspt_policy::sanitize::validate_artifact_mutation(
                        path,
                        &self.context.working_dir,
                        "Delete",
                    ) {
                        log::warn!("Delete blocked by policy: {}", e);
                        self.emit_log(format!("⚠️ Delete blocked: {}", e));
                        continue;
                    }
                    ToolCall {
                        name: "delete_file".to_string(),
                        arguments: args,
                    }
                }
                perspt_core::types::ArtifactOperation::Move { from, to } => {
                    // Both endpoints of a move must pass the mutation policy.
                    if let Err(e) = perspt_policy::sanitize::validate_artifact_mutation(
                        from,
                        &self.context.working_dir,
                        "Move",
                    ) {
                        log::warn!("Move source blocked by policy: {}", e);
                        self.emit_log(format!("⚠️ Move blocked: {}", e));
                        continue;
                    }
                    if let Err(e) = perspt_policy::sanitize::validate_artifact_mutation(
                        to,
                        &self.context.working_dir,
                        "Move",
                    ) {
                        log::warn!("Move destination blocked by policy: {}", e);
                        self.emit_log(format!("⚠️ Move blocked: {}", e));
                        continue;
                    }
                    let resolved_to = node_workdir.join(to);
                    // "path" already holds the resolved source; mirror it as "from".
                    args.insert("from".to_string(), args["path"].clone());
                    args.insert("to".to_string(), resolved_to.to_string_lossy().to_string());
                    ToolCall {
                        name: "move_file".to_string(),
                        arguments: args,
                    }
                }
            };

            let result = self.tools.execute(&call).await;
            if result.success {
                let full_path = resolved_path.clone();

                // Classify the op for event bookkeeping and emit per-op events.
                if op.is_write() {
                    files_created.push(op.path().to_string());
                } else if op.is_delete() {
                    files_deleted.push(op.path().to_string());
                    self.emit_event(perspt_core::AgentEvent::FileDeleted {
                        node_id: self.graph[idx].node_id.clone(),
                        path: op.path().to_string(),
                    });
                } else if op.is_move() {
                    if let perspt_core::types::ArtifactOperation::Move { to, .. } = op {
                        files_modified.push(format!("{} -> {}", op.path(), to));
                        self.emit_event(perspt_core::AgentEvent::FileMoved {
                            node_id: self.graph[idx].node_id.clone(),
                            from: op.path().to_string(),
                            to: to.to_string(),
                        });
                    }
                } else {
                    // Diffs count as modifications.
                    files_modified.push(op.path().to_string());
                }

                // For files that still exist, bump the version and notify
                // interested LSP clients with the post-apply on-disk content.
                if !op.is_delete() {
                    self.last_written_file = Some(full_path.clone());
                    self.file_version += 1;

                    let registry = perspt_core::plugin::PluginRegistry::new();
                    for (lang, client) in self.lsp_clients.iter_mut() {
                        // Only notify clients whose plugin owns this file;
                        // clients with no registered plugin are notified
                        // conservatively.
                        let should_notify = match registry.get(lang) {
                            Some(plugin) => plugin.owns_file(op.path()),
                            None => true,
                        };
                        if should_notify {
                            if let Ok(content) = std::fs::read_to_string(&full_path) {
                                let _ = client
                                    .did_change(&full_path, &content, self.file_version)
                                    .await;
                            }
                        }
                    }
                }

                log::info!("✓ Applied: {}", op.path());
                self.emit_log(format!("✅ Applied: {}", op.path()));
            } else {
                // First tool failure aborts; earlier ops are NOT rolled back.
                log::warn!("Failed to apply {}: {:?}", op.path(), result.error);
                self.emit_log(format!("❌ Failed: {} - {:?}", op.path(), result.error));
                self.last_tool_failure = result.error.clone();
                return Err(anyhow::anyhow!(
                    "Bundle application failed at {}: {:?}",
                    op.path(),
                    result.error
                ));
            }
        }

        // Record ownership of any newly created paths.
        self.context.ownership_manifest.assign_new_paths(
            &bundle,
            node_id,
            &owner_plugin,
            node_class,
        );

        self.emit_event(perspt_core::AgentEvent::BundleApplied {
            node_id: node_id.to_string(),
            files_created,
            files_modified,
            writes_count: bundle.writes_count(),
            diffs_count: bundle.diffs_count(),
            node_class: node_class.to_string(),
        });

        self.last_tool_failure = None;
        Ok(())
    }
690
691 fn allowed_bundle_paths(&self, node_id: &str) -> std::collections::HashSet<String> {
692 self.node_indices
693 .get(node_id)
694 .map(|idx| {
695 self.graph[*idx]
696 .output_targets
697 .iter()
698 .map(|p| {
699 let raw = p.to_string_lossy();
700 perspt_core::path::normalize_artifact_path(&raw)
701 .unwrap_or_else(|_| raw.to_string())
702 })
703 .collect()
704 })
705 .unwrap_or_default()
706 }
707
708 fn is_manifest_path(path: &str) -> bool {
709 matches!(
710 std::path::Path::new(path)
711 .file_name()
712 .and_then(|name| name.to_str()),
713 Some("Cargo.toml" | "package.json" | "pyproject.toml" | "setup.py" | "setup.cfg")
714 )
715 }
716}
717
#[cfg(test)]
mod tests {
    //! Unit tests for the bundle parsing pipeline: strict JSON extraction,
    //! tolerant file-marker recovery, path normalization, and parse-state
    //! classification.
    use perspt_core::types::{ArtifactBundle, ArtifactOperation, ParseResultState};

    // Only the two *Ok states count as successful parses.
    #[test]
    fn test_parse_result_state_is_ok() {
        assert!(ParseResultState::StrictJsonOk.is_ok());
        assert!(ParseResultState::TolerantRecoveryOk.is_ok());
        assert!(!ParseResultState::NoStructuredPayload.is_ok());
        assert!(!ParseResultState::SchemaInvalid.is_ok());
        assert!(!ParseResultState::SemanticallyRejected.is_ok());
        assert!(!ParseResultState::EmptyBundle.is_ok());
    }

    // Bare, well-formed bundle JSON deserializes and validates.
    #[test]
    fn test_strict_json_layer_c_valid_bundle() {
        let json = r#"{"artifacts":[{"operation":"write","path":"src/main.rs","content":"fn main() {}"}],"commands":[]}"#;
        let result = perspt_core::normalize::extract_and_deserialize::<ArtifactBundle>(json);
        assert!(result.is_ok());
        let (bundle, _method) = result.unwrap();
        assert_eq!(bundle.artifacts.len(), 1);
        assert!(bundle.validate().is_ok());
    }

    // Valid JSON that is not a bundle must be rejected at the schema layer.
    #[test]
    fn test_strict_json_layer_c_invalid_schema() {
        let json = r#"{"foo": "bar"}"#;
        let result = perspt_core::normalize::extract_and_deserialize::<ArtifactBundle>(json);
        assert!(result.is_err());
    }

    // "### File:" markers with fenced code blocks recover as write artifacts.
    #[test]
    fn test_tolerant_recovery_layer_d_file_markers() {
        let response = r#"
Here is the implementation:

### File: src/main.rs
```rust
fn main() {
    println!("Hello");
}
```

### File: src/lib.rs
```rust
pub fn greet() -> &'static str { "Hello" }
```
"#;
        let markers = perspt_core::normalize::extract_file_markers(response);
        assert_eq!(markers.len(), 2);
        assert_eq!(markers[0].path, Some("src/main.rs".to_string()));
        assert_eq!(markers[1].path, Some("src/lib.rs".to_string()));
        assert!(!markers[0].is_diff);
    }

    // An anonymous fenced block yields no *named* markers (path is None).
    #[test]
    fn test_tolerant_recovery_layer_d_no_named_blocks() {
        let response = "Here is some code:\n```rust\nfn foo() {}\n```\n";
        let markers = perspt_core::normalize::extract_file_markers(response);
        let named = markers.iter().filter(|m| m.path.is_some()).count();
        assert_eq!(named, 0);
    }

    // Normalization strips markdown decoration (backticks, quotes, bold).
    #[test]
    fn test_path_normalization_layer_b() {
        let normalized = perspt_core::path::normalize_artifact_path("`src/main.rs`").unwrap();
        assert_eq!(normalized, "src/main.rs");

        let normalized = perspt_core::path::normalize_artifact_path("'src/lib.rs'").unwrap();
        assert_eq!(normalized, "src/lib.rs");

        let normalized = perspt_core::path::normalize_artifact_path("**src/utils.rs**").unwrap();
        assert_eq!(normalized, "src/utils.rs");
    }

    // Sanity check of the emptiness predicate used by the parser.
    #[test]
    fn test_empty_bundle_detection() {
        let bundle = ArtifactBundle {
            artifacts: vec![],
            commands: vec![],
        };
        assert!(bundle.artifacts.is_empty());
    }

    // Commands in the JSON payload survive deserialization alongside artifacts.
    #[test]
    fn test_bundle_with_commands() {
        let json = r#"{"artifacts":[{"operation":"write","path":"src/main.rs","content":"fn main() {}"}],"commands":["cargo add serde"]}"#;
        let result = perspt_core::normalize::extract_and_deserialize::<ArtifactBundle>(json);
        assert!(result.is_ok());
        let (bundle, _) = result.unwrap();
        assert_eq!(bundle.commands.len(), 1);
        assert_eq!(bundle.commands[0], "cargo add serde");
    }

    // "### Diff:" markers recover as diff (not write) artifacts.
    #[test]
    fn test_layer_d_diff_markers() {
        let response = r#"
### Diff: src/main.rs
```diff
--- a/src/main.rs
+++ b/src/main.rs
@@ -1 +1 @@
-fn main() {}
+fn main() { println!("hello"); }
```
"#;
        let markers = perspt_core::normalize::extract_file_markers(response);
        assert!(!markers.is_empty());
        let first = &markers[0];
        assert_eq!(first.path, Some("src/main.rs".to_string()));
        assert!(first.is_diff);
    }

    // A plain refusal message yields neither JSON nor markers.
    #[test]
    fn test_no_structured_payload() {
        let response = "I'm sorry, I can't help with that. Please try again.";
        let json_result =
            perspt_core::normalize::extract_and_deserialize::<ArtifactBundle>(response);
        assert!(json_result.is_err());
        let markers = perspt_core::normalize::extract_file_markers(response);
        assert!(markers.is_empty());
    }

    // Bundle JSON wrapped in a ```json fence still extracts.
    #[test]
    fn test_fenced_json_bundle_extraction() {
        let response = r#"Here is the bundle:
```json
{"artifacts":[{"operation":"write","path":"src/main.rs","content":"fn main() {}"}],"commands":[]}
```
"#;
        let result = perspt_core::normalize::extract_and_deserialize::<ArtifactBundle>(response);
        assert!(result.is_ok());
    }

    // path() accessor returns the target path for each operation variant.
    #[test]
    fn test_artifact_operation_paths() {
        let write = ArtifactOperation::Write {
            path: "src/main.rs".to_string(),
            content: "fn main() {}".to_string(),
        };
        assert_eq!(write.path(), "src/main.rs");

        let diff = ArtifactOperation::Diff {
            path: "src/lib.rs".to_string(),
            patch: "...".to_string(),
        };
        assert_eq!(diff.path(), "src/lib.rs");
    }
}