Coverage for src/prosemark/app/use_cases.py: 99%
538 statements
« prev ^ index » next coverage.py v7.8.0, created at 2025-09-24 18:08 +0000
1"""Use case interactors for prosemark application layer."""
3import json
4from dataclasses import dataclass, field
5from pathlib import Path
6from typing import TYPE_CHECKING
8from prosemark.domain.models import Binder, BinderItem, NodeId
9from prosemark.exceptions import (
10 AlreadyMaterializedError,
11 BinderIntegrityError,
12 EditorLaunchError,
13 FileSystemError,
14 NodeIdentityError,
15 NodeNotFoundError,
16 PlaceholderNotFoundError,
17)
19if TYPE_CHECKING: # pragma: no cover
20 from prosemark.ports.binder_repo import BinderRepo
21 from prosemark.ports.clock import Clock
22 from prosemark.ports.config_port import ConfigPort
23 from prosemark.ports.console_port import ConsolePort
24 from prosemark.ports.daily_repo import DailyRepo
25 from prosemark.ports.editor_port import EditorPort
26 from prosemark.ports.id_generator import IdGenerator
27 from prosemark.ports.logger import Logger
28 from prosemark.ports.node_repo import NodeRepo
@dataclass(frozen=True)
class PlaceholderIssue:
    """Represents a placeholder item found during audit.

    Placeholders are binder entries without a backing node file; they are
    reported for information only and do not make an audit fail.
    """

    # Title shown for the entry in the binder.
    display_title: str
    position: str  # Human-readable position like "[0][1]"
@dataclass(frozen=True)
class MissingIssue:
    """Represents a missing node file found during audit."""

    # Identity of the node referenced by the binder.
    node_id: NodeId
    # Path where the node file was expected but not found on disk.
    expected_path: str
@dataclass(frozen=True)
class OrphanIssue:
    """Represents an orphaned node file found during audit."""

    # Identity of the node whose file exists on disk.
    node_id: NodeId
    # Path of the file that is not referenced by the binder.
    file_path: str
@dataclass(frozen=True)
class MismatchIssue:
    """Represents a frontmatter ID mismatch found during audit."""

    # Path of the file whose frontmatter disagrees with its filename.
    file_path: str
    # NodeId implied by the file's name/location.
    expected_id: NodeId
    # NodeId actually present in the file's frontmatter.
    actual_id: NodeId
@dataclass
class AuditReport:
    """Holds the findings of a binder audit.

    Findings fall into four buckets: placeholders (informational entries
    with no backing file), missing node files, orphaned node files, and
    frontmatter ID mismatches.
    """

    placeholders: list[PlaceholderIssue] = field(default_factory=list)
    missing: list[MissingIssue] = field(default_factory=list)
    orphans: list[OrphanIssue] = field(default_factory=list)
    mismatches: list[MismatchIssue] = field(default_factory=list)

    def is_clean(self) -> bool:
        """Return True when the audit found no issues.

        Placeholders are not considered errors -- they only indicate
        planned content that has not been implemented yet.

        Returns:
            True if no issues were found, False otherwise

        """
        return not (self.missing or self.orphans or self.mismatches)

    def format_report(self) -> str:
        """Render the audit findings as a human-readable report.

        Returns:
            Formatted string report with issues organized by type

        """
        if self.is_clean() and not self.placeholders:
            return 'Audit Results:\n============\n✓ Clean (no issues found)'

        header = 'Audit Results:' if self.is_clean() else 'Issues Found:'
        lines = [header, '============']

        if self.placeholders:
            lines.append(f'PLACEHOLDERS ({len(self.placeholders)}):')
            for item in self.placeholders:
                lines.append(f' - "{item.display_title}" at position {item.position}')
            lines.append('')

        if self.missing:
            lines.append(f'MISSING ({len(self.missing)}):')
            for item in self.missing:
                lines.append(f' - Node {item.expected_path} referenced in binder but file missing')
            lines.append('')

        if self.orphans:
            lines.append(f'ORPHANS ({len(self.orphans)}):')
            for item in self.orphans:
                lines.append(f' - Node {item.file_path} exists but not in binder')
            lines.append('')

        if self.mismatches:
            lines.append(f'MISMATCHES ({len(self.mismatches)}):')
            for item in self.mismatches:
                lines.append(f' - File {item.file_path} has frontmatter id: {item.actual_id}')
            lines.append('')

        # Each section ends with a blank separator; strip the trailing one.
        return '\n'.join(lines).rstrip()

    def to_json(self) -> str:
        """Serialize the audit findings to a JSON document.

        Returns:
            JSON string representation of the audit results

        """
        payload = {
            'placeholders': [{'display_title': p.display_title, 'position': p.position} for p in self.placeholders],
            'missing': [{'node_id': str(m.node_id), 'expected_path': m.expected_path} for m in self.missing],
            'orphans': [{'node_id': str(o.node_id), 'file_path': o.file_path} for o in self.orphans],
            'mismatches': [
                {
                    'file_path': m.file_path,
                    'expected_id': str(m.expected_id),
                    'actual_id': str(m.actual_id),
                }
                for m in self.mismatches
            ],
        }
        return json.dumps(payload, indent=2)
class InitProject:
    """Use case interactor for initializing a new prosemark project.

    Coordinates project bootstrap: validates the target directory, creates
    an empty binder, and writes the default configuration file. All I/O is
    delegated to injected ports, keeping this interactor pure business
    logic per hexagonal architecture.

    Workflow:
    1. Refuse to run if `_binder.md` already exists at the target path
    2. Create an empty binder structure via the binder repository
    3. Create the default `.prosemark.yml` via the config port
    4. Report success to the console and log each step

    Args:
        binder_repo: Port for binder persistence operations
        config_port: Port for configuration file management
        console_port: Port for user output and messaging
        logger: Port for operational logging and audit trails
        clock: Port for timestamp generation

    Examples:
        >>> # With dependency injection
        >>> interactor = InitProject(
        ...     binder_repo=file_binder_repo,
        ...     config_port=yaml_config_port,
        ...     console_port=terminal_console,
        ...     logger=production_logger,
        ...     clock=system_clock,
        ... )
        >>> interactor.execute(Path('/path/to/new/project'))

    """

    def __init__(
        self,
        binder_repo: 'BinderRepo',
        config_port: 'ConfigPort',
        console_port: 'ConsolePort',
        logger: 'Logger',
        clock: 'Clock',
    ) -> None:
        """Store the injected port implementations.

        Args:
            binder_repo: Port for binder persistence operations
            config_port: Port for configuration file management
            console_port: Port for user output and messaging
            logger: Port for operational logging and audit trails
            clock: Port for timestamp generation

        """
        self._binder_repo = binder_repo
        self._config_port = config_port
        self._console_port = console_port
        self._logger = logger
        self._clock = clock

    def execute(self, project_path: Path) -> None:
        """Initialize a new prosemark project at *project_path*.

        Creates the default configuration and an empty binder, refusing to
        run when the directory already contains a prosemark project.

        Args:
            project_path: Directory where project should be initialized

        Raises:
            BinderIntegrityError: If project is already initialized (_binder.md exists)
            FileSystemError: If files cannot be created (propagated from ports)

        """
        self._logger.info('Starting project initialization at %s', project_path)

        binder_path = project_path / '_binder.md'
        config_path = project_path / '.prosemark.yml'

        self._ensure_not_initialized(binder_path)

        # NOTE(review): timestamp is generated but never used here --
        # presumably vestigial; confirm before removing.
        self._clock.now_iso()
        self._create_initial_binder()
        self._create_default_config(config_path)

        self._console_port.print(f'Initialized prosemark project at {project_path}')
        self._logger.info('Project initialization completed successfully at %s', project_path)

    def _ensure_not_initialized(self, binder_path: Path) -> None:
        """Raise when a binder file already exists at *binder_path*.

        Args:
            binder_path: Expected location of the project's `_binder.md`

        Raises:
            BinderIntegrityError: If a binder file is already present

        """
        if binder_path.exists():
            self._logger.error('Project initialization failed: project already exists at %s', binder_path)
            msg = 'Project already initialized'
            raise BinderIntegrityError(msg, str(binder_path))
        self._logger.debug('Validation passed: no existing project found')

    def _create_initial_binder(self) -> None:
        """Create and persist an empty binder structure.

        Establishes the foundational hierarchy for the project by saving
        a Binder aggregate with no roots.

        """
        self._logger.debug('Creating initial empty binder structure')
        self._binder_repo.save(Binder(roots=[]))
        self._logger.info('Initial binder structure created and saved')

    def _create_default_config(self, config_path: Path) -> None:
        """Write the default configuration file via the config port.

        Args:
            config_path: Path where configuration file should be created

        """
        self._logger.debug('Creating default configuration at %s', config_path)
        self._config_port.create_default_config(config_path)
        self._logger.info('Default configuration created at %s', config_path)
class AddNode:
    """Use case interactor for adding new nodes to the binder structure.

    Creates a new node (unique id, draft and notes files) and links it into
    the binder hierarchy, delegating all I/O to injected ports per
    hexagonal architecture.

    Workflow:
    1. Generate a unique NodeId
    2. Create the node's files ({id}.md with frontmatter, {id}.notes.md)
    3. Load the binder and attach a BinderItem at the requested location
       (root level, or under a validated parent, at an optional position)
    4. Save the binder and log every step with the NodeId for traceability

    Args:
        binder_repo: Port for binder persistence operations
        node_repo: Port for node file creation and management
        id_generator: Port for generating unique NodeId values
        logger: Port for operational logging and audit trails
        clock: Port for timestamp generation

    Examples:
        >>> # With dependency injection
        >>> interactor = AddNode(
        ...     binder_repo=file_binder_repo,
        ...     node_repo=file_node_repo,
        ...     id_generator=uuid_generator,
        ...     logger=production_logger,
        ...     clock=system_clock,
        ... )
        >>> node_id = interactor.execute(title='Chapter One', synopsis='The beginning', parent_id=None, position=None)

    """

    def __init__(
        self,
        binder_repo: 'BinderRepo',
        node_repo: 'NodeRepo',
        id_generator: 'IdGenerator',
        logger: 'Logger',
        clock: 'Clock',
    ) -> None:
        """Store the injected port implementations.

        Args:
            binder_repo: Port for binder persistence operations
            node_repo: Port for node file creation and management
            id_generator: Port for generating unique NodeId values
            logger: Port for operational logging and audit trails
            clock: Port for timestamp generation

        """
        self._binder_repo = binder_repo
        self._node_repo = node_repo
        self._id_generator = id_generator
        self._logger = logger
        self._clock = clock

    def execute(
        self,
        title: str | None,
        synopsis: str | None,
        parent_id: NodeId | None,
        position: int | None,
    ) -> NodeId:
        """Create a node and add it to the binder hierarchy.

        The node is appended at root level when no parent is given, or
        placed under the specified parent.

        Args:
            title: Optional title for the node (used as display_title)
            synopsis: Optional synopsis/summary for the node
            parent_id: Optional parent NodeId for nested placement
            position: Optional position for insertion order (None = append)

        Returns:
            NodeId of the created node

        Raises:
            NodeNotFoundError: If specified parent_id doesn't exist in binder
            BinderIntegrityError: If binder integrity is violated after addition
            FileSystemError: If node files cannot be created (propagated from ports)

        """
        self._logger.info('Starting node creation with title=%s, parent_id=%s', title, parent_id)

        # Identity: mint a fresh NodeId for the new node.
        node_id = self._id_generator.new()
        self._logger.debug('Generated new NodeId: %s', node_id)

        # Files: create draft and notes files with frontmatter.
        # NOTE(review): the timestamp is generated but never used here --
        # presumably vestigial; confirm before removing.
        self._clock.now_iso()
        self._node_repo.create(node_id, title, synopsis)
        self._logger.debug('Created node files for NodeId: %s', node_id)

        # Hierarchy: link the node into the binder and persist the change.
        binder = self._binder_repo.load()
        self._add_node_to_binder(binder, node_id, title, parent_id, position)
        self._binder_repo.save(binder)
        self._logger.debug('Added node to binder and saved changes for NodeId: %s', node_id)

        self._logger.info('Node creation completed successfully for NodeId: %s', node_id)
        return node_id

    def _add_node_to_binder(
        self,
        binder: Binder,
        node_id: NodeId,
        title: str | None,
        parent_id: NodeId | None,
        position: int | None,
    ) -> None:
        """Attach a BinderItem for the new node at the requested location.

        Args:
            binder: Binder instance to modify
            node_id: NodeId of the new node
            title: Title to use as display_title ('(untitled)' if None)
            parent_id: Optional parent NodeId for nested placement
            position: Optional position for insertion order

        Raises:
            NodeNotFoundError: If parent_id is specified but doesn't exist

        """
        display_title = '(untitled)' if title is None else title
        new_item = BinderItem(display_title=display_title, node_id=node_id, children=[])

        # Resolve the sibling list the item will be inserted into.
        if parent_id is None:
            self._logger.debug('Adding node to binder roots for NodeId: %s', node_id)
            siblings = binder.roots
        else:
            self._logger.debug('Adding node under parent %s for NodeId: %s', parent_id, node_id)
            parent_item = binder.find_by_id(parent_id)
            if parent_item is None:
                self._logger.error('Parent node not found in binder: %s', parent_id)
                msg = 'Parent node not found'
                raise NodeNotFoundError(msg, str(parent_id))
            siblings = parent_item.children

        if position is None:
            siblings.append(new_item)
        else:
            siblings.insert(position, new_item)

        # Validate binder integrity after modification
        binder.validate_integrity()  # pragma: no cover
class EditPart:
    """Use case interactor for editing node parts in an external editor.

    Validates the node and the requested part, then delegates the actual
    editor launch to the node repository port. Pure business logic per
    hexagonal architecture -- all I/O lives behind the injected ports.

    Workflow:
    1. Confirm the node exists in the binder
    2. Confirm the part is one of 'draft', 'notes', 'synopsis'
    3. Open the corresponding file in the external editor
    4. Log the operation for traceability

    Args:
        binder_repo: Port for binder persistence operations (validation)
        node_repo: Port for node file operations and editor integration
        logger: Port for operational logging and audit trails

    Examples:
        >>> # With dependency injection
        >>> interactor = EditPart(
        ...     binder_repo=file_binder_repo,
        ...     node_repo=file_node_repo,
        ...     logger=production_logger,
        ... )
        >>> interactor.execute(node_id=node_id, part='draft')

    """

    def __init__(
        self,
        binder_repo: 'BinderRepo',
        node_repo: 'NodeRepo',
        logger: 'Logger',
    ) -> None:
        """Store the injected port implementations.

        Args:
            binder_repo: Port for binder persistence operations (validation)
            node_repo: Port for node file operations and editor integration
            logger: Port for operational logging and audit trails

        """
        self._binder_repo = binder_repo
        self._node_repo = node_repo
        self._logger = logger

    def execute(self, node_id: NodeId, part: str) -> None:
        """Open the given part of a node in the external editor.

        Args:
            node_id: NodeId of the node to edit
            part: Which part to edit - must be one of:
                - 'draft': Edit the main content in {id}.md
                - 'notes': Edit the notes in {id}.notes.md
                - 'synopsis': Edit the synopsis field in {id}.md frontmatter

        Raises:
            NodeNotFoundError: If node_id doesn't exist in binder
            ValueError: If part is not a valid option
            FileSystemError: If editor cannot be launched or files don't exist

        """
        self._logger.info('Starting edit operation for NodeId: %s, part: %s', node_id, part)

        # Node existence is checked first so an unknown node wins over an
        # unknown part when both are invalid.
        binder = self._binder_repo.load()
        if binder.find_by_id(node_id) is None:
            self._logger.error('Node not found in binder: %s', node_id)
            msg = 'Node not found in binder'
            raise NodeNotFoundError(msg, str(node_id))

        valid_parts = {'draft', 'notes', 'synopsis'}
        if part not in valid_parts:
            self._logger.error('Invalid part specified: %s (valid: %s)', part, valid_parts)
            msg = f'Invalid part: {part}. Must be one of: {", ".join(sorted(valid_parts))}'
            raise ValueError(msg)

        self._logger.debug('Validation passed: node exists and part is valid')

        # Hand off to the node repository, which knows how to launch the editor.
        self._logger.debug('Opening %s part of node %s in editor', part, node_id)
        self._node_repo.open_in_editor(node_id, part)

        self._logger.info('Edit operation completed successfully for NodeId: %s, part: %s', node_id, part)
class MoveNode:
    """Use case interactor for moving nodes within the binder hierarchy.

    Orchestrates the movement of existing nodes by updating the binder
    structure while preserving node identity and files. Follows hexagonal
    architecture principles with pure business logic that delegates all I/O
    operations to injected port implementations.

    The node movement process:
    1. Validates source node exists in binder hierarchy
    2. Validates target parent exists when specified
    3. Checks for circular dependencies (including moving a node under itself)
    4. Removes node from current location in binder tree
    5. Adds node to new location at specified position
    6. Updates and saves binder changes to _binder.md
    7. Logs all operations with NodeId details for traceability

    Node files remain unchanged during move operations - only the binder
    hierarchy structure is modified.

    Args:
        binder_repo: Port for binder persistence operations
        logger: Port for operational logging and audit trails

    Examples:
        >>> # With dependency injection
        >>> interactor = MoveNode(
        ...     binder_repo=file_binder_repo,
        ...     logger=production_logger,
        ... )
        >>> interactor.execute(node_id=node_id, parent_id=new_parent_id, position=0)

    """

    def __init__(
        self,
        binder_repo: 'BinderRepo',
        logger: 'Logger',
    ) -> None:
        """Initialize MoveNode with injected dependencies.

        Args:
            binder_repo: Port for binder persistence operations
            logger: Port for operational logging and audit trails

        """
        self._binder_repo = binder_repo
        self._logger = logger

    def execute(
        self,
        node_id: NodeId,
        parent_id: NodeId | None,
        position: int | None,
    ) -> None:
        """Execute node movement workflow.

        Moves the specified node to a new location in the binder hierarchy.
        The node is moved to the root level if no parent is specified, or
        under the specified parent node at the given position.

        Args:
            node_id: NodeId of the node to move
            parent_id: Optional target parent NodeId (None = move to root)
            position: Optional position for insertion order (None = append)

        Raises:
            NodeNotFoundError: If node_id or parent_id doesn't exist in binder
            BinderIntegrityError: If move would create circular dependency
            FileSystemError: If binder file cannot be saved (propagated from ports)

        """
        self._logger.info(
            'Starting move node operation for NodeId: %s to parent: %s position: %s',
            node_id,
            parent_id,
            position,
        )

        # Load and validate binder structure
        binder = self._binder_repo.load()
        self._logger.debug('Validating source and target nodes')

        # Validate source node exists
        source_item = binder.find_by_id(node_id)
        if source_item is None:
            self._logger.error('Source node not found in binder: %s', node_id)
            msg = 'Source node not found in binder'
            raise NodeNotFoundError(msg, str(node_id))

        # Validate target parent exists (if specified)
        if parent_id is not None:
            target_parent = binder.find_by_id(parent_id)
            if target_parent is None:
                self._logger.error('Target parent not found in binder: %s', parent_id)
                msg = 'Target parent not found in binder'
                raise NodeNotFoundError(msg, str(parent_id))

        # Check for circular dependencies (must happen before mutation so a
        # rejected move leaves the binder untouched)
        self._logger.debug('Checking for circular dependencies')
        if MoveNode._would_create_circular_dependency(binder, node_id, parent_id):
            self._logger.error(
                'Circular dependency detected: cannot move %s under %s',
                node_id,
                parent_id,
            )
            msg = 'Move would create circular dependency'
            raise BinderIntegrityError(
                msg,
                str(node_id),
                str(parent_id),
            )

        # Perform the move operation
        self._remove_node_from_current_location(binder, source_item)
        self._add_node_to_new_location(binder, source_item, parent_id, position)

        # Save updated binder
        self._binder_repo.save(binder)

        self._logger.info('Move node operation completed successfully for NodeId: %s', node_id)

    @staticmethod
    def _would_create_circular_dependency(
        binder: Binder,
        node_id: NodeId,
        parent_id: NodeId | None,
    ) -> bool:
        """Check if moving node under parent would create circular dependency.

        Uses ancestor traversal approach: walks up from target parent to see
        if the source node is an ancestor. Moving a node under itself is
        also treated as circular.

        Args:
            binder: Binder instance to check
            node_id: NodeId of node being moved
            parent_id: Target parent NodeId (None means root level)

        Returns:
            True if move would create circular dependency, False otherwise

        """
        # Moving to root level cannot create circular dependency
        if parent_id is None:
            return False

        # BUGFIX: a node cannot be its own parent. The ancestor walk below
        # only inspects ancestors *above* the target parent, so the
        # self-move case must be caught explicitly; previously it slipped
        # through and failed later with a misleading NodeNotFoundError.
        if parent_id == node_id:
            return True

        # Check if source node is an ancestor of target parent
        return MoveNode._is_ancestor(binder, node_id, parent_id)

    @staticmethod
    def _is_ancestor(binder: Binder, potential_ancestor_id: NodeId, descendant_id: NodeId) -> bool:
        """Check if potential_ancestor_id is an ancestor of descendant_id.

        Traverses up the tree from descendant to see if potential_ancestor
        is found in the ancestry chain.

        Args:
            binder: Binder instance to traverse
            potential_ancestor_id: NodeId that might be an ancestor
            descendant_id: NodeId to check ancestry for

        Returns:
            True if potential_ancestor_id is an ancestor of descendant_id

        """
        current_id: NodeId | None = descendant_id

        while current_id is not None:
            # Find parent of current node
            parent_item = MoveNode._find_parent_of_node(binder, current_id)

            if parent_item is None:
                # Reached root level, no more ancestors
                return False

            if parent_item.id == potential_ancestor_id:
                # Found the potential ancestor in ancestry chain
                return True

            # Continue up the tree. NOTE(review): if an ancestor is a
            # placeholder (id is None) the walk stops early -- confirm
            # whether placeholders can have children before relying on this.
            current_id = parent_item.id

        return False  # pragma: no cover

    @staticmethod
    def _find_parent_of_node(binder: Binder, node_id: NodeId) -> BinderItem | None:
        """Find the parent BinderItem of the specified node.

        Args:
            binder: Binder instance to search
            node_id: NodeId to find parent for

        Returns:
            Parent BinderItem or None if node is at root level

        """

        def _search_for_parent(item: BinderItem) -> BinderItem | None:
            """Recursively search for parent of node_id."""
            # Check if any direct child matches the target node_id
            for child in item.children:
                if child.id == node_id:
                    return item

            # Recursively search in children
            for child in item.children:
                result = _search_for_parent(child)
                if result is not None:
                    return result

            return None

        # Search through all root items
        for root_item in binder.roots:
            if root_item.id == node_id:
                # Node is at root level, no parent
                return None

            result = _search_for_parent(root_item)
            if result is not None:
                return result

        return None  # pragma: no cover

    def _remove_node_from_current_location(self, binder: Binder, source_item: BinderItem) -> None:
        """Remove the source node from its current location in the binder.

        Args:
            binder: Binder instance to modify
            source_item: BinderItem to remove

        Raises:
            BinderIntegrityError: If the item has no NodeId (placeholder)

        """
        self._logger.debug('Removing node from current location: %s', source_item.id)

        # Source item must have a valid NodeId to be moved
        if source_item.id is None:
            msg = 'Cannot remove item without NodeId'
            raise BinderIntegrityError(msg, source_item)

        # Find parent and remove from its children list
        parent_item = MoveNode._find_parent_of_node(binder, source_item.id)

        if parent_item is None:
            # Node is at root level
            binder.roots.remove(source_item)
        else:
            # Node is under a parent
            parent_item.children.remove(source_item)

    def _add_node_to_new_location(
        self,
        binder: Binder,
        source_item: BinderItem,
        parent_id: NodeId | None,
        position: int | None,
    ) -> None:
        """Add the source node to its new location in the binder.

        Args:
            binder: Binder instance to modify
            source_item: BinderItem to add
            parent_id: Target parent NodeId (None = root level)
            position: Position for insertion (None = append, out-of-bounds = append)

        Raises:
            NodeNotFoundError: If the target parent cannot be found

        """
        self._logger.debug('Adding node to new location: %s under parent: %s', source_item.id, parent_id)

        if parent_id is None:
            # Add to root level
            target_list = binder.roots
        else:
            # Add under specified parent
            parent_item = binder.find_by_id(parent_id)
            if parent_item is None:
                msg = 'Parent item not found'
                # Consistency: pass the id as str like every other raise here
                raise NodeNotFoundError(msg, str(parent_id))
            target_list = parent_item.children

        # Insert at specified position or append
        if position is None or position >= len(target_list):
            target_list.append(source_item)
        else:
            # Ensure position is not negative (treat as 0)
            position = max(0, position)
            target_list.insert(position, source_item)
class RemoveNode:
    """Use case interactor for removing nodes from the binder structure.

    Detaches a node from the binder hierarchy, promoting its children to
    the removed node's level so they are never orphaned, and optionally
    deletes the node's files. All I/O is delegated to injected ports per
    hexagonal architecture.

    Workflow:
    1. Confirm the node exists in the binder
    2. Splice any children into the node's sibling list at its position
    3. Remove the node from its parent (or the root list)
    4. Delete {id}.md / {id}.notes.md via NodeRepo when delete_files=True
    5. Save the updated binder and log each step with the NodeId

    Args:
        binder_repo: Port for binder persistence operations
        node_repo: Port for node file deletion when delete_files=True
        logger: Port for operational logging and audit trails

    Examples:
        >>> # With dependency injection
        >>> interactor = RemoveNode(
        ...     binder_repo=file_binder_repo,
        ...     node_repo=file_node_repo,
        ...     logger=production_logger,
        ... )
        >>> interactor.execute(node_id=node_id, delete_files=False)

    """

    def __init__(
        self,
        binder_repo: 'BinderRepo',
        node_repo: 'NodeRepo',
        logger: 'Logger',
    ) -> None:
        """Store the injected port implementations.

        Args:
            binder_repo: Port for binder persistence operations
            node_repo: Port for node file deletion when delete_files=True
            logger: Port for operational logging and audit trails

        """
        self._binder_repo = binder_repo
        self._node_repo = node_repo
        self._logger = logger

    def execute(self, node_id: NodeId, *, delete_files: bool = False) -> None:
        """Remove a node from the binder, promoting its children.

        Args:
            node_id: NodeId of the node to remove
            delete_files: If True, delete {id}.md and {id}.notes.md files

        Raises:
            NodeNotFoundError: If node_id doesn't exist in binder
            FileSystemError: If binder or node files cannot be updated

        """
        self._logger.info(
            'Starting node removal for NodeId: %s with delete_files=%s',
            node_id,
            delete_files,
        )

        binder = self._binder_repo.load()
        self._logger.debug('Validating node exists in binder')

        target_item = binder.find_by_id(node_id)
        if target_item is None:
            self._logger.error('Node not found in binder: %s', node_id)
            msg = 'Node not found in binder'
            raise NodeNotFoundError(msg, str(node_id))

        # The parent (None at root level) decides which sibling list both
        # the promotion and the removal operate on.
        parent_item = RemoveNode._find_parent_of_node(binder, node_id)

        if target_item.children:
            self._logger.debug(
                'Promoting %d children of node %s to parent level',
                len(target_item.children),
                node_id,
            )
            self._promote_children_to_parent_level(binder, target_item, parent_item)

        self._remove_node_from_binder(binder, target_item, parent_item)

        if delete_files:
            self._logger.debug('Deleting node files for NodeId: %s', node_id)
            self._node_repo.delete(node_id, delete_files=True)

        self._binder_repo.save(binder)

        self._logger.info(
            'Node removal completed successfully for NodeId: %s (files deleted: %s)',
            node_id,
            delete_files,
        )

    @staticmethod
    def _find_parent_of_node(binder: Binder, node_id: NodeId) -> BinderItem | None:
        """Find the parent BinderItem of the specified node.

        Args:
            binder: Binder instance to search
            node_id: NodeId to find parent for

        Returns:
            Parent BinderItem or None if node is at root level

        """
        # Root-level nodes have no parent.
        for root_item in binder.roots:
            if root_item.id == node_id:
                return None

        # Iterative depth-first walk: the parent is whichever item holds
        # the target in its children list. NodeIds are unique, so the
        # first (and only) match is the answer.
        pending = list(binder.roots)
        while pending:
            item = pending.pop()
            for child in item.children:
                if child.id == node_id:
                    return item
            pending.extend(item.children)

        return None  # pragma: no cover

    def _promote_children_to_parent_level(
        self,
        binder: Binder,
        target_item: BinderItem,
        parent_item: BinderItem | None,
    ) -> None:
        """Splice the target's children into its sibling list at its position.

        Args:
            binder: Binder instance to modify
            target_item: BinderItem being removed
            parent_item: Parent of target item (None if at root level)

        """
        self._logger.debug('Preparing to promote children')
        promoted = list(target_item.children)
        self._logger.debug('Promoting %d children of %s', len(promoted), target_item.id)

        siblings = binder.roots if parent_item is None else parent_item.children
        target_index = siblings.index(target_item)
        # Slice assignment inserts every child at the target's position in
        # order, shifting the target itself after them; the target is
        # removed separately afterwards.
        siblings[target_index:target_index] = promoted

    def _remove_node_from_binder(
        self,
        binder: Binder,
        target_item: BinderItem,
        parent_item: BinderItem | None,
    ) -> None:
        """Detach the target node from its sibling list.

        Args:
            binder: Binder instance to modify
            target_item: BinderItem to remove
            parent_item: Parent of target item (None if at root level)

        """
        self._logger.debug('Removing node from binder structure: %s', target_item.id)

        container = binder.roots if parent_item is None else parent_item.children
        container.remove(target_item)
1048class WriteFreeform:
1049 """Use case interactor for creating timestamped freewrite files.
1051 Creates standalone markdown files with optional titles and UUIDv7 identifiers
1052 outside the binder structure for frictionless writing. This interactor supports
1053 spontaneous idea capture without structural constraints and can launch the
1054 created file in the user's preferred editor.
1056 The freewrite creation process:
1057 1. Generates a unique timestamped filename with UUIDv7 identifier
1058 2. Creates the file with optional title in YAML frontmatter
1059 3. Opens the file in external editor for immediate writing
1060 4. Logs the operation for reference and session tracking
1061 5. Returns the filename for confirmation or further operations
1063 Args:
1064 daily_repo: Port for freewrite file creation and management
1065 editor_port: Port for launching external editor
1066 logger: Port for operational logging and audit trails
1067 clock: Port for timestamp generation
1069 Examples:
1070 >>> # With dependency injection
1071 >>> interactor = WriteFreeform(
1072 ... daily_repo=filesystem_daily_repo,
1073 ... editor_port=system_editor_port,
1074 ... logger=production_logger,
1075 ... clock=system_clock,
1076 ... )
1077 >>> filename = interactor.execute(title='Morning Thoughts')
1078 >>> print(filename)
1079 "20250911T0830_01932c5a-7f3e-7000-8000-000000000001.md"
1081 """
1083 def __init__(
1084 self,
1085 daily_repo: 'DailyRepo',
1086 editor_port: 'EditorPort',
1087 logger: 'Logger',
1088 clock: 'Clock',
1089 ) -> None:
1090 """Initialize WriteFreeform with injected dependencies.
1092 Args:
1093 daily_repo: Port for freewrite file creation and management
1094 editor_port: Port for launching external editor
1095 logger: Port for operational logging and audit trails
1096 clock: Port for timestamp generation
1098 """
1099 self._daily_repo = daily_repo
1100 self._editor_port = editor_port
1101 self._logger = logger
1102 self._clock = clock
1104 def execute(self, title: str | None = None) -> str:
1105 """Execute freewrite creation workflow.
1107 Creates a new timestamped freewrite file with optional title,
1108 opens it in the external editor, and returns the filename for
1109 confirmation. Handles editor launch failures gracefully.
1111 Args:
1112 title: Optional title to include in the file's frontmatter.
1113 If provided, will be added as a 'title' field in the
1114 YAML frontmatter block.
1116 Returns:
1117 The filename of the created freewrite file, following the
1118 format YYYYMMDDTHHMM_<uuid7>.md
1120 Raises:
1121 FileSystemError: If the file cannot be created due to I/O
1122 errors, permission issues, or disk space
1123 constraints (propagated from DailyRepo).
1125 """
1126 # Log start of freewrite creation
1127 if title:
1128 self._logger.info('Starting freewrite creation with title: %s', title)
1129 else:
1130 self._logger.info('Starting freewrite creation without title')
1132 try:
1133 # Create the freewrite file
1134 filename = self._daily_repo.write_freeform(title=title)
1135 self._logger.info('Created freewrite file: %s', filename)
1137 # Attempt to open in editor
1138 try:
1139 self._editor_port.open(filename)
1140 self._logger.debug('Opened freewrite file in editor: %s', filename)
1141 except EditorLaunchError as exc:
1142 # Editor failure shouldn't prevent the freewrite from being created
1143 self._logger.warning('Failed to open freewrite file in editor: %s (file still created)', str(exc))
1144 return filename
1145 else:
1146 return filename
1148 except FileSystemError:
1149 self._logger.exception('Failed to create freewrite file')
1150 raise # Re-raise filesystem errors as they're critical
class ShowStructure:
    """Use case interactor for displaying the hierarchical structure of the binder.

    Provides a read-only view of the binder hierarchy, supporting both full
    structure display and subtree filtering. Formats the tree structure using
    box-drawing characters for console display with proper indentation.

    The structure display process:
    1. Loads the current binder structure from storage
    2. Validates the subtree root node exists when node_id is specified
    3. Filters to subtree or shows full structure based on parameters
    4. Formats the hierarchy using tree drawing characters (├─, └─, │)
    5. Shows placeholders with distinctive visual markers
    6. Returns the formatted string representation for console output
    7. Logs operation details for traceability and debugging

    Placeholders (items without a NodeId) are displayed with a [Placeholder]
    marker to distinguish them from actual nodes.

    Args:
        binder_repo: Port for binder persistence operations
        logger: Port for operational logging and audit trails

    Examples:
        >>> interactor = ShowStructure(
        ...     binder_repo=file_binder_repo,
        ...     logger=production_logger,
        ... )
        >>> # Display full structure
        >>> print(interactor.execute())
        ├─ Part 1
        │ ├─ Chapter 1
        │ │ └─ Section 1.1
        │ └─ Chapter 2
        └─ Part 2
        >>> # Display subtree from specific node
        >>> print(interactor.execute(node_id=part1_id))
        Part 1
        ├─ Chapter 1
        │ └─ Section 1.1
        └─ Chapter 2

    """

    def __init__(
        self,
        binder_repo: 'BinderRepo',
        logger: 'Logger',
    ) -> None:
        """Initialize ShowStructure with injected dependencies.

        Args:
            binder_repo: Port for binder persistence operations
            logger: Port for operational logging and audit trails

        """
        self._binder_repo = binder_repo
        self._logger = logger

    def execute(self, node_id: NodeId | None = None) -> str:
        """Execute structure display workflow.

        Displays the binder hierarchy as a formatted tree. When node_id is
        provided, shows only the subtree rooted at that node; when None,
        shows the complete binder structure.

        Args:
            node_id: Optional NodeId for subtree display (None = full structure)

        Returns:
            Formatted string representation of the tree structure

        Raises:
            NodeNotFoundError: If node_id is specified but doesn't exist in binder
            FileSystemError: If binder cannot be loaded (propagated from ports)

        """
        if node_id is None:
            self._logger.info('Displaying full binder structure')
        else:
            self._logger.info('Displaying subtree structure for NodeId: %s', node_id)

        binder = self._binder_repo.load()

        if node_id is None:
            # Display full structure
            return self._format_full_structure(binder)
        # Display subtree
        return self._format_subtree_structure(binder, node_id)

    def _format_full_structure(self, binder: Binder) -> str:
        """Format the complete binder structure.

        Args:
            binder: Binder instance to format

        Returns:
            Formatted string representation of the full structure, or a
            fixed message when the binder has no roots

        """
        if not binder.roots:
            self._logger.debug('Binder is empty')
            return 'Binder is empty - no nodes to display'

        total_items = self._count_all_items(binder.roots)
        placeholder_count = self._count_placeholders(binder.roots)

        self._logger.debug('Found %d total items in binder', total_items)
        if placeholder_count > 0:
            self._logger.debug('Found %d placeholders in structure', placeholder_count)

        # Multiple roots get tree connectors; a single root is printed bare.
        if len(binder.roots) > 1:
            result = self._format_items_with_root_connectors(binder.roots)
        else:
            result = self._format_items(binder.roots, prefix='')

        self._logger.info('Structure display completed successfully')
        return result

    def _format_subtree_structure(self, binder: Binder, node_id: NodeId) -> str:
        """Format subtree structure starting from the specified node.

        Args:
            binder: Binder instance to search
            node_id: NodeId of subtree root

        Returns:
            Formatted string representation of the subtree

        Raises:
            NodeNotFoundError: If node_id doesn't exist in binder

        """
        target_item = binder.find_by_id(node_id)
        if target_item is None:
            self._logger.error('Node not found for subtree display: %s', node_id)
            msg = 'Node not found for subtree display'
            raise NodeNotFoundError(msg, str(node_id))

        self._logger.debug('Found subtree root: %s', target_item.display_title)

        # The subtree root is printed without a connector (empty prefix, no force).
        result = self._format_single_item(
            target_item,
            prefix='',
            is_last=True,
            show_children=True,
            force_connector=False,
        )

        self._logger.info('Structure display completed successfully')
        return result

    def _format_items(self, items: list[BinderItem], prefix: str) -> str:
        """Format a list of BinderItems with tree structure.

        Args:
            items: List of BinderItems to format
            prefix: Current indentation prefix

        Returns:
            Formatted string representation, one item (and its subtree) per line

        """
        if not items:
            return ''

        lines = []
        for i, item in enumerate(items):
            is_last = i == len(items) - 1
            line = self._format_single_item(item, prefix, is_last=is_last, show_children=True, force_connector=False)
            lines.append(line)

        return '\n'.join(lines)

    def _format_items_with_root_connectors(self, items: list[BinderItem]) -> str:
        """Format root items with tree connectors.

        Args:
            items: List of root BinderItems to format

        Returns:
            Formatted string representation with connectors on every root item

        """
        if not items:
            return ''

        lines = []
        for i, item in enumerate(items):
            is_last = i == len(items) - 1
            # Force connector even at root level
            line = self._format_single_item(item, prefix='', is_last=is_last, show_children=True, force_connector=True)
            lines.append(line)

        return '\n'.join(lines)

    def _format_single_item(
        self,
        item: BinderItem,
        prefix: str,
        *,
        is_last: bool,
        show_children: bool = True,
        force_connector: bool = False,
    ) -> str:
        """Format a single BinderItem with proper tree characters.

        Args:
            item: BinderItem to format
            prefix: Current indentation prefix
            is_last: Whether this is the last sibling
            show_children: Whether to recursively show children
            force_connector: Whether to force a tree connector even at root level

        Returns:
            Formatted string representation of item and its children

        """
        # No connector for a bare root (empty prefix without force); otherwise
        # '└─ ' for the last sibling and '├─ ' for the rest.
        connector = '' if not prefix and not force_connector else '└─ ' if is_last else '├─ '

        # Placeholders (no NodeId) get an explicit visual marker.
        display_title = item.display_title
        if item.id is None:
            display_title = f'{display_title} [Placeholder]'

        line = f'{prefix}{connector}{display_title}'

        if not show_children or not item.children:
            return line

        lines = [line]
        # NOTE(review): these prefix widths look whitespace-collapsed in this copy;
        # confirm they align with the 3-char connectors ('└─ ', '├─ ') in the original.
        child_prefix = prefix + (' ' if is_last else '│ ')

        for i, child in enumerate(item.children):
            child_is_last = i == len(item.children) - 1
            child_line = self._format_single_item(
                child,
                child_prefix,
                is_last=child_is_last,
                show_children=True,
                force_connector=False,
            )
            lines.append(child_line)

        return '\n'.join(lines)

    def _count_all_items(self, items: list[BinderItem]) -> int:
        """Count total number of items in the tree structure.

        Args:
            items: Root list of BinderItems

        Returns:
            Total count of all items including nested children

        """
        count = len(items)
        for item in items:
            count += self._count_all_items(item.children)
        return count

    def _count_placeholders(self, items: list[BinderItem]) -> int:
        """Count placeholder items (items without a NodeId) in the tree structure.

        Args:
            items: Root list of BinderItems

        Returns:
            Count of placeholder items including nested children

        """
        count = sum(1 for item in items if item.id is None)
        for item in items:
            count += self._count_placeholders(item.children)
        return count
class MaterializeNode:
    """Use case interactor for converting binder placeholders into actual nodes.

    Turns a placeholder entry (an item with a display title but no NodeId)
    into a concrete node: a fresh NodeId is generated, node files are
    created, and the binder entry is updated to reference the new node.
    All I/O goes through injected ports, keeping this interactor pure
    business logic.

    Workflow:
        1. Load the binder and look up the placeholder by display title.
        2. If no placeholder matches, raise AlreadyMaterializedError when a
           materialized item with that title exists, otherwise
           PlaceholderNotFoundError.
        3. Generate a unique NodeId and create the node files.
        4. Point the placeholder at the new node and save the binder.

    Args:
        binder_repo: Port for binder persistence operations
        node_repo: Port for node file creation and management
        id_generator: Port for generating unique NodeId values
        logger: Port for operational logging and audit trails

    Examples:
        >>> interactor = MaterializeNode(
        ...     binder_repo=file_binder_repo,
        ...     node_repo=file_node_repo,
        ...     id_generator=uuid_generator,
        ...     logger=production_logger,
        ... )
        >>> node_id = interactor.execute(display_title='Chapter One', synopsis='The beginning')

    """

    def __init__(
        self,
        binder_repo: 'BinderRepo',
        node_repo: 'NodeRepo',
        id_generator: 'IdGenerator',
        logger: 'Logger',
    ) -> None:
        """Store the injected port implementations.

        Args:
            binder_repo: Port for binder persistence operations
            node_repo: Port for node file creation and management
            id_generator: Port for generating unique NodeId values
            logger: Port for operational logging and audit trails

        """
        self._binder_repo = binder_repo
        self._node_repo = node_repo
        self._id_generator = id_generator
        self._logger = logger

    def execute(self, display_title: str, synopsis: str | None) -> NodeId:
        """Materialize the placeholder named *display_title* into a real node.

        Args:
            display_title: Display title of the placeholder to materialize
            synopsis: Optional synopsis/summary for the new node

        Returns:
            NodeId of the newly materialized node

        Raises:
            PlaceholderNotFoundError: If no placeholder with display_title exists
            AlreadyMaterializedError: If an item with display_title already has a NodeId
            FileSystemError: If node files cannot be created (propagated from ports)

        """
        self._logger.info('Starting placeholder materialization for display_title=%s', display_title)

        # Discovery: locate the placeholder in the binder structure.
        binder = self._binder_repo.load()
        placeholder = binder.find_placeholder_by_display_title(display_title)

        if placeholder is None:
            # Distinguish "already materialized" from "not present at all".
            materialized = next(
                (
                    found
                    for root_item in binder.roots
                    if (found := self._find_item_by_title_recursive(root_item, display_title)) is not None
                    and found.id is not None
                ),
                None,
            )
            if materialized is not None:
                self._logger.error('Item with display_title already materialized: %s', display_title)
                msg = 'Item already materialized'
                raise AlreadyMaterializedError(msg, display_title, str(materialized.id))

            self._logger.error('Placeholder not found with display_title: %s', display_title)
            msg = 'Placeholder not found'
            raise PlaceholderNotFoundError(msg, display_title)

        # Defensive: find_placeholder_by_display_title only returns items with id=None.
        if placeholder.id is not None:  # pragma: no cover
            self._logger.error('Item with display_title already materialized: %s', display_title)  # pragma: no cover
            msg = 'Item already materialized'
            raise AlreadyMaterializedError(
                msg,
                display_title,
                str(placeholder.id),
            )  # pragma: no cover

        # Generation: mint a unique identity for the new node.
        node_id = self._id_generator.new()
        self._logger.debug('Generated new NodeId for materialization: %s', node_id)

        # Creation: write node files with proper metadata.
        self._node_repo.create(node_id, display_title, synopsis)
        self._logger.debug('Created node files for materialized NodeId: %s', node_id)

        # Materialization: point the binder entry at the actual node and persist.
        placeholder.node_id = node_id
        self._binder_repo.save(binder)
        self._logger.debug('Updated binder with materialized node: %s', node_id)

        self._logger.info('Placeholder materialization completed successfully for NodeId: %s', node_id)
        return node_id

    def _find_item_by_title_recursive(self, item: BinderItem, target_title: str) -> BinderItem | None:
        """Depth-first search for any item (placeholder or not) with the given title.

        Args:
            item: Current item to check
            target_title: Title to search for

        Returns:
            The BinderItem with matching display title, or None if not found

        """
        if item.display_title == target_title:
            return item

        for child in item.children:
            match = self._find_item_by_title_recursive(child, target_title)
            if match is not None:  # pragma: no branch
                return match

        return None
class AuditBinder:
    """Use case interactor for auditing binder consistency and integrity.

    Validates binder integrity by detecting four issue types:

    - PLACEHOLDER: BinderItem.id is None (display title but no NodeId)
    - MISSING: binder references a NodeId but the file doesn't exist
    - ORPHAN: a node file exists but its NodeId isn't in the binder
    - MISMATCH: a file's frontmatter id differs from its filename NodeId

    The audit loads the binder via BinderRepo, scans existing node files via
    NodeRepo, cross-references the two views, and returns a structured
    AuditReport. All I/O is delegated to the injected ports.

    Args:
        binder_repo: Port for binder persistence operations
        node_repo: Port for node file scanning and validation
        logger: Port for operational logging and audit trails

    Examples:
        >>> interactor = AuditBinder(
        ...     binder_repo=file_binder_repo,
        ...     node_repo=file_node_repo,
        ...     logger=production_logger,
        ... )
        >>> report = interactor.execute()
        >>> if report.is_clean():
        ...     print('✓ No issues found')
        >>> else:
        ...     print(report.format_report())

    """

    def __init__(
        self,
        binder_repo: 'BinderRepo',
        node_repo: 'NodeRepo',
        logger: 'Logger',
    ) -> None:
        """Store the injected port implementations.

        Args:
            binder_repo: Port for binder persistence operations
            node_repo: Port for node file scanning and validation
            logger: Port for operational logging and audit trails

        """
        self._logger = logger
        self._node_repo = node_repo
        self._binder_repo = binder_repo
1651 def execute(self) -> AuditReport:
1652 """Execute binder audit workflow.
1654 Performs comprehensive audit of binder consistency by scanning the
1655 binder structure and cross-referencing with the file system state.
1656 Detects and categorizes all integrity issues.
1658 Returns:
1659 AuditReport containing all discovered issues organized by type
1661 Raises:
1662 BinderNotFoundError: If binder file doesn't exist
1663 FileSystemError: If files cannot be read (propagated from ports)
1665 """
1666 self._logger.info('Starting binder audit')
1668 # Load binder structure
1669 binder = self._binder_repo.load()
1670 self._logger.debug('Loaded binder structure with %d root items', len(binder.roots))
1672 # Initialize report
1673 report = AuditReport()
1675 # Scan for placeholders
1676 self._scan_placeholders(binder, report)
1678 # Get all node IDs referenced in binder
1679 binder_node_ids = binder.get_all_node_ids()
1680 self._logger.debug('Found %d node IDs in binder', len(binder_node_ids))
1682 # Get all existing node files from file system
1683 existing_files = self._get_existing_node_files()
1684 self._logger.debug('Found %d existing node files', len(existing_files))
1686 # Cross-reference binder with file system
1687 self._scan_missing_files(binder_node_ids, existing_files, report)
1688 self._scan_missing_notes_files(binder_node_ids, report)
1689 self._scan_orphaned_files(binder_node_ids, existing_files, report)
1690 self._scan_orphaned_invalid_files(binder_node_ids, report)
1691 self._scan_id_mismatches(existing_files, report)
1693 # Log summary
1694 total_issues = len(report.placeholders) + len(report.missing) + len(report.orphans) + len(report.mismatches)
1695 self._logger.info('Binder audit completed: %d issues found', total_issues)
1697 return report
1699 def _scan_placeholders(self, binder: Binder, report: AuditReport) -> None:
1700 """Scan binder structure for placeholder items.
1702 Args:
1703 binder: Binder instance to scan
1704 report: AuditReport to populate with findings
1706 """
1707 self._logger.debug('Scanning for placeholder items')
1709 def _scan_item_recursive(item: BinderItem, path: list[int]) -> None:
1710 """Recursively scan items and record placeholders."""
1711 if item.id is None:
1712 position = '[' + ']['.join(map(str, path)) + ']'
1713 placeholder_issue = PlaceholderIssue(
1714 display_title=item.display_title,
1715 position=position,
1716 )
1717 report.placeholders.append(placeholder_issue)
1718 self._logger.debug(
1719 'Found placeholder: "%s" at position %s',
1720 item.display_title,
1721 position,
1722 )
1724 # Scan children
1725 for i, child in enumerate(item.children):
1726 child_path = [*path, i]
1727 _scan_item_recursive(child, child_path)
1729 # Scan all root items
1730 for i, root_item in enumerate(binder.roots):
1731 _scan_item_recursive(root_item, [i])
1733 self._logger.debug('Found %d placeholder items', len(report.placeholders))
1735 def _get_existing_node_files(self) -> set[NodeId]:
1736 """Get all existing node files from the file system.
1738 Returns:
1739 Set of NodeIds for files that exist on disk
1741 """
1742 return self._node_repo.get_existing_files()
1744 def _scan_missing_files(
1745 self,
1746 binder_node_ids: set[NodeId],
1747 existing_files: set[NodeId],
1748 report: AuditReport,
1749 ) -> None:
1750 """Scan for node IDs referenced in binder but missing from file system.
1752 Args:
1753 binder_node_ids: Set of NodeIds referenced in binder
1754 existing_files: Set of NodeIds that exist as files
1755 report: AuditReport to populate with findings
1757 """
1758 self._logger.debug('Scanning for missing files')
1760 missing_ids = binder_node_ids - existing_files
1761 for node_id in missing_ids:
1762 missing_issue = MissingIssue(
1763 node_id=node_id,
1764 expected_path=f'{node_id}.md',
1765 )
1766 report.missing.append(missing_issue)
1767 self._logger.debug('Found missing file: %s.md', node_id)
1769 self._logger.debug('Found %d missing files', len(report.missing))
1771 def _scan_missing_notes_files(
1772 self,
1773 binder_node_ids: set[NodeId],
1774 report: AuditReport,
1775 ) -> None:
1776 """Scan for node IDs that are missing their .notes.md files.
1778 Args:
1779 binder_node_ids: Set of NodeIds referenced in binder
1780 report: AuditReport to populate with findings
1782 """
1783 self._logger.debug('Scanning for missing notes files')
1785 for node_id in binder_node_ids:
1786 if not self._node_repo.file_exists(node_id, 'notes'):
1787 missing_issue = MissingIssue(
1788 node_id=node_id,
1789 expected_path=f'{node_id}.notes.md',
1790 )
1791 report.missing.append(missing_issue)
1792 self._logger.debug('Found missing notes file: %s.notes.md', node_id)
1794 notes_missing_count = sum(1 for m in report.missing if m.expected_path.endswith('.notes.md'))
1795 self._logger.debug('Found %d missing notes files', notes_missing_count)
1797 def _scan_orphaned_files(
1798 self,
1799 binder_node_ids: set[NodeId],
1800 existing_files: set[NodeId],
1801 report: AuditReport,
1802 ) -> None:
1803 """Scan for files that exist but aren't referenced in binder.
1805 Args:
1806 binder_node_ids: Set of NodeIds referenced in binder
1807 existing_files: Set of NodeIds that exist as files
1808 report: AuditReport to populate with findings
1810 """
1811 self._logger.debug('Scanning for orphaned files')
1813 orphaned_ids = existing_files - binder_node_ids
1814 for node_id in orphaned_ids:
1815 orphan_issue = OrphanIssue(
1816 node_id=node_id,
1817 file_path=f'{node_id}.md',
1818 )
1819 report.orphans.append(orphan_issue)
1820 self._logger.debug('Found orphaned file: %s.md', node_id)
1822 self._logger.debug('Found %d orphaned files', len(report.orphans))
1824 def _scan_orphaned_invalid_files(
1825 self,
1826 _binder_node_ids: set[NodeId],
1827 report: AuditReport,
1828 ) -> None:
1829 """Scan for files that look like node files but have invalid NodeIds.
1831 Args:
1832 _binder_node_ids: Set of NodeIds referenced in binder (currently unused)
1833 report: AuditReport to populate with findings
1835 """
1836 self._logger.debug('Scanning for orphaned files with invalid NodeIds')
1838 # Get all potential node files, including those with invalid NodeIds
1839 try:
1840 # Scan project directory for .md files that look like node files
1841 project_path = getattr(self._node_repo, 'project_path', None)
1842 if project_path is None:
1843 # For fake implementations, we can't scan the filesystem
1844 return
1846 from pathlib import Path
1848 project_path = Path(project_path)
1850 for md_file in project_path.glob('*.md'):
1851 # Skip system files
1852 if md_file.stem.startswith('_'):
1853 continue
1855 # Skip .notes.md files
1856 if md_file.stem.endswith('.notes'):
1857 continue
1859 # Skip freeform files (pattern: YYYYMMDDTHHMM_<uuid>.md)
1860 import re
1862 if re.match(r'^\d{8}T\d{4}_[0-9a-f-]+$', md_file.stem): 1862 ↛ 1863line 1862 didn't jump to line 1863 because the condition on line 1862 was never true
1863 continue
1865 # Try to create a NodeId from the filename
1866 try:
1867 NodeId(md_file.stem)
1868 # If successful, this is handled by regular orphan scanning
1869 continue
1870 except NodeIdentityError:
1871 # This file has an invalid NodeId but looks like a node file
1872 pass
1874 # Check if this file might be a node file based on content
1875 try:
1876 content = md_file.read_text()
1877 if content.startswith('---') and '\nid:' in content:
1878 # This looks like a node file with frontmatter
1879 # Create a dummy NodeId for reporting purposes
1880 dummy_node_id = NodeId('00000000-0000-7000-8000-000000000000') # UUIDv7 format
1881 orphan_issue = OrphanIssue(
1882 node_id=dummy_node_id,
1883 file_path=md_file.name,
1884 )
1885 report.orphans.append(orphan_issue)
1886 self._logger.debug('Found orphaned file with invalid NodeId: %s', md_file.name)
1887 except (OSError, UnicodeDecodeError): # pragma: no cover
1888 # Couldn't read the file or doesn't look like a node file
1889 self._logger.debug('Could not read file %s, skipping', md_file.name) # pragma: no cover
1890 continue # pragma: no cover
1892 except (OSError, AttributeError) as exc: # pragma: no cover
1893 self._logger.warning('Could not scan for orphaned invalid files: %s', exc) # pragma: no cover
1895 invalid_orphan_count = sum(1 for o in report.orphans if o.file_path != f'{o.node_id}.md')
1896 self._logger.debug('Found %d orphaned files with invalid NodeIds', invalid_orphan_count)
1898 def _scan_id_mismatches(self, existing_files: set[NodeId], report: AuditReport) -> None:
1899 """Scan for files where frontmatter ID doesn't match filename.
1901 Args:
1902 existing_files: Set of NodeIds that exist as files
1903 report: AuditReport to populate with findings
1905 """
1906 self._logger.debug('Scanning for ID mismatches')
1908 for node_id in existing_files:
1909 try:
1910 frontmatter = self._node_repo.read_frontmatter(node_id)
1911 frontmatter_id_str = frontmatter.get('id')
1913 if frontmatter_id_str and frontmatter_id_str != str(node_id):
1914 try:
1915 actual_id = NodeId(frontmatter_id_str)
1916 mismatch_issue = MismatchIssue(
1917 file_path=f'{node_id}.md',
1918 expected_id=node_id,
1919 actual_id=actual_id,
1920 )
1921 report.mismatches.append(mismatch_issue)
1922 self._logger.debug(
1923 'Found ID mismatch in %s.md: expected %s, found %s',
1924 node_id,
1925 node_id,
1926 actual_id,
1927 )
1928 except NodeIdentityError as e:
1929 # Handle invalid frontmatter IDs as mismatches
1930 self._logger.debug('Found invalid frontmatter ID %s: %s', frontmatter_id_str, e)
1931 # Create a dummy NodeId for reporting purposes
1932 dummy_actual_id = NodeId('00000000-0000-7000-8000-000000000001') # UUIDv7 format
1933 mismatch_issue = MismatchIssue(
1934 file_path=f'{node_id}.md (frontmatter id: {frontmatter_id_str})',
1935 expected_id=node_id,
1936 actual_id=dummy_actual_id,
1937 )
1938 report.mismatches.append(mismatch_issue)
1939 self._logger.debug(
1940 'Found ID mismatch in %s.md: expected %s, found invalid %s',
1941 node_id,
1942 node_id,
1943 frontmatter_id_str,
1944 )
1945 except (OSError, KeyError, NodeNotFoundError) as e:
1946 # Log and skip files that can't be read
1947 self._logger.debug('Could not read file for node %s: %s', node_id, e)
1948 continue
1950 self._logger.debug('Found %d ID mismatches', len(report.mismatches))