Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
76 changes: 46 additions & 30 deletions crispen/engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -509,6 +509,40 @@ def _categorize_into_stats(stats: RunStats, msg: str) -> None:
# ---------------------------------------------------------------------------


def _record_changes(
    change_source, filepath: str, file_msgs: list, stats: RunStats
) -> None:
    """Collect change messages from *change_source* into *file_msgs*.

    Each message is prefixed with *filepath* before being appended, and
    every message is also tallied into the run-wide *stats* counters.
    """
    for change_msg in change_source.get_changes():
        # Prefix keeps per-file provenance when messages are merged later.
        file_msgs.append(f"{filepath}: {change_msg}")
        _categorize_into_stats(stats, change_msg)


def _check_and_get_dir(result, file_path: str):
if result.abort or not result.new_files:
return None
return Path(file_path).parent


def _write_new_file_with_init(
base_dir: Path,
rel_path: Path,
new_source: str,
_stats: RunStats,
_fl_new_file_final: Dict[str, str],
) -> Path:
new_path = base_dir / rel_path
new_path.parent.mkdir(parents=True, exist_ok=True)
if new_path.parent != base_dir:
init_py = new_path.parent / "__init__.py"
if not init_py.exists():
init_py.write_text("", encoding="utf-8")
new_path.write_text(new_source, encoding="utf-8")
_stats.files_edited.append(str(new_path))
_stats.file_limiter_edits += 1
_fl_new_file_final[str(new_path)] = new_source
return new_path


def run_engine(
changed: Dict[str, List[Tuple[int, int]]],
verbose: bool = True,
Expand Down Expand Up @@ -608,9 +642,7 @@ def run_engine(
)
continue

for msg in transformer.get_changes():
file_msgs.append(f"{filepath}: {msg}")
_categorize_into_stats(_stats, msg)
_record_changes(transformer, filepath, file_msgs, _stats)
_stats.merge(transformer.stats)
current_source = new_source

Expand Down Expand Up @@ -657,9 +689,7 @@ def run_engine(
except SyntaxError: # pragma: no cover
pass
else:
for msg in cu.get_changes():
file_msgs.append(f"{filepath}: {msg}")
_categorize_into_stats(_stats, msg)
_record_changes(cu, filepath, file_msgs, _stats)
current_source = cu_new_source

per_file[filepath] = {
Expand Down Expand Up @@ -832,21 +862,14 @@ def run_engine(
if fl_result.messages:
state["msgs"].extend(fl_result.messages)

if fl_result.abort or not fl_result.new_files:
original_dir = _check_and_get_dir(fl_result, filepath)
if original_dir is None:
continue

original_dir = Path(filepath).parent
for rel_path, new_source in fl_result.new_files.items():
new_path = original_dir / rel_path
new_path.parent.mkdir(parents=True, exist_ok=True)
if new_path.parent != original_dir:
init_py = new_path.parent / "__init__.py"
if not init_py.exists():
init_py.write_text("", encoding="utf-8")
new_path.write_text(new_source, encoding="utf-8")
_stats.files_edited.append(str(new_path))
_stats.file_limiter_edits += 1
_fl_new_file_final[str(new_path)] = new_source
new_path = _write_new_file_with_init(
original_dir, rel_path, new_source, _stats, _fl_new_file_final
)
if (
config.file_limiter_recursive
and len(new_source.splitlines()) > config.max_file_lines
Expand Down Expand Up @@ -898,21 +921,14 @@ def run_engine(

_recursive_msgs.extend(r_result.messages)

if r_result.abort or not r_result.new_files:
r_dir = _check_and_get_dir(r_result, r_path)
if r_dir is None:
continue

r_dir = Path(r_path).parent
for rel_path, new_source in r_result.new_files.items():
new_path = r_dir / rel_path
new_path.parent.mkdir(parents=True, exist_ok=True)
if new_path.parent != r_dir:
init_py = new_path.parent / "__init__.py"
if not init_py.exists():
init_py.write_text("", encoding="utf-8")
new_path.write_text(new_source, encoding="utf-8")
_stats.files_edited.append(str(new_path))
_stats.file_limiter_edits += 1
_fl_new_file_final[str(new_path)] = new_source
new_path = _write_new_file_with_init(
r_dir, rel_path, new_source, _stats, _fl_new_file_final
)
if len(new_source.splitlines()) > config.max_file_lines:
_fl_recursive.append((str(new_path), new_source))

Expand Down
36 changes: 20 additions & 16 deletions crispen/file_limiter/advisor.py
Original file line number Diff line number Diff line change
Expand Up @@ -333,6 +333,15 @@ def _advise_set3(
]


def _build_groups_text(groups, entities):
    """Render one indexed summary line per group for prompt text.

    *entities* is flattened into a name->entity lookup so each group's
    summary can resolve its member entities.
    """
    by_name = {entity.name: entity for entity in entities}
    return "\n".join(
        f" [{idx}]: {_group_summary(group, by_name)}"
        for idx, group in enumerate(groups)
    )


def _propose_files_step(
groups_to_place: List[List[str]],
classified: ClassifiedEntities,
Expand All @@ -353,12 +362,7 @@ def _propose_files_step(
2 * total_lines / max_file_lines) so output files average half the line
limit, keeping well-sized files even when entity sizes are uneven.
"""
entity_map = {e.name: e for e in classified.entities}
group_lines = []
for idx, group in enumerate(groups_to_place):
summary = _group_summary(group, entity_map)
group_lines.append(f" [{idx}]: {summary}")
groups_text = "\n".join(group_lines)
groups_text = _build_groups_text(groups_to_place, classified.entities)

mermaid_text = _build_group_mermaid(groups_to_place, classified)
exclude_section = ""
Expand Down Expand Up @@ -477,12 +481,7 @@ def _assign_placements_chunk(
Groups within *chunk* are numbered 0…N-1 for this call.
Returns ``None`` on failure (LLM error, missing group, or invalid target).
"""
entity_map = {e.name: e for e in classified.entities}
group_lines = []
for idx, group in enumerate(chunk):
summary = _group_summary(group, entity_map)
group_lines.append(f" [{idx}]: {summary}")
groups_text = "\n".join(group_lines)
groups_text = _build_groups_text(chunk, classified.entities)

n_groups = len(chunk)
original_basename = Path(original_path).name
Expand Down Expand Up @@ -847,6 +846,13 @@ def _rename_conflicting_chunk(
return placements


def _extend_or_return(chunk_result, accumulator):
if chunk_result is None:
return None
accumulator.extend(chunk_result)
return accumulator


def _assign_placements(
groups_to_place: List[List[str]],
classified: ClassifiedEntities,
Expand Down Expand Up @@ -926,9 +932,8 @@ def _assign_placements(
)
if chunk_placements is not None:
break
if chunk_placements is None:
if _extend_or_return(chunk_placements, all_placements) is None:
return None
all_placements.extend(chunk_placements)

# Step 3: Refine — merge tiny output files (best-effort).
all_placements = _refine_merge_tiny(
Expand Down Expand Up @@ -1019,9 +1024,8 @@ def resolve_naming_conflicts(
"Please try again with a valid, non-conflicting target filename "
"for every group."
)
if chunk_result is None:
if _extend_or_return(chunk_result, all_renamed) is None:
return None
all_renamed.extend(chunk_result)

# Merge back: replace conflicting slots, leave non-conflicting unchanged.
result = list(placements)
Expand Down
Loading