Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 9 additions & 5 deletions .github/workflows/build-nuitka.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@ name: Build (Nuitka)
on:
workflow_dispatch:

env:
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: true

jobs:
build:
name: ${{ matrix.name }}
Expand Down Expand Up @@ -31,13 +34,13 @@ jobs:
suffix: win-arm64

steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5

- name: Install uv
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v7

- name: Set up Python
uses: actions/setup-python@v5
uses: actions/setup-python@v6
with:
python-version-file: "pyproject.toml"

Expand All @@ -50,7 +53,7 @@ jobs:
run: brew install ccache

- name: Cache Nuitka & CCache
uses: actions/cache@v4
uses: actions/cache@v5
with:
path: |
~/.cache/Nuitka
Expand Down Expand Up @@ -169,11 +172,12 @@ jobs:
# Upload
# -----------------------------------------------------------------------
- name: Upload Artifacts
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v6
with:
name: sessionprep-${{ matrix.suffix }}
path: |
dist_nuitka/sessionprep-*.*
dist_nuitka/sessionprep_*.*
!dist_nuitka/sessionprep-*.build
!dist_nuitka/sessionprep-*.build/**
!dist_nuitka/sessionprep-*.dist
Expand Down
15 changes: 9 additions & 6 deletions .github/workflows/build-pyinstaller.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@ name: Build (PyInstaller)
on:
workflow_dispatch:

env:
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: true

jobs:
build:
name: Build on ${{ matrix.os }}
Expand All @@ -13,13 +16,13 @@ jobs:
os: [ubuntu-latest, windows-latest, macos-latest, macos-15-intel]

steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v5

- name: Install uv
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v7

- name: Set up Python
uses: actions/setup-python@v5
uses: actions/setup-python@v6
with:
python-version-file: "pyproject.toml"

Expand Down Expand Up @@ -101,7 +104,7 @@ jobs:

- name: Upload Artifacts (Windows)
if: runner.os == 'Windows'
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v6
with:
name: sessionprep-${{ matrix.os }}
path: |
Expand All @@ -112,15 +115,15 @@ jobs:

- name: Upload Artifacts (macOS)
if: runner.os == 'macOS'
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v6
with:
name: sessionprep-${{ matrix.os }}
path: dist_pyinstaller/*.dmg
if-no-files-found: error

- name: Upload Artifacts (Linux)
if: runner.os == 'Linux'
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v6
with:
name: sessionprep-${{ matrix.os }}
path: |
Expand Down
88 changes: 82 additions & 6 deletions DEVELOPMENT.md

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,7 @@ SessionPrep operates in three phases:

| Phase | Name | What happens | When |
|-------|------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------------|
| **1** | Track Layout | Define how source tracks map to output files: channel routing, reordering, splitting, merging. Automatically optimizes layouts based on Phase 1 diagnostics (e.g. dropping silent files, extracting active channels from dual-mono and one-sided silence). Drag-and-drop between input/output trees with visual insert-position indicator. Optional recursive subfolder scanning. Output written to `sp_01_tracklayout/`. | GUI Phase 1 (always available) |
| **1** | Track Layout | Define how source tracks map to output files: channel routing, reordering, splitting, merging. Automatically optimizes layouts based on Phase 1 diagnostics (e.g. dropping silent files, extracting active channels from dual-mono and one-sided silence). Drag-and-drop between input/output trees with visual insert-position indicator. Optional recursive subfolder scanning. Output written to `sp_01_tracklayout/`. Pre-computed peak cache files written to `sp_peaks/` for fast waveform rendering. | GUI Phase 1 (always available) |
| **2** | Analysis & Preparation | Format checks, clipping, DC offset, stereo compatibility, silence, subsonic, peak/RMS measurement, classification, tail exceedance. Bimodal normalization (clip gain adjustment) via Prepare. Output written to `sp_02_prepared/`. | GUI Phase 2 / CLI |
| **3** | DAW Transfer | Transfer tracks into DAW session with per-track naming and folder assignment. Duplicate entries for multi-track scenarios (same clip on different tracks). Fader offsets applied automatically (Pro Tools via PTSL, DAWproject via file generation). Support for unattended batch processing of multiple songs. | GUI Phase 3 |

Expand Down
114 changes: 114 additions & 0 deletions sessionprepgui/analysis/mixin.py
Original file line number Diff line number Diff line change
Expand Up @@ -220,6 +220,8 @@ def _clear_workspace(self):
self._cancel_worker("_worker")
self._cancel_worker("_batch_reanalyze_worker")
self._cancel_worker("_prepare_worker")
self._cancel_worker("_peak_build_worker")
self._peak_cache = {}
self._session = None
self._summary = None
self._current_track = None
Expand Down Expand Up @@ -302,6 +304,7 @@ def _load_directory(self, path: str):
skip_folders = {
app_cfg.get("phase1_output_folder", "sp_01_tracklayout"),
app_cfg.get("phase2_output_folder", "sp_02_prepared"),
app_cfg.get("peak_cache_folder", "sp_peaks"),
}
wav_files = discover_audio_files(
path, recursive=self._recursive_scan,
Expand Down Expand Up @@ -385,6 +388,9 @@ def _on_phase1_done(self, session):
self._save_session_action.setEnabled(True)
self.setWindowTitle("SessionPrep")

# Eagerly build peak cache for all source tracks in the background
self._start_peak_build(session.tracks, self._source_dir)

@Slot()
def _on_save_session(self):
"""Save the current session state to a .spsession file."""
Expand Down Expand Up @@ -489,6 +495,7 @@ def _restore_session_state(self, data: dict):
skip_folders = {
app_cfg.get("phase1_output_folder", "sp_01_tracklayout"),
app_cfg.get("phase2_output_folder", "sp_02_prepared"),
app_cfg.get("peak_cache_folder", "sp_peaks"),
}
source_tracks = []
for rel in discover_audio_files(
Expand Down Expand Up @@ -696,6 +703,13 @@ def _restore_session_state(self, data: dict):
)
self.setWindowTitle("SessionPrep")

# Eagerly build peak cache for source tracks (Phase 1 waveforms)
if source_dir and source_tracks:
self._start_peak_build(source_tracks, source_dir)
# And for analysis tracks (Phase 2 waveforms) if topology was applied
if self._topology_dir and tracks:
self._start_peak_build(tracks, self._topology_dir)

# ── Analyze ──────────────────────────────────────────────────────────

@Slot()
Expand Down Expand Up @@ -980,6 +994,106 @@ def _on_analyze_done(self, session, summary):
f"Analysis complete: {ok_count}/{len(session.tracks)} tracks OK"
)

# Eagerly build peak cache for Phase 2 tracks
analyze_dir = self._topology_dir or self._source_dir
if analyze_dir:
self._start_peak_build(session.tracks, analyze_dir)

# ── Peak cache ────────────────────────────────────────────────────────

def _start_peak_build(self, tracks, base_dir: str):
    """Kick off a background PeakBuildWorker for *tracks* under *base_dir*.

    Valid ``.peaks`` files already present on disk are loaded straight
    into the in-memory cache; only missing or stale entries are handed
    to the worker thread for (re)building.
    """
    from ..waveform.compute import PeakBuildWorker
    from ..waveform.peakcache import (
        peaks_path_for, load_peaks, get_source_mtime,
    )

    folder_name = self._config.get("app", {}).get(
        "peak_cache_folder", "sp_peaks")
    peaks_dir = os.path.join(base_dir, folder_name)

    if not hasattr(self, "_peak_cache"):
        self._peak_cache = {}

    pending = []
    for track in tracks:
        source_path = getattr(track, "filepath", None) or os.path.join(
            base_dir, track.filename)
        if not os.path.isfile(source_path):
            continue
        peaks_file = peaks_path_for(peaks_dir, track.filename)
        # Reuse an on-disk .peaks file when it matches the source mtime.
        cached = load_peaks(
            peaks_file, expected_mtime=get_source_mtime(source_path))
        if cached is None:
            pending.append((source_path, track.filename, peaks_file))
        else:
            self._peak_cache[track.filename] = cached

    if not pending:
        log.info("Peak cache: all %d files already cached", len(tracks))
        return

    log.info(
        "Peak cache: %d cached from disk, %d to build",
        len(tracks) - len(pending), len(pending),
    )

    # Replace any build already in flight, then wire up the new worker.
    self._cancel_worker("_peak_build_worker")
    worker = PeakBuildWorker(pending)
    worker.file_done.connect(self._on_peak_file_done)
    worker.progress.connect(self._status_bar.showMessage)
    worker.all_done.connect(
        lambda: log.info(
            "Peak cache build complete (%d files)", len(pending)))
    self._peak_build_worker = worker
    worker.start()

@Slot(str, object)
def _on_peak_file_done(self, filename: str, peak_data):
    """Cache a newly built PeakData in memory, and push to UI if active.

    Connected to PeakBuildWorker.file_done (see ``_start_peak_build``).
    NOTE(review): presumably delivered on the GUI thread via a queued
    signal/slot connection — confirm before touching widgets elsewhere.

    Args:
        filename: Track filename the peaks belong to (cache key).
        peak_data: Freshly computed peak data for that file.
    """
    if not hasattr(self, "_peak_cache"):
        self._peak_cache = {}
    self._peak_cache[filename] = peak_data

    self._push_peak_to_topo_preview(filename, peak_data)
    self._push_peak_to_analysis_preview(filename, peak_data)

def _push_peak_to_topo_preview(self, filename: str, peak_data):
    """Update the Phase 1 topology waveform preview if it shows *filename*."""
    if getattr(self, "_topo_wf_filename", None) != filename:
        return
    wf = getattr(self, "_topo_wf_panel", None)
    if not (wf and hasattr(wf, "waveform")):
        return
    wf.waveform.set_peak_data(peak_data)
    # If the widget is still in its loading placeholder state, switch it
    # straight to preview mode using the track's known geometry.
    if getattr(wf.waveform, "_loading", False):
        track = None
        if hasattr(self, "_topo_track_map"):
            track = self._topo_track_map().get(filename)
        if track:
            wf.waveform.set_preview_mode(
                track.channels, track.total_samples,
                track.samplerate, peak_data
            )

def _push_peak_to_analysis_preview(self, filename: str, peak_data):
    """Update the Phase 2 analysis waveform preview if it shows *filename*."""
    cur_track = getattr(self, "_current_track", None)
    if not (cur_track and cur_track.filename == filename):
        return
    wf = getattr(self, "_waveform", None)
    if not wf:
        return
    wf.set_peak_data(peak_data)
    if getattr(wf, "_loading", False):
        wf.set_preview_mode(
            cur_track.channels, cur_track.total_samples,
            cur_track.samplerate, peak_data
        )

def _prioritize_peak(self, filename: str):
"""If a peak build is in progress, move *filename* to the front."""
worker = getattr(self, "_peak_build_worker", None)
if worker is not None and worker.isRunning():
worker.prioritize(filename)

@Slot(str)
def _on_analyze_error(self, message: str):
self._analyze_action.setEnabled(True)
Expand Down
Loading