import Foundation
import AVFoundation
import Combine

// MARK: - Audio Engine

/// Central audio coordinator managing tone generation, WAV playback, recording, and mixing.
class AudioEngine: ObservableObject {

    // MARK: - Properties

    #if os(iOS)
    /// Tokens for NotificationCenter audio-session observers; removed in teardown().
    private var audioSessionObservers: [NSObjectProtocol] = []
    /// Measured round-trip audio latency (input + output) in seconds
    private var measuredLatency: Double = 0.0
    #endif

    /// The AVAudioEngine graph; rebuilt by setup(song:) and torn down in teardown().
    private var engine: AVAudioEngine?
    /// One tone generator per part, keyed by part name.
    private var toneGenerators: [String: ToneGenerator] = [:]
    /// Source node for each tone generator, keyed by part name.
    private var sourceNodes: [String: AVAudioSourceNode] = [:]

    // WAV playback for recorded parts
    /// Active AVAudioPlayerNodes for recorded parts, keyed by part name.
    private var playerNodes: [String: AVAudioPlayerNode] = [:]
    /// Whether playback uses generated tones or recorded WAV files.
    private var audioSource: PlaybackState.AudioSource = .tones
    /// Recorded WAV file locations, keyed by part name.
    private var recordedAudioURLs: [String: URL] = [:]
    /// Measure index at which each part's recording begins, keyed by part name.
    private var recordingStartMeasures: [String: Int] = [:]

    // Metronome
    private var metronomeGenerator: ToneGenerator?
    private var metronomeSourceNode: AVAudioSourceNode?
    private var countInBeats: Double = 0.0           // In quarter-note beats
    private var lastMetronomeBeatIndex: Int = -1
    private var metronomeClicksPerMeasure: Int = 4   // From time sig numerator
    private var metronomeClickInterval: Double = 1.0 // Quarter-note beats between clicks
    private var metronomeCompoundGroupSize: Int = 1   // Group size for strong beat detection

    // Stored playback params (needed for count-in delayed WAV start)
    private var storedPartMode: PlaybackState.PartMode = .all
    private var storedCurrentPartName: String?
    private var storedExcludePartName: String?

    // Metronome during recording
    private var metronomeDuringPlayback: Bool = false
    private var isRecordingActive: Bool = false

    /// ~60Hz timer driving tick(); non-nil iff playback is running.
    private var playbackTimer: Timer?
    private var currentSong: Song?

    // Playback tracking — beat-based, not time-based, so tempo changes work live
    // NOTE(review): playbackStartTime appears never to be assigned or read in this
    // file — confirm it is unused elsewhere before removing.
    private var playbackStartTime: Date?
    /// Beats completed before the most recent tempo change (negative during count-in).
    private var accumulatedBeats: Double = 0.0
    /// Wall-clock anchor for the current tempo segment; nil when stopped.
    private var lastTempoChangeTime: Date?
    private var currentTempo: Int = 120
    /// Flat, beatStart-sorted note timeline for tone-played parts.
    private var scheduledNotes: [(partName: String, note: Note, beatStart: Double, beatEnd: Double)] = []
    private var activeNotes: Set<Int> = []
    private var isLooping: Bool = false
    /// Inclusive measure range currently being played.
    private var playbackRange: (start: Int, end: Int) = (0, 0)
    private var beatsPerMeasure: Double = 4.0
    private var totalBeats: Double = 0.0

    // Callbacks for UI updates
    var onPositionUpdate: ((Int, Double) -> Void)?
    var onPlaybackStopped: (() -> Void)?
    var onRecordingComplete: ((String, URL, Int) -> Void)?  // (partName, wavURL, startMeasure)

    // Recording
    private var inputNode: AVAudioInputNode?
    private var recordingFile: AVAudioFile?
    /// Name of the part currently being recorded; nil when not recording.
    private var recordingPartName: String?
    private var isInCountIn: Bool = false  // Don't write mic data during count-in
    private var recordingReady: Bool = false  // Don't write until playback has started
    private var latencyFramesToSkip: Int = 0  // Frames to skip for I/O latency compensation
    private var latencyFramesSkipped: Int = 0

    /// Published so the UI can reflect engine state.
    @Published var isEngineRunning: Bool = false

    // MARK: - Setup

    /// (Re)build the audio graph for `song`: one tone-generator source node per
    /// part plus a metronome node, all connected to the engine's main mixer,
    /// then start the engine.
    ///
    /// - Parameter song: Song whose parts determine the generator/node maps.
    func setup(song: Song) {
        // Rebuild from scratch so the node maps always match the song's parts.
        if engine != nil { teardown() }

        self.currentSong = song

        #if os(iOS)
        configureAudioSession()
        #endif

        engine = AVAudioEngine()
        guard let engine = engine else { return }

        // Mono float format at the app's standard sample rate. Guard instead of
        // force-unwrapping so a bad sample-rate constant can't crash the app.
        guard let format = AVAudioFormat(standardFormatWithSampleRate: AppConstants.sampleRate, channels: 1) else {
            print("Audio engine failed to create format at \(AppConstants.sampleRate)Hz")
            return
        }

        // One generator + source node per part, mixed into the main mixer.
        for part in song.parts {
            let generator = ToneGenerator()
            toneGenerators[part.name] = generator

            let sourceNode = generator.createSourceNode(format: format)
            sourceNodes[part.name] = sourceNode

            engine.attach(sourceNode)
            engine.connect(sourceNode, to: engine.mainMixerNode, format: format)
        }

        // Metronome generator for count-in clicks
        let metronome = ToneGenerator()
        metronomeGenerator = metronome
        let metroNode = metronome.createSourceNode(format: format)
        metronomeSourceNode = metroNode
        engine.attach(metroNode)
        engine.connect(metroNode, to: engine.mainMixerNode, format: format)

        do {
            try engine.start()
            isEngineRunning = true
        } catch {
            print("Audio engine failed to start: \(error)")
        }

        #if os(iOS)
        setupAudioSessionObservers()
        #endif
    }

    // MARK: - iOS Audio Session

    #if os(iOS)
    /// Configure the shared AVAudioSession for low-latency play-and-record,
    /// capture the reported round-trip latency, and log the active route.
    private func configureAudioSession() {
        let session = AVAudioSession.sharedInstance()
        do {
            // playAndRecord makes mic input and speaker output simultaneously available.
            try session.setCategory(.playAndRecord, options: [
                .defaultToSpeaker,        // Route output to speaker rather than the earpiece
                .allowBluetoothA2DP,      // Permit high-quality Bluetooth headphone output
            ])
            // Ask for a small I/O buffer to keep round-trip latency low.
            try session.setPreferredIOBufferDuration(0.005)
            try session.setActive(true)

            // Remember total round-trip latency for recording compensation.
            measuredLatency = session.inputLatency + session.outputLatency

            let ms: (Double) -> String = { String(format: "%.1f", $0 * 1000) }
            print("[AudioSession] Input latency: \(ms(session.inputLatency))ms")
            print("[AudioSession] Output latency: \(ms(session.outputLatency))ms")
            print("[AudioSession] Buffer duration: \(ms(session.ioBufferDuration))ms")
            print("[AudioSession] Total round-trip: \(ms(measuredLatency))ms")

            // Dump the active route's endpoints for debugging.
            let route = session.currentRoute
            route.inputs.forEach {
                print("[AudioSession] Input: \($0.portName) (\($0.portType.rawValue))")
            }
            route.outputs.forEach {
                print("[AudioSession] Output: \($0.portName) (\($0.portType.rawValue))")
            }
        } catch {
            print("[AudioSession] Failed to configure: \(error)")
        }
    }

    /// Register for route-change and interruption notifications; tokens are
    /// stored in `audioSessionObservers` for removal in teardown().
    private func setupAudioSessionObservers() {
        let center = NotificationCenter.default

        // Route changes: stop playback when the current output device disappears
        // (headphones unplugged, AirPods disconnected, ...).
        audioSessionObservers.append(center.addObserver(
            forName: AVAudioSession.routeChangeNotification,
            object: nil, queue: .main
        ) { [weak self] note in
            let rawReason = note.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt
            guard rawReason == AVAudioSession.RouteChangeReason.oldDeviceUnavailable.rawValue else { return }
            print("[AudioSession] Route changed — old device unavailable, stopping playback")
            self?.stopPlayback()
            DispatchQueue.main.async { self?.onPlaybackStopped?() }
        })

        // Interruptions: phone calls, Siri, etc.
        audioSessionObservers.append(center.addObserver(
            forName: AVAudioSession.interruptionNotification,
            object: nil, queue: .main
        ) { [weak self] note in
            guard let rawType = note.userInfo?[AVAudioSessionInterruptionTypeKey] as? UInt else { return }
            switch rawType {
            case AVAudioSession.InterruptionType.began.rawValue:
                print("[AudioSession] Interrupted — stopping playback")
                self?.stopPlayback()
                DispatchQueue.main.async { self?.onPlaybackStopped?() }
            case AVAudioSession.InterruptionType.ended.rawValue:
                // Re-activate the session once the interruption is over.
                try? AVAudioSession.sharedInstance().setActive(true)
            default:
                break
            }
        })
    }

    /// Unregister every stored audio-session observer and clear the token list.
    private func removeAudioSessionObservers() {
        audioSessionObservers.forEach { NotificationCenter.default.removeObserver($0) }
        audioSessionObservers.removeAll()
    }
    #endif

    /// Stop playback/recording, detach every node attached in setup(), and
    /// release the engine and all per-part state.
    func teardown() {
        #if os(iOS)
        removeAudioSessionObservers()
        #endif

        stopPlayback()
        stopAndRemovePlayerNodes()
        engine?.stop()

        // Detach all nodes we attached before dropping the engine reference.
        sourceNodes.values.forEach { engine?.detach($0) }
        if let metroNode = metronomeSourceNode {
            engine?.detach(metroNode)
        }

        sourceNodes.removeAll()
        toneGenerators.removeAll()
        playerNodes.removeAll()
        metronomeGenerator = nil
        metronomeSourceNode = nil
        engine = nil
        isEngineRunning = false
    }

    // MARK: - Live tempo update

    /// Change the tempo, live if playback is running. Beats already elapsed at
    /// the old tempo are banked into `accumulatedBeats` so position is preserved.
    func updateTempo(_ newTempo: Int) {
        // Not playing: just remember the tempo for the next start.
        guard playbackTimer != nil else {
            currentTempo = newTempo
            return
        }

        // Playing: fold elapsed time at the old tempo into the beat total,
        // then re-anchor the clock at the new tempo.
        let now = Date()
        if let previousChange = lastTempoChangeTime {
            accumulatedBeats += now.timeIntervalSince(previousChange) * Double(currentTempo) / 60.0
        }
        currentTempo = newTempo
        lastTempoChangeTime = now
    }

    // MARK: - Current beat calculation

    /// Current position in quarter-note beats (negative during count-in);
    /// 0.0 when the beat clock has not been anchored.
    private var currentBeat: Double {
        guard let anchor = lastTempoChangeTime else { return 0.0 }
        let beatsSinceAnchor = Date().timeIntervalSince(anchor) * Double(currentTempo) / 60.0
        return accumulatedBeats + beatsSinceAnchor
    }

    // MARK: - Playback

    /// Start (or restart) playback of `song`.
    ///
    /// Tears down any in-flight playback, re-creates the engine if the song
    /// changed, configures the metronome from the time signature, primes the
    /// beat clock (negative during count-in), starts WAV players immediately
    /// when there is no count-in, and launches the ~16ms tick timer.
    ///
    /// - Parameters:
    ///   - tempo: Tempo in BPM; changeable live via `updateTempo(_:)`.
    ///   - partMode: Play all parts or only `currentPartName`.
    ///   - audioSource: Whether recorded WAVs may replace generated tones.
    ///   - recordedAudioURLs: Per-part recorded WAV files.
    ///   - recordingStartMeasures: Measure offset where each recording begins.
    ///   - countInEnabled: Play one measure of metronome clicks before content.
    ///   - excludePartName: Part to silence entirely (e.g. the one being recorded).
    ///   - metronomeDuringPlayback: Keep clicking after the count-in ends.
    ///   - selection: Supplies an optional measure range to restrict playback.
    func startPlayback(
        song: Song,
        tempo: Int,
        partMode: PlaybackState.PartMode,
        currentPartName: String?,
        audioSource: PlaybackState.AudioSource = .tones,
        recordedAudioURLs: [String: URL] = [:],
        recordingStartMeasures: [String: Int] = [:],
        countInEnabled: Bool = false,
        excludePartName: String? = nil,
        metronomeDuringPlayback: Bool = false,
        isLooping: Bool,
        selection: Selection
    ) {
        // Stop any existing playback
        playbackTimer?.invalidate()
        playbackTimer = nil
        activeNotes = []
        lastMetronomeBeatIndex = -1
        wavPlayersStartedAfterCountIn = false
        for (_, generator) in toneGenerators {
            generator.silence()
        }
        metronomeGenerator?.silence()
        stopAndRemovePlayerNodes()

        // Make sure engine is set up
        if !isEngineRunning || currentSong?.title != song.title {
            setup(song: song)
        }

        guard let currentSong = currentSong else { return }

        // Ensure engine is running
        if let engine = engine, !engine.isRunning {
            do {
                try engine.start()
                isEngineRunning = true
            } catch {
                print("Failed to restart audio engine: \(error)")
                return
            }
        }

        // Store playback parameters
        self.currentTempo = tempo
        self.isLooping = isLooping
        self.audioSource = audioSource
        self.recordedAudioURLs = recordedAudioURLs
        self.recordingStartMeasures = recordingStartMeasures
        self.beatsPerMeasure = currentSong.timeSignature.quarterNoteBeatsPerMeasure

        // Store for count-in delayed WAV start
        self.storedPartMode = partMode
        self.storedCurrentPartName = currentPartName
        self.storedExcludePartName = excludePartName
        self.metronomeDuringPlayback = metronomeDuringPlayback

        // Metronome setup based on time signature
        let timeSig = currentSong.timeSignature
        self.metronomeClicksPerMeasure = timeSig.beatsPerMeasure  // e.g., 4 for 4/4, 6 for 6/8
        self.metronomeClickInterval = beatsPerMeasure / Double(metronomeClicksPerMeasure)  // quarter-note beats per click
        // Compound time: group of 3 for 6/8, 9/8, 12/8
        if timeSig.noteValue == 8 && timeSig.beatsPerMeasure % 3 == 0 {
            self.metronomeCompoundGroupSize = 3
        } else {
            self.metronomeCompoundGroupSize = timeSig.beatsPerMeasure  // strong only on beat 0
        }

        // Determine measure range
        if let selRange = selection.measureRange {
            playbackRange = selRange
        } else {
            playbackRange = (0, max(0, currentSong.totalMeasures - 1))
        }

        totalBeats = beatsPerMeasure * Double(playbackRange.end - playbackRange.start + 1)

        // Count-in: one measure of beats before content starts
        self.countInBeats = countInEnabled ? beatsPerMeasure : 0.0
        self.isInCountIn = countInEnabled

        // Determine which parts use WAV vs tones
        let partsUsingWav = partsForWavPlayback(
            song: currentSong, partMode: partMode,
            currentPartName: currentPartName, excludePartName: excludePartName
        )

        // Build note schedule — only for parts that will use tones
        scheduledNotes = buildNoteSchedule(
            song: currentSong,
            partMode: partMode,
            currentPartName: currentPartName,
            range: playbackRange,
            excludeParts: partsUsingWav,
            excludePartName: excludePartName
        )

        // Reset beat tracking — start negative for count-in
        accumulatedBeats = -countInBeats
        lastTempoChangeTime = Date()

        // Signal the recording tap that playback clock is running
        recordingReady = true

        // Start WAV players immediately if no count-in
        // (with count-in, tick() starts them once the beat clock reaches 0)
        if countInBeats == 0 {
            for partName in partsUsingWav {
                startWavPlayer(partName: partName)
            }
        }

        // Start the tick timer
        playbackTimer = Timer.scheduledTimer(withTimeInterval: 0.016, repeats: true) { [weak self] _ in
            self?.tick()
        }
    }

    /// Determine which parts should use WAV playback instead of generated tones.
    ///
    /// A part qualifies when the audio source is `.recorded`, it is in the
    /// active play set, it is not the excluded part, and a recorded WAV exists.
    /// - Returns: Names of the qualifying parts (empty in tones mode).
    private func partsForWavPlayback(
        song: Song,
        partMode: PlaybackState.PartMode,
        currentPartName: String?,
        excludePartName: String? = nil
    ) -> Set<String> {
        // Tones mode never uses recorded audio.
        guard audioSource == .recorded else { return [] }

        let candidates: [Part]
        if partMode == .current, let name = currentPartName {
            candidates = song.parts.filter { $0.name == name }
        } else {
            candidates = song.parts
        }

        let wavNames = candidates
            .map(\.name)
            .filter { $0 != excludePartName && recordedAudioURLs[$0] != nil }
        return Set(wavNames)
    }

    /// Start an AVAudioPlayerNode for a recorded part, seeking to the correct position.
    ///
    /// When the playback range begins after the WAV's start measure, the
    /// corresponding number of frames is skipped so the recording stays aligned
    /// with the beat clock.
    private func startWavPlayer(partName: String) {
        guard let engine = engine,
              let url = recordedAudioURLs[partName] else { return }

        do {
            let file = try AVAudioFile(forReading: url)
            let player = AVAudioPlayerNode()

            engine.attach(player)
            engine.connect(player, to: engine.mainMixerNode, format: file.processingFormat)

            // How far into the WAV does the playback range begin?
            let wavStartMeasure = recordingStartMeasures[partName] ?? 0
            let measuresToSkip = playbackRange.start - wavStartMeasure

            if measuresToSkip > 0 {
                let secondsToSkip = Double(measuresToSkip) * beatsPerMeasure * 60.0 / Double(currentTempo)
                let skipFrames = AVAudioFramePosition(secondsToSkip * file.processingFormat.sampleRate)

                guard skipFrames < file.length else {
                    // Playback begins past the end of this recording — nothing to play.
                    engine.detach(player)
                    return
                }
                let remaining = AVAudioFrameCount(file.length - skipFrames)
                player.scheduleSegment(file, startingFrame: skipFrames, frameCount: remaining, at: nil)
            } else {
                // No frames to skip — schedule the entire file.
                player.scheduleFile(file, at: nil)
            }

            player.play()
            playerNodes[partName] = player
            print("[Playback] Started WAV player for \(partName): \(url.lastPathComponent)")
        } catch {
            print("[Playback] Error loading WAV for \(partName): \(error)")
        }
    }

    /// Stop every active WAV player node, detach it from the engine, and clear the map.
    private func stopAndRemovePlayerNodes() {
        playerNodes.values.forEach { player in
            player.stop()
            engine?.detach(player)
        }
        playerNodes.removeAll()
    }

    private var wavPlayersStartedAfterCountIn = false

    /// Stop playback: kill the tick timer, reset all transport/metronome state,
    /// silence every sound source, tear down WAV players, and finalize any
    /// in-progress recording (notifying the UI if a file was produced).
    func stopPlayback() {
        playbackTimer?.invalidate()
        playbackTimer = nil
        accumulatedBeats = 0.0
        lastTempoChangeTime = nil
        activeNotes = []
        lastMetronomeBeatIndex = -1
        wavPlayersStartedAfterCountIn = false
        isInCountIn = false
        isRecordingActive = false
        recordingReady = false

        // Silence all generators and remove WAV players.
        toneGenerators.values.forEach { $0.silence() }
        metronomeGenerator?.silence()
        stopAndRemovePlayerNodes()

        // Finish any in-progress recording; deliver the file if one was written.
        guard let partName = recordingPartName else {
            _ = stopRecording()
            return
        }
        if let url = stopRecording() {
            let startMeasure = playbackRange.start
            DispatchQueue.main.async { [weak self] in
                self?.onRecordingComplete?(partName, url, startMeasure)
            }
        }
    }

    // MARK: - Recording

    /// Begin recording `partName` from the mic while playing back the rest of
    /// the song for the performer to follow.
    ///
    /// Installs a gated input tap (gates: playback clock started, count-in
    /// finished, latency frames skipped), writes 16-bit mono WAV to a temp
    /// file, then delegates to `startPlayback` for the backing tracks.
    /// The statement order here is load-bearing: the engine must be stopped
    /// before the tap is installed, then restarted before playback begins.
    ///
    /// - Parameters:
    ///   - partName: Part being recorded; also the key for the output file name.
    ///   - monitorRecordingPart: When false, the recorded part is excluded from
    ///     playback so the performer doesn't hear its tones over themselves.
    ///   - metronomeDuringRecording: Forwarded as the playback metronome flag.
    func startRecording(
        partName: String,
        song: Song,
        tempo: Int,
        partMode: PlaybackState.PartMode,
        audioSource: PlaybackState.AudioSource = .tones,
        recordedAudioURLs: [String: URL] = [:],
        recordingStartMeasures: [String: Int] = [:],
        countInEnabled: Bool = false,
        monitorRecordingPart: Bool = false,
        metronomeDuringRecording: Bool = false,
        isLooping: Bool,
        selection: Selection
    ) {
        guard let engine = engine else { return }

        isRecordingActive = true
        recordingPartName = partName
        recordingReady = false  // Don't write until playback clock starts
        latencyFramesSkipped = 0
        print("[Recording] Starting recording for part: \(partName)")

        // CRITICAL: Stop the engine before installing the input tap.
        engine.stop()

        let inputFormat = engine.inputNode.outputFormat(forBus: 0)

        // Calculate how many frames to skip for I/O latency compensation
        #if os(iOS)
        let session = AVAudioSession.sharedInstance()
        var totalLatency = session.inputLatency + session.outputLatency + session.ioBufferDuration

        // Bluetooth A2DP underreports latency — the codec and transmission add
        // significant delay beyond what outputLatency reflects.
        let route = session.currentRoute
        let hasBluetooth = route.outputs.contains { $0.portType == .bluetoothA2DP || $0.portType == .bluetoothHFP || $0.portType == .bluetoothLE }
        if hasBluetooth {
            // Add extra compensation for Bluetooth codec/transmission overhead.
            // Typical A2DP adds 100-200ms on top of the reported outputLatency.
            let extraBT: Double = 0.120
            totalLatency += extraBT
            print("[Recording] Bluetooth output detected — adding \(String(format: "%.0f", extraBT * 1000))ms extra compensation")
        }

        latencyFramesToSkip = Int(totalLatency * inputFormat.sampleRate)
        print("[Recording] Latency compensation: \(String(format: "%.1f", totalLatency * 1000))ms = \(latencyFramesToSkip) frames @ \(inputFormat.sampleRate)Hz")
        #else
        // No AVAudioSession off iOS, so no latency figure to compensate with.
        latencyFramesToSkip = 0
        #endif

        // Record to a uniquely-named temp file; ownership passes to the
        // onRecordingComplete callback when recording stops.
        let tempDir = FileManager.default.temporaryDirectory
        let fileName = "\(partName.lowercased())_\(Int(Date().timeIntervalSince1970)).wav"
        let fileURL = tempDir.appendingPathComponent(fileName)

        do {
            recordingFile = try AVAudioFile(forWriting: fileURL, settings: [
                AVFormatIDKey: kAudioFormatLinearPCM,
                AVSampleRateKey: inputFormat.sampleRate,
                AVNumberOfChannelsKey: 1,
                AVLinearPCMBitDepthKey: 16,
                AVLinearPCMIsFloatKey: false
            ])

            // Install the tap while engine is stopped
            var bufferCount = 0
            engine.inputNode.installTap(onBus: 0, bufferSize: 1024, format: inputFormat) { [weak self] buffer, _ in
                guard let self = self else { return }

                // Gate 1: Don't write until startPlayback() signals we're ready
                if !self.recordingReady { return }

                // Gate 2: Don't write mic data during count-in
                if self.isInCountIn { return }

                // Gate 3: Skip initial frames to compensate for I/O latency
                if self.latencyFramesSkipped < self.latencyFramesToSkip {
                    self.latencyFramesSkipped += Int(buffer.frameLength)
                    if self.latencyFramesSkipped <= self.latencyFramesToSkip {
                        return  // Skip this entire buffer
                    }
                    // Partial skip: we've now passed the threshold, but let
                    // the remainder through (close enough for sub-buffer accuracy)
                }

                // Log the peak level of the first few buffers as a signal-present check.
                bufferCount += 1
                if bufferCount <= 3 {
                    var peak: Float = 0.0
                    if let channelData = buffer.floatChannelData {
                        let frameCount = Int(buffer.frameLength)
                        for i in 0..<frameCount {
                            let sample = abs(channelData[0][i])
                            if sample > peak { peak = sample }
                        }
                    }
                    print("[Recording] Buffer #\(bufferCount): frames=\(buffer.frameLength), peak=\(peak)")
                }
                do {
                    try self.recordingFile?.write(from: buffer)
                } catch {
                    print("[Recording] Error writing audio: \(error)")
                }
            }

            // Restart engine with input+output
            try engine.start()
            isEngineRunning = true

            // Determine which part to exclude from playback
            // If monitorRecordingPart is false, don't play the part being recorded
            let excludePart = monitorRecordingPart ? nil : partName

            // Start playback with full audio source settings
            // NOTE: startPlayback sets recordingReady = true after the beat clock starts,
            // ensuring the tap doesn't write audio before beat tracking begins.
            startPlayback(
                song: song,
                tempo: tempo,
                partMode: partMode,
                currentPartName: partName,
                audioSource: audioSource,
                recordedAudioURLs: recordedAudioURLs,
                recordingStartMeasures: recordingStartMeasures,
                countInEnabled: countInEnabled,
                excludePartName: excludePart,
                metronomeDuringPlayback: metronomeDuringRecording,
                isLooping: isLooping,
                selection: selection
            )

        } catch {
            // File creation failed — clear recording state so stopPlayback()
            // doesn't report a phantom recording.
            print("Error starting recording: \(error)")
            recordingPartName = nil
            recordingFile = nil
        }
    }

    /// Remove the input tap, close out the recording file, and return its URL
    /// (nil if no recording was in progress).
    func stopRecording() -> URL? {
        if recordingFile != nil {
            engine?.inputNode.removeTap(onBus: 0)
        }

        let url = recordingFile?.url
        if let url = url {
            // Log the resulting file size as a quick sanity check.
            let attributes = try? FileManager.default.attributesOfItem(atPath: url.path)
            let fileSize = (attributes?[.size] as? Int) ?? 0
            print("[Recording] Stopped. File: \(url.lastPathComponent), size: \(fileSize) bytes")
        }

        recordingFile = nil
        recordingPartName = nil
        return url
    }

    // MARK: - Export final mix

    /// Render the whole song to a mono 16-bit WAV at `outputURL`.
    ///
    /// Recorded parts are mixed in from their WAV files at the measure where
    /// each recording started; unrecorded parts are synthesized as sine tones
    /// with a short attack/release envelope. The mix is peak-normalized to 0.95
    /// if it would otherwise clip.
    ///
    /// - Parameters:
    ///   - song: Provides parts, time signature, and measure count.
    ///   - tempo: BPM used to convert beats to sample frames.
    ///   - recordedAudioURLs: Per-part WAV files to mix in.
    ///   - recordingStartMeasures: Measure offset where each recording begins.
    ///   - outputURL: Destination file.
    /// - Throws: Errors from reading the recordings or writing the output file.
    func exportMix(
        song: Song,
        tempo: Int,
        recordedAudioURLs: [String: URL],
        recordingStartMeasures: [String: Int],
        to outputURL: URL
    ) throws {
        // Guard empty songs up front: totalMeasures == 0 previously produced an
        // inverted measure range (0 ... -1) and a zero-length buffer, both of
        // which trap.
        guard song.totalMeasures > 0 else {
            print("[Export] Song has no measures — nothing to export")
            return
        }

        // Detect the recording sample rate from the first WAV file
        // (mic typically records at 48000, not 44100)
        var sampleRate = AppConstants.sampleRate
        if let firstURL = recordedAudioURLs.values.first {
            let probe = try AVAudioFile(forReading: firstURL)
            sampleRate = probe.fileFormat.sampleRate
            print("[Export] Using sample rate from recordings: \(sampleRate)")
        }

        let bpm = song.timeSignature.quarterNoteBeatsPerMeasure
        let totalBeats = bpm * Double(song.totalMeasures)
        let totalSeconds = totalBeats * 60.0 / Double(tempo)
        let totalFrames = Int(totalSeconds * sampleRate)
        guard totalFrames > 0 else {
            print("[Export] Computed zero frames — nothing to export")
            return
        }

        var mixBuffer = [Float](repeating: 0.0, count: totalFrames)

        for part in song.parts {
            if let wavURL = recordedAudioURLs[part.name] {
                // Read WAV at its native format (no sample rate conversion needed)
                let audioFile = try AVAudioFile(forReading: wavURL)
                let nativeFormat = audioFile.processingFormat
                guard let buffer = AVAudioPCMBuffer(pcmFormat: nativeFormat, frameCapacity: AVAudioFrameCount(audioFile.length)) else {
                    // Buffer allocation can fail (e.g. zero-length file); skip
                    // the part instead of force-unwrap crashing.
                    print("[Export] Could not allocate buffer for \(part.name) — skipping")
                    continue
                }
                try audioFile.read(into: buffer)

                let wavStartMeasure = recordingStartMeasures[part.name] ?? 0
                let wavStartBeats = Double(wavStartMeasure) * bpm
                let wavStartFrame = Int(wavStartBeats * 60.0 / Double(tempo) * sampleRate)

                // Sum the recording into the mix, clamped to the buffer bounds.
                if let channelData = buffer.floatChannelData {
                    let frameCount = Int(buffer.frameLength)
                    for i in 0..<frameCount {
                        let destIndex = wavStartFrame + i
                        if destIndex >= 0 && destIndex < totalFrames {
                            mixBuffer[destIndex] += channelData[0][i]
                        }
                    }
                }
            } else {
                // Generate tones for unrecorded parts
                let schedule = buildPartNoteSchedule(part: part, range: (0, song.totalMeasures - 1))
                let twoPi = 2.0 * Double.pi

                for entry in schedule {
                    guard !entry.note.isRest, let pitch = entry.note.pitch else { continue }
                    let freq = pitch.frequency
                    // Clamp the note's sample span to the mix buffer. Previously a
                    // note starting past the song end yielded startSample > endSample,
                    // and `startSample..<endSample` traps on an inverted range.
                    let startSample = max(0, Int(entry.beatStart * 60.0 / Double(tempo) * sampleRate))
                    let endSample = min(Int(entry.beatEnd * 60.0 / Double(tempo) * sampleRate), totalFrames)
                    guard startSample < endSample else { continue }

                    let attackSamples = Int(0.01 * sampleRate)
                    let releaseSamples = Int(0.02 * sampleRate)

                    for i in startSample..<endSample {
                        let t = Double(i) / sampleRate
                        var sample = Float(sin(twoPi * freq * t)) * 0.3
                        let notePos = i - startSample
                        let noteLen = endSample - startSample
                        if notePos < attackSamples {
                            // Linear fade-in to avoid a click at note onset
                            sample *= Float(notePos) / Float(attackSamples)
                        } else if notePos > noteLen - releaseSamples {
                            // Linear fade-out to avoid a click at note release
                            sample *= Float(noteLen - notePos) / Float(releaseSamples)
                        }
                        mixBuffer[i] += sample
                    }
                }
            }
        }

        // Normalize so the loudest sample peaks at 0.95 when the mix would clip
        var peak: Float = 0.0
        for sample in mixBuffer { let a = abs(sample); if a > peak { peak = a } }
        if peak > 0.95 {
            let scale = 0.95 / peak
            for i in 0..<mixBuffer.count { mixBuffer[i] *= scale }
        }

        // Write to WAV at the recording's native sample rate
        guard let outputFormat = AVAudioFormat(standardFormatWithSampleRate: sampleRate, channels: 1) else {
            print("[Export] Could not create output format at \(sampleRate)Hz")
            return
        }
        let outputFile = try AVAudioFile(forWriting: outputURL, settings: [
            AVFormatIDKey: kAudioFormatLinearPCM,
            AVSampleRateKey: sampleRate,
            AVNumberOfChannelsKey: 1,
            AVLinearPCMBitDepthKey: 16,
            AVLinearPCMIsFloatKey: false
        ])

        guard let outputBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat, frameCapacity: AVAudioFrameCount(totalFrames)) else {
            print("[Export] Could not allocate output buffer")
            return
        }
        outputBuffer.frameLength = AVAudioFrameCount(totalFrames)
        memcpy(outputBuffer.floatChannelData![0], &mixBuffer, totalFrames * MemoryLayout<Float>.size)
        try outputFile.write(from: outputBuffer)

        print("[Export] Wrote mix to \(outputURL.lastPathComponent), \(totalFrames) frames @ \(sampleRate)Hz, \(totalSeconds)s")
    }

    /// Build a flat (note, beatStart, beatEnd) timeline for one part over the
    /// given inclusive measure range, with beat offsets accumulated from each
    /// note's duration relative to the start of the range.
    ///
    /// - Returns: Notes in playback order; empty for an inverted range.
    private func buildPartNoteSchedule(
        part: Part,
        range: (start: Int, end: Int)
    ) -> [(note: Note, beatStart: Double, beatEnd: Double)] {
        var schedule: [(note: Note, beatStart: Double, beatEnd: Double)] = []

        // An inverted range (end < start) would trap when forming `start...end`.
        guard range.start <= range.end else { return schedule }

        var beatOffset: Double = 0.0
        for measureIndex in range.start...range.end {
            // Skip indices the part doesn't cover (also rejects negative starts,
            // which would previously have subscripted out of bounds).
            guard measureIndex >= 0 && measureIndex < part.measures.count else { continue }
            for note in part.measures[measureIndex].notes {
                let beatEnd = beatOffset + note.beats
                schedule.append((note: note, beatStart: beatOffset, beatEnd: beatEnd))
                beatOffset = beatEnd
            }
        }
        return schedule
    }

    // MARK: - Note scheduling

    /// Build the merged, beatStart-sorted tone schedule for all parts that
    /// should sound during playback.
    ///
    /// - Parameters:
    ///   - partMode: `.current` restricts scheduling to `currentPartName`.
    ///   - range: Inclusive measure range to schedule.
    ///   - excludeParts: Parts already covered by WAV playback.
    ///   - excludePartName: Additional single part to omit (e.g. the one being recorded).
    /// - Returns: Entries sorted by `beatStart`; empty for an inverted range.
    private func buildNoteSchedule(
        song: Song,
        partMode: PlaybackState.PartMode,
        currentPartName: String?,
        range: (start: Int, end: Int),
        excludeParts: Set<String> = [],
        excludePartName: String? = nil
    ) -> [(partName: String, note: Note, beatStart: Double, beatEnd: Double)] {

        var schedule: [(partName: String, note: Note, beatStart: Double, beatEnd: Double)] = []

        // An inverted range (end < start) would trap when forming `start...end`.
        guard range.start <= range.end else { return schedule }

        // Resolve which parts actually get tone playback.
        let partsToPlay: [Part]
        if partMode == .current, let partName = currentPartName {
            partsToPlay = song.parts.filter { $0.name == partName && !excludeParts.contains($0.name) && $0.name != excludePartName }
        } else {
            partsToPlay = song.parts.filter { !excludeParts.contains($0.name) && $0.name != excludePartName }
        }

        for part in partsToPlay {
            var beatOffset: Double = 0.0
            for measureIndex in range.start...range.end {
                // Skip indices the part doesn't cover (also rejects negative starts,
                // which would previously have subscripted out of bounds).
                guard measureIndex >= 0 && measureIndex < part.measures.count else { continue }
                for note in part.measures[measureIndex].notes {
                    let beatEnd = beatOffset + note.beats
                    schedule.append((partName: part.name, note: note, beatStart: beatOffset, beatEnd: beatEnd))
                    beatOffset = beatEnd
                }
            }
        }

        schedule.sort { $0.beatStart < $1.beatStart }
        return schedule
    }

    // MARK: - Playback tick

    /// Per-timer-tick driver for playback.
    ///
    /// Reads `currentBeat` once (negative values mean we are still in the
    /// count-in) and then, in order: plays count-in clicks, flips the
    /// count-in flag, starts deferred WAV players, handles loop/stop at the
    /// end of the range, pushes the UI position, fires the in-playback
    /// metronome, and starts/stops scheduled tone notes.
    ///
    /// Statement order matters: the count-in early-return must precede the
    /// WAV-player start and the end-of-range check, and `isInCountIn` must be
    /// cleared before content processing (the recording tap keys off it).
    private func tick() {
        let beat = currentBeat

        // Count-in phase: metronome clicks only, no content yet.
        if beat < 0 {
            tickCountIn(beat: beat)
            return
        }

        // Transition from count-in to content. Recording tap will now start writing.
        if isInCountIn {
            isInCountIn = false
        }

        // WAV playback was deferred until the count-in finished.
        startWavPlayersIfNeededAfterCountIn()

        // End of the playback range: either rewind (loop) or stop.
        if beat >= totalBeats {
            handlePlaybackEnd()
            return
        }

        // Update UI position (measure index within the range + fractional beat).
        let measureOffset = Int(beat / beatsPerMeasure)
        let beatInMeasure = beat.truncatingRemainder(dividingBy: beatsPerMeasure)
        let beatFraction = beatInMeasure / beatsPerMeasure

        DispatchQueue.main.async { [weak self] in
            guard let self = self else { return }
            self.onPositionUpdate?(self.playbackRange.start + measureOffset, beatFraction)
        }

        // Metronome under content (quieter than count-in).
        if metronomeDuringPlayback {
            tickPlaybackMetronome(beat: beat, measureOffset: measureOffset)
        }

        // Process notes (tones).
        processScheduledNotes(at: beat)
    }

    /// Handles one tick while still inside the count-in (`beat < 0`):
    /// fires at most one click per click interval and pins the UI to the
    /// start of the playback range.
    private func tickCountIn(beat: Double) {
        // beat runs from -countInBeats up to 0; shift into [0, countInBeats).
        let countInBeat = beat + countInBeats
        let clickIndex = Int(floor(countInBeat / metronomeClickInterval))

        // NOTE(review): clicks are capped at metronomeClicksPerMeasure, so a
        // count-in longer than one measure would have its later clicks
        // suppressed — confirm countInBeats never exceeds one measure.
        if clickIndex != lastMetronomeBeatIndex && clickIndex >= 0 && clickIndex < metronomeClicksPerMeasure {
            lastMetronomeBeatIndex = clickIndex
            // Strong beat: first of each compound group (0, 3 for 6/8) or just beat 0 for simple time.
            fireMetronomeClick(
                isStrongBeat: clickIndex % metronomeCompoundGroupSize == 0,
                amplitude: 0.3  // Full volume during count-in
            )
        }

        // During count-in, show the start position.
        DispatchQueue.main.async { [weak self] in
            guard let self = self else { return }
            self.onPositionUpdate?(self.playbackRange.start, 0.0)
        }
    }

    /// Starts WAV players exactly once after a non-zero count-in ends.
    /// No-op when there was no count-in or the players were already started.
    private func startWavPlayersIfNeededAfterCountIn() {
        guard countInBeats > 0, !wavPlayersStartedAfterCountIn else { return }
        wavPlayersStartedAfterCountIn = true
        guard let song = currentSong else { return }
        let partsUsingWav = partsForWavPlayback(
            song: song,
            partMode: storedPartMode,
            currentPartName: storedCurrentPartName,
            excludePartName: storedExcludePartName
        )
        for partName in partsUsingWav {
            startWavPlayer(partName: partName)
        }
    }

    /// Reacts to the playhead reaching `totalBeats`: rewinds all transport
    /// state for another pass when looping, otherwise stops playback and
    /// notifies the owner on the main queue.
    private func handlePlaybackEnd() {
        if isLooping {
            // Rewind to the start (including a fresh count-in) without tearing down the engine.
            accumulatedBeats = -countInBeats
            lastTempoChangeTime = Date()
            activeNotes = []
            lastMetronomeBeatIndex = -1
            wavPlayersStartedAfterCountIn = false
            isInCountIn = (countInBeats > 0)
            for (_, generator) in toneGenerators {
                generator.silence()
            }
            stopAndRemovePlayerNodes()
        } else {
            stopPlayback()
            DispatchQueue.main.async { [weak self] in
                self?.onPlaybackStopped?()
            }
        }
    }

    /// Fires the in-playback metronome click for the current tick, at most
    /// once per click interval. Dedup IDs carry a large offset so they never
    /// collide with the 0-based count-in click indices stored in
    /// `lastMetronomeBeatIndex`.
    private func tickPlaybackMetronome(beat: Double, measureOffset: Int) {
        let beatInMeasureForClick = beat.truncatingRemainder(dividingBy: beatsPerMeasure)
        let clickIndex = Int(floor(beatInMeasureForClick / metronomeClickInterval))
        // Use a large offset so click indices don't collide with count-in indices.
        let uniqueClickId = 10000 + measureOffset * metronomeClicksPerMeasure + clickIndex

        if uniqueClickId != lastMetronomeBeatIndex && clickIndex >= 0 && clickIndex < metronomeClicksPerMeasure {
            lastMetronomeBeatIndex = uniqueClickId
            fireMetronomeClick(
                isStrongBeat: clickIndex % metronomeCompoundGroupSize == 0,
                amplitude: 0.12  // Quieter during content
            )
        }
    }

    /// Triggers a single metronome click: strong beats click at 1200 Hz,
    /// weak beats at 900 Hz, and the tone is released ~40 ms later.
    private func fireMetronomeClick(isStrongBeat: Bool, amplitude: Double) {
        let clickFreq: Double = isStrongBeat ? 1200.0 : 900.0
        metronomeGenerator?.amplitude = amplitude
        metronomeGenerator?.noteOn(frequency: clickFreq)

        DispatchQueue.main.asyncAfter(deadline: .now() + 0.04) { [weak self] in
            self?.metronomeGenerator?.noteOff()
        }
    }

    /// Walks the precomputed note schedule and keeps each part's tone
    /// generator in sync with the playhead: notes whose [beatStart, beatEnd)
    /// window contains `beat` are turned on (rests turn the generator off);
    /// notes leaving the window are released. `activeNotes` holds the set of
    /// schedule indices currently sounding so each transition fires once.
    private func processScheduledNotes(at beat: Double) {
        for (index, entry) in scheduledNotes.enumerated() {
            let isInActiveSet = activeNotes.contains(index)

            if beat >= entry.beatStart && beat < entry.beatEnd {
                if !isInActiveSet {
                    activeNotes.insert(index)
                    if let generator = toneGenerators[entry.partName] {
                        if entry.note.isRest {
                            generator.noteOff()
                        } else {
                            generator.noteOn(frequency: entry.note.frequency)
                        }
                    }
                }
            } else if isInActiveSet {
                activeNotes.remove(index)
                if let generator = toneGenerators[entry.partName] {
                    generator.noteOff()
                }
            }
        }
    }
}
