using NAudio.Lame;
using System.Windows.Media.Imaging;
using static QuikDawEditor.EDITING.AudioMethods;

namespace QuikDawEditor;

public partial class ClipControl
{
    /// <summary>
    /// Peak-normalizes the current clip's audio source: writes a "*_NORMALIZED"
    /// copy of the source file into the project clips directory (reusing it if
    /// it already exists) and replaces the clip on its track with one backed by
    /// that file. An undo action is pushed before anything is modified.
    /// </summary>
    public void NormalizeClip()
    {
        ShowProcessingMessage("Normalizing clip . . .");
        Dispatcher.InvokeAsync(() =>
        {
            Clip normClip = thisClip;
            undoActions.Add(new ClipNormalizeUndoClass(normClip.myTrack.UndoTrackID, GetJsonForClip(normClip), normClip.myTrack.Clips.IndexOf(normClip)));

            string originalClipSourceFileName = EditingProjectClipsDirectory + "\\" + normClip.ClipSourceFileName;
            // Strip any existing "_NORMALIZED" tag first so repeated normalization
            // does not stack suffixes ("x_NORMALIZED_NORMALIZED.wav").
            string newnormName = Path.GetFileNameWithoutExtension(normClip.ClipSourceFileName).Replace("_NORMALIZED", "") + "_NORMALIZED" + Path.GetExtension(normClip.ClipSourceFileName);
            string normalizedClipSourceFileName = EditingProjectClipsDirectory + "\\" + newnormName;

            float max = 0;
            if (File.Exists(normalizedClipSourceFileName))
            {
                MessageBox.Show("Will use already normalized source file: " + newnormName);
            }
            else
            {
                Debug.WriteLine("Normalizing clip...");

                // First pass: scan the whole source for the peak sample magnitude.
                WaveStream reader = Path.GetExtension(originalClipSourceFileName).ToLower() switch
                {
                    ".wav" => new WaveFileReader(originalClipSourceFileName),
                    ".mp3" => new Mp3FileReader(originalClipSourceFileName),
                    _ => null
                };
                if (reader == null)
                {
                    // Previously an unsupported extension fell through to a NullReferenceException.
                    MessageBox.Show("Unsupported audio format: " + Path.GetExtension(originalClipSourceFileName));
                    HideProcessingMessage();
                    return;
                }

                using (reader)
                {
                    // Hoisted out of the loop: the original wrapped the reader in a
                    // new sample provider on every iteration for no benefit.
                    ISampleProvider sampleProvider = reader.ToSampleProvider();
                    float[] buffer = new float[reader.WaveFormat.SampleRate];
                    int read;
                    do
                    {
                        read = sampleProvider.Read(buffer, 0, buffer.Length);
                        for (int n = 0; n < read; n++)
                        {
                            float abs = Math.Abs(buffer[n]);
                            if (abs > max) max = abs;
                        }
                    } while (read > 0);
                }

                // A silent clip (max == 0) cannot be scaled, and an already-hot
                // clip (> 0.95) gains nothing from normalizing.
                if (max == 0 || max > 0.95)
                {
                    MessageBox.Show("No need to normalize this clip - aborting");
                    HideProcessingMessage();
                    return;
                }

                Debug.WriteLine("Doesn't exist: will now create: " + normalizedClipSourceFileName);
                try
                {
                    // Second pass: re-read with gain applied and write the normalized copy.
                    // AudioFileReader.Volume scales the float samples on read.
                    using AudioFileReader afr = new AudioFileReader(originalClipSourceFileName);
                    afr.Volume = 1.0f / max;
                    switch (Path.GetExtension(originalClipSourceFileName).ToLower())
                    {
                        case ".wav":
                            using (WaveFileWriter wfr = new WaveFileWriter(normalizedClipSourceFileName, afr.WaveFormat))
                            {
                                // Stream in chunks; the original allocated the whole file
                                // and assumed a single Read() would fill it, which the
                                // Stream.Read contract does not guarantee.
                                byte[] writeBytes = new byte[65536];
                                int bytesRead;
                                while ((bytesRead = afr.Read(writeBytes, 0, writeBytes.Length)) > 0)
                                    wfr.Write(writeBytes, 0, bytesRead);
                            }
                            break;
                        case ".mp3":
                            using (LameMP3FileWriter writer = new LameMP3FileWriter(normalizedClipSourceFileName, afr.WaveFormat, 128))
                                afr.CopyTo(writer);
                            break;
                    }
                }
                catch (Exception ex)
                {
                    MessageBox.Show("Error normalizing the clip:\n" + ex.Message);
                }
            }

            // Capture everything needed from the old clip BEFORE removing/disposing it
            // (the original read ClipWidthMs / ClipVirtualStartMs after disposal).
            bool ClipWasLooped = normClip.IsLooped;
            bool ClipWasReversed = normClip.IsReversed;
            var prevGainPoints = normClip.GainPoints;
            Track thisTrack = normClip.myTrack;
            int normClipIndex = thisTrack.Clips.IndexOf(normClip);
            double clipWidthMs = normClip.ClipWidthMs;
            double clipVirtualStartMs = normClip.ClipVirtualStartMs;

            thisTrack.RemoveAndDisposeClip(normClip);
            AddNewAudioClipToTrack(newnormName, thisTrack, clipVirtualStartMs, clipWidthMs, ClipWasLooped, ClipWasReversed);
            thisTrack.UpdateMyClipsToOwner();
            // Carry the gain envelope over to the replacement clip and redraw it.
            thisTrack.Clips[normClipIndex].GainPoints = prevGainPoints;
            thisTrack.Clips[normClipIndex].RecreateClipUnitBackgrounds();
            HideProcessingMessage();
        }, System.Windows.Threading.DispatcherPriority.Background);
    }

    BackgroundWorker fusenewClipAudioBW;   // mixdown worker; created fresh per fuse
    Clip fusingClip;                       // clip currently being fused
    string fusingClipFileName;             // full path of the mixed-down output file

    /// <summary>
    /// Renders ("fuses") the current clip to a single new source file and replaces
    /// the clip with one backed by that file. Midi clips are handled by
    /// FuseNewClipMidi; audio clips are mixed down on a BackgroundWorker.
    /// </summary>
    public void FuseToNewClip()
    {
        fusingClip = thisClip;
        switch (fusingClip.clipType)
        {
            case ClipType.Midi:
                FuseNewClipMidi();
                break;
            case ClipType.Audio:
                IsMixingDown = true;
                ShowProcessingMessage("Fusing clip to new clip . . .");
                fusenewClipAudioBW = new BackgroundWorker() { WorkerReportsProgress = true };
                fusenewClipAudioBW.DoWork += FuseNewClipAudioBW_DoWork;
                fusenewClipAudioBW.ProgressChanged += FuseNewClipAudioBW_ProgressChanged;
                fusenewClipAudioBW.RunWorkerCompleted += FuseNewClipAudioBW_RunWorkerCompleted;
                fusenewClipAudioBW.RunWorkerAsync();
                break;
        }
    }

    /// <summary>
    /// Runs on the UI thread after the mixdown finishes: restarts the player
    /// that DoWork stopped, pushes an undo action, and swaps the fused file in
    /// place of the original clip.
    /// </summary>
    private void FuseNewClipAudioBW_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
    {
        // NOTE(review): IsProjectPlaying is set false immediately before
        // AudioPlayerPlay/StartPlayTimer — preserved as-is, presumably the
        // player interprets this as "armed but not transport-playing"; confirm.
        projPlayer.IsProjectPlaying = false;
        projPlayer.AudioPlayerPlay();
        projPlayer.StartPlayTimer();
        HideProcessingMessage();
        IsMixingDown = false;

        double ClipLeftMs = fusingClip.ClipLeftMs;
        undoActions.Add(new ClipFuseUndoClass(fusingClip.myTrack.UndoTrackID, GetJsonForClip(fusingClip), fusingClip.myTrack.Clips.IndexOf(fusingClip)));
        Track thisTrack = fusingClip.myTrack;
        thisTrack.RemoveAndDisposeClip(fusingClip);
        AddNewAudioClipToTrack(Path.GetFileName(fusingClipFileName), thisTrack, ClipLeftMs, -1);
        thisTrack.UpdateMyClipsToOwner();
        editingProject.NeedsSaving = true;
    }

    /// <summary>Relays mixdown progress to the processing overlay; -1 is the worker's error sentinel.</summary>
    private void FuseNewClipAudioBW_ProgressChanged(object sender, ProgressChangedEventArgs e)
    {
        if (e.ProgressPercentage == -1) ShowProcessingMessage("Error creating file");
        else ShowProcessingMessage("Mixing down clip. . .\n" + e.ProgressPercentage.ToString() + "%\n");
    }

    // Rough predicted size of the mp3 output, used only to estimate encode progress.
    int approxMp3FileSize = 0;

    /// <summary>
    /// Background mixdown: steps the project play position across the clip's
    /// span, pulling rendered 16-bit samples from the track's sample provider,
    /// then encodes the collected bytes to mp3 with LAME.
    /// </summary>
    private void FuseNewClipAudioBW_DoWork(object sender, DoWorkEventArgs e)
    {
        projPlayer.AudioPlayerStop();
        projPlayer.StopPlayTimer();

        // Timestamped output name so repeated fuses never collide.
        fusingClipFileName = EditingProjectClipsDirectory + "\\" + Path.GetFileNameWithoutExtension(fusingClip.ClipSourceFileName) + "_MixedDown_" + DateTime.Now.ToString("yyyy-MM-dd HH-mm-ss") + Path.GetExtension(fusingClip.ClipSourceFileName);

        double runningPlayPosMs = fusingClip.ClipLeftMs;
        double endPointMs = fusingClip.ClipRightMs;
        projPlayer.CurrentPlayingPosMS = runningPlayPosMs;
        fusingClip.ResetMe(runningPlayPosMs);
        Debug.WriteLine("Will mixdown clip to: " + fusingClipFileName);

        WaveFormat outWFormat = projPlayer.mmsp.ToWaveProvider16().WaveFormat;
        double bytePosInSavingFile = 0;
        // e.g. 44100 Hz * 2 ch * 2 bytes = 176,400 bytes/sec
        double BytesPerSec = (double)outWFormat.SampleRate * (double)outWFormat.Channels * (double)(outWFormat.BitsPerSample / 8D);
        int waveBytesCount = (int)(fusingClip.ClipWidthMs / 1000D * BytesPerSec);
        Debug.WriteLine("Number of wave bytes = " + waveBytesCount.ToString("N0"));

        double playIncrement = 200;                            // ms of audio pulled per iteration
        int BufferSize = (int)(17640 * playIncrement / 100D);  // bytes per playIncrement at the assumed rate
        byte[] writeBytes = new byte[BufferSize];
        List<byte> SaveWaveBytes = new List<byte>(Math.Max(waveBytesCount, 0));

        projPlayer.IsProjectPlaying = true;
        fusenewClipAudioBW.ReportProgress(0);
        try
        {
            while (runningPlayPosMs < endPointMs)
            {
                int readcount = 0;
                try
                {
                    readcount = fusingClip.myTrack.myTrackSampleProvider.ToWaveProvider16().Read(writeBytes, 0, writeBytes.Length);
                }
                catch (Exception exx)
                {
                    Debug.WriteLine("Error in readingwaveprov16: " + exx.Message);
                }
                if (readcount > 0)
                {
                    // Only keep the bytes actually read; the original appended the
                    // whole buffer, padding short reads with stale data.
                    SaveWaveBytes.AddRange(writeBytes[..readcount]);
                    bytePosInSavingFile += readcount;
                    // Throttle progress reports to occasional iterations.
                    if (bytePosInSavingFile % (readcount * playIncrement * 2) == 0)
                    {
                        int percentDone = (int)Math.Round((double)(bytePosInSavingFile / waveBytesCount) * 100, 0);
                        fusenewClipAudioBW.ReportProgress(percentDone);
                    }
                }
                runningPlayPosMs += playIncrement;
                projPlayer.CurrentPlayingPosMS = runningPlayPosMs;
            }
        }
        catch (Exception ex)
        {
            Debug.WriteLine("\nSending Exception Streaming: " + ex.Message + "\nsendBytesLen=" + writeBytes.Length.ToString());
        }
        Debug.WriteLine("\nRead actual bytes count=" + bytePosInSavingFile.ToString("N0"));

        try
        {
            // Rough size prediction for the encoder progress callback only.
            approxMp3FileSize = (int)(fusingClip.ClipWidthMs / 1000D / 50D * 620000D);
            LameConfig lconf = new LameConfig() { OutputSampleRate = 44100, ABRRateKbps = 96, BitRate = 96 };
            // using-block guarantees the writer is flushed/closed even on failure
            // (the original leaked it if Write threw).
            using (LameMP3FileWriter lame3 = new LameMP3FileWriter(fusingClipFileName, outWFormat, lconf))
            {
                Debug.WriteLine("Approx mp3filesize=" + approxMp3FileSize.ToString());
                lame3.OnProgress += Lame3_OnProgress;
                lame3.MinProgressTime = 500;
                lame3.Write(SaveWaveBytes.ToArray(), 0, SaveWaveBytes.Count);
            }
        }
        catch (Exception ex)
        {
            Debug.WriteLine("Error mixing down:\n" + ex.Message);
            fusenewClipAudioBW.ReportProgress(-1);
        }
    }

    /// <summary>LAME encode-progress callback; currently only logs completion.</summary>
    private void Lame3_OnProgress(object writer, long inputBytes, long outputBytes, bool finished)
    {
        // Estimate only — approxMp3FileSize is a rough prediction, so this can
        // exceed 100%. Kept for the (currently disabled) overlay message.
        int percentDone = (int)((double)outputBytes / (double)approxMp3FileSize * 100D);
        if (finished) Debug.WriteLine("\nDone writing lame file!");
    }

    /// <summary>Full path of the cached waveform image for this clip's source file.</summary>
    string waveImageFile
    {
        get
        {
            Clip thisClip = (Clip)this.DataContext;
            // Path.ChangeExtension replaces only the real extension; the old
            // Replace(".mp3", ".png") missed .wav sources and could match
            // ".mp3" in the middle of a file name.
            return ProjectWaveformImagesDirectory + "\\" + Path.ChangeExtension(thisClip.ClipSourceFileName, ".png");
        }
    }

    /// <summary>Opens the project clips directory in Windows Explorer.</summary>
    private void OpenAudioSourceLocationMenuItem_Click(object sender, RoutedEventArgs e)
    {
        Process.Start("explorer.exe", "\"" + EditingProjectClipsDirectory + "\"");
    }
}