/// <summary>
/// Converts a parsed Ableton Live project into a Song ready for serialization.
/// Builds per-track receive lists from active sends, orders tracks so senders
/// precede receivers, converts devices/automation/MIDI clips per track, then
/// emits final events and automation relative to the song start time.
/// </summary>
/// <param name="project">Parsed Live project to convert.</param>
/// <param name="logger">Sink for warnings about skipped devices.</param>
/// <returns>The converted song.</returns>
/// <exception cref="Exception">Thrown when looping is off and no events were
/// emitted, so the song start/end times cannot be determined.</exception>
public Song Process(LiveProject project, ILog logger)
{
    this.logger = logger;

    var song = new Song();
    song.Tempo = (int)project.Tempo;
    song.SampleRate = 44100;

    var projectLoopEnd = project.LoopStart + project.LoopLength;

    // Invert the send lists into per-track receive lists (inactive sends are
    // dropped). ReceivingChannelIndex is 1-based in the project; receives use
    // a 0-based index.
    trackReceives = new Dictionary<LiveProject.Track, List<Receive>>();
    foreach (var projectTrack in project.Tracks)
    {
        trackReceives.Add(projectTrack, new List<Receive>());
    }
    foreach (var projectTrack in project.Tracks)
    {
        foreach (var send in projectTrack.Sends)
        {
            if (send.IsActive)
            {
                trackReceives[send.ReceivingTrack].Add(
                    new Receive(projectTrack, send.ReceivingChannelIndex - 1, send.Volume));
            }
        }
    }

    // The master track may be unnamed in the project file.
    project.MasterTrack.Name = project.MasterTrack.Name == "" ? "Master" : project.MasterTrack.Name;

    // Depth-first visit from the master track; results land in orderedTracks
    // so that sending tracks are converted before the tracks receiving them.
    visitedTracks = new List<LiveProject.Track>();
    orderedTracks = new List<LiveProject.Track>();
    visitTrack(project.MasterTrack);

    var projectTracksToSongTracks = new Dictionary<LiveProject.Track, Song.Track>();
    var songTrackEvents = new Dictionary<Song.Track, List<Event>>();

    // Overall first/last event times across all tracks; used to derive the
    // song start/end when the project loop is off.
    double? minEventTime = null;
    double? maxEventTime = null;

    foreach (var projectTrack in orderedTracks)
    {
        var track = new Song.Track();
        track.Name = projectTrack.Name;
        track.Volume = (float)projectTrack.Volume;

        // Convert devices: a device maps to a Song.DeviceId by its plugin dll
        // name with ".dll"/".64" stripped. Unknown or bypassed devices are
        // skipped (with a warning), and their automation is dropped with them.
        foreach (var projectDevice in projectTrack.Devices)
        {
            Song.Device device = null;
            Song.DeviceId deviceId;
            if (Enum.TryParse<Song.DeviceId>(
                projectDevice.PluginDll.Replace(".dll", "").Replace(".64", ""), out deviceId))
            {
                device = new Song.Device();
                device.Id = deviceId;
                device.Chunk = projectDevice.RawData != null
                    ? (byte[])projectDevice.RawData.Clone()
                    : new byte[0];
            }
            if (device == null)
            {
                logger.WriteLine("WARNING: Device skipped (unsupported plugin): " + projectDevice.PluginDll);
            }
            else if (projectDevice.Bypass)
            {
                logger.WriteLine("WARNING: Device skipped (bypass enabled): " + projectDevice.PluginDll);
            }
            else
            {
                track.Devices.Add(device);

                // Convert float-parameter automation; negative parameter ids
                // and negative-time events are ignored. Empty automations are
                // not emitted.
                foreach (var floatParameter in projectDevice.FloatParameters)
                {
                    if (floatParameter.Id >= 0)
                    {
                        var automation = new Song.Automation();
                        automation.DeviceIndex = track.Devices.IndexOf(device);
                        automation.ParamId = floatParameter.Id;
                        foreach (var e in floatParameter.Events)
                        {
                            if (e.Time >= 0.0)
                            {
                                var point = new Song.Point();
                                point.TimeStamp = secondsToSamples(e.Time, (double)song.Tempo, (double)song.SampleRate);
                                point.Value = e.Value;
                                automation.Points.Add(point);
                            }
                        }
                        if (automation.Points.Count > 0)
                        {
                            track.Automations.Add(automation);
                        }
                    }
                }
            }
        }

        // Unroll each clip's loop region across the clip's play range,
        // emitting note-on/note-off event pairs clipped to the clip end (and,
        // when the project loop is on, to the project loop region).
        var events = new List<Event>();
        foreach (var midiClip in projectTrack.MidiClips)
        {
            if (!midiClip.IsDisabled)
            {
                var loopLength = midiClip.LoopEnd - midiClip.LoopStart;
                for (var currentTime = midiClip.CurrentStart; currentTime < midiClip.CurrentEnd; currentTime += loopLength)
                {
                    foreach (var keyTrack in midiClip.KeyTracks)
                    {
                        foreach (var note in keyTrack.Notes)
                        {
                            if (note.IsEnabled)
                            {
                                // Map the note's loop-local time into this
                                // unrolled iteration of the clip loop.
                                var startTime = note.Time - (currentTime - midiClip.CurrentStart) - midiClip.LoopStartRelative;
                                while (startTime < 0.0)
                                {
                                    startTime += loopLength;
                                }
                                startTime = currentTime + startTime - midiClip.LoopStart;
                                var endTime = startTime + note.Duration;
                                if ((startTime >= midiClip.CurrentStart && startTime < midiClip.CurrentEnd) &&
                                    (!project.IsLoopOn || (startTime >= project.LoopStart && startTime < projectLoopEnd)))
                                {
                                    // Truncate notes that run past the clip
                                    // end / project loop end.
                                    endTime = Math.Min(endTime, midiClip.CurrentEnd);
                                    if (project.IsLoopOn)
                                    {
                                        endTime = Math.Min(endTime, projectLoopEnd);
                                    }
                                    if (endTime > startTime)
                                    {
                                        var startEvent = new Event();
                                        startEvent.Time = startTime;
                                        startEvent.Type = Song.EventType.NoteOn;
                                        startEvent.Note = (byte)keyTrack.MidiKey;
                                        startEvent.Velocity = (byte)note.Velocity;
                                        events.Add(startEvent);

                                        var endEvent = new Event();
                                        endEvent.Time = endTime;
                                        endEvent.Type = Song.EventType.NoteOff;
                                        endEvent.Note = (byte)keyTrack.MidiKey;
                                        events.Add(endEvent);
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }

        // Sort by time; at equal times, note-offs sort before note-ons so a
        // retriggered key is released before it is struck again.
        events.Sort((a, b) =>
        {
            if (a.Time > b.Time) return 1;
            if (a.Time < b.Time) return -1;
            if (a.Type == Song.EventType.NoteOn && b.Type == Song.EventType.NoteOff) return 1;
            if (a.Type == Song.EventType.NoteOff && b.Type == Song.EventType.NoteOn) return -1;
            return 0;
        });

        foreach (var e in events)
        {
            if (!minEventTime.HasValue || e.Time < minEventTime.Value) minEventTime = e.Time;
            if (!maxEventTime.HasValue || e.Time > maxEventTime.Value) maxEventTime = e.Time;
        }

        projectTracksToSongTracks.Add(projectTrack, track);
        songTrackEvents.Add(track, events);
        song.Tracks.Add(track);
    }

    // Song bounds: the project loop region when looping is on, otherwise the
    // first/last emitted event across all tracks.
    double songStartTime, songEndTime;
    if (project.IsLoopOn)
    {
        songStartTime = project.LoopStart;
        songEndTime = projectLoopEnd;
    }
    else if (minEventTime.HasValue && maxEventTime.HasValue)
    {
        songStartTime = minEventTime.Value;
        songEndTime = maxEventTime.Value;
    }
    else
    {
        throw new Exception("Couldn't find song start/end times");
    }
    song.Length = (songEndTime - songStartTime) * 60.0 / (double)song.Tempo;

    // Emit the final per-track events, rebased so the song starts at zero.
    foreach (var kvp in songTrackEvents)
    {
        var track = kvp.Key;
        var events = kvp.Value;
        int lastTimeStamp = 0;
        foreach (var e in events)
        {
            var songEvent = new Song.Event();
            var time = e.Time - songStartTime;
            // Clamp against the previous timestamp so emitted timestamps are
            // monotonically non-decreasing (and never below zero, since
            // lastTimeStamp starts at 0).
            int timeStamp = Math.Max(secondsToSamples(time, (double)song.Tempo, (double)song.SampleRate), lastTimeStamp);
            // FIX: assign the clamped value. Previously the unclamped sample
            // time was recomputed and assigned here, which made the clamp
            // above dead code and allowed non-monotonic timestamps while
            // lastTimeStamp still advanced as if the clamp had been applied.
            songEvent.TimeStamp = timeStamp;
            songEvent.Type = e.Type;
            songEvent.Note = e.Note;
            songEvent.Velocity = e.Velocity;
            track.Events.Add(songEvent);
            lastTimeStamp = timeStamp;
        }
    }

    // TODO: Clip all of this instead of just offsetting
    // adjust automation start times based on song start
    foreach (var track in song.Tracks)
    {
        foreach (var automation in track.Automations)
        {
            foreach (var point in automation.Points)
            {
                point.TimeStamp -= secondsToSamples(songStartTime, (double)song.Tempo, (double)song.SampleRate);
            }
        }
    }

    // Resolve receives to final track indices; receives from tracks that
    // were not converted (not present in projectTracksToSongTracks) are
    // silently dropped.
    foreach (var kvp in projectTracksToSongTracks)
    {
        foreach (var projectReceive in trackReceives[kvp.Key])
        {
            if (projectTracksToSongTracks.ContainsKey(projectReceive.SendingTrack))
            {
                var receive = new Song.Receive();
                receive.SendingTrackIndex = song.Tracks.IndexOf(projectTracksToSongTracks[projectReceive.SendingTrack]);
                receive.ReceivingChannelIndex = projectReceive.ReceivingChannelIndex;
                receive.Volume = (float)projectReceive.Volume;
                kvp.Value.Receives.Add(receive);
            }
        }
    }

    return song;
}
/// <summary>
/// Parses a gzip-compressed Ableton Live project file (.als) into a
/// LiveProject. Streams the XML once, collecting tracks and transport
/// settings, then resolves each track's audio-output routing string and
/// return-send info into Send entries.
/// </summary>
/// <param name="fileName">Path to the .als project file.</param>
/// <returns>The parsed project.</returns>
/// <exception cref="Exception">Thrown when a track's AudioOutputRouting
/// string is not in a recognized format (the offending string and the inner
/// reason are included in the message).</exception>
public LiveProject Process(string fileName)
{
    project = new LiveProject();
    outputRoutingStrings = new Dictionary<LiveProject.Track, string>();
    returnSendInfos = new Dictionary<LiveProject.Track, List<ReturnSendInfo>>();

    // .als files are gzip-compressed XML; stream-decompress and parse.
    using (var originalStream = new FileInfo(fileName).OpenRead())
    {
        using (var decompressionStream = new GZipStream(originalStream, CompressionMode.Decompress))
        {
            using (reader = XmlReader.Create(decompressionStream))
            {
                while (reader.Read())
                {
                    if (reader.NodeType == XmlNodeType.Element)
                    {
                        switch (reader.Name)
                        {
                            case "Tracks":
                                // Consume the <Tracks> subtree, dispatching each
                                // track element to the appropriate parser.
                                while (reader.Read() && !(reader.NodeType == XmlNodeType.EndElement && reader.Name == "Tracks"))
                                {
                                    if (reader.NodeType == XmlNodeType.Element)
                                    {
                                        switch (reader.Name)
                                        {
                                            case "MidiTrack":
                                            case "AudioTrack":
                                            case "GroupTrack":
                                                parseTrack();
                                                break;
                                            case "ReturnTrack":
                                                parseTrack(false, true);
                                                break;
                                        }
                                    }
                                }
                                break;
                            case "MasterTrack":
                                parseTrack(true);
                                break;
                            case "Transport":
                                parseTransport();
                                break;
                        }
                    }
                }
            }
        }
    }

    // Resolve each track's output-routing string into a Send. Formats handled:
    // "AudioOut/None", "AudioOut/Master", "AudioOut/GroupTrack", and
    // "AudioOut/Track.<id>/<input>". Any parse failure is rethrown below with
    // the original routing string attached.
    foreach (var kvp in outputRoutingStrings)
    {
        try
        {
            var routingString = kvp.Value;
            switch (routingString)
            {
                case "AudioOut/None":
                    // Not routed anywhere; no send.
                    break;
                case "AudioOut/Master":
                    kvp.Key.Sends.Add(new LiveProject.Send(project.MasterTrack, 1, 1.0, kvp.Key.IsSpeakerOn));
                    break;
                default:
                    if (!routingString.StartsWith("AudioOut/"))
                    {
                        throw new Exception("routing string must begin with \"AudioOut/\"");
                    }
                    routingString = routingString.Replace("AudioOut/", "");
                    if (!routingString.StartsWith("Track.") && routingString != "GroupTrack")
                    {
                        throw new Exception("unrecognized routing string format");
                    }
                    string trackId = "";
                    string trackInputId = "";
                    if (routingString == "GroupTrack")
                    {
                        // Routed to the track's enclosing group track.
                        trackId = kvp.Key.TrackGroupId;
                        trackInputId = "";
                    }
                    else
                    {
                        // "Track.<id>/<input>": split into target track id and
                        // input descriptor. NOTE: routingString is mutated here,
                        // and later comparisons use the mutated value.
                        routingString = routingString.Replace("Track.", "");
                        var parts = routingString.Split('/');
                        if (routingString != "GroupTrack" && parts.Length != 2)
                        {
                            throw new Exception("routing string has too many parts");
                        }
                        trackId = parts[0];
                        trackInputId = parts[1];
                    }
                    LiveProject.Track sendTarget = null;
                    int sendTargetChannelIndex = 1;
                    foreach (var track in project.Tracks)
                    {
                        // A track must not route to itself.
                        if (track != kvp.Key && track.Id == trackId)
                        {
                            sendTarget = track;
                            break;
                        }
                    }
                    if (sendTarget == null)
                    {
                        throw new Exception("couldn't find target track");
                    }
                    if (routingString != "GroupTrack" && trackInputId != "TrackIn")
                    {
                        // Input descriptor of the form "DeviceIn.<deviceId>.S1":
                        // routed into a specific device input on the target
                        // track (presumably a sidechain input, given the
                        // channel index 3 below — TODO confirm).
                        if (!trackInputId.StartsWith("DeviceIn."))
                        {
                            throw new Exception("unrecognized track input string");
                        }
                        trackInputId = trackInputId.Replace("DeviceIn.", "");
                        var parts = trackInputId.Split('.');
                        if (parts.Length != 2)
                        {
                            throw new Exception("unrecognized track device input string");
                        }
                        if (parts[1] != "S1")
                        {
                            throw new Exception("unrecognized track device input channel");
                        }
                        var deviceId = parts[0];
                        bool found = false;
                        foreach (var device in sendTarget.Devices)
                        {
                            if (device.Id == deviceId)
                            {
                                found = true;
                                break;
                            }
                        }
                        if (!found)
                        {
                            throw new Exception("couldn't find target track device");
                        }
                        sendTargetChannelIndex = 3;
                    }
                    kvp.Key.Sends.Add(new LiveProject.Send(sendTarget, sendTargetChannelIndex, 1.0, kvp.Key.IsSpeakerOn));
                    break;
            }
        }
        catch (Exception e)
        {
            // Wrap with the original routing string for easier diagnosis.
            throw new Exception("Unrecognized AudioOutputRouting: " + kvp.Value + " (" + e.Message + ")");
        }
    }

    // Turn collected return-send info into sends targeting the return track
    // at the same list position (list index == return track index).
    foreach (var kvp in returnSendInfos)
    {
        foreach (var returnSendInfo in kvp.Value)
        {
            // Cull sends whose value is -inf
            if (returnSendInfo.Volume > returnSendInfo.MinVolume)
            {
                kvp.Key.Sends.Add(new LiveProject.Send(project.ReturnTracks[kvp.Value.IndexOf(returnSendInfo)], 1, returnSendInfo.Volume, returnSendInfo.IsActive));
            }
        }
    }
    return (project);
}