// Copyright Epic Games, Inc. All Rights Reserved.

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Text.Json;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using EpicGames.Core;
using Microsoft.Extensions.Logging;

namespace UnrealBuildTool
{
	/// <summary>
	/// Aggregates parsed Visual C++ timing information files together into one monolithic breakdown file.
	/// </summary>
	[ToolMode("AggregateParsedTimingInfo", ToolModeOptions.None)]
	class AggregateParsedTimingInfo : ToolMode
	{
		private TimingData? FileTimingData;
		private ConcurrentDictionary<string, int> DecompressedFileSizes = new ConcurrentDictionary<string, int>();
		private ConcurrentDictionary<string, byte[]> CompressedFiles = new ConcurrentDictionary<string, byte[]>();
		private ConcurrentBag<TimingData> AggregateIncludes = new ConcurrentBag<TimingData>();
		private ConcurrentBag<TimingData> AggregateClasses = new ConcurrentBag<TimingData>();
		private ConcurrentBag<TimingData> AggregateFunctions = new ConcurrentBag<TimingData>();
		private object AddFileLock = new object();

		public override Task<int> ExecuteAsync(CommandLineArguments Arguments, ILogger Logger)
		{
			FileReference ManifestFile = Arguments.GetFileReference("-ManifestFile=");
			string[] ParsedFileNames = FileReference.ReadAllLines(ManifestFile);

			// Load all the file timing data and summarize it for the aggregate.
			FileTimingData = new TimingData("Files", TimingDataType.Summary);
			Parallel.ForEach(ParsedFileNames, new ParallelOptions() { MaxDegreeOfParallelism = 4 }, x => ParseTimingDataFile(x, Logger));

			// Create the aggregate summary. Its duration is the total duration of the files in the aggregate.
			string AggregateName = Arguments.GetString("-Name=");
			TimingData AggregateData = new TimingData(AggregateName, TimingDataType.Aggregate);
			AggregateData.AddChild(FileTimingData);

			// Group the includes, classes, and functions by name, sum them up, then add them to the aggregate.
			GroupTimingDataOnName(AggregateData, "Include Timings", AggregateIncludes);
			GroupTimingDataOnName(AggregateData, "Class Timings", AggregateClasses);
			GroupTimingDataOnName(AggregateData, "Function Timings", AggregateFunctions);

			// Write out the aggregate summary.
			string OutputFile = Path.Combine(ManifestFile.Directory.FullName, String.Format("{0}.cta", AggregateName));
			Logger.LogDebug("Writing {OutputFile}", OutputFile);
			using (BinaryWriter Writer = new BinaryWriter(File.Open(OutputFile, FileMode.Create)))
			{
				// Write out the aggregate data.
				Writer.Write(AggregateData);

				// Write the lookup table for the compressed binary blobs.
				int Offset = 0;
				Writer.Write(CompressedFiles.Count);
				foreach (KeyValuePair<string, byte[]> CompressedFile in CompressedFiles)
				{
					Writer.Write(CompressedFile.Key);
					Writer.Write(Offset);
					Writer.Write(CompressedFile.Value.Length);
					Writer.Write(DecompressedFileSizes[CompressedFile.Key]);
					Offset += CompressedFile.Value.Length;
				}

				// Write the compressed binary blobs.
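				// This relies on CompressedFiles enumerating in the same order here as in the
				// lookup table loop above; each recorded offset is relative to the start of this
				// blob section, which begins immediately after the table.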
				foreach (KeyValuePair<string, byte[]> CompressedFile in CompressedFiles)
				{
					Writer.Write(CompressedFile.Value);
				}
			}

			if (Arguments.HasValue("-CompileTimingFile="))
			{
				FileReference CompileTimingFile = Arguments.GetFileReference("-CompileTimingFile=");
				Dictionary<string, double> CompileTimes = new Dictionary<string, double>();
				foreach (KeyValuePair<string, TimingData> TimingData in FileTimingData.Children)
				{
					CompileTimes.Add(JsonWriter.EscapeString(TimingData.Key), TimingData.Value.InclusiveDuration);
				}

				string JsonCompileTimes = JsonSerializer.Serialize(CompileTimes);
				FileReference.WriteAllText(CompileTimingFile, JsonCompileTimes);
			}

			return Task.FromResult(0);
		}

		private void GroupTimingDataOnName(TimingData AggregateData, string TimingName, IEnumerable<TimingData> UngroupedData)
		{
			string GroupName = TimingName;
			TimingData GroupedTimingData = new TimingData(GroupName, TimingDataType.Summary);
			IEnumerable<IGrouping<string, TimingData>> Groups = UngroupedData.GroupBy(i => i.Name).OrderByDescending(g => g.Sum(d => d.ExclusiveDuration)).ToList();
			foreach (IGrouping<string, TimingData> Group in Groups)
			{
				TimingData GroupedData = new TimingData(Group.Key, TimingDataType.None)
				{
					ExclusiveDuration = Group.Sum(d => d.ExclusiveDuration),
					Count = Group.Sum(d => d.Count)
				};
				GroupedTimingData.Children.Add(Group.Key, GroupedData);
			}

			AggregateData.AddChild(GroupedTimingData);
			AggregateData.ExclusiveDuration -= GroupedTimingData.InclusiveDuration;
		}

		private void ParseTimingDataFile(string ParsedFileName, ILogger Logger)
		{
			Logger.LogDebug("Parsing {ParsedFileName}", ParsedFileName);

			// Convert the input file back into summary objects.
			using (BinaryReader Reader = new BinaryReader(File.Open(ParsedFileName, FileMode.Open, FileAccess.Read)))
			{
				TimingData ParsedTimingData = new TimingData(Reader);

				// Compress the raw timing file in the background so it can be embedded in the aggregate.
				Task CompressDataTask = Task.Run(() =>
				{
					Reader.BaseStream.Seek(0, SeekOrigin.Begin);
					using (MemoryStream CompressedMemoryStream = new MemoryStream())
					{
						using (GZipStream CompressionStream = new GZipStream(CompressedMemoryStream, CompressionMode.Compress))
						{
							Reader.BaseStream.CopyTo(CompressionStream);
						}

						CompressedFiles.TryAdd(ParsedTimingData.Name, CompressedMemoryStream.ToArray());
						DecompressedFileSizes.TryAdd(ParsedTimingData.Name, (int)Reader.BaseStream.Length);
					}
				});

				TimingData SummarizedTimingData = new TimingData(ParsedTimingData.Name, TimingDataType.Summary) { Parent = FileTimingData };
				foreach (TimingData Include in ParsedTimingData.Children["IncludeTimings"].Children.Values)
				{
					SummarizedTimingData.AddChild(Include.Clone());
				}

				SummarizedTimingData.ExclusiveDuration += ParsedTimingData.Children["ClassTimings"].Children.Values.Sum(d => d.InclusiveDuration);
				SummarizedTimingData.ExclusiveDuration += ParsedTimingData.Children["FunctionTimings"].Children.Values.Sum(d => d.InclusiveDuration);

				// Gather and update child timing data.
				Task QueueIncludesTask = Task.Run(() =>
				{
					// Flatten the includes for aggregation.
					IEnumerable<TimingData> Includes = ParsedTimingData.Children["IncludeTimings"].Children.Values;
					Dictionary<string, TimingData> FlattenedIncludes = new Dictionary<string, TimingData>();
					FlattenIncludes(FlattenedIncludes, Includes);
					foreach (TimingData Include in FlattenedIncludes.Values)
					{
						AggregateIncludes.Add(Include);
					}
				});

				Task QueueClassesTask = Task.Run(() =>
				{
					// Collapse templates into single entries.
					IEnumerable<TimingData> CollapsedClasses = GroupChildren(ParsedTimingData.Children["ClassTimings"].Children.Values, TimingDataType.Class);
					foreach (TimingData Class in CollapsedClasses)
					{
						AggregateClasses.Add(new TimingData(Class.Name, TimingDataType.Class) { ExclusiveDuration = Class.InclusiveDuration });
					}
				});

				Task QueueFunctionsTask = Task.Run(() =>
				{
					// Collapse templates into single entries.
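					// e.g. hypothetical instantiations "Foo<int>" and "Foo<double>" both fold
					// into a single "Foo<...>" entry via GroupChildren below.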
					IEnumerable<TimingData> CollapsedFunctions = GroupChildren(ParsedTimingData.Children["FunctionTimings"].Children.Values, TimingDataType.Function);
					foreach (TimingData Function in CollapsedFunctions)
					{
						AggregateFunctions.Add(new TimingData(Function.Name, TimingDataType.Function) { ExclusiveDuration = Function.InclusiveDuration });
					}
				});

				// Wait for all of the worker tasks to finish before adding this file to the aggregate.
				while (!CompressDataTask.IsCompleted || !QueueIncludesTask.IsCompleted || !QueueClassesTask.IsCompleted || !QueueFunctionsTask.IsCompleted)
				{
					Thread.Sleep(TimeSpan.FromMilliseconds(50));
				}

				lock (AddFileLock)
				{
					FileTimingData!.AddChild(SummarizedTimingData);
				}
			}
		}

		private int HeaderFilesFound = 0;
		private object CounterLock = new object();

		private void FlattenIncludes(Dictionary<string, TimingData> Includes, IEnumerable<TimingData> IncludeData)
		{
			foreach (TimingData Include in IncludeData)
			{
				TimingData? AggregatedInclude;
				if (Includes.TryGetValue(Include.Name, out AggregatedInclude))
				{
					AggregatedInclude.ExclusiveDuration += Include.ExclusiveDuration;
					AggregatedInclude.Count += 1;
				}
				else
				{
					if (Include.Name.EndsWith(".h"))
					{
						lock (CounterLock)
						{
							HeaderFilesFound += 1;
						}
					}

					Includes.Add(Include.Name, new TimingData(Include.Name, TimingDataType.Include) { ExclusiveDuration = Include.ExclusiveDuration });
				}

				FlattenIncludes(Includes, Include.Children.Values);
			}
		}

		private IEnumerable<TimingData> GroupChildren(IEnumerable<TimingData> Children, TimingDataType Type)
		{
			List<TimingData> GroupedChildren = new List<TimingData>();
			Dictionary<string, List<TimingData>> ChildGroups = new Dictionary<string, List<TimingData>>();
			foreach (TimingData Child in Children)
			{
				// See if this is a templated class. If not, add it as is.
				Match Match = Regex.Match(Child.Name, @"^([^<]*)(?<Template><.*>)(.*)$");
				if (!Match.Success)
				{
					GroupedChildren.Add(Child);
					continue;
				}

				// Build the group name by replacing the template arguments with one placeholder
				// per parameter, e.g. "TArray<FString,FDefaultAllocator>" -> "TArray<..., ...>".
				int ParamCount = Match.Groups["Template"].Value.Count(c => c == ',') + 1;
				string Placeholder = String.Join(", ", Enumerable.Repeat("...", ParamCount));
				string GroupName = Child.Name.Replace(Match.Groups["Template"].Value, $"<{Placeholder}>");

				List<TimingData>? Group;
				if (!ChildGroups.TryGetValue(GroupName, out Group))
				{
					Group = new List<TimingData>();
					ChildGroups.Add(GroupName, Group);
				}
				Group.Add(Child);
			}

			// Emit one entry per group. Single-element groups keep their original data; larger
			// groups are summed into a new entry under the collapsed template name.
			foreach (KeyValuePair<string, List<TimingData>> Group in ChildGroups)
			{
				if (Group.Value.Count == 1)
				{
					GroupedChildren.Add(Group.Value[0]);
					continue;
				}

				TimingData GroupedData = new TimingData(Group.Key, Type)
				{
					ExclusiveDuration = Group.Value.Sum(c => c.InclusiveDuration),
					Count = Group.Value.Sum(c => c.Count)
				};
				GroupedChildren.Add(GroupedData);
			}

			return GroupedChildren;
		}
	}
}
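
// Illustrative invocation of this tool mode (file names below are hypothetical; the argument
// names come from ExecuteAsync above, and -Mode is UnrealBuildTool's standard mode dispatch):
//
//   UnrealBuildTool -Mode=AggregateParsedTimingInfo -ManifestFile=TimingManifest.txt -Name=MyModule
//
// This writes MyModule.cta next to the manifest; adding -CompileTimingFile=MyModuleTimes.json
// also emits each file's inclusive compile time as JSON.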