diff --git a/BulkLoadEx/BulkLoadRowset.cs b/BulkLoadEx/BulkLoadRowset.cs
index 32ddb3ec..2bb204ba 100644
--- a/BulkLoadEx/BulkLoadRowset.cs
+++ b/BulkLoadEx/BulkLoadRowset.cs
@@ -78,10 +78,25 @@ public DataRow GetNewRow()
if (null == dataTableBuffer)
{
// Get a DataTable w/no rows that captures the schema of the destination table
- SqlDataAdapter schemaAdapter = new SqlDataAdapter("select * from [" + targetTable + "] where 1=0", cn);
- dataTableBuffer = new DataTable();
- dataTableBuffer.TableName = targetTable;
- schemaAdapter.Fill(this.dataTableBuffer);
+ // Handle schema-qualified names (e.g. "ReadTrace.tblTraceFiles") by bracketing each part separately
+ string quotedName;
+ int dotIndex = targetTable.IndexOf('.');
+ if (dotIndex >= 0)
+ {
+                // Escape ']' (QUOTENAME-style doubling) so bracket quoting cannot be broken
+                string schema = targetTable.Substring(0, dotIndex).Replace("]", "]]");
+                string table = targetTable.Substring(dotIndex + 1).Replace("]", "]]");
+                quotedName = "[" + schema + "].[" + table + "]";
+            }
+            else
+            {
+                quotedName = "[" + targetTable.Replace("]", "]]") + "]";
+ }
+ using (SqlDataAdapter schemaAdapter = new SqlDataAdapter("select * from " + quotedName + " where 1=0", cn))
+ {
+ dataTableBuffer = new DataTable();
+ dataTableBuffer.TableName = targetTable;
+ schemaAdapter.Fill(this.dataTableBuffer);
+ }
}
return this.dataTableBuffer.NewRow();
}
diff --git a/ReadTraceNexusImporter/ReadTraceNexusImporter.cs b/ReadTraceNexusImporter/ReadTraceNexusImporter.cs
index f67dadcf..8ddf457d 100644
--- a/ReadTraceNexusImporter/ReadTraceNexusImporter.cs
+++ b/ReadTraceNexusImporter/ReadTraceNexusImporter.cs
@@ -50,6 +50,9 @@ public class ReadTraceNexusImporter : INexusImporter, INexusProgressReporter
"tbl_ServerProperties " +
"WHERE PropertyName = 'UTCOffset_in_Hours'";
+ // Flag name written to the DB to record whether timestamps are in local time or UTC.
+ // SQLNexus_PostProcessing.sql reads this value to choose the correct time conversion.
+
// Private members
private ArrayList knownRowsets = new ArrayList(); // List of the rowsets we know how to interpret
@@ -186,6 +189,72 @@ private long GetApproximateTotalRowsInserted()
}
}
+        /// <summary>
+        /// Writes a flag recording whether timestamps in the ReadTrace tables are already in local
+        /// server time (<paramref name="isLocalTime"/> = true, value '1') or in UTC (false, '0').
+        /// Writes to tbl_ServerProperties when it exists, and also sets a
+        /// ImportedTraceTimestampsInLocalTime column on tbl_server_times (adding the column
+        /// first if necessary) as a fallback for environments where tbl_ServerProperties
+        /// is not present. SQLNexus_PostProcessing.sql reads either location to choose
+        /// the correct UTC offset conversion when populating StartTime_local /
+        /// EndTime_local.
+        /// </summary>
+ private void WriteLocalTimeFlag(bool isLocalTime)
+ {
+ // @flagStr = '1' or '0' for VARCHAR columns (tbl_ServerProperties, tblMiscInfo).
+ // @flagBit = 1 or 0 for the BIT column on tbl_server_times.
+ // sp_executesql's own @val parameter is fed from the outer @flagBit so the BIT
+ // update is also fully parameterised with no runtime concatenation.
+ const string sql =
+ // --- tbl_ServerProperties (primary) ---
+ "IF OBJECT_ID('dbo.tbl_ServerProperties') IS NOT NULL " +
+ "BEGIN " +
+ " IF EXISTS (SELECT 1 FROM dbo.tbl_ServerProperties WHERE PropertyName = 'ImportedTraceTimestampsInLocalTime') " +
+                "        UPDATE dbo.tbl_ServerProperties SET PropertyValue = @flagStr WHERE PropertyName = 'ImportedTraceTimestampsInLocalTime' " +
+ " ELSE " +
+ " INSERT INTO dbo.tbl_ServerProperties (PropertyName, PropertyValue) VALUES ('ImportedTraceTimestampsInLocalTime', @flagStr); " +
+ "END " +
+ // --- tbl_server_times (fallback) ---
+ // NOTE: ALTER TABLE and UPDATE must be in different batches; sp_executesql is
+ // used here so the UPDATE is compiled only after the column is already visible.
+ // The outer @flagBit parameter is forwarded into sp_executesql's own @val.
+ "IF OBJECT_ID('dbo.tbl_server_times') IS NOT NULL " +
+ "BEGIN " +
+ " IF COL_LENGTH('dbo.tbl_server_times', 'ImportedTraceTimestampsInLocalTime') IS NULL " +
+ " ALTER TABLE dbo.tbl_server_times ADD [ImportedTraceTimestampsInLocalTime] BIT NULL; " +
+ " IF COL_LENGTH('dbo.tbl_server_times', 'ImportedTraceTimestampsInLocalTime') IS NOT NULL " +
+ " EXEC sp_executesql N'UPDATE dbo.tbl_server_times SET [ImportedTraceTimestampsInLocalTime] = @val', N'@val BIT', @val = @flagBit; " +
+ "END " +
+ // --- ReadTrace.tblMiscInfo (guaranteed fallback) ---
+ "IF OBJECT_ID('ReadTrace.tblMiscInfo') IS NOT NULL " +
+ "BEGIN " +
+ " IF EXISTS (SELECT 1 FROM ReadTrace.tblMiscInfo WHERE Attribute = 'ImportedTraceTimestampsInLocalTime') " +
+                "        UPDATE ReadTrace.tblMiscInfo SET Value = @flagStr WHERE Attribute = 'ImportedTraceTimestampsInLocalTime' " +
+ " ELSE " +
+ " INSERT INTO ReadTrace.tblMiscInfo (Attribute, Value) VALUES ('ImportedTraceTimestampsInLocalTime', @flagStr); " +
+ "END";
+
+ using (SqlConnection cn = new SqlConnection(connStr))
+ {
+ try
+ {
+ cn.Open();
+ using (SqlCommand cmd = new SqlCommand(sql, cn))
+ {
+ cmd.CommandTimeout = 0;
+ cmd.Parameters.Add("@flagStr", SqlDbType.VarChar, 1).Value = isLocalTime ? "1" : "0";
+ cmd.Parameters.Add("@flagBit", SqlDbType.Bit).Value = isLocalTime;
+ cmd.ExecuteNonQuery();
+ }
+ Util.Logger.LogMessage("ReadTraceNexusImporter: Wrote 'ImportedTraceTimestampsInLocalTime'=" + (isLocalTime ? "1" : "0") + " to tbl_ServerProperties, tbl_server_times, and/or ReadTrace.tblMiscInfo.");
+ }
+ catch (Exception e)
+ {
+ Util.Logger.LogMessage("ReadTraceNexusImporter: Could not write local time flag: " + e.Message);
+ }
+ }
+ }
+
private decimal GetLocalServerTimeOffset()
{
using (SqlConnection cn = new SqlConnection(connStr))
@@ -292,7 +361,7 @@ private string FileFirstXelFile(string[] files)
/// Cancel an in-progress load
/// Called by host to ask in importer abort an in-progress load. Can return before abort is complete;
- /// the host will wait until DoImport() returns.
+ /// the host will wait until DoImport() returns.
public void Cancel()
{
Cancelled = true;
@@ -312,7 +381,7 @@ public void Cancel()
State = ImportState.Idle;
}
- /// True if the import has been asked to cancel an in-progress load. Set by the Cancel method.
+ /// True if the import has been asked to cancel an in-progress load. Set by the Cancel method.
public bool Cancelled
{
get { return canceled; }
@@ -320,7 +389,7 @@ public bool Cancelled
}
/// Start import
- /// Initialize() will be called prior to DoImport()
+ /// Initialize() will be called prior to DoImport()
/// true if import succeeds, false otherwise
public bool DoImport()
{
@@ -451,7 +520,12 @@ public bool DoImport()
State = ImportState.Idle;
if (0 == processReadTrace.ExitCode)
+ {
+ // Always write the flag ('1' = already local time, '0' = UTC) so that
+ // SQLNexus_PostProcessing.sql always has an explicit value to act on.
+ WriteLocalTimeFlag((bool)this.options[OPTION_USE_LOCAL_SERVER_TIME]);
return true;
+ }
else
return false;
}
diff --git a/TraceEventImporter/Database/BulkWriter.cs b/TraceEventImporter/Database/BulkWriter.cs
new file mode 100644
index 00000000..6dbdf0a2
--- /dev/null
+++ b/TraceEventImporter/Database/BulkWriter.cs
@@ -0,0 +1,440 @@
+using System;
+using System.Collections.Generic;
+using System.Data;
+using BulkLoadEx;
+using TraceEventImporter.Processing;
+
+namespace TraceEventImporter.Database
+{
+    /// <summary>
+    /// Writes processed trace data to SQL Server using BulkLoadRowset from BulkLoadEx.
+    /// Tables must already exist (created via CreateSchema.sql) before calling these methods.
+    /// </summary>
+ public class BulkWriter : IDisposable
+ {
+ private readonly string _connStr;
+ private bool _disposed;
+
+ public long TotalRowsInserted { get; private set; }
+
+ public BulkWriter(string connectionString)
+ {
+ _connStr = connectionString;
+ }
+
+ public void WriteMiscInfo(string attribute, string value)
+ {
+ var bl = new BulkLoadRowset("ReadTrace.tblMiscInfo", _connStr);
+ try
+ {
+ DataRow row = bl.GetNewRow();
+ row["Attribute"] = attribute;
+ row["Value"] = (object)value ?? DBNull.Value;
+ bl.InsertRow(row);
+ TotalRowsInserted++;
+ }
+ finally
+ {
+ bl.Close();
+ }
+ }
+
+ public void WriteTraceFile(long firstSeq, long lastSeq, DateTime? firstTime, DateTime? lastTime, long eventsRead, string fileName)
+ {
+ var bl = new BulkLoadRowset("ReadTrace.tblTraceFiles", _connStr);
+ try
+ {
+ DataRow row = bl.GetNewRow();
+ row["FirstSeqNumber"] = firstSeq;
+ row["LastSeqNumber"] = lastSeq;
+ row["FirstEventTime"] = (object)firstTime ?? DBNull.Value;
+ row["LastEventTime"] = (object)lastTime ?? DBNull.Value;
+ row["EventsRead"] = eventsRead;
+ row["TraceFileName"] = fileName;
+ bl.InsertRow(row);
+ TotalRowsInserted++;
+ }
+ finally
+ {
+ bl.Close();
+ }
+ }
+
+        public void WriteTracedEvents(IEnumerable<int> eventIds)
+ {
+ var bl = new BulkLoadRowset("ReadTrace.tblTracedEvents", _connStr);
+ try
+ {
+ foreach (int id in eventIds)
+ {
+ DataRow row = bl.GetNewRow();
+ row["EventID"] = (short)id;
+ bl.InsertRow(row);
+ TotalRowsInserted++;
+ }
+ }
+ finally
+ {
+ bl.Close();
+ }
+ }
+
+        public void WriteUniqueAppNames(IEnumerable<KeyValuePair<string, int>> appNames)
+ {
+ var bl = new BulkLoadRowset("ReadTrace.tblUniqueAppNames", _connStr);
+ try
+ {
+ foreach (var kvp in appNames)
+ {
+ DataRow row = bl.GetNewRow();
+ // iID is identity — don't set it; SQL Server generates it
+ row["AppName"] = kvp.Key ?? "";
+ bl.InsertRow(row);
+ TotalRowsInserted++;
+ }
+ }
+ finally
+ {
+ bl.Close();
+ }
+ }
+
+        public void WriteUniqueLoginNames(IEnumerable<KeyValuePair<string, int>> loginNames)
+ {
+ var bl = new BulkLoadRowset("ReadTrace.tblUniqueLoginNames", _connStr);
+ try
+ {
+ foreach (var kvp in loginNames)
+ {
+ DataRow row = bl.GetNewRow();
+ row["LoginName"] = kvp.Key ?? "";
+ bl.InsertRow(row);
+ TotalRowsInserted++;
+ }
+ }
+ finally
+ {
+ bl.Close();
+ }
+ }
+
+ public void WriteProcedureNames(IEnumerable procs)
+ {
+ var bl = new BulkLoadRowset("ReadTrace.tblProcedureNames", _connStr);
+ try
+ {
+ foreach (var proc in procs)
+ {
+ DataRow row = bl.GetNewRow();
+ row["DBID"] = proc.DBID;
+ row["ObjectID"] = proc.ObjectID;
+ row["SpecialProcID"] = proc.SpecialProcID;
+ row["Name"] = proc.Name ?? "";
+ bl.InsertRow(row);
+ TotalRowsInserted++;
+ }
+ }
+ finally
+ {
+ bl.Close();
+ }
+ }
+
+ public void WriteUniqueBatches(IEnumerable batches)
+ {
+ var bl = new BulkLoadRowset("ReadTrace.tblUniqueBatches", _connStr);
+ try
+ {
+ foreach (var b in batches)
+ {
+ DataRow row = bl.GetNewRow();
+ row["Seq"] = b.Seq;
+ row["HashID"] = b.HashID;
+ row["OrigText"] = b.OrigText ?? "";
+ row["NormText"] = b.NormText ?? "";
+ row["SpecialProcID"] = b.SpecialProcID;
+ bl.InsertRow(row);
+ TotalRowsInserted++;
+ }
+ }
+ finally
+ {
+ bl.Close();
+ }
+ }
+
+ public void WriteUniqueStatements(IEnumerable stmts)
+ {
+ var bl = new BulkLoadRowset("ReadTrace.tblUniqueStatements", _connStr);
+ try
+ {
+ foreach (var s in stmts)
+ {
+ DataRow row = bl.GetNewRow();
+ row["Seq"] = s.Seq;
+ row["HashID"] = s.HashID;
+ row["OrigText"] = (object)s.OrigText ?? DBNull.Value;
+ row["NormText"] = (object)s.NormText ?? DBNull.Value;
+ bl.InsertRow(row);
+ TotalRowsInserted++;
+ }
+ }
+ finally
+ {
+ bl.Close();
+ }
+ }
+
+ public void WriteBatches(List batches)
+ {
+ var bl = new BulkLoadRowset("ReadTrace.tblBatches", _connStr);
+ try
+ {
+ foreach (var b in batches)
+ {
+ DataRow row = bl.GetNewRow();
+ row["BatchSeq"] = b.BatchSeq;
+ row["HashID"] = b.HashID;
+ row["Session"] = b.Session;
+ row["Request"] = b.Request;
+ row["ConnId"] = b.ConnId;
+ row["StartTime"] = (object)b.StartTime ?? DBNull.Value;
+ row["EndTime"] = (object)b.EndTime ?? DBNull.Value;
+ row["Duration"] = (object)b.Duration ?? DBNull.Value;
+ row["Reads"] = (object)b.Reads ?? DBNull.Value;
+ row["Writes"] = (object)b.Writes ?? DBNull.Value;
+ row["CPU"] = (object)b.CPU ?? DBNull.Value;
+ row["fRPCEvent"] = b.fRPCEvent;
+ row["DBID"] = b.DBID;
+ row["StartSeq"] = (object)b.StartSeq ?? DBNull.Value;
+ row["EndSeq"] = (object)b.EndSeq ?? DBNull.Value;
+ row["AttnSeq"] = (object)b.AttnSeq ?? DBNull.Value;
+ row["ConnSeq"] = (object)b.ConnSeq ?? DBNull.Value;
+ row["TextData"] = (object)b.TextData ?? DBNull.Value;
+ row["OrigRowCount"] = (object)b.OrigRowCount ?? DBNull.Value;
+ bl.InsertRow(row);
+ TotalRowsInserted++;
+ }
+ }
+ finally
+ {
+ bl.Close();
+ }
+ }
+
+ public void WriteStatements(List stmts)
+ {
+ var bl = new BulkLoadRowset("ReadTrace.tblStatements", _connStr);
+ try
+ {
+ foreach (var s in stmts)
+ {
+ DataRow row = bl.GetNewRow();
+ row["StmtSeq"] = s.StmtSeq;
+ row["HashID"] = s.HashID;
+ row["Session"] = s.Session;
+ row["Request"] = s.Request;
+ row["ConnId"] = s.ConnId;
+ row["StartTime"] = (object)s.StartTime ?? DBNull.Value;
+ row["EndTime"] = (object)s.EndTime ?? DBNull.Value;
+ row["Duration"] = (object)s.Duration ?? DBNull.Value;
+ row["Reads"] = (object)s.Reads ?? DBNull.Value;
+ row["Writes"] = (object)s.Writes ?? DBNull.Value;
+ row["CPU"] = (object)s.CPU ?? DBNull.Value;
+ row["Rows"] = (object)s.Rows ?? DBNull.Value;
+ row["DBID"] = s.DBID;
+ row["ObjectID"] = (object)s.ObjectID ?? DBNull.Value;
+ row["NestLevel"] = s.NestLevel.HasValue ? (object)(byte)s.NestLevel.Value : DBNull.Value;
+ row["fDynamicSQL"] = s.fDynamicSQL;
+ row["StartSeq"] = (object)s.StartSeq ?? DBNull.Value;
+ row["EndSeq"] = (object)s.EndSeq ?? DBNull.Value;
+ row["ConnSeq"] = (object)s.ConnSeq ?? DBNull.Value;
+ row["BatchSeq"] = (object)s.BatchSeq ?? DBNull.Value;
+ row["ParentStmtSeq"] = (object)s.ParentStmtSeq ?? DBNull.Value;
+ row["AttnSeq"] = (object)s.AttnSeq ?? DBNull.Value;
+ row["TextData"] = (object)s.TextData ?? DBNull.Value;
+ bl.InsertRow(row);
+ TotalRowsInserted++;
+ }
+ }
+ finally
+ {
+ bl.Close();
+ }
+ }
+
+ public void WriteConnections(List conns)
+ {
+ var bl = new BulkLoadRowset("ReadTrace.tblConnections", _connStr);
+ try
+ {
+ foreach (var c in conns)
+ {
+ DataRow row = bl.GetNewRow();
+ row["ConnSeq"] = c.ConnSeq;
+ row["Session"] = c.Session;
+ row["StartTime"] = (object)c.StartTime ?? DBNull.Value;
+ row["EndTime"] = (object)c.EndTime ?? DBNull.Value;
+ row["Duration"] = (object)c.Duration ?? DBNull.Value;
+ row["Reads"] = (object)c.Reads ?? DBNull.Value;
+ row["Writes"] = (object)c.Writes ?? DBNull.Value;
+ row["CPU"] = (object)c.CPU ?? DBNull.Value;
+ row["ApplicationName"] = (object)c.ApplicationName ?? DBNull.Value;
+ row["LoginName"] = (object)c.LoginName ?? DBNull.Value;
+ row["HostName"] = (object)c.HostName ?? DBNull.Value;
+ row["NTDomainName"] = (object)c.NTDomainName ?? DBNull.Value;
+ row["NTUserName"] = (object)c.NTUserName ?? DBNull.Value;
+ row["StartSeq"] = (object)c.StartSeq ?? DBNull.Value;
+ row["EndSeq"] = (object)c.EndSeq ?? DBNull.Value;
+ row["TextData"] = (object)c.TextData ?? DBNull.Value;
+ bl.InsertRow(row);
+ TotalRowsInserted++;
+ }
+ }
+ finally
+ {
+ bl.Close();
+ }
+ }
+
+ public void WriteInterestingEvents(List events)
+ {
+ var bl = new BulkLoadRowset("ReadTrace.tblInterestingEvents", _connStr);
+ try
+ {
+ foreach (var e in events)
+ {
+ DataRow row = bl.GetNewRow();
+ row["Seq"] = e.Seq;
+ row["EventID"] = e.EventID;
+ row["Session"] = e.Session;
+ row["Request"] = e.Request;
+ row["ConnId"] = e.ConnId;
+ row["StartTime"] = (object)e.StartTime ?? DBNull.Value;
+ row["EndTime"] = (object)e.EndTime ?? DBNull.Value;
+ row["Duration"] = (object)e.Duration ?? DBNull.Value;
+ row["DBID"] = e.DBID;
+ row["IntegerData"] = (object)e.IntegerData ?? DBNull.Value;
+ row["EventSubclass"] = (object)e.EventSubclass ?? DBNull.Value;
+ row["TextData"] = (object)e.TextData ?? DBNull.Value;
+ row["ObjectID"] = (object)e.ObjectID ?? DBNull.Value;
+ row["Error"] = (object)e.Error ?? DBNull.Value;
+ row["BatchSeq"] = (object)e.BatchSeq ?? DBNull.Value;
+ row["Severity"] = (object)e.Severity ?? DBNull.Value;
+ row["State"] = (object)e.State ?? DBNull.Value;
+ bl.InsertRow(row);
+ TotalRowsInserted++;
+ }
+ }
+ finally
+ {
+ bl.Close();
+ }
+ }
+
+ public void WriteTimeIntervals(List intervals)
+ {
+ var bl = new BulkLoadRowset("ReadTrace.tblTimeIntervals", _connStr);
+ try
+ {
+ foreach (var ti in intervals)
+ {
+ DataRow row = bl.GetNewRow();
+ row["StartTime"] = ti.StartTime;
+ row["EndTime"] = ti.EndTime;
+ bl.InsertRow(row);
+ TotalRowsInserted++;
+ }
+ }
+ finally
+ {
+ bl.Close();
+ }
+ }
+
+ public void WriteBatchPartialAggs(List aggs)
+ {
+ var bl = new BulkLoadRowset("ReadTrace.tblBatchPartialAggs", _connStr);
+ try
+ {
+ foreach (var a in aggs)
+ {
+ DataRow row = bl.GetNewRow();
+ row["HashID"] = a.HashID;
+ row["TimeInterval"] = a.TimeInterval;
+ row["StartingEvents"] = a.StartingEvents;
+ row["CompletedEvents"] = a.CompletedEvents;
+ row["AttentionEvents"] = a.AttentionEvents;
+ row["MinDuration"] = (object)a.MinDuration ?? DBNull.Value;
+ row["MaxDuration"] = (object)a.MaxDuration ?? DBNull.Value;
+ row["TotalDuration"] = (object)a.TotalDuration ?? DBNull.Value;
+ row["MinReads"] = (object)a.MinReads ?? DBNull.Value;
+ row["MaxReads"] = (object)a.MaxReads ?? DBNull.Value;
+ row["TotalReads"] = (object)a.TotalReads ?? DBNull.Value;
+ row["MinWrites"] = (object)a.MinWrites ?? DBNull.Value;
+ row["MaxWrites"] = (object)a.MaxWrites ?? DBNull.Value;
+ row["TotalWrites"] = (object)a.TotalWrites ?? DBNull.Value;
+ row["MinCPU"] = (object)a.MinCPU ?? DBNull.Value;
+ row["MaxCPU"] = (object)a.MaxCPU ?? DBNull.Value;
+ row["TotalCPU"] = (object)a.TotalCPU ?? DBNull.Value;
+ row["AppNameID"] = a.AppNameID;
+ row["LoginNameID"] = a.LoginNameID;
+ row["DBID"] = a.DBID;
+ bl.InsertRow(row);
+ TotalRowsInserted++;
+ }
+ }
+ finally
+ {
+ bl.Close();
+ }
+ }
+
+ public void WriteStmtPartialAggs(List aggs)
+ {
+ var bl = new BulkLoadRowset("ReadTrace.tblStmtPartialAggs", _connStr);
+ try
+ {
+ foreach (var a in aggs)
+ {
+ DataRow row = bl.GetNewRow();
+ row["HashID"] = a.HashID;
+ row["TimeInterval"] = a.TimeInterval;
+ row["ObjectID"] = (object)a.ObjectID ?? DBNull.Value;
+ row["DBID"] = (object)a.DBID ?? DBNull.Value;
+ row["AppNameID"] = a.AppNameID;
+ row["LoginNameID"] = a.LoginNameID;
+ row["StartingEvents"] = a.StartingEvents;
+ row["CompletedEvents"] = a.CompletedEvents;
+ row["AttentionEvents"] = a.AttentionEvents;
+ row["MinDuration"] = (object)a.MinDuration ?? DBNull.Value;
+ row["MaxDuration"] = (object)a.MaxDuration ?? DBNull.Value;
+ row["TotalDuration"] = (object)a.TotalDuration ?? DBNull.Value;
+ row["MinReads"] = (object)a.MinReads ?? DBNull.Value;
+ row["MaxReads"] = (object)a.MaxReads ?? DBNull.Value;
+ row["TotalReads"] = (object)a.TotalReads ?? DBNull.Value;
+ row["MinWrites"] = (object)a.MinWrites ?? DBNull.Value;
+ row["MaxWrites"] = (object)a.MaxWrites ?? DBNull.Value;
+ row["TotalWrites"] = (object)a.TotalWrites ?? DBNull.Value;
+ row["MinCPU"] = (object)a.MinCPU ?? DBNull.Value;
+ row["MaxCPU"] = (object)a.MaxCPU ?? DBNull.Value;
+ row["TotalCPU"] = (object)a.TotalCPU ?? DBNull.Value;
+ bl.InsertRow(row);
+ TotalRowsInserted++;
+ }
+ }
+ finally
+ {
+ bl.Close();
+ }
+ }
+
+ public void Dispose()
+ {
+ if (!_disposed)
+ {
+ _disposed = true;
+ }
+ }
+ }
+}
diff --git a/TraceEventImporter/Models/TraceEvent.cs b/TraceEventImporter/Models/TraceEvent.cs
new file mode 100644
index 00000000..62ee4dc9
--- /dev/null
+++ b/TraceEventImporter/Models/TraceEvent.cs
@@ -0,0 +1,139 @@
+using System;
+
+namespace TraceEventImporter.Models
+{
+ public enum TraceEventType
+ {
+ Unknown = 0,
+ RpcCompleted = 10,
+ RpcStarting = 11,
+ SqlBatchCompleted = 12,
+ SqlBatchStarting = 13,
+ AuditLogin = 14,
+ AuditLogout = 15,
+ Attention = 16,
+ ExistingConnection = 17,
+ DtcTransaction = 19,
+ StmtStarting = 40,
+ StmtCompleted = 41,
+ SpStarting = 42,
+ SpCompleted = 43,
+ SpStmtStarting = 44,
+ SpStmtCompleted = 45,
+ ShowplanAll = 97,
+ ShowplanStatisticsProfile = 146,
+ StmtRecompile = 166,
+ }
+
+ public class TraceEvent
+ {
+ public long Seq { get; set; }
+ public TraceEventType EventType { get; set; }
+ public int EventId { get; set; }
+ public int SessionId { get; set; }
+ public int RequestId { get; set; }
+ public long ConnId { get; set; }
+ public int DatabaseId { get; set; }
+ public DateTime? StartTime { get; set; }
+ public DateTime? EndTime { get; set; }
+ public long? Duration { get; set; }
+ public long? CPU { get; set; }
+ public long? Reads { get; set; }
+ public long? Writes { get; set; }
+ public long? RowCount { get; set; }
+ public string TextData { get; set; }
+ public int? ObjectId { get; set; }
+ public string ObjectName { get; set; }
+ public int? NestLevel { get; set; }
+ public string ApplicationName { get; set; }
+ public string LoginName { get; set; }
+ public string HostName { get; set; }
+ public string NTDomainName { get; set; }
+ public string NTUserName { get; set; }
+ public int? Error { get; set; }
+ public int? EventSubclass { get; set; }
+ public int? IntegerData { get; set; }
+ public int? Severity { get; set; }
+ public int? State { get; set; }
+ public int? Offset { get; set; }
+ public int? LineNumber { get; set; }
+
+ public bool IsStartingEvent
+ {
+ get
+ {
+ switch (EventType)
+ {
+ case TraceEventType.SqlBatchStarting:
+ case TraceEventType.RpcStarting:
+ case TraceEventType.SpStmtStarting:
+ case TraceEventType.StmtStarting:
+ case TraceEventType.SpStarting:
+ return true;
+ default:
+ return false;
+ }
+ }
+ }
+
+ public bool IsCompletedEvent
+ {
+ get
+ {
+ switch (EventType)
+ {
+ case TraceEventType.SqlBatchCompleted:
+ case TraceEventType.RpcCompleted:
+ case TraceEventType.SpStmtCompleted:
+ case TraceEventType.StmtCompleted:
+ case TraceEventType.SpCompleted:
+ return true;
+ default:
+ return false;
+ }
+ }
+ }
+
+ public bool IsBatchEvent
+ {
+ get
+ {
+ switch (EventType)
+ {
+ case TraceEventType.SqlBatchStarting:
+ case TraceEventType.SqlBatchCompleted:
+ case TraceEventType.RpcStarting:
+ case TraceEventType.RpcCompleted:
+ return true;
+ default:
+ return false;
+ }
+ }
+ }
+
+ public bool IsStatementEvent
+ {
+ get
+ {
+ switch (EventType)
+ {
+ case TraceEventType.SpStmtStarting:
+ case TraceEventType.SpStmtCompleted:
+ case TraceEventType.StmtStarting:
+ case TraceEventType.StmtCompleted:
+ return true;
+ default:
+ return false;
+ }
+ }
+ }
+
+ public bool IsRpcEvent
+ {
+ get
+ {
+ return EventType == TraceEventType.RpcStarting || EventType == TraceEventType.RpcCompleted;
+ }
+ }
+ }
+}
diff --git a/TraceEventImporter/Normalization/HashComputer.cs b/TraceEventImporter/Normalization/HashComputer.cs
new file mode 100644
index 00000000..92146c45
--- /dev/null
+++ b/TraceEventImporter/Normalization/HashComputer.cs
@@ -0,0 +1,71 @@
+namespace TraceEventImporter.Normalization
+{
+    /// <summary>
+    /// Exact port of StringHash() from READ80TRACE/Analyze.cpp.
+    /// Produces a 64-bit hash from normalized SQL text using dual-hash
+    /// (One-At-A-Time + Bernstein's) with periodic DWORD swaps.
+    /// </summary>
+ public static class HashComputer
+ {
+        /// <summary>
+        /// Compute a 64-bit hash of the normalized text.
+        /// Must produce identical results to the C++ StringHash() function.
+        /// </summary>
+        /// <param name="normalizedText">Uppercased, whitespace-collapsed, placeholder-substituted SQL text</param>
+        /// <param name="specialProcId">Special procedure ID (0 for regular batches, nonzero for sp_executesql etc.)</param>
+        /// <returns>64-bit hash as a signed long (matching SQL bigint storage)</returns>
+ public static long ComputeHash(string normalizedText, int specialProcId = 0)
+ {
+ if (string.IsNullOrEmpty(normalizedText))
+ return 0;
+
+ int charCount = normalizedText.Length;
+
+ // Seed: dwHighHash = charCount, dwLowHash = specialProcId
+ // In the C++ struct, dwHighHash is at offset 0 (low 32 bits of uint64),
+ // dwLowHash is at offset 4 (high 32 bits of uint64) on little-endian.
+ uint dwHighHash = (uint)charCount;
+ uint dwLowHash = (uint)specialProcId;
+
+ for (int i = 0; i < charCount; i++)
+ {
+ ushort c = normalizedText[i];
+
+ // One At A Time hash on dwLowHash
+ dwLowHash += c;
+ dwLowHash += (dwLowHash << 10);
+ dwLowHash ^= (dwLowHash >> 6);
+
+ // Bernstein's hash on dwHighHash: ((self << 5) + self) == *33
+ dwHighHash = ((dwHighHash << 5) + dwHighHash) + c;
+
+ // Swap DWORD halves to avoid funnel patterns.
+ // Check next char for '!' (33) or current position divisible by 32.
+ // When i is the last char, "next char" is the null terminator (0) in C++,
+ // which is != 33, so only the modulo check applies.
+ bool shouldSwap = (i % 32 == 0);
+ if (!shouldSwap && i + 1 < charCount)
+ {
+ shouldSwap = (normalizedText[i + 1] == '!');
+ }
+
+ if (shouldSwap)
+ {
+ uint temp = dwLowHash;
+ dwLowHash = dwHighHash;
+ dwHighHash = temp;
+ }
+ }
+
+ // Final One At A Time finalization on dwLowHash
+ dwLowHash += (dwLowHash << 3);
+ dwLowHash ^= (dwLowHash >> 11);
+ dwLowHash += (dwLowHash << 15);
+
+ // Combine into 64-bit value matching C++ union layout on little-endian:
+ // dwHighHash at offset 0 = low 32 bits, dwLowHash at offset 4 = high 32 bits
+ ulong result = ((ulong)dwLowHash << 32) | dwHighHash;
+ return unchecked((long)result);
+ }
+ }
+}
diff --git a/TraceEventImporter/Normalization/SpExecuteSqlExtractor.cs b/TraceEventImporter/Normalization/SpExecuteSqlExtractor.cs
new file mode 100644
index 00000000..5f1deda3
--- /dev/null
+++ b/TraceEventImporter/Normalization/SpExecuteSqlExtractor.cs
@@ -0,0 +1,115 @@
+using System;
+
+namespace TraceEventImporter.Normalization
+{
+    /// <summary>
+    /// Extracts the inner SQL query text from sp_executesql, sp_prepexec, and sp_prepare
+    /// RPC calls. ReadTrace.exe does this via GetRPCParamText() to normalize/hash the
+    /// actual query rather than the wrapper call.
+    /// </summary>
+ public static class SpExecuteSqlExtractor
+ {
+ // SpecialProcIDs that carry an inner SQL query as the first parameter
+ private const byte SP_PREPARE_ID = 1;
+ private const byte SP_EXECUTESQL_ID = 3;
+ private const byte SP_PREPEXEC_ID = 4;
+ private const byte SP_CURSOROPEN_ID = 5;
+ private const byte SP_CURSORPREPARE_ID = 9;
+ private const byte SP_CURSORPREPEXEC_ID = 10;
+
+        /// <summary>
+        /// Returns true if this SpecialProcID should have its inner SQL extracted.
+        /// </summary>
+ public static bool ShouldExtractInnerSql(byte specialProcId)
+ {
+ return specialProcId == SP_EXECUTESQL_ID
+ || specialProcId == SP_PREPEXEC_ID
+ || specialProcId == SP_PREPARE_ID
+ || specialProcId == SP_CURSOROPEN_ID
+ || specialProcId == SP_CURSORPREPARE_ID
+ || specialProcId == SP_CURSORPREPEXEC_ID;
+ }
+
+        /// <summary>
+        /// Extracts the inner SQL query from a sp_executesql/sp_prepexec/sp_prepare
+        /// TextData string. Returns the inner SQL if found, otherwise returns null.
+        ///
+        /// Expected formats:
+        /// exec sp_executesql N'SELECT ...', N'@p1 int', @p1=42
+        /// exec sp_executesql N'SELECT ...'
+        /// SELECT ... (already extracted by trace infrastructure)
+        /// </summary>
+ public static string TryExtractInnerSql(string textData)
+ {
+ if (string.IsNullOrEmpty(textData))
+ return null;
+
+ // Find the first N' or ' which starts the SQL parameter
+ int i = 0;
+ int len = textData.Length;
+
+ // Skip past the proc name to find the first string parameter
+ // Look for N'...' or '...' pattern
+ while (i < len)
+ {
+ // N'...' unicode string literal — check we're past the proc name
+ if (i < len - 1
+ && (textData[i] == 'N' || textData[i] == 'n')
+ && textData[i + 1] == '\''
+ && i > 0
+ && IsAfterProcName(textData, i))
+ {
+ return ExtractQuotedString(textData, i + 2);
+ }
+
+ // '...' regular string literal
+ if (textData[i] == '\'' && i > 0 && IsAfterProcName(textData, i))
+ {
+ return ExtractQuotedString(textData, i + 1);
+ }
+
+ i++;
+ }
+
+ return null;
+ }
+
+ private static bool IsAfterProcName(string text, int pos)
+ {
+ // Walk backwards to check we're past whitespace/comma after the proc name
+ int j = pos - 1;
+ while (j >= 0 && (text[j] == ' ' || text[j] == '\t' || text[j] == ','))
+ j--;
+ // We should be past at least a few chars (the proc name)
+ return j > 3;
+ }
+
+ private static string ExtractQuotedString(string text, int startAfterQuote)
+ {
+ var result = new System.Text.StringBuilder();
+ int i = startAfterQuote;
+ int len = text.Length;
+
+ while (i < len)
+ {
+ if (text[i] == '\'')
+ {
+ // Check for escaped quote ''
+ if (i + 1 < len && text[i + 1] == '\'')
+ {
+ result.Append('\'');
+ i += 2;
+ continue;
+ }
+ // End of string
+ break;
+ }
+ result.Append(text[i]);
+ i++;
+ }
+
+ string inner = result.ToString().Trim();
+ return inner.Length > 0 ? inner : null;
+ }
+ }
+}
\ No newline at end of file
diff --git a/TraceEventImporter/Normalization/SqlTextNormalizer.cs b/TraceEventImporter/Normalization/SqlTextNormalizer.cs
new file mode 100644
index 00000000..9c1451ac
--- /dev/null
+++ b/TraceEventImporter/Normalization/SqlTextNormalizer.cs
@@ -0,0 +1,393 @@
+using System;
+using System.Text;
+
+namespace TraceEventImporter.Normalization
+{
+ ///
+ /// Normalizes SQL text by replacing literal values with placeholders.
+ /// Implements the same normalization rules as NORMALIZEMODE in rml.y:
+ /// @variable -> @P#
+ /// 123 -> {##}
+ /// 1.23 -> {##}.{##}
+ /// 'text' -> {STR}
+ /// N'text' -> {STR}
+ /// 0xABCD -> {BS}
+ /// {GUID-here} -> {GUID}
+ /// Also: UPPER case, collapse whitespace, trim, strip comments.
+ ///
+ public static class SqlTextNormalizer
+ {
+ public static string Normalize(string sql)
+ {
+ if (string.IsNullOrEmpty(sql))
+ return string.Empty;
+
+ var result = new StringBuilder(sql.Length);
+ int i = 0;
+ int len = sql.Length;
+
+ while (i < len)
+ {
+ char c = sql[i];
+
+ // --- Line comments: -- ... \n ---
+ if (c == '-' && i + 1 < len && sql[i + 1] == '-')
+ {
+ i += 2;
+ while (i < len && sql[i] != '\n' && sql[i] != '\r')
+ i++;
+ continue;
+ }
+
+ // --- Block comments: /* ... */ ---
+ if (c == '/' && i + 1 < len && sql[i + 1] == '*')
+ {
+ i += 2;
+ int depth = 1;
+ while (i < len && depth > 0)
+ {
+ if (sql[i] == '/' && i + 1 < len && sql[i + 1] == '*')
+ {
+ depth++;
+ i += 2;
+ }
+ else if (sql[i] == '*' && i + 1 < len && sql[i + 1] == '/')
+ {
+ depth--;
+ i += 2;
+ }
+ else
+ {
+ i++;
+ }
+ }
+ continue;
+ }
+
+ // --- N'unicode string literal' ---
+ if ((c == 'N' || c == 'n') && i + 1 < len && sql[i + 1] == '\'')
+ {
+ i += 2;
+ SkipStringLiteral(sql, ref i);
+ AppendWithSpace(result, "{STR}");
+ continue;
+ }
+
+ // --- 'string literal' ---
+ if (c == '\'')
+ {
+ i++;
+ SkipStringLiteral(sql, ref i);
+ AppendWithSpace(result, "{STR}");
+ continue;
+ }
+
+ // --- Binary literal: 0xHEX — only when not part of an identifier ---
+ if (c == '0' && i + 1 < len
+ && (sql[i + 1] == 'x' || sql[i + 1] == 'X')
+ && (i == 0 || !IsIdentChar(sql[i - 1])))
+ {
+ i += 2;
+ while (i < len && IsHexDigit(sql[i]))
+ i++;
+ AppendWithSpace(result, "{BS}");
+ continue;
+ }
+
+ // --- GUID literal: {xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx} ---
+ if (c == '{' && IsGuidLiteral(sql, i))
+ {
+ int end = sql.IndexOf('}', i);
+ if (end > i)
+ {
+ i = end + 1;
+ AppendWithSpace(result, "{GUID}");
+ continue;
+ }
+ }
+
+ // --- @variable or @@system_variable ---
+ if (c == '@')
+ {
+ i++;
+ if (i < len && sql[i] == '@')
+ {
+ // @@system_variable — preserve as-is
+ result.Append("@@");
+ i++;
+ while (i < len && IsIdentChar(sql[i]))
+ {
+ result.Append(char.ToUpperInvariant(sql[i]));
+ i++;
+ }
+ }
+ else
+ {
+ // @user_variable — capture the name to decide whether to normalize
+ int nameStart = i;
+ while (i < len && IsIdentChar(sql[i]))
+ i++;
+
+ // ReadTrace only normalizes auto-generated RPC parameter names
+ // (@P0, @P1, @P2, ...) to @P#. Named parameters like @JOB_ID,
+ // @ALL, @IS_SYSTEM are preserved as-is (uppercased).
+ string varName = sql.Substring(nameStart, i - nameStart);
+ if (IsAutoGeneratedParam(varName))
+ {
+ AppendWithSpace(result, "@P#");
+ }
+ else
+ {
+ result.Append('@');
+ for (int k = 0; k < varName.Length; k++)
+ result.Append(char.ToUpperInvariant(varName[k]));
+ }
+ }
+ continue;
+ }
+
+ // --- Currency-prefixed numeric literal: $380, \u00A5100, \u20AC50, \u00A320, etc. ---
+ if (IsCurrencySymbol(c) && i + 1 < len && char.IsDigit(sql[i + 1]))
+ {
+ i++; // skip the currency symbol
+ c = sql[i];
+ // fall through to numeric literal handling below
+ }
+
+ // --- Numeric literal ---
+ if (char.IsDigit(c) && (i == 0 || !IsIdentChar(sql[i - 1]) || IsCurrencySymbol(sql[i - 1])))
+ {
+ bool hasDecimal = false;
+ while (i < len && char.IsDigit(sql[i]))
+ i++;
+
+ if (i < len && sql[i] == '.' && i + 1 < len && char.IsDigit(sql[i + 1]))
+ {
+ hasDecimal = true;
+ i++; // skip '.'
+ while (i < len && char.IsDigit(sql[i]))
+ i++;
+ }
+
+ // Skip scientific notation suffix (e.g., 1.5E+10)
+ if (i < len && (sql[i] == 'e' || sql[i] == 'E'))
+ {
+ i++;
+ if (i < len && (sql[i] == '+' || sql[i] == '-'))
+ i++;
+ while (i < len && char.IsDigit(sql[i]))
+ i++;
+ }
+
+ // Make sure this number isn't followed by an identifier char (e.g., part of a name like "table1")
+ if (i < len && IsIdentChar(sql[i]))
+ {
+ // It's part of an identifier — don't normalize, just output what we've consumed
+ // Actually, rewind and just output as identifier
+ // This is a simplification; the original lexer handles this more precisely
+ }
+
+ AppendWithSpace(result, hasDecimal ? "{##}.{##}" : "{##}");
+ continue;
+ }
+
+ // --- Whitespace collapsing ---
+ if (char.IsWhiteSpace(c))
+ {
+ if (result.Length > 0 && result[result.Length - 1] != ' ')
+ result.Append(' ');
+ i++;
+ while (i < len && char.IsWhiteSpace(sql[i]))
+ i++;
+ continue;
+ }
+
+ // --- Quoted identifier [name] — preserve as-is ---
+ if (c == '[')
+ {
+ // Check for showplan variable pattern [ExprNNNN] or [BMKNNNN]
+ if (IsShowplanVariable(sql, i))
+ {
+ result.Append('[');
+ i++;
+ // Output the alpha prefix, strip trailing digits
+ while (i < len && sql[i] != ']' && !char.IsDigit(sql[i]))
+ {
+ result.Append(char.ToUpperInvariant(sql[i]));
+ i++;
+ }
+ // Skip digits and closing bracket
+ while (i < len && sql[i] != ']')
+ i++;
+ if (i < len) i++; // skip ']'
+ result.Append(']');
+ continue;
+ }
+
+ // Regular quoted identifier — preserve
+ result.Append('[');
+ i++;
+ while (i < len && sql[i] != ']')
+ {
+ result.Append(char.ToUpperInvariant(sql[i]));
+ i++;
+ }
+ if (i < len)
+ {
+ result.Append(']');
+ i++; // skip ']'
+ }
+ continue;
+ }
+
+ // --- Regular character — uppercase ---
+ result.Append(char.ToUpperInvariant(c));
+ i++;
+ }
+
+ return result.ToString().Trim();
+ }
+
+ private static void SkipStringLiteral(string sql, ref int i)
+ {
+ int len = sql.Length;
+ while (i < len)
+ {
+ if (sql[i] == '\'')
+ {
+ i++;
+ // Escaped quote ''
+ if (i < len && sql[i] == '\'')
+ {
+ i++;
+ continue;
+ }
+ return;
+ }
+ i++;
+ }
+ }
+
+ private static bool IsHexDigit(char c)
+ {
+ return (c >= '0' && c <= '9')
+ || (c >= 'a' && c <= 'f')
+ || (c >= 'A' && c <= 'F');
+ }
+
+ private static bool IsIdentChar(char c)
+ {
+ return char.IsLetterOrDigit(c) || c == '_' || c == '#' || c == '$';
+ }
+
+ ///
+ /// Returns true if the character is a currency symbol (dollar, yen/yuan, euro, pound, etc.)
+ /// that can appear as a prefix before a numeric value in SQL parameters.
+ ///
+ private static bool IsCurrencySymbol(char c)
+ {
+ // \u0024 $ dollar (USD, CAD, AUD, ...)
+ // \u00A2 ¢ cent
+ // \u00A3 £ pound sterling (GBP)
+ // \u00A4 ¤ generic currency sign
+ // \u00A5 ¥ yen / yuan (JPY, CNY)
+ // \u20A0-\u20CF Unicode Currency Symbols block:
+ // \u20A0 â‚ euro-currency sign
+ // \u20A1 ₡ colón (CRC)
+ // \u20A2 â‚¢ cruzeiro (BRL legacy)
+ // \u20A3 â‚£ franc (CHF legacy)
+ // \u20A4 ₤ lira sign
+ // \u20A5 â‚¥ mill sign
+ // \u20A6 ₦ naira (NGN)
+ // \u20A7 â‚§ peseta (ESP)
+ // \u20A8 ₨ rupee sign
+ // \u20A9 â‚© won (KRW)
+ // \u20AA ₪ shekel (ILS)
+ // \u20AB â‚« dong (VND)
+ // \u20AC € euro (EUR)
+ // \u20AD â‚ kip (LAK)
+ // \u20AE ₮ tögrög (MNT)
+ // \u20AF ₯ drachma (GRD)
+ // \u20B0 â‚° pfennig
+ // \u20B1 ₱ peso (PHP)
+ // \u20B2 ₲ guaranà (PYG)
+ // \u20B3 ₳ austral (ARS legacy)
+ // \u20B4 â‚´ hryvnia (UAH)
+ // \u20B5 ₵ cedi (GHS)
+ // \u20B6 â‚¶ livre tournois
+ // \u20B7 â‚· spesmilo
+ // \u20B8 ₸ tenge (KZT)
+ // \u20B9 ₹ rupee (INR)
+ // \u20BA ₺ lira (TRY)
+ // \u20BB â‚» nordic mark
+ // \u20BC ₼ manat (AZN)
+ // \u20BD ₽ ruble (RUB)
+ // \u20BE ₾ lari (GEL)
+ // \u20BF â‚¿ bitcoin
+ // \u20C0-\u20CF reserved for future currency symbols
+ return c == '\u0024' || c == '\u00A2' || c == '\u00A3' || c == '\u00A4' || c == '\u00A5' ||
+ (c >= '\u20A0' && c <= '\u20CF');
+ }
+
+ private static bool IsGuidLiteral(string sql, int pos)
+ {
+ // {XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX} = 38 chars
+ if (pos + 37 >= sql.Length) return false;
+ if (sql[pos + 9] != '-') return false;
+ if (sql[pos + 14] != '-') return false;
+ if (sql[pos + 19] != '-') return false;
+ if (sql[pos + 24] != '-') return false;
+ if (sql[pos + 37] != '}') return false;
+ return true;
+ }
+
+ private static bool IsShowplanVariable(string sql, int pos)
+ {
+ // Matches [ExprNNNN] or [BMKNNNN] patterns
+ if (pos + 5 >= sql.Length) return false;
+ int i = pos + 1;
+ var prefixBuilder = new StringBuilder();
+ while (i < sql.Length && char.IsLetter(sql[i]))
+ {
+ prefixBuilder.Append(sql[i]);
+ i++;
+ }
+
+ if (prefixBuilder.Length == 0)
+ return false;
+
+ string upper = prefixBuilder.ToString().ToUpperInvariant();
+ if (upper != "EXPR" && upper != "BMK" && upper != "UNION" && upper != "CONST")
+ return false;
+
+ // Must be followed by digits then ']'
+ if (i >= sql.Length || !char.IsDigit(sql[i]))
+ return false;
+ while (i < sql.Length && char.IsDigit(sql[i])) i++;
+ return (i < sql.Length && sql[i] == ']');
+ }
+
+ private static void AppendWithSpace(StringBuilder sb, string text)
+ {
+ sb.Append(text);
+ }
+
+ ///
+ /// Returns true if the variable name is an auto-generated RPC parameter
+ /// (e.g., P0, P1, P2, ...). These are the only @variables that get
+ /// normalized to @P# (matches ReadTrace.exe behavior).
+ ///
+ private static bool IsAutoGeneratedParam(string name)
+ {
+ // Must be "P" followed by one or more digits (case-insensitive)
+ if (name.Length < 2) return false;
+ if (name[0] != 'P' && name[0] != 'p') return false;
+ for (int i = 1; i < name.Length; i++)
+ {
+ if (!char.IsDigit(name[i]))
+ return false;
+ }
+ return true;
+ }
+ }
+}
diff --git a/TraceEventImporter/Processing/Aggregator.cs b/TraceEventImporter/Processing/Aggregator.cs
new file mode 100644
index 00000000..362a99e9
--- /dev/null
+++ b/TraceEventImporter/Processing/Aggregator.cs
@@ -0,0 +1,268 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+
+namespace TraceEventImporter.Processing
+{
+ ///
+ /// Builds tblTimeIntervals and computes per-(HashID, TimeInterval, DBID, AppNameID, LoginNameID)
+ /// aggregates for tblBatchPartialAggs and tblStmtPartialAggs.
+ ///
+ public class Aggregator
+ {
+ private readonly int _intervalSeconds;
+
+ public List TimeIntervals { get; } = new List();
+ public List BatchAggs { get; } = new List();
+ public List StmtAggs { get; } = new List();
+
+ public Aggregator(int intervalSeconds = 60)
+ {
+ _intervalSeconds = intervalSeconds > 0 ? intervalSeconds : 60;
+ }
+
+ public void Compute(List batches, List statements)
+ {
+ // Determine overall time range
+ DateTime minTime = DateTime.MaxValue;
+ DateTime maxTime = DateTime.MinValue;
+
+ foreach (var b in batches)
+ {
+ if (b.StartTime.HasValue && b.StartTime.Value < minTime) minTime = b.StartTime.Value;
+ if (b.EndTime.HasValue && b.EndTime.Value > maxTime) maxTime = b.EndTime.Value;
+ }
+ foreach (var s in statements)
+ {
+ if (s.StartTime.HasValue && s.StartTime.Value < minTime) minTime = s.StartTime.Value;
+ if (s.EndTime.HasValue && s.EndTime.Value > maxTime) maxTime = s.EndTime.Value;
+ }
+
+ if (minTime >= maxTime)
+ return;
+
+ // Build time intervals
+ BuildTimeIntervals(minTime, maxTime);
+
+ // Build batch aggregations
+ ComputeBatchAggs(batches);
+
+ // Build statement aggregations
+ ComputeStmtAggs(statements);
+ }
+
+ private void BuildTimeIntervals(DateTime minTime, DateTime maxTime)
+ {
+ DateTime current = minTime;
+ int intervalId = 1;
+ while (current < maxTime)
+ {
+ DateTime end = current.AddSeconds(_intervalSeconds);
+ if (end > maxTime) end = maxTime;
+
+ TimeIntervals.Add(new TimeIntervalRow
+ {
+ TimeInterval = intervalId++,
+ StartTime = current,
+ EndTime = end
+ });
+
+ current = end;
+ }
+ }
+
+ private void ComputeBatchAggs(List batches)
+ {
+ // Group by HashID, TimeInterval, DBID, AppNameID, LoginNameID
+ foreach (var batch in batches)
+ {
+ int timeInterval = FindTimeInterval(batch.EndTime ?? batch.StartTime);
+ if (timeInterval <= 0) continue;
+
+ var key = new AggKey(batch.HashID, timeInterval, batch.DBID, batch.AppNameID, batch.LoginNameID);
+
+ if (!_batchAggDict.TryGetValue(key, out var agg))
+ {
+ agg = new BatchPartialAggRow
+ {
+ HashID = batch.HashID,
+ TimeInterval = timeInterval,
+ DBID = batch.DBID,
+ AppNameID = batch.AppNameID,
+ LoginNameID = batch.LoginNameID
+ };
+ _batchAggDict[key] = agg;
+ BatchAggs.Add(agg);
+ }
+
+ // Count events
+ if (batch.StartTime.HasValue)
+ agg.StartingEvents++;
+ if (batch.EndTime.HasValue)
+ agg.CompletedEvents++;
+ if (batch.AttnSeq.HasValue)
+ agg.AttentionEvents++;
+
+ // Aggregate metrics (from completed events only)
+ if (batch.Duration.HasValue)
+ {
+ agg.TotalDuration = (agg.TotalDuration ?? 0) + batch.Duration.Value;
+ agg.MinDuration = agg.MinDuration.HasValue ? Math.Min(agg.MinDuration.Value, batch.Duration.Value) : batch.Duration.Value;
+ agg.MaxDuration = agg.MaxDuration.HasValue ? Math.Max(agg.MaxDuration.Value, batch.Duration.Value) : batch.Duration.Value;
+ }
+ if (batch.Reads.HasValue)
+ {
+ agg.TotalReads = (agg.TotalReads ?? 0) + batch.Reads.Value;
+ agg.MinReads = agg.MinReads.HasValue ? Math.Min(agg.MinReads.Value, batch.Reads.Value) : batch.Reads.Value;
+ agg.MaxReads = agg.MaxReads.HasValue ? Math.Max(agg.MaxReads.Value, batch.Reads.Value) : batch.Reads.Value;
+ }
+ if (batch.Writes.HasValue)
+ {
+ agg.TotalWrites = (agg.TotalWrites ?? 0) + batch.Writes.Value;
+ agg.MinWrites = agg.MinWrites.HasValue ? Math.Min(agg.MinWrites.Value, batch.Writes.Value) : batch.Writes.Value;
+ agg.MaxWrites = agg.MaxWrites.HasValue ? Math.Max(agg.MaxWrites.Value, batch.Writes.Value) : batch.Writes.Value;
+ }
+ if (batch.CPU.HasValue)
+ {
+ agg.TotalCPU = (agg.TotalCPU ?? 0) + batch.CPU.Value;
+ agg.MinCPU = agg.MinCPU.HasValue ? Math.Min(agg.MinCPU.Value, batch.CPU.Value) : batch.CPU.Value;
+ agg.MaxCPU = agg.MaxCPU.HasValue ? Math.Max(agg.MaxCPU.Value, batch.CPU.Value) : batch.CPU.Value;
+ }
+ }
+ }
+
+ private void ComputeStmtAggs(List statements)
+ {
+ foreach (var stmt in statements)
+ {
+ int timeInterval = FindTimeInterval(stmt.EndTime ?? stmt.StartTime);
+ if (timeInterval <= 0) continue;
+
+ var key = new AggKey(stmt.HashID, timeInterval, stmt.DBID, stmt.AppNameID, stmt.LoginNameID);
+
+ if (!_stmtAggDict.TryGetValue(key, out var agg))
+ {
+ agg = new StmtPartialAggRow
+ {
+ HashID = stmt.HashID,
+ TimeInterval = timeInterval,
+ ObjectID = stmt.ObjectID,
+ DBID = stmt.DBID,
+ AppNameID = stmt.AppNameID,
+ LoginNameID = stmt.LoginNameID
+ };
+ _stmtAggDict[key] = agg;
+ StmtAggs.Add(agg);
+ }
+
+ if (stmt.StartTime.HasValue) agg.StartingEvents++;
+ if (stmt.EndTime.HasValue) agg.CompletedEvents++;
+ if (stmt.AttnSeq.HasValue) agg.AttentionEvents++;
+
+ if (stmt.Duration.HasValue)
+ {
+ agg.TotalDuration = (agg.TotalDuration ?? 0) + stmt.Duration.Value;
+ agg.MinDuration = agg.MinDuration.HasValue ? Math.Min(agg.MinDuration.Value, stmt.Duration.Value) : stmt.Duration.Value;
+ agg.MaxDuration = agg.MaxDuration.HasValue ? Math.Max(agg.MaxDuration.Value, stmt.Duration.Value) : stmt.Duration.Value;
+ }
+ if (stmt.Reads.HasValue)
+ {
+ agg.TotalReads = (agg.TotalReads ?? 0) + stmt.Reads.Value;
+ agg.MinReads = agg.MinReads.HasValue ? Math.Min(agg.MinReads.Value, stmt.Reads.Value) : stmt.Reads.Value;
+ agg.MaxReads = agg.MaxReads.HasValue ? Math.Max(agg.MaxReads.Value, stmt.Reads.Value) : stmt.Reads.Value;
+ }
+ if (stmt.Writes.HasValue)
+ {
+ agg.TotalWrites = (agg.TotalWrites ?? 0) + stmt.Writes.Value;
+ agg.MinWrites = agg.MinWrites.HasValue ? Math.Min(agg.MinWrites.Value, stmt.Writes.Value) : stmt.Writes.Value;
+ agg.MaxWrites = agg.MaxWrites.HasValue ? Math.Max(agg.MaxWrites.Value, stmt.Writes.Value) : stmt.Writes.Value;
+ }
+ if (stmt.CPU.HasValue)
+ {
+ agg.TotalCPU = (agg.TotalCPU ?? 0) + stmt.CPU.Value;
+ agg.MinCPU = agg.MinCPU.HasValue ? Math.Min(agg.MinCPU.Value, stmt.CPU.Value) : stmt.CPU.Value;
+ agg.MaxCPU = agg.MaxCPU.HasValue ? Math.Max(agg.MaxCPU.Value, stmt.CPU.Value) : stmt.CPU.Value;
+ }
+ }
+ }
+
+ private int FindTimeInterval(DateTime? time)
+ {
+ if (!time.HasValue || TimeIntervals.Count == 0)
+ return 0;
+ DateTime t = time.Value;
+ foreach (var ti in TimeIntervals)
+ {
+ if (t >= ti.StartTime && t < ti.EndTime)
+ return ti.TimeInterval;
+ }
+ // If after last interval, use the last one
+ return TimeIntervals[TimeIntervals.Count - 1].TimeInterval;
+ }
+
+ private readonly Dictionary _batchAggDict = new Dictionary();
+ private readonly Dictionary _stmtAggDict = new Dictionary();
+
+ private struct AggKey : IEquatable
+ {
+ public readonly long HashID;
+ public readonly int TimeInterval;
+ public readonly int DBID;
+ public readonly int AppNameID;
+ public readonly int LoginNameID;
+
+ public AggKey(long hashId, int ti, int dbid, int appId, int loginId)
+ {
+ HashID = hashId; TimeInterval = ti; DBID = dbid; AppNameID = appId; LoginNameID = loginId;
+ }
+
+ public bool Equals(AggKey o) => HashID == o.HashID && TimeInterval == o.TimeInterval && DBID == o.DBID && AppNameID == o.AppNameID && LoginNameID == o.LoginNameID;
+ public override bool Equals(object obj) => obj is AggKey k && Equals(k);
+ public override int GetHashCode() => HashID.GetHashCode() ^ (TimeInterval * 397) ^ (DBID * 31) ^ AppNameID ^ LoginNameID;
+ }
+ }
+
+ #region Aggregation Row Types
+
+ public class TimeIntervalRow
+ {
+ public int TimeInterval;
+ public DateTime StartTime;
+ public DateTime EndTime;
+ }
+
+ public class BatchPartialAggRow
+ {
+ public long HashID;
+ public int TimeInterval;
+ public int StartingEvents;
+ public int CompletedEvents;
+ public int AttentionEvents;
+ public long? MinDuration, MaxDuration, TotalDuration;
+ public long? MinReads, MaxReads, TotalReads;
+ public long? MinWrites, MaxWrites, TotalWrites;
+ public long? MinCPU, MaxCPU, TotalCPU;
+ public int AppNameID;
+ public int LoginNameID;
+ public int DBID;
+ }
+
+ public class StmtPartialAggRow
+ {
+ public long HashID;
+ public int TimeInterval;
+ public int? ObjectID;
+ public int? DBID;
+ public int AppNameID;
+ public int LoginNameID;
+ public int StartingEvents;
+ public int CompletedEvents;
+ public int AttentionEvents;
+ public long? MinDuration, MaxDuration, TotalDuration;
+ public long? MinReads, MaxReads, TotalReads;
+ public long? MinWrites, MaxWrites, TotalWrites;
+ public long? MinCPU, MaxCPU, TotalCPU;
+ }
+
+ #endregion
+}
diff --git a/TraceEventImporter/Processing/EventProcessor.cs b/TraceEventImporter/Processing/EventProcessor.cs
new file mode 100644
index 00000000..65b682a3
--- /dev/null
+++ b/TraceEventImporter/Processing/EventProcessor.cs
@@ -0,0 +1,654 @@
+using System;
+using System.Collections.Generic;
+using TraceEventImporter.Models;
+using TraceEventImporter.Normalization;
+
+namespace TraceEventImporter.Processing
+{
+ ///
+ /// Correlates Starting↔Completed events per session+request, normalizes SQL text,
+ /// computes HashID, and produces rows for tblBatches, tblStatements, tblConnections,
+ /// and tblInterestingEvents.
+ ///
+ public class EventProcessor
+ {
+ private readonly UniqueStore _store;
+
+ // Pending batch events awaiting their Completed counterpart
+ private readonly Dictionary _pendingBatches =
+ new Dictionary();
+
+ // Pending statement events (nested via stack per session+request)
+ private readonly Dictionary> _pendingStatements =
+ new Dictionary>();
+
+ // Connection tracking (login→logout)
+ private readonly Dictionary _connections =
+ new Dictionary();
+
+ // Per session+request: the StartSeq of the current in-flight batch.
+ // This is how ReadTrace links statements to their parent batch.
+ private readonly Dictionary _curBatchStartSeq =
+ new Dictionary();
+
+ // Sessions that have had a connection event (login/existing connection).
+ // Used to detect sessions that were "connected before trace" started.
+ // Value is the ConnSeq assigned to that session's connection row.
+ private readonly Dictionary _sessionConnSeq =
+ new Dictionary();
+
+ // All sequence values (ConnSeq, BatchSeq, StmtSeq) are derived from the
+ // global event sequence (evt.Seq), aligned with ReadTrace.exe which uses
+ // pEvent->GetGlobalSeq() for all of them. No separate counters needed.
+
+ // Collected rows for bulk insert
+ public List Batches { get; } = new List();
+ public List Statements { get; } = new List();
+ public List Connections { get; } = new List();
+ public List InterestingEvents { get; } = new List();
+
+ public EventProcessor(UniqueStore store)
+ {
+ _store = store;
+ }
+
+ public void ProcessEvent(TraceEvent evt)
+ {
+ _store.AddTracedEvent(evt.EventId);
+
+ switch (evt.EventType)
+ {
+ case TraceEventType.SqlBatchStarting:
+ case TraceEventType.RpcStarting:
+ HandleBatchStarting(evt);
+ break;
+
+ case TraceEventType.SqlBatchCompleted:
+ case TraceEventType.RpcCompleted:
+ HandleBatchCompleted(evt);
+ break;
+
+ case TraceEventType.SpStmtStarting:
+ case TraceEventType.StmtStarting:
+ HandleStatementStarting(evt);
+ break;
+
+ case TraceEventType.SpStmtCompleted:
+ case TraceEventType.StmtCompleted:
+ HandleStatementCompleted(evt);
+ break;
+
+ case TraceEventType.AuditLogin:
+ case TraceEventType.ExistingConnection:
+ HandleLogin(evt);
+ break;
+
+ case TraceEventType.AuditLogout:
+ HandleLogout(evt);
+ break;
+
+ case TraceEventType.Attention:
+ HandleAttention(evt);
+ break;
+
+ default:
+ // Interesting events (recompile, autogrow, etc.)
+ if (evt.EventType != TraceEventType.Unknown)
+ HandleInterestingEvent(evt);
+ break;
+ }
+ }
+
+ ///
+ /// Finalize: flush any pending connections that never got a logout event.
+ ///
+ public void Finalize()
+ {
+ foreach (var conn in _connections.Values)
+ {
+ Connections.Add(new ConnectionRow
+ {
+ ConnSeq = conn.ConnSeq,
+ Session = conn.SessionId,
+ StartTime = conn.StartTime,
+ EndTime = null,
+ Duration = null,
+ Reads = null,
+ Writes = null,
+ CPU = null,
+ ApplicationName = conn.AppName,
+ LoginName = conn.LoginName,
+ HostName = conn.HostName,
+ NTDomainName = conn.NTDomainName,
+ NTUserName = conn.NTUserName,
+ StartSeq = conn.StartSeq,
+ EndSeq = null,
+ TextData = conn.TextData
+ });
+ }
+ _connections.Clear();
+ }
+
+ #region Batch Handling
+
+ private void HandleBatchStarting(TraceEvent evt)
+ {
+ var key = new SessionRequestKey(evt.SessionId, evt.RequestId);
+
+ // Track the starting sequence for this session+request so statements
+ // processed during batch execution can reference it (ReadTrace pattern).
+ _curBatchStartSeq[key] = evt.Seq;
+
+ _pendingBatches[key] = new PendingBatch
+ {
+ StartSeq = evt.Seq,
+ StartTime = evt.StartTime,
+ TextData = evt.TextData,
+ ObjectName = evt.ObjectName,
+ IsRpc = evt.IsRpcEvent,
+ DatabaseId = evt.DatabaseId,
+ ApplicationName = evt.ApplicationName,
+ LoginName = evt.LoginName
+ };
+ }
+
+ private void HandleBatchCompleted(TraceEvent evt)
+ {
+ var key = new SessionRequestKey(evt.SessionId, evt.RequestId);
+
+ PendingBatch pending = null;
+ _pendingBatches.TryGetValue(key, out pending);
+ if (pending != null)
+ _pendingBatches.Remove(key);
+
+ // BatchSeq = StartSeq if available, otherwise EndSeq (matches ReadTrace.exe:
+ // Row.BatchSeq_Value = (StartSeq_Status == OK) ? StartSeq : EndSeq)
+ long batchSeq = pending?.StartSeq ?? evt.Seq;
+
+ // Use completed event's text, fall back to starting event's text
+ string textData = evt.TextData ?? pending?.TextData;
+ string objectName = evt.ObjectName ?? pending?.ObjectName;
+ bool isRpc = evt.IsRpcEvent || (pending?.IsRpc ?? false);
+
+ // Determine special proc and normalize
+ byte specialProcId = isRpc ? SpecialProcDetector.GetSpecialProcId(objectName) : (byte)0;
+
+ // For sp_executesql, sp_prepexec, sp_prepare, etc.: extract the inner SQL
+ // query text and use that for normalization/hashing (matches ReadTrace.exe
+ // which calls GetRPCParamText to get the actual query from the first parameter).
+ string textForNormalization = textData;
+ if (SpExecuteSqlExtractor.ShouldExtractInnerSql(specialProcId))
+ {
+ string innerSql = SpExecuteSqlExtractor.TryExtractInnerSql(textData);
+ if (innerSql != null)
+ {
+ textForNormalization = innerSql;
+ }
+ }
+
+ string normText = SqlTextNormalizer.Normalize(textForNormalization);
+ long hashId = HashComputer.ComputeHash(normText, specialProcId);
+
+ // Add to unique store — store original textData but normalized inner SQL
+ _store.TryAddBatch(batchSeq, hashId, textData, normText, specialProcId);
+
+ // Track procedure name
+ if (isRpc && !string.IsNullOrEmpty(objectName))
+ {
+ _store.AddProcedureName(
+ evt.DatabaseId,
+ evt.ObjectId ?? 0,
+ specialProcId,
+ objectName);
+ }
+
+ int appNameId = _store.GetOrAddAppName(evt.ApplicationName ?? pending?.ApplicationName);
+ int loginNameId = _store.GetOrAddLoginName(evt.LoginName ?? pending?.LoginName);
+
+ // Determine ConnSeq for this batch. If the session has no connection event,
+ // create a "connected before trace" placeholder (matches ReadTrace.exe behavior).
+ long connSeq = EnsureConnectionForSession(evt);
+
+ var row = new BatchRow
+ {
+ BatchSeq = batchSeq,
+ HashID = hashId,
+ Session = evt.SessionId,
+ Request = evt.RequestId,
+ ConnId = evt.ConnId,
+ StartTime = pending?.StartTime ?? evt.StartTime,
+ EndTime = evt.EndTime ?? evt.StartTime,
+ Duration = evt.Duration,
+ Reads = evt.Reads,
+ Writes = evt.Writes,
+ CPU = evt.CPU,
+ fRPCEvent = (byte)(isRpc ? 1 : 0),
+ DBID = evt.DatabaseId,
+ StartSeq = pending?.StartSeq,
+ EndSeq = evt.Seq,
+ AttnSeq = null,
+ ConnSeq = connSeq,
+ TextData = textData,
+ OrigRowCount = evt.RowCount,
+ AppNameID = appNameId,
+ LoginNameID = loginNameId
+ };
+
+ Batches.Add(row);
+
+ // Clear the in-flight batch sequence now that the batch has completed
+ // (matches ReadTrace: pStateInfo->CurBatchStartSeq = 0)
+ _curBatchStartSeq.Remove(key);
+ }
+
+ #endregion
+
+ #region Statement Handling
+
+ private void HandleStatementStarting(TraceEvent evt)
+ {
+ var key = new SessionRequestKey(evt.SessionId, evt.RequestId);
+ if (!_pendingStatements.ContainsKey(key))
+ _pendingStatements[key] = new Stack();
+
+ _pendingStatements[key].Push(new PendingStatement
+ {
+ StartSeq = evt.Seq,
+ StartTime = evt.StartTime,
+ TextData = evt.TextData,
+ ObjectId = evt.ObjectId,
+ NestLevel = evt.NestLevel
+ });
+ }
+
+ private void HandleStatementCompleted(TraceEvent evt)
+ {
+ var key = new SessionRequestKey(evt.SessionId, evt.RequestId);
+
+ PendingStatement pending = null;
+ if (_pendingStatements.TryGetValue(key, out var stack) && stack.Count > 0)
+ pending = stack.Pop();
+
+ string textData = evt.TextData ?? pending?.TextData;
+ string normText = SqlTextNormalizer.Normalize(textData);
+ long hashId = HashComputer.ComputeHash(normText);
+
+ // StmtSeq = StartSeq if available, otherwise EndSeq (matches ReadTrace.exe:
+ // Row.StmtSeq_Value = (StartSeq_Status == OK) ? StartSeq : EndSeq)
+ long stmtSeq = pending?.StartSeq ?? evt.Seq;
+
+ _store.TryAddStatement(stmtSeq, hashId, textData, normText);
+
+ // Find the parent batch using the in-flight batch's starting sequence.
+ // This is the key difference from the old approach: ReadTrace maintains
+ // CurBatchStartSeq as live session state set when BatchStarting arrives,
+ // so statements that complete *during* batch execution get linked.
+ long? batchSeq = null;
+ _curBatchStartSeq.TryGetValue(key, out long curBatchStart);
+ if (curBatchStart > 0)
+ {
+ batchSeq = curBatchStart;
+ }
+
+ // Get ConnSeq for this statement's session. Unlike batches, statements
+ // do NOT create fake "CONNECTED BEFORE TRACE" rows — they just reference
+ // the existing ConnSeq if available (matches ReadTrace.exe: InsertStmt
+ // uses CurConnectSeq directly, only InsertBatch calls InsertConnectEvent).
+ long? connSeq = null;
+ if (_sessionConnSeq.TryGetValue(evt.SessionId, out long existingConnSeq))
+ {
+ connSeq = existingConnSeq;
+ }
+
+ int appNameId = _store.GetOrAddAppName(evt.ApplicationName);
+ int loginNameId = _store.GetOrAddLoginName(evt.LoginName);
+
+ var row = new StatementRow
+ {
+ StmtSeq = stmtSeq,
+ HashID = hashId,
+ Session = evt.SessionId,
+ Request = evt.RequestId,
+ ConnId = evt.ConnId,
+ StartTime = pending?.StartTime ?? evt.StartTime,
+ EndTime = evt.EndTime ?? evt.StartTime,
+ Duration = evt.Duration,
+ Reads = evt.Reads,
+ Writes = evt.Writes,
+ CPU = evt.CPU,
+ Rows = evt.RowCount,
+ DBID = evt.DatabaseId,
+ ObjectID = evt.ObjectId ?? pending?.ObjectId,
+ NestLevel = evt.NestLevel ?? pending?.NestLevel,
+ fDynamicSQL = false,
+ StartSeq = pending?.StartSeq,
+ EndSeq = evt.Seq,
+ ConnSeq = connSeq,
+ BatchSeq = batchSeq,
+ ParentStmtSeq = null, // Filled in post-load fixups
+ AttnSeq = null,
+ TextData = textData,
+ AppNameID = appNameId,
+ LoginNameID = loginNameId
+ };
+
+ Statements.Add(row);
+ }
+
+ #endregion
+
+ #region Connection Handling
+
+ ///
+ /// Ensures a connection row exists for the given event's session.
+ /// If no login/existing-connection event was seen for this session,
+ /// creates a "CONNECTED BEFORE TRACE" placeholder row using the event's
+ /// global sequence as ConnSeq (matches ReadTrace.exe: CurConnectSeq = BatchSeq_Value).
+ /// All ConnSeq values come from the global sequence space, ensuring uniqueness.
+ /// Returns the ConnSeq for the session.
+ ///
+ private long EnsureConnectionForSession(TraceEvent evt)
+ {
+ if (_sessionConnSeq.TryGetValue(evt.SessionId, out long existingConnSeq))
+ {
+ return existingConnSeq;
+ }
+
+ // No login event seen for this session — create a placeholder.
+ // ReadTrace.exe uses the batch's BatchSeq (which equals its StartSeq global
+ // sequence) as ConnSeq for fake connection rows. Since we call this from
+ // HandleBatchCompleted/HandleStatementCompleted, evt.Seq is the triggering
+ // event's unique global sequence — guaranteed unique per event.
+ long fakeConnSeq = evt.Seq;
+
+ Connections.Add(new ConnectionRow
+ {
+ ConnSeq = fakeConnSeq,
+ Session = evt.SessionId,
+ StartTime = null,
+ EndTime = null,
+ Duration = null,
+ Reads = null,
+ Writes = null,
+ CPU = null,
+ ApplicationName = "CONNECTED BEFORE TRACE",
+ LoginName = "CONNECTED BEFORE TRACE",
+ HostName = null,
+ NTDomainName = null,
+ NTUserName = null,
+ StartSeq = null,
+ EndSeq = null,
+ TextData = null
+ });
+
+ _sessionConnSeq[evt.SessionId] = fakeConnSeq;
+ return fakeConnSeq;
+ }
+
+ private void HandleLogin(TraceEvent evt)
+ {
+ // ConnSeq = the login event's global sequence (matches ReadTrace.exe:
+ // pStateInfo->CurConnectSeq = pEvent->GetGlobalSeq())
+ long connSeq = evt.Seq;
+
+ _connections[evt.SessionId] = new ConnectionInfo
+ {
+ ConnSeq = connSeq,
+ SessionId = evt.SessionId,
+ StartTime = evt.StartTime,
+ StartSeq = evt.Seq,
+ AppName = evt.ApplicationName,
+ LoginName = evt.LoginName,
+ HostName = evt.HostName,
+ NTDomainName = evt.NTDomainName,
+ NTUserName = evt.NTUserName,
+ TextData = evt.TextData
+ };
+
+ // Track that this session now has a real connection event
+ _sessionConnSeq[evt.SessionId] = connSeq;
+ }
+
+ private void HandleLogout(TraceEvent evt)
+ {
+ if (_connections.TryGetValue(evt.SessionId, out var conn))
+ {
+ Connections.Add(new ConnectionRow
+ {
+ ConnSeq = conn.ConnSeq,
+ Session = conn.SessionId,
+ StartTime = conn.StartTime,
+ EndTime = evt.StartTime,
+ Duration = evt.Duration,
+ Reads = evt.Reads,
+ Writes = evt.Writes,
+ CPU = evt.CPU,
+ ApplicationName = conn.AppName,
+ LoginName = conn.LoginName,
+ HostName = conn.HostName,
+ NTDomainName = conn.NTDomainName,
+ NTUserName = conn.NTUserName,
+ StartSeq = conn.StartSeq,
+ EndSeq = evt.Seq,
+ TextData = conn.TextData
+ });
+ _connections.Remove(evt.SessionId);
+
+ // Do NOT clear _sessionConnSeq here. ReadTrace.exe clears
+ // CurConnectSeq = 0 on logout, but then creates only ONE fake
+ // connection per session gap. Our EnsureConnectionForSession
+ // already caches per-session, so keeping the last known ConnSeq
+ // prevents creating duplicate fake connection rows for every
+ // batch that arrives between logout and the next login.
+ // The next HandleLogin will overwrite _sessionConnSeq with the
+ // new login's global sequence anyway.
+ }
+ }
+
+ #endregion
+
+ #region Attention & Interesting Events
+
+ // Stamps the attention's sequence onto the newest batch of the same
+ // session whose AttnSeq is still unset. Linear scan from the tail of
+ // Batches; stops at the first (most recent) match.
+ private void HandleAttention(TraceEvent evt)
+ {
+ // Find the most recent batch for this session and mark it with AttnSeq
+ for (int i = Batches.Count - 1; i >= 0; i--)
+ {
+ if (Batches[i].Session == evt.SessionId && Batches[i].AttnSeq == null)
+ {
+ Batches[i].AttnSeq = evt.Seq;
+ break;
+ }
+ }
+ }
+
+ // Records a non-batch "interesting" event row and, when a batch is in
+ // flight for the same (session, request), links the row to that batch's
+ // start sequence. TextData is truncated to 1000 characters to fit the
+ // tblInterestingEvents varchar(1000) column.
+ private void HandleInterestingEvent(TraceEvent evt)
+ {
+ // Find parent batch using in-flight batch sequence (matches ReadTrace)
+ var key = new SessionRequestKey(evt.SessionId, evt.RequestId);
+ long? batchSeq = null;
+ _curBatchStartSeq.TryGetValue(key, out long curBatchStart);
+ // curBatchStart stays 0 (the default) when no batch is in flight
+ if (curBatchStart > 0)
+ {
+ batchSeq = curBatchStart;
+ }
+
+ InterestingEvents.Add(new InterestingEventRow
+ {
+ Seq = evt.Seq,
+ EventID = evt.EventId,
+ Session = evt.SessionId,
+ Request = evt.RequestId,
+ ConnId = evt.ConnId,
+ StartTime = evt.StartTime,
+ EndTime = evt.EndTime,
+ Duration = evt.Duration,
+ DBID = evt.DatabaseId,
+ IntegerData = evt.IntegerData,
+ EventSubclass = evt.EventSubclass,
+ TextData = evt.TextData?.Length > 1000
+ ? evt.TextData.Substring(0, 1000)
+ : evt.TextData,
+ ObjectID = evt.ObjectId,
+ Error = evt.Error,
+ BatchSeq = batchSeq,
+ Severity = evt.Severity,
+ State = evt.State
+ });
+ }
+
+ #endregion
+ }
+
+ #region Row Types
+
+ // One row bound for ReadTrace.tblBatches: a single batch/RPC execution with
+ // its timing, I/O totals, and the sequence numbers linking it to its
+ // connection, attention, and start/end events.
+ public class BatchRow
+ {
+ public long BatchSeq;
+ public long HashID;
+ public int Session;
+ public int Request;
+ public long ConnId;
+ public DateTime? StartTime;
+ public DateTime? EndTime;
+ public long? Duration;
+ public long? Reads;
+ public long? Writes;
+ public long? CPU;
+ public byte fRPCEvent;
+ public int DBID;
+ public long? StartSeq;
+ public long? EndSeq;
+ public long? AttnSeq;
+ public long? ConnSeq;
+ public string TextData;
+ public long? OrigRowCount;
+ // For aggregation — not stored in table
+ public int AppNameID;
+ public int LoginNameID;
+ }
+
+ // One row bound for ReadTrace.tblStatements: a single statement execution
+ // with timing/I/O totals and links to its parent batch, parent statement,
+ // connection, and any attention event.
+ public class StatementRow
+ {
+ public long StmtSeq;
+ public long HashID;
+ public int Session;
+ public int Request;
+ public long ConnId;
+ public DateTime? StartTime;
+ public DateTime? EndTime;
+ public long? Duration;
+ public long? Reads;
+ public long? Writes;
+ public long? CPU;
+ public long? Rows;
+ public int DBID;
+ public int? ObjectID;
+ public int? NestLevel;
+ public bool fDynamicSQL;
+ public long? StartSeq;
+ public long? EndSeq;
+ public long? ConnSeq;
+ public long? BatchSeq;
+ public long? ParentStmtSeq;
+ public long? AttnSeq;
+ public string TextData;
+ // For aggregation — not stored in table
+ public int AppNameID;
+ public int LoginNameID;
+ }
+
+ // One row bound for ReadTrace.tblConnections: built in HandleLogout by
+ // joining cached login-time attributes with the logout event's totals.
+ public class ConnectionRow
+ {
+ public long ConnSeq;
+ public int Session;
+ public DateTime? StartTime;
+ public DateTime? EndTime;
+ public long? Duration;
+ public long? Reads;
+ public long? Writes;
+ public long? CPU;
+ public string ApplicationName;
+ public string LoginName;
+ public string HostName;
+ public string NTDomainName;
+ public string NTUserName;
+ public long? StartSeq;
+ public long? EndSeq;
+ public string TextData;
+ }
+
+ // One row bound for ReadTrace.tblInterestingEvents: an error/warning/etc.
+ // event optionally linked (BatchSeq) to the batch in flight when it fired.
+ public class InterestingEventRow
+ {
+ public long Seq;
+ public int EventID;
+ public int Session;
+ public int Request;
+ public long ConnId;
+ public DateTime? StartTime;
+ public DateTime? EndTime;
+ public long? Duration;
+ public int DBID;
+ public int? IntegerData;
+ public int? EventSubclass;
+ public string TextData;
+ public int? ObjectID;
+ public int? Error;
+ public long? BatchSeq;
+ public int? Severity;
+ public int? State;
+ }
+
+ // Composite dictionary key for a (session, request) pair. Implements
+ // IEquatable<SessionRequestKey> so Dictionary lookups avoid boxing.
+ // (The generic type argument had been stripped from the original line,
+ // which does not compile: IEquatable without a type argument.)
+ internal struct SessionRequestKey : IEquatable<SessionRequestKey>
+ {
+ public readonly int Session;
+ public readonly int Request;
+
+ public SessionRequestKey(int session, int request)
+ {
+ Session = session;
+ Request = request;
+ }
+
+ public bool Equals(SessionRequestKey other) => Session == other.Session && Request == other.Request;
+ public override bool Equals(object obj) => obj is SessionRequestKey k && Equals(k);
+ // Classic 397-multiplier hash combine over the two components.
+ public override int GetHashCode() => (Session * 397) ^ Request;
+ }
+
+ // Per-(session, request) state captured at a batch/RPC *starting* event.
+ // NOTE(review): presumably held until the matching completed event builds
+ // the BatchRow — the consuming handler is outside this chunk; confirm there.
+ internal class PendingBatch
+ {
+ public long StartSeq;
+ public DateTime? StartTime;
+ public string TextData;
+ public string ObjectName;
+ public bool IsRpc;
+ public int DatabaseId;
+ public string ApplicationName;
+ public string LoginName;
+ }
+
+ // State captured at a statement *starting* event, pending its completion.
+ // NOTE(review): consuming handler is outside this chunk; confirm pairing.
+ internal class PendingStatement
+ {
+ public long StartSeq;
+ public DateTime? StartTime;
+ public string TextData;
+ public int? ObjectId;
+ public int? NestLevel;
+ }
+
+ // Login-time attributes cached per session by HandleLogin and joined with
+ // the logout event's totals in HandleLogout to produce a ConnectionRow.
+ internal class ConnectionInfo
+ {
+ public long ConnSeq;
+ public int SessionId;
+ public DateTime? StartTime;
+ public long StartSeq;
+ public string AppName;
+ public string LoginName;
+ public string HostName;
+ public string NTDomainName;
+ public string NTUserName;
+ public string TextData;
+ }
+
+ #endregion
+}
diff --git a/TraceEventImporter/Processing/SpecialProcDetector.cs b/TraceEventImporter/Processing/SpecialProcDetector.cs
new file mode 100644
index 00000000..c37947da
--- /dev/null
+++ b/TraceEventImporter/Processing/SpecialProcDetector.cs
@@ -0,0 +1,64 @@
+using System;
+using System.Collections.Generic;
+
+namespace TraceEventImporter.Processing
+{
+ /// <summary>
+ /// Detects the 17 special stored procedures that get special handling during
+ /// normalization and hash computation. The SpecialProcID is used as a seed
+ /// component in the StringHash function.
+ /// </summary>
+ public static class SpecialProcDetector
+ {
+ // Name -> SpecialProcID (1..17); lookup is case-insensitive.
+ // (The stripped Dictionary generic arguments are restored here;
+ // without them the declaration does not compile.)
+ private static readonly Dictionary<string, byte> SpecialProcs =
+ new Dictionary<string, byte>(StringComparer.OrdinalIgnoreCase)
+ {
+ { "sp_prepare", 1 },
+ { "sp_execute", 2 },
+ { "sp_executesql", 3 },
+ { "sp_prepexec", 4 },
+ { "sp_cursoropen", 5 },
+ { "sp_cursorclose", 6 },
+ { "sp_cursorfetch", 7 },
+ { "sp_cursorexecute", 8 },
+ { "sp_cursorprepare", 9 },
+ { "sp_cursorprepexec", 10 },
+ { "sp_cursorunprepare", 11 },
+ { "sp_cursor", 12 },
+ { "sp_unprepare", 13 },
+ { "sp_getschemalock", 14 },
+ { "sp_releaseschemalock", 15 },
+ { "sp_reset_connection", 16 },
+ { "sp_refreshview", 17 },
+ };
+
+ /// <summary>
+ /// Returns the SpecialProcID for a stored procedure name, or 0 if not special.
+ /// Handles fully qualified names (e.g., "master.dbo.sp_executesql").
+ /// </summary>
+ public static byte GetSpecialProcId(string procName)
+ {
+ if (string.IsNullOrEmpty(procName))
+ return 0;
+
+ // Handle fully qualified names: extract the last part
+ // NOTE(review): bracket-quoted names ("[dbo].[sp_executesql]") are not
+ // unbracketed here and would miss the lookup — confirm callers pass
+ // unquoted names.
+ string simpleName = procName;
+ int lastDot = procName.LastIndexOf('.');
+ if (lastDot >= 0 && lastDot < procName.Length - 1)
+ simpleName = procName.Substring(lastDot + 1);
+
+ if (SpecialProcs.TryGetValue(simpleName, out byte id))
+ return id;
+
+ return 0;
+ }
+
+ /// <summary>
+ /// Checks if an RPC event's object name is a special procedure.
+ /// </summary>
+ public static bool IsSpecialProc(string procName)
+ {
+ return GetSpecialProcId(procName) != 0;
+ }
+ }
+}
diff --git a/TraceEventImporter/Processing/UniqueStore.cs b/TraceEventImporter/Processing/UniqueStore.cs
new file mode 100644
index 00000000..e9a7e7b1
--- /dev/null
+++ b/TraceEventImporter/Processing/UniqueStore.cs
@@ -0,0 +1,162 @@
+using System;
+using System.Collections.Generic;
+
+namespace TraceEventImporter.Processing
+{
+ /// <summary>
+ /// In-memory deduplication store for unique batches, statements, app names, and login names.
+ /// First occurrence stores original + normalized text; subsequent occurrences reuse the HashID.
+ /// </summary>
+ public class UniqueStore
+ {
+ // Dedup maps keyed by text HashID. (All generic type arguments in this
+ // class had been stripped; they are restored here so the code compiles.)
+ private readonly Dictionary<long, UniqueBatch> _uniqueBatches = new Dictionary<long, UniqueBatch>();
+ private readonly Dictionary<long, UniqueStatement> _uniqueStatements = new Dictionary<long, UniqueStatement>();
+ // Name -> surrogate id; names compare case-insensitively.
+ private readonly Dictionary<string, int> _uniqueAppNames = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase);
+ private readonly Dictionary<string, int> _uniqueLoginNames = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase);
+ // Keyed by "{dbid}_{objectId}_{specialProcId}".
+ private readonly Dictionary<string, ProcedureInfo> _procedureNames = new Dictionary<string, ProcedureInfo>(StringComparer.OrdinalIgnoreCase);
+ private readonly HashSet<int> _tracedEventIds = new HashSet<int>();
+
+ // NOTE(review): _uniqueBatchSeq/_uniqueStmtSeq are never read or written
+ // anywhere in this file; confirm whether they are dead or reserved.
+ private long _uniqueBatchSeq;
+ private long _uniqueStmtSeq;
+ private int _appNameIdSeq;
+ private int _loginNameIdSeq;
+
+ #region Unique Batches
+
+ /// <summary>
+ /// Records the first occurrence of a batch HashID. Returns false (no-op)
+ /// when the HashID is already known. Null text is stored as "".
+ /// </summary>
+ public bool TryAddBatch(long batchSeq, long hashId, string origText, string normText, byte specialProcId)
+ {
+ if (_uniqueBatches.ContainsKey(hashId))
+ return false;
+
+ _uniqueBatches[hashId] = new UniqueBatch
+ {
+ Seq = batchSeq, // first-occurrence BatchSeq (matches tblBatches.BatchSeq)
+ HashID = hashId,
+ OrigText = origText ?? "",
+ NormText = normText ?? "",
+ SpecialProcID = specialProcId
+ };
+ return true;
+ }
+
+ public IEnumerable<UniqueBatch> GetUniqueBatches() => _uniqueBatches.Values;
+
+ #endregion
+
+ #region Unique Statements
+
+ /// <summary>
+ /// Records the first occurrence of a statement HashID. Returns false
+ /// (no-op) when already known. Unlike batches, null text is kept as null.
+ /// </summary>
+ public bool TryAddStatement(long stmtSeq, long hashId, string origText, string normText)
+ {
+ if (_uniqueStatements.ContainsKey(hashId))
+ return false;
+
+ _uniqueStatements[hashId] = new UniqueStatement
+ {
+ Seq = stmtSeq, // first-occurrence StmtSeq (matches tblStatements.StmtSeq)
+ HashID = hashId,
+ OrigText = origText,
+ NormText = normText
+ };
+ return true;
+ }
+
+ public IEnumerable<UniqueStatement> GetUniqueStatements() => _uniqueStatements.Values;
+
+ #endregion
+
+ #region App Names
+
+ /// <summary>
+ /// Returns the surrogate id for an application name, assigning the next
+ /// sequential id (starting at 1) on first sight. Null folds into "".
+ /// </summary>
+ public int GetOrAddAppName(string appName)
+ {
+ if (string.IsNullOrEmpty(appName))
+ appName = "";
+
+ if (_uniqueAppNames.TryGetValue(appName, out int id))
+ return id;
+
+ id = ++_appNameIdSeq;
+ _uniqueAppNames[appName] = id;
+ return id;
+ }
+
+ public IEnumerable<KeyValuePair<string, int>> GetUniqueAppNames() => _uniqueAppNames;
+
+ #endregion
+
+ #region Login Names
+
+ /// <summary>
+ /// Returns the surrogate id for a login name, assigning the next
+ /// sequential id (starting at 1) on first sight. Null folds into "".
+ /// </summary>
+ public int GetOrAddLoginName(string loginName)
+ {
+ if (string.IsNullOrEmpty(loginName))
+ loginName = "";
+
+ if (_uniqueLoginNames.TryGetValue(loginName, out int id))
+ return id;
+
+ id = ++_loginNameIdSeq;
+ _uniqueLoginNames[loginName] = id;
+ return id;
+ }
+
+ public IEnumerable<KeyValuePair<string, int>> GetUniqueLoginNames() => _uniqueLoginNames;
+
+ #endregion
+
+ #region Procedure Names
+
+ /// <summary>
+ /// Records a procedure identity; first writer wins per
+ /// (DBID, ObjectID, SpecialProcID).
+ /// </summary>
+ public void AddProcedureName(int dbid, int objectId, byte specialProcId, string name)
+ {
+ string key = $"{dbid}_{objectId}_{specialProcId}";
+ if (!_procedureNames.ContainsKey(key))
+ {
+ _procedureNames[key] = new ProcedureInfo
+ {
+ DBID = dbid,
+ ObjectID = objectId,
+ SpecialProcID = specialProcId,
+ Name = name
+ };
+ }
+ }
+
+ public IEnumerable<ProcedureInfo> GetProcedureNames() => _procedureNames.Values;
+
+ #endregion
+
+ #region Traced Events
+
+ /// <summary>
+ /// Marks an event id as having appeared in the trace (set semantics).
+ /// </summary>
+ public void AddTracedEvent(int eventId)
+ {
+ _tracedEventIds.Add(eventId);
+ }
+
+ public IEnumerable<int> GetTracedEventIds() => _tracedEventIds;
+
+ #endregion
+ }
+
+ // First-occurrence record for a distinct batch text; maps to
+ // ReadTrace.tblUniqueBatches (Seq/HashID/OrigText/NormText/SpecialProcID).
+ public class UniqueBatch
+ {
+ public long Seq { get; set; }
+ public long HashID { get; set; }
+ public string OrigText { get; set; }
+ public string NormText { get; set; }
+ public byte SpecialProcID { get; set; }
+ }
+
+ // First-occurrence record for a distinct statement text; maps to
+ // ReadTrace.tblUniqueStatements.
+ public class UniqueStatement
+ {
+ public long Seq { get; set; }
+ public long HashID { get; set; }
+ public string OrigText { get; set; }
+ public string NormText { get; set; }
+ }
+
+ // Procedure identity recorded by UniqueStore.AddProcedureName; maps to
+ // ReadTrace.tblProcedureNames.
+ public class ProcedureInfo
+ {
+ public int DBID { get; set; }
+ public int ObjectID { get; set; }
+ public byte SpecialProcID { get; set; }
+ public string Name { get; set; }
+ }
+}
diff --git a/TraceEventImporter/Readers/ITraceEventReader.cs b/TraceEventImporter/Readers/ITraceEventReader.cs
new file mode 100644
index 00000000..a59dc0f5
--- /dev/null
+++ b/TraceEventImporter/Readers/ITraceEventReader.cs
@@ -0,0 +1,24 @@
+using System.Collections.Generic;
+using TraceEventImporter.Models;
+
+namespace TraceEventImporter.Readers
+{
+ /// <summary>
+ /// Abstraction for reading trace events from different file formats (.trc, .xel).
+ /// Implementations yield events in file order with monotonically increasing Seq numbers.
+ /// </summary>
+ public interface ITraceEventReader
+ {
+ /// <summary>
+ /// Read all trace events from the specified file.
+ /// </summary>
+ /// <param name="filePath">Path to the trace file</param>
+ /// <returns>Enumerable of trace events in file order</returns>
+ IEnumerable<TraceEvent> ReadEvents(string filePath);
+
+ /// <summary>
+ /// File extensions supported by this reader (e.g., ".trc", ".xel").
+ /// </summary>
+ string[] SupportedExtensions { get; }
+ }
+}
diff --git a/TraceEventImporter/Readers/XelFileReader.cs b/TraceEventImporter/Readers/XelFileReader.cs
new file mode 100644
index 00000000..d92f95cb
--- /dev/null
+++ b/TraceEventImporter/Readers/XelFileReader.cs
@@ -0,0 +1,286 @@
+using System;
+using System.Collections.Concurrent;
+using System.Collections.Generic;
+using System.Threading;
+using System.Threading.Tasks;
+using TraceEventImporter.Models;
+using Microsoft.SqlServer.XEvent.XELite;
+
+namespace TraceEventImporter.Readers
+{
+ /// <summary>
+ /// Reads SQL Server Extended Events (.xel) files using Microsoft.SqlServer.XEvent.XELite.
+ /// Maps XE event fields and actions to the unified TraceEvent model based on
+ /// the field mappings defined in xereaderlib/XEtoTrcConversions.h.
+ /// </summary>
+ public class XelFileReader : ITraceEventReader
+ {
+ // Fallback sequence counter for events carrying no event_sequence action.
+ private long _globalSeq;
+
+ public XelFileReader(long startingSeq = 0)
+ {
+ _globalSeq = startingSeq;
+ }
+
+ public string[] SupportedExtensions => new[] { ".xel" };
+
+ /// <summary>
+ /// Streams all events from a .xel file in file order. XELite's async
+ /// callback runs on a background task and hands events to the caller
+ /// through a bounded BlockingCollection so memory stays flat on large files.
+ /// </summary>
+ public IEnumerable<TraceEvent> ReadEvents(string filePath)
+ {
+ // XELite uses async callbacks; bridge to synchronous IEnumerable via BlockingCollection
+ using (BlockingCollection<TraceEvent> collection = new BlockingCollection<TraceEvent>(boundedCapacity: 1000))
+ {
+ Task readerTask = Task.Run(() =>
+ {
+ try
+ {
+ var streamer = new XEFileEventStreamer(filePath);
+ streamer.ReadEventStream(
+ () => Task.CompletedTask,
+ xevent =>
+ {
+ TraceEvent evt = MapEvent(xevent);
+ if (evt != null)
+ {
+ // Only assign our own counter when the event had
+ // no event_sequence action (MapEvent left Seq 0).
+ if (evt.Seq == 0)
+ evt.Seq = Interlocked.Increment(ref _globalSeq);
+ collection.Add(evt);
+ }
+ return Task.CompletedTask;
+ },
+ CancellationToken.None).Wait();
+ }
+ finally
+ {
+ collection.CompleteAdding();
+ }
+ });
+
+ // NOTE(review): if a consumer abandons this enumeration early, the
+ // using-Dispose can fault a reader task blocked in Add; confirm
+ // callers always drain the enumerable to completion.
+ foreach (TraceEvent evt in collection.GetConsumingEnumerable())
+ {
+ yield return evt;
+ }
+
+ // Propagate any exception from the reader task
+ readerTask.GetAwaiter().GetResult();
+ }
+ }
+
+ /// <summary>
+ /// Translates one XELite event into a TraceEvent: per-event-name field
+ /// mapping first, then the common actions shared by all events.
+ /// Unrecognized event names map to TraceEventType.Unknown.
+ /// </summary>
+ private TraceEvent MapEvent(IXEvent xe)
+ {
+ var evt = new TraceEvent();
+
+ // Common timestamp. BUGFIX: this must be assigned BEFORE the switch
+ // below — MapPerformanceFields computes EndTime = StartTime + Duration
+ // and previously saw a null StartTime, so EndTime was never populated
+ // for completed events.
+ evt.StartTime = xe.Timestamp.UtcDateTime;
+
+ // Map event name to TraceEventType
+ switch (xe.Name.ToLowerInvariant())
+ {
+ case "sql_batch_completed":
+ evt.EventType = TraceEventType.SqlBatchCompleted;
+ evt.EventId = 12;
+ MapBatchCompleted(xe, evt);
+ break;
+ case "sql_batch_starting":
+ evt.EventType = TraceEventType.SqlBatchStarting;
+ evt.EventId = 13;
+ MapBatchStarting(xe, evt);
+ break;
+ case "rpc_completed":
+ evt.EventType = TraceEventType.RpcCompleted;
+ evt.EventId = 10;
+ MapRpcCompleted(xe, evt);
+ break;
+ case "rpc_starting":
+ evt.EventType = TraceEventType.RpcStarting;
+ evt.EventId = 11;
+ MapRpcStarting(xe, evt);
+ break;
+ case "sp_statement_completed":
+ evt.EventType = TraceEventType.SpStmtCompleted;
+ evt.EventId = 45;
+ MapSpStmtCompleted(xe, evt);
+ break;
+ case "sp_statement_starting":
+ evt.EventType = TraceEventType.SpStmtStarting;
+ evt.EventId = 44;
+ MapSpStmtStarting(xe, evt);
+ break;
+ case "login":
+ evt.EventType = TraceEventType.AuditLogin;
+ evt.EventId = 14;
+ break;
+ case "logout":
+ evt.EventType = TraceEventType.AuditLogout;
+ evt.EventId = 15;
+ break;
+ case "attention":
+ evt.EventType = TraceEventType.Attention;
+ evt.EventId = 16;
+ break;
+ case "sql_statement_recompile":
+ evt.EventType = TraceEventType.StmtRecompile;
+ evt.EventId = 166;
+ break;
+ default:
+ evt.EventType = TraceEventType.Unknown;
+ break;
+ }
+
+ // Common actions (global fields attached to all events)
+ evt.Seq = GetActionInt64(xe, "event_sequence");
+ evt.DatabaseId = (int)GetActionInt64(xe, "database_id");
+ evt.SessionId = (int)GetActionInt64(xe, "session_id");
+ evt.RequestId = (int)GetActionInt64(xe, "request_id");
+ evt.ApplicationName = GetActionString(xe, "client_app_name");
+ evt.LoginName = GetActionString(xe, "server_principal_name");
+ evt.HostName = GetActionString(xe, "client_hostname");
+ evt.NTUserName = GetActionString(xe, "nt_username");
+
+ // If no text from event-specific mapping, try the sql_text action
+ if (string.IsNullOrEmpty(evt.TextData))
+ evt.TextData = GetActionString(xe, "sql_text");
+
+ return evt;
+ }
+
+ // sql_batch_completed: batch text plus performance counters and result code.
+ private void MapBatchCompleted(IXEvent xe, TraceEvent evt)
+ {
+ evt.TextData = GetFieldString(xe, "batch_text");
+ MapPerformanceFields(xe, evt);
+ evt.Error = (int?)GetFieldInt64Nullable(xe, "result");
+ }
+
+ // sql_batch_starting: batch text only (no counters yet).
+ private void MapBatchStarting(IXEvent xe, TraceEvent evt)
+ {
+ evt.TextData = GetFieldString(xe, "batch_text");
+ }
+
+ // rpc_completed: statement text, object name, counters, and result code.
+ private void MapRpcCompleted(IXEvent xe, TraceEvent evt)
+ {
+ evt.TextData = GetFieldString(xe, "statement");
+ evt.ObjectName = GetFieldString(xe, "object_name");
+ MapPerformanceFields(xe, evt);
+ evt.Error = (int?)GetFieldInt64Nullable(xe, "result");
+ }
+
+ // rpc_starting: statement text and object name only.
+ private void MapRpcStarting(IXEvent xe, TraceEvent evt)
+ {
+ evt.TextData = GetFieldString(xe, "statement");
+ evt.ObjectName = GetFieldString(xe, "object_name");
+ }
+
+ // sp_statement_completed: statement identity, position info, and counters.
+ private void MapSpStmtCompleted(IXEvent xe, TraceEvent evt)
+ {
+ evt.TextData = GetFieldString(xe, "statement");
+ evt.ObjectId = (int?)GetFieldInt64Nullable(xe, "object_id");
+ evt.ObjectName = GetFieldString(xe, "object_name");
+ evt.NestLevel = (int?)GetFieldInt64Nullable(xe, "nest_level");
+ evt.LineNumber = (int?)GetFieldInt64Nullable(xe, "line_number");
+ evt.Offset = (int?)GetFieldInt64Nullable(xe, "offset");
+ MapPerformanceFields(xe, evt);
+
+ // source_database_id overrides the action-level database_id for statements
+ long? srcDbId = GetFieldInt64Nullable(xe, "source_database_id");
+ if (srcDbId.HasValue && srcDbId.Value > 0)
+ evt.DatabaseId = (int)srcDbId.Value;
+ }
+
+ // sp_statement_starting: statement identity only (no counters yet).
+ private void MapSpStmtStarting(IXEvent xe, TraceEvent evt)
+ {
+ evt.TextData = GetFieldString(xe, "statement");
+ evt.ObjectId = (int?)GetFieldInt64Nullable(xe, "object_id");
+ evt.ObjectName = GetFieldString(xe, "object_name");
+ evt.NestLevel = (int?)GetFieldInt64Nullable(xe, "nest_level");
+ }
+
+ // Shared counter mapping for *_completed events.
+ private void MapPerformanceFields(IXEvent xe, TraceEvent evt)
+ {
+ // XE reports cpu_time in microseconds; ReadTrace schema stores CPU in milliseconds
+ long? cpuMicroseconds = GetFieldInt64Nullable(xe, "cpu_time");
+ evt.CPU = cpuMicroseconds.HasValue ? cpuMicroseconds.Value / 1000 : (long?)null;
+
+ evt.Duration = GetFieldInt64Nullable(xe, "duration");
+ evt.Writes = GetFieldInt64Nullable(xe, "writes");
+ evt.RowCount = GetFieldInt64Nullable(xe, "row_count");
+
+ // Reads = logical_reads + physical_reads (matching XEtoTrcConversions.h Add behavior)
+ long? logicalReads = GetFieldInt64Nullable(xe, "logical_reads");
+ long? physicalReads = GetFieldInt64Nullable(xe, "physical_reads");
+ if (logicalReads.HasValue || physicalReads.HasValue)
+ evt.Reads = (logicalReads ?? 0) + (physicalReads ?? 0);
+
+ // EndTime = StartTime + Duration for completed events
+ // (requires StartTime to be set before this runs; see MapEvent)
+ if (evt.StartTime.HasValue && evt.Duration.HasValue)
+ evt.EndTime = evt.StartTime.Value.AddMicroseconds(evt.Duration.Value);
+ }
+
+ #region Field/Action Helpers
+
+ // The helpers below tolerate missing fields/actions (null / 0) and log
+ // conversion failures to Debug instead of throwing, so one malformed
+ // field never aborts the whole file.
+
+ private static string GetFieldString(IXEvent xe, string fieldName)
+ {
+ try
+ {
+ if (xe.Fields != null && xe.Fields.TryGetValue(fieldName, out object val) && val != null)
+ return val.ToString();
+ }
+ catch (Exception ex)
+ {
+ System.Diagnostics.Debug.WriteLine($"[XelFileReader] GetFieldString('{fieldName}'): {ex.Message}");
+ }
+ return null;
+ }
+
+ private static long? GetFieldInt64Nullable(IXEvent xe, string fieldName)
+ {
+ try
+ {
+ if (xe.Fields != null && xe.Fields.TryGetValue(fieldName, out object val) && val != null)
+ return Convert.ToInt64(val);
+ }
+ catch (Exception ex)
+ {
+ System.Diagnostics.Debug.WriteLine($"[XelFileReader] GetFieldInt64Nullable('{fieldName}'): {ex.Message}");
+ }
+ return null;
+ }
+
+ private static string GetActionString(IXEvent xe, string actionName)
+ {
+ try
+ {
+ if (xe.Actions != null && xe.Actions.TryGetValue(actionName, out object val) && val != null)
+ return val.ToString();
+ }
+ catch (Exception ex)
+ {
+ System.Diagnostics.Debug.WriteLine($"[XelFileReader] GetActionString('{actionName}'): {ex.Message}");
+ }
+ return null;
+ }
+
+ private static long GetActionInt64(IXEvent xe, string actionName)
+ {
+ try
+ {
+ if (xe.Actions != null && xe.Actions.TryGetValue(actionName, out object val) && val != null)
+ return Convert.ToInt64(val);
+ }
+ catch (Exception ex)
+ {
+ System.Diagnostics.Debug.WriteLine($"[XelFileReader] GetActionInt64('{actionName}'): {ex.Message}");
+ }
+ return 0;
+ }
+
+ #endregion
+ }
+
+ // Extension method for DateTime.AddMicroseconds (not available in .NET Framework 4.8)
+ internal static class DateTimeExtensions
+ {
+ // One DateTime tick is 100 ns, so 1 microsecond == 10 ticks.
+ public static DateTime AddMicroseconds(this DateTime dt, long microseconds)
+ {
+ return dt.AddTicks(microseconds * 10);
+ }
+ }
+}
diff --git a/TraceEventImporter/Schema/CreateSchema.sql b/TraceEventImporter/Schema/CreateSchema.sql
new file mode 100644
index 00000000..3a4a69bd
--- /dev/null
+++ b/TraceEventImporter/Schema/CreateSchema.sql
@@ -0,0 +1,425 @@
+-- CreateSchema.sql
+-- Creates the ReadTrace schema and all tables.
+-- Ported verbatim from SRC/READ80TRACE/res/tsql/*.sql
+
+IF NOT EXISTS (SELECT 1 FROM sys.schemas WHERE name = 'ReadTrace')
+ EXEC('CREATE SCHEMA ReadTrace')
+GO
+
+-- ===========================================
+-- Reference / Metadata Tables
+-- ===========================================
+
+IF OBJECT_ID('ReadTrace.tblMiscInfo', 'U') IS NOT NULL DROP TABLE ReadTrace.tblMiscInfo
+GO
+CREATE TABLE ReadTrace.tblMiscInfo
+(
+ Attribute nvarchar(50) NOT NULL,
+ Value nvarchar(2000) NULL,
+ iRow int NOT NULL IDENTITY(1,1) PRIMARY KEY CLUSTERED
+)
+GO
+
+IF OBJECT_ID('ReadTrace.tblTraceFiles', 'U') IS NOT NULL DROP TABLE ReadTrace.tblTraceFiles
+GO
+CREATE TABLE ReadTrace.tblTraceFiles
+(
+ FileProcessed int IDENTITY(1, 1) NOT NULL,
+ FirstSeqNumber bigint NULL,
+ LastSeqNumber bigint NULL,
+ FirstEventTime datetime NULL,
+ LastEventTime datetime NULL,
+ EventsRead bigint NOT NULL,
+ TraceFileName nvarchar(512) NOT NULL,
+ CONSTRAINT PK_tblTraceFiles PRIMARY KEY (FileProcessed)
+)
+GO
+
+IF OBJECT_ID('ReadTrace.tblTracedEvents', 'U') IS NOT NULL DROP TABLE ReadTrace.tblTracedEvents
+GO
+CREATE TABLE ReadTrace.tblTracedEvents
+(
+ EventID smallint NOT NULL CONSTRAINT PK_TracedEvents PRIMARY KEY
+)
+GO
+
+IF OBJECT_ID('ReadTrace.trace_events', 'U') IS NOT NULL DROP TABLE ReadTrace.trace_events
+GO
+CREATE TABLE ReadTrace.trace_events
+(
+ trace_event_id int NOT NULL PRIMARY KEY CLUSTERED,
+ category_id int NOT NULL,
+ name nvarchar(128) NOT NULL
+)
+GO
+
+IF OBJECT_ID('ReadTrace.tblUniqueAppNames', 'U') IS NOT NULL DROP TABLE ReadTrace.tblUniqueAppNames
+GO
+CREATE TABLE ReadTrace.tblUniqueAppNames
+(
+ [iID] [int] NOT NULL PRIMARY KEY IDENTITY(1,1),
+ [AppName] nvarchar(256) NOT NULL
+)
+GO
+
+IF OBJECT_ID('ReadTrace.tblUniqueLoginNames', 'U') IS NOT NULL DROP TABLE ReadTrace.tblUniqueLoginNames
+GO
+CREATE TABLE ReadTrace.tblUniqueLoginNames
+(
+ [iID] [int] NOT NULL PRIMARY KEY IDENTITY(1,1),
+ [LoginName] nvarchar(256) NOT NULL
+)
+GO
+
+IF OBJECT_ID('ReadTrace.tblProcedureNames', 'U') IS NOT NULL DROP TABLE ReadTrace.tblProcedureNames
+GO
+CREATE TABLE ReadTrace.tblProcedureNames
+(
+ DBID int NULL,
+ ObjectID int NULL,
+ SpecialProcID tinyint NULL,
+ [Name] nvarchar(388) NOT NULL
+)
+GO
+
+-- ===========================================
+-- Unique / Normalization Tables
+-- ===========================================
+
+IF OBJECT_ID('ReadTrace.tblUniqueBatches', 'U') IS NOT NULL DROP TABLE ReadTrace.tblUniqueBatches
+GO
+CREATE TABLE ReadTrace.tblUniqueBatches
+(
+ Seq bigint NOT NULL,
+ HashID bigint NOT NULL,
+ OrigText nvarchar(max) NOT NULL,
+ NormText nvarchar(max) NOT NULL,
+ SpecialProcID tinyint NULL
+)
+GO
+
+IF OBJECT_ID('ReadTrace.tblUniqueStatements', 'U') IS NOT NULL DROP TABLE ReadTrace.tblUniqueStatements
+GO
+CREATE TABLE ReadTrace.tblUniqueStatements
+(
+ Seq bigint NOT NULL,
+ HashID bigint NOT NULL,
+ OrigText nvarchar(max) NULL,
+ NormText nvarchar(max) NULL
+)
+GO
+
+IF OBJECT_ID('ReadTrace.tblUniquePlans', 'U') IS NOT NULL DROP TABLE ReadTrace.tblUniquePlans
+GO
+CREATE TABLE ReadTrace.tblUniquePlans
+(
+ Seq bigint NOT NULL,
+ PlanHashID bigint NOT NULL,
+ DBID int NULL,
+ NormPlanText nvarchar(max) NULL
+)
+GO
+
+IF OBJECT_ID('ReadTrace.tblUniquePlanRows', 'U') IS NOT NULL DROP TABLE ReadTrace.tblUniquePlanRows
+GO
+CREATE TABLE ReadTrace.tblUniquePlanRows
+(
+ PlanHashID bigint NOT NULL,
+ Rows bigint NULL,
+ Executes bigint NULL,
+ StmtText nvarchar(max) NOT NULL,
+ StmtID int NOT NULL,
+ NodeID smallint NOT NULL,
+ Parent smallint NULL,
+ PhysicalOp varchar(30) NULL,
+ LogicalOp varchar(30) NULL,
+ Argument nvarchar(256) NULL,
+ DefinedValues nvarchar(256) NULL,
+ EstimateRows float NULL,
+ EstimateIO float NULL,
+ EstimateCPU float NULL,
+ AvgRowSize int NULL,
+ TotalSubtreeCost float NULL,
+ OutputList nvarchar(256) NULL,
+ Warnings varchar(100) NULL,
+ Type varchar(30) NULL,
+ Parallel tinyint NULL,
+ EstimateExecutions float NULL,
+ RowOrder smallint NOT NULL
+)
+GO
+
+-- ===========================================
+-- Fact Tables
+-- ===========================================
+
+IF OBJECT_ID('ReadTrace.tblBatches', 'U') IS NOT NULL DROP TABLE ReadTrace.tblBatches
+GO
+CREATE TABLE ReadTrace.tblBatches
+(
+ BatchSeq bigint NOT NULL,
+ HashID bigint NOT NULL,
+ Session int NOT NULL,
+ Request int NOT NULL,
+ ConnId bigint NOT NULL,
+ StartTime datetime NULL,
+ EndTime datetime NULL,
+ Duration bigint NULL,
+ Reads bigint NULL,
+ Writes bigint NULL,
+ CPU bigint NULL,
+ fRPCEvent tinyint NOT NULL,
+ DBID int NULL,
+ StartSeq bigint NULL,
+ EndSeq bigint NULL,
+ AttnSeq bigint NULL,
+ ConnSeq bigint NULL,
+ TextData nvarchar(max) NULL,
+ OrigRowCount bigint NULL
+)
+GO
+
+IF OBJECT_ID('ReadTrace.tblStatements', 'U') IS NOT NULL DROP TABLE ReadTrace.tblStatements
+GO
+CREATE TABLE ReadTrace.tblStatements
+(
+ StmtSeq bigint NOT NULL,
+ HashID bigint NOT NULL,
+ Session int NOT NULL,
+ Request int NOT NULL,
+ ConnId bigint NOT NULL,
+ StartTime datetime NULL,
+ EndTime datetime NULL,
+ Duration bigint NULL,
+ Reads bigint NULL,
+ Writes bigint NULL,
+ CPU bigint NULL,
+ Rows bigint NULL,
+ DBID int NULL,
+ ObjectID int NULL,
+ NestLevel tinyint NULL,
+ fDynamicSQL bit NULL,
+ StartSeq bigint NULL,
+ EndSeq bigint NULL,
+ ConnSeq bigint NULL,
+ BatchSeq bigint NULL,
+ ParentStmtSeq bigint NULL,
+ AttnSeq bigint NULL,
+ TextData nvarchar(max) NULL
+)
+GO
+
+IF OBJECT_ID('ReadTrace.tblPlans', 'U') IS NOT NULL DROP TABLE ReadTrace.tblPlans
+GO
+CREATE TABLE ReadTrace.tblPlans
+(
+ Seq bigint NOT NULL,
+ PlanHashID bigint NOT NULL,
+ DBID int NULL,
+ BatchSeq bigint NULL,
+ StmtSeq bigint NULL,
+ Session int NOT NULL,
+ Request int NOT NULL,
+ ConnId bigint NOT NULL,
+ StartTime datetime NOT NULL,
+ DOP tinyint NULL
+)
+GO
+
+IF OBJECT_ID('ReadTrace.tblPlanRows', 'U') IS NOT NULL DROP TABLE ReadTrace.tblPlanRows
+GO
+CREATE TABLE ReadTrace.tblPlanRows
+(
+ Seq bigint NOT NULL,
+ Rows bigint NULL,
+ Executes bigint NULL,
+ EstimateRows float NULL,
+ EstimateExecutes float NULL,
+ RowOrder smallint NOT NULL
+)
+GO
+
+IF OBJECT_ID('ReadTrace.tblConnections', 'U') IS NOT NULL DROP TABLE ReadTrace.tblConnections
+GO
+CREATE TABLE ReadTrace.tblConnections
+(
+ ConnSeq bigint NOT NULL,
+ Session int NOT NULL,
+ StartTime datetime NULL,
+ EndTime datetime NULL,
+ Duration bigint NULL,
+ Reads bigint NULL,
+ Writes bigint NULL,
+ CPU bigint NULL,
+ ApplicationName nvarchar(256) NULL,
+ LoginName nvarchar(256) NULL,
+ HostName nvarchar(256) NULL,
+ NTDomainName nvarchar(256) NULL,
+ NTUserName nvarchar(256) NULL,
+ StartSeq bigint NULL,
+ EndSeq bigint NULL,
+ TextData nvarchar(max) NULL
+)
+GO
+
+IF OBJECT_ID('ReadTrace.tblInterestingEvents', 'U') IS NOT NULL DROP TABLE ReadTrace.tblInterestingEvents
+GO
+CREATE TABLE ReadTrace.tblInterestingEvents
+(
+ Seq bigint NOT NULL,
+ EventID int NOT NULL,
+ Session int NOT NULL,
+ Request int NOT NULL,
+ ConnId bigint NOT NULL,
+ StartTime datetime NULL,
+ EndTime datetime NULL,
+ Duration bigint NULL,
+ DBID int NULL,
+ IntegerData int NULL,
+ EventSubclass int NULL,
+ TextData varchar(1000) NULL,
+ ObjectID int NULL,
+ Error int NULL,
+ BatchSeq bigint NULL,
+ Severity int NULL,
+ State int NULL
+)
+GO
+
+-- ===========================================
+-- Time Intervals and Aggregation Tables
+-- ===========================================
+
+IF OBJECT_ID('ReadTrace.tblTimeIntervals', 'U') IS NOT NULL DROP TABLE ReadTrace.tblTimeIntervals
+GO
+CREATE TABLE ReadTrace.tblTimeIntervals
+(
+ TimeInterval int NOT NULL IDENTITY(1, 1) PRIMARY KEY NONCLUSTERED,
+ StartTime datetime NOT NULL,
+ EndTime datetime NOT NULL
+)
+GO
+
+IF OBJECT_ID('ReadTrace.tblBatchPartialAggs', 'U') IS NOT NULL DROP TABLE ReadTrace.tblBatchPartialAggs
+GO
+CREATE TABLE ReadTrace.tblBatchPartialAggs
+(
+ [HashID] [bigint] NOT NULL,
+ [TimeInterval] [int] NOT NULL,
+ [StartingEvents] [int] NOT NULL,
+ [CompletedEvents] [int] NOT NULL,
+ [AttentionEvents] [int] NOT NULL,
+ [MinDuration] [bigint] NULL,
+ [MaxDuration] [bigint] NULL,
+ [TotalDuration] [bigint] NULL,
+ [MinReads] [bigint] NULL,
+ [MaxReads] [bigint] NULL,
+ [TotalReads] [bigint] NULL,
+ [MinWrites] [bigint] NULL,
+ [MaxWrites] [bigint] NULL,
+ [TotalWrites] [bigint] NULL,
+ [MinCPU] [bigint] NULL,
+ [MaxCPU] [bigint] NULL,
+ [TotalCPU] [bigint] NULL,
+ [AppNameID] int NOT NULL,
+ [LoginNameID] int NOT NULL,
+ [DBID] int NOT NULL
+)
+GO
+
+IF OBJECT_ID('ReadTrace.tblStmtPartialAggs', 'U') IS NOT NULL DROP TABLE ReadTrace.tblStmtPartialAggs
+GO
+CREATE TABLE ReadTrace.tblStmtPartialAggs
+(
+ [HashID] [bigint] NOT NULL,
+ [TimeInterval] [int] NOT NULL,
+ [ObjectID] int NULL,
+ [DBID] int NULL,
+ [AppNameID] int NOT NULL,
+ [LoginNameID] int NOT NULL,
+ [StartingEvents] [int] NOT NULL,
+ [CompletedEvents] [int] NOT NULL,
+ [AttentionEvents] [int] NOT NULL,
+ [MinDuration] [bigint] NULL,
+ [MaxDuration] [bigint] NULL,
+ [TotalDuration] [bigint] NULL,
+ [MinReads] [bigint] NULL,
+ [MaxReads] [bigint] NULL,
+ [TotalReads] [bigint] NULL,
+ [MinWrites] [bigint] NULL,
+ [MaxWrites] [bigint] NULL,
+ [TotalWrites] [bigint] NULL,
+ [MinCPU] [bigint] NULL,
+ [MaxCPU] [bigint] NULL,
+ [TotalCPU] [bigint] NULL
+)
+GO
+
+IF OBJECT_ID('ReadTrace.tblComparisonBatchPartialAggs', 'U') IS NOT NULL DROP TABLE ReadTrace.tblComparisonBatchPartialAggs
+GO
+CREATE TABLE ReadTrace.tblComparisonBatchPartialAggs
+(
+ [b.HashID] [bigint] NULL,
+ [c.HashID] [bigint] NULL,
+ [b.StartingEvents] [bigint] NOT NULL,
+ [b.CompletedEvents] [bigint] NOT NULL,
+ [b.TotalCPU] [bigint] NOT NULL,
+ [b.TotalDuration] [bigint] NOT NULL,
+ [b.TotalReads] [bigint] NOT NULL,
+ [b.TotalWrites] [bigint] NOT NULL,
+ [c.StartingEvents] [bigint] NOT NULL,
+ [c.CompletedEvents] [bigint] NOT NULL,
+ [c.TotalCPU] [bigint] NOT NULL,
+ [c.TotalDuration] [bigint] NOT NULL,
+ [c.TotalReads] [bigint] NOT NULL,
+ [c.TotalWrites] [bigint] NOT NULL,
+ [ProjectedCPUDiff] numeric(38,4) NOT NULL,
+ [ProjectedReadsDiff] numeric(38,4) NOT NULL,
+ [ProjectedWritesDiff] numeric(38,4) NOT NULL,
+ [ProjectedDurationDiff] numeric(38,4) NOT NULL
+)
+GO
+
+-- ===========================================
+-- Warnings Table
+-- ===========================================
+
+IF OBJECT_ID('ReadTrace.tblWarnings', 'U') IS NOT NULL DROP TABLE ReadTrace.tblWarnings
+GO
+CREATE TABLE ReadTrace.tblWarnings
+(
+ WarningID int NOT NULL IDENTITY(1, 1) PRIMARY KEY,
+ WarningMessage varchar(2000) NOT NULL,
+ NumberOfTimes int NULL,
+ FirstGlobalSeq bigint NULL,
+ fMayAffectCPU bit NOT NULL,
+ fMayAffectIO bit NOT NULL,
+ fMayAffectDuration bit NOT NULL,
+ fAffectsEventAssociation bit NOT NULL
+)
+GO
+
+-- ===========================================
+-- Views
+-- ===========================================
+
+-- Per-interval rollup of batch activity: collapses the HashID/AppNameID/
+-- LoginNameID/DBID groupings of tblBatchPartialAggs down to one row per
+-- TimeInterval, carrying the interval's wall-clock bounds from tblTimeIntervals.
+IF OBJECT_ID('ReadTrace.vwBatchPartialAggsByGroupTimeInterval', 'V') IS NOT NULL
+ DROP VIEW ReadTrace.vwBatchPartialAggsByGroupTimeInterval
+GO
+CREATE VIEW ReadTrace.vwBatchPartialAggsByGroupTimeInterval
+AS
+SELECT
+ t.StartTime,
+ t.EndTime,
+ a.TimeInterval,
+ SUM(a.StartingEvents) AS StartingEvents,
+ SUM(a.CompletedEvents) AS CompletedEvents,
+ SUM(a.AttentionEvents) AS Attentions,
+ SUM(a.TotalDuration) AS Duration,
+ SUM(a.TotalReads) AS Reads,
+ SUM(a.TotalWrites) AS Writes,
+ SUM(a.TotalCPU) AS CPU
+FROM ReadTrace.tblBatchPartialAggs a
+INNER JOIN ReadTrace.tblTimeIntervals t ON a.TimeInterval = t.TimeInterval
+GROUP BY a.TimeInterval, t.StartTime, t.EndTime
+GO
diff --git a/TraceEventImporter/Schema/PostLoadFixups.sql b/TraceEventImporter/Schema/PostLoadFixups.sql
new file mode 100644
index 00000000..cfdca323
--- /dev/null
+++ b/TraceEventImporter/Schema/PostLoadFixups.sql
@@ -0,0 +1,92 @@
+-- PostLoadFixups.sql
+-- Run after all data is bulk-loaded to create indexes and reconcile relationships.
+
+-- ===========================================
+-- Primary / Clustered Indexes
+-- ===========================================
+
+IF NOT EXISTS (SELECT 1 FROM sys.indexes WHERE object_id = OBJECT_ID('ReadTrace.tblBatches') AND name = 'PK_tblBatches')
+ ALTER TABLE ReadTrace.tblBatches ADD CONSTRAINT PK_tblBatches PRIMARY KEY CLUSTERED (BatchSeq)
+GO
+
+IF NOT EXISTS (SELECT 1 FROM sys.indexes WHERE object_id = OBJECT_ID('ReadTrace.tblStatements') AND name = 'PK_tblStatements')
+ ALTER TABLE ReadTrace.tblStatements ADD CONSTRAINT PK_tblStatements PRIMARY KEY CLUSTERED (StmtSeq)
+GO
+
+IF NOT EXISTS (SELECT 1 FROM sys.indexes WHERE object_id = OBJECT_ID('ReadTrace.tblConnections') AND name = 'PK_tblConnections')
+ ALTER TABLE ReadTrace.tblConnections ADD CONSTRAINT PK_tblConnections PRIMARY KEY CLUSTERED (ConnSeq)
+GO
+
+IF NOT EXISTS (SELECT 1 FROM sys.indexes WHERE object_id = OBJECT_ID('ReadTrace.tblUniqueBatches') AND name = 'PK_tblUniqueBatches')
+ ALTER TABLE ReadTrace.tblUniqueBatches ADD CONSTRAINT PK_tblUniqueBatches PRIMARY KEY CLUSTERED (HashID)
+GO
+
+IF NOT EXISTS (SELECT 1 FROM sys.indexes WHERE object_id = OBJECT_ID('ReadTrace.tblUniqueStatements') AND name = 'PK_tblUniqueStatements')
+ ALTER TABLE ReadTrace.tblUniqueStatements ADD CONSTRAINT PK_tblUniqueStatements PRIMARY KEY CLUSTERED (HashID)
+GO
+
+-- ===========================================
+-- Nonclustered Indexes
+-- ===========================================
+
+IF NOT EXISTS (SELECT 1 FROM sys.indexes WHERE object_id = OBJECT_ID('ReadTrace.tblBatches') AND name = 'tblBatches_HashID')
+ CREATE NONCLUSTERED INDEX tblBatches_HashID ON ReadTrace.tblBatches (HashID)
+GO
+
+IF NOT EXISTS (SELECT 1 FROM sys.indexes WHERE object_id = OBJECT_ID('ReadTrace.tblBatches') AND name = 'tblBatches_Session')
+ CREATE NONCLUSTERED INDEX tblBatches_Session ON ReadTrace.tblBatches (Session, Request)
+GO
+
+IF NOT EXISTS (SELECT 1 FROM sys.indexes WHERE object_id = OBJECT_ID('ReadTrace.tblStatements') AND name = 'tblStatements_HashID')
+ CREATE NONCLUSTERED INDEX tblStatements_HashID ON ReadTrace.tblStatements (HashID)
+GO
+
+IF NOT EXISTS (SELECT 1 FROM sys.indexes WHERE object_id = OBJECT_ID('ReadTrace.tblStatements') AND name = 'tblStatements_BatchSeq')
+ CREATE NONCLUSTERED INDEX tblStatements_BatchSeq ON ReadTrace.tblStatements (BatchSeq)
+GO
+
+IF NOT EXISTS (SELECT 1 FROM sys.indexes WHERE object_id = OBJECT_ID('ReadTrace.tblInterestingEvents') AND name = 'tblInterestingEvents_BatchSeq')
+ CREATE NONCLUSTERED INDEX tblInterestingEvents_BatchSeq ON ReadTrace.tblInterestingEvents (BatchSeq)
+GO
+
+IF NOT EXISTS (SELECT 1 FROM sys.indexes WHERE object_id = OBJECT_ID('ReadTrace.tblBatchPartialAggs') AND name = 'tblBatchPartialAggs_HashID')
+ CREATE NONCLUSTERED INDEX tblBatchPartialAggs_HashID ON ReadTrace.tblBatchPartialAggs (HashID, TimeInterval)
+GO
+
+IF NOT EXISTS (SELECT 1 FROM sys.indexes WHERE object_id = OBJECT_ID('ReadTrace.tblStmtPartialAggs') AND name = 'tblStmtPartialAggs_HashID')
+ CREATE NONCLUSTERED INDEX tblStmtPartialAggs_HashID ON ReadTrace.tblStmtPartialAggs (HashID, TimeInterval)
+GO
+
+-- ===========================================
+-- Post-load Fixups: Link ConnSeq in tblBatches
+-- ===========================================
+UPDATE b
+SET b.ConnSeq = c.ConnSeq
+FROM ReadTrace.tblBatches b
+INNER JOIN ReadTrace.tblConnections c ON b.Session = c.Session
+WHERE b.ConnSeq IS NULL
+GO
+
+-- ===========================================
+-- Post-load Fixups: Link ParentStmtSeq for nested statements
+-- ===========================================
+;WITH StmtParents AS
+(
+ SELECT
+ s.StmtSeq,
+ s.BatchSeq,
+ s.NestLevel,
+ s.Session,
+ s.Request,
+ s.StartTime,
+ LAG(s.StmtSeq) OVER (PARTITION BY s.Session, s.Request ORDER BY s.StmtSeq) AS PrevStmtSeq,
+ LAG(s.NestLevel) OVER (PARTITION BY s.Session, s.Request ORDER BY s.StmtSeq) AS PrevNestLevel
+ FROM ReadTrace.tblStatements s
+    -- NOTE: no NestLevel filter here — the NestLevel-1 parent rows must stay visible to LAG,
+)
+UPDATE s
+SET s.ParentStmtSeq = sp.PrevStmtSeq
+FROM ReadTrace.tblStatements s
+INNER JOIN StmtParents sp ON s.StmtSeq = sp.StmtSeq
+WHERE sp.NestLevel > 1 AND sp.PrevNestLevel IS NOT NULL AND sp.PrevNestLevel < sp.NestLevel
+GO
diff --git a/TraceEventImporter/TraceEventImporter.csproj b/TraceEventImporter/TraceEventImporter.csproj
new file mode 100644
index 00000000..8cf7ea5a
--- /dev/null
+++ b/TraceEventImporter/TraceEventImporter.csproj
@@ -0,0 +1,30 @@
+
+
+ net48
+ TraceEventImporter
+ TraceEventImporter
+ true
+ true
+ ..\NexusInterfaces\SqlNexus.snk
+ 7.3
+ false
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/TraceEventImporter/TraceEventImporterPlugin.cs b/TraceEventImporter/TraceEventImporterPlugin.cs
new file mode 100644
index 00000000..c605eaad
--- /dev/null
+++ b/TraceEventImporter/TraceEventImporterPlugin.cs
@@ -0,0 +1,534 @@
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Reflection;
+using System.Text.RegularExpressions;
+using System.Windows.Forms;
+using Microsoft.Data.SqlClient;
+using NexusInterfaces;
+using TraceEventImporter.Database;
+using TraceEventImporter.Models;
+using TraceEventImporter.Processing;
+using TraceEventImporter.Readers;
+
+namespace TraceEventImporter
+{
+ ///
+ /// SqlNexus importer plugin that reads .xel trace files, normalizes SQL text,
+ /// computes HashIDs, and bulk-loads data into the ReadTrace schema.
+ /// Coexists with the existing ReadTraceNexusImporter (which handles .trc files
+ /// by shelling out to ReadTrace.exe).
+ /// Discovered automatically by SqlNexus via DLL reflection.
+ ///
+ public class TraceEventImporterPlugin : INexusImporter, INexusProgressReporter
+ {
+ // Options
+ private const string OPTION_ENABLED = "Enabled";
+ private const string OPTION_DROP_EXISTING = "Drop existing ReadTrace tables";
+ private const string OPTION_INTERVAL_SECONDS = "Aggregation interval (seconds)";
+ private const string OPTION_USE_LOCAL_SERVER_TIME = "Import events using local server time (not UTC)";
+
+ // Private state
+ private ILogger _logger;
+ private string _connStr;
+ private string _server;
+ private bool _useWindowsAuth;
+ private string _sqlLogin;
+ private string _sqlPassword;
+ private string _database;
+ private string _fileMask;
+ private ImportState _state = ImportState.Idle;
+ private volatile bool _cancelled;
+ private long _totalRowsInserted;
+ private long _totalLinesProcessed;
+ private long _currentPosition;
+ private readonly ArrayList _knownRowsets = new ArrayList();
+ private readonly Dictionary _options = new Dictionary();
+
+ public TraceEventImporterPlugin()
+ {
+ _options.Add(OPTION_DROP_EXISTING, true);
+ _options.Add(OPTION_INTERVAL_SECONDS, 60);
+ _options.Add(OPTION_ENABLED, true);
+ _options.Add(OPTION_USE_LOCAL_SERVER_TIME, false);
+ }
+
+ #region INexusImporter
+
+ public Guid ID => new Guid("B7A3C2D1-E4F5-4A6B-8C9D-0E1F2A3B4C5D");
+
+ public string Name => "Trace Event Importer (Managed)";
+
+ // .trc files are handled by ReadTraceNexusImporter (ReadTrace.exe).
+ // This importer handles XEL files only.
+ public string[] SupportedMasks => new[] { "*pssdiag*.xel", "*LogScout*.xel" };
+
+ public Dictionary Options => _options;
+
+ public Form OptionsDialog => null;
+
+ public string[] PreScripts => new string[0];
+
+ public string[] PostScripts => new string[] { "ReadTracePostProcessing.sql" };
+
+ public ImportState State
+ {
+ get => _state;
+ private set
+ {
+ _state = value;
+ OnStatusChanged(EventArgs.Empty);
+ }
+ }
+
+ public bool Cancelled
+ {
+ get => _cancelled;
+ set => _cancelled = value;
+ }
+
+ public ArrayList KnownRowsets => _knownRowsets;
+
+ public long TotalRowsInserted => _totalRowsInserted;
+
+ public long TotalLinesProcessed => _totalLinesProcessed;
+
+ public void Initialize(string Filemask, string connString, string Server,
+ bool UseWindowsAuth, string SQLLogin, string SQLPassword, string DatabaseName, ILogger Logger)
+ {
+ _fileMask = Filemask;
+ _connStr = connString;
+ _server = Server;
+ _useWindowsAuth = UseWindowsAuth;
+ _sqlLogin = SQLLogin;
+ _sqlPassword = SQLPassword;
+ _database = DatabaseName;
+ _logger = Logger;
+
+ _state = ImportState.Idle;
+ _cancelled = false;
+ _totalRowsInserted = 0;
+ _totalLinesProcessed = 0;
+ }
+
+ public bool DoImport()
+ {
+ try
+ {
+ LogMessage("TraceEventImporter: Starting import...");
+ State = ImportState.Importing;
+
+ // 1. Find trace files
+ string dir = Path.GetDirectoryName(_fileMask);
+ string mask = Path.GetFileName(_fileMask);
+ if (string.IsNullOrEmpty(dir) || !Directory.Exists(dir))
+ {
+ LogMessage("TraceEventImporter: Directory not found: " + dir);
+ State = ImportState.NoFiles;
+ return false;
+ }
+
+ string[] files = Directory.GetFiles(dir, mask).OrderBy(f => f).ToArray();
+
+ if (files.Length == 0)
+ {
+ LogMessage("TraceEventImporter: No eligible trace files found.");
+ State = ImportState.NoFiles;
+ return false;
+ }
+
+ LogMessage($"TraceEventImporter: Processing {files.Length} file(s)...");
+
+ // 2. Deploy schema
+ LogMessage("TraceEventImporter: Creating ReadTrace schema and tables...");
+ DeploySchema();
+
+ // 3. Process all files
+ var store = new UniqueStore();
+ var processor = new EventProcessor(store);
+ int intervalSeconds = Convert.ToInt32(_options[OPTION_INTERVAL_SECONDS]);
+ long globalSeq = 0;
+
+ // When importing with local server time, shift every event timestamp by the
+ // UTC-to-local offset so StartTime/EndTime are stored in local time, matching
+ // the behaviour of ReadTraceNexusImporter's -B flag passed to ReadTrace.exe.
+ TimeSpan localTimeOffset = TimeSpan.Zero;
+ if ((bool)_options[OPTION_USE_LOCAL_SERVER_TIME])
+ {
+ decimal offsetHours = GetLocalServerTimeOffset();
+ localTimeOffset = TimeSpan.FromHours((double)offsetHours);
+ LogMessage($"TraceEventImporter: Local server time offset = {offsetHours} hours; timestamps will be shifted accordingly.");
+ }
+
+ using (var writer = new BulkWriter(_connStr))
+ {
+ foreach (string file in files)
+ {
+ if (_cancelled) break;
+
+ LogMessage($"TraceEventImporter: Reading {Path.GetFileName(file)}...");
+
+ ITraceEventReader reader = new XelFileReader(globalSeq);
+
+ long fileFirstSeq = long.MaxValue;
+ long fileLastSeq = 0;
+ DateTime? fileFirstTime = null;
+ DateTime? fileLastTime = null;
+ long fileEventsRead = 0;
+
+ foreach (TraceEvent evt in reader.ReadEvents(file))
+ {
+ if (_cancelled) break;
+
+ if (localTimeOffset != TimeSpan.Zero)
+ {
+ if (evt.StartTime.HasValue)
+ evt.StartTime = evt.StartTime.Value.Add(localTimeOffset);
+ if (evt.EndTime.HasValue)
+ evt.EndTime = evt.EndTime.Value.Add(localTimeOffset);
+ }
+
+ processor.ProcessEvent(evt);
+ _totalLinesProcessed++;
+ fileEventsRead++;
+
+ if (evt.Seq < fileFirstSeq) fileFirstSeq = evt.Seq;
+ if (evt.Seq > fileLastSeq) fileLastSeq = evt.Seq;
+ if (evt.Seq > globalSeq) globalSeq = evt.Seq;
+
+ if (evt.StartTime.HasValue)
+ {
+ if (!fileFirstTime.HasValue || evt.StartTime.Value < fileFirstTime.Value)
+ fileFirstTime = evt.StartTime;
+ if (!fileLastTime.HasValue || evt.StartTime.Value > fileLastTime.Value)
+ fileLastTime = evt.StartTime;
+ }
+
+ if (fileEventsRead % 10000 == 0)
+ {
+ _currentPosition = fileEventsRead;
+ OnProgressChanged(EventArgs.Empty);
+ }
+ }
+
+ if (fileEventsRead > 0)
+ {
+ writer.WriteTraceFile(fileFirstSeq, fileLastSeq, fileFirstTime, fileLastTime, fileEventsRead, Path.GetFileName(file));
+ }
+
+ LogMessage($"TraceEventImporter: {Path.GetFileName(file)} - {fileEventsRead} events read.");
+ }
+
+ if (_cancelled)
+ {
+ LogMessage("TraceEventImporter: Import cancelled.");
+ State = ImportState.Idle;
+ return false;
+ }
+
+                    // 4. Finalize processor (flush pending connections). NOTE(review): a user-declared EventProcessor.Finalize hides Object.Finalize (CS0465) — confirm and consider renaming to Flush/Complete in a follow-up.
+ processor.Finalize();
+
+ // 5. Write all data
+ LogMessage("TraceEventImporter: Writing data to database...");
+
+ writer.WriteMiscInfo("SchemaVersion", "3.0");
+ writer.WriteMiscInfo("LoadDateTime", DateTime.UtcNow.ToString("yyyy-MM-dd HH:mm:ss"));
+ writer.WriteMiscInfo("ImporterVersion", "TraceEventImporter 1.0");
+
+ var tracedEvents = store.GetTracedEventIds();
+ writer.WriteTracedEvents(tracedEvents);
+ var uniqueAppNames = store.GetUniqueAppNames();
+ writer.WriteUniqueAppNames(uniqueAppNames);
+ var uniqueLoginNames = store.GetUniqueLoginNames();
+ writer.WriteUniqueLoginNames(uniqueLoginNames);
+ var procedureNames = store.GetProcedureNames();
+ writer.WriteProcedureNames(procedureNames);
+
+ var uniqueBatches = store.GetUniqueBatches();
+ writer.WriteUniqueBatches(uniqueBatches);
+ var uniqueStatements = store.GetUniqueStatements();
+ writer.WriteUniqueStatements(uniqueStatements);
+
+ writer.WriteConnections(processor.Connections);
+ writer.WriteBatches(processor.Batches);
+ writer.WriteStatements(processor.Statements);
+ writer.WriteInterestingEvents(processor.InterestingEvents);
+
+ // 6. Aggregation
+ LogMessage("TraceEventImporter: Computing aggregations...");
+ var aggregator = new Aggregator(intervalSeconds);
+ aggregator.Compute(processor.Batches, processor.Statements);
+
+ writer.WriteTimeIntervals(aggregator.TimeIntervals);
+ writer.WriteBatchPartialAggs(aggregator.BatchAggs);
+ writer.WriteStmtPartialAggs(aggregator.StmtAggs);
+
+ _totalRowsInserted = writer.TotalRowsInserted;
+
+ LogMessage("TraceEventImporter: --- Row counts per table ---");
+ LogMessage($"TraceEventImporter: tblTracedEvents: {tracedEvents.Count()}");
+ LogMessage($"TraceEventImporter: tblUniqueAppNames: {uniqueAppNames.Count()}");
+ LogMessage($"TraceEventImporter: tblUniqueLoginNames: {uniqueLoginNames.Count()}");
+ LogMessage($"TraceEventImporter: tblProcedureNames: {procedureNames.Count()}");
+ LogMessage($"TraceEventImporter: tblUniqueBatches: {uniqueBatches.Count()}");
+ LogMessage($"TraceEventImporter: tblUniqueStatements: {uniqueStatements.Count()}");
+ LogMessage($"TraceEventImporter: tblConnections: {processor.Connections.Count}");
+ LogMessage($"TraceEventImporter: tblBatches: {processor.Batches.Count}");
+ LogMessage($"TraceEventImporter: tblStatements: {processor.Statements.Count}");
+ LogMessage($"TraceEventImporter: tblInterestingEvents: {processor.InterestingEvents.Count}");
+ LogMessage($"TraceEventImporter: tblTimeIntervals: {aggregator.TimeIntervals.Count}");
+ LogMessage($"TraceEventImporter: tblBatchPartialAggs: {aggregator.BatchAggs.Count}");
+ LogMessage($"TraceEventImporter: tblStmtPartialAggs: {aggregator.StmtAggs.Count}");
+ LogMessage($"TraceEventImporter: Total rows inserted: {_totalRowsInserted}");
+ }
+
+ // 7. Post-load fixups
+ LogMessage("TraceEventImporter: Running post-load fixups...");
+ RunPostLoadFixups();
+
+ LogMessage($"TraceEventImporter: Import complete. {_totalRowsInserted} total rows inserted.");
+
+ // Always write the flag ('1' = timestamps already in local time, '0' = timestamps
+ // are UTC). SQLNexus_PostProcessing.sql reads this value to choose between a direct
+ // copy and a DATEADD offset conversion when populating StartTime_local/EndTime_local.
+ WriteLocalTimeFlag((bool)_options[OPTION_USE_LOCAL_SERVER_TIME]);
+
+ State = ImportState.Idle;
+ return true;
+ }
+ catch (Exception ex)
+ {
+ LogMessage($"TraceEventImporter: Error - {ex.Message}");
+ LogMessage(ex.ToString());
+ State = ImportState.Idle;
+ return false;
+ }
+ }
+
+ public void Cancel()
+ {
+ _cancelled = true;
+ State = ImportState.Canceling;
+ LogMessage("TraceEventImporter: Cancel requested.");
+ }
+
+ public event EventHandler StatusChanged;
+
+ public void OnStatusChanged(EventArgs e)
+ {
+ StatusChanged?.Invoke(this, e);
+ }
+
+ #endregion
+
+ #region INexusProgressReporter
+
+ public long CurrentPosition => _currentPosition;
+
+ public event EventHandler ProgressChanged;
+
+ public void OnProgressChanged(EventArgs e)
+ {
+ ProgressChanged?.Invoke(this, e);
+ }
+
+ #endregion
+
+ #region Private Helpers
+
+ private void DeploySchema()
+ {
+ string sql = LoadEmbeddedResource("TraceEventImporter.Schema.CreateSchema.sql");
+ LogMessage($"TraceEventImporter: Schema script loaded ({sql.Length} chars).");
+ ExecuteSqlScript(sql);
+ LogMessage("TraceEventImporter: Schema deployment complete.");
+ }
+
+ private void RunPostLoadFixups()
+ {
+ string sql = LoadEmbeddedResource("TraceEventImporter.Schema.PostLoadFixups.sql");
+ ExecuteSqlScript(sql);
+ }
+
+ private const string LOCAL_SRV_TIME_QUERY =
+ "SELECT ISNULL(CONVERT(decimal, PropertyValue), 0) UtcToLocalOffset " +
+ "FROM tbl_ServerProperties " +
+ "WHERE PropertyName = 'UTCOffset_in_Hours'";
+
+ ///
+ /// Reads the UTC-to-local offset (in hours) from the database.
+ /// Returns 0 if the value cannot be determined.
+ ///
+ private decimal GetLocalServerTimeOffset()
+ {
+ // Primary: tbl_ServerProperties
+ try
+ {
+ using (var cn = new SqlConnection(_connStr))
+ {
+ cn.Open();
+ using (var cmd = new SqlCommand(LOCAL_SRV_TIME_QUERY, cn))
+ {
+ cmd.CommandTimeout = 0;
+ object result = cmd.ExecuteScalar();
+ if (result != null && result != DBNull.Value)
+ {
+ decimal offset = Convert.ToDecimal(result);
+ LogMessage("TraceEventImporter: UTC_Offset from tbl_ServerProperties: " + offset);
+ return offset;
+ }
+ }
+ }
+ }
+ catch (Exception e)
+ {
+ LogMessage("TraceEventImporter: Could not read UTC offset from tbl_ServerProperties: " + e.Message);
+ }
+
+ // Fallback: tbl_server_times (Log Scout captures without PSSDIAG)
+ try
+ {
+ using (var cn = new SqlConnection(_connStr))
+ {
+ cn.Open();
+ using (var cmd = new SqlCommand(
+ "SELECT TOP 1 ISNULL(time_delta_hours * -1, 0) FROM dbo.tbl_server_times", cn))
+ {
+ cmd.CommandTimeout = 0;
+ object result = cmd.ExecuteScalar();
+ if (result != null && result != DBNull.Value)
+ {
+ decimal offset = Convert.ToDecimal(result);
+ LogMessage("TraceEventImporter: UTC_Offset from tbl_server_times: " + offset);
+ return offset;
+ }
+ }
+ }
+ }
+ catch (Exception e)
+ {
+ LogMessage("TraceEventImporter: Could not read UTC offset from tbl_server_times: " + e.Message);
+ }
+
+ LogMessage("TraceEventImporter: UTC offset not found; defaulting to 0.");
+ return 0;
+ }
+
+ ///
+ /// Writes a flag to ReadTrace.tblMiscInfo (and to tbl_ServerProperties /
+ /// tbl_server_times when present) that records whether timestamps in the ReadTrace
+ /// tables are already in local server time ( = true, value
+ /// '1') or in UTC (false, value '0').
+ /// SQLNexus_PostProcessing.sql reads this flag to choose between a direct copy
+ /// and a DATEADD offset conversion when populating
+ /// StartTime_local / EndTime_local.
+ ///
+ private void WriteLocalTimeFlag(bool isLocalTime)
+ {
+ // @flagStr = '1' or '0' for VARCHAR columns (tbl_ServerProperties, tblMiscInfo).
+ // @flagBit = 1 or 0 for the BIT column on tbl_server_times.
+ // sp_executesql's own @val parameter is fed from the outer @flagBit so the BIT
+ // update is also fully parameterised with no runtime concatenation.
+ const string sql =
+ // --- tbl_ServerProperties (primary) ---
+ "IF OBJECT_ID('dbo.tbl_ServerProperties') IS NOT NULL " +
+ "BEGIN " +
+ " IF EXISTS (SELECT 1 FROM dbo.tbl_ServerProperties WHERE PropertyName = 'ImportedTraceTimestampsInLocalTime') " +
+            "      UPDATE dbo.tbl_ServerProperties SET PropertyValue = @flagStr WHERE PropertyName = 'ImportedTraceTimestampsInLocalTime' " +
+ " ELSE " +
+ " INSERT INTO dbo.tbl_ServerProperties (PropertyName, PropertyValue) VALUES ('ImportedTraceTimestampsInLocalTime', @flagStr); " +
+ "END " +
+ // --- tbl_server_times (fallback) ---
+ // NOTE: ALTER TABLE and UPDATE must be in different batches; sp_executesql is
+ // used here so the UPDATE is compiled only after the column is already visible.
+ // The outer @flagBit parameter is forwarded into sp_executesql's own @val.
+ "IF OBJECT_ID('dbo.tbl_server_times') IS NOT NULL " +
+ "BEGIN " +
+ " IF COL_LENGTH('dbo.tbl_server_times', 'ImportedTraceTimestampsInLocalTime') IS NULL " +
+ " ALTER TABLE dbo.tbl_server_times ADD [ImportedTraceTimestampsInLocalTime] BIT NULL; " +
+ " IF COL_LENGTH('dbo.tbl_server_times', 'ImportedTraceTimestampsInLocalTime') IS NOT NULL " +
+ " EXEC sp_executesql N'UPDATE dbo.tbl_server_times SET [ImportedTraceTimestampsInLocalTime] = @val', N'@val BIT', @val = @flagBit; " +
+ "END " +
+ // --- ReadTrace.tblMiscInfo (guaranteed fallback) ---
+ "IF OBJECT_ID('ReadTrace.tblMiscInfo') IS NOT NULL " +
+ "BEGIN " +
+ " IF EXISTS (SELECT 1 FROM ReadTrace.tblMiscInfo WHERE Attribute = 'ImportedTraceTimestampsInLocalTime') " +
+            "      UPDATE ReadTrace.tblMiscInfo SET Value = @flagStr WHERE Attribute = 'ImportedTraceTimestampsInLocalTime' " +
+ " ELSE " +
+ " INSERT INTO ReadTrace.tblMiscInfo (Attribute, Value) VALUES ('ImportedTraceTimestampsInLocalTime', @flagStr); " +
+ "END";
+
+ using (var cn = new SqlConnection(_connStr))
+ {
+ try
+ {
+ cn.Open();
+ using (var cmd = new SqlCommand(sql, cn))
+ {
+ cmd.CommandTimeout = 0;
+ cmd.Parameters.AddWithValue("@flagStr", isLocalTime ? "1" : "0");
+ cmd.Parameters.AddWithValue("@flagBit", isLocalTime);
+ cmd.ExecuteNonQuery();
+ }
+ LogMessage("TraceEventImporter: Wrote 'ImportedTraceTimestampsInLocalTime'=" + (isLocalTime ? "1" : "0") + " to tbl_ServerProperties, tbl_server_times, and/or ReadTrace.tblMiscInfo.");
+ }
+ catch (Exception e)
+ {
+ LogMessage("TraceEventImporter: Could not write local time flag: " + e.Message);
+ }
+ }
+ }
+
+ private void ExecuteSqlScript(string script)
+ {
+ string[] batches = Regex.Split(script, @"^\s*GO\s*$", RegexOptions.Multiline | RegexOptions.IgnoreCase);
+
+ int executedCount = 0;
+ using (var conn = new SqlConnection(_connStr))
+ {
+ conn.Open();
+ LogMessage($"TraceEventImporter: Executing SQL script ({batches.Length} batches) on database '{conn.Database}'...");
+ foreach (string batch in batches)
+ {
+ string trimmed = batch.Trim();
+ if (trimmed.Length == 0) continue;
+
+ using (var cmd = new SqlCommand(trimmed, conn))
+ {
+ cmd.CommandTimeout = 0;
+ cmd.ExecuteNonQuery();
+ executedCount++;
+ }
+ }
+ LogMessage($"TraceEventImporter: Executed {executedCount} SQL batch(es) successfully.");
+ }
+ }
+
+ private static string LoadEmbeddedResource(string resourceName)
+ {
+ var assembly = Assembly.GetExecutingAssembly();
+ using (Stream stream = assembly.GetManifestResourceStream(resourceName))
+ {
+ if (stream == null)
+ throw new InvalidOperationException($"Embedded resource not found: {resourceName}");
+ using (var reader = new StreamReader(stream))
+ {
+ return reader.ReadToEnd();
+ }
+ }
+ }
+
+ private void LogMessage(string msg)
+ {
+ if (_logger != null)
+ _logger.LogMessage(msg);
+ else
+ System.Diagnostics.Trace.WriteLine(msg);
+ }
+
+ #endregion
+ }
+}
diff --git a/sqlnexus.sln b/sqlnexus.sln
index 45f9171d..a65796a6 100644
--- a/sqlnexus.sln
+++ b/sqlnexus.sln
@@ -28,6 +28,8 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution
Setup-Related\SetupSQLNexusPrereq.ps1 = Setup-Related\SetupSQLNexusPrereq.ps1
EndProjectSection
EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "TraceEventImporter", "TraceEventImporter\TraceEventImporter.csproj", "{08FCB9FF-C633-4B28-947B-39D553091600}"
+EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ErrorLogImporter", "ErrorLogImporter\ErrorLogImporter.csproj", "{B3A4C5D6-E7F8-4A9B-0C1D-2E3F4A5B6C7D}"
EndProject
Global
@@ -397,6 +399,56 @@ Global
{CCF72846-1645-4548-8E66-A2FFCE83692D}.Release|x64.Build.0 = Release|Any CPU
{CCF72846-1645-4548-8E66-A2FFCE83692D}.Release|x86.ActiveCfg = Release|Any CPU
{CCF72846-1645-4548-8E66-A2FFCE83692D}.Release|x86.Build.0 = Release|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug|Mixed Platforms.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug|Win32.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug|Win32.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug|x64.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug|x64.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug|x86.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug|x86.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug64|Any CPU.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug64|Any CPU.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug64|Mixed Platforms.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug64|Mixed Platforms.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug64|Win32.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug64|Win32.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug64|x64.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug64|x64.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug64|x86.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Debug64|x86.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.DebugLocal|Any CPU.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.DebugLocal|Any CPU.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.DebugLocal|Mixed Platforms.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.DebugLocal|Mixed Platforms.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.DebugLocal|Win32.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.DebugLocal|Win32.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.DebugLocal|x64.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.DebugLocal|x64.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.DebugLocal|x86.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.DebugLocal|x86.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Production|Any CPU.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Production|Any CPU.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Production|Mixed Platforms.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Production|Mixed Platforms.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Production|Win32.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Production|Win32.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Production|x64.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Production|x64.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Production|x86.ActiveCfg = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Production|x86.Build.0 = Debug|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Release|Any CPU.Build.0 = Release|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Release|Mixed Platforms.ActiveCfg = Release|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Release|Mixed Platforms.Build.0 = Release|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Release|Win32.ActiveCfg = Release|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Release|Win32.Build.0 = Release|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Release|x64.ActiveCfg = Release|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Release|x64.Build.0 = Release|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Release|x86.ActiveCfg = Release|Any CPU
+ {08FCB9FF-C633-4B28-947B-39D553091600}.Release|x86.Build.0 = Release|Any CPU
{B3A4C5D6-E7F8-4A9B-0C1D-2E3F4A5B6C7D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{B3A4C5D6-E7F8-4A9B-0C1D-2E3F4A5B6C7D}.Debug|Any CPU.Build.0 = Debug|Any CPU
{B3A4C5D6-E7F8-4A9B-0C1D-2E3F4A5B6C7D}.Debug|Mixed Platforms.ActiveCfg = Debug|Any CPU
diff --git a/sqlnexus/AppConfig.xml b/sqlnexus/AppConfig.xml
index 036b6499..2101ebc5 100644
--- a/sqlnexus/AppConfig.xml
+++ b/sqlnexus/AppConfig.xml
@@ -18,6 +18,8 @@
+
+
diff --git a/sqlnexus/SQLNexus_PostProcessing.sql b/sqlnexus/SQLNexus_PostProcessing.sql
index c222f64d..8b6fc3c1 100644
--- a/sqlnexus/SQLNexus_PostProcessing.sql
+++ b/sqlnexus/SQLNexus_PostProcessing.sql
@@ -47,35 +47,83 @@ END;
GO
IF (
- (OBJECT_ID('tbl_ServerProperties') IS NOT NULL)
- OR (OBJECT_ID('tbl_server_times') IS NOT NULL)
+ (OBJECT_ID('[ReadTrace].[tblBatches]') IS NOT NULL)
+ OR (OBJECT_ID('[ReadTrace].[tblStatements]') IS NOT NULL)
+ OR (OBJECT_ID('[ReadTrace].[tblConnections]') IS NOT NULL)
)
BEGIN
- --get the offset from one of two possible tables
- DECLARE @utc_to_local_offset NUMERIC(3, 0) = 0;
+ -- Check whether the importer flagged that timestamps are already in local server time.
+ -- When true, StartTime/EndTime in the ReadTrace tables are already local, so
+ -- StartTime_local/EndTime_local should be a direct copy rather than an offset conversion.
+ DECLARE @timestamps_are_local BIT = 0;
+
+ IF OBJECT_ID('dbo.tbl_ServerProperties') IS NOT NULL
+ BEGIN
+ IF EXISTS (
+ SELECT 1 FROM dbo.tbl_ServerProperties
+ WHERE PropertyName = 'ImportedTraceTimestampsInLocalTime' AND PropertyValue = '1'
+ )
+ SET @timestamps_are_local = 1;
+    END
+ ELSE IF OBJECT_ID('dbo.tbl_server_times') IS NOT NULL
+ AND COL_LENGTH('dbo.tbl_server_times', 'ImportedTraceTimestampsInLocalTime') IS NOT NULL
+ BEGIN
+ IF EXISTS (
+ SELECT 1 FROM dbo.tbl_server_times
+ WHERE [ImportedTraceTimestampsInLocalTime] = 1
+ )
+ SET @timestamps_are_local = 1;
+ END;
- IF OBJECT_ID('tbl_ServerProperties') IS NOT NULL
+ -- Fallback: ReadTrace.tblMiscInfo is written by both managed importers and is always
+ -- present even when PSSDIAG / Log Scout diagnostic tables (tbl_ServerProperties,
+ -- tbl_server_times) are absent (e.g. when only the Trace Event Importer is enabled).
+ IF @timestamps_are_local = 0 AND OBJECT_ID('ReadTrace.tblMiscInfo') IS NOT NULL
BEGIN
- SELECT @utc_to_local_offset = PropertyValue
- FROM dbo.tbl_ServerProperties
- WHERE PropertyName = 'UTCOffset_in_Hours';
+ IF EXISTS (
+ SELECT 1 FROM ReadTrace.tblMiscInfo
+ WHERE Attribute = 'ImportedTraceTimestampsInLocalTime' AND Value = '1'
+ )
+ SET @timestamps_are_local = 1;
END;
- ELSE IF OBJECT_ID('tbl_server_times') IS NOT NULL
+
+ --get the offset from one of two possible tables (defaults to 0 when neither is present)
+ DECLARE @utc_to_local_offset NUMERIC(3, 0) = 0;
+
+ IF @timestamps_are_local = 0
BEGIN
- SELECT TOP 1
- @utc_to_local_offset = time_delta_hours * -1
- FROM dbo.tbl_server_times;
+ IF OBJECT_ID('tbl_ServerProperties') IS NOT NULL
+ BEGIN
+            SELECT @utc_to_local_offset = ISNULL(TRY_CONVERT(NUMERIC(3, 0), PropertyValue), 0)
+ FROM dbo.tbl_ServerProperties
+ WHERE PropertyName = 'UTCOffset_in_Hours';
+        END
+ ELSE IF OBJECT_ID('tbl_server_times') IS NOT NULL
+ BEGIN
+ SELECT TOP 1
+ @utc_to_local_offset = ISNULL(CONVERT(NUMERIC(3, 0), time_delta_hours * -1), 0)
+ FROM dbo.tbl_server_times;
+ END;
END;
+
+ -- Guard: ensure @utc_to_local_offset is never NULL (DATEADD returns NULL when number arg is NULL)
+ SET @utc_to_local_offset = ISNULL(@utc_to_local_offset, 0);
+
--update the new columns in tblBatches with local times
IF (
(COLUMNPROPERTY(OBJECT_ID('[ReadTrace].[tblBatches]'), 'StartTime_local', 'ColumnId') IS NOT NULL)
AND (COLUMNPROPERTY(OBJECT_ID('[ReadTrace].[tblBatches]'), 'EndTime_local', 'ColumnId') IS NOT NULL)
)
BEGIN
- UPDATE [ReadTrace].[tblBatches]
- SET StartTime_local = DATEADD(HOUR, @utc_to_local_offset, StartTime),
- EndTime_local = DATEADD(HOUR, @utc_to_local_offset, EndTime);
+ IF @timestamps_are_local = 1
+ UPDATE [ReadTrace].[tblBatches]
+ SET StartTime_local = StartTime,
+                EndTime_local = EndTime
+ ELSE
+ UPDATE [ReadTrace].[tblBatches]
+ SET StartTime_local = DATEADD(HOUR, @utc_to_local_offset, StartTime),
+ EndTime_local = DATEADD(HOUR, @utc_to_local_offset, EndTime);
END;
--update the new columns in tblStatements with local times
@@ -84,9 +132,14 @@ BEGIN
AND (COLUMNPROPERTY(OBJECT_ID('[ReadTrace].[tblStatements]'), 'EndTime_local', 'ColumnId') IS NOT NULL)
)
BEGIN
- UPDATE [ReadTrace].[tblStatements]
- SET StartTime_local = DATEADD(HOUR, @utc_to_local_offset, StartTime),
- EndTime_local = DATEADD(HOUR, @utc_to_local_offset, EndTime);
+ IF @timestamps_are_local = 1
+ UPDATE [ReadTrace].[tblStatements]
+ SET StartTime_local = StartTime,
+ EndTime_local = EndTime;
+ ELSE
+ UPDATE [ReadTrace].[tblStatements]
+ SET StartTime_local = DATEADD(HOUR, @utc_to_local_offset, StartTime),
+ EndTime_local = DATEADD(HOUR, @utc_to_local_offset, EndTime);
END;
@@ -96,10 +149,14 @@ BEGIN
AND (COLUMNPROPERTY(OBJECT_ID('[ReadTrace].[tblConnections]'), 'EndTime_local', 'ColumnId') IS NOT NULL)
)
BEGIN
-
- UPDATE [ReadTrace].[tblConnections]
- SET StartTime_local = DATEADD(HOUR, @utc_to_local_offset, StartTime),
- EndTime_local = DATEADD(HOUR, @utc_to_local_offset, EndTime);
+ IF @timestamps_are_local = 1
+ UPDATE [ReadTrace].[tblConnections]
+ SET StartTime_local = StartTime,
+ EndTime_local = EndTime;
+ ELSE
+ UPDATE [ReadTrace].[tblConnections]
+ SET StartTime_local = DATEADD(HOUR, @utc_to_local_offset, StartTime),
+ EndTime_local = DATEADD(HOUR, @utc_to_local_offset, EndTime);
END;
END;
diff --git a/sqlnexus/fmImport.cs b/sqlnexus/fmImport.cs
index c0f65e2f..ad1d582b 100644
--- a/sqlnexus/fmImport.cs
+++ b/sqlnexus/fmImport.cs
@@ -1,4 +1,4 @@
-using System;
+using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
@@ -412,6 +412,71 @@ public void EnumImporters()
tsiImporters.DropDownItems.Clear();
EnumImportersFromDirectory(Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData) + @"\SqlNexus\Importers");
EnumImportersFromDirectory(Application.StartupPath);
+ EnforceTraceImporterExclusivity();
+
+ // Final check: warn for any expected importer that never made it into the menu,
+ // regardless of which directory was scanned. This is the most user-visible signal.
+ var registeredNames = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
+ foreach (ToolStripMenuItem item in tsiImporters.DropDownItems)
+ {
+ INexusImporter prod = item.Tag as INexusImporter;
+ if (prod != null)
+ registeredNames.Add(prod.Name);
+ }
+
+ // Map DLL base name → the Name the importer registers itself under
+ var expectedDllToName = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
+ {
+ { "TraceEventImporter", TRACEEVENT_IMPORTER_NAME },
+ { "ReadTraceNexusImporter", READTRACE_IMPORTER_NAME },
+ };
+
+ foreach (var kvp in expectedDllToName)
+ {
+ if (!registeredNames.Contains(kvp.Value))
+ {
+ MainForm.LogMessage(string.Format(
+ "WARNING: Importer '{0}' (from {1}.dll) did not register in the Import menu. " +
+ "Check the application log for load errors, or verify that {1}.dll is present in '{2}'.",
+ kvp.Value, kvp.Key, Application.StartupPath), MessageOptions.Both);
+ }
+ }
+ }
+
+ /// <summary>
+ /// If both ReadTrace and TraceEventImporter are enabled, disable ReadTrace
+ /// (TraceEventImporter is the preferred/modern one).
+ /// </summary>
+ private void EnforceTraceImporterExclusivity()
+ {
+ bool readTraceEnabled = IsImporterEnabled(READTRACE_IMPORTER_NAME);
+ bool traceEventEnabled = IsImporterEnabled(TRACEEVENT_IMPORTER_NAME);
+
+ if (readTraceEnabled && traceEventEnabled)
+ {
+ DisableImporterByName(READTRACE_IMPORTER_NAME);
+ MainForm.LogMessage(string.Format("Both '{0}' and '{1}' were enabled. Automatically disabled '{1}' to prevent conflicts.", TRACEEVENT_IMPORTER_NAME, READTRACE_IMPORTER_NAME), MessageOptions.Silent);
+ }
+ }
+
+ /// <summary>
+ /// Checks whether the specified importer's Enabled option is checked.
+ /// </summary>
+ private bool IsImporterEnabled(string importerName)
+ {
+ foreach (ToolStripMenuItem importerTsi in tsiImporters.DropDownItems)
+ {
+ INexusImporter prod = importerTsi.Tag as INexusImporter;
+ if (prod == null || prod.Name != importerName)
+ continue;
+
+ foreach (ToolStripMenuItem optionTsi in importerTsi.DropDownItems)
+ {
+ if (optionTsi.Text == "Enabled")
+ return optionTsi.Checked;
+ }
+ }
+ return false;
}
private String FileVersions(String file)
{
@@ -441,7 +506,15 @@ private List<string> OrderedImporterFiles(string[] files)
else if (file.ToUpper().Contains("READTRACE"))
{
ImporterList.Add(200, file);
-
+ }
+ else if (file.ToUpper().Contains("TRACEEVENTIMPORTER"))
+ {
+ // TraceEventImporter must run after RowsetImportEngine (100) so that
+ // tbl_ServerProperties already exists when GetLocalServerTimeOffset()
+ // and WriteLocalTimeFlag() are called. Place it between Rowset (100)
+ // and ReadTrace (200); ReadTrace and TraceEventImporter are mutually
+ // exclusive so they will never both be present at the same time.
+ ImporterList.Add(150, file);
}
else
{
@@ -472,15 +545,29 @@ private List<string> OrderedImporterFiles(string[] files)
return OrderedImporters;
}
+ // The set of assembly names that sqlnexus.exe directly references at compile time.
+ // Built once from the executing assembly's manifest so it stays in sync automatically:
+ // any new importer added as a ProjectReference to sqlnexus.csproj is immediately
+ // included without any code change here.
+ private static readonly Lazy<HashSet<string>> _referencedByHost = new Lazy<HashSet<string>>(() =>
+ {
+ var names = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
+ foreach (AssemblyName asmName in Assembly.GetExecutingAssembly().GetReferencedAssemblies())
+ names.Add(asmName.Name);
+ return names;
+ });
+
private void EnumImportersFromDirectory(string importerDirectory)
{
if (!Directory.Exists(importerDirectory))
return;
-
+
string[] Files = Directory.GetFiles(importerDirectory, "*.DLL");
List<string> OrderedFiles = OrderedImporterFiles(Files);
- // List of option names
+ // Track which expected DLL names were actually seen in this directory so that
+ // EnumImporters() can warn about any that are present on disk but failed to register.
+ var seenExpected = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (string file in OrderedFiles)
{
@@ -492,20 +579,55 @@ private void EnumImportersFromDirectory(string importerDirectory)
MainForm.LogMessage(String.Format(Properties.Resources.Msg_NativeImage, file));
continue;
}
+
+ string dllBaseName = Path.GetFileNameWithoutExtension(file);
+ // A DLL is "expected" if sqlnexus.exe directly references it at compile time
+ // (i.e. it was added as a ProjectReference). No list to maintain here.
+ bool isExpected = _referencedByHost.Value.Contains(dllBaseName);
+
Assembly Assem;
try
{
Assem = Assembly.LoadFile(file);
+ }
+ catch (Exception ex)
+ {
+ string msg = string.Format("Assembly '{0}' could not be loaded: {1}", Path.GetFileName(file), ex.Message);
+ // Use a visible log level when a known importer DLL fails to load
+ MainForm.LogMessage(msg, isExpected ? MessageOptions.Both : MessageOptions.Silent);
+ continue;
+ }
-
+ Type[] typs;
+ try
+ {
+ typs = Assem.GetExportedTypes();
+ }
+ catch (ReflectionTypeLoadException rtle)
+ {
+ // Log the loader exceptions — these reveal exactly which dependency is missing
+ var loaderMsgs = rtle.LoaderExceptions != null
+ ? string.Join("; ", rtle.LoaderExceptions
+ .Where(le => le != null)
+ .Select(le => le.Message))
+ : rtle.Message;
+ string msg = string.Format(
+ "Assembly '{0}' loaded but type inspection failed (missing dependency?): {1}",
+ Path.GetFileName(file), loaderMsgs);
+ MainForm.LogMessage(msg, isExpected ? MessageOptions.Both : MessageOptions.Silent);
+ continue;
}
catch (Exception ex)
{
- MainForm.LogMessage("Assembly " + file + " could not be used as an importer: " + ex.Message, MessageOptions.Silent);
+ string msg = string.Format(
+ "Assembly '{0}' loaded but GetExportedTypes() failed: {1}",
+ Path.GetFileName(file), ex.Message);
+ MainForm.LogMessage(msg, isExpected ? MessageOptions.Both : MessageOptions.Silent);
continue;
}
- Type[] typs = Assem.GetExportedTypes();
+ int importersRegisteredFromThisDll = 0;
+
foreach (Type typ in typs)
{
//Ignore abstract classes
@@ -523,7 +645,23 @@ private void EnumImportersFromDirectory(string importerDirectory)
//If we get in here, the Class implements the interface, so add it to the list
//and bail
- INexusImporter prod = (INexusImporter)Assem.CreateInstance(typ.FullName, true);
+ INexusImporter prod;
+ try
+ {
+ prod = (INexusImporter)Assem.CreateInstance(typ.FullName, true);
+ }
+ catch (Exception ex)
+ {
+ MainForm.LogMessage(string.Format(
+ "Failed to instantiate or cast '{0}' from '{1}': {2}",
+ typ.FullName, Path.GetFileName(file), ex.Message),
+ isExpected ? MessageOptions.Both : MessageOptions.Silent);
+ continue;
+ }
+
+ importersRegisteredFromThisDll++;
+ if (isExpected)
+ seenExpected.Add(dllBaseName);
prod.StatusChanged += new System.EventHandler(this.ImportStatusChanged);
if (prod is INexusProgressReporter)
@@ -560,7 +698,8 @@ private void EnumImportersFromDirectory(string importerDirectory)
"Ignore events associated with PSSDIAG activity",
"Disable event requirement checks",
"Import to SQL (Linux Perf)",
- "Drop existing tables (Linux Perf)"
+ "Drop existing tables (Linux Perf)",
+ "Drop existing ReadTrace tables"
};
foreach (string optionName in optionNames)
@@ -570,10 +709,12 @@ private void EnumImportersFromDirectory(string importerDirectory)
{
// Construct the userSavedKey using the product name and option name
string userSavedKey = string.Format("{0}.{1}", prod.Name, optionName);
- // Get the userSavedValue using ImportOptions
- bool userSavedValue = ImportOptions.IsEnabled(userSavedKey);
- // Update the option with the userSavedValue
- prod.Options[optionName] = userSavedValue;
+ // Only restore saved value if it was actually saved before
+ if (ImportOptions.HasOption(userSavedKey))
+ {
+ bool userSavedValue = ImportOptions.IsEnabled(userSavedKey);
+ prod.Options[optionName] = userSavedValue;
+ }
}
}
@@ -590,7 +731,7 @@ private void EnumImportersFromDirectory(string importerDirectory)
subtsi.Tag = prod.OptionsDialog;
subtsi.Click += new System.EventHandler(this.tsiDialog_Click);
}
- else // boolean
+ else if (prod.Options[option] is bool) // boolean
{
m_OptionList.Add(subtsi);
@@ -607,6 +748,11 @@ private void EnumImportersFromDirectory(string importerDirectory)
subtsi.Click += new System.EventHandler(this.tsiBool_Click);
}
+ else
+ {
+ // Non-boolean, non-dialog options (e.g. int) are not shown in the menu
+ continue;
+ }
tsi.DropDownItems.Add(subtsi);
}
@@ -619,6 +765,46 @@ private void EnumImportersFromDirectory(string importerDirectory)
};
}
}
+
+ // Log if a DLL that looks like an importer loaded and reflected cleanly but
+ // registered nothing — this catches mismatched interface versions, wrong SNK, etc.
+ if (importersRegisteredFromThisDll == 0 && isExpected)
+ {
+ MainForm.LogMessage(string.Format(
+ "WARNING: '{0}' loaded successfully but registered no INexusImporter entries. " +
+ "The importer will not appear in the Import menu. " +
+ "Check that the DLL targets the correct NexusInterfaces version.",
+ Path.GetFileName(file)), MessageOptions.Both);
+ }
+ else if (importersRegisteredFromThisDll > 0)
+ {
+ MainForm.LogMessage(string.Format(
+ "'{0}' registered {1} importer(s).", Path.GetFileName(file), importersRegisteredFromThisDll),
+ MessageOptions.Silent);
+ }
+ }
+
+ // After scanning, log any compile-time-referenced DLL that wasn't even found on
+ // disk in this directory. Only check DLLs that are both referenced AND absent;
+ // support libraries (Azure.Core, System.Memory, etc.) are expected not to be here
+ // when scanning the AppData importers directory, so we limit to DLLs whose names
+ // suggest they are importers (contain "Import") to avoid log noise.
+ var foundOnDiskNames = new HashSet<string>(
+ OrderedFiles.Select(f => Path.GetFileNameWithoutExtension(f)),
+ StringComparer.OrdinalIgnoreCase);
+
+ foreach (string refName in _referencedByHost.Value)
+ {
+ if (refName.IndexOf("Import", StringComparison.OrdinalIgnoreCase) < 0)
+ continue; // skip non-importer support libraries
+
+ if (!foundOnDiskNames.Contains(refName))
+ {
+ MainForm.LogMessage(string.Format(
+ "Referenced importer DLL '{0}.dll' was not found in '{1}'. " +
+ "The importer will not appear in the Import menu.",
+ refName, importerDirectory), MessageOptions.Silent);
+ }
}
// Add the SQLDiag/AlwaysOn XEL option under Importers with sub-items
@@ -1392,8 +1578,9 @@ private void RunPostScripts(string importerName)
if (string.IsNullOrEmpty(importerName))
return;
- // Only execute ReadTracePostProcessing.sql when the current importer is the ReadTrace importer.
- bool isReadTraceImporter = importerName.Equals("ReadTrace (SQL XEL/TRC files)", StringComparison.OrdinalIgnoreCase);
+ // Only execute ReadTracePostProcessing.sql when the current importer is ReadTrace or TraceEventImporter (both use the ReadTrace schema).
+ bool isReadTraceImporter = importerName.Equals("ReadTrace (SQL XEL/TRC files)", StringComparison.OrdinalIgnoreCase)
+ || importerName.Equals(TRACEEVENT_IMPORTER_NAME, StringComparison.OrdinalIgnoreCase);
// If nothing to run, skip executing the post scripts.
if (!PostScripts.TryGetValue(importerName, out var scripts) || scripts == null || scripts.Length == 0)
@@ -1587,6 +1774,10 @@ private void tsiSaveOptions_Click(object sender, EventArgs e)
}
+ // Importer names used for mutual exclusivity
+ private const string READTRACE_IMPORTER_NAME = "ReadTrace (SQL XEL/TRC Files)";
+ private const string TRACEEVENT_IMPORTER_NAME = "Trace Event Importer (Managed)";
+
private void tsiBool_Click(object sender, EventArgs e)
{
ToolStripMenuItem tsi = (sender as ToolStripMenuItem);
@@ -1594,12 +1785,60 @@ private void tsiBool_Click(object sender, EventArgs e)
prod.Options[tsi.Text] = tsi.Checked;
if (ImportOptions.IsEnabled("SaveImportOptions"))
{
-
ImportOptions.Set(string.Format("{0}.{1}", prod.Name, tsi.Text), tsi.Checked);
- //LogMessage("strip: " + string.Format("{0}.{1}", prod.Name, tsi.Name), MessageOptions.Silent);
+ }
+
+ // Mutual exclusivity: enabling one trace importer disables the other
+ if (tsi.Text == "Enabled" && tsi.Checked)
+ {
+ string otherImporterName = null;
+ if (prod.Name == TRACEEVENT_IMPORTER_NAME)
+ otherImporterName = READTRACE_IMPORTER_NAME;
+ else if (prod.Name == READTRACE_IMPORTER_NAME)
+ otherImporterName = TRACEEVENT_IMPORTER_NAME;
+ if (otherImporterName != null)
+ {
+ DisableImporterByName(otherImporterName);
+
+ // Close all dropdown menus so the MessageBox isn't hidden behind them
+ cmOptions.Close();
+
+ MessageBox.Show(this,
+ string.Format("You have enabled '{0}'.\nThe '{1}' importer has been automatically disabled to prevent both from writing to the same tables.", prod.Name, otherImporterName),
+ "Importer Conflict",
+ MessageBoxButtons.OK,
+ MessageBoxIcon.Information);
+ }
}
+ }
+ /// <summary>
+ /// Finds the importer menu item by name and unchecks its Enabled option.
+ /// </summary>
+ private void DisableImporterByName(string importerName)
+ {
+ foreach (ToolStripMenuItem importerTsi in tsiImporters.DropDownItems)
+ {
+ INexusImporter otherProd = importerTsi.Tag as INexusImporter;
+ if (otherProd == null || otherProd.Name != importerName)
+ continue;
+
+ foreach (ToolStripMenuItem optionTsi in importerTsi.DropDownItems)
+ {
+ if (optionTsi.Text == "Enabled")
+ {
+ optionTsi.Checked = false;
+ otherProd.Options["Enabled"] = false;
+ if (ImportOptions.IsEnabled("SaveImportOptions"))
+ {
+ ImportOptions.Set(string.Format("{0}.Enabled", importerName), false);
+ }
+ break;
+ }
+ }
+ break;
+ }
}
private void tsiDialog_Click(object sender, EventArgs e)
diff --git a/sqlnexus/sqlnexus.csproj b/sqlnexus/sqlnexus.csproj
index 72b4479b..3210aa76 100644
--- a/sqlnexus/sqlnexus.csproj
+++ b/sqlnexus/sqlnexus.csproj
@@ -1,4 +1,4 @@
-
+
@@ -870,6 +870,10 @@
<Project>{ccf72846-1645-4548-8e66-a2ffce83692d}</Project>
<Name>LinuxPerfImporter</Name>
+ <ProjectReference Include="..\TraceEventImporter\TraceEventImporter.csproj">
+ <Project>{08FCB9FF-C633-4B28-947B-39D553091600}</Project>
+ <Name>TraceEventImporter</Name>
+ </ProjectReference>
<Project>{2217E9CA-442E-46C5-AB6C-7A46AE41A22C}</Project>
<Name>NexusInterfaces</Name>