@DvdKhl
Created June 20, 2024 19:49
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net8.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.Data.SqlClient" Version="5.2.1" />
  </ItemGroup>

</Project>
using Microsoft.Data.SqlClient;
using System.Collections;
using System.Data.Common;
using System.Diagnostics.CodeAnalysis;

// Open a connection to the local Main database using integrated security.
using var conn = new SqlConnection("Server=localhost;Database=Main;Trusted_Connection=sspi;Encrypt=False;TrustServerCertificate=True");
await conn.OpenAsync().ConfigureAwait(false);

// Stream 100 synthetic rows into the partitioned LogEntry table via SqlBulkCopy.
using var reader = new LogDataReader() { Count = 100 };

using var sqlBulkCopy = new SqlBulkCopy(conn, SqlBulkCopyOptions.CheckConstraints | SqlBulkCopyOptions.FireTriggers, null);
sqlBulkCopy.DestinationTableName = "LogEntry";
sqlBulkCopy.BatchSize = reader.Count;
sqlBulkCopy.ColumnOrderHints.Add("LoggedOnUtc", SortOrder.Ascending);

// Map source ordinals 0..9 to destination ordinals 0..9 (same column order as the CREATE TABLE script below).
sqlBulkCopy.ColumnMappings.Clear();
for (int i = 0; i < 10; i++) sqlBulkCopy.ColumnMappings.Add(i, i);

await sqlBulkCopy.WriteToServerAsync(reader).ConfigureAwait(false);
// Minimal forward-only DbDataReader that fabricates Count identical rows matching the LogEntry column layout.
public class LogDataReader : DbDataReader {
    private int position;

    public int Count { get; set; }

    public override int FieldCount => 10;
    public override bool HasRows => true;
    public override int Depth { get; } = 1;
    public override bool IsClosed { get; } = false;
    public override int RecordsAffected { get; } = -1;

    public override bool Read() => position++ < Count;

    // SqlBulkCopy only needs GetValue when ordinal-based column mappings are used.
    public override object GetValue(int i) {
        return i switch {
            0 => 1,                // AppInstanceId
            1 => DateTime.Now,     // LoggedOnUtc
            2 => "CategoryName",   // CategoryName
            3 => 1,                // EventCode
            4 => "EventName",      // EventName
            5 => 1,                // LogLevel
            6 => "[]",             // ScopeJson
            7 => "{}",             // StateJson
            8 => DBNull.Value,     // ExceptionJson
            9 => "Message",        // Message
            _ => throw new ArgumentOutOfRangeException(nameof(i)),
        };
    }

    // The remaining members are not exercised by this bulk copy scenario.
    public override object this[int ordinal] => throw new NotImplementedException();
    public override object this[string name] => throw new NotImplementedException();
    public override bool GetBoolean(int ordinal) => throw new NotImplementedException();
    public override byte GetByte(int ordinal) => throw new NotImplementedException();
    public override long GetBytes(int ordinal, long dataOffset, byte[]? buffer, int bufferOffset, int length) => throw new NotImplementedException();
    public override char GetChar(int ordinal) => throw new NotImplementedException();
    public override long GetChars(int ordinal, long dataOffset, char[]? buffer, int bufferOffset, int length) => throw new NotImplementedException();
    public override string GetDataTypeName(int ordinal) => throw new NotImplementedException();
    public override DateTime GetDateTime(int ordinal) => throw new NotImplementedException();
    public override decimal GetDecimal(int ordinal) => throw new NotImplementedException();
    public override double GetDouble(int ordinal) => throw new NotImplementedException();
    public override IEnumerator GetEnumerator() => throw new NotImplementedException();
    [return: DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields | DynamicallyAccessedMemberTypes.PublicProperties)]
    public override Type GetFieldType(int ordinal) => throw new NotImplementedException();
    public override float GetFloat(int ordinal) => throw new NotImplementedException();
    public override Guid GetGuid(int ordinal) => throw new NotImplementedException();
    public override short GetInt16(int ordinal) => throw new NotImplementedException();
    public override int GetInt32(int ordinal) => throw new NotImplementedException();
    public override long GetInt64(int ordinal) => throw new NotImplementedException();
    public override string GetName(int ordinal) => throw new NotImplementedException();
    public override int GetOrdinal(string name) => throw new NotImplementedException();
    public override string GetString(int ordinal) => throw new NotImplementedException();
    public override int GetValues(object[] values) => throw new NotImplementedException();
    public override bool IsDBNull(int ordinal) => throw new NotImplementedException();
    public override bool NextResult() => false;
}
-- View that reports per-partition storage details for the LogEntry table,
-- including whether each partition still has an open columnstore delta store.
create or alter view PartitionInfo as
select
    [SchemaName] = object_schema_name(p.object_id),
    [TableName] = object_name(p.object_id),
    [IndexName] = i.name,
    [StorageType] = i.type,
    [DataCompressionType] = p.data_compression,
    [SchemeName] = ps.name,
    [FunctionName] = pf.name,
    [PartitionNumber] = p.partition_number,
    [IsRightBoundary] = pf.boundary_value_on_right,
    [LeftBoundary] = prvLeft.value,
    [RightBoundary] = prvRight.value,
    [IsCompressed] = cast(iif([csrgps].object_id is null, 1, 0) as bit),
    [RowCount] = p.rows,
    [UsedBytes] = pStat.used_page_count * 8 * 1024,
    [ReservedBytes] = pStat.reserved_page_count * 8 * 1024
from sys.partitions p
left join (
    -- Partitions that still have at least one open delta-store row group.
    select object_id, partition_number
    from sys.dm_db_column_store_row_group_physical_stats csrgps
    where csrgps.delta_store_hobt_id is not null and state = 1
    group by object_id, partition_number
) csrgps on csrgps.object_id = p.object_id and csrgps.partition_number = p.partition_number
inner join sys.dm_db_partition_stats as pStat on pStat.partition_id = p.partition_id
inner join sys.indexes i on i.object_id = p.object_id
inner join sys.partition_schemes ps on ps.data_space_id = i.data_space_id
inner join sys.partition_functions pf on pf.function_id = ps.function_id
inner join sys.destination_data_spaces dds on dds.partition_scheme_id = i.data_space_id and dds.destination_id = p.partition_number
left join sys.partition_range_values prvLeft on prvLeft.function_id = ps.function_id and prvLeft.boundary_id = p.partition_number - 1
left join sys.partition_range_values prvRight on prvRight.function_id = ps.function_id and prvRight.boundary_id = p.partition_number
where object_name(p.object_id) = 'LogEntry'
go
drop table if exists [LogEntry]
if exists (select 1 from sys.partition_schemes where [name] = 'PSCH_Logging_Daily7Of9')
begin
    drop partition scheme [PSCH_Logging_Daily7Of9]
    drop partition function [PFCT_Logging_Daily7Of9]
end
go
CREATE PARTITION FUNCTION [PFCT_Logging_Daily7Of9](datetime2(7)) AS RANGE RIGHT FOR VALUES (
    N'2024-01-02T00:00:00.000', N'2024-01-03T00:00:00.000', N'2024-01-04T00:00:00.000', N'2024-01-05T00:00:00.000',
    N'2024-01-06T00:00:00.000', N'2024-01-07T00:00:00.000', N'2024-01-08T00:00:00.000', N'2024-01-09T00:00:00.000'
)
CREATE PARTITION SCHEME [PSCH_Logging_Daily7Of9] AS PARTITION [PFCT_Logging_Daily7Of9] TO ([PRIMARY], [PRIMARY], [PRIMARY], [PRIMARY], [PRIMARY], [PRIMARY], [PRIMARY], [PRIMARY], [PRIMARY])
CREATE TABLE [LogEntry](
    [AppInstanceId] [bigint] NOT NULL,
    [LoggedOnUtc] [datetime2](7) NOT NULL,
    [CategoryName] [nvarchar](256) NOT NULL,
    [EventCode] [int] NOT NULL,
    [EventName] [nvarchar](256) NULL,
    [LogLevel] [int] NOT NULL,
    [ScopeJson] [nvarchar](max) NULL,
    [StateJson] [nvarchar](max) NULL,
    [ExceptionJson] [nvarchar](max) NULL,
    [Message] [nvarchar](max) NULL
) ON [PSCH_Logging_Daily7Of9]([LoggedOnUtc])
create clustered columnstore index [CIX_LogEntry] on [LogEntry]

-- Seed 700,000 rows: 100,000 rows per day across the first seven daily partitions.
insert into [LogEntry]([AppInstanceId], [LoggedOnUtc], [CategoryName], [EventCode], [EventName], [LogLevel], [ScopeJson], [StateJson], [ExceptionJson], [Message])
select gs.value, dateadd(day, gs.value / 100000, '2024-01-01T00:00:00.000'), 'CategoryName', 1, 'EventName', 1, '[]', '{}', null, 'Message'
from generate_series(0, 700000-1, 1) gs

-- Compress every partition so all row groups are in columnstore format before the bulk copy test.
ALTER INDEX [CIX_LogEntry] ON [dbo].[LogEntry] REBUILD PARTITION = ALL WITH (DATA_COMPRESSION = COLUMNSTORE)
select * from PartitionInfo order by [SchemaName], [TableName], [IndexName], [PartitionNumber]
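
-- Optional helper (not part of the original gist): a minimal sketch, assuming the LogEntry
-- table above exists, that inspects the columnstore row groups directly. It shows, per
-- partition, whether rows sit in compressed row groups or open delta stores, which is what
-- the IsCompressed flag in PartitionInfo summarizes.
select
    [TableName] = object_name(rg.object_id),
    rg.partition_number,
    rg.row_group_id,
    rg.state_desc,
    rg.total_rows,
    rg.size_in_bytes
from sys.dm_db_column_store_row_group_physical_stats rg
where object_name(rg.object_id) = 'LogEntry'
order by rg.partition_number, rg.row_group_id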
/*
CREATE EVENT SESSION [HighMemUsageSqlBulkCopy] ON SERVER
ADD EVENT sqlserver.query_post_execution_showplan,
ADD EVENT sqlserver.sql_batch_completed(
ACTION(package0.event_sequence,sqlserver.client_app_name,sqlserver.client_pid,sqlserver.database_id,sqlserver.database_name,sqlserver.nt_username,sqlserver.query_hash,sqlserver.server_principal_name,sqlserver.session_id)
WHERE ([package0].[equal_boolean]([sqlserver].[is_system],(0))))
WITH (MAX_MEMORY=16384 KB,EVENT_RETENTION_MODE=ALLOW_SINGLE_EVENT_LOSS,MAX_DISPATCH_LATENCY=5 SECONDS,MAX_EVENT_SIZE=0 KB,MEMORY_PARTITION_MODE=PER_CPU,TRACK_CAUSALITY=ON,STARTUP_STATE=OFF)
GO
insert into [LogEntry]([AppInstanceId], [LoggedOnUtc], [CategoryName], [EventCode], [EventName], [LogLevel], [ScopeJson], [StateJson], [ExceptionJson], [Message])
select gs.value, '2024-01-08T00:00:00.000', 'CategoryName', 1, 'EventName', 1, '[]', '{}', null, 'Message'
from generate_series(0, 100-1, 1) gs
drop view PartitionInfo
drop table if exists [LogEntry]
drop partition SCHEME [PSCH_Logging_Daily7Of9]
drop partition function [PFCT_Logging_Daily7Of9]
*/
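
-- Usage sketch (assumption, not part of the original gist): once the commented-out
-- [HighMemUsageSqlBulkCopy] event session above has been created, it can be started before
-- running the C# SqlBulkCopy program and stopped afterwards to inspect the captured plans:
-- ALTER EVENT SESSION [HighMemUsageSqlBulkCopy] ON SERVER STATE = START
-- ALTER EVENT SESSION [HighMemUsageSqlBulkCopy] ON SERVER STATE = STOP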