Created
July 15, 2020 11:42
-
-
Save SQLDBAWithABeard/ee3991c18b31d78c7237d95de6590fb4 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
<# | |
Nasty, horrible, dirty method of filling a database with duff data. | |
We used this for some data movement load testing into Azure | |
It uses Itziks method of creating a numbers table and then converts them into the right(ish) | |
datatype which it gets from the schema. | |
Nothing more No PKs, no FKs, no constraints | |
All dates, datetimes etc are 1901-01-01 00:00:00 | |
All bits are 1 | |
All columns are filled to the max | |
It takes an average row count per schema from a csv file (because that's what I needed)
and uses that as the row count for the table. | |
The csv looks like | |
Schema,Average of RowCount | |
schema1,3024524.22 | |
schema2,16366.25 | |
schema3,8084948.89 | |
schema4,2868741.5 | |
It requires dbatools | |
It creates a sql file which you can then run. | |
Seriously, if you want to do this properly, then use a tool that you pay for. | |
The best tool IMHO is SQL Data Generator from Redgate. | |
https://www.red-gate.com/products/sql-development/sql-data-generator/ | |
Rob Sewell @SqlDbaWithBeard | |
15/07/2020 | |
#> | |
# --- Configuration -----------------------------------------------------------
# Target database, output script path, instance to introspect, and the CSV of
# per-schema average row counts (format: Schema,Average of RowCount).
$database = 'beard'
$fileName = 'c:\temp\beard.sql'
# Fix: the instance name must be a quoted string. The original bare word
# `beard` would be parsed as a command invocation and fail at runtime.
$SqlInstance = 'beard'
$Rowcounts = Import-Csv -Path C:\temp\beard.csv
# Requires dbatools; returns SMO table objects for every table in the database.
$tables = Get-DbaDbTable -SqlInstance $SqlInstance -Database $database
# Emit the script header: switch database and log the start time when run.
"Use $database
GO
DECLARE @Date NVARCHAR(20)
SET @Date = CAST(GETDATE() AS NVARCHAR(20))
PRINT @Date + ' Starting loading $database at'
GO
" | Out-File $fileName
# For each table: build a SELECT list of dummy values (one expression per
# non-identity column, keyed off the column's datatype), then append an
# Itzik-style numbers-CTE INSERT of $RowNumber rows to the output file.
$tables.ForEach{
    $tableName = '[' + $psitem.Schema + '].[' + $psitem.Name + ']'
    $tableName   # progress output to the console
    # Fix: Import-Csv yields strings, and a string on the left of '*' means
    # repetition in PowerShell, not arithmetic — cast to [double] so the
    # numeric/decimal branches below compute a real value.
    $RowNumber = [double](($Rowcounts | Where Schema -eq $psitem.Schema).'Average of RowCount')
    # Calculated properties pulling datatype details out of the SMO column objects.
    $MaxLength = @{Name = 'MaxLength'; Expression = { $Psitem.Datatype.MaximumLength } }
    $NumericPrecision = @{Name = 'NumericPrecision'; Expression = { $Psitem.Datatype.NumericPrecision } }
    $NumericScale = @{Name = 'NumericScale'; Expression = { $Psitem.Datatype.NumericScale } }
    $Datatype = @{Name = 'Datatype'; Expression = { $Psitem.DataType.Name } }
    # Identity columns are skipped — the INSERT must not supply values for them.
    $Columns = $PSItem.Columns | Where Identity -eq $false | Select Name, $Datatype, $MaxLength, Identity, $NumericPrecision, $NumericScale
    $select = ''
    foreach ($column in $columns) {
        if ($column.DataType -in 'int', 'bigint') {
            # The row number itself is the dummy value.
            $select += " k as '$($column.Name)' ,"
        }
        elseif ($column.DataType -in 'varchar', 'nvarchar', 'char') {
            # Row number rendered as a string, padded to the column's max length.
            $select += " LTRIM(STR(k,$($column.MaxLength))) as '$($column.Name)' ,"
        }
        elseif ($column.DataType -in 'date', 'datetime2', 'datetime') {
            $select += " '1901-01-01 00:00:00' as '$($column.Name)' ,"
        }
        elseif ($column.DataType -in 'numeric', 'decimal') {
            # Precision minus scale = digits available left of the decimal point;
            # shrink the row count by powers of ten until it fits.
            $Reduce = $column.NumericPrecision - $column.NumericScale
            [int]$value = $RowNumber * ([math]::Pow(0.1, $Reduce))
            $select += " CAST($value as $($column.DataType)($($column.NumericPrecision),$($column.NumericScale))) as '$($column.Name)' ,"
        }
        elseif ($column.DataType -eq 'bit') {
            $select += " 1 as '$($column.Name)' ,"
        }
        else {
            # Fallback for datatypes not handled above. Fix: in the original this
            # `if ($column.Nullable)` block was NOT part of the elseif chain, so it
            # appended a second value (' NULL ,' or ' 1 ,') for EVERY column,
            # producing INSERT statements with too many select-list items.
            if ($column.Nullable) {
                $select += " NULL ,"
            }
            else {
                $select += " 1 ,"
            }
        }
    }
    try {
        # Strip the trailing comma; throws (and is reported) if the table had no
        # insertable columns and $select is still empty.
        $select = $select.Substring(0, $select.Length - 1)
    }
    catch {
        "Error in $TableName"
    }
    $SelectStatement = 'Select ' + $select
    # Itzik Ben-Gan's cascading-CROSS-JOIN numbers CTE: up to 2^32 rows, filtered
    # to the first $RowNumber values of k. TABLOCK reduces logging in simple mode.
    $intquery = @"
DECLARE @Date NVARCHAR(20)
SET @Date = GETDATE()
PRINT @Date + ' Inserting $rowNumber rows into $tableName'
; WITH
L0 AS (SELECT c FROM (SELECT 1 UNION ALL SELECT 1) AS D(c)), -- 2^1
L1 AS (SELECT 1 AS c FROM L0 AS A CROSS JOIN L0 AS B), -- 2^2
L2 AS (SELECT 1 AS c FROM L1 AS A CROSS JOIN L1 AS B), -- 2^4
L3 AS (SELECT 1 AS c FROM L2 AS A CROSS JOIN L2 AS B), -- 2^8
L4 AS (SELECT 1 AS c FROM L3 AS A CROSS JOIN L3 AS B), -- 2^16
L5 AS (SELECT 1 AS c FROM L4 AS A CROSS JOIN L4 AS B), -- 2^32
Nums AS (SELECT ROW_NUMBER() OVER(ORDER BY (SELECT NULL)) AS k FROM L5)
INSERT INTO $tableName WITH (TABLOCK) -- to reduce logging in simple mode_
$SelectStatement
from nums
where k <= $rowNumber
GO
DECLARE @Date NVARCHAR(20)
SET @Date = CAST(GETDATE() AS NVARCHAR(20))
PRINT @Date + ' Finished Inserting $rowNumber rows into $tableName'
GO
"@
    $intquery | Out-File $fileName -Append
}
# Append the closing banner: log the finish time when the generated script runs.
$footer = @"
DECLARE @Date NVARCHAR(20)
SET @Date = CAST(GETDATE() AS NVARCHAR(20))
PRINT @Date + ' Finished loading $database at'
"@
$footer | Out-File $fileName -Append
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment