Create a gist now

Instantly share code, notes, and snippets.

Stack Overflow Build Reference Docs
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="PrepareStaticContent" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup>
  <!-- Passed in Parameters -->
  <configuration></configuration>
  <workingDir></workingDir>
  <buildNumber></buildNumber>
  <buildViews>false</buildViews>
  <minifyJs>true</minifyJs>
  <TargetsDirectory></TargetsDirectory>
  <!-- Passed in config replacements -->
  <datacenter></datacenter>
  <tier></tier>
  <connectionStringIPMappings></connectionStringIPMappings>
  <connectionString></connectionString>
  <errorConnectionString></errorConnectionString>
  <!-- Declared here so the LocalizedStringsConnectionString replacement in
       ReplaceConfigs has an explicit pass-in point; it was previously referenced
       without ever being declared (an undefined MSBuild property silently
       expands to empty). Defaults to empty, so behavior is unchanged unless
       a value is supplied. -->
  <translationConnectionString></translationConnectionString>
  <moonspeakDontTrackUsage>false</moonspeakDontTrackUsage>
  <moonspeaktools></moonspeaktools>
  <usePrecompiledViewEngine>false</usePrecompiledViewEngine>
  <alternativeServers></alternativeServers>
  <!-- Derived Parameters -->
  <WebSourceDirectory>$(workingDir)\StackOverflow</WebSourceDirectory>
  <LocalizationSourceDirectory>$(workingDir)\StackOverflow.Localization</LocalizationSourceDirectory>
</PropertyGroup>
<!--
#### THIS IS HOW WE ROLL:
CompileWeb - ReplaceConfigs - - - - - - BuildViews - - - - - - - - - - - - - PrepareStaticContent
\ /|
'- BundleJavaScript - TranslateJsContent - CompileNode - '
##### NOTE:
since msbuild requires separate projects for parallel execution of targets, this build file is copied 2 times,
the DefaultTargets of each copy is set to one of BuildViews or CompileNode. thus the absence
of the DependsOnTargets="ReplaceConfigs" on those _call_ targets
-->
<!-- Imports -->
<Import Project="$(TargetsDirectory)\MSBuild.Community.Tasks.Targets"/>
<UsingTask TaskName="JSBundlerTask"
AssemblyFile="$(TargetsDirectory)\StackOverflow.BuildTasks.dll" />
<!-- Rebuilds both the web and test projects.. -->
<Target Name="CompileWeb">
<!-- NOTE(review): despite the name (and the comment above), only the Localization
     project is built here - confirm the web project itself is compiled by an
     earlier build step. -->
<MSBuild Projects="$(LocalizationSourceDirectory)\StackOverflow.Localization.csproj"
Properties="Configuration=$(configuration)"
Targets="Build"/>
<Message Text="CompileWeb complete" />
</Target>
<Target Name="ReplaceConfigs" DependsOnTargets="CompileWeb">
<!-- Rewrites the site's config files in place with environment-specific values
     (connection strings, datacenter/tier, debug and view-compilation flags)
     using regex-based FileUpdate tasks from MSBuild.Community.Tasks. -->
<!-- Replace "DefaultConnectionString" - note the appending of provider name in ReplacementText -->
<FileUpdate
Files="$(WebSourceDirectory)\connectionStrings.config"
Regex="connectionString=.*"
ReplacementText="connectionString=&quot;$(connectionString)&quot; providerName=&quot;System.Data.SqlClient&quot; /&gt;"
/>
<!-- Always turn off web app debug setting -->
<FileUpdate
Files="$(WebSourceDirectory)\Web.config"
Regex="compilation debug=&quot;true&quot;"
ReplacementText="compilation debug=&quot;false&quot;"
/>
<!-- Always turn on batch view compilation -->
<FileUpdate
Files="$(WebSourceDirectory)\Web.config"
Regex="batch=&quot;false&quot; targetFramework="
ReplacementText="batch=&quot;true&quot; targetFramework="
/>
<!-- Disable multithreading when building MoonSpeak views on prod, it speeds up the local story but is hella dangerous on start -->
<FileUpdate
Files="$(WebSourceDirectory)\Web.config"
Regex="allowMultiThreading=&quot;true&quot;"
ReplacementText="allowMultiThreading=&quot;false&quot;"
/>
<!-- Only track string usage on dev, it's pointless time on prod -->
<FileUpdate
Files="$(WebSourceDirectory)\Web.config"
Regex="dontTrackUsage=&quot;false&quot;"
ReplacementText="dontTrackUsage=&quot;$(moonspeakDontTrackUsage)&quot;"
/>
<!-- errorstore on prod will point to a database -->
<FileUpdate
Files="$(WebSourceDirectory)\Web.config"
Regex="&lt;ErrorStore type=&quot;Memory&quot;\s?/&gt;"
ReplacementText="&lt;ErrorStore type=&quot;SQL&quot; connectionString=&quot;$(errorConnectionString)&quot; /&gt;"
/>
<!-- datacenter will be change based on build location -->
<FileUpdate
Files="$(WebSourceDirectory)\appSettings.config"
Regex="&lt;add key=&quot;Datacenter&quot; value=&quot;&quot;\s?/&gt;"
ReplacementText="&lt;add key=&quot;Datacenter&quot; value=&quot;$(datacenter)&quot; /&gt;"
/>
<!-- tier will be change based on build settings -->
<FileUpdate
Files="$(WebSourceDirectory)\appSettings.config"
Regex="&lt;add key=&quot;Tier&quot; value=&quot;Local&quot;\s?/&gt;"
ReplacementText="&lt;add key=&quot;Tier&quot; value=&quot;$(tier)&quot; /&gt;"
/>
<!-- IP mappings will change based on build location -->
<FileUpdate
Files="$(WebSourceDirectory)\appSettings.config"
Regex="&lt;add key=&quot;ConnectionStringIPMappings&quot; value=&quot;&quot;\s?/&gt;"
ReplacementText="&lt;add key=&quot;ConnectionStringIPMappings&quot; value=&quot;$(connectionStringIPMappings)&quot; /&gt;"
/>
<FileUpdate
Files="$(WebSourceDirectory)\appSettings.config"
Regex="&lt;add key=&quot;usePrecompiledViewEngine&quot; value=&quot;false&quot;\s?/&gt;"
ReplacementText="&lt;add key=&quot;usePrecompiledViewEngine&quot; value=&quot;$(usePrecompiledViewEngine)&quot; /&gt;"
/>
<FileUpdate
Files="$(WebSourceDirectory)\appSettings.config"
Regex="&lt;add key=&quot;AlternativeServers&quot; value=&quot;&quot;\s?/&gt;"
ReplacementText="&lt;add key=&quot;AlternativeServers&quot; value=&quot;$(alternativeServers)&quot; /&gt;"
/>
<!-- in production, these are static files (created in the BundleJavaScript target) -->
<!-- blanks out the whole JSBundlerHandler line(s) in Web.config rather than removing them -->
<FileUpdate
Files="$(WebSourceDirectory)\Web.config"
Regex="^.*JSBundlerHandler.*$"
ReplacementText=" "
Multiline="True"
/>
<!-- update connection string for fetching translations -->
<!-- NOTE(review): ensure $(translationConnectionString) is supplied along with the
     other pass-in parameters - an undefined MSBuild property expands to empty. -->
<FileUpdate
Files="$(WebSourceDirectory)\appSettings.config"
Regex="&lt;add key=&quot;LocalizedStringsConnectionString&quot; value=&quot;[^&quot;]*&quot;\s?/&gt;"
ReplacementText="&lt;add key=&quot;LocalizedStringsConnectionString&quot; value=&quot;$(translationConnectionString)&quot; /&gt;"
/>
<!-- setup moonspeak tool configs -->
<Copy
SourceFiles="$(workingDir)\Build\Jerome.exe.config"
DestinationFolder="$(moonspeaktools)"
/>
<Copy
SourceFiles="$(workingDir)\Build\MoonSpeak.Importer.exe.config"
DestinationFolder="$(moonspeaktools)"
/>
<Message Text="ReplaceConfigs complete"/>
</Target>
<!-- Parallel bootstrapper -->
<Target Name="PrepareStaticContent" DependsOnTargets="ReplaceConfigs">
<!-- Fans out to BuildViews and CompileNode in parallel: MSBuild only parallelizes
     across *projects*, so this build file is copied twice and each copy's
     DefaultTargets is rewritten to the target it should run. -->
<PropertyGroup>
<BuildFile>$(workingDir)\Build\tc.website.ny.msbuild</BuildFile>
<CallBuildViews>$(workingDir)\Build\tc.website.ny.BuildViews.call</CallBuildViews>
<CompileNode>$(workingDir)\Build\tc.website.ny.CompileNode.call</CompileNode>
</PropertyGroup>
<!-- fan out setup-->
<Message Text="Creating fan out call projects"/>
<Copy SourceFiles="$(BuildFile);$(BuildFile)" DestinationFiles="$(CallBuildViews);$(CompileNode)" />
<FileUpdate
Files="$(CallBuildViews)"
Regex="DefaultTargets=&quot;PrepareStaticContent&quot;"
ReplacementText="DefaultTargets=&quot;BuildViews&quot;"
/>
<FileUpdate
Files="$(CompileNode)"
Regex="DefaultTargets=&quot;PrepareStaticContent&quot;"
ReplacementText="DefaultTargets=&quot;CompileNode&quot;"
/>
<!-- fan out execute -->
<!-- all pass-in parameters must be forwarded explicitly: the .call copies are
     independent project files and inherit nothing -->
<MSBuild
BuildInParallel="true"
Projects="$(CallBuildViews);$(CompileNode);"
Properties="configuration=$(configuration);workingDir=$(workingDir);buildNumber=$(buildNumber);buildViews=$(buildViews);minifyJs=$(minifyJs);TargetsDirectory=$(TargetsDirectory);moonspeaktools=$(moonspeaktools)" />
<Message Text="PrepareStaticContent complete"/>
</Target>
<Target Name="BuildViews" Condition="$(buildViews) == 'true'">
<!-- Skipped entirely unless buildViews=true was passed in. -->
<!-- Precompile views after we have changed web.config files, since this is what aspnet_compiler.exe uses when compiling -->
<MSBuild Projects="$(WebSourceDirectory)\StackOverflow.csproj"
Properties="Configuration=$(configuration);SolutionDir=$(workingDir);ReferencePath=$(WebSourceDirectory)\bin;MvcBuildViews=$(buildViews)"
Targets="BuildViews" />
<Message Text="BuildViews complete" />
</Target>
<Target Name="BundleJavaScript">
<!-- Concatenates .jsbundle manifests into static JS files using the custom
     JSBundlerTask from StackOverflow.BuildTasks.dll (see UsingTask above). -->
<ItemGroup>
<Files Include="$(WebSourceDirectory)\Content\js\**\*.jsbundle" />
</ItemGroup>
<JSBundlerTask BundleFiles="@(Files)" />
<Message Text="BundleJavaScript completed"/>
</Target>
<Target Name="TranslateJSContent" DependsOnTargets="BundleJavaScript">
<!-- Runs Jerome.exe to rewrite the bundled JS files with localized strings.
     Jerome's stdout is captured to a log file; the Exec uses ContinueOnError
     so the exit code can be inspected and reported before failing the build. -->
<ItemGroup>
<JSFileToTranslate Include="$(WebSourceDirectory)\Content\Js\*.js" />
</ItemGroup>
<PropertyGroup>
<DataSource>$(LocalizationSourceDirectory)\bin\$(Configuration)\StackOverflow.Localization.dll</DataSource>
<JeromeExe>$(moonspeaktools)\Jerome.exe</JeromeExe>
<JeromeStdOutFile>$(workingDir)\JeromeStdOut.log</JeromeStdOutFile>
</PropertyGroup>
<!-- we want output -->
<MakeDir Directories="$(LocalizationSourceDirectory)\bin\App_Data" ContinueOnError="true" />
<Exec Command="$(JeromeExe) rewrite $(DataSource) &quot;@(JSFileToTranslate, ';')&quot; > &quot;$(JeromeStdOutFile)&quot; 2>&amp;1"
WorkingDirectory="$(LocalizationSourceDirectory)\bin\$(configuration)"
ContinueOnError="true">
<Output ItemName="JeromeExitCode" TaskParameter="ExitCode" />
</Exec>
<ReadLinesFromFile File="$(JeromeStdOutFile)">
<Output ItemName="JeromeStdOut" TaskParameter="Lines"/>
</ReadLinesFromFile>
<Message Text="Rewrite complete, exit code: %(JeromeExitCode.Identity) "/>
<Message Text="@(JeromeStdOut->'%(Identity)', '%0d%0a')"/>
<!-- fail the build ourselves (with the captured log) on any non-zero exit code -->
<Error Condition="'%(JeromeExitCode.Identity)'!='0'"
Text="Jerome rewrite failed (exit code %(JeromeExitCode.Identity)): @(JeromeStdOut->'%(Identity)', '%0d%0a')" />
<ItemGroup>
<JSFileEnglish Include="$(WebSourceDirectory)\Content\Js\*.en.js" />
</ItemGroup>
<!-- promote each *.en.js back over its base *.js file -->
<Copy SourceFiles="%(JSFileEnglish.Identity)" DestinationFiles="$([System.Text.RegularExpressions.Regex]::Replace('%(JSFileEnglish.Identity)', '\.en\.js$', '.js'))" />
<Message Text="TranslateJSContent complete" />
</Target>
<!-- Run node.js to generate our CSS and JS -->
<PropertyGroup>
<!-- 8192 MB V8 old-space heap: the content compile can be memory-hungry -->
<CompileNodeCommand>node.exe --max-old-space-size=8192 node-compile $(WebSourceDirectory)\Content</CompileNodeCommand>
</PropertyGroup>
<Target Name="CompileNode" DependsOnTargets="TranslateJSContent">
<!-- --minify is only appended when the minifyJs parameter is true -->
<PropertyGroup Condition="'$(minifyJs)' == 'true'">
<CompileNodeJsMinify>--minify</CompileNodeJsMinify>
</PropertyGroup>
<Message Text="Compiling JavaScript and Stylesheets"/>
<Exec WorkingDirectory="$(workingDir)\Build" Command="$(CompileNodeCommand) --glob=**/*.{css,less,js} $(CompileNodeJsMinify)" />
<Message Text="Compiling JavaScript and Stylesheets complete"/>
</Target>
</Project>
# Toggle-Site.ps1
function ToggleSite
{
    # Starts or stops an IIS website over an established PSSession.
    # Any remoting failure is fatal: the trap reports it and exits the deploy (code 2).
    param (
        [System.Management.Automation.Runspaces.PSSession]$ServerSession,
        [string]$Action,   # "stop" shuts the site down; anything else starts it
        [string]$Site,
        # Fix: WebsiteDeploy.ps1 passes -CacheDirectoryRequired on the "start" call;
        # without this declaration that call throws a parameter binding error.
        # The flag is currently unused here - TODO(review): confirm the intended
        # cache-directory behavior and implement it.
        [switch]$CacheDirectoryRequired
    )
    trap [Exception] {
        # Renamed from $error to avoid shadowing PowerShell's automatic $Error variable.
        $errMsg = $("ERROR - IIS Remoting: " + $_.Exception.GetType().FullName)
        $detail = $($_.Exception.Message);
        Write-Host $errMsg -foregroundcolor "Red"
        Write-Host $detail -foregroundcolor "Red"
        "Exiting..."
        exit 2;
    }
    # (fixed a stray "(" that unbalanced the status label)
    Execute "IIS: $($ServerSession.ComputerName) - $Site - $Action" {
        if ($Action -eq "stop") {
            "Shutting down $Site on $($ServerSession.ComputerName)..."
            Invoke-Command $ServerSession {
                param($Site)
                Import-Module WebAdministration
                WebAdministration\Stop-WebSite $Site
            } -ArgumentList $Site -EA stop
        } else {
            "Starting $Site on $($ServerSession.ComputerName)..."
            Invoke-Command $ServerSession {
                param($Site)
                Start-WebSite $Site
            } -ArgumentList $Site -EA stop
        }
        "Success!"
    }
}
# HAProxy.ps1
function HAProxyPost
{
# Posts an HAProxy admin action (e.g. drain/hdown/ready) for $Server against every
# listed backend on every HAProxy admin endpoint. Exits the whole deploy on failure.
param (
[string]$Server,
[string]$Action,
[string[]]$Backends,
[string[]]$HAProxyServers,
[int]$HAProxyPort
)
# No HAProxy servers configured: skip load-balancer steps entirely.
if ($HAProxyServers.Length -eq 0) {
Return
}
# A single empty string (artifact of TeamCity comma-splitting) also means "none".
if ($HAProxyServers.Length -eq 1 -and $HAProxyServers[0].Length -eq 0) {
Return
}
Execute "HAProxy: $Server - $Action" {
foreach($Backend in $Backends) {
if ($Backend.Length -eq 0) {
Continue
}
try {
foreach($HAProxyServer in $HAProxyServers) {
# Basic auth from HAProxyUser/HAProxyPass environment variables.
$creds = [System.Convert]::ToBase64String([System.Text.Encoding]::ASCII.GetBytes("$Env:HAProxyUser`:$Env:HAProxyPass"))
$uri = "http://$HAProxyServer`:$HAProxyPort/path"
$headers = @{ Authorization = "Basic $creds" }
$params = @{ s = $server; action = $action; b = $Backend }
# MaximumRedirection 0: the admin endpoint answers a successful POST with a
# 303 redirect, which is treated as the success signal below.
$response = Invoke-WebRequest -Uri $uri -Headers $headers -Method Post -Body $params -MaximumRedirection 0 -UseBasicParsing
if ($response.StatusCode -ne 303) {
"Failed $Action on $Server, exiting."
exit 1
}
}
} catch [Exception] {
"ERROR: problem contacting HAProxy: $($_.Exception.Message)"
exit 1
}
}
}
}
# Copy-Directory.ps1
function CopyDirectory {
# Robocopies $Source to $Destination with the supplied options, aborting the
# deploy (exit 1) on a missing source or a copy error.
param (
[string]$Server,
[string]$Source,
[string]$Destination,
[string[]]$Options
)
Execute "RoboCopy: $Server" {
if (-not (test-path $Source))
{
"Source directory not found: $Source - nothing to copy, aborting"
exit 1
}
if (-not (test-path $Destination))
{
"Directory does not exist, creating: $Destination"
mkdir $Destination
}
"Copying build output from $Source to $Destination..."
$CmdArgs = @("$Source","$Destination",$Options)
Write-Host "Copy Options: $Options"
$Output = robocopy @CmdArgs
# NOTE(review): failure is detected by scanning robocopy's output lines for
# "ERROR :". Robocopy also signals failure via exit codes >= 8 ($LASTEXITCODE),
# which this does not check - confirm the text match catches all failure modes.
if($Output -match "ERROR :") {
"Copy failed, aborting"
exit 1
} else {
"Copy Successful!"
}
}
}
# WebsiteDeploy.ps1
param (
# HAProxy admin endpoints (may arrive as one comma-joined string from TeamCity)
[string[]]$HAProxyServers,
[int]$HAProxyPort,
# Target web servers, deployed to in order
[string[]]$Servers,
# HAProxy backend names the servers belong to
[string[]]$Backends,
# Passed to the Delay helper between draining and stopping the site
[int]$Delay = 0,
# Passed to the Delay helper between consecutive servers
[int]$DelayBetween = 0,
[string]$Site,
[string]$WorkingDir,
# Robocopy /XD and /XF exclusions (each may be comma-joined)
[string[]]$excludeFolders,
[string[]]$excludeFiles,
# Forwarded to ToggleSite on the "start" call
[switch]$CacheDirectoryRequired,
# When both are set, static content is pre-copied to every server before the rolling deploy
[string]$ContentSource,
[string]$ContentSStaticFolder,
# Hook scripts run via RunExternalScript at the corresponding deploy phases
[string]$BeginScript,
[string]$BeforeCopyScript,
[string]$AfterCopyScript,
[string]$EndScript,
# When set, the static-content copy drops /MIR (no deletions on the target)
[switch]$UpdatedStaticContentOnly
)
# TeamCity calls from cmd.exe, screwing the array here...we get what we *think* is one string
if ($Servers.Length -eq 1) {
    $StartingServers = $Servers[0].split(',')
} else {
    # Fix: $StartingServers was previously only assigned in the one-string case,
    # leaving it undefined (so the static pre-copy and ProcessServers loops did
    # nothing) whenever a genuine array of servers was passed.
    $StartingServers = $Servers
}
if ($Backends.Length -eq 1) {
    $Backends = $Backends[0].split(',')
}
if ($HAProxyServers.Length -eq 1) {
    $HAProxyServers = $HAProxyServers[0].split(',')
}
RunExternalScript $BeginScript
#5 Retries, Subdirectories, 8 Threads, No File Logging, No Folder Logging, Mirror, wait lock of 2 secs
$CopyOptions = @("/R:5","/E","/MT:8","/NP","/NFL","/NDL","/MIR","/W:2")
# Append /XD + each excluded folder (splitting a single comma-joined value if needed)
if ($ExcludeFolders.Length -gt 0) {
$CopyOptions = $CopyOptions + "/XD"
if ($ExcludeFolders.Length -eq 1) {
$ExcludeFolders = $ExcludeFolders[0].split(',')
}
foreach ($f in $excludeFolders) {
$CopyOptions = $CopyOptions + $f
}
}
# Append /XF + each excluded file, same comma-splitting treatment
if ($ExcludeFiles.Length -gt 0) {
$CopyOptions = $CopyOptions + "/XF"
if ($ExcludeFiles.Length -eq 1) {
$ExcludeFiles = $ExcludeFiles[0].split(',')
}
foreach ($f in $excludeFiles) {
$CopyOptions = $CopyOptions + $f
}
}
Write-Host "Copy Options:" @($copyOptions)
# if contentSource and contentSStaticFolder are specified, pre-copy content there on all servers before copying applications
# With -UpdatedStaticContentOnly, drop /MIR so nothing is deleted on the target.
if ($UpdatedStaticContentOnly) {
$StaticCopyOptions = $CopyOptions | foreach { if ($_ -ne "/MIR") { $_ } }
} else {
$StaticCopyOptions = $CopyOptions
}
if ($ContentSource -and $ContentSStaticFolder) {
for ($i=0; $i -lt $StartingServers.Length; $i++) {
$s = $StartingServers[$i]
# NOTE(review): CopyDirectory wraps itself in Execute as well, so this nests
# Execute blocks - confirm Execute tolerates nesting.
Execute "Server: $s - Static Content" {
CopyDirectory -Server $s -Source $ContentSource -Destination "\\$s\c$\Sites\$ContentSStaticFolder" -Options $StaticCopyOptions
}
}
}
# Accumulates servers we failed to reach; consulted for the retry pass below.
$global:ErroredServers = @()
function ProcessServers($Servers) {
# Rolling deploy: for each server - drain from HAProxy, stop IIS, copy the build,
# restart IIS, re-enable in HAProxy. Unreachable servers are recorded in
# $global:ErroredServers; if half or more fail, the whole deploy stops.
# loop through all servers passed in
for ($i=0; $i -lt $Servers.Length; $i++) {
$s = $Servers[$i]
# generate a session on the target server for re-use
$ServerSession = $null
$ServerSession = Get-ServerSession $s
if ($ServerSession -ne $null) {
Execute "Server: $s" {
HAProxyPost -Server $s -Action "drain" -Backends $Backends -HAProxyServers $HAProxyServers -HAProxyPort $HAProxyPort
# delay between HAProxy taking a server out of rotation and actually killing the site, so current requests can finish
Delay -Delay $Delay
# kill website in IIS
ToggleSite -ServerSession $ServerSession -Action "stop" -Site $Site
# Inform HAProxy this server is down, so we don't come back up immediately
HAProxyPost -Server $s -Action "hdown" -Backends $Backends -HAProxyServers $HAProxyServers -HAProxyPort $HAProxyPort
#BeforeCopyScript
RunExternalScript $BeforeCopyScript
# robocopy!
CopyDirectory -Server $s -Source $WorkingDir -Destination "\\$s\c$\Sites\$Site" -Options $CopyOptions
#AfterCopyScript
RunExternalScript $AfterCopyScript
# restart website in IIS
ToggleSite -ServerSession $ServerSession -Action "start" -Site $Site -CacheDirectoryRequired:$CacheDirectoryRequired
# stick the site back in HAProxy rotation
HAProxyPost -Server $s -Action "ready" -Backends $Backends -HAProxyServers $HAProxyServers -HAProxyPort $HAProxyPort
# session cleanup
$ServerSession | Remove-PSSession
}
}
else {
$global:ErroredServers += $s
Write-Warning "Failed to Connect to $s and deploy the site. Total of $($ErroredServers.Count) failures."
}
# Abort once failures reach half the fleet - better some old servers than none.
if ($global:ErroredServers.Count -ge $Servers.Count / 2) {
Write-Error "$($global:ErroredServers.Count) of $($Servers.Count) failed - stopping before majority"
exit 1
}
# Pause between servers (skipped after the last one).
if ($i -lt $Servers.Length - 1) {
Delay -Delay $DelayBetween
}
""
}
}
ProcessServers -Servers $StartingServers
# One retry pass for servers that could not be reached, after resetting PSRemoting.
if ($global:ErroredServers.count -gt 0) {
Write-Host "Resetting PSRemoting."
Enable-PSRemoting -Force -SkipNetworkProfileCheck -Verbose
$FailedServers = $global:ErroredServers
$global:ErroredServers = @()
Write-Host "Trying failed servers again."
ProcessServers -Servers $FailedServers
# Still failing after the retry: report and fail the build.
if ($global:ErroredServers.count -gt 0) {
$ofs = ', '
Write-Host "Failed to connect to $global:ErroredServers"
exit 1
}
}
RunExternalScript $EndScript
var glob = require('glob'),
path = require('path'),
fs = require('fs'),
uglify = require('uglify-js'),
CleanCSS = require('clean-css'),
less = require('less'),
EventEmitter = require('events').EventEmitter,
url = require('url'),
util = require('util'),
crypto = require('crypto'),
cluster = require('cluster'),
http = require('http'),
numCPUs = require('os').cpus().length,
// root content folder to scan: first CLI argument
staticRoot = process.argv[2],
// default file mask; overridable via --glob / --less
filesGlob = '**/*.{css,less,js}',
minifyJs = false,
minifyCss = true,
// lines of context shown around a cache-breaking error (see getErrorContext)
contextToShow = 10,
testOnly = false,
maxWorkerCount = 8,
// outputs for files at or under this many chars go through the master's cache
cacheFileLimit = 200;
// No folder argument: print usage and bail.
if (!staticRoot) {
console.log('Usage: node node-compile folder mask');
console.log('Example: node node-compile ../StackOverflow/Content');
console.log('Example: node node-compile ../StackOverflow/Content --minify');
console.log('Example: node node-compile ../StackOverflow/Content --glob="Js/*.js" --minify');
console.log('Example: node node-compile ../StackOverflow/Content --glob="**/*.{css,less}"');
process.exit(-1);
}
// Parse the remaining CLI flags; each overrides a default declared above.
process.argv.forEach(function(arg) {
    if (arg.indexOf('--glob') === 0)
        filesGlob = arg.split('=')[1];
    if (arg.indexOf('--minify') === 0)
        minifyJs = true;
    if (arg.indexOf('--test') === 0)
        testOnly = true;
    if (arg.indexOf('--less') === 0)
        filesGlob = '**/*.less';
    if (arg.indexOf('--no-less-compress') === 0)
        minifyCss = false;
    if (arg.indexOf('--context') === 0)
        // Fix: coerce to a number. getErrorContext compares this value with === against
        // a numeric line counter, so the raw string from split() would never match and
        // the context window silently degraded.
        contextToShow = parseInt(arg.split('=')[1], 10);
});
// Fix: `root` was never defined anywhere in this script (ReferenceError on load);
// the folder being scanned is staticRoot. siteRoot is used by cacheBreakCss to
// resolve absolute url() paths.
siteRoot = staticRoot;
var events = new EventEmitter();
// Fail fast if the supplied path is missing or not a directory.
var rootStat = fs.statSync(staticRoot);
if (!(rootStat && rootStat.isDirectory())) {
    console.error(staticRoot + ' is not a directory');
    process.exit(-1);
}
// Skip partials (_-prefixed), prebuilt .min files, less include dirs, third-party code and design assets.
var exclude = /(\\|\/)_.*\\|(\.min\.(css|js)$)|((\\|\/)_(.+?)\.less$)|((\\|\/)less(\\|\/).*\.less$)|\\PartialJS\\|\\third-party\\|((\\|\/)_design(\\|\/))/;
function processLess(file, source, done) {
// Compile a .less source to CSS and cache-break its url() references.
// Errors are fatal via fileError (which throws / exits the worker).
var dirname = path.dirname(file);
// @import search path is the file's own directory; compression follows --no-less-compress
var options = { paths: [dirname], compress: minifyCss };
less.render(source, options, function(e, css) {
if (e) fileError(file, e); // fileError never returns, so css is valid past this point
css = cacheBreakCss(file, css.css);
done(null, css);
});
}
var cleaner = new CleanCSS();
function processCss(file, source, done) {
// Minify raw CSS and cache-break its url() references.
// NOTE(review): assumes a clean-css version whose minify() returns a string -
// confirm against the pinned dependency (later major versions return an object).
var minCss = cleaner.minify(source);
minCss = cacheBreakCss(file, minCss);
done(null, minCss);
}
var compressor = uglify.Compressor({ warnings: false });
function processJs(file, source, done) {
// Minify a JS source with uglify-js: parse, compress, mangle, print.
// Reports old/new sizes in `notes`; parse errors are rethrown with location info.
try {
var ast = uglify.parse(source),
s = uglify.OutputStream({ quote_keys: true });
ast.figure_out_scope();
ast = ast.transform(compressor);
// re-scope after compression so mangling sees the transformed tree
ast.figure_out_scope();
ast.compute_char_frequency();
ast.mangle_names();
ast.print(s);
var code = s.get(),
oldLength = source.length,
newLength = code.length;
// NOTE(review): Math.round takes one argument; the ", 2" is ignored, so the
// percentage is rounded to a whole number, not two decimal places.
var notes = 'Old size: ' + oldLength.toLocaleString() + ', New Size: ' + newLength.toLocaleString() + ' (' + Math.round(newLength/oldLength*100, 2) + '%)'
done(null, code, notes);
} catch (e) {
throw new Error('Error parsing ' + file + ': ' + e.message + '\nLine ' + e.line + ', Col ' + e.col + ', Pos ' + e.pos);
}
}
function getHash(file) {
var sha = crypto.createHash('sha1');
sha.setEncoding('hex');
var contents = fs.readFileSync(file);
sha.write(contents);
sha.end();
return sha.read();
}
// Per-process memo tables: file path -> content hash, and contents -> compiled output.
var cacheBreakers = {},
    outputCache = {};
function getCacheBreaker(file) {
    // Hash each file at most once per run; repeated url() references reuse the memo.
    if (typeof cacheBreakers[file] === 'undefined') {
        cacheBreakers[file] = getHash(file);
    }
    return cacheBreakers[file];
}
function isSiteLocalImagePath(file) {
    // Protocol-relative (//cdn...), absolute http(s), and data: URIs are not
    // files on this site; everything else is treated as a local path.
    return !(/^\/\//.test(file) || /^http/.test(file) || /^data:/.test(file));
}
function cacheBreakCss(file, css) {
// Append ?v=<12-char content hash> to every site-local url() reference in `css`
// so browser caches bust when the referenced asset changes.
return css.replace(/url\((['"]?)(.+?)\1\)/g, function(match, quote, p1, offset, fullText) {
// exclude already cache broken things
if(/\?v=/g.test(match)) {
return match;
}
// anything else isn't a file ref on the web site
if(!isSiteLocalImagePath(p1)) {
return match;
}
// remove things like IEfix on fonts
p1 = p1.replace(/\?#?iefix$|#.*$/, '');
// Local or absolute
var startFrom = file[0] === '/' ? siteRoot : path.dirname(file);
var urlpath = path.normalize(path.join(startFrom, p1));
var hash;
try {
hash = getCacheBreaker(urlpath);
return 'url(' + quote + p1 + '?v=' + hash.substr(0, 12) + quote + ')';
} catch (e) {
// Hashing failed (typically: referenced file missing) - report with context.
var errString = 'Error cache-breaking file: ' + match + ' at offset ' + offset + '\n' + e.toString();
errString += '\nContext in ' + file + ':\n' + getErrorContext(fullText, offset) + '\n';
fileError(file, errString);
return ''; // we're going boom globally - no one cares...but at least break the CSS, to be safe.
}
});
}
function getErrorContext(text, offset) {
// Build an ANSI-colorized snippet of `text` around `offset`: up to contextToShow
// line breaks before and after, with the offending line highlighted.
// NOTE(review): `j === contextToShow` requires contextToShow to be numeric;
// verify the --context CLI value is converted from string before reaching here.
var contextStart, contextEnd, lineStart, lineEnd, i = 0, j = 0;
// scan backwards counting line breaks: first break marks the line start,
// contextToShow-th break marks the start of the context window
for (i = offset; i > 0; i--) {
if (text.charAt(i) === '\n' || text.charAt(i) === '\r') {
j++;
if (j === 1) {
lineStart = i;
}
if (j === contextToShow) {
contextStart = i;
break;
}
}
}
j = 0;
// same scan forwards for the line end and context end
for (i = offset; i < text.length; i++) {
if (text.charAt(i) === '\n' || text.charAt(i) === '\r') {
j++;
if (j === 1) {
lineEnd = i;
}
if (j === contextToShow) {
contextEnd = i;
break;
}
}
}
// \033[...m are ANSI color escapes: yellow before, red-on-white for the line, yellow after
var context = '\033[33m' + text.substring(contextStart, lineStart) +
'\033[41m\033[37m' + text.substring(lineStart, lineEnd) +
'\033[33m\033[40m' + text.substring(lineEnd, contextEnd);
return context;
}
// Maps file extension -> { name: output filename mapping, code: compile function,
// condition (optional): predicate that can veto processing a file }.
var transformers = {
'less' : {
name : function (name) { return name.replace(/\.less$/, '.css'); },
code : processLess
},
'css' : {
name : function (name) { return name; },
code : processCss,
// Only process if the less file is also not present
condition: function(name) { return !fs.existsSync(name.replace(/\.css$/, '.less')); }
},
'js': {
// NOTE(review): this mapping looks inverted - when minifying, the output
// overwrites the source name; when NOT minifying, a '.min' suffix is appended.
// Confirm this matches how the site references the compiled files.
name: function (name) {
return minifyJs ? name : (name + '.min');
},
code: processJs
}
};
function fileError(filename, e, errorJSON) {
    // Fatal-error funnel. In a worker: report to the master and terminate.
    if (cluster.isWorker) {
        process.send({ msg: 'file-error', filename: filename, e: e, errorJSON: JSON.stringify(e, null, 2) });
        process.exit(-1);
    }
    // In the master: wrap non-Error values so the throw carries the filename;
    // genuine Error instances are rethrown untouched.
    if (e.constructor !== Error) {
        throw new Error('\033[31mError compiling [' + filename + ']: ' + (e.message || e) + '\033[0m');
    }
    throw e;
}
function getExtension(filename) {
    // Text after the final dot, e.g. 'a.min.js' -> 'js'.
    var m = filename.match(/\.([^\.]+)$/);
    return m[1];
}
function getFilename(filename) {
    // Text after the final backslash (Windows-style paths only).
    var m = filename.match(/\\([^\\]+)$/);
    return m[1];
}
function getOutputName(filename) {
    // Map a source filename to its compiled output name via the matching transformer.
    var transformer = transformers[getExtension(filename)];
    return transformer.name(filename);
}
function shouldRead(filename) {
    // A transformer may veto a file through its optional condition hook;
    // no hook means always read.
    var condition = transformers[getExtension(filename)].condition;
    return !condition || condition(filename);
}
function readFile(filename) {
    // Async read; trimmed contents feed the pipeline via the 'file-read' event.
    var onRead = function(e, contents) {
        if (e) fileError(filename, e);
        events.emit('file-read', filename, contents.trim());
    };
    fs.readFile(filename, { encoding: 'utf-8' }, onRead);
}
function cacheCheck(filename, contents) {
    // Small files (a beta site, say) are worth a round-trip to the master's
    // output cache; anything larger goes straight to processing.
    if (contents.length > cacheFileLimit) {
        events.emit('file-ready', filename, contents);
        return;
    }
    process.send({ msg: 'cache-check', filename: filename, contents: contents });
}
function processFile(filename, contents) {
// Run the extension-appropriate transformer over `contents`, offer small outputs
// to the master's cache, and emit 'file-processed' with the result.
// (Only wired up in workers - see the cluster.isWorker branch below - so
// process.send is available here.)
var transformer = transformers[getExtension(filename)].code;
if (!transformer) throw new Error('Unknown file format: ' + filename);
transformer(filename, contents, function(e, output, notes) {
if (e) {
fileError(filename,e);
return;
}
// small inputs: let the master memoize contents -> output for identical files
if (contents.length <= cacheFileLimit) {
process.send({ msg: 'cache-this', filename: filename, contents: contents, output: output });
}
events.emit('file-processed', filename, output, notes);
});
};
function writeOutputFile(filename, output, notes) {
    // Persist compiled output under the transformer-mapped name, then signal
    // 'file-saved'. With --test, report success without touching disk.
    var outName = getOutputName(filename);
    if (testOnly) {
        events.emit('file-saved', filename, outName, output, notes);
        return;
    }
    fs.writeFile(outName, output, { encoding: 'utf-8' }, function(e) {
        if (e) fileError(filename, e);
        events.emit('file-saved', filename, outName, output, notes);
    });
};
function findFiles(root) {
// Glob for candidate files under `root`, filter by the global exclude regex and
// per-transformer conditions, order them js -> less -> css, and emit 'files-found'.
glob(filesGlob, {cwd:root}, function(e, files) {
if (e) fileError(root,e);
// Convert to abs path
files = files.map(function(f) { return path.join(root, f); });
// Global Exclude
files = files.filter(function(f) { return !exclude.test(f); });
// Conditional (per-transformer) check
files = files.filter(shouldRead);
// sort: group by extension in this order, alphabetical within each group
var ordering = ['js', 'less', 'css'];
files.sort(function(a,b) {
var ax = getExtension(a);
var bx = getExtension(b);
return ax !== bx
? (ordering.indexOf(ax) - ordering.indexOf(bx))
: a.localeCompare(b);
});
events.emit('files-found', files);
});
};
function processFiles(files) {
// Master-side work pump: fork up to maxWorkerCount workers (leaving two CPUs
// free, minimum two workers) and hand each worker one file at a time. Workers
// are killed as the queue drains; 'done' fires when the last one finishes.
var workerCount = Math.min(files.length, Math.max(numCPUs - 2, 2), maxWorkerCount),
doneCount = 0;
if (files.length === 0)
return;
// Give `worker` the next file, or kill it if the queue is empty.
var pump = function(worker) {
if(files.length > 0) {
var file = files.shift();
worker.send({compileFile: file});
} else {
worker.kill();
doneCount++;
if (doneCount === workerCount) {
events.emit('done');
}
}
};
function startWorker() {
var worker = cluster.fork();
worker.on('listening', function(address) {
// NOTE(review): cluster workers expose `worker.id`; `worker.workerID` looks
// wrong (would print undefined) unless this targets a very old node - confirm.
console.log('Worker ' + worker.workerID + ' Started, Listening on ' + address.address + ':' + address.port);
// push current flag values to the worker before its first file
worker.send({ setMinifyJs: minifyJs, setTestOnly: testOnly });
pump(worker);
}).on('message', function(msg) { // listening to requests FROM the workers TO the parent process
switch(msg.msg) {
case 'cache-check':
// reply with the memoized output (or undefined for a miss)
worker.send({ cacheResult: { filename: msg.filename, contents: msg.contents, output: outputCache[msg.contents] } });
break;
case 'cache-this':
outputCache[msg.contents] = msg.output;
break;
case 'file-error':
fileError(msg.filename, msg.e, msg.errorJSON);
break;
case 'completed':
if (msg.notes) {
console.info(getFilename(msg.file) + ': ' + msg.notes);
}
pump(worker);
break;
}
});
}
console.log(numCPUs + ' processor(s) detected, utilizing ' + workerCount + ' worker threads for ' + files.length + ' file(s).');
for (var i = 0; i < workerCount; i++) {
startWorker();
}
}
if (cluster.isMaster) {
// Master: time the run, enumerate files, fan them out to workers, report on 'done'.
events.on('begin', function() { console.time('Compile'); })
.on('begin', findFiles)
.on('files-found', function(files) {
console.log('Found ' + files.length + ' files to process with mask ' + filesGlob + ' in ' + staticRoot);
processFiles(files);
})
.on('done', function() { console.timeEnd('Compile'); })
.emit('begin', staticRoot);
}
else if (cluster.isWorker) {
// Worker: wire the per-file pipeline (read -> cache check -> process -> write),
// stand up a throwaway HTTP server (its 'listening' event is what tells the
// master this worker is ready), and service messages from the master.
events.on('file-found', function(filename) {
console.time('compiled [' + getExtension(filename) + ']: ' + getOutputName(filename));
readFile(filename);
})
.on('file-read', cacheCheck)
.on('file-ready', processFile)
.on('file-processed', writeOutputFile)
.on('file-saved', function(oldname, newname, output, notes) {
console.timeEnd('compiled [' + getExtension(oldname) + ']: ' + newname);
process.send({ msg: 'completed', file: newname, result: output, notes: notes });
});
var server = http.createServer(function(req, res) {
res.writeHead(200);
res.end('worker thread ahoy!\n');
});
// port 0: let the OS pick a free port; only the 'listening' signal matters
server.listen(0, function() {
console.log('HTTP IPC server active, listening on port ' + server.address().port + '.');
});
process.on('message', function(msg) {
// flag sync from the master (truthy-only, so defaults are never un-set)
if (msg.setMinifyJs) {
minifyJs = msg.setMinifyJs;
}
if (msg.setTestOnly) {
testOnly = msg.setTestOnly;
}
// a file assignment from the master's pump
if (msg.compileFile) {
events.emit('file-found', msg.compileFile);
}
// answer to an earlier cache-check: hit -> write directly, miss -> process
if (msg.cacheResult) {
var result = msg.cacheResult;
if (result.output) {
writeOutputFile(result.filename, result.output, result.notes);
} else {
events.emit('file-ready', result.filename, result.contents);
}
}
});
}
function handleError(e) {
    // Prefer the stack trace when one exists; otherwise dump the raw value.
    console.error(e.stack ? e.stack : e);
    process.exit(-1);
}
// Any unhandled failure kills the process with a non-zero exit code.
process.on('uncaughtException', handleError)
    .on('error', handleError);

Stack Exchange SQL Database Migrator

This is a simple migration program to handle updates to SQL databases for projects. It takes a directory of SQL scripts and applies any that haven't run yet to a database, in alphabetical sequence.

The traditional setup is for migrator to be available both locally and run as part of a build. You create a single directory in your application with your SQL Migrations. It looks something like this:

\MyProject
   \Migrations
      \001 - First Migration.sql
      \002 - Add Users.Users_AccountId Index.sql
      \migrate.local.bat
      \migrate.dev.bat
      \migrate.prod.bat

That 002 - Add Users.Users_AccountId Index.sql migration may look like this:

If dbo.fnIndexExists('Users','Users_AccountId') = 0
Begin
  Create Nonclustered Index Users_AccountId ON Users (AccountId)
  With (Online = On, Sort_in_TempDB = On, Drop_Existing = Off)
End

Migrations should be able to be run repeatedly - so If checks should be the norm to see if work actually needs doing.

The migrate.*.bat files are optional simple bat files to invoke the migrator locally, or against whatever instance you need handily. The migrator does not use them - they are a convention for devs only. Here's the migrate.local.bat file from the stackexchange.com project:

..\Build\Migrator-Fast --sitesonly=true --sites="Data Source=.;Initial Catalog=StackExchange;Integrated Security=True" %*
PAUSE

Note the %* at the end allows you to do things like migrate.local.bat --force to force a replay of a migration with new content (and therefore a new hash). The other .bat files simply have other connection strings.

A migrator build step in TeamCity is a Command Line build step pointed at your executable location (usually in a \Build folder or similar) with Command parameters like this:

--sitesonly=true --sites=%system.connectionString% --migrationPath="%teamcity.build.workingDir%\stackexchange.com\StackExchange.Migrations" --force

Note --force is typical for dev where mistakes and changes happen. This should not be used in production build steps.

Notes

By default, migrations run inside a transaction to maintain data integrity. To disable this, the first line of your migration .sql file should be:

-- no transaction --

The migrator understands the GO syntax, it will break your migration into separate commands if issued. This can be handy when doing "must be the only statement in the batch" options like creating a function or creating a column and then using it in the same migration.

Handy Stuff

Some common functions to install for use in migrations (could be in your first migration!):

If Not Exists (Select 1 From INFORMATION_SCHEMA.ROUTINES Where SPECIFIC_SCHEMA = 'dbo' And SPECIFIC_NAME = 'fnIndexExists' And ROUTINE_TYPE = 'FUNCTION') 
Begin
   Exec('
Create Function [dbo].[fnIndexExists](@table_name nvarchar(max), @index_name nvarchar(max))
Returns bit 
Begin 
    Return (Select Cast(Count(*) as Bit) From sys.indexes Where object_id = Object_Id(@table_name) AND name = @index_name);
End')
End

If Not Exists (Select 1 From INFORMATION_SCHEMA.ROUTINES Where SPECIFIC_SCHEMA = 'dbo' And SPECIFIC_NAME = 'fnTableExists' And ROUTINE_TYPE = 'FUNCTION') 
Begin
   Exec('
Create Function [dbo].[fnTableExists](@table_name nvarchar(max))
Returns bit 
Begin 
    Return (Select Cast(Count(*) as Bit) From INFORMATION_SCHEMA.TABLES Where TABLE_SCHEMA = SCHEMA_NAME() And TABLE_NAME = @table_name);
End')
End

If Not Exists (Select 1 From INFORMATION_SCHEMA.ROUTINES Where SPECIFIC_SCHEMA = 'dbo' And SPECIFIC_NAME = 'fnColumnExists' And ROUTINE_TYPE = 'FUNCTION') 
Begin
   Exec('
Create Function [dbo].[fnColumnExists](@table_name nvarchar(max), @column_name nvarchar(max))
Returns bit 
Begin 
    Return (Select Cast(Count(*) as Bit) From INFORMATION_SCHEMA.COLUMNS Where TABLE_SCHEMA = SCHEMA_NAME() And TABLE_NAME = @table_name And COLUMN_NAME = @column_name);
End')
End
@StevenLiekens

I like the idea of creating SQL helper functions in the first migration that you can use in later migrations. It inspired me to create functions for other types of objects (sprocs, views, UDTs, ...). They can be found here: https://github.com/StevenLiekens/sqlhelpers

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment