@mike-lawrence (created April 25, 2011)
Donkin et al LBA code and example of fitting the LBA to data
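
# 1_lba-math.r: core LBA (linear ballistic accumulator) distribution functions.
# (The filename is inferred from the source() call in the fitting code further below.)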
# CDF of the first-passage time z for a single LBA accumulator, with start point
# uniform on [0, x0max], threshold chi, and drift rate ~ N(driftrate, sddrift).
fptcdf = function(z,x0max,chi,driftrate,sddrift) {
zs = z*sddrift
zu = z*driftrate
chiminuszu = chi-zu
xx = chiminuszu-x0max
chizu = chiminuszu/zs
chizumax = xx/zs
tmp1 = zs*(dnorm(chizumax)-dnorm(chizu))
tmp2 = xx*pnorm(chizumax)-chiminuszu*pnorm(chizu)
1+(tmp1+tmp2)/x0max
}
# PDF of the first-passage time z for a single LBA accumulator (same
# parameterisation as fptcdf above).
fptpdf = function(z,x0max,chi,driftrate,sddrift) {
zs = z*sddrift
zu = z*driftrate
chiminuszu = chi-zu
chizu = chiminuszu/zs
chizumax = (chiminuszu-x0max)/zs
(driftrate*(pnorm(chizu)-pnorm(chizumax)) + sddrift*(dnorm(chizumax)-dnorm(chizu)))/x0max
}
allrtCDF = function(t,x0max,chi,drift,sdI) {
# Generates CDF for all RTs irrespective of response.
N = length(drift) # Number of responses.
tmp = array(dim = c(length(t),N))
for(i in 1:N){
tmp[,i] = fptcdf(
z = t
, x0max = x0max
, chi = chi
, driftrate = drift[i]
, sddrift = sdI
)
}
1-apply(1-tmp,1,prod)
}
n1PDF = function(t,x0max,chi,drift,sdI) {
# Generates defective PDF for responses on node #1.
N = length(drift) # Number of responses.
if(N>2){
tmp = array(dim = c(length(t),N-1))
for(i in 2:N){
tmp[,i-1] = fptcdf(
z = t
, x0max = x0max
, chi = chi
, driftrate = drift[i]
, sddrift = sdI
)
}
G = apply(1-tmp,1,prod)
}else{
G = 1-fptcdf(
z = t
, x0max = x0max
, chi = chi
, driftrate = drift[2]
, sddrift = sdI
)
}
G*fptpdf(
z = t
, x0max = x0max
, chi = chi
, driftrate = drift[1]
, sddrift = sdI
)
}
n1CDF = function(t,x0max,chi,drift,sdI) {
# Generates defective CDF for responses on node #1.
outs = numeric(length(t))
bounds = c(0,t)
for(i in 1:length(t)){
tmp = "error"
repeat{
if(bounds[i] >= bounds[i+1]){
outs[i] = 0
break
}
tmp = try(
integrate(
f = n1PDF
, lower = bounds[i]
, upper = bounds[i+1]
, x0max = x0max
, chi = chi
, drift = drift
, sdI = sdI
)$value
, silent = T
)
if(is.numeric(tmp)){
outs[i] = tmp
break
}
# Try smart lower bound.
if(bounds[i] <= 0){
bounds[i] = max(c((chi-0.98*x0max)/(max(mean(drift),drift[1])+2*sdI),0))
next
}
# Try smart upper bound.
if(bounds[i+1] == Inf){
bounds[i+1] = 0.02*chi/(mean(drift)-2*sdI)
next
}
stop("Error in n1CDF that I could not catch.")
}
}
cumsum(outs)
}
n1mean = function(x0max,chi,drift,sdI) {
# Generates mean RT for responses on node #1.
pc = n1CDF(Inf,x0max,chi,drift,sdI)
fn = function(t,x0max,chi,drift,sdI,pc){
t*n1PDF(t,x0max,chi,drift,sdI)/pc
}
tmp = integrate(
f = fn
, lower = 0
, upper = 100*chi
, x0max = x0max
, chi = chi
, pc = pc
, drift = drift
, sdI = sdI
)$value
list(mean = tmp,p = pc)
}
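# Illustrative usage (not part of the original gist; all parameter values below are
# arbitrary assumptions chosen only to demonstrate the calls): x0max is the start-point
# range A, chi the threshold b, drift the mean drift rates of the two accumulators, and
# sdI the between-trial SD of drift. Uncomment to run.
# n1PDF(t = c(0.5, 1, 2), x0max = 1, chi = 1.5, drift = c(0.8, 0.2), sdI = 0.3) # defective PDF for node #1
# n1CDF(t = Inf, x0max = 1, chi = 1.5, drift = c(0.8, 0.2), sdI = 0.3)          # probability that node #1 wins the race
# n1mean(x0max = 1, chi = 1.5, drift = c(0.8, 0.2), sdI = 0.3)                  # mean RT and probability for node #1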
actCDF = function(z,t,x0max,chi,drift,sdI) {
# CDF for the distribution (over trials) of activation values at time t.
zat = (z-x0max)/t
zt = z/t
sdi2 = 2*(sdI^2)
exp1 = exp(-((zt-drift)^2)/sdi2)
exp2 = exp(-((zat-drift)^2)/sdi2)
tmp1 = t*sdI*(exp1-exp2)/sqrt(2*pi)
tmp2 = pnorm(zat,mean = drift,sd = sdI)
tmp3 = pnorm(zt,mean = drift,sd = sdI)
(tmp1+(x0max-z+drift*t)*tmp2+(z-drift*t)*tmp3)/x0max
}
actPDF = function(z,t,x0max,chi,drift,sdI) {
# PDF for the distribution (over trials) of activation values at time t.
tmp1 = pnorm((z-x0max)/t,mean = drift,sd = sdI)
tmp2 = pnorm(z/t,mean = drift,sd = sdI)
(-tmp1+tmp2)/x0max
}
lbameans = function(Is,sdI,x0max,Ter,chi) {
# Ter should be a vector of length ncond, the others atomic,
# except Is which is ncond x 2.
ncond = length(Is)/2
outm<-outp<-array(dim = c(ncond,2))
for(i in 1:ncond){
for(j in 1:2){
tmp = n1mean(x0max,chi,drift = Is[i,switch(j,1:2,2:1)],sdI)
outm[i,j] = tmp$mean+Ter[i]
outp[i,j] = tmp$p
}
}
list(
mns = c(outm[1:ncond,2],outm[ncond:1,1])
, ps = c(outp[1:ncond,2],outp[ncond:1,1])
)
}
deadlineaccuracy = function(t,x0max,chi,drift,sdI,guess = .5,meth = "noboundary") {
# Works out deadline accuracy, using one of three
# methods:
# - noboundary = no implicit boundaries.
# - partial = uses implicit boundaries, and partial information.
# - nopartial = uses implicit boundaries and guesses otherwise.
meth = match.arg(meth,c("noboundary","partial","nopartial"))
noboundaries = function(t,x0max,chi,drift,sdI,ulimit = Inf){
# Probability of a correct response in a deadline experiment
# at times t, with no implicit boundaries.
N = length(drift)
tmpf = function(x,t,x0max,chi,drift,sdI){
if(N>2){
tmp = array(dim = c(length(x),N-1))
for(i in 2:N){
tmp[,i-1] = actCDF(x,t,x0max,chi,drift[i],sdI)
}
G = apply(tmp,1,prod)*actPDF(x,t,x0max,chi,drift[1],sdI)
}else{
G = actCDF(x,t,x0max,chi,drift[2],sdI)*actPDF(x,t,x0max,chi,drift[1],sdI)
}
}
outs = numeric(length(t))
for(i in 1:length(t)){
if(t[i] <= 0){
outs[i] = .5
}else{
outs[i] = integrate(
f = tmpf
, lower = -Inf
, upper = ulimit
, t = t[i]
, x0max = x0max
, drift = drift
, sdI = sdI
)$value
}
}
outs
}
if(meth == "noboundary"){
noboundaries(t,x0max,chi,drift,sdI,ulimit = Inf)
}else{
pt = n1CDF(t = t,x0max = x0max,chi = chi,drift = drift,sdI = sdI)
pa = allrtCDF(t = t,x0max = x0max,chi = chi,drift = drift,sdI = sdI)
pguess = switch(
meth
, "nopartial" = guess*(1-pa)
, "partial" = noboundaries(
t = t
, x0max = x0max
, chi = chi
, drift = drift
, sdI = sdI
, ulimit = chi
)
)
pt+pguess
}
}
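
# 2_pq-lba.r: QMPE fitting routines built on the LBA math above.
# (The filename is inferred from the source() call in the example script further below.)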
# Like pnorm, punif etc., evaluates the CDF of the linear BA model at a
# pre-specified set of quantiles. If input "qs" is NULL, instead
# returns predicted quantiles at the probabilities given by qps.
source("1_lba-math.r")
pqlba = function(Is, sdI, x0max, Ter, chi, qs, qps = seq(.1, .9, .2)){
# Input checks.
if(length(Is) != 2) stop("Not two drifts in pqlba.")
dop = (!is.null(qs)) # Switch for return p-values or q-values.
if(dop){
nq = dim(qs)[1] # qs should be quants x responses.
if(length(dim(qs)) != 2) stop("Crazy q-values in pqlba.")
if((dim(qs)[2]) != 2) stop("Wrong number of response choices in q-values for pqlba.")
}
# First get probability of each response.
out = list(p = numeric(2))
out$pfail = prod(pnorm(-Is/sdI))
out$p[1] = n1CDF(t = Inf, x0max = x0max, chi = chi, drift = Is, sdI = sdI)
out$p[2] = n1CDF(t = Inf, x0max = x0max, chi = chi, drift = Is[2:1], sdI = sdI)
# out$p[2] = 1-out$p[1]
if(dop){
# Calculate probability masses in inter-quantile ranges.
out$predp = array(dim = c(nq+1, 2))
for(i in 1:2){
tmpI = switch(i, Is, Is[2:1])
tmp = n1CDF(t = qs[, i]-Ter, x0max = x0max, chi = chi, drift = tmpI, sdI = sdI)
out$predp[, i] = diff(c(0, tmp, out$p[i]))
}
}else{
# Calculate predicted quantile values.
out$predq = array(dim = c(length(qps), 2))
tmpf = function(t, drift, sdI, x0max, chi, p){
n1CDF(t = t, x0max = x0max, chi = chi, drift = drift, sdI = sdI)-p
}
for(i in 1:2){
tmpI = switch(i, Is, Is[2:1])
for(j in 1:length(qps)){
interval = switch((Ter<5)+1, c(1, 1e4), c(1e-3, 10)) # Sec or MSEC.
tmp = uniroot(
f = tmpf
, interval = interval
, x0max = x0max
, chi = chi
, drift = tmpI
, sdI = sdI
, p = qps[j]*out$p[i]
)
out$predq[j, i] = tmp$root+Ter
}
out$p[i] = n1CDF(t = Inf, x0max = x0max, chi = chi, drift = tmpI, sdI = sdI)
}
}
out$p = out$p+out$pfail/2
out
}
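# Illustrative call (not part of the original gist; all values are arbitrary assumptions,
# on a seconds time scale): with qs = NULL, pqlba returns the predicted .1/.3/.5/.7/.9 RT
# quantiles and response probabilities for the given parameter set. Uncomment to run.
# pqlba(Is = c(0.25, 0.75), sdI = 0.3, x0max = 1, Ter = 0.2, chi = 1.5, qs = NULL)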
getpreds = function(I1, I2, sdI, x0max, Ter, chi, pred = F, dat, qps = seq(.1, .9, .2)){
# If pred = F, returns the predicted probability masses for the inter-quantile
# bins (used by obj to compute the QMPE misfit). If pred = T, returns a list
# like "dat" with predicted quantiles and probabilities.
# This function expects the parameters (I1, I2, x0max, chi, Ter, sdI)
# to be 3 arrays (stim1/stim2 x accuracy/neutral/speed).
mod = list()
if(pred){
mod = list(p = array(dim = c(3)), q = array(dim = c(length(qps), 2, 3)))
} else{
mod = list(p = array(dim = c(3)), q = array(dim = c(length(qps)+1, 2, 3)))
}
for(j in 1:3){
tmp = pqlba(
Is = c(I1[j], I2[j])
, sdI = sdI[j]
, x0max = x0max[j]
, Ter = Ter[j]
, chi = chi[j]
, qs = switch(pred+1, dat$q[, , j], NULL)
)
mod$p[j] = tmp$p[1]
mod$q[, , j] = switch(pred+1, tmp$predp, tmp$predq)
}
mod
}
# Objective function for optim: x = c(log(sdI), log(x0max), log(Ter), log(chi-x0max), node-1 drifts).
# Returns the QMPE misfit (negative sum of bin counts times log predicted bin masses), or the predictions when pred = T.
obj = function(x, dat, pred = F, qps = seq(.1, .9, .2), trace = 0){
numits <<- numits+1
sdI = rep(exp(x[1]), 3)
x0max = rep(exp(x[2]), 3)
Ter = rep(exp(x[3]), 3)
chi = rep(exp(x[4]), 3)+x0max
I1 =(x[5:7])
I2 = 1-I1
preds = getpreds(
I1 = I1
, I2 = I2
, sdI = sdI
, x0max = x0max
, Ter = Ter
, chi = chi
, pred = pred
, dat = dat
, qps = qps
)
if(pred == F){
tmp = -sum(dat$pb*log(pmax(preds$q, 1e-10)))
if(trace>0){
if((numits%%trace) == 0){
names(x) = NULL
print(c(exp(x[1:4]), x[5:7], tmp), 2)
}
}
return(tmp)
}else{
return(preds)
}
}
# Fits the LBA to dat via repeated calls to optim (one call per element of maxit),
# each run starting from the previous run's best parameters.
fitter = function(dat, maxit = seq(1000, 9000, 1000), qps = seq(.1, .9, .2), trace = 0){
fit = length(maxit)
I1 = qnorm((1-dat$p), sd = .3*sqrt(2))*2
I1 = .5+.5*I1
tmp = sum1 = .5
Ter = min(dat$q)*.9
x0max = mean(dat$q[4, , ]-dat$q[2, , ])*(4*tmp)
chi =(Ter/4)
sdI = 0.3
par = c(log(c(sdI, x0max, Ter, chi)), I1)
attr(par, "obj") = NA
for(fitnum in 1:fit){ # Do the fitting or not.
numits <<- 0
#return(par)
tmp = optim(
fn = obj
, par = par
, control = list(
maxit = maxit[fitnum]
, parscale = par
)
, dat = dat
, qps = qps
, trace = trace
)
out <- par <- tmp$par
attr(out, "obj") = tmp$value
}
out
}
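
#example fitting script (the gist's third file; its exact filename is not shown here):
#fits the LBA to the data in 4_exampledata.txt using the routines defined above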
#remove everything previously loaded in the R workspace
rm(list=ls())
#load all functions required to fit the LBA
source("2_pq-lba.r")
#setting this as T minimises the output printed to the R workspace
quiet=T
#set the quantiles used in the QMPE fitting
qps=seq(.1,.9,.2)
#RTs outside this range are discarded (those smaller than the first element or larger than the second)
trim=c(180,10000)
#read in the data with column names "difficulty", "correct" and "rt"
rawdata=read.table("4_exampledata.txt",col.names=c("difficulty","correct","rt"))
#Format the data for QMPE fitting
#creates a vector indicating which data will be used based on values given to the trim variable defined earlier
use=(rawdata$rt>trim[1])&(rawdata$rt<trim[2])
#set up a list of names for variables for later use
nms=list(c("err","crct"),c("easy","medium","hard"))
dims=unlist(lapply(nms,length))
#q gets the quantile values (at the probabilities set by qps above) for correct and error responses in each of the three difficulties
q=tapply(rawdata$rt[use],list(rawdata$correct[use],rawdata$difficulty[use]),quantile,probs=qps)
q=array(unlist(q),dim=c(length(qps),dims),dimnames=c(list(qps),nms))
#n gets the number of correct and error responses in each difficulty condition
n=tapply(rawdata$rt[use],list(rawdata$correct[use],rawdata$difficulty[use]),length)
#p gets the proportion of correct responses for each difficulty level
p=tapply(rawdata$correct[use],list(rawdata$difficulty[use]),mean)
#gives names to previous variables
dimnames(n)=nms ; dimnames(p)=nms[-1]
#calculates how many observations fall in each inter-quantile bin for correct and error responses in each of the three difficulties
pb=array(rep(n,each=length(qps)+1),dim=c(length(qps)+1,dim(n)),
dimnames=c(list(NULL),dimnames(n)))*c(.1,.2,.2,.2,.2,.1)
#use p, n, q and pb to make a list
data=list(p=p,n=n,q=q,pb=pb)
#here is the call that fits the LBA to the data. It requires two arguments: dat, the data, and maxit, the number of optimisation iterations to allow in each successive stage of the fit
fit=fitter(dat=data,maxit=c(100,200,500,1000,2000,5000,10000))
#put the results of the fit into a variable called pars
pars=c(exp(fit[1:4]),fit[5:7])
#the fitting function gives back (b-A) instead of b. To make the output look like that given in Brown and Heathcote (2008) we add A to b-A to get b
pars[4]=pars[4]+pars[2]
#the fitting algorithm returns 1 minus the drift rates for the correct responses. Change these so that they are the actual drift rates
pars[5:7]=1-pars[5:7]
fit[5:7]=1-fit[5:7]
#give names to the parameters
names(pars)=c("s","A","ter","b","v1","v2","v3")
#print the parameters onscreen
print(pars,3)
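
#optional extra step (not part of the original gist): generate model predictions from the fitted parameters.
#obj() expects the raw parameter vector, so first undo the 1-minus-drift transformation applied to fit[5:7] above
preds=obj(x=c(fit[1:4],1-fit[5:7]),dat=data,pred=T,qps=qps)
#preds$q holds the predicted RT quantiles (rows=qps, columns in the same error/correct order as data$q, slices=difficulty)
#and preds$p the corresponding accumulator-1 response probabilities; compare e.g. round(preds$q) with round(data$q)
print(preds$q,digits=3)

#4_exampledata.txt: one row per trial, columns are difficulty (1=easy, 2=medium, 3=hard),
#correct (1=correct, 0=error), and rt (response time, in ms given the trim bounds above)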
1 1 371
1 1 583
1 1 331
1 1 441
1 1 379
1 1 484
1 1 378
1 1 336
1 1 432
1 1 317
1 1 621
1 1 471
1 1 332
1 1 473
1 1 550
1 1 686
1 1 521
1 1 533
1 1 529
1 1 478
1 1 358
1 1 585
1 1 542
1 1 451
1 1 302
1 1 664
1 1 313
1 1 431
1 1 807
1 1 458
1 1 371
1 1 752
1 1 693
1 1 618
1 1 426
1 1 533
1 1 612
1 1 632
1 1 546
1 1 359
1 1 495
1 1 679
1 1 425
1 1 387
1 1 449
1 1 477
1 1 466
1 1 614
1 1 663
1 1 520
1 1 358
1 1 386
1 1 424
1 1 431
1 1 879
1 1 328
1 1 602
1 1 477
1 1 306
1 1 386
1 1 533
1 1 387
1 1 404
1 1 697
1 1 637
1 1 845
1 1 604
1 1 474
1 1 468
1 1 714
1 0 543
1 1 480
1 1 350
1 1 337
1 1 552
1 1 1117
1 1 432
1 1 714
1 1 355
1 1 305
1 1 771
1 1 367
1 1 705
1 1 386
1 1 448
1 1 528
1 1 566
1 1 592
1 1 321
1 1 525
1 1 396
1 1 468
1 0 449
1 1 311
1 1 486
1 1 436
1 1 457
1 1 401
1 1 425
1 1 676
1 1 455
1 1 716
1 1 525
1 1 406
1 1 526
1 1 515
1 1 460
1 1 355
1 1 430
1 1 397
1 1 409
1 1 505
1 1 564
1 1 321
1 1 887
1 1 449
1 1 774
1 1 1007
1 1 551
1 1 629
1 1 540
1 1 644
1 1 407
1 1 537
1 1 408
1 1 653
1 1 823
1 1 317
1 1 380
1 1 697
1 1 637
1 1 581
1 1 342
1 1 370
1 1 284
1 1 367
1 1 527
1 1 513
1 1 336
1 1 320
1 1 473
1 1 571
1 1 422
1 1 921
1 1 455
1 1 387
1 1 354
1 1 633
1 1 640
1 1 574
1 1 689
1 1 784
1 1 685
1 1 491
1 1 363
1 1 343
1 1 554
1 1 457
1 1 690
1 1 287
1 1 521
1 1 870
1 1 429
1 1 343
1 1 401
1 1 338
1 1 670
1 1 559
1 1 409
1 1 465
1 1 404
1 1 610
1 1 536
1 1 429
1 1 341
1 1 591
1 1 425
1 1 519
1 1 439
1 0 594
1 1 401
1 1 429
1 1 625
1 0 552
1 1 311
1 0 548
1 1 516
1 1 448
1 1 579
1 1 502
1 1 541
1 1 525
1 1 290
1 1 395
1 1 552
1 1 315
1 1 405
1 1 369
1 0 387
1 1 385
1 1 390
1 1 322
1 1 383
1 1 622
1 1 316
1 1 315
1 1 823
1 1 899
1 1 300
1 1 554
1 1 403
1 1 1130
1 1 518
1 1 681
1 1 696
1 1 372
1 1 680
1 1 537
1 1 315
1 1 796
1 1 336
1 0 659
1 1 613
1 1 572
1 1 462
1 1 543
1 1 888
1 1 305
1 1 719
1 1 341
1 1 542
1 1 308
1 1 335
1 1 506
1 1 278
1 0 688
1 1 389
1 1 376
1 1 456
1 0 1230
1 1 451
1 1 421
1 1 847
1 1 533
1 1 408
1 1 421
1 1 478
1 1 535
1 1 346
1 1 569
1 1 521
1 1 555
1 1 542
1 1 372
1 1 288
1 1 472
1 1 529
1 1 485
1 1 463
1 1 412
1 1 432
1 1 534
1 1 296
1 1 725
1 1 370
1 0 460
1 1 497
1 1 698
1 1 454
1 1 478
1 1 323
1 1 870
1 1 587
1 1 393
1 1 658
1 1 431
1 1 557
1 1 515
1 0 562
1 1 565
1 1 469
1 1 455
1 1 342
1 1 374
1 1 572
1 1 406
1 1 507
1 1 652
1 1 305
1 1 357
1 1 575
1 1 548
1 1 1112
1 1 537
1 1 532
1 1 634
1 1 599
1 1 345
1 1 424
1 1 271
1 1 448
1 1 490
1 1 324
1 0 471
1 1 471
1 1 557
1 1 800
1 1 472
1 1 446
1 1 606
1 1 440
1 1 563
1 1 391
1 1 450
1 1 404
1 1 454
1 1 856
1 1 599
1 1 416
1 1 370
1 1 488
1 1 840
1 1 523
1 1 324
1 1 605
1 1 508
1 1 445
1 1 454
1 1 630
1 1 625
1 1 597
1 1 326
1 1 367
1 1 480
1 1 520
1 1 510
1 1 289
1 1 286
1 1 723
1 1 317
1 1 431
1 1 521
1 1 277
1 1 418
1 1 663
1 1 595
1 1 568
1 1 419
1 1 486
1 1 523
1 1 471
1 1 395
1 1 462
1 1 412
1 1 395
1 1 406
1 1 418
1 1 399
1 0 429
1 1 539
1 0 489
1 1 951
1 1 468
1 1 492
1 1 347
1 1 479
1 1 318
1 1 280
1 1 524
1 1 631
1 1 473
1 1 737
1 0 725
1 1 522
1 1 519
1 1 545
1 1 523
1 0 536
1 0 421
1 1 463
1 1 610
1 1 341
1 1 428
1 1 407
1 1 536
1 1 519
1 1 339
1 1 545
1 1 437
1 1 412
1 1 463
1 1 334
1 1 598
1 1 399
1 1 491
1 1 570
1 1 487
1 1 469
1 1 424
1 1 368
1 1 652
1 1 418
1 1 520
1 1 394
1 1 670
1 1 321
1 1 316
1 1 384
1 1 380
1 1 332
1 1 775
1 0 689
1 1 383
1 1 358
1 1 318
1 1 356
1 1 493
1 1 535
1 1 515
1 1 455
1 0 485
1 1 500
1 1 492
1 1 423
1 1 379
1 1 439
1 1 381
1 1 471
1 1 341
1 1 868
1 1 593
1 1 551
1 1 386
1 1 322
1 1 647
1 1 389
1 1 365
1 1 432
1 1 357
1 1 412
1 1 541
1 1 499
1 1 951
1 1 404
1 1 419
1 1 497
1 1 600
1 1 337
1 1 416
1 1 620
1 1 321
1 1 438
1 1 585
1 1 384
1 1 442
1 1 347
1 1 937
1 1 1111
1 1 578
1 1 702
1 1 326
1 1 479
1 1 390
1 1 326
1 1 472
1 1 475
1 1 317
1 1 524
1 1 345
1 1 597
1 1 444
1 1 515
1 1 306
1 1 690
1 1 467
1 0 403
1 1 340
1 1 398
1 1 331
1 1 318
1 1 531
1 1 376
1 1 1031
1 1 501
1 1 295
1 1 416
1 1 513
1 1 319
1 1 467
1 1 302
1 0 474
1 1 400
1 1 434
1 1 472
1 1 533
1 1 451
1 1 455
1 1 700
1 1 494
1 1 311
1 1 754
1 1 531
1 1 627
1 1 294
1 1 490
1 1 608
1 1 660
1 1 498
1 1 506
1 1 667
1 1 472
1 1 392
1 1 807
1 1 2030
1 0 463
1 1 574
1 1 382
1 1 477
1 1 495
1 1 528
1 1 642
1 1 389
1 1 398
1 1 395
1 1 939
1 1 384
1 1 489
1 1 310
1 1 593
1 1 653
1 1 672
1 1 498
1 1 711
1 1 576
1 1 421
1 1 465
1 1 553
1 1 625
1 1 432
1 1 513
1 1 421
1 1 303
1 1 430
1 1 307
1 1 454
1 1 294
1 1 423
1 1 444
1 1 464
1 1 477
1 1 459
1 1 394
1 1 446
1 1 662
1 1 416
1 1 513
1 1 449
1 0 775
1 1 580
1 1 476
1 1 495
1 1 545
1 1 571
1 1 471
1 1 366
1 1 542
1 1 350
1 1 451
1 1 503
1 1 468
1 1 332
1 1 515
1 1 361
1 1 357
1 1 414
1 1 311
1 1 681
1 1 524
1 1 311
1 1 288
1 1 644
1 1 1114
1 1 532
1 1 537
1 1 471
1 1 642
1 1 353
1 1 495
1 1 521
1 1 479
1 1 376
1 0 403
1 0 518
1 1 411
1 1 506
1 1 504
1 1 440
1 1 398
1 1 342
1 0 546
1 1 375
1 1 391
1 1 363
1 1 461
1 1 475
1 1 413
1 1 558
1 1 421
1 0 675
1 1 524
1 1 485
1 1 583
1 1 382
1 1 521
1 1 491
1 1 473
1 1 551
1 1 825
1 1 644
1 1 330
1 1 409
1 1 764
1 1 486
1 1 466
1 1 384
1 1 377
1 1 487
1 1 589
1 1 685
1 1 385
1 1 765
1 1 472
1 1 346
1 1 449
1 1 378
1 1 297
1 1 304
1 1 675
1 1 470
1 1 397
1 1 975
1 1 364
1 1 892
1 1 442
1 1 334
1 1 338
1 1 395
1 1 326
1 1 350
1 1 542
1 1 577
1 1 354
1 1 426
1 1 609
1 1 478
1 1 404
1 1 615
1 1 563
1 1 767
1 1 388
1 1 391
1 1 434
1 1 426
1 1 296
1 1 305
1 1 614
1 1 443
1 1 439
1 1 530
1 1 490
1 1 343
1 1 728
1 1 502
1 1 673
1 1 453
1 1 524
1 1 388
1 1 303
1 1 476
1 1 1162
1 1 435
1 1 454
1 1 357
1 1 535
1 1 378
1 1 505
1 0 352
1 1 314
1 1 347
1 1 360
1 1 849
1 1 937
1 1 593
1 1 464
1 1 338
1 1 407
1 1 380
1 1 557
1 1 394
1 1 477
1 1 483
1 1 428
1 1 578
1 0 943
1 1 537
1 1 503
1 1 485
1 1 417
1 1 698
1 1 451
1 1 433
1 1 525
1 1 322
1 1 375
1 1 361
1 0 597
1 1 1310
1 1 431
1 1 627
1 1 317
1 1 488
1 1 494
1 1 406
1 1 469
1 1 703
1 1 484
1 1 453
1 1 315
1 1 671
1 1 679
1 1 435
1 1 330
1 1 358
1 1 310
1 0 591
1 1 469
1 1 380
1 1 431
1 1 467
1 1 871
1 1 393
1 1 967
1 1 469
1 1 363
1 1 497
1 1 407
1 1 423
1 1 474
1 1 305
1 1 570
1 1 1041
1 1 518
1 1 345
1 1 306
1 1 519
1 1 548
1 1 317
1 1 752
1 1 637
1 1 394
1 1 569
1 1 601
1 1 526
1 1 444
1 1 505
1 1 326
1 1 558
1 0 458
1 1 359
1 1 439
1 1 683
1 1 393
1 1 495
1 1 412
1 1 494
1 1 463
1 1 521
1 1 330
1 1 477
1 1 493
1 1 417
1 1 450
1 1 669
1 1 409
1 1 797
1 1 447
1 1 497
1 1 470
1 0 697
1 1 384
1 1 465
1 1 333
1 1 350
1 0 859
1 1 457
1 1 327
1 1 542
1 1 1043
1 1 360
1 1 386
1 1 526
1 1 724
1 1 505
1 1 404
1 1 459
1 1 536
1 1 343
1 1 599
1 1 316
1 1 606
1 1 372
1 1 353
1 1 748
1 1 383
1 1 499
1 1 550
1 1 648
1 0 468
1 1 527
1 1 594
1 1 388
1 1 414
1 1 483
1 1 773
1 1 335
1 1 422
1 1 406
1 1 324
1 1 389
1 1 424
1 1 514
1 1 345
1 1 308
1 1 642
1 1 423
1 1 429
1 1 500
1 1 507
1 1 507
1 0 702
1 1 334
1 1 408
1 1 888
1 1 491
1 1 719
1 1 663
1 1 434
1 1 725
1 1 518
1 1 302
1 1 378
1 1 486
1 1 531
1 1 304
1 1 560
1 1 308
1 1 505
1 1 581
1 1 517
1 1 531
1 1 421
1 1 343
1 1 468
1 1 513
1 0 489
1 1 477
1 1 342
1 1 460
1 1 417
1 1 613
1 1 478
1 1 432
1 1 344
1 1 716
1 1 428
1 1 633
1 1 792
1 1 632
1 1 625
1 1 319
1 1 311
1 1 618
1 1 447
1 1 538
1 1 425
1 1 454
1 1 656
1 1 511
1 1 763
1 1 620
1 1 1485
1 1 356
1 1 483
1 1 580
1 1 443
1 1 534
1 1 480
1 1 642
1 1 681
1 0 399
1 1 365
1 1 388
1 1 420
1 1 725
1 1 444
1 1 344
1 1 518
1 1 550
1 1 452
1 1 846
1 1 539
1 1 383
1 1 436
1 1 312
1 1 648
1 1 612
1 1 541
1 1 499
1 1 458
1 1 382
1 1 422
1 1 297
1 1 450
1 1 649
1 1 348
1 1 538
1 0 566
1 1 576
1 1 424
1 1 298
1 1 477
1 1 435
1 1 559
1 1 417
1 1 539
1 1 534
1 1 302
1 1 482
1 1 653
1 1 480
1 0 354
1 1 494
1 1 582
1 1 443
1 1 317
1 1 650
1 1 641
1 1 426
1 1 774
1 1 504
1 1 596
1 1 455
1 1 648
1 1 923
1 1 392
1 1 285
1 1 703
1 1 463
1 1 327
1 1 422
1 1 556
1 1 638
1 1 492
1 1 456
1 1 600
1 1 315
1 1 620
1 1 547
1 1 586
1 1 530
1 1 400
1 1 617
1 1 378
1 1 619
1 1 410
1 1 347
1 1 512
1 1 373
1 1 464
1 1 460
1 1 700
1 1 535
1 1 626
1 0 816
1 1 464
1 1 621
1 1 413
1 1 451
1 1 626
1 1 378
1 1 332
1 1 597
1 1 1006
1 1 480
1 1 432
1 1 588
1 1 390
1 1 647
1 0 1045
1 1 383
2 1 727
2 1 472
2 0 511
2 1 492
2 1 432
2 1 551
2 0 445
2 1 341
2 1 718
2 1 398
2 1 588
2 1 602
2 1 496
2 1 414
2 0 1010
2 1 382
2 1 1523
2 1 521
2 1 609
2 1 311
2 1 552
2 1 660
2 0 774
2 0 418
2 1 340
2 0 532
2 1 495
2 1 613
2 1 427
2 1 611
2 1 545
2 1 342
2 1 433
2 1 460
2 1 360
2 1 691
2 1 547
2 0 473
2 1 337
2 1 606
2 0 674
2 1 665
2 1 747
2 1 814
2 1 539
2 1 542
2 1 423
2 0 449
2 1 481
2 1 1040
2 1 390
2 1 1609
2 1 423
2 0 616
2 1 462
2 1 646
2 1 447
2 1 504
2 1 441
2 0 540
2 1 350
2 1 327
2 1 619
2 1 428
2 1 338
2 1 378
2 1 535
2 1 628
2 1 573
2 1 406
2 1 658
2 1 511
2 1 478
2 1 500
2 1 563
2 1 466
2 1 404
2 1 695
2 1 285
2 0 930
2 1 1180
2 1 406
2 1 497
2 1 397
2 1 458
2 1 654
2 1 725
2 1 509
2 1 402
2 1 581
2 1 569
2 1 894
2 1 434
2 0 609
2 1 513
2 1 401
2 1 512
2 1 963
2 1 1022
2 1 411
2 1 630
2 1 518
2 1 307
2 1 429
2 1 406
2 1 694
2 0 2165
2 1 328
2 1 418
2 1 605
2 1 434
2 1 359
2 1 330
2 0 761
2 1 476
2 1 736
2 1 348
2 1 361
2 1 997
2 1 524
2 1 487
2 1 357
2 1 458
2 1 324
2 1 566
2 1 495
2 1 407
2 1 710
2 1 645
2 1 466
2 1 514
2 1 416
2 1 584
2 1 347
2 0 828
2 1 639
2 1 511
2 1 633
2 0 723
2 1 807
2 1 342
2 1 450
2 1 583
2 1 425
2 1 365
2 0 382
2 1 314
2 1 337
2 1 537
2 1 505
2 1 460
2 1 480
2 1 658
2 1 659
2 0 386
2 0 393
2 1 374
2 1 704
2 1 436
2 0 309
2 0 343
2 1 612
2 1 532
2 1 331
2 1 328
2 0 719
2 1 827
2 1 501
2 1 703
2 0 959
2 1 435
2 1 488
2 1 631
2 0 426
2 1 586
2 1 358
2 1 588
2 1 476
2 1 778
2 1 641
2 1 477
2 1 687
2 1 451
2 1 584
2 1 372
2 1 373
2 1 535
2 0 426
2 1 1095
2 1 506
2 1 547
2 0 505
2 1 554
2 1 457
2 1 796
2 1 353
2 1 641
2 1 653
2 1 547
2 0 502
2 1 343
2 1 370
2 1 480
2 1 428
2 1 762
2 1 446
2 0 1084
2 1 867
2 1 668
2 1 424
2 1 689
2 1 321
2 1 427
2 1 565
2 1 344
2 1 456
2 1 402
2 1 555
2 0 552
2 1 803
2 1 602
2 1 539
2 1 543
2 1 340
2 1 416
2 1 613
2 1 887
2 1 492
2 0 460
2 1 619
2 1 588
2 0 411
2 0 456
2 0 601
2 0 421
2 0 524
2 1 385
2 1 509
2 1 398
2 1 439
2 1 360
2 1 370
2 1 547
2 0 630
2 1 374
2 1 665
2 1 836
2 1 492
2 1 1581
2 1 569
2 1 607
2 1 549
2 1 494
2 1 524
2 1 554
2 1 540
2 1 627
2 1 689
2 1 391
2 1 902
2 1 339
2 1 595
2 1 492
2 1 545
2 1 719
2 1 323
2 1 444
2 1 375
2 1 698
2 1 485
2 1 408
2 1 720
2 0 655
2 1 969
2 1 372
2 1 511
2 1 450
2 0 536
2 0 408
2 1 546
2 1 843
2 1 625
2 1 431
2 1 457
2 1 374
2 1 614
2 1 532
2 1 427
2 1 345
2 1 456
2 1 733
2 1 577
2 1 496
2 1 387
2 1 514
2 0 628
2 1 659
2 1 606
2 1 518
2 0 658
2 1 559
2 1 426
2 0 467
2 1 2128
2 0 697
2 1 742
2 1 411
2 1 834
2 1 495
2 1 415
2 1 655
2 1 700
2 1 574
2 1 543
2 1 336
2 1 632
2 1 610
2 1 661
2 1 428
2 1 706
2 1 1856
2 1 649
2 1 522
2 1 650
2 1 390
2 1 654
2 1 483
2 1 312
2 1 535
2 1 748
2 1 528
2 1 392
2 1 409
2 1 517
2 0 416
2 0 345
2 1 491
2 1 681
2 1 781
2 1 604
2 1 635
2 1 382
2 1 594
2 1 462
2 1 382
2 1 448
2 0 478
2 1 657
2 1 416
2 1 394
2 1 555
2 1 591
2 0 458
2 1 589
2 1 479
2 1 455
2 1 640
2 1 350
2 0 498
2 0 643
2 1 638
2 1 507
2 1 773
2 0 575
2 1 589
2 1 465
2 1 527
2 1 665
2 1 988
2 0 820
2 1 408
2 1 487
2 1 419
2 1 469
2 0 471
2 1 721
2 1 841
2 1 560
2 1 330
2 1 626
2 1 376
2 1 568
2 1 467
2 1 542
2 1 430
2 1 3209
2 1 374
2 1 510
2 1 603
2 1 422
2 1 344
2 1 595
2 1 819
2 1 312
2 1 534
2 1 670
2 1 579
2 0 609
2 1 451
2 1 571
2 1 315
2 1 897
2 1 604
2 1 975
2 1 449
2 1 434
2 1 483
2 1 668
2 1 534
2 1 510
2 1 392
2 1 572
2 1 605
2 1 343
2 1 584
2 1 426
2 1 598
2 1 745
2 1 304
2 1 675
2 1 336
2 1 560
2 0 677
2 1 332
2 1 375
2 1 463
2 1 315
2 1 590
2 1 642
2 1 622
2 1 615
2 1 481
2 1 383
2 1 366
2 1 1375
2 1 616
2 1 430
2 1 494
2 1 462
2 1 435
2 0 527
2 1 547
2 1 415
2 1 416
2 1 391
2 1 289
2 1 597
2 0 526
2 0 462
2 0 1229
2 1 626
2 1 668
2 1 522
2 1 637
2 1 481
2 1 504
2 1 393
2 1 527
2 1 440
2 1 520
2 0 820
2 1 723
2 0 465
2 0 617
2 1 423
2 1 403
2 1 619
2 1 462
2 1 311
2 1 579
2 1 416
2 1 380
2 1 675
2 1 562
2 0 710
2 0 417
2 1 471
2 0 369
2 1 383
2 1 551
2 1 570
2 1 377
2 1 519
2 1 645
2 1 779
2 1 369
2 1 498
2 1 451
2 1 614
2 1 623
2 1 722
2 1 436
2 1 1056
2 1 360
2 1 368
2 1 531
2 1 844
2 1 464
2 1 395
2 0 717
2 0 465
2 1 510
2 1 510
2 1 416
2 0 430
2 1 625
2 1 574
2 1 805
2 1 568
2 1 670
2 0 480
2 1 519
2 1 362
2 1 1184
2 1 419
2 1 728
2 1 582
2 1 767
2 1 397
2 1 642
2 1 470
2 1 838
2 1 491
2 1 448
2 1 426
2 1 548
2 1 759
2 1 389
2 1 460
2 1 513
2 1 569
2 1 627
2 1 387
2 1 511
2 0 317
2 1 452
2 1 550
2 1 630
2 0 402
2 1 631
2 1 381
2 1 311
2 1 318
2 1 469
2 1 304
2 1 920
2 1 305
2 1 505
2 1 752
2 1 371
2 1 583
2 1 602
2 0 462
2 1 493
2 0 520
2 1 632
2 1 362
2 1 573
2 1 369
2 1 926
2 1 739
2 1 571
2 1 498
2 1 478
2 1 392
2 1 765
2 1 576
2 1 420
2 1 284
2 1 468
2 1 478
2 1 391
2 1 622
2 1 783
2 1 508
2 1 486
2 0 650
2 1 495
2 1 329
2 0 682
2 1 390
2 1 506
2 1 362
2 1 852
2 1 430
2 0 673
2 0 429
2 1 380
2 1 402
2 1 611
2 1 518
2 1 329
2 1 329
2 1 536
2 1 530
2 1 454
2 0 479
2 1 674
2 1 545
2 1 814
2 1 409
2 0 472
2 1 866
2 1 524
2 1 487
2 1 448
2 1 462
2 0 440
2 1 496
2 1 412
2 1 525
2 1 845
2 1 671
2 1 610
2 1 381
2 1 754
2 1 581
2 1 431
2 1 483
2 1 408
2 1 532
2 1 285
2 1 479
2 1 340
2 1 716
2 1 502
2 1 442
2 1 651
2 1 489
2 1 365
2 1 427
2 1 451
2 0 321
2 0 450
2 1 515
2 1 585
2 1 459
2 1 428
2 1 437
2 1 1140
2 1 777
2 1 857
2 1 454
2 1 360
2 1 507
2 1 538
2 1 463
2 1 648
2 1 865
2 0 448
2 0 523
2 1 563
2 1 531
2 1 513
2 0 727
2 1 316
2 1 403
2 1 453
2 1 344
2 0 562
2 1 352
2 1 749
2 1 793
2 1 704
2 1 437
2 1 646
2 1 537
2 1 749
2 0 442
2 1 364
2 1 408
2 1 346
2 1 708
2 1 354
2 0 574
2 1 334
2 0 584
2 1 1617
2 1 298
2 1 343
2 1 1845
2 1 586
2 1 595
2 1 548
2 1 857
2 0 440
2 1 482
2 1 512
2 1 538
2 1 372
2 1 459
2 1 606
2 1 654
2 1 376
2 1 785
2 0 464
2 1 888
2 1 319
2 1 455
2 1 415
2 1 565
2 1 370
2 1 577
2 0 351
2 1 438
2 1 441
2 1 575
2 1 703
2 0 688
2 1 469
2 0 333
2 1 362
2 0 454
2 1 429
2 1 620
2 1 440
2 1 486
2 1 645
2 0 559
2 1 379
2 1 638
2 1 565
2 1 373
2 1 542
2 1 424
2 1 444
2 1 605
2 1 461
2 0 427
2 1 588
2 1 1034
2 1 418
2 1 347
2 1 363
2 0 447
2 1 439
2 1 701
2 1 410
2 1 564
2 1 328
2 1 1595
2 1 483
2 1 479
2 1 520
2 0 1181
2 1 696
2 1 558
2 1 697
2 1 513
2 0 500
2 1 494
2 1 344
2 1 524
2 1 598
2 1 597
2 1 581
2 1 554
2 1 779
2 1 499
2 1 452
2 1 376
2 1 475
2 1 686
2 1 555
2 1 548
2 1 438
2 1 436
2 1 406
2 1 332
2 0 347
2 1 402
2 1 412
2 1 532
2 0 811
2 0 687
2 0 452
2 1 419
2 1 339
2 1 590
2 1 382
2 0 902
2 1 588
2 1 517
2 1 606
2 1 524
2 1 669
2 1 511
2 1 417
2 1 507
2 1 524
2 1 478
2 0 353
2 1 301
2 1 539
2 1 551
2 1 429
2 1 859
2 1 764
2 1 362
2 1 589
2 0 930
2 1 295
2 1 680
2 1 511
2 1 547
2 1 511
2 1 461
2 1 707
2 0 500
2 1 552
2 1 952
2 1 450
2 0 548
2 1 385
2 0 559
2 0 381
2 1 798
2 0 462
2 1 355
2 1 471
2 1 441
2 0 416
2 1 589
2 1 513
2 1 583
2 1 341
2 1 423
2 1 627
2 1 540
2 0 614
2 0 492
2 1 403
2 1 421
2 1 718
2 0 686
2 0 496
2 1 560
2 1 719
2 1 534
2 1 1151
2 1 572
2 1 620
2 0 357
2 1 592
2 1 398
2 1 417
2 1 567
2 1 738
2 0 797
2 1 549
2 1 592
2 1 597
2 1 880
2 1 699
2 1 321
2 1 549
2 1 490
2 1 629
2 0 543
2 0 618
2 1 1045
2 1 624
2 0 498
2 1 465
2 1 507
2 1 406
2 1 431
2 0 475
2 1 404
2 0 527
2 0 876
2 1 710
2 1 356
2 1 629
2 1 683
2 1 424
2 1 562
2 1 378
2 1 577
2 0 610
2 1 1027
2 1 515
2 1 609
2 1 418
2 0 432
2 1 322
2 1 418
2 1 359
2 1 750
2 1 596
2 1 393
2 1 394
2 0 519
2 1 484
2 1 794
2 1 404
2 1 553
2 1 446
2 1 2194
2 1 734
2 0 442
2 1 291
2 1 345
2 1 463
2 1 318
2 1 407
2 1 635
2 1 341
2 1 338
2 1 334
2 1 558
2 1 297
2 1 699
2 1 485
2 1 512
2 1 346
2 1 381
2 1 495
2 0 453
2 1 576
2 1 418
2 1 473
2 1 753
2 1 673
2 1 404
2 1 971
2 1 440
2 1 476
2 1 330
2 0 475
2 1 409
2 1 899
2 1 641
2 1 707
2 1 511
2 1 333
2 0 579
2 1 566
2 1 439
2 0 598
2 1 538
2 1 418
2 1 480
2 1 573
2 1 589
2 1 395
2 1 443
2 1 560
2 1 426
2 1 355
2 1 610
2 1 461
2 1 576
2 1 493
2 1 335
2 1 450
2 1 381
2 1 494
2 1 373
2 1 695
2 1 328
2 1 488
2 1 412
2 1 560
2 1 456
2 1 532
2 1 439
2 1 691
2 1 468
2 0 532
2 1 408
2 0 975
2 1 495
2 0 1156
2 1 500
2 1 687
2 1 453
2 1 980
2 1 338
2 1 483
2 1 576
2 1 456
2 1 671
2 1 470
2 1 317
2 1 344
2 1 621
2 1 441
2 1 625
2 1 422
2 1 638
2 1 558
2 1 323
2 1 371
2 0 473
2 1 659
2 1 354
2 1 490
2 1 335
2 1 403
3 0 976
3 1 372
3 1 467
3 0 863
3 1 425
3 1 685
3 1 506
3 1 462
3 1 2425
3 0 627
3 1 381
3 1 382
3 1 568
3 1 592
3 1 394
3 0 485
3 1 497
3 0 1116
3 1 1266
3 0 739
3 1 490
3 1 434
3 0 902
3 1 381
3 0 489
3 1 400
3 1 778
3 1 426
3 1 419
3 1 616
3 0 513
3 1 603
3 0 644
3 0 785
3 0 431
3 0 612
3 1 677
3 0 643
3 0 568
3 0 562
3 0 841
3 0 730
3 1 594
3 1 655
3 0 717
3 1 601
3 1 482
3 0 422
3 0 597
3 1 626
3 0 461
3 1 602
3 0 454
3 1 593
3 0 446
3 0 376
3 1 513
3 1 587
3 1 838
3 1 644
3 1 801
3 1 336
3 1 587
3 1 595
3 1 541
3 0 597
3 1 716
3 0 643
3 0 447
3 1 944
3 1 725
3 0 601
3 0 324
3 1 456
3 1 563
3 0 428
3 1 451
3 1 464
3 0 1519
3 0 459
3 1 728
3 0 896
3 1 672
3 1 542
3 1 460
3 0 688
3 1 555
3 1 655
3 0 343
3 0 608
3 1 623
3 1 436
3 0 506
3 1 397
3 0 480
3 0 1897
3 1 563
3 1 642
3 0 369
3 0 668
3 1 451
3 1 1039
3 1 331
3 1 383
3 0 544
3 0 342
3 1 471
3 0 733
3 1 300
3 1 427
3 0 384
3 1 426
3 1 446
3 1 746
3 1 369
3 1 293
3 1 396
3 1 445
3 0 2650
3 0 714
3 0 506
3 1 346
3 0 509
3 0 390
3 1 1918
3 1 1317
3 1 526
3 1 431
3 1 683
3 1 602
3 0 454
3 0 582
3 1 731
3 1 431
3 1 745
3 1 312
3 0 648
3 1 472
3 1 680
3 1 740
3 1 411
3 1 375
3 1 476
3 0 372
3 1 461
3 1 329
3 1 437
3 1 397
3 1 657
3 1 608
3 1 379
3 0 535
3 0 605
3 0 468
3 1 409
3 1 511
3 0 400
3 1 1198
3 1 524
3 1 427
3 0 801
3 1 1029
3 1 451
3 0 2314
3 1 366
3 1 686
3 1 640
3 0 1008
3 1 421
3 1 498
3 0 379
3 1 926
3 1 555
3 1 482
3 0 531
3 1 539
3 1 486
3 1 431
3 1 632
3 1 419
3 0 699
3 1 759
3 1 456
3 1 449
3 0 822
3 0 806
3 1 453
3 0 1239
3 0 2112
3 1 717
3 1 537
3 1 377
3 1 720
3 1 664
3 1 1793
3 0 493
3 1 410
3 1 431
3 1 485
3 1 463
3 0 1589
3 1 688
3 1 472
3 1 420
3 1 620
3 1 724
3 1 345
3 1 487
3 1 397
3 1 746
3 0 442
3 1 644
3 1 391
3 1 553
3 0 818
3 1 999
3 1 857
3 0 523
3 1 393
3 1 413
3 1 547
3 1 662
3 0 533
3 1 521
3 0 803
3 1 480
3 1 385
3 0 535
3 1 320
3 1 426
3 1 548
3 0 650
3 1 560
3 1 667
3 1 437
3 0 506
3 0 1207
3 1 418
3 0 460
3 1 1024
3 0 465
3 0 413
3 0 539
3 1 821
3 0 657
3 1 540
3 0 486
3 1 641
3 1 667
3 1 602
3 1 826
3 1 482
3 1 1192
3 1 736
3 0 598
3 0 491
3 0 529
3 0 492
3 1 545
3 1 650
3 1 352
3 1 428
3 1 673
3 0 1494
3 1 467
3 1 605
3 0 695
3 0 421
3 1 408
3 1 507
3 1 557
3 1 588
3 1 522
3 1 502
3 0 918
3 1 644
3 1 570
3 0 653
3 1 578
3 1 485
3 1 509
3 0 516
3 1 412
3 1 769
3 1 533
3 1 564
3 1 504
3 1 448
3 0 838
3 0 1038
3 1 526
3 1 478
3 1 599
3 0 631
3 1 542
3 0 568
3 1 1634
3 0 917
3 1 398
3 0 1145
3 1 425
3 1 517
3 1 791
3 1 853
3 1 416
3 1 498
3 0 966
3 1 457
3 1 442
3 0 1947
3 1 881
3 0 623
3 0 657
3 1 404
3 1 491
3 1 490
3 1 510
3 1 509
3 0 723
3 1 398
3 0 452
3 0 434
3 1 345
3 0 487
3 1 727
3 1 335
3 0 394
3 0 763
3 0 519
3 1 555
3 1 366
3 1 735
3 0 385
3 0 685
3 1 570
3 0 457
3 0 434
3 1 658
3 0 337
3 1 489
3 1 511
3 1 369
3 0 396
3 1 444
3 0 401
3 1 543
3 0 861
3 1 766
3 0 468
3 1 641
3 1 502
3 1 426
3 1 667
3 1 521
3 1 358
3 1 407
3 1 517
3 0 472
3 1 338
3 1 861
3 1 742
3 1 585
3 1 629
3 0 783
3 0 712
3 0 834
3 0 454
3 1 587
3 1 664
3 1 499
3 1 434
3 1 484
3 1 595
3 1 1317
3 1 1146
3 0 731
3 1 354
3 0 901
3 0 838
3 1 359
3 1 515
3 1 533
3 1 383
3 1 787
3 0 732
3 1 540
3 1 499
3 0 718
3 1 369
3 1 309
3 1 724
3 0 446
3 1 531
3 0 938
3 1 402
3 1 649
3 1 513
3 0 454
3 1 383
3 1 396
3 1 488
3 0 358
3 1 508
3 1 696
3 1 517
3 1 585
3 0 494
3 1 546
3 1 616
3 0 1621
3 1 338
3 0 456
3 1 320
3 0 1361
3 1 396
3 1 450
3 1 706
3 1 575
3 1 303
3 1 455
3 1 625
3 1 778
3 1 335
3 1 881
3 1 458
3 1 603
3 1 742
3 1 364
3 1 339
3 1 534
3 1 483
3 0 780
3 1 411
3 1 416
3 1 702
3 1 438
3 1 919
3 1 556
3 1 784
3 0 669
3 1 404
3 0 511
3 0 924
3 1 642
3 1 639
3 1 473
3 1 539
3 0 815
3 1 327
3 0 688
3 0 478
3 1 384
3 1 646
3 0 420
3 0 469
3 0 473
3 0 431
3 1 644
3 1 589
3 1 357
3 1 1112
3 0 515
3 1 2423
3 0 565
3 1 394
3 1 1218
3 1 387
3 1 625
3 0 392
3 0 1209
3 1 572
3 1 365
3 1 550
3 1 657
3 0 468
3 0 541
3 0 434
3 0 412
3 1 518
3 1 544
3 1 521
3 0 669
3 0 588
3 0 468
3 1 443
3 1 596
3 0 523
3 1 624
3 1 498
3 1 515
3 1 496
3 1 686
3 1 583
3 0 672
3 1 352
3 0 447
3 1 692
3 1 467
3 0 1003
3 1 1264
3 1 655
3 1 667
3 1 656
3 1 531
3 0 679
3 1 570
3 1 391
3 1 422
3 1 599
3 1 356
3 1 1194
3 1 625
3 1 519
3 0 518
3 1 509
3 1 1535
3 0 675
3 1 461
3 1 382
3 1 325
3 0 713
3 0 904
3 1 551
3 1 934
3 1 1239
3 1 554
3 1 753
3 0 577
3 0 843
3 1 702
3 1 392
3 0 414
3 1 369
3 1 306
3 0 464
3 0 453
3 0 693
3 1 785
3 1 368
3 0 527
3 0 743
3 0 498
3 1 565
3 0 890
3 0 672
3 0 481
3 1 473
3 1 432
3 1 639
3 1 525
3 1 430
3 0 632
3 1 468
3 1 397
3 0 504
3 0 425
3 1 360
3 1 584
3 1 485
3 0 857
3 1 812
3 0 600
3 0 1003
3 1 595
3 0 400
3 1 764
3 1 683
3 1 96131
3 1 1338
3 1 763
3 0 466
3 1 369
3 1 731
3 0 600
3 0 576
3 0 873
3 0 517
3 0 817
3 1 440
3 1 512
3 1 426
3 1 655
3 0 581
3 0 439
3 1 504
3 0 754
3 1 630
3 1 758
3 1 405
3 1 682
3 0 969
3 1 437
3 1 468
3 0 451
3 1 459
3 1 673
3 1 783
3 1 565
3 1 829
3 1 638
3 1 489
3 0 558
3 1 843
3 1 444
3 0 652
3 1 401
3 1 480
3 1 531
3 1 718
3 1 571
3 0 699
3 1 726
3 1 672
3 1 501
3 0 616
3 1 587
3 1 421
3 0 388
3 1 397
3 0 452
3 1 688
3 0 799
3 0 1041
3 0 686
3 1 568
3 0 535
3 1 524
3 1 395
3 0 511
3 1 829
3 0 535
3 1 687
3 0 501
3 0 420
3 1 362
3 1 605
3 1 575
3 1 872
3 1 656
3 1 654
3 1 556
3 1 712
3 1 407
3 0 383
3 1 400
3 1 558
3 1 661
3 1 770
3 1 322
3 1 298
3 0 1130
3 1 672
3 1 486
3 1 354
3 1 865
3 1 1040
3 1 559
3 1 639
3 1 504
3 1 593
3 1 581
3 1 401
3 1 435
3 1 965
3 1 776
3 1 432
3 1 467
3 0 499
3 1 522
3 0 411
3 1 306
3 1 928
3 0 779
3 1 828
3 1 697
3 1 397
3 0 529
3 0 627
3 1 439
3 1 833
3 1 644
3 1 441
3 0 400
3 1 586
3 1 439
3 0 390
3 1 403
3 0 505
3 1 627
3 0 597
3 1 636
3 1 366
3 1 348
3 1 511
3 0 463
3 0 456
3 0 502
3 1 819
3 1 478
3 0 514
3 1 549
3 1 583
3 0 1076
3 1 1254
3 1 696
3 0 605
3 0 429
3 1 701
3 1 569
3 1 332
3 1 535
3 1 674
3 0 488
3 1 385
3 0 488
3 1 576
3 0 605
3 0 583
3 0 772
3 1 666
3 1 417
3 0 734
3 1 3548
3 1 633
3 1 566
3 0 603
3 1 599
3 0 394
3 0 519
3 1 478
3 0 400
3 0 500
3 1 512
3 1 314
3 1 526
3 1 485
3 1 621
3 1 507
3 1 371
3 1 402
3 0 880
3 1 520
3 0 831
3 1 727
3 1 666
3 1 453
3 1 1397
3 1 368
3 0 559
3 1 374
3 1 954
3 1 635
3 1 488
3 1 769
3 1 532
3 0 527
3 1 370
3 1 569
3 1 857
3 1 1184
3 1 597
3 1 416
3 0 566
3 0 604
3 1 608
3 1 486
3 1 355
3 1 448
3 1 365
3 1 822
3 1 1351
3 1 558
3 1 332
3 0 292
3 0 647
3 1 669
3 1 869
3 1 395
3 1 967
3 1 482
3 0 595
3 0 600
3 0 438
3 0 400
3 0 584
3 1 524
3 1 417
3 1 665
3 1 539
3 1 305
3 1 526
3 1 792
3 1 666
3 0 651
3 0 720
3 1 339
3 0 836
3 1 704
3 1 1402
3 0 1166
3 1 462
3 1 501
3 0 378
3 1 606
3 0 410
3 1 365
3 1 607
3 0 1439
3 1 424
3 1 645
3 1 411
3 1 1367
3 0 680
3 1 364
3 1 550
3 1 383
3 1 814
3 0 719
3 1 698
3 0 721
3 0 647
3 1 459
3 0 557
3 1 488
3 1 473
3 0 583
3 1 455
3 1 777
3 1 430
3 0 895
3 1 459
3 1 874
3 1 745
3 0 392
3 1 479
3 0 836
3 1 581
3 1 827
3 0 574
3 1 1119
3 0 581
3 1 492
3 0 441
3 1 472
3 1 673
3 1 573
3 1 574
3 1 372
3 0 546
3 1 626
3 0 1124
3 1 499
3 1 585
3 1 455
3 0 565
3 0 656
3 0 543
3 1 472
3 1 584
3 1 613
3 1 565
3 1 790
3 1 526
3 0 526
3 1 598
3 1 325
3 0 481
3 1 370
3 1 379
3 0 416
3 0 462
3 0 416
3 0 560
3 1 1004
3 1 370
3 0 435
3 0 390
3 1 517
3 0 387
3 1 885
3 0 611
3 1 592
3 1 459
3 1 507
3 1 673
3 1 527
3 0 1347
3 0 562
3 1 795
3 0 413
3 1 430
3 1 464
3 0 408
3 1 423
3 1 382
3 1 879
3 1 590
3 0 491
3 0 355
3 1 589
3 1 919
3 1 446
3 0 434
3 1 864
3 0 382
3 0 740
3 1 1849
3 0 1109
3 1 940
3 1 335
3 0 490
3 1 899
3 1 729
3 1 555
3 1 1097
3 1 565
3 1 651
3 1 870
3 1 554
3 1 425
3 1 606
3 1 589
3 1 727
3 0 485
3 1 466
3 1 397
3 0 500
3 1 571
3 0 408
3 0 1242
3 1 437
3 1 609
3 1 625
3 1 1018
3 0 593
3 0 507
3 1 730
3 0 356
3 1 442
3 1 551
3 0 587
3 0 666
3 0 393
3 1 413
3 0 492
3 1 799
3 0 642
3 1 701
3 1 679
3 0 629
3 0 372
3 0 441
3 0 526
3 0 783
3 0 631
3 1 488
3 1 1885
3 1 583
3 1 772
3 1 444
3 1 387
3 1 532
3 1 350
3 1 667
3 0 621
3 1 437
3 1 476
3 0 486
3 1 686
3 1 390
3 1 522
3 0 722
3 1 731
3 0 417
3 1 359
3 1 1231
3 1 947
3 0 451
3 1 684
3 1 350
3 1 287
3 1 460
3 0 397
3 0 445
3 0 558
3 1 754
3 1 415
3 1 465
3 1 545
3 1 1328
3 1 334
3 1 513
3 0 545
3 1 408
3 0 437
3 1 583
3 1 377
3 1 666
3 0 433
3 1 380