We use the Lalonde data that we encountered in HW2 and will encounter again in HW4. Here we focus on the observational data of the job training program. The job training program took place in 1976, and we are interested in its impact on 1978 earnings.

We obtain the data from the R package MatchIt. This is a subset of the data that you will work with in HW4.

library("MatchIt")
data("lalonde")
lalonde

Estimate propensity score

– Add linear terms

We set re74 as the base covariate that we know a priori needs to be included.

## full model: all covariates as linear terms; column 9 (re78, the outcome) is dropped
temp <- glm(treat ~ ., data = lalonde[, -9], family = "binomial")
## base model: re74 only
temp0 <- glm(treat ~ re74, data = lalonde[, -9], family = "binomial")
library(MASS)

## set trace = FALSE to suppress the step-by-step output shown below
model <- stepAIC(temp0, scope = list(upper = formula(temp), lower = formula(temp0)), direction = "forward")
Start:  AIC=708.54
treat ~ re74

           Df Deviance    AIC
+ race      2   504.05 512.05
+ married   1   675.29 681.29
<none>          704.54 708.54
+ nodegree  1   702.77 708.77
+ educ      1   702.85 708.85
+ re75      1   704.15 710.15
+ age       1   704.23 710.23

Step:  AIC=512.05
treat ~ re74 + race

           Df Deviance    AIC
+ married   1   496.34 506.34
+ educ      1   501.25 511.25
<none>          504.05 512.05
+ re75      1   503.72 513.72
+ nodegree  1   503.82 513.82
+ age       1   503.94 513.94

Step:  AIC=506.34
treat ~ re74 + race + married

           Df Deviance    AIC
<none>          496.34 506.34
+ educ      1   494.55 506.55
+ re75      1   494.92 506.92
+ nodegree  1   495.94 507.94
+ age       1   496.00 508.00
summary(model)

Call:
glm(formula = treat ~ re74 + race + married, family = "binomial", 
    data = lalonde[, -9])

Deviance Residuals: 
    Min       1Q   Median       3Q      Max  
-1.5694  -0.4638  -0.3033   0.8308   2.6076  

Coefficients:
              Estimate Std. Error z value Pr(>|z|)    
(Intercept)  8.863e-01  1.565e-01   5.663 1.48e-08 ***
re74        -4.949e-05  2.275e-05  -2.176  0.02958 *  
racehispan  -2.150e+00  3.596e-01  -5.979 2.25e-09 ***
racewhite   -3.062e+00  2.826e-01 -10.833  < 2e-16 ***
married     -7.262e-01  2.631e-01  -2.760  0.00578 ** 
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

(Dispersion parameter for binomial family taken to be 1)

    Null deviance: 751.49  on 613  degrees of freedom
Residual deviance: 496.34  on 609  degrees of freedom
AIC: 506.34

Number of Fisher Scoring iterations: 5

– Add interactions

## candidate scope: all two-way interactions among the selected covariates plus a quadratic term in re74
tempfull <- glm(treat ~ (race + married + re74)^2 + I(re74^2), data = lalonde[, -9], family = "binomial")
library(MASS)
## forward selection again, starting from the linear model selected above
model <- stepAIC(model, scope = list(upper = formula(tempfull), lower = formula(model)), direction = "forward")
Start:  AIC=502.57
treat ~ re74 + race + married + I(re74^2) + re74:race

               Df Deviance    AIC
<none>              486.57 502.57
+ married:re74  1   485.92 503.92
+ race:married  2   485.13 505.13
summary(model)

Call:
glm(formula = treat ~ re74 + race + married + I(re74^2) + re74:race, 
    family = "binomial", data = lalonde)

Deviance Residuals: 
    Min       1Q   Median       3Q      Max  
-1.5612  -0.5542  -0.2251   0.8372   3.0364  

Coefficients:
                  Estimate Std. Error z value Pr(>|z|)    
(Intercept)      8.683e-01  1.664e-01   5.218 1.81e-07 ***
re74            -1.148e-04  6.260e-05  -1.834   0.0667 .  
racehispan      -2.034e+00  4.175e-01  -4.871 1.11e-06 ***
racewhite       -2.642e+00  3.183e-01  -8.300  < 2e-16 ***
married         -6.451e-01  2.665e-01  -2.420   0.0155 *  
I(re74^2)        4.976e-09  3.270e-09   1.522   0.1281    
re74:racehispan -2.781e-05  7.658e-05  -0.363   0.7165    
re74:racewhite  -1.644e-04  9.291e-05  -1.770   0.0768 .  
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

(Dispersion parameter for binomial family taken to be 1)

    Null deviance: 751.49  on 613  degrees of freedom
Residual deviance: 486.57  on 606  degrees of freedom
AIC: 502.57

Number of Fisher Scoring iterations: 7
## obtain linearized propensity scores (predict() on a glm returns the linear predictor, i.e. the log-odds)
lps <- predict(model)
library(ggplot2)
temp.data <- data.frame(lps = lps, treated = as.factor(lalonde$treat))
ggplot(temp.data, aes(x = lps, fill = treated, color = treated)) + 
  geom_histogram(alpha = 0.5, position = "identity") + 
  xlab("Linearized propensity score") 

Stratification

Stratifying the units into K = 5 equal-sized strata is problematic because of poor overlap, as the quick tabulation below suggests.
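As a rough check (a sketch only; it refits the propensity model selected above and cuts its fitted values at the sample quintiles, with eps.all as a temporary name), we can count treated and control units within five equal-frequency strata:

## sketch: treated/control counts within quintile strata of the estimated propensity score
eps.all <- glm(treat ~ re74 + race + married + I(re74^2) + re74:race,
               data = lalonde, family = "binomial")$fitted.values
table(lalonde$treat, cut(eps.all, quantile(eps.all, 0:5/5), include.lowest = TRUE))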

Trimming to improve overlap

## estimated propensity scores from the selected model
eps <- glm(treat ~ re74 + race + married + I(re74^2) + re74:race, data = lalonde, family = "binomial")$fitted.values

## drop units with extreme estimated propensity scores
rm.idx <- which(eps < 0.1 | eps > 0.9)
## drop treated units above the largest control score and control units below the smallest treated score
idx.treated <- which((lalonde$treat == 1) & (eps > max(eps[lalonde$treat == 0])))
idx.control <- which((lalonde$treat == 0) & (eps < min(eps[lalonde$treat == 1])))
rm.idx <- union(rm.idx, c(idx.treated, idx.control))

lalonde <- lalonde[-rm.idx, ]
eps <- eps[-rm.idx]
lalonde
Now, we perform sequential splitting to find the subclasses.

## First, create a data frame to store the current grouping information
## (b = 1: all units start in a single block)
temp <- data.frame(e = eps, treat = lalonde$treat, b = 1)

t.max <- 1.96
# check whether the t-statistic is above t.max for the first iteration
t.stat <- t.test(x = temp$e[temp$treat == 1], y = temp$e[temp$treat == 0], var.equal = T)$statistic
condition <- abs(t.stat) > t.max
# minimum n.t or n.c in each block
size <- 3
# a block is considered for splitting only if its total size is at least 2 * size.new
size.new <- 20

# continue until all t statistics are below t.max
set.seed(0)
while(condition)
{
  # calculate size in each group
  # we want to not split a block if it is too small
  b <- max(temp$b)
  ignore <- sapply(1:b, function(j) {
     n.t <- sum(temp$treat == 1 & temp$b == j)
     n.c <- sum(temp$treat == 0 & temp$b == j)
     return(n.t < size | n.c < size | (n.t + n.c < size.new * 2))
  })
  
  # split unbalanced blocks into more blocks
  split <- which((abs(t.stat) > t.max) & (!ignore))
  
  if(length(split) == 0)
    break

  
  ## we need to keep a current copy of the block information and which block to ignore as block assignments are going to change later
  b.current <- temp$b
  
  for(j in split)
  {
    
    cutoff <- median(temp$e[b.current == j])
    ## We split units into two new blocks
    ## extract the index of units belonging to each new stratum
    idx.s <- which(b.current == j & temp$e < cutoff)
    idx.l <- which(b.current == j & temp$e > cutoff)
    ## randomly put half of the ties into one category
    idx.e <- sample(which(temp$e == cutoff & b.current == j))
    n.tie <- length(idx.e)
    if (n.tie >= 1) {
      if (n.tie > 1)
        idx.s <- c(idx.s, idx.e[1:round(n.tie/2)])
      idx.l <- c(idx.l, idx.e[(round(n.tie/2)+ 1):n.tie])
    }
      
    ## we split only when each new stratum has more than `size` treated and control units
    if (sum(temp$treat[idx.s]==1) > size && sum(temp$treat[idx.s]==0) > size && 
        sum(temp$treat[idx.l]==1) > size && sum(temp$treat[idx.l]==0) > size) {
      # anything above the current will have to be moved up 1
      temp$b[b.current > j] <- temp$b[b.current > j] + 1
      ## the upper new stratum will also have the block idx added by 1
      temp$b[idx.l] <- temp$b[idx.l] + 1
    }
    ## We don't do anything if we do not want to split
  }
  
   # calculate t statistic for each block
  b <- max(temp$b)
  t.stat <- sapply(1:b, function(j) {
    t.test(x = temp$e[temp$treat == 1 & temp$b == j], 
                        y = temp$e[temp$treat == 0 & temp$b == j], var.equal=T)$statistic
  })
  
  ## Update condition: check whether ANY block still has |t| above t.max
  ## (blocks that are too small to split are handled by `ignore` and the break above)
  condition <- any(abs(t.stat) > t.max)
}

lalonde$blocks <- temp$b
table(lalonde[, c("treat", "blocks")])
     blocks
treat   1   2   3   4   5
    0  72  36  18   7  67
    1   7  12   8  19 130
## check the range of estimated propensity scores
sapply(1:max(lalonde$blocks), function(j) range(temp$e[temp$b == j]))
          [,1]      [,2]      [,3]      [,4]      [,5]
[1,] 0.1011357 0.1451386 0.2376577 0.4672575 0.5555612
[2,] 0.1451386 0.2376577 0.4567325 0.5555612 0.7043876
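Next, we treat the data as if they came from a stratified randomized experiment and use Neyman's estimator within each block. In each block j we take the difference in means and its variance estimate, then combine the blocks with weights proportional to block size (this restates what the code below computes):

tau_hat = sum_j (n_j / n) * tau_hat_j,   Var_hat(tau_hat) = sum_j (n_j / n)^2 * (s_{t,j}^2 / n_{t,j} + s_{c,j}^2 / n_{c,j})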
## CI from Neyman
est_per_block <- sapply(1:max(lalonde$blocks), function(idx) {
  Yt <- lalonde$re78[lalonde$blocks == idx & lalonde$treat == 1]
  Yc <- lalonde$re78[lalonde$blocks == idx & lalonde$treat == 0]
  
  tau_hat <- mean(Yt) - mean(Yc)

  st <- sd(Yt)
  sc <- sd(Yc)
  varhat_tauhat <- st^2/length(Yt) + sc^2/length(Yc)
  return(c(tau_hat, varhat_tauhat))
})
colnames(est_per_block)<- 1:max(lalonde$blocks)
rownames(est_per_block) <- c("mean", "var")
est_per_block
               1          2            3             4           5
mean    2252.462   -1206.69     3752.993     -980.4199    1318.621
var  7019811.047 2558729.60 11917739.160 12153089.3809 1021087.344
wts <- table(lalonde$blocks)/length(lalonde$blocks)

tau_hat <- sum(est_per_block[1, ] * wts)
sd_tau_hat <- sqrt(sum(est_per_block[2, ] * wts^2))
  

result <- c(tau_hat, sd_tau_hat, c(tau_hat- 1.96 * sd_tau_hat, tau_hat + 1.96 * sd_tau_hat))
names(result) <- c("est", "sd", "CI_lower", "CI_upper")
result
      est        sd  CI_lower  CI_upper 
1201.8050  864.2807 -492.1852 2895.7952 