- Milk
- Toilet paper
- hand off GWT ticket to Josh D.
dig shiftyplatypus.com
; <<>> DiG 9.8.3-P1 <<>> shiftyplatypus.com
;; global options: +cmd
;; Got answer:
;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 26231
;; flags: qr rd ra; QUERY: 1, ANSWER: 5, AUTHORITY: 0, ADDITIONAL: 0
;; QUESTION SECTION:
;shiftyplatypus.com. IN A
curl -v -o /dev/null http://news.ycombinator.com
* About to connect() to news.ycombinator.com port 80 (#0)
* Trying 198.41.190.47...
* Connected to news.ycombinator.com (198.41.190.47) port 80 (#0)
> GET / HTTP/1.1
> User-Agent: curl/7.29.0
> Host: news.ycombinator.com
> Accept: */*
>
< HTTP/1.1 301 Moved Permanently
root@ipv6test:~# traceroute6 canhazip.com
traceroute to canhazip.com (2400:cb00:2048:1::8d65:74a9) from 2607:f878:fe00:96::2, 30 hops max, 24 byte packets
1 gw-151.lax-02.us.sixxs.net (2607:f878:fe00:96::1) 12.376 ms 12.151 ms 12.168 ms
2 uslax02.sixxs.net (2607:f878:3:400::2) 12.239 ms 12.267 ms 12.205 ms
3 2607:f878:3:a::1 (2607:f878:3:a::1) 13.195 ms 13.237 ms 13.167 ms
4 2607:f878::1d0 (2607:f878::1d0) 13.192 ms 12.911 ms 12.862 ms
5 2400:8800:7f02::1 (2400:8800:7f02::1) 151.405 ms 234.243 ms 263.046 ms
6 2001:504:13::150 (2001:504:13::150) 76.241 ms 72.099 ms 71.913 ms
7 ae3-50g.cr1.lax1.us.nlayer.net (2001:590::451f:7c71) 81.133 ms 88.084 ms 79.429 ms
8 ae1-50g.ar1.lax1.us.nlayer.net (2001:590::451f:7f8a) 77.019 ms 74.702 ms 75.187 ms
' =============================================================
' Insert Earned hours formula and
' calculate all earned hours on CPInputVolume
' =============================================================
xRef1 = Sheets("CPInputVolume").Cells(1, 6) - 19 'formula located in 1F that counts how many rows of data we have, used to loop through code below until end of our data | |
restartLoop: 'used down below after check for problem data (didn't complete all 3 questions) | |
Counter1 = 1 'used to match against xRef1 so we know when we reach the bottom of the data |