I hereby claim:
- I am madhuvishy on github.
- I am madhuvishy (https://keybase.io/madhuvishy) on keybase.
- I have a public key ASAvFDvvXxT0uONcCxAIhax-D0tgAGRRCKUzoTK4KRaxsgo
To claim this, I am signing this object:
2016-06-02 04:10:58 Uploaded 1 File to: file:/tmp/madhuvishy/5446a37a-1035-41df-aa20-a21281d6edbe/hive_2016-06-02_16-10-47_186_3701812949139477878-1/-local-10004/HashTable-Stage-2/MapJoin-mapfile01--.hashtable (16148 bytes)
2016-06-02 04:10:58 End of local task; Time Taken: 2.472 sec.
Execution completed successfully
MapredLocal task succeeded
Launching Job 1 out of 1
Number of reduce tasks not specified. Estimated from input data size: 153
In order to change the average load for a reducer (in bytes):
  set hive.exec.reducers.bytes.per.reducer=<number>
In order to limit the maximum number of reducers:
  set hive.exec.reducers.max=<number>
#!/usr/bin/bash
# This script generates a changelog message by computing the commit messages
# since the last git tag, and appends it to the beginning of the changelog.md
# file. The format of the text it appends looks like this:
#
# ## v<New tag version>
# Commit message 1
# Commit message 2
# ...
#
# Topic doesn't exist
# NOTE(review): this fragment appears to exercise kafka-python behavior when
# producing to a topic that is absent from the broker (hence the
# KafkaTimeoutError import) — TODO confirm against the rest of the script,
# which is not visible here.
from kafka import KafkaClient
from kafka import KeyedProducer
from kafka import SimpleProducer
from kafka.producer.base import Producer
from kafka.common import KafkaTimeoutError

# Deliberately nonsensical topic name (see "Topic doesn't exist" above).
topic = 'eventlogging_Nonsense'
# Kafka broker address (host:port) on the deployment-prep labs cluster.
broker = 'deployment-kafka01.eqiad.wmflabs:9092'
# Seconds to wait when creating/looking up the topic before timing out.
topic_create_timeout_seconds = 1.0
# Jenkins Job Builder template for the analytics/refinery release build.
# NOTE(review): the extracted fragment had its YAML indentation flattened;
# the nesting below is restored per standard JJB job-template structure.
- job-template:
    name: 'analytics-refinery-release'
    # Maven-style Jenkins project.
    project-type: maven
    triggers:
      # Build is triggered by Zuul pipeline events.
      - zuul
    jdk: 'Ubuntu - OpenJdk 7'
    # Label expression: run only on Trusty labs slaves.
    node: contintLabsSlave && UbuntuTrusty
    scm:
      - git:
          # Zuul injects these parameters so the job checks out the change
          # under test rather than plain HEAD.
          url: '$ZUUL_URL/$ZUUL_PROJECT'
import mwparserfromhell | |
import mwclient | |
import csv | |
# Fields extracted for each schema; presumably template parameter names read
# from the schema talk pages — TODO confirm against the parsing code below.
PARAMS = ['contact', 'project', 'status', 'purge']
# mwclient connection to Meta-Wiki (no login performed in the visible code).
site = mwclient.Site('meta.wikimedia.org')
def read_schemas_list(schemas_list_file): | |
with open(schemas_list_file, 'r') as f: |
-- Combining overcounting and undercounting
-- NOTE(review): query fragment is truncated below — the "undercount" SUM is
-- cut off mid-expression, so the full SELECT/GROUP BY is not visible here.
USE wmf;
SELECT uri_host,
       -- Overcount: requests with no WMF-Last-Access cookie, or whose
       -- cookie date (dd-MMM-yyyy) is strictly before the request date.
SUM(IF ((x_analytics_map['WMF-Last-Access'] IS NULL
OR (unix_timestamp(x_analytics_map['WMF-Last-Access'], 'dd-MMM-yyyy')
< unix_timestamp(to_date(dt), 'yyyy-MM-dd'))), 1, 0)) AS overcount,
       -- Undercount: cookie present and ... (expression truncated)
SUM(IF ((x_analytics_map['WMF-Last-Access'] IS NOT NULL
AND (unix_timestamp(x_analytics_map['WMF-Last-Access'], 'dd-MMM-yyyy')
;; f(n) = n for n < 3,
;; f(n) = f(n-1) + 2*f(n-2) + 3*f(n-3) otherwise.
;; Direct tree-recursive translation of the recurrence (an iterative
;; version, fi, follows below).
(define (f n)
  (cond ((< n 3) n)
        (else (+ (f (- n 1))
                 (* 2 (f (- n 2)))
                 (* 3 (f (- n 3)))))))
;; Iterative (linear) version of f above, using an accumulator helper.
;; NOTE(review): fragment is truncated here — iter's body is cut off
;; mid-expression, so only the shape of the helper is visible.
(define (fi n)
  (define (iter a b c n)
    (if (< n 3)
def conquer(v1, v2):
    """Merge two already-sorted lists into a single sorted list.

    This is the "conquer" (merge) step of merge sort: both v1 and v2 are
    assumed to be sorted ascending; the result contains every element of
    both, in order. Equal elements from v2 are emitted first (the original
    comparison is strict `<` on v1's element).

    Note: the original fragment was truncated mid-branch and used the
    Python 2 print statement; this version completes the standard merge
    and uses the print() function with identical output.
    """
    i, j, k = 0, 0, 0
    merged = []
    print(v1, v2, i, j, k)  # debug trace kept from the original
    while k < (len(v1) + len(v2)):
        if i >= len(v1):
            # v1 exhausted: the remainder of v2 is already sorted.
            return merged + v2[j:]
        elif j >= len(v2):
            # v2 exhausted: the remainder of v1 is already sorted.
            return merged + v1[i:]
        elif v1[i] < v2[j]:
            merged.append(v1[i])
            i += 1
        else:
            merged.append(v2[j])
            j += 1
        k += 1
    # Reached only when both inputs are empty.
    return merged
<html>
<head>
<title>Chain Reaction</title>
<!-- NOTE(review): jQuery and fabric.js are loaded over plain http from
     third-party hosts; consider https / pinned local copies. Fragment is
     truncated after the opening <body> tag. -->
<script src="http://ajax.googleapis.com/ajax/libs/jquery/1.10.2/jquery.min.js"></script>
<script src="http://fabricjs.com/lib/fabric.js"></script>
<script src="chain.js"></script>
</head>
<body>