Skip to content

Instantly share code, notes, and snippets.

-- Weekly SLA / deck-metrics report: walks deck_page -> deck_page_group ->
-- deck_query, then joins an unpivoted view of deck_query_weekly_rollup so that
-- each aggregate column (sum/avg/min/max and the tp_* percentiles) becomes one
-- (agg_type, value) row per year/week.
-- NOTE(review): num.value / denom.value will error or return NULL on a zero
-- denominator — consider NULLIF(denom.value, 0). Also, operator precedence in
-- "100 - num.value / denom.value * 100.0" relies on * and / binding before -;
-- parenthesize for readability.
select dp.id deck_page_id, dqry.id deck_query_id, dqry.name, dqry.sla, dqry.numerator_value, dqry.numerator_function, dqry.denominator_value,
dqry.denominator_function, dqry.deck_group_id, dqry.deck_group_order, dqry.graph_type, dqry.graph_legend, num.year, num.week_number, num.value num_value, denom.value denom_value, num.value / denom.value div_value, 100 - num.value / denom.value * 100.0 inv_percentage
from deck_page dp
join deck_page_group dpg on (dp.id = dpg.deck_page_id)
join deck_query dqry on (dpg.id= dqry.deck_group_id)
join (
SELECT *
FROM
(SELECT * FROM deck_query_weekly_rollup) p
-- NOTE(review): fragment is truncated here — the UNPIVOT clause is missing its
-- closing parenthesis and pivot alias, and the derived table is never closed or
-- joined on (presumably aliased "num"; a matching "denom" join is also absent).
UNPIVOT (value FOR agg_type IN (sum_value, avg_value, min_value, max_value, tp_25, tp_50, tp_75, tp_90)
>>> d = qe.logstash_query('@fields.hostname:"SEADCWEB10.hq.apfm.local"', "@fields.EventTime,@fields.request")
>>> len(d["rows"])
1100
>>> d["rows"] = len(d["rows"])
>>> d
{'query': '{"sort": {"@timestamp": {"order": "desc"}}, "fields": ["@fields.EventTime", "@fields.request"],
"size": 50,
"from": 1050,
"query": {"filtered": {"filter":
{"range": {"@timestamp": {"to": "2013-10-09T18:23:46.851697", "from": "2013-10-09T18:08:46.851697"}}},
id query_id run_date value run_time
11462275 5419 2013-09-15 17:24:00.000 0.448612 NULL
10173026 5419 2013-09-15 17:24:00.000 0.448612 NULL
20117379 5419 2013-09-15 17:24:00.000 0.448612 NULL
20819939 5419 2013-09-15 17:24:00.000 0.448612 NULL
21459125 5419 2013-09-15 17:24:00.000 0.448612 NULL
22140965 5419 2013-09-15 17:24:00.000 0.448612 NULL
22816090 5419 2013-09-15 17:24:00.000 0.448612 NULL
9530222 5419 2013-09-15 17:24:00.000 0.448612 NULL
8888297 5419 2013-09-15 17:24:00.000 0.448612 NULL
-- Row-count sanity report: compares tracked counts in table_counts against
-- MySQL's estimated row counts from information_schema.tables, with an ASCII
-- progress bar (24 chars == 100%, capped via LEAST).
--
-- Fixes vs. original:
--   * The join condition qualified table_name with the schema name
--     ("information_schema.table_name"), which is not a valid column
--     reference — it must be qualified by the TABLES table (aliased t here).
--   * "WHERE table_schema = 'al_data'" filtered the LEFT-joined side after
--     the join, silently turning the outer join into an inner join; the
--     schema filter now lives in the ON clause so table_counts rows with no
--     information_schema match are still reported (with NULL est_rows).
SELECT
    tc.table_name,
    t.table_rows AS est_rows,
    count,                                              -- tracked exact count; presumably a table_counts column — verify
    t.table_rows / total_rows * 100 AS percent,         -- total_rows source not visible in this fragment — verify
    LPAD('|', LEAST((t.table_rows / total_rows) * 24, 24), '-') AS progress
FROM table_counts AS tc
LEFT JOIN information_schema.tables AS t
    ON t.table_name = tc.table_name
   AND t.table_schema = 'al_data';
+------------------------------+------------+--------+-----------+--------------------------+
| cities | est_rows | count | percent | progress |
+------------------------------+------------+--------+-----------+--------------------------+
| cities | 29 | 22 | 131.8182 | -----------------------| |
| city_data | 5470 | 8649 | 63.2443 | --------------| |
Bachelor of Science • Business Administration
University of Maryland • College Park, MD
145 credit hours (of a 180-hour bachelor’s degree) in Computer Science
Portland State University • Portland, OR
Certificate: Registered Tax Return Preparer
Internal Revenue Service • Washington, D.C.
Certificate: Foundations of Geographic Information Systems
CREATE VIEW [dbo].[vw_Property_Advertising_Contract_Management]
AS
/********************************************************************************************************
Name : dbo.vw_Property_Advertising_Contract_Management
Description : Generic view on top of the table dbo.Property_Advertising_Contract_Management
Revision History
----------------------------------------------------------------------------------------------------------
Date Name Description
# Serverless Framework service definition fragment for the s3-to-bq pipeline.
# NOTE(review): indentation appears to have been stripped by the paste — in
# valid serverless.yml the plugin list items and the provider keys (name,
# runtime, region) must be indented under their parent keys.
service: s3-to-bq
# CLI plugins; both are third-party and must also appear in package.json.
plugins:
- serverless-sqs-alarms-plugin
- serverless-external-s3-event
provider:
name: aws
# python3.6 is end-of-life; flagged for upgrade when the runtime allows.
runtime: python3.6
region: us-west-2
include *.json
# Integration-style test fragment: pushes a JSON job message onto a real SQS
# queue (us-west-2) so the handler under test can consume it.
# NOTE(review): fragment is truncated and its indentation was stripped by the
# paste — the HttpMockSequence list below is never closed in this view.
def test_handler(self):
# Live AWS call — assumes self.queue_name exists in us-west-2; not hermetic.
sqs = boto3.resource("sqs", "us-west-2")
job_queue = sqs.get_queue_by_name(QueueName=self.queue_name)
job_queue.send_message(
MessageBody=json.dumps(self.data)
)
# Mocked BigQuery HTTP transport; first response serves the discovery doc.
bq_http = HttpMockSequence([
({'status': '200'}, self.bigquery_discovery),
# Second test_handler variant (body elided with "..." in the source): patches
# the "jobs" env var to point at the test queue and swaps in a mocked Google
# service client before invoking the job_check Lambda handler.
# NOTE(review): indentation was stripped by the paste; FakeContext(10) is
# presumably a stub Lambda context with 10 (seconds?) remaining — verify.
def test_handler(self):
...
with patch.dict("os.environ", {"jobs": self.queue_name}):
with patch("clumpy.google.service", service):
job_check.handler({}, FakeContext(10))