@jkebinger
jkebinger / ProtoFieldIntrospectionStrategy.java
Created May 22, 2024 17:26
An assertj introspection strategy to compare protobufs only using the proto fields.
package cloud.prefab.test.helpers;
import com.google.protobuf.Descriptors;
import com.google.protobuf.MessageOrBuilder;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.assertj.core.api.recursive.comparison.RecursiveComparisonIntrospectionStrategy;
import org.assertj.core.util.introspection.IntrospectionError;
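
The preview above stops at the imports. As a rough, hedged sketch of how a strategy like this is usually wired into AssertJ's recursive comparison (this assumes AssertJ 3.24+, which added withIntrospectionStrategy, and a hypothetical UserProto message that is not part of the gist):

import static org.assertj.core.api.Assertions.assertThat;

import org.junit.jupiter.api.Test;

class ProtoFieldComparisonExampleTest {

  @Test
  void comparesOnlyDeclaredProtoFields() {
    // UserProto is a hypothetical protobuf-generated message, used purely for illustration.
    UserProto expected = UserProto.newBuilder().setName("ada").setAge(36).build();
    UserProto actual = UserProto.newBuilder().setName("ada").setAge(36).build();

    assertThat(actual)
      .usingRecursiveComparison()
      // Limit introspection to the fields declared in the .proto file so that
      // protobuf-java's internal bookkeeping fields stay out of the diff.
      .withIntrospectionStrategy(new ProtoFieldIntrospectionStrategy())
      .isEqualTo(expected);
  }
}
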
import logging
import prefab_cloud_python
from prefab_cloud_python import Options
def on_starting(server):
    logging.warning("Starting server")
    prefab_cloud_python.set_options(Options(collect_sync_interval=5))

@jkebinger
jkebinger / TimeIntervalGenerator.java
Created September 11, 2023 22:01
Generate a list of hourly or daily time spans in UTC milliseconds in any timezone
package cloud.prefab.server.utils;
import cloud.prefab.domain.Internal;
import com.google.common.base.Preconditions;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;
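
The class body is cut off in the preview above. The following is a minimal sketch of the idea the description names: producing hourly [start, end) spans as UTC epoch milliseconds for an arbitrary timezone. The Span holder and hourlySpans method are invented for illustration; the gist's real signatures (and its use of cloud.prefab.domain.Internal) may differ.

import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.List;

class HourlySpanSketch {

  // Hypothetical holder for a half-open [startMillis, endMillis) interval.
  record Span(long startMillis, long endMillis) {}

  static List<Span> hourlySpans(Instant from, Instant to, ZoneId zone) {
    List<Span> spans = new ArrayList<>();
    // Align the cursor to the top of the hour in the requested timezone,
    // then step forward one hour at a time until the end instant is reached.
    ZonedDateTime cursor = from.atZone(zone).truncatedTo(ChronoUnit.HOURS);
    ZonedDateTime end = to.atZone(zone);
    while (cursor.isBefore(end)) {
      ZonedDateTime next = cursor.plusHours(1);
      spans.add(new Span(cursor.toInstant().toEpochMilli(), next.toInstant().toEpochMilli()));
      cursor = next;
    }
    return spans;
  }
}

Daily spans would follow the same shape with truncatedTo(ChronoUnit.DAYS) and plusDays(1), which keeps day boundaries correct across DST transitions in the chosen zone.
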
@jkebinger
jkebinger / gist:954205
Created May 3, 2011 20:51
Elasticsearch errors
org.elasticsearch.search.query.QueryPhaseExecutionException: [landscape][0]: query[ConstantScore(*:*)],from[0],size[10]: Query Failed [Failed to execute main query]
    at org.elasticsearch.search.query.QueryPhase.execute(QueryPhase.java:215)
    at org.elasticsearch.search.SearchService.executeQueryPhase(SearchService.java:222)
    at org.elasticsearch.search.action.SearchServiceTransportAction.sendExecuteQuery(SearchServiceTransportAction.java:134)
    at org.elasticsearch.action.search.type.TransportSearchQueryThenFetchAction$AsyncAction.sendExecuteFirstPhase(TransportSearchQueryThenFetchAction.java:76)
    at org.elasticsearch.action.search.type.TransportSearchTypeAction$BaseAsyncAction.performFirstPhase(TransportSearchTypeAction.java:192)
    at org.elasticsearch.action.search.type.TransportSearchTypeAction$BaseAsyncAction.access$000(TransportSearchTypeAction.java:75)
    at org.elasticsearch.action.search.type.TransportSearchTypeAction$BaseAsyncAction$1.run(TransportSearchTypeAction.java:151)
    at java.util.concurrent.Thread
@jkebinger
jkebinger / strata_map.R
Created February 2, 2011 00:39
Move the colors out of aes to avoid scale_color_manual
# File-Name: strata_map.R
# Date: 2011-01-31
# Author: Drew Conway
# Email: drew.conway@nyu.edu
# Purpose: Create a map of bit.ly clicks on Strata
# Data Used: strata_lat_lon
# Packages Used: ggplot2
# Output File:
# Data Output:
# Machine: Drew Conway's MacBook Pro
select count(*),
       substring(params from '"id"=>"([^"]*)"') as treatment_id,
       coalesce(substring(params from '"brand"=>"([^"]*)"'), 'f') as brand_flag,
       coalesce(substring(params from '"disease_tag"=>"([^"]*)"'), 'all') as disease_tag
from weblogs_2010_07
where page_name = 'treatments/show' and user_id is null
group by 2, 3, 4
order by 1 desc

require File.dirname(__FILE__) + '/../test_helper'
class SearchesControllerTest < ActionController::TestCase
  include AuthenticatedTestHelper

  context "show list of searches" do
    setup do
      login_as :quentin
      puts "first context"
      puts "controller's object id: #{@controller.object_id}"

# Zombie branch checker
# Designed to help you ferret out branches that can probably be deleted from your repository,
# this script lists all remote branches along with who last committed to each branch, when,
# and, most importantly, whether the remote branch has commits that are not in master.
# run in the root directory of your git repo
require 'rubygems'
require 'grit'
include Grit

# Drop the table's constraints and indexes inside a savepoint so the block
# can bulk-load rows without paying for index maintenance on every insert.
def self.defer_indexing(connection, table_name, schema_name = "public", &block)
  fq_table_name = "#{schema_name}.#{table_name}"
  connection.execute "SAVEPOINT defer_indexing"
  constraints = constraints_for(connection, table_name, schema_name)
  constraints.each { |c| connection.execute "alter table #{fq_table_name} drop constraint #{c['constraint_name']}" }
  indexes = indexes_for(connection, table_name, schema_name)
  indexes.each { |i| connection.execute "drop index if exists \"#{i['index_name']}\"" }

connection = ActiveRecord::Base.connection
table = "some_table"
schema = "some_schema"

DBLoaderUtils.defer_indexing(connection, table, schema) do
  connection.execute "insert into some_schema.some_table(id, col_1, col_2)
    select id, col_1, col_2 from another_schema.another_table;"
end