# Gist by @mrbrdo, created December 7, 2015
# https://github.com/elastic/elasticsearch-rails/pull/361
require 'sequel'
require 'elasticsearch/model'

module Elasticsearch
  module Model
    module Adapter
      # An adapter for Sequel-based models
      #
      module Sequel
        Adapter.register(
          self,
          lambda do |klass|
            defined?(::Sequel::Model) && klass.respond_to?(:ancestors) &&
              klass.ancestors.include?(::Sequel::Model)
          end)
        # Module for implementing methods and logic related to fetching records
        # from the database
        #
        module Records
          # Return the collection of records fetched from the database
          #
          def records
            sql_records = klass.where(klass.primary_key => ids)

            # Re-order records based on the order from Elasticsearch hits
            # by redefining `to_a`, unless the user has called `order()`
            #
            # TODO: Sequel
            #
            # sql_records.instance_exec(response.response['hits']['hits']) do |hits|
            #   define_singleton_method :to_a do
            #     if defined?(::ActiveRecord) && ::ActiveRecord::VERSION::MAJOR >= 4
            #       self.load
            #     else
            #       self.__send__(:exec_queries)
            #     end
            #     @records.sort_by { |record| hits.index { |hit| hit['_id'].to_s == record.id.to_s } }
            #   end
            # end
            sql_records
          end
        end
        # Module for implementing methods and logic related to hooking into
        # the model lifecycle (e.g. to perform automatic index updates)
        #
        module Callbacks
          # Handle index updates (creating, updating or deleting documents)
          # when the model changes, by hooking into the lifecycle as a Sequel plugin
          #
          def self.included(base)
            base.plugin(self)
          end

          module InstanceMethods
            def after_create
              super
              __elasticsearch__.index_document
            end

            def after_update
              super
              __elasticsearch__.update_document
            end

            def after_destroy
              super
              __elasticsearch__.delete_document
            end

            def as_json(opts = nil)
              to_hash.stringify_keys.as_json(opts)
            end
          end
        end
        # Module for efficiently fetching records from the database to import
        # them into the index
        #
        module Importing
          # Fetch records from the database in batches, yielding each batch
          # to the given block
          #
          def __find_in_batches(options = {}, &block)
            rows_per_fetch = options.fetch(:rows_per_fetch, 1_000)

            if target.dataset.respond_to?(:use_cursor)
              # Stream rows through a server-side cursor (e.g. Sequel's PostgreSQL adapter)
              target.dataset.use_cursor(options).each_slice(rows_per_fetch, &block)
            else
              # Fall back to paging through the dataset and yielding full batches
              items = []
              target.dataset.paged_each(options) do |item|
                items << item
                if items.length % rows_per_fetch == 0
                  yield items
                  items = []
                end
              end
              yield items unless items.empty?
            end
          end
          # Return a lambda which transforms a model instance into the
          # Bulk API format expected by the Elasticsearch client
          #
          def __transform
            lambda do |model|
              {
                index: {
                  _id: model.id,
                  data: model.__elasticsearch__.as_indexed_json
                }
              }
            end
          end
        end
      end
    end
  end
end
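
# --- Usage sketch -------------------------------------------------------------
#
# A minimal, hypothetical example of wiring a Sequel model into this adapter.
# The database URL, the `articles` table and the `Article` class are made up
# for illustration; `include Elasticsearch::Model`, `import` and `search` are
# the standard elasticsearch-model API, which resolves this adapter through
# the `Adapter.register` call above.
#
#   DB = Sequel.connect('postgres://localhost/myapp')
#
#   class Article < Sequel::Model
#     include Elasticsearch::Model            # adds __elasticsearch__, search, import, ...
#     include Elasticsearch::Model::Callbacks # installs the Callbacks plugin defined above
#   end
#
#   Article.create(title: 'Hello')            # after_create indexes the document
#   Article.import(rows_per_fetch: 500)       # bulk import via __find_in_batches
#   Article.search('hello').records.to_a      # records fetched via Records#records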