Skip to content

Commit

Permalink
- Backend#reset_bundle, + Redis optimization, + functional special backend test
Browse files Browse the repository at this point in the history
  • Loading branch information
floere committed Dec 13, 2011
1 parent dc5492d commit 64eceb7
Show file tree
Hide file tree
Showing 6 changed files with 252 additions and 81 deletions.
10 changes: 0 additions & 10 deletions server/lib/picky/backends/backend.rb
Expand Up @@ -6,16 +6,6 @@ module Backends
#
class Backend

def reset bundle
[
create_inverted(bundle),
create_weights(bundle),
create_similarity(bundle),
create_configuration(bundle),
create_realtime(bundle)
]
end

# Returns the total score of the combinations.
#
# Default implementation. Override to speed up.
Expand Down
20 changes: 12 additions & 8 deletions server/lib/picky/backends/redis.rb
Expand Up @@ -180,12 +180,18 @@ def ids combinations, amount, offset
# Assume it's using EVALSHA.
#
begin
client.evalsha @@ids_sent_once,
identifiers.size,
*identifiers,
generate_intermediate_result_id,
offset,
(offset + amount)
if identifiers.size > 1
client.evalsha @@ids_sent_once,
identifiers.size,
*identifiers,
generate_intermediate_result_id,
offset,
(offset + amount)
else
client.zrange identifiers.first,
offset,
(offset + amount)
end
rescue RuntimeError => e
# Make the server have a SHA-1 for the script.
#
Expand All @@ -212,8 +218,6 @@ def ids combinations, amount, offset

# Little optimization.
#
# TODO Include in the scripting version as well.
#
if identifiers.size > 1
# Intersect and store.
#
Expand Down
19 changes: 11 additions & 8 deletions server/lib/picky/bundle.rb
Expand Up @@ -71,17 +71,20 @@ def backend
# Initializes all necessary indexes from the backend.
#
def reset_backend
# Extract specific indexes from backend.
#
@backend_inverted,
@backend_weights,
@backend_similarity,
@backend_configuration,
@backend_realtime = backend.reset self

create_backends
initialize_backends
end

# Extract specific indexes from backend.
#
def create_backends
@backend_inverted = backend.create_inverted self
@backend_weights = backend.create_weights self
@backend_similarity = backend.create_similarity self
@backend_configuration = backend.create_configuration self
@backend_realtime = backend.create_realtime self
end

# Initial indexes.
#
# Note that if the weights strategy doesn't need to be saved,
Expand Down
229 changes: 229 additions & 0 deletions server/spec/functional/backends/special_spec.rb
@@ -0,0 +1,229 @@
# encoding: utf-8
#
require 'spec_helper'

# To test the interface definition.
#
class BackendInterfaceTester < Picky::Backends::Backend
def create_inverted _
InternalBackendInterfaceTester.new
end
def create_weights _
InternalBackendInterfaceTester.new
end
def create_similarity _
InternalBackendInterfaceTester.new
end
def create_configuration _
InternalBackendInterfaceTester.new
end
def create_realtime _
InternalBackendInterfaceTester.new
end
end
class InternalBackendInterfaceTester

def initialize
@hash = {}
end

def initial
self
end

def empty
self
end

def [] key
@hash[key]
end

def []= key, value
@hash[key] = value
end

def clear
@hash.clear
end

def delete key
@hash.delete key
end

# dump/load
#

def dump _

end

def load
self
end

end

# Describes a Picky index that uses the Memory backend
# for data storage.
#
describe BackendInterfaceTester do

class Book
attr_reader :id, :title, :author
def initialize id, title, author
@id, @title, @author = id, title, author
end
end

attr_reader :data, :books

let(:data) do
Picky::Index.new(:books) do
source []
category :title, partial: Picky::Partial::Postfix.new(from: 1)
category :author, similarity: Picky::Generators::Similarity::DoubleMetaphone.new(3)
end
end
let(:books) { Picky::Search.new data }

its_to_s = ->(*) do
it 'searching for it' do
books.search('title').ids.should == ['1']
end
it 'searching for it using multiple words' do
books.search('title author').ids.should == ['1']
end
it 'searching for it using partial' do
books.search('tit').ids.should == ['1']
end
it 'searching for it using similarity' do
books.search('aothor~').ids.should == ['1']
end
it 'handles removing' do
data.remove 1

books.search('title').ids.should == []
end
it 'handles removing with more than one entry' do
data.add Book.new(2, 'title', 'author')

books.search('title').ids.should == ['2', '1']

data.remove '1'

books.search('title').ids.should == ['2']
end
it 'handles removing with three entries' do
data.add Book.new(2, 'title', 'author')
data.add Book.new(3, 'title', 'author')

books.search('title').ids.should == ['3', '2', '1']

data.remove '1'

books.search('title').ids.should == ['3', '2']
end
it 'handles replacing' do
data.replace Book.new(1, 'toitle', 'oithor')

books.search('title').ids.should == []
books.search('toitle').ids.should == ['1']
end
it 'handles clearing' do
data.clear

books.search('title').ids.should == []
end
it 'handles dumping and loading' do
data.dump
data.load

books.search('title').ids.should == ['1']
end
end

its_to_i = ->(*) do
it 'searching for it' do
books.search('title').ids.should == [1]
end
it 'searching for it using multiple words' do
books.search('title author').ids.should == [1]
end
it 'searching for it using partial' do
books.search('tit').ids.should == [1]
end
it 'searching for it using similarity' do
books.search('aothor~').ids.should == [1]
end
it 'handles removing' do
data.remove 1

books.search('title').ids.should == []
end
it 'handles removing with more than one entry' do
data.add Book.new(2, 'title', 'author')

books.search('title').ids.should == [2, 1]

data.remove 1

books.search('title').ids.should == [2]
end
it 'handles removing with three entries' do
data.add Book.new(2, 'title', 'author')
data.add Book.new(3, 'title', 'author')

books.search('title').ids.should == [3, 2, 1]

data.remove 1

books.search('title').ids.should == [3, 2]
end
it 'handles replacing' do
data.replace Book.new(1, 'toitle', 'oithor')

books.search('title').ids.should == []
books.search('toitle').ids.should == [1]
end
it 'handles clearing' do
data.clear

books.search('title').ids.should == []
end
it 'handles dumping and loading' do
data.dump
data.load

books.search('title').ids.should == [1]
end
end

context 'to_s key format' do
context 'immediately indexing backend (no dump needed)' do
before(:each) do
data.key_format :to_s
data.backend described_class.new
data.clear

data.add Book.new(1, 'title', 'author')
end

instance_eval &its_to_s
end
end
context 'to_i key format' do
context 'immediately indexing backend (no dump needed)' do
before(:each) do
data.key_format :to_i
data.backend described_class.new
data.clear

data.add Book.new(1, 'title', 'author')
end

instance_eval &its_to_i
end
end

end
51 changes: 0 additions & 51 deletions server/test_project/app.rb
Expand Up @@ -243,51 +243,6 @@ def initialize isbn
:partial => Picky::Partial::Substring.new(from: 1)
end

BackendModel = Struct.new :id, :name

# # To test the interface definition.
# #
# class InternalBackendInterfaceTester
#
# def initialize
# @hash = {}
# end
#
# def [] key
# @hash[key]
# end
#
# def []= key, value
# @hash[key] = value
# end
#
# # We need to implement this as we use it
# # in a Memory::JSON backend.
# #
# def to_json
# @hash.to_json
# end
#
# end
#
# backends_index = Picky::Index.new(:backends) do
# source [
# BackendModel.new(1, "Memory"),
# BackendModel.new(2, "Redis")
# ]
# backend Picky::Backends::Memory.new(
# inverted: ->(bundle) do
# Picky::Backends::Memory::JSON.new(bundle.index_path(:inverted))
# end,
# weights: Picky::Backends::Memory::JSON.new(
# "#{PICKY_ROOT}/index/#{PICKY_ENVIRONMENT}/funky_weights_path",
# empty: InternalBackendInterfaceTester.new,
# initial: InternalBackendInterfaceTester.new
# )
# )
# category :name
# end

# This checks that we can use a funky customized tokenizer.
#
NonStringDataSource = Struct.new :id, :nonstring
Expand Down Expand Up @@ -429,12 +384,6 @@ def tokenize nonstring
get %r{\A/japanese\z} do
japanese_search.search(params[:query], params[:ids] || 20, params[:offset] || 0).to_json
end
# backends_search = Search.new backends_index do
# searching case_sensitive: false
# end
# get %r{\A/backends\z} do
# backends_search.search(params[:query], params[:ids] || 20, params[:offset] || 0).to_json
# end
nonstring_search = Search.new nonstring_data_index
get %r{\A/nonstring\z} do
nonstring_search.search(params[:query], params[:ids] || 20, params[:offset] || 0).to_json
Expand Down
4 changes: 0 additions & 4 deletions server/test_project/spec/integration_spec.rb
Expand Up @@ -297,10 +297,6 @@
#
it { japanese.search("日").ids.should == [1] }

# # Different backends.
# #
# it { backends.search("Memor").ids.should == [1] }

# Different tokenizer.
#
it { nonstring.search("moo zap").ids.should == [2] }
Expand Down

0 comments on commit 64eceb7

Please sign in to comment.