Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

Now searching with Sphinx for quick global search.

  • Loading branch information...
commit 5dbe11a3e77590ff3ee9034b439dce726fecc335 1 parent 289fc67
@vojto vojto authored
Showing with 4,067 additions and 2 deletions.
  1. +11 −0 app/controllers/searches_controller.rb
  2. +1 −1  app/views/layouts/_quick_search.html.haml
  3. +1 −1  config/routes.rb
  4. +37 −0 lib/sphinx_search_engine.rb
  5. +5 −0 lib/tasks/index.rake
  6. +41 −0 vendor/plugins/sphinx/README.rdoc
  7. +21 −0 vendor/plugins/sphinx/Rakefile
  8. +1 −0  vendor/plugins/sphinx/init.rb
  9. +5 −0 vendor/plugins/sphinx/install.rb
  10. +6 −0 vendor/plugins/sphinx/lib/sphinx.rb
  11. +1,093 −0 vendor/plugins/sphinx/lib/sphinx/client.rb
  12. +50 −0 vendor/plugins/sphinx/lib/sphinx/request.rb
  13. +69 −0 vendor/plugins/sphinx/lib/sphinx/response.rb
  14. +112 −0 vendor/plugins/sphinx/spec/client_response_spec.rb
  15. +469 −0 vendor/plugins/sphinx/spec/client_spec.rb
  16. +8 −0 vendor/plugins/sphinx/spec/fixtures/default_search.php
  17. +8 −0 vendor/plugins/sphinx/spec/fixtures/default_search_index.php
  18. +11 −0 vendor/plugins/sphinx/spec/fixtures/excerpt_custom.php
  19. +8 −0 vendor/plugins/sphinx/spec/fixtures/excerpt_default.php
  20. +11 −0 vendor/plugins/sphinx/spec/fixtures/excerpt_flags.php
  21. +9 −0 vendor/plugins/sphinx/spec/fixtures/field_weights.php
  22. +9 −0 vendor/plugins/sphinx/spec/fixtures/filter.php
  23. +9 −0 vendor/plugins/sphinx/spec/fixtures/filter_exclude.php
  24. +9 −0 vendor/plugins/sphinx/spec/fixtures/filter_float_range.php
  25. +9 −0 vendor/plugins/sphinx/spec/fixtures/filter_float_range_exclude.php
  26. +9 −0 vendor/plugins/sphinx/spec/fixtures/filter_range.php
  27. +9 −0 vendor/plugins/sphinx/spec/fixtures/filter_range_exclude.php
  28. +10 −0 vendor/plugins/sphinx/spec/fixtures/filter_range_int64.php
  29. +10 −0 vendor/plugins/sphinx/spec/fixtures/filter_ranges.php
  30. +10 −0 vendor/plugins/sphinx/spec/fixtures/filters.php
  31. +13 −0 vendor/plugins/sphinx/spec/fixtures/filters_different.php
  32. +9 −0 vendor/plugins/sphinx/spec/fixtures/geo_anchor.php
  33. +9 −0 vendor/plugins/sphinx/spec/fixtures/group_by_attr.php
  34. +9 −0 vendor/plugins/sphinx/spec/fixtures/group_by_attrpair.php
  35. +9 −0 vendor/plugins/sphinx/spec/fixtures/group_by_day.php
  36. +9 −0 vendor/plugins/sphinx/spec/fixtures/group_by_day_sort.php
  37. +9 −0 vendor/plugins/sphinx/spec/fixtures/group_by_month.php
  38. +9 −0 vendor/plugins/sphinx/spec/fixtures/group_by_week.php
  39. +9 −0 vendor/plugins/sphinx/spec/fixtures/group_by_year.php
  40. +10 −0 vendor/plugins/sphinx/spec/fixtures/group_distinct.php
  41. +9 −0 vendor/plugins/sphinx/spec/fixtures/id_range.php
  42. +9 −0 vendor/plugins/sphinx/spec/fixtures/id_range64.php
  43. +9 −0 vendor/plugins/sphinx/spec/fixtures/index_weights.php
  44. +8 −0 vendor/plugins/sphinx/spec/fixtures/keywords.php
  45. +9 −0 vendor/plugins/sphinx/spec/fixtures/limits.php
  46. +9 −0 vendor/plugins/sphinx/spec/fixtures/limits_cutoff.php
  47. +9 −0 vendor/plugins/sphinx/spec/fixtures/limits_max.php
  48. +9 −0 vendor/plugins/sphinx/spec/fixtures/limits_max_cutoff.php
  49. +9 −0 vendor/plugins/sphinx/spec/fixtures/match_all.php
  50. +9 −0 vendor/plugins/sphinx/spec/fixtures/match_any.php
  51. +9 −0 vendor/plugins/sphinx/spec/fixtures/match_boolean.php
  52. +9 −0 vendor/plugins/sphinx/spec/fixtures/match_extended.php
  53. +9 −0 vendor/plugins/sphinx/spec/fixtures/match_extended2.php
  54. +9 −0 vendor/plugins/sphinx/spec/fixtures/match_fullscan.php
  55. +9 −0 vendor/plugins/sphinx/spec/fixtures/match_phrase.php
  56. +9 −0 vendor/plugins/sphinx/spec/fixtures/max_query_time.php
  57. +12 −0 vendor/plugins/sphinx/spec/fixtures/miltiple_queries.php
  58. +9 −0 vendor/plugins/sphinx/spec/fixtures/ranking_bm25.php
  59. +9 −0 vendor/plugins/sphinx/spec/fixtures/ranking_none.php
  60. +9 −0 vendor/plugins/sphinx/spec/fixtures/ranking_proximity.php
  61. +9 −0 vendor/plugins/sphinx/spec/fixtures/ranking_proximity_bm25.php
  62. +9 −0 vendor/plugins/sphinx/spec/fixtures/ranking_wordcount.php
  63. +9 −0 vendor/plugins/sphinx/spec/fixtures/retries.php
  64. +9 −0 vendor/plugins/sphinx/spec/fixtures/retries_delay.php
  65. +9 −0 vendor/plugins/sphinx/spec/fixtures/select.php
  66. +11 −0 vendor/plugins/sphinx/spec/fixtures/set_override.php
  67. +9 −0 vendor/plugins/sphinx/spec/fixtures/sort_attr_asc.php
  68. +9 −0 vendor/plugins/sphinx/spec/fixtures/sort_attr_desc.php
  69. +9 −0 vendor/plugins/sphinx/spec/fixtures/sort_expr.php
  70. +9 −0 vendor/plugins/sphinx/spec/fixtures/sort_extended.php
  71. +9 −0 vendor/plugins/sphinx/spec/fixtures/sort_relevance.php
  72. +9 −0 vendor/plugins/sphinx/spec/fixtures/sort_time_segments.php
  73. +1,269 −0 vendor/plugins/sphinx/spec/fixtures/sphinxapi.php
  74. +8 −0 vendor/plugins/sphinx/spec/fixtures/update_attributes.php
  75. +8 −0 vendor/plugins/sphinx/spec/fixtures/update_attributes_mva.php
  76. +9 −0 vendor/plugins/sphinx/spec/fixtures/weights.php
  77. +67 −0 vendor/plugins/sphinx/spec/sphinx/sphinx-id64.conf
  78. +67 −0 vendor/plugins/sphinx/spec/sphinx/sphinx.conf
  79. +86 −0 vendor/plugins/sphinx/spec/sphinx/sphinx_test.sql
  80. +3 −0  vendor/plugins/sphinx/sphinx.yml.tpl
  81. +75 −0 vendor/plugins/sphinx/tasks/sphinx.rake
  82. +27 −0 vendor/plugins/sphinx/test.rb
View
11 app/controllers/searches_controller.rb
@@ -101,6 +101,17 @@ def broaden
redirect_to dataset_path(@dataset, :search_id => @search.id)
end
+ ########################################################################
+ # Quick search using Sphinx. TODO(review): the intended fallback to the
+ # regular search when Sphinx cannot be used is not implemented yet.
+ def quick
+ query = params[:query_string]
+ engine = SphinxSearchEngine.new
+ search = engine.create_search_with_string(query)
+ engine.perform_search(search)
+ redirect_to search_path(search)
+ end
+
def show
@search = Search.find_by_id! params[:id]
View
2  app/views/layouts/_quick_search.html.haml
@@ -1,4 +1,4 @@
- if params[:controller] != "main" && params[:controller] != "search"
#quick_search
- - form_tag searches_path, :class => "search" do
+ - form_tag quick_searches_path, :class => "search" do
= text_field_tag :query_string
View
2  config/routes.rb
@@ -14,7 +14,7 @@
map.register '/register', :controller => 'users', :action => 'create'
map.signup '/signup', :controller => 'users', :action => 'new'
- map.resources :searches, :member => {:broaden => :get}
+ map.resources :searches, :member => {:broaden => :get}, :collection => {:quick => :post}
map.resources :user_roles
View
37 lib/sphinx_search_engine.rb
@@ -0,0 +1,37 @@
+class SphinxSearchEngine
+ def create_search_with_string(string)
+ search = Search.new
+ search.query_string = string
+ search.search_type = "text"
+
+ query = SearchQuery.query_with_string(string, :scope=>'global', :object=>nil)
+ search.query = query
+ query.save
+ search.save
+ search
+ end
+
+ def perform_search(search)
+ # TODO. If can't use Sphinx, return false or something.
+
+ sphinx_client = Sphinx::Client.new
+
+ all_results = []
+
+ datasets = DatasetDescription.all
+ datasets.each do |dataset|
+ sphinx_client.SetLimits(0, 10)
+ results = sphinx_client.Query(search.query_string, "index_#{dataset.identifier}")
+ results['matches'].each do |r|
+ all_results << {:table_name => dataset.identifier, :record_id => r['id'], :search_query_id => search.query.id}
+ end
+ end
+
+ values = all_results.collect{|r|"('#{r[:table_name]}', #{r[:record_id]}, #{r[:search_query_id]})"}.join(",")
+
+ sql_query = "INSERT INTO search_results(table_name, record_id, search_query_id) VALUES #{values}"
+
+ DatasetDescription.connection.execute(sql_query)
+
+ end
+end
View
5 lib/tasks/index.rake
@@ -0,0 +1,5 @@
+namespace :index do
+ task :update_config => :environment do
+ require File.join(Rails.root, "index", "update_config.rb")
+ end
+end
View
41 vendor/plugins/sphinx/README.rdoc
@@ -0,0 +1,41 @@
+=Sphinx Client API 0.9.9-dev (r1299)
+
+This document gives an overview of what is Sphinx itself and how to use in
+within Ruby on Rails. For more information or documentation,
+please go to http://www.sphinxsearch.com
+
+==Sphinx
+
+Sphinx is a standalone full-text search engine, meant to provide fast,
+size-efficient and relevant fulltext search functions to other applications.
+Sphinx was specially designed to integrate well with SQL databases and
+scripting languages. Currently built-in data sources support fetching data
+either via direct connection to MySQL, or from an XML pipe.
+
+Simplest way to communicate with Sphinx is to use <tt>searchd</tt> -
+a daemon to search through fulltext indices from external software.
+
+==Documentation
+
+You can create the documentation by running:
+
+ rake rdoc
+
+==Latest version
+
+You can always get latest version from
+http://kpumuk.info/projects/ror-plugins/sphinx
+
+==Credits
+
+Dmytro Shteflyuk <kpumuk@kpumuk.info> http://kpumuk.info
+
+Andrew Aksyonoff http://sphinxsearch.com/
+
+Special thanks to Alexey Kovyrin <alexey@kovyrin.net> http://blog.kovyrin.net
+
+==License
+
+This library is distributed under the terms of the Ruby license.
+You can freely distribute/modify this library.
+
View
21 vendor/plugins/sphinx/Rakefile
@@ -0,0 +1,21 @@
+require 'rake'
+require 'spec/rake/spectask'
+require 'rake/rdoctask'
+
+desc 'Default: run unit tests.'
+task :default => :spec
+
+desc 'Test the sphinx plugin.'
+Spec::Rake::SpecTask.new(:spec) do |t|
+ t.libs << 'lib'
+ t.pattern = 'spec/*_spec.rb'
+end
+
+desc 'Generate documentation for the sphinx plugin.'
+Rake::RDocTask.new(:rdoc) do |rdoc|
+ rdoc.rdoc_dir = 'rdoc'
+ rdoc.title = 'Sphinx Client API'
+ rdoc.options << '--line-numbers' << '--inline-source'
+ rdoc.rdoc_files.include('README')
+ rdoc.rdoc_files.include('lib/**/*.rb')
+end
View
1  vendor/plugins/sphinx/init.rb
@@ -0,0 +1 @@
+require File.dirname(__FILE__) + '/lib/sphinx'
View
5 vendor/plugins/sphinx/install.rb
@@ -0,0 +1,5 @@
+require 'fileutils'
+
+sphinx_config = File.dirname(__FILE__) + '/../../../config/sphinx.yml'
+FileUtils.cp File.dirname(__FILE__) + '/sphinx.yml.tpl', sphinx_config unless File.exist?(sphinx_config)
+puts IO.read(File.join(File.dirname(__FILE__), 'README'))
View
6 vendor/plugins/sphinx/lib/sphinx.rb
@@ -0,0 +1,6 @@
+require File.dirname(__FILE__) + '/sphinx/request'
+require File.dirname(__FILE__) + '/sphinx/response'
+require File.dirname(__FILE__) + '/sphinx/client'
+
+module Sphinx
+end
View
1,093 vendor/plugins/sphinx/lib/sphinx/client.rb
@@ -0,0 +1,1093 @@
+# = client.rb - Sphinx Client API
+#
+# Author:: Dmytro Shteflyuk <mailto:kpumuk@kpumuk.info>.
+# Copyright:: Copyright (c) 2006 - 2008 Dmytro Shteflyuk
+# License:: Distributes under the same terms as Ruby
+# Version:: 0.9.9-r1299
+# Website:: http://kpumuk.info/projects/ror-plugins/sphinx
+#
+# This library is distributed under the terms of the Ruby license.
+# You can freely distribute/modify this library.
+
+# ==Sphinx Client API
+#
+# The Sphinx Client API is used to communicate with <tt>searchd</tt>
+# daemon and get search results from Sphinx.
+#
+# ===Usage
+#
+# sphinx = Sphinx::Client.new
+# result = sphinx.Query('test')
+# ids = result['matches'].map { |match| match['id'] }.join(',')
+# posts = Post.find :all, :conditions => "id IN (#{ids})"
+#
+# docs = posts.map(&:body)
+# excerpts = sphinx.BuildExcerpts(docs, 'index', 'test')
+
+require 'socket'
+
+module Sphinx
+ # :stopdoc:
+
+ class SphinxError < StandardError; end
+ class SphinxArgumentError < SphinxError; end
+ class SphinxConnectError < SphinxError; end
+ class SphinxResponseError < SphinxError; end
+ class SphinxInternalError < SphinxError; end
+ class SphinxTemporaryError < SphinxError; end
+ class SphinxUnknownError < SphinxError; end
+
+ # :startdoc:
+
+ class Client
+
+ # :stopdoc:
+
+ # Known searchd commands
+
+ # search command
+ SEARCHD_COMMAND_SEARCH = 0
+ # excerpt command
+ SEARCHD_COMMAND_EXCERPT = 1
+ # update command
+ SEARCHD_COMMAND_UPDATE = 2
+ # keywords command
+ SEARCHD_COMMAND_KEYWORDS = 3
+
+ # Current client-side command implementation versions
+
+ # search command version
+ VER_COMMAND_SEARCH = 0x116
+ # excerpt command version
+ VER_COMMAND_EXCERPT = 0x100
+ # update command version
+ VER_COMMAND_UPDATE = 0x102
+ # keywords command version
+ VER_COMMAND_KEYWORDS = 0x100
+
+ # Known searchd status codes
+
+ # general success, command-specific reply follows
+ SEARCHD_OK = 0
+ # general failure, command-specific reply may follow
+ SEARCHD_ERROR = 1
+ # temporary failure, client should retry later
+ SEARCHD_RETRY = 2
+ # general success, warning message and command-specific reply follow
+ SEARCHD_WARNING = 3
+
+ # :startdoc:
+
+ # Known match modes
+
+ # match all query words
+ SPH_MATCH_ALL = 0
+ # match any query word
+ SPH_MATCH_ANY = 1
+ # match this exact phrase
+ SPH_MATCH_PHRASE = 2
+ # match this boolean query
+ SPH_MATCH_BOOLEAN = 3
+ # match this extended query
+ SPH_MATCH_EXTENDED = 4
+ # match all document IDs w/o fulltext query, apply filters
+ SPH_MATCH_FULLSCAN = 5
+ # extended engine V2 (TEMPORARY, WILL BE REMOVED IN 0.9.8-RELEASE)
+ SPH_MATCH_EXTENDED2 = 6
+
+ # Known ranking modes (ext2 only)
+
+ # default mode, phrase proximity major factor and BM25 minor one
+ SPH_RANK_PROXIMITY_BM25 = 0
+ # statistical mode, BM25 ranking only (faster but worse quality)
+ SPH_RANK_BM25 = 1
+ # no ranking, all matches get a weight of 1
+ SPH_RANK_NONE = 2
+ # simple word-count weighting, rank is a weighted sum of per-field keyword occurence counts
+ SPH_RANK_WORDCOUNT = 3
+ # phrase proximity
+ SPH_RANK_PROXIMITY = 4
+
+ # Known sort modes
+
+ # sort by document relevance desc, then by date
+ SPH_SORT_RELEVANCE = 0
+ # sort by document date desc, then by relevance desc
+ SPH_SORT_ATTR_DESC = 1
+ # sort by document date asc, then by relevance desc
+ SPH_SORT_ATTR_ASC = 2
+ # sort by time segments (hour/day/week/etc) desc, then by relevance desc
+ SPH_SORT_TIME_SEGMENTS = 3
+ # sort by SQL-like expression (eg. "@relevance DESC, price ASC, @id DESC")
+ SPH_SORT_EXTENDED = 4
+ # sort by arithmetic expression in descending order (eg. "@id + max(@weight,1000)*boost + log(price)")
+ SPH_SORT_EXPR = 5
+
+ # Known filter types
+
+ # filter by integer values set
+ SPH_FILTER_VALUES = 0
+ # filter by integer range
+ SPH_FILTER_RANGE = 1
+ # filter by float range
+ SPH_FILTER_FLOATRANGE = 2
+
+ # Known attribute types
+
+ # this attr is just an integer
+ SPH_ATTR_INTEGER = 1
+ # this attr is a timestamp
+ SPH_ATTR_TIMESTAMP = 2
+ # this attr is an ordinal string number (integer at search time,
+ # specially handled at indexing time)
+ SPH_ATTR_ORDINAL = 3
+ # this attr is a boolean bit field
+ SPH_ATTR_BOOL = 4
+ # this attr is a float
+ SPH_ATTR_FLOAT = 5
+ # signed 64-bit integer
+ SPH_ATTR_BIGINT = 6
+ # this attr has multiple values (0 or more)
+ SPH_ATTR_MULTI = 0x40000000
+
+ # Known grouping functions
+
+ # group by day
+ SPH_GROUPBY_DAY = 0
+ # group by week
+ SPH_GROUPBY_WEEK = 1
+ # group by month
+ SPH_GROUPBY_MONTH = 2
+ # group by year
+ SPH_GROUPBY_YEAR = 3
+ # group by attribute value
+ SPH_GROUPBY_ATTR = 4
+ # group by sequential attrs pair
+ SPH_GROUPBY_ATTRPAIR = 5
+
+ # Constructs the <tt>Sphinx::Client</tt> object and sets options to their default values.
+ def initialize
+ # per-client-object settings
+ @host = 'localhost' # searchd host (default is "localhost")
+ @port = 9312 # searchd port (default is 9312)
+
+ # per-query settings
+ @offset = 0 # how many records to seek from result-set start (default is 0)
+ @limit = 20 # how many records to return from result-set starting at offset (default is 20)
+ @mode = SPH_MATCH_ALL # query matching mode (default is SPH_MATCH_ALL)
+ @weights = [] # per-field weights (default is 1 for all fields)
+ @sort = SPH_SORT_RELEVANCE # match sorting mode (default is SPH_SORT_RELEVANCE)
+ @sortby = '' # attribute to sort by (defualt is "")
+ @min_id = 0 # min ID to match (default is 0, which means no limit)
+ @max_id = 0 # max ID to match (default is 0, which means no limit)
+ @filters = [] # search filters
+ @groupby = '' # group-by attribute name
+ @groupfunc = SPH_GROUPBY_DAY # function to pre-process group-by attribute value with
+ @groupsort = '@group desc' # group-by sorting clause (to sort groups in result set with)
+ @groupdistinct = '' # group-by count-distinct attribute
+ @maxmatches = 1000 # max matches to retrieve
+ @cutoff = 0 # cutoff to stop searching at (default is 0)
+ @retrycount = 0 # distributed retries count
+ @retrydelay = 0 # distributed retries delay
+ @anchor = [] # geographical anchor point
+ @indexweights = [] # per-index weights
+ @ranker = SPH_RANK_PROXIMITY_BM25 # ranking mode (default is SPH_RANK_PROXIMITY_BM25)
+ @maxquerytime = 0 # max query time, milliseconds (default is 0, do not limit)
+ @fieldweights = {} # per-field-name weights
+ @overrides = [] # per-query attribute values overrides
+ @select = '*' # select-list (attributes or expressions, with optional aliases)
+
+ # per-reply fields (for single-query case)
+ @error = '' # last error message
+ @warning = '' # last warning message
+
+ @reqs = [] # requests storage (for multi-query case)
+ @mbenc = '' # stored mbstring encoding
+ end
+
+ # Get last error message.
+ def GetLastError
+ @error
+ end
+
+ # Get last warning message.
+ def GetLastWarning
+ @warning
+ end
+
+ # Set searchd host name (string) and port (integer).
+ def SetServer(host, port)
+ assert { host.instance_of? String }
+ assert { port.instance_of? Fixnum }
+
+ @host = host
+ @port = port
+ end
+
+ # Set offset and count into result set,
+ # and optionally set max-matches and cutoff limits.
+ def SetLimits(offset, limit, max = 0, cutoff = 0)
+ assert { offset.instance_of? Fixnum }
+ assert { limit.instance_of? Fixnum }
+ assert { max.instance_of? Fixnum }
+ assert { offset >= 0 }
+ assert { limit > 0 }
+ assert { max >= 0 }
+
+ @offset = offset
+ @limit = limit
+ @maxmatches = max if max > 0
+ @cutoff = cutoff if cutoff > 0
+ end
+
+ # Set maximum query time, in milliseconds, per-index,
+ # integer, 0 means "do not limit"
+ def SetMaxQueryTime(max)
+ assert { max.instance_of? Fixnum }
+ assert { max >= 0 }
+ @maxquerytime = max
+ end
+
+ # Set matching mode.
+ def SetMatchMode(mode)
+ assert { mode == SPH_MATCH_ALL \
+ || mode == SPH_MATCH_ANY \
+ || mode == SPH_MATCH_PHRASE \
+ || mode == SPH_MATCH_BOOLEAN \
+ || mode == SPH_MATCH_EXTENDED \
+ || mode == SPH_MATCH_FULLSCAN \
+ || mode == SPH_MATCH_EXTENDED2 }
+
+ @mode = mode
+ end
+
+ # Set ranking mode.
+ def SetRankingMode(ranker)
+ assert { ranker == SPH_RANK_PROXIMITY_BM25 \
+ || ranker == SPH_RANK_BM25 \
+ || ranker == SPH_RANK_NONE \
+ || ranker == SPH_RANK_WORDCOUNT \
+ || ranker == SPH_RANK_PROXIMITY }
+
+ @ranker = ranker
+ end
+
+ # Set matches sorting mode.
+ def SetSortMode(mode, sortby = '')
+ assert { mode == SPH_SORT_RELEVANCE \
+ || mode == SPH_SORT_ATTR_DESC \
+ || mode == SPH_SORT_ATTR_ASC \
+ || mode == SPH_SORT_TIME_SEGMENTS \
+ || mode == SPH_SORT_EXTENDED \
+ || mode == SPH_SORT_EXPR }
+ assert { sortby.instance_of? String }
+ assert { mode == SPH_SORT_RELEVANCE || !sortby.empty? }
+
+ @sort = mode
+ @sortby = sortby
+ end
+
+ # Bind per-field weights by order.
+ #
+ # DEPRECATED; use SetFieldWeights() instead.
+ def SetWeights(weights)
+ assert { weights.instance_of? Array }
+ weights.each do |weight|
+ assert { weight.instance_of? Fixnum }
+ end
+
+ @weights = weights
+ end
+
+ # Bind per-field weights by name.
+ #
+ # Takes string (field name) to integer name (field weight) hash as an argument.
+ # * Takes precedence over SetWeights().
+ # * Unknown names will be silently ignored.
+ # * Unbound fields will be silently given a weight of 1.
+ def SetFieldWeights(weights)
+ assert { weights.instance_of? Hash }
+ weights.each do |name, weight|
+ assert { name.instance_of? String }
+ assert { weight.instance_of? Fixnum }
+ end
+
+ @fieldweights = weights
+ end
+
+ # Bind per-index weights by name.
+ def SetIndexWeights(weights)
+ assert { weights.instance_of? Hash }
+ weights.each do |index, weight|
+ assert { index.instance_of? String }
+ assert { weight.instance_of? Fixnum }
+ end
+
+ @indexweights = weights
+ end
+
+ # Set IDs range to match.
+ #
+ # Only match records if document ID is beetwen <tt>min_id</tt> and <tt>max_id</tt> (inclusive).
+ def SetIDRange(min, max)
+ assert { min.instance_of?(Fixnum) or min.instance_of?(Bignum) }
+ assert { max.instance_of?(Fixnum) or max.instance_of?(Bignum) }
+ assert { min <= max }
+
+ @min_id = min
+ @max_id = max
+ end
+
+ # Set values filter.
+ #
+ # Only match those records where <tt>attribute</tt> column values
+ # are in specified set.
+ def SetFilter(attribute, values, exclude = false)
+ assert { attribute.instance_of? String }
+ assert { values.instance_of? Array }
+ assert { !values.empty? }
+
+ if values.instance_of?(Array) && values.size > 0
+ values.each do |value|
+ assert { value.instance_of? Fixnum }
+ end
+
+ @filters << { 'type' => SPH_FILTER_VALUES, 'attr' => attribute, 'exclude' => exclude, 'values' => values }
+ end
+ end
+
+ # Set range filter.
+ #
+ # Only match those records where <tt>attribute</tt> column value
+ # is beetwen <tt>min</tt> and <tt>max</tt> (including <tt>min</tt> and <tt>max</tt>).
+ def SetFilterRange(attribute, min, max, exclude = false)
+ assert { attribute.instance_of? String }
+ assert { min.instance_of? Fixnum or min.instance_of? Bignum }
+ assert { max.instance_of? Fixnum or max.instance_of? Bignum }
+ assert { min <= max }
+
+ @filters << { 'type' => SPH_FILTER_RANGE, 'attr' => attribute, 'exclude' => exclude, 'min' => min, 'max' => max }
+ end
+
+ # Set float range filter.
+ #
+ # Only match those records where <tt>attribute</tt> column value
+ # is beetwen <tt>min</tt> and <tt>max</tt> (including <tt>min</tt> and <tt>max</tt>).
+ def SetFilterFloatRange(attribute, min, max, exclude = false)
+ assert { attribute.instance_of? String }
+ assert { min.instance_of? Float }
+ assert { max.instance_of? Float }
+ assert { min <= max }
+
+ @filters << { 'type' => SPH_FILTER_FLOATRANGE, 'attr' => attribute, 'exclude' => exclude, 'min' => min, 'max' => max }
+ end
+
+ # Setup anchor point for geosphere distance calculations.
+ #
+ # Required to use <tt>@geodist</tt> in filters and sorting
+ # distance will be computed to this point. Latitude and longitude
+ # must be in radians.
+ #
+ # * <tt>attrlat</tt> -- is the name of latitude attribute
+ # * <tt>attrlong</tt> -- is the name of longitude attribute
+ # * <tt>lat</tt> -- is anchor point latitude, in radians
+ # * <tt>long</tt> -- is anchor point longitude, in radians
+ def SetGeoAnchor(attrlat, attrlong, lat, long)
+ assert { attrlat.instance_of? String }
+ assert { attrlong.instance_of? String }
+ assert { lat.instance_of? Float }
+ assert { long.instance_of? Float }
+
+ @anchor = { 'attrlat' => attrlat, 'attrlong' => attrlong, 'lat' => lat, 'long' => long }
+ end
+
+ # Set grouping attribute and function.
+ #
+ # In grouping mode, all matches are assigned to different groups
+ # based on grouping function value.
+ #
+ # Each group keeps track of the total match count, and the best match
+ # (in this group) according to current sorting function.
+ #
+ # The final result set contains one best match per group, with
+ # grouping function value and matches count attached.
+ #
+ # Groups in result set could be sorted by any sorting clause,
+ # including both document attributes and the following special
+ # internal Sphinx attributes:
+ #
+ # * @id - match document ID;
+ # * @weight, @rank, @relevance - match weight;
+ # * @group - groupby function value;
+ # * @count - amount of matches in group.
+ #
+ # the default mode is to sort by groupby value in descending order,
+ # ie. by '@group desc'.
+ #
+ # 'total_found' would contain total amount of matching groups over
+ # the whole index.
+ #
+ # WARNING: grouping is done in fixed memory and thus its results
+ # are only approximate; so there might be more groups reported
+ # in total_found than actually present. @count might also
+ # be underestimated.
+ #
+ # For example, if sorting by relevance and grouping by "published"
+ # attribute with SPH_GROUPBY_DAY function, then the result set will
+ # contain one most relevant match per each day when there were any
+ # matches published, with day number and per-day match count attached,
+ # and sorted by day number in descending order (ie. recent days first).
+ def SetGroupBy(attribute, func, groupsort = '@group desc')
+ assert { attribute.instance_of? String }
+ assert { groupsort.instance_of? String }
+ assert { func == SPH_GROUPBY_DAY \
+ || func == SPH_GROUPBY_WEEK \
+ || func == SPH_GROUPBY_MONTH \
+ || func == SPH_GROUPBY_YEAR \
+ || func == SPH_GROUPBY_ATTR \
+ || func == SPH_GROUPBY_ATTRPAIR }
+
+ @groupby = attribute
+ @groupfunc = func
+ @groupsort = groupsort
+ end
+
+ # Set count-distinct attribute for group-by queries.
+ def SetGroupDistinct(attribute)
+ assert { attribute.instance_of? String }
+ @groupdistinct = attribute
+ end
+
+ # Set distributed retries count and delay.
+ def SetRetries(count, delay = 0)
+ assert { count.instance_of? Fixnum }
+ assert { delay.instance_of? Fixnum }
+
+ @retrycount = count
+ @retrydelay = delay
+ end
+
+ # Set attribute values override
+ #
+ # There can be only one override per attribute.
+ # +values+ must be a hash that maps document IDs to attribute values.
+ def SetOverride(attrname, attrtype, values)
+ assert { attrname.instance_of? String }
+ assert { [SPH_ATTR_INTEGER, SPH_ATTR_TIMESTAMP, SPH_ATTR_BOOL, SPH_ATTR_FLOAT, SPH_ATTR_BIGINT].include?(attrtype) }
+ assert { values.instance_of? Hash }
+
+ @overrides << { 'attr' => attrname, 'type' => attrtype, 'values' => values }
+ end
+
+ # Set select-list (attributes or expressions), SQL-like syntax.
+ def SetSelect(select)
+ assert { select.instance_of? String }
+ @select = select
+ end
+
+ # Clear all filters (for multi-queries).
+ def ResetFilters
+ @filters = []
+ @anchor = []
+ end
+
+ # Clear groupby settings (for multi-queries).
+ def ResetGroupBy
+ @groupby = ''
+ @groupfunc = SPH_GROUPBY_DAY
+ @groupsort = '@group desc'
+ @groupdistinct = ''
+ end
+
+ # Clear all attribute value overrides (for multi-queries).
+ def ResetOverrides
+ @overrides = []
+ end
+
+ # Connect to searchd server and run given search query.
+ #
+ # <tt>query</tt> is query string
+
+ # <tt>index</tt> is index name (or names) to query. default value is "*" which means
+ # to query all indexes. Accepted characters for index names are letters, numbers,
+ # dash, and underscore; everything else is considered a separator. Therefore,
+ # all the following calls are valid and will search two indexes:
+ #
+ # sphinx.Query('test query', 'main delta')
+ # sphinx.Query('test query', 'main;delta')
+ # sphinx.Query('test query', 'main, delta')
+ #
+ # Index order matters. If identical IDs are found in two or more indexes,
+ # weight and attribute values from the very last matching index will be used
+ # for sorting and returning to client. Therefore, in the example above,
+ # matches from "delta" index will always "win" over matches from "main".
+ #
+ # Returns false on failure.
+ # Returns hash which has the following keys on success:
+ #
+ # * <tt>'matches'</tt> -- array of hashes {'weight', 'group', 'id'}, where 'id' is document_id.
+ # * <tt>'total'</tt> -- total amount of matches retrieved (upto SPH_MAX_MATCHES, see sphinx.h)
+ # * <tt>'total_found'</tt> -- total amount of matching documents in index
+ # * <tt>'time'</tt> -- search time
+ # * <tt>'words'</tt> -- hash which maps query terms (stemmed!) to ('docs', 'hits') hash
+ def Query(query, index = '*', comment = '')
+ assert { @reqs.empty? }
+ @reqs = []
+
+ self.AddQuery(query, index, comment)
+ results = self.RunQueries
+
+ # probably network error; error message should be already filled
+ return false unless results.instance_of?(Array)
+
+ @error = results[0]['error']
+ @warning = results[0]['warning']
+
+ return false if results[0]['status'] == SEARCHD_ERROR
+ return results[0]
+ end
+
+ # Add query to batch.
+ #
+ # Batch queries enable searchd to perform internal optimizations,
+ # if possible; and reduce network connection overheads in all cases.
+ #
+ # For instance, running exactly the same query with different
+ # groupby settings will enable searchd to perform expensive
+ # full-text search and ranking operation only once, but compute
+ # multiple groupby results from its output.
+ #
+ # Parameters are exactly the same as in <tt>Query</tt> call.
+ # Returns index to results array returned by <tt>RunQueries</tt> call.
+ def AddQuery(query, index = '*', comment = '')
+ # build request
+
+ # mode and limits
+ request = Request.new
+ request.put_int @offset, @limit, @mode, @ranker, @sort
+ request.put_string @sortby
+ # query itself
+ request.put_string query
+ # weights
+ request.put_int_array @weights
+ # indexes
+ request.put_string index
+ # id64 range marker
+ request.put_int 1
+ # id64 range
+ request.put_int64 @min_id.to_i, @max_id.to_i
+
+ # filters
+ request.put_int @filters.length
+ @filters.each do |filter|
+ request.put_string filter['attr']
+ request.put_int filter['type']
+
+ case filter['type']
+ when SPH_FILTER_VALUES
+ request.put_int64_array filter['values']
+ when SPH_FILTER_RANGE
+ request.put_int64 filter['min'], filter['max']
+ when SPH_FILTER_FLOATRANGE
+ request.put_float filter['min'], filter['max']
+ else
+ raise SphinxInternalError, 'Internal error: unhandled filter type'
+ end
+ request.put_int filter['exclude'] ? 1 : 0
+ end
+
+ # group-by clause, max-matches count, group-sort clause, cutoff count
+ request.put_int @groupfunc
+ request.put_string @groupby
+ request.put_int @maxmatches
+ request.put_string @groupsort
+ request.put_int @cutoff, @retrycount, @retrydelay
+ request.put_string @groupdistinct
+
+ # anchor point
+ if @anchor.empty?
+ request.put_int 0
+ else
+ request.put_int 1
+ request.put_string @anchor['attrlat'], @anchor['attrlong']
+ request.put_float @anchor['lat'], @anchor['long']
+ end
+
+ # per-index weights
+ request.put_int @indexweights.length
+ @indexweights.each do |idx, weight|
+ request.put_string idx
+ request.put_int weight
+ end
+
+ # max query time
+ request.put_int @maxquerytime
+
+ # per-field weights
+ request.put_int @fieldweights.length
+ @fieldweights.each do |field, weight|
+ request.put_string field
+ request.put_int weight
+ end
+
+ # comment
+ request.put_string comment
+
+ # attribute overrides
+ request.put_int @overrides.length
+ for entry in @overrides do
+ request.put_string entry['attr']
+ request.put_int entry['type'], entry['values'].size
+ entry['values'].each do |id, val|
+ assert { id.instance_of?(Fixnum) || id.instance_of?(Bignum) }
+ assert { val.instance_of?(Fixnum) || val.instance_of?(Bignum) || val.instance_of?(Float) }
+
+ request.put_int64 id
+ case entry['type']
+ when SPH_ATTR_FLOAT
+ request.put_float val
+ when SPH_ATTR_BIGINT
+ request.put_int64 val
+ else
+ request.put_int val
+ end
+ end
+ end
+
+ # select-list
+ request.put_string @select
+
+ # store request to requests array
+ @reqs << request.to_s;
+ return @reqs.length - 1
+ end
+
+ # Run queries batch.
+ #
+ # Returns an array of result sets on success.
+ # Returns false on network IO failure.
+ #
+ # Each result set in returned array is a hash which contains
+ # the same keys as the hash returned by <tt>Query</tt>, plus:
+ #
+ # * <tt>'error'</tt> -- search error for this query
+ # * <tt>'words'</tt> -- hash which maps query terms (stemmed!) to ( "docs", "hits" ) hash
+ def RunQueries
+ if @reqs.empty?
+ @error = 'No queries defined, issue AddQuery() first'
+ return false
+ end
+
+ req = @reqs.join('')
+ nreqs = @reqs.length
+ @reqs = []
+ response = PerformRequest(:search, req, nreqs)
+
+ # parse response
+ begin
+ results = []
+ ires = 0
+ while ires < nreqs
+ ires += 1
+ result = {}
+
+ result['error'] = ''
+ result['warning'] = ''
+
+ # extract status
+ status = result['status'] = response.get_int
+ if status != SEARCHD_OK
+ message = response.get_string
+ if status == SEARCHD_WARNING
+ result['warning'] = message
+ else
+ result['error'] = message
+ results << result
+ next
+ end
+ end
+
+ # read schema
+ fields = []
+ attrs = {}
+ attrs_names_in_order = []
+
+ nfields = response.get_int
+ while nfields > 0
+ nfields -= 1
+ fields << response.get_string
+ end
+ result['fields'] = fields
+
+ nattrs = response.get_int
+ while nattrs > 0
+ nattrs -= 1
+ attr = response.get_string
+ type = response.get_int
+ attrs[attr] = type
+ attrs_names_in_order << attr
+ end
+ result['attrs'] = attrs
+
+ # read match count
+ count = response.get_int
+ id64 = response.get_int
+
+ # read matches
+ result['matches'] = []
+ while count > 0
+ count -= 1
+
+ if id64 != 0
+ doc = response.get_int64
+ weight = response.get_int
+ else
+ doc, weight = response.get_ints(2)
+ end
+
+ r = {} # This is a single result put in the result['matches'] array
+ r['id'] = doc
+ r['weight'] = weight
+ attrs_names_in_order.each do |a|
+ r['attrs'] ||= {}
+
+ case attrs[a]
+ when SPH_ATTR_BIGINT
+ # handle 64-bit ints
+ r['attrs'][a] = response.get_int64
+ when SPH_ATTR_FLOAT
+ # handle floats
+ r['attrs'][a] = response.get_float
+ else
+ # handle everything else as unsigned ints
+ val = response.get_int
+ if (attrs[a] & SPH_ATTR_MULTI) != 0
+ r['attrs'][a] = []
+ 1.upto(val) do
+ r['attrs'][a] << response.get_int
+ end
+ else
+ r['attrs'][a] = val
+ end
+ end
+ end
+ result['matches'] << r
+ end
+ result['total'], result['total_found'], msecs, words = response.get_ints(4)
+ result['time'] = '%.3f' % (msecs / 1000.0)
+
+ result['words'] = {}
+ while words > 0
+ words -= 1
+ word = response.get_string
+ docs, hits = response.get_ints(2)
+ result['words'][word] = { 'docs' => docs, 'hits' => hits }
+ end
+
+ results << result
+ end
+ #rescue EOFError
+ # @error = 'incomplete reply'
+ # raise SphinxResponseError, @error
+ end
+
+ return results
+ end
+
+ # Connect to searchd server and generate exceprts from given documents.
+ #
+ # * <tt>docs</tt> -- an array of strings which represent the documents' contents
+ # * <tt>index</tt> -- a string specifiying the index which settings will be used
+ # for stemming, lexing and case folding
+ # * <tt>words</tt> -- a string which contains the words to highlight
+ # * <tt>opts</tt> is a hash which contains additional optional highlighting parameters.
+ #
+ # You can use following parameters:
+ # * <tt>'before_match'</tt> -- a string to insert before a set of matching words, default is "<b>"
+ # * <tt>'after_match'</tt> -- a string to insert after a set of matching words, default is "<b>"
+ # * <tt>'chunk_separator'</tt> -- a string to insert between excerpts chunks, default is " ... "
+ # * <tt>'limit'</tt> -- max excerpt size in symbols (codepoints), default is 256
+ # * <tt>'around'</tt> -- how much words to highlight around each match, default is 5
+ # * <tt>'exact_phrase'</tt> -- whether to highlight exact phrase matches only, default is <tt>false</tt>
+ # * <tt>'single_passage'</tt> -- whether to extract single best passage only, default is false
+ # * <tt>'use_boundaries'</tt> -- whether to extract passages by phrase boundaries setup in tokenizer
+ # * <tt>'weight_order'</tt> -- whether to order best passages in document (default) or weight order
+ #
+ # Returns false on failure.
+ # Returns an array of string excerpts on success.
+ def BuildExcerpts(docs, index, words, opts = {})
+ assert { docs.instance_of? Array }
+ assert { index.instance_of? String }
+ assert { words.instance_of? String }
+ assert { opts.instance_of? Hash }
+
+ # fixup options
+ opts['before_match'] ||= '<b>';
+ opts['after_match'] ||= '</b>';
+ opts['chunk_separator'] ||= ' ... ';
+ opts['limit'] ||= 256;
+ opts['around'] ||= 5;
+ opts['exact_phrase'] ||= false
+ opts['single_passage'] ||= false
+ opts['use_boundaries'] ||= false
+ opts['weight_order'] ||= false
+
+ # build request
+
+ # v.1.0 req
+ flags = 1
+ flags |= 2 if opts['exact_phrase']
+ flags |= 4 if opts['single_passage']
+ flags |= 8 if opts['use_boundaries']
+ flags |= 16 if opts['weight_order']
+
+ request = Request.new
+ request.put_int 0, flags # mode=0, flags=1 (remove spaces)
+ # req index
+ request.put_string index
+ # req words
+ request.put_string words
+
+ # options
+ request.put_string opts['before_match']
+ request.put_string opts['after_match']
+ request.put_string opts['chunk_separator']
+ request.put_int opts['limit'].to_i, opts['around'].to_i
+
+ # documents
+ request.put_int docs.size
+ docs.each do |doc|
+ assert { doc.instance_of? String }
+
+ request.put_string doc
+ end
+
+ response = PerformRequest(:excerpt, request)
+
+ # parse response
+ begin
+ res = []
+ docs.each do |doc|
+ res << response.get_string
+ end
+ rescue EOFError
+ @error = 'incomplete reply'
+ raise SphinxResponseError, @error
+ end
+ return res
+ end
+
+ # Connect to searchd server, and generate keyword list for a given query.
+ #
+ # Returns an array of words on success.
+ def BuildKeywords(query, index, hits)
+ assert { query.instance_of? String }
+ assert { index.instance_of? String }
+ assert { hits.instance_of?(TrueClass) || hits.instance_of?(FalseClass) }
+
+ # build request
+ request = Request.new
+ # v.1.0 req
+ request.put_string query # req query
+ request.put_string index # req index
+ request.put_int hits ? 1 : 0
+
+ response = PerformRequest(:keywords, request)
+
+ # parse response
+ begin
+ res = []
+ nwords = response.get_int
+ 0.upto(nwords - 1) do |i|
+ tokenized = response.get_string
+ normalized = response.get_string
+
+ entry = { 'tokenized' => tokenized, 'normalized' => normalized }
+ entry['docs'], entry['hits'] = response.get_ints(2) if hits
+
+ res << entry
+ end
+ rescue EOFError
+ @error = 'incomplete reply'
+ raise SphinxResponseError, @error
+ end
+
+ return res
+ end
+
+ # Batch update given attributes in given rows in given indexes.
+ #
+ # * +index+ is a name of the index to be updated
+ # * +attrs+ is an array of attribute name strings.
+ # * +values+ is a hash where key is document id, and value is an array of
+ # * +mva+ identifies whether update MVA
+ # new attribute values
+ #
+ # Returns number of actually updated documents (0 or more) on success.
+ # Returns -1 on failure.
+ #
+ # Usage example:
+ # sphinx.UpdateAttributes('test1', ['group_id'], { 1 => [456] })
+ def UpdateAttributes(index, attrs, values, mva = false)
+ # verify everything
+ assert { index.instance_of? String }
+ assert { mva.instance_of?(TrueClass) || mva.instance_of?(FalseClass) }
+
+ assert { attrs.instance_of? Array }
+ attrs.each do |attr|
+ assert { attr.instance_of? String }
+ end
+
+ assert { values.instance_of? Hash }
+ values.each do |id, entry|
+ assert { id.instance_of? Fixnum }
+ assert { entry.instance_of? Array }
+ assert { entry.length == attrs.length }
+ entry.each do |v|
+ if mva
+ assert { v.instance_of? Array }
+ v.each { |vv| assert { vv.instance_of? Fixnum } }
+ else
+ assert { v.instance_of? Fixnum }
+ end
+ end
+ end
+
+ # build request
+ request = Request.new
+ request.put_string index
+
+ request.put_int attrs.length
+ for attr in attrs
+ request.put_string attr
+ request.put_int mva ? 1 : 0
+ end
+
+ request.put_int values.length
+ values.each do |id, entry|
+ request.put_int64 id
+ if mva
+ entry.each { |v| request.put_int_array v }
+ else
+ request.put_int(*entry)
+ end
+ end
+
+ response = PerformRequest(:update, request)
+
+ # parse response
+ begin
+ return response.get_int
+ rescue EOFError
+ @error = 'incomplete reply'
+ raise SphinxResponseError, @error
+ end
+ end
+
+ protected
+
+ # Connect to searchd server.
+ def Connect
+ begin
+ sock = TCPSocket.new(@host, @port)
+ rescue
+ @error = "connection to #{@host}:#{@port} failed"
+ raise SphinxConnectError, @error
+ end
+
+ v = sock.recv(4).unpack('N*').first
+ if v < 1
+ sock.close
+ @error = "expected searchd protocol version 1+, got version '#{v}'"
+ raise SphinxConnectError, @error
+ end
+
+ sock.send([1].pack('N'), 0)
+ sock
+ end
+
    # Get and check response packet from searchd server.
    #
    # * <tt>sock</tt> -- open socket as returned by Connect
    # * <tt>client_version</tt> -- command version the client speaks; an
    #   older server version only produces a warning
    #
    # Returns the raw response body (with the warning prefix stripped when
    # the status is SEARCHD_WARNING). Raises a Sphinx*Error subclass on
    # truncated replies and error statuses. Always closes the socket.
    def GetResponse(sock, client_version)
      response = ''
      len = 0

      # header: 16-bit status, 16-bit version, 32-bit body length
      header = sock.recv(8)
      if header.length == 8
        status, ver, len = header.unpack('n2N')
        left = len.to_i
        # keep reading until the announced body length is consumed
        # NOTE(review): recv returning an empty string (peer closed early)
        # would loop here forever -- this relies on EOFError being raised.
        while left > 0 do
          begin
            chunk = sock.recv(left)
            if chunk
              response << chunk
              left -= chunk.length
            end
          rescue EOFError
            break
          end
        end
      end
      sock.close

      # check response
      read = response.length
      if response.empty? or read != len.to_i
        @error = response.empty? \
          ? 'received zero-sized searchd response' \
          : "failed to read searchd response (status=#{status}, ver=#{ver}, len=#{len}, read=#{read})"
        raise SphinxResponseError, @error
      end

      # check status: a warning packet embeds a length-prefixed warning
      # message in front of the actual body
      if (status == SEARCHD_WARNING)
        wlen = response[0, 4].unpack('N*').first
        @warning = response[4, wlen]
        return response[4 + wlen, response.length - 4 - wlen]
      end

      # error/retry packets carry the message after a 4-byte prefix
      if status == SEARCHD_ERROR
        @error = 'searchd error: ' + response[4, response.length - 4]
        raise SphinxInternalError, @error
      end

      if status == SEARCHD_RETRY
        @error = 'temporary searchd error: ' + response[4, response.length - 4]
        raise SphinxTemporaryError, @error
      end

      unless status == SEARCHD_OK
        @error = "unknown status code: '#{status}'"
        raise SphinxUnknownError, @error
      end

      # check version (high byte = major, low byte = minor)
      if ver < client_version
        @warning = "searchd command v.#{ver >> 8}.#{ver & 0xff} older than client's " +
          "v.#{client_version >> 8}.#{client_version & 0xff}, some options might not work"
      end

      return response
    end
+
+ # Connect, send query, get response.
+ def PerformRequest(command, request, additional = nil)
+ cmd = command.to_s.upcase
+ command_id = Sphinx::Client.const_get('SEARCHD_COMMAND_' + cmd)
+ command_ver = Sphinx::Client.const_get('VER_COMMAND_' + cmd)
+
+ sock = self.Connect
+ len = request.to_s.length + (additional != nil ? 4 : 0)
+ header = [command_id, command_ver, len].pack('nnN')
+ header << [additional].pack('N') if additional != nil
+ sock.send(header + request.to_s, 0)
+ response = self.GetResponse(sock, command_ver)
+ return Response.new(response)
+ end
+
+ # :stopdoc:
+ def assert
+ raise 'Assertion failed!' unless yield if $DEBUG
+ end
+ # :startdoc:
+ end
+end
View
50 vendor/plugins/sphinx/lib/sphinx/request.rb
@@ -0,0 +1,50 @@
module Sphinx
  # Pack ints, floats, strings, and arrays to the internal binary
  # representation (big-endian / network byte order) expected by the
  # Sphinx search engine.
  class Request
    # Initialize new (empty) request.
    def initialize
      @request = ''
    end

    # Put int(s) to request as 32-bit big-endian values.
    def put_int(*ints)
      ints.each { |i| @request << [i].pack('N') }
    end

    # Put 64-bit int(s) to request as big-endian values.
    # Masking to 64 bits keeps negative numbers in two's-complement form,
    # and splitting into two 'N' halves is endian-independent (the old
    # pack('q').reverse trick was only correct on little-endian hosts).
    def put_int64(*ints)
      ints.each do |i|
        v = i & 0xFFFFFFFFFFFFFFFF
        @request << [v >> 32, v & 0xFFFFFFFF].pack('NN')
      end
    end

    # Put string(s) to request (32-bit byte length, then the raw bytes).
    # Uses bytesize where available so multibyte strings get a correct
    # length prefix (String#length counts characters on Ruby 1.9+).
    def put_string(*strings)
      strings.each do |s|
        len = s.respond_to?(:bytesize) ? s.bytesize : s.length
        @request << [len].pack('N') + s
      end
    end

    # Put float(s) to request (IEEE-754 single precision, big-endian).
    def put_float(*floats)
      floats.each do |f|
        t1 = [f].pack('f') # machine order
        t2 = t1.unpack('L*').first # int in machine order
        @request << [t2].pack('N')
      end
    end

    # Put array of ints to request (first length, then the array itself)
    def put_int_array(arr)
      put_int arr.length, *arr
    end

    # Put array of 64-bit ints to request (first length, then the array itself)
    def put_int64_array(arr)
      put_int arr.length
      put_int64(*arr)
    end

    # Returns the entire encoded message as a binary string.
    def to_s
      @request
    end
  end
end
View
69 vendor/plugins/sphinx/lib/sphinx/response.rb
@@ -0,0 +1,69 @@
module Sphinx
  # Unpack the internal binary Sphinx representation of ints, floats,
  # strings, and arrays produced by the searchd server.
  class Response
    # Current read offset within the response buffer.
    attr_reader :position
    # Total response size in bytes.
    attr_reader :size

    # Wrap a raw searchd response string for sequential reading.
    def initialize(response)
      @response = response
      @position = 0
      @size = response.length
    end

    # Returns <tt>true</tt> once every byte has been consumed.
    def eof?
      @position >= @size
    end

    # Get a single 32-bit big-endian unsigned int from the stream.
    def get_int
      get_ints(1).first
    end

    # Get a 64-bit big-endian unsigned int (two 32-bit halves).
    def get_int64
      high, low = read_chunk(8).unpack('N2')
      (high << 32) + low
    end

    # Get an array of <tt>count</tt> 32-bit big-endian unsigned ints.
    def get_ints(count)
      read_chunk(4 * count).unpack("N#{count}")
    end

    # Get a length-prefixed string from the stream.
    def get_string
      length = get_int
      length > 0 ? read_chunk(length) : ''
    end

    # Get an IEEE-754 single precision float (transmitted as a
    # big-endian 32-bit int, reinterpreted through native byte order).
    def get_float
      [get_int].pack('L').unpack('f*').first
    end

    private

    # Slice +length+ bytes off the buffer, advancing the read position.
    # Raises EOFError when fewer than +length+ bytes remain.
    def read_chunk(length)
      raise EOFError if @position + length > @size
      chunk = @response[@position, length]
      @position += length
      chunk
    end
  end
end
View
112 vendor/plugins/sphinx/spec/client_response_spec.rb
@@ -0,0 +1,112 @@
+require File.dirname(__FILE__) + '/../init'
+
+# To execute these tests you need to execute sphinx_test.sql and configure sphinx using sphinx.conf
+# (both files are placed under sphinx directory)
# Integration specs: run against a live searchd populated from
# sphinx_test.sql with the bundled sphinx.conf (see the comment above)
# and validate that responses parse into the documented result hash.
context 'The SphinxApi connected to Sphinx' do
  setup do
    @sphinx = Sphinx::Client.new
  end

  specify 'should parse response in Query method' do
    result = @sphinx.Query('wifi', 'test1')
    validate_results_wifi(result)
  end

  specify 'should process 64-bit keys in Query method' do
    result = @sphinx.Query('wifi', 'test2')
    result['total_found'].should == 3
    result['matches'].length.should == 3
    result['matches'][0]['id'].should == 4294967298
    result['matches'][1]['id'].should == 4294967299
    result['matches'][2]['id'].should == 4294967297
  end

  specify 'should parse batch-query responce in RunQueries method' do
    @sphinx.AddQuery('wifi', 'test1')
    @sphinx.AddQuery('gprs', 'test1')
    results = @sphinx.RunQueries
    results.should be_an_instance_of(Array)
    results.length.should == 2
    validate_results_wifi(results[0])
  end

  specify 'should parse response in BuildExcerpts method' do
    result = @sphinx.BuildExcerpts(['what the world', 'London is the capital of Great Britain'], 'test1', 'the')
    result.should == ['what <b>the</b> world', 'London is <b>the</b> capital of Great Britain']
  end

  specify 'should parse response in BuildKeywords method' do
    result = @sphinx.BuildKeywords('wifi gprs', 'test1', true)
    result.should == [
      { 'normalized' => 'wifi', 'tokenized' => 'wifi', 'hits' => 6, 'docs' => 3 },
      { 'normalized' => 'gprs', 'tokenized' => 'gprs', 'hits' => 1, 'docs' => 1 }
    ]
  end

  # NOTE: these specs mutate the index and restore the original value
  # afterwards so they stay repeatable.
  specify 'should parse response in UpdateAttributes method' do
    @sphinx.UpdateAttributes('test1', ['group_id'], { 2 => [1] }).should == 1
    result = @sphinx.Query('wifi', 'test1')
    result['matches'][0]['attrs']['group_id'].should == 1
    @sphinx.UpdateAttributes('test1', ['group_id'], { 2 => [2] }).should == 1
    result = @sphinx.Query('wifi', 'test1')
    result['matches'][0]['attrs']['group_id'].should == 2
  end

  specify 'should parse response in UpdateAttributes method with MVA' do
    @sphinx.UpdateAttributes('test1', ['tags'], { 2 => [[1, 2, 3, 4, 5, 6, 7, 8, 9]] }, true).should == 1
    result = @sphinx.Query('wifi', 'test1')
    result['matches'][0]['attrs']['tags'].should == [1, 2, 3, 4, 5, 6, 7, 8, 9]
    @sphinx.UpdateAttributes('test1', ['tags'], { 2 => [[5, 6, 7, 8]] }, true).should == 1
    result = @sphinx.Query('wifi', 'test1')
    result['matches'][0]['attrs']['tags'].should == [5, 6, 7, 8]
  end

  specify 'should process errors in Query method' do
    @sphinx.Query('wifi', 'fakeindex').should be_false
    @sphinx.GetLastError.length.should_not == 0
  end

  specify 'should process errors in RunQueries method' do
    @sphinx.AddQuery('wifi', 'fakeindex')
    r = @sphinx.RunQueries
    r[0]['error'].length.should_not == 0
  end

  # Shared assertions for the canonical 'wifi' query against test1.
  def validate_results_wifi(result)
    result['total_found'].should == 3
    result['matches'].length.should == 3
    result['time'].should_not be_nil
    result['attrs'].should == {
      'group_id' => Sphinx::Client::SPH_ATTR_INTEGER,
      'created_at' => Sphinx::Client::SPH_ATTR_TIMESTAMP,
      'rating' => Sphinx::Client::SPH_ATTR_FLOAT,
      'tags' => Sphinx::Client::SPH_ATTR_MULTI | Sphinx::Client::SPH_ATTR_INTEGER
    }
    result['fields'].should == [ 'name', 'description' ]
    result['total'].should == 3
    result['matches'].should be_an_instance_of(Array)

    result['matches'][0]['id'].should == 2
    result['matches'][0]['weight'].should == 2
    result['matches'][0]['attrs']['group_id'].should == 2
    result['matches'][0]['attrs']['created_at'].should == 1175658555
    result['matches'][0]['attrs']['tags'].should == [5, 6, 7, 8]
    ('%0.2f' % result['matches'][0]['attrs']['rating']).should == '54.85'

    result['matches'][1]['id'].should == 3
    result['matches'][1]['weight'].should == 2
    result['matches'][1]['attrs']['group_id'].should == 1
    result['matches'][1]['attrs']['created_at'].should == 1175658647
    result['matches'][1]['attrs']['tags'].should == [1, 7, 9, 10]
    ('%0.2f' % result['matches'][1]['attrs']['rating']).should == '16.25'

    result['matches'][2]['id'].should == 1
    result['matches'][2]['weight'].should == 1
    result['matches'][2]['attrs']['group_id'].should == 1
    result['matches'][2]['attrs']['created_at'].should == 1175658490
    result['matches'][2]['attrs']['tags'].should == [1, 2, 3, 4]
    ('%0.2f' % result['matches'][2]['attrs']['rating']).should == '13.32'

    result['words'].should == { 'wifi' => { 'hits' => 6, 'docs' => 3 } }
  end
end
View
469 vendor/plugins/sphinx/spec/client_spec.rb
@@ -0,0 +1,469 @@
+require File.dirname(__FILE__) + '/../init'
+
+class SphinxSpecError < StandardError; end
+
# Helper mixin producing expected request bytes by shelling out to the
# reference PHP Sphinx API (requires the php binary on the PATH).
module SphinxFixtureHelper
  # Runs fixtures/<name>.php and returns its stdout.
  def sphinx_fixture(name)
    `php #{File.dirname(__FILE__)}/fixtures/#{name}.php`
  end
end
+
# Helper mixin wiring a Sphinx::Client to a mocked socket: Connect is
# stubbed to return the mock, and GetResponse raises SphinxSpecError so
# execution stops right after the request is sent.
module SphinxApiCall
  def create_sphinx
    @sphinx = Sphinx::Client.new
    @sock = mock('TCPSocket')
    @sphinx.stub!(:Connect).and_return(@sock)
    @sphinx.stub!(:GetResponse).and_raise(SphinxSpecError)
    return @sphinx
  end

  # Runs the block, swallowing only the sentinel error above.
  def safe_call
    yield
  rescue SphinxSpecError
  end
end
+
# Unit specs for Sphinx::Client#Connect: the TCP handshake (read the
# server's 4-byte protocol version, answer with ours) is exercised
# against a mocked TCPSocket -- no real server needed.
describe 'The Connect method of Sphinx::Client' do
  before(:each) do
    @sphinx = Sphinx::Client.new
    @sock = mock('TCPSocket')
  end

  it 'should establish TCP connection to the server and initialize session' do
    TCPSocket.should_receive(:new).with('localhost', 9312).and_return(@sock)
    @sock.should_receive(:recv).with(4).and_return([1].pack('N'))
    @sock.should_receive(:send).with([1].pack('N'), 0)
    @sphinx.send(:Connect).should be(@sock)
  end

  it 'should raise exception when searchd protocol is not 1+' do
    TCPSocket.should_receive(:new).with('localhost', 9312).and_return(@sock)
    @sock.should_receive(:recv).with(4).and_return([0].pack('N'))
    @sock.should_receive(:close)
    lambda { @sphinx.send(:Connect) }.should raise_error(Sphinx::SphinxConnectError)
    @sphinx.GetLastError.should == 'expected searchd protocol version 1+, got version \'0\''
  end

  it 'should raise exception on connection error' do
    TCPSocket.should_receive(:new).with('localhost', 9312).and_raise(Errno::EBADF)
    lambda { @sphinx.send(:Connect) }.should raise_error(Sphinx::SphinxConnectError)
    @sphinx.GetLastError.should == 'connection to localhost:9312 failed'
  end

  it 'should use custom host and port' do
    @sphinx.SetServer('anotherhost', 55555)
    TCPSocket.should_receive(:new).with('anotherhost', 55555).and_raise(Errno::EBADF)
    lambda { @sphinx.send(:Connect) }.should raise_error(Sphinx::SphinxConnectError)
  end
end
+
# Unit specs for Sphinx::Client#GetResponse: hand-crafted header/body
# byte strings exercise every status branch. The socket mock always
# expects close (GetResponse closes the socket on every path).
describe 'The GetResponse method of Sphinx::Client' do
  before(:each) do
    @sphinx = Sphinx::Client.new
    @sock = mock('TCPSocket')
    @sock.should_receive(:close)
  end

  it 'should receive response' do
    @sock.should_receive(:recv).with(8).and_return([Sphinx::Client::SEARCHD_OK, 1, 4].pack('n2N'))
    @sock.should_receive(:recv).with(4).and_return([0].pack('N'))
    @sphinx.send(:GetResponse, @sock, 1)
  end

  it 'should raise exception on zero-sized response' do
    @sock.should_receive(:recv).with(8).and_return([Sphinx::Client::SEARCHD_OK, 1, 0].pack('n2N'))
    lambda { @sphinx.send(:GetResponse, @sock, 1) }.should raise_error(Sphinx::SphinxResponseError)
  end

  it 'should raise exception when response is incomplete' do
    @sock.should_receive(:recv).with(8).and_return([Sphinx::Client::SEARCHD_OK, 1, 4].pack('n2N'))
    @sock.should_receive(:recv).with(4).and_raise(EOFError)
    lambda { @sphinx.send(:GetResponse, @sock, 1) }.should raise_error(Sphinx::SphinxResponseError)
  end

  it 'should set warning message when SEARCHD_WARNING received' do
    # warning packet: 4-byte warning length, warning text, then the body
    @sock.should_receive(:recv).with(8).and_return([Sphinx::Client::SEARCHD_WARNING, 1, 14].pack('n2N'))
    @sock.should_receive(:recv).with(14).and_return([5].pack('N') + 'helloworld')
    @sphinx.send(:GetResponse, @sock, 1).should == 'world'
    @sphinx.GetLastWarning.should == 'hello'
  end

  it 'should raise exception when SEARCHD_ERROR received' do
    @sock.should_receive(:recv).with(8).and_return([Sphinx::Client::SEARCHD_ERROR, 1, 9].pack('n2N'))
    @sock.should_receive(:recv).with(9).and_return([1].pack('N') + 'hello')
    lambda { @sphinx.send(:GetResponse, @sock, 1) }.should raise_error(Sphinx::SphinxInternalError)
    @sphinx.GetLastError.should == 'searchd error: hello'
  end

  it 'should raise exception when SEARCHD_RETRY received' do
    @sock.should_receive(:recv).with(8).and_return([Sphinx::Client::SEARCHD_RETRY, 1, 9].pack('n2N'))
    @sock.should_receive(:recv).with(9).and_return([1].pack('N') + 'hello')
    lambda { @sphinx.send(:GetResponse, @sock, 1) }.should raise_error(Sphinx::SphinxTemporaryError)
    @sphinx.GetLastError.should == 'temporary searchd error: hello'
  end

  it 'should raise exception when unknown status received' do
    @sock.should_receive(:recv).with(8).and_return([65535, 1, 9].pack('n2N'))
    @sock.should_receive(:recv).with(9).and_return([1].pack('N') + 'hello')
    lambda { @sphinx.send(:GetResponse, @sock, 1) }.should raise_error(Sphinx::SphinxUnknownError)
    @sphinx.GetLastError.should == 'unknown status code: \'65535\''
  end

  it 'should set warning when server is older than client' do
    @sock.should_receive(:recv).with(8).and_return([Sphinx::Client::SEARCHD_OK, 1, 9].pack('n2N'))
    @sock.should_receive(:recv).with(9).and_return([1].pack('N') + 'hello')
    @sphinx.send(:GetResponse, @sock, 5)
    @sphinx.GetLastWarning.should == 'searchd command v.0.1 older than client\'s v.0.5, some options might not work'
  end
end
+
# Unit specs for Sphinx::Client#Query: every setter combination must
# serialize to exactly the bytes the reference PHP client produces
# (fixtures rendered on the fly via SphinxFixtureHelper).
#
# NOTE(review): the recurring `rescue nil?` is almost certainly a typo
# for `rescue nil`; both swallow the stubbed error after the request
# has been written, so the @sock expectation still verifies the bytes.
describe 'The Query method of Sphinx::Client' do
  include SphinxFixtureHelper
  include SphinxApiCall

  before(:each) do
    @sphinx = create_sphinx
  end

  it 'should generate valid request with default parameters' do
    expected = sphinx_fixture('default_search')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with default parameters and index' do
    expected = sphinx_fixture('default_search_index')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.Query('query', 'index') rescue nil?
  end

  it 'should generate valid request with limits' do
    expected = sphinx_fixture('limits')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetLimits(10, 20)
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with limits and max number to retrieve' do
    expected = sphinx_fixture('limits_max')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetLimits(10, 20, 30)
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with limits and cutoff to retrieve' do
    expected = sphinx_fixture('limits_cutoff')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetLimits(10, 20, 30, 40)
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with max query time specified' do
    expected = sphinx_fixture('max_query_time')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetMaxQueryTime(1000)
    @sphinx.Query('query') rescue nil?
  end

  # one spec per SPH_MATCH_* mode constant
  describe 'with match' do
    [ :all, :any, :phrase, :boolean, :extended, :fullscan, :extended2 ].each do |match|
      it "should generate valid request for SPH_MATCH_#{match.to_s.upcase}" do
        expected = sphinx_fixture("match_#{match}")
        @sock.should_receive(:send).with(expected, 0)
        @sphinx.SetMatchMode(Sphinx::Client::const_get("SPH_MATCH_#{match.to_s.upcase}"))
        @sphinx.Query('query') rescue nil?
      end
    end
  end

  # one spec per SPH_RANK_* mode constant
  describe 'with rank' do
    [ :proximity_bm25, :bm25, :none, :wordcount, :proximity ].each do |rank|
      it "should generate valid request for SPH_RANK_#{rank.to_s.upcase}" do
        expected = sphinx_fixture("ranking_#{rank}")
        @sock.should_receive(:send).with(expected, 0)
        @sphinx.SetRankingMode(Sphinx::Client.const_get("SPH_RANK_#{rank.to_s.upcase}"))
        @sphinx.Query('query') rescue nil?
      end
    end
  end

  # one spec per SPH_SORT_* mode (relevance takes no sort-by clause)
  describe 'with sorting' do
    [ :attr_desc, :relevance, :attr_asc, :time_segments, :extended, :expr ].each do |mode|
      it "should generate valid request for SPH_SORT_#{mode.to_s.upcase}" do
        expected = sphinx_fixture("sort_#{mode}")
        @sock.should_receive(:send).with(expected, 0)
        @sphinx.SetSortMode(Sphinx::Client.const_get("SPH_SORT_#{mode.to_s.upcase}"), mode == :relevance ? '' : 'sortby')
        @sphinx.Query('query') rescue nil?
      end
    end
  end

  it 'should generate valid request with weights' do
    expected = sphinx_fixture('weights')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetWeights([10, 20, 30, 40])
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with field weights' do
    expected = sphinx_fixture('field_weights')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetFieldWeights({'field1' => 10, 'field2' => 20})
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with index weights' do
    expected = sphinx_fixture('index_weights')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetIndexWeights({'index1' => 10, 'index2' => 20})
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with ID range' do
    expected = sphinx_fixture('id_range')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetIDRange(10, 20)
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with ID range and 64-bit ints' do
    expected = sphinx_fixture('id_range64')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetIDRange(8589934591, 17179869183)
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with values filter' do
    expected = sphinx_fixture('filter')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetFilter('attr', [10, 20, 30])
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with two values filters' do
    expected = sphinx_fixture('filters')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetFilter('attr2', [40, 50])
    @sphinx.SetFilter('attr1', [10, 20, 30])
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with values filter excluded' do
    expected = sphinx_fixture('filter_exclude')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetFilter('attr', [10, 20, 30], true)
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with values filter range' do
    expected = sphinx_fixture('filter_range')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetFilterRange('attr', 10, 20)
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with two filter ranges' do
    expected = sphinx_fixture('filter_ranges')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetFilterRange('attr2', 30, 40)
    @sphinx.SetFilterRange('attr1', 10, 20)
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with filter range excluded' do
    expected = sphinx_fixture('filter_range_exclude')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetFilterRange('attr', 10, 20, true)
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with signed int64-based filter range' do
    expected = sphinx_fixture('filter_range_int64')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetFilterRange('attr1', -10, 20)
    @sphinx.SetFilterRange('attr2', -1099511627770, 1099511627780)
    safe_call { @sphinx.Query('query') }
  end

  it 'should generate valid request with float filter range' do
    expected = sphinx_fixture('filter_float_range')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetFilterFloatRange('attr', 10.5, 20.3)
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with float filter excluded' do
    expected = sphinx_fixture('filter_float_range_exclude')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetFilterFloatRange('attr', 10.5, 20.3, true)
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with different filters' do
    expected = sphinx_fixture('filters_different')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetFilterRange('attr1', 10, 20, true)
    @sphinx.SetFilter('attr3', [30, 40, 50])
    @sphinx.SetFilterRange('attr1', 60, 70)
    @sphinx.SetFilter('attr2', [80, 90, 100], true)
    @sphinx.SetFilterFloatRange('attr1', 60.8, 70.5)
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with geographical anchor point' do
    expected = sphinx_fixture('geo_anchor')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetGeoAnchor('attrlat', 'attrlong', 20.3, 40.7)
    @sphinx.Query('query') rescue nil?
  end

  # one spec per SPH_GROUPBY_* function, plus sort and distinct variants
  describe 'with group by' do
    [ :day, :week, :month, :year, :attr, :attrpair ].each do |groupby|
      it "should generate valid request for SPH_GROUPBY_#{groupby.to_s.upcase}" do
        expected = sphinx_fixture("group_by_#{groupby}")
        @sock.should_receive(:send).with(expected, 0)
        @sphinx.SetGroupBy('attr', Sphinx::Client::const_get("SPH_GROUPBY_#{groupby.to_s.upcase}"))
        @sphinx.Query('query') rescue nil?
      end
    end

    it 'should generate valid request for SPH_GROUPBY_DAY with sort' do
      expected = sphinx_fixture('group_by_day_sort')
      @sock.should_receive(:send).with(expected, 0)
      @sphinx.SetGroupBy('attr', Sphinx::Client::SPH_GROUPBY_DAY, 'somesort')
      @sphinx.Query('query') rescue nil?
    end

    it 'should generate valid request with count-distinct attribute' do
      expected = sphinx_fixture('group_distinct')
      @sock.should_receive(:send).with(expected, 0)
      @sphinx.SetGroupBy('attr', Sphinx::Client::SPH_GROUPBY_DAY)
      @sphinx.SetGroupDistinct('attr')
      @sphinx.Query('query') rescue nil?
    end
  end

  it 'should generate valid request with retries count specified' do
    expected = sphinx_fixture('retries')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetRetries(10)
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request with retries count and delay specified' do
    expected = sphinx_fixture('retries_delay')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetRetries(10, 20)
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request for SetOverride' do
    expected = sphinx_fixture('set_override')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetOverride('attr1', Sphinx::Client::SPH_ATTR_INTEGER, { 10 => 20 })
    @sphinx.SetOverride('attr2', Sphinx::Client::SPH_ATTR_FLOAT, { 11 => 30.3 })
    @sphinx.SetOverride('attr3', Sphinx::Client::SPH_ATTR_BIGINT, { 12 => 1099511627780 })
    @sphinx.Query('query') rescue nil?
  end

  it 'should generate valid request for SetSelect' do
    expected = sphinx_fixture('select')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.SetSelect('attr1, attr2')
    @sphinx.Query('query') rescue nil?
  end
end
+
# Unit specs for Sphinx::Client#RunQueries: two AddQuery calls must be
# serialized into one packet matching the PHP reference fixture.
describe 'The RunQueries method of Sphinx::Client' do
  include SphinxFixtureHelper

  before(:each) do
    @sphinx = Sphinx::Client.new
    @sock = mock('TCPSocket')
    @sphinx.stub!(:Connect).and_return(@sock)
    @sphinx.stub!(:GetResponse).and_raise(Sphinx::SphinxError)
  end

  it 'should generate valid request for multiple queries' do
    # NOTE(review): fixture name 'miltiple_queries' is misspelled but must
    # match the actual fixture file on disk.
    expected = sphinx_fixture('miltiple_queries')
    @sock.should_receive(:send).with(expected, 0)

    @sphinx.SetRetries(10, 20)
    @sphinx.AddQuery('test1')
    @sphinx.SetGroupBy('attr', Sphinx::Client::SPH_GROUPBY_DAY)
    @sphinx.AddQuery('test2') rescue nil?

    @sphinx.RunQueries rescue nil?
  end
end
+
# Unit specs for Sphinx::Client#BuildExcerpts: request bytes are
# compared against the PHP reference fixtures for default, custom and
# flag-based option sets.
describe 'The BuildExcerpts method of Sphinx::Client' do
  include SphinxFixtureHelper

  before(:each) do
    @sphinx = Sphinx::Client.new
    @sock = mock('TCPSocket')
    @sphinx.stub!(:Connect).and_return(@sock)
    @sphinx.stub!(:GetResponse).and_raise(Sphinx::SphinxError)
  end

  it 'should generate valid request with default parameters' do
    expected = sphinx_fixture('excerpt_default')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.BuildExcerpts(['10', '20'], 'index', 'word1 word2') rescue nil?
  end

  it 'should generate valid request with custom parameters' do
    expected = sphinx_fixture('excerpt_custom')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.BuildExcerpts(['10', '20'], 'index', 'word1 word2', { 'before_match' => 'before',
                                                                 'after_match' => 'after',
                                                                 'chunk_separator' => 'separator',
                                                                 'limit' => 10 }) rescue nil?
  end

  it 'should generate valid request with flags' do
    expected = sphinx_fixture('excerpt_flags')
    @sock.should_receive(:send).with(expected, 0)
    @sphinx.BuildExcerpts(['10', '20'], 'index', 'word1 word2', { 'exact_phrase' => true,
                                                                 'single_passage' => true,
                                                                 'use_boundaries' => true,
                                                                 'weight_order' => true }) rescue nil?
  end
end
+
# Unit specs for Sphinx::Client#BuildKeywords against the PHP fixture.
describe 'The BuildKeywords method of Sphinx::Client' do
  include SphinxFixtureHelper
  include SphinxApiCall

  before(:each) do
    @sphinx = create_sphinx
  end

  it 'should generate valid request' do
    expected = sphinx_fixture('keywords')
    @sock.should_receive(:send).with(expected, 0)
    safe_call { @sphinx.BuildKeywords('test', 'index', true) }
  end
end
+
# Unit specs for Sphinx::Client#UpdateAttributes (plain and MVA forms)
# against the PHP fixtures.
describe 'The UpdateAttributes method of Sphinx::Client' do
  include SphinxFixtureHelper
  include SphinxApiCall

  before(:each) do
    @sphinx = create_sphinx
  end

  it 'should generate valid request' do
    expected = sphinx_fixture('update_attributes')
    @sock.should_receive(:send).with(expected, 0)
    safe_call { @sphinx.UpdateAttributes('index', ['group'], { 123 => [456] }) }
  end

  it 'should generate valid request for MVA' do
    expected = sphinx_fixture('update_attributes_mva')
    @sock.should_receive(:send).with(expected, 0)
    safe_call { @sphinx.UpdateAttributes('index', ['group', 'category'], { 123 => [ [456, 789], [1, 2, 3] ] }, true) }
  end
end
View
8 vendor/plugins/sphinx/spec/fixtures/default_search.php
@@ -0,0 +1,8 @@
+<?php
+
+// Fixture: reference PHP API call — presumably executed to capture the
+// expected request bytes for the 'default_search' spec; confirm in helper.
+require ("sphinxapi.php");
+
+$cl = new SphinxClient();
+$cl->Query('query');
+
+?>
View
8 vendor/plugins/sphinx/spec/fixtures/default_search_index.php
@@ -0,0 +1,8 @@
+<?php
+
+// Fixture: reference PHP API call — presumably executed to capture the
+// expected request bytes for the 'default_search_index' spec; confirm in helper.
+require ("sphinxapi.php");
+
+$cl = new SphinxClient();
+$cl->Query('query', 'index');
+
+?>
View
11 vendor/plugins/sphinx/spec/fixtures/excerpt_custom.php
@@ -0,0 +1,11 @@
+<?php
+
+// Fixture: reference PHP API call — presumably executed to capture the
+// expected request bytes for the 'excerpt_custom' spec; confirm in helper.
+require ("sphinxapi.php");
+
+$cl = new SphinxClient();
+$cl->BuildExcerpts(array('10', '20'), 'index', 'word1 word2', array('before_match' => 'before',
+                                                                    'after_match' => 'after',
+                                                                    'chunk_separator' => 'separator',
+                                                                    'limit' => 10));
+
+?>
View
8 vendor/plugins/sphinx/spec/fixtures/excerpt_default.php
@@ -0,0 +1,8 @@
+<?php
+
+// Fixture: reference PHP API call — presumably executed to capture the
+// expected request bytes for the 'excerpt_default' spec; confirm in helper.
+require ("sphinxapi.php");
+
+$cl = new SphinxClient();
+$cl->BuildExcerpts(array('10', '20'), 'index', 'word1 word2');
+
+?>
View
11 vendor/plugins/sphinx/spec/fixtures/excerpt_flags.php
@@ -0,0 +1,11 @@
+<?php
+
+// Fixture: reference PHP API call — presumably executed to capture the
+// expected request bytes for the 'excerpt_flags' spec; confirm in helper.
+require ("sphinxapi.php");
+
+$cl = new SphinxClient();
+$cl->BuildExcerpts(array('10', '20'), 'index', 'word1 word2', array('exact_phrase' => true,
+                                                                    'single_passage' => true,
+                                                                    'use_boundaries' => true,
+                                                                    'weight_order' => true));
+
+?>
View
9 vendor/plugins/sphinx/spec/fixtures/field_weights.php
@@ -0,0 +1,9 @@
+<?php
+
+// Fixture: reference PHP API call — presumably executed to capture the
+// expected request bytes for the 'field_weights' spec; confirm in helper.
+require ("sphinxapi.php");
+
+$cl = new SphinxClient();
+$cl->SetFieldWeights(array('field1' => 10, 'field2' => 20));
+$cl->Query('query');
+
+?>
View
9 vendor/plugins/sphinx/spec/fixtures/filter.php
@@ -0,0 +1,9 @@
+<?php
+
+// Fixture: reference PHP API call — presumably executed to capture the
+// expected request bytes for the 'filter' spec; confirm in helper.
+require ("sphinxapi.php");
+
+$cl = new SphinxClient();
+$cl->SetFilter('attr', array(10, 20, 30));
+$cl->Query('query');
+
+?>
View
9 vendor/plugins/sphinx/spec/fixtures/filter_exclude.php
@@ -0,0 +1,9 @@
+<?php
+
+// Fixture: reference PHP API call — presumably executed to capture the
+// expected request bytes for the 'filter_exclude' spec; confirm in helper.
+require ("sphinxapi.php");
+
+$cl = new SphinxClient();
+$cl->SetFilter('attr', array(10, 20, 30), true);
+$cl->Query('query');
+
+?>
View
9 vendor/plugins/sphinx/spec/fixtures/filter_float_range.php
@@ -0,0 +1,9 @@
+<?php
+
+// Fixture: reference PHP API call — presumably executed to capture the
+// expected request bytes for the 'filter_float_range' spec; confirm in helper.
+require ("sphinxapi.php");
+
+$cl = new SphinxClient();
+$cl->SetFilterFloatRange('attr', 10.5, 20.3);
+$cl->Query('query');
+
+?>
View
9 vendor/plugins/sphinx/spec/fixtures/filter_float_range_exclude.php
@@ -0,0 +1,9 @@
+<?php
+
+// Fixture: reference PHP API call — presumably executed to capture the expected
+// request bytes for the 'filter_float_range_exclude' spec; confirm in helper.
+require ("sphinxapi.php");
+
+$cl = new SphinxClient();
+$cl->SetFilterFloatRange('attr', 10.5, 20.3, true);
+$cl->Query('query');
+
+?>
View
9 vendor/plugins/sphinx/spec/fixtures/filter_range.php
@@ -0,0 +1,9 @@
+<?php
+
+// Fixture: reference PHP API call — presumably executed to capture the
+// expected request bytes for the 'filter_range' spec; confirm in helper.
+require ("sphinxapi.php");
+
+$cl = new SphinxClient();
+$cl->SetFilterRange('attr', 10, 20);
+$cl->Query('query');
+
+?>
View
9 vendor/plugins/sphinx/spec/fixtures/filter_range_exclude.php
@@ -0,0 +1,9 @@
+<?php
+
+// Fixture: reference PHP API call — presumably executed to capture the
+// expected request bytes for the 'filter_range_exclude' spec; confirm in helper.
+require ("sphinxapi.php");
+
+$cl = new SphinxClient();
+$cl->SetFilterRange('attr', 10, 20, true);
+$cl->Query('query');
+
+?>