
first import

0 parents commit 893ddf525cef265cf2752b8303e87038692c1310 @mkristian committed
6 History.txt
@@ -0,0 +1,6 @@
+=== 1.0.0 / 2009-11-20
+
+* 1 major enhancement
+
+  * Birthday!
+
8 Manifest.txt
@@ -0,0 +1,8 @@
+.autotest
+History.txt
+Manifest.txt
+README.txt
+Rakefile
+bin/dm_lucene_adapter
+lib/dm_lucene_adapter.rb
+test/test_dm_lucene_adapter.rb
54 README.txt
@@ -0,0 +1,54 @@
+= dm-lucene-adapter
+
+*Homepage*: [http://dm-lucene-adapter.rubyforge.org]
+
+*Git*: [http://github.com/mkristian/dm-lucene-adapter]
+
+*Author*: Kristian Meier
+
+*Copyright*: 2009
+
+== DESCRIPTION:
+
+dm-lucene-adapter is a DataMapper adapter that stores resources in an
+Apache Lucene index. It runs on JRuby and talks to Lucene through a small
+Java bridge (de.saumya.lucene.LuceneService).
+
+== FEATURES/PROBLEMS:
+
+* create, read, update and delete of resources backed by a Lucene index
+* reads either all resources or a single resource by id; other query
+  conditions are not supported yet
+* Integer, Float and Boolean properties are coerced back from the index;
+  BigDecimal, Date, DateTime and Time are still TODO
+
+== SYNOPSIS:
+
+A minimal sketch of setting up the adapter and a model, mirroring
+spec/spec_helper.rb and the specs:
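+
+  require 'dm-core'
+  require 'dm-lucene-adapter'
+
+  DataMapper.setup(:default, :adapter => :lucene, :index => "tmp/index")
+
+  class Book
+    include DataMapper::Resource
+    property :id,     Serial
+    property :author, String, :length => 128
+    property :title,  String, :length => 255
+  end
+
+  book = Book.create(:author => "kristian", :title => "me and the corner")
+  Book.get(book.id)   # read a single resource by its id
+  Book.all            # read all resources
+  book.destroy        # remove it from the index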
+
+== REQUIREMENTS:
+
+* JRuby
+* dm-core (> 0.10.0)
+* Apache Lucene (lucene-core 2.9.0, used by the bundled Java bridge)
+
+== INSTALL:
+
+* jgem install dm-lucene-adapter
+
+== LICENSE:
+
+(The MIT License)
+
+Copyright (c) 2009 Kristian Meier
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
11 Rakefile
@@ -0,0 +1,11 @@
+# -*- ruby -*-
+
+require 'rubygems'
+require 'hoe'
+
+Hoe.spec 'dm-lucene-adapter' do |p|
+  p.developer('mkristian', 'm.kristian@web.de')
+  p.url = "http://dm-lucene-adapter.rubyforge.org"
+end
+
+# vim: syntax=ruby
1 lib/dm-lucene-adapter.rb
@@ -0,0 +1 @@
+require 'dm_lucene_adapter/dm_lucene_adapter'
125 lib/dm_lucene_adapter/dm_lucene_adapter.rb
@@ -0,0 +1,125 @@
+require 'dm_lucene_adapter/typed_abstract_adapter'
+require 'java'
+include_class "de.saumya.lucene.LuceneService"
+
+module DataMapper
+  module Adapters
+    class LuceneAdapter < TypedAbstractAdapter
+
+      def initialize(*args)
+        options = args[1]
+        @service = LuceneService.new(java.io.File.new(options[:index] || "index"))
+        super
+      end
+
+      # @param [Enumerable<Resource>] resources
+      #   The list of resources (model instances) to create
+      #
+      # @return [Integer]
+      #   The number of records that were actually saved into the data-store
+      #
+      # @api semipublic
+      def create(resources)
+        count = 0
+        indexer = @service.create_indexer
+        resources.each do |resource|
+          resource.id = indexer.next_id
+          map = {}
+          resource.attributes.each { |k,v| map[k.to_s] = v.to_s}
+          indexer.index(map)
+          count += 1
+        end
+        count
+      ensure
+        indexer.close if indexer
+      end
+
+      # @param [Query] query
+      #   the query to match resources in the datastore
+      #
+      # @return [Enumerable<Hash>]
+      #   an array of hashes to become resources
+      #
+      # @api semipublic
+      def do_read(query)
+        # p query
+        reader = @service.create_reader
+        if(query.conditions.nil?)
+          result = []
+          reader.read_all.each do |resource|
+            map = {}
+            resource.each do |k,v|
+              map[k] = v
+            end
+            result << map
+          end
+          result
+        else
+          ops = query.conditions.operands
+          if(ops.size == 1 && ops[0].class == DataMapper::Query::Conditions::EqualToComparison && ops[0].subject.name == :id)
+            map = {}
+            reader.read(query.conditions.operands[0].value).each do |k,v|
+              map[k] = v
+            end
+            [map]
+          else
+            []
+          end
+        end
+      ensure
+        reader.close if reader
+      end
+
+      # @param [Hash(Property => Object)] attributes
+      #   hash of attribute values to set, keyed by Property
+      # @param [Collection] collection
+      #   collection of records to be updated
+      #
+      # @return [Integer]
+      #   the number of records updated
+      #
+      # @api semipublic
+      def update(attributes, collection)
+        count = 0
+        deleter = @service.create_deleter
+        resources = read(collection.query)
+        resources.each do |resource|
+          # read returns plain hashes, so the id is looked up by field name
+          deleter.delete(resource["id"])
+        end
+        deleter.close
+        deleter = nil
+        indexer = @service.create_indexer
+        attributes = attributes_as_fields(attributes)
+        resources.each do |resource|
+          resource.update(attributes)
+          map = {}
+          resource.each { |k,v| map[k.to_s] = v.to_s}
+          indexer.index(map)
+          count += 1
+        end
+        count
+      ensure
+        indexer.close if indexer rescue nil
+        deleter.close if deleter
+      end
+
+      # @param [Collection] collection
+      #   collection of records to be deleted
+      #
+      # @return [Integer]
+      #   the number of records deleted
+      #
+      # @api semipublic
+      def delete(collection)
+        count = 0
+        indexer = @service.create_deleter
+        collection.each do |resource|
+          indexer.delete(resource.id)
+          count += 1
+        end
+        count
+      ensure
+        indexer.close if indexer
+      end
+    end
+  end
+end
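
Note on the adapter above: do_read only answers two query shapes, either no conditions at all (read everything) or a single equality comparison on :id; any other condition falls into the else branch and yields an empty result. A rough sketch at the model level, assuming the Book model from spec/spec_helper.rb (the id 42 is just illustrative):

  Book.all                          # conditions nil        -> reader.read_all
  Book.get(42)                      # id equality           -> reader.read(42)
  Book.all(:author => "kristian")   # unsupported condition -> no records
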
32 lib/dm_lucene_adapter/typed_abstract_adapter.rb
@@ -0,0 +1,32 @@
+require 'dm-core/adapters/abstract_adapter'
+module DataMapper
+  module Adapters
+    class TypedAbstractAdapter < AbstractAdapter
+
+      def do_read(query)
+        raise NotImplementedError, "#{self.class}#do_read not implemented"
+      end
+
+      def read(query)
+        result = do_read(query)
+        # key the primitive types by field name so that the coercion below
+        # does not depend on the iteration order of the returned hashes
+        types = {}
+        query.fields.each { |property| types[property.name.to_s] = property.primitive }
+        result.each do |resource|
+          resource.each do |k,v|
+            case types[k].to_s
+            when "Integer"
+              resource[k] = v.to_i
+            when "Float"
+              resource[k] = v.to_f
+            when "TrueClass"
+              resource[k] = v == "true"
+            # TODO BigDecimal, Date, DateTime, Time
+            end
+          end
+        end
+        result
+      end
+    end
+  end
+end
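
The read wrapper above exists because Lucene hands every stored field back as a string. A small illustration of the coercion it performs, assuming the Book model from spec/spec_helper.rb (`row` is just an illustrative hash of the kind do_read returns):

  # do_read returns string values straight from the index, e.g.
  row = { "id" => "1", "author" => "kristian", "published" => "true" }
  # read looks up each property's primitive (Integer, Float, TrueClass, ...)
  # and coerces the values in place, so the resource is loaded as
  # { "id" => 1, "author" => "kristian", "published" => true }
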
3 lib/dm_lucene_adapter/version.rb
@@ -0,0 +1,3 @@
+module DmLuceneAdapter
+  VERSION = '1.0.0'
+end
43 pom.xml
@@ -0,0 +1,43 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>de.saumya</groupId>
+  <artifactId>lucene</artifactId>
+  <packaging>jar</packaging>
+  <version>1.0-SNAPSHOT</version>
+  <name>app</name>
+  <url>http://maven.apache.org</url>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.lucene</groupId>
+      <artifactId>lucene-core</artifactId>
+      <version>2.9.0</version>
+    </dependency>
+  </dependencies>
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.jruby.plugins</groupId>
+        <artifactId>jruby-rake-plugin</artifactId>
+        <version>1.5.0dev</version>
+        <executions>
+          <execution>
+            <id>spec</id>
+            <goals>
+              <goal>spec</goal>
+            </goals>
+          </execution>
+          <execution>
+            <phase>pre-integration-test</phase>
+            <goals>
+              <goal>install-gems</goal>
+            </goals>
+            <configuration>
+              <gems>rspec dm-core</gems>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>
47 spec/dm_lucene_adapter_spec.rb
@@ -0,0 +1,47 @@
+require File.dirname(__FILE__) + '/spec_helper'
+
+describe DataMapper::Adapters::LuceneAdapter do
+
+ it "should create" do
+ Book.create(:author => "kristian", :title => "me and the corner").new?.should be_false
+ end
+
+ it "should read all" do
+ size = Book.all.size
+ (1..3).each do
+ Book.create(:author => "kristian", :title => "me and the corner")
+ end
+ b = Book.all
+ b.size.should == size + 3
+ end
+
+ it "should delete" do
+ books = []
+ (1..4).each do
+ books << Book.create(:author => "kristian", :title => "me and the corner")
+ end
+ id = books.last.id
+ size = Book.all.size
+ books.each do |b|
+ b.destroy if b.id % 2 == 0
+ end
+ b = Book.all
+ size.should == b.size + 2
+ book = Book.create(:author => "kristian", :title => "me and the corner")
+ id.should < book.id
+ end
+
+ it 'should read a single' do
+ book = Book.create(:author => "kristian", :title => "me and the corner")
+ b = Book.get(book.id)
+ b.id.should == book.id
+ end
+
+ it 'should update' do
+ book = Book.create(:author => "kristian", :title => "me and the corner")
+ book.author = "sanuka"
+ book.save
+ b = Book.get(book.id)
+ b.author.should == "sanuka"
+ end
+end
2 spec/spec.opts
@@ -0,0 +1,2 @@
+--colour
+--loadby random
24 spec/spec_helper.rb
@@ -0,0 +1,24 @@
+require 'rubygems'
+
+gem 'dm-core', ">0.10.0"
+require 'pathname'
+$LOAD_PATH << Pathname(__FILE__).dirname.parent.expand_path + 'lib'
+
+require 'dm-core'
+require 'dm-lucene-adapter'
+
+class Book
+
+  include DataMapper::Resource
+
+  property :id, Serial
+
+  property :author, String, :length => 128
+
+  property :title, String, :length => 255
+
+  property :published, Boolean, :default => true
+
+end
+
+DataMapper.setup(:default, :adapter => :lucene, :index => "tmp/index")
35 src/main/java/de/saumya/lucene/LuceneDeleter.java
@@ -0,0 +1,35 @@
+/**
+ *
+ */
+package de.saumya.lucene;
+
+import java.io.IOException;
+
+import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.StaleReaderException;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.TermDocs;
+import org.apache.lucene.store.LockObtainFailedException;
+
+public class LuceneDeleter {
+
+    private final IndexReader reader;
+
+    LuceneDeleter(final IndexReader reader) {
+        this.reader = reader;
+    }
+
+    public void delete(final int id) throws StaleReaderException,
+            CorruptIndexException, LockObtainFailedException, IOException {
+        final TermDocs terms = this.reader.termDocs(new Term("id", "" + id));
+        while (terms.next()) {
+            this.reader.deleteDocument(terms.doc());
+        }
+        terms.close();
+    }
+
+    public void close() throws CorruptIndexException, IOException {
+        this.reader.close();
+    }
+}
57 src/main/java/de/saumya/lucene/LuceneIndexer.java
@@ -0,0 +1,57 @@
+/**
+ *
+ */
+package de.saumya.lucene;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+
+public class LuceneIndexer {
+
+    private final IndexWriter writer;
+
+    private final IndexReader reader;
+
+    LuceneIndexer(final IndexWriter writer, final IndexReader reader) {
+        this.writer = writer;
+        this.reader = reader;
+    }
+
+    public int nextId() throws IOException {
+        // TODO use fieldselector
+        final int max = this.reader.maxDoc();
+        return max == 0 ? 1 : Integer.parseInt(this.reader.document(max - 1)
+                                                          .getField("id")
+                                                          .stringValue()) + 1;
+    }
+
+    public void index(final Map<String, String> resource)
+            throws CorruptIndexException, IOException {
+        final Document document = new Document();
+        for (final Map.Entry<String, String> entry : resource.entrySet()) {
+            if (entry.getValue() != null) {
+                document.add(new Field(entry.getKey(),
+                                       entry.getValue(),
+                                       Field.Store.YES,
+                                       Field.Index.ANALYZED));
+            }
+        }
+        this.writer.addDocument(document);
+    }
+
+    public void close() throws CorruptIndexException, IOException {
+        try {
+            this.writer.optimize();
+            this.writer.close();
+        }
+        finally {
+            this.reader.close();
+        }
+    }
+}
70 src/main/java/de/saumya/lucene/LuceneReader.java
@@ -0,0 +1,70 @@
+/**
+ *
+ */
+package de.saumya.lucene;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.StaleReaderException;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.Searcher;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.WildcardQuery;
+import org.apache.lucene.store.LockObtainFailedException;
+
+public class LuceneReader {
+
+    private final Searcher searcher;
+
+    LuceneReader(final Searcher searcher) {
+        this.searcher = searcher;
+    }
+
+    public Map<String, String> read(final int id) throws StaleReaderException,
+            CorruptIndexException, LockObtainFailedException, IOException {
+        final Query query = new TermQuery(new Term("id", id + ""));
+        final TopDocs docs = this.searcher.search(query, 1200000);
+        final Map<String, String> result = new HashMap<String, String>();
+        for (final ScoreDoc sdoc : docs.scoreDocs) {
+            final Document doc = this.searcher.doc(sdoc.doc);
+            for (final Object o : doc.getFields()) {
+                final Field f = (Field) o;
+                result.put(f.name(), f.stringValue());
+            }
+        }
+        return result;
+    }
+
+    public Collection<Map<String, String>> readAll()
+            throws StaleReaderException, CorruptIndexException,
+            LockObtainFailedException, IOException {
+        final Query query = new WildcardQuery(new Term("id", "*"));
+        final TopDocs docs = this.searcher.search(query, 1200000);
+        final Set<Map<String, String>> result = new HashSet<Map<String, String>>();
+        for (final ScoreDoc sdoc : docs.scoreDocs) {
+            final Map<String, String> map = new HashMap<String, String>();
+            final Document doc = this.searcher.doc(sdoc.doc);
+            for (final Object o : doc.getFields()) {
+                final Field f = (Field) o;
+                map.put(f.name(), f.stringValue());
+            }
+            result.add(map);
+        }
+        return result;
+    }
+
+    public void close() throws IOException {
+        this.searcher.close();
+    }
+}
41 src/main/java/de/saumya/lucene/LuceneService.java
@@ -0,0 +1,41 @@
+package de.saumya.lucene;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.index.CorruptIndexException;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.Version;
+
+public class LuceneService {
+
+    private final File indexDir;
+
+    public LuceneService(final File indexDir) {
+        this.indexDir = indexDir;
+    }
+
+    public LuceneIndexer createIndexer() throws IOException {
+        return new LuceneIndexer(new IndexWriter(FSDirectory.open(this.indexDir),
+                                                 new StandardAnalyzer(Version.LUCENE_CURRENT),
+                                                 !this.indexDir.exists(),
+                                                 IndexWriter.MaxFieldLength.LIMITED),
+                                 IndexReader.open(FSDirectory.open(this.indexDir), true));
+    }
+
+    public LuceneReader createReader() throws CorruptIndexException,
+            IOException {
+        return new LuceneReader(new IndexSearcher(FSDirectory.open(this.indexDir),
+                                                  true));
+    }
+
+    public LuceneDeleter createDeleter() throws CorruptIndexException,
+            IOException {
+        return new LuceneDeleter(IndexReader.open(FSDirectory.open(this.indexDir),
+                                                  false));
+    }
+}
