Commit
Merge branch 'release' into integration/master
Patrick Huang committed Nov 28, 2012
2 parents b089f21 + 9c39fe8 commit 20a5a55
Showing 4 changed files with 134 additions and 129 deletions.
4 changes: 2 additions & 2 deletions zanata-war/src/main/java/org/zanata/dao/TextFlowDAO.java
@@ -447,12 +447,12 @@ public List<HTextFlow> getTextFlows(DocumentId documentId, int startIndex, int m
*/
public List<HTextFlow> getTextFlowByDocumentIdWithConstraint(DocumentId documentId, HLocale hLocale, FilterConstraints constraints, int firstResult, int maxResult)
{
FilterConstraintToQuery constraintToQuery = FilterConstraintToQuery.from(constraints);
FilterConstraintToQuery constraintToQuery = FilterConstraintToQuery.filterInSingleDocument(constraints, documentId);
String queryString = constraintToQuery.toHQL();
log.debug("\n query {}\n", queryString);

Query textFlowQuery = getSession().createQuery(queryString);
constraintToQuery.setQueryParameters(textFlowQuery, documentId, hLocale);
constraintToQuery.setQueryParameters(textFlowQuery, hLocale);
textFlowQuery.setFirstResult(firstResult).setMaxResults(maxResult);
textFlowQuery.setCacheable(true).setComment("TextFlowDAO.getTextFlowByDocumentIdWithConstraint");

FilterConstraintToQuery.java
@@ -1,12 +1,15 @@
package org.zanata.search;

import java.util.Collection;

import org.hibernate.Query;
import org.hibernate.criterion.Criterion;
import org.hibernate.criterion.Disjunction;
import org.hibernate.criterion.Restrictions;
import org.zanata.model.HLocale;
import org.zanata.util.QueryBuilder;
import org.zanata.webtrans.shared.model.DocumentId;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;

import static org.hibernate.criterion.Restrictions.eq;
@@ -22,14 +25,30 @@ public class FilterConstraintToQuery
protected static final String STATE_LIST_NAMED_PARAM = "contentStateList";
protected static final String LOCALE_NAMED_PARAM = "locale";
protected static final String DOC_ID_NAMED_PARAM = "docId";
protected static final String DOC_IDS_LIST_NAMED_PARAM = "docIdList";
private static final String SEARCH_PLACEHOLDER = ":" + SEARCH_NAMED_PARAM;
private static final String STATE_LIST_PLACEHOLDER = ":" + STATE_LIST_NAMED_PARAM;
private static final String LOCALE_PLACEHOLDER = ":" + LOCALE_NAMED_PARAM;
private static final String DOC_ID_PLACEHOLDER = ":" + DOC_ID_NAMED_PARAM;
private static final String DOC_IDS_LIST_PLACEHOLDER = ":" + DOC_IDS_LIST_NAMED_PARAM;

private final FilterConstraints constraints;
private final boolean hasSearch;
private String searchString;
private DocumentId documentId;
private Collection<String> docIdList;

private FilterConstraintToQuery(FilterConstraints constraints, DocumentId documentId)
{
this(constraints);
this.documentId = documentId;
}

public FilterConstraintToQuery(FilterConstraints constraints, Collection<String> docIdList)
{
this(constraints);
this.docIdList = docIdList;
}

private FilterConstraintToQuery(FilterConstraints constraints)
{
@@ -42,15 +61,31 @@ private FilterConstraintToQuery(FilterConstraints constraints)
}
}

public static FilterConstraintToQuery from(FilterConstraints constraints)
public static FilterConstraintToQuery filterInSingleDocument(FilterConstraints constraints, DocumentId documentId)
{
Preconditions.checkNotNull(documentId);
return new FilterConstraintToQuery(constraints, documentId);
}

public static FilterConstraintToQuery filterInMultipleDocuments(FilterConstraints constraints, Collection<String> docIdList)
{
return new FilterConstraintToQuery(constraints);
Preconditions.checkNotNull(docIdList);
Preconditions.checkState(!docIdList.isEmpty());
return new FilterConstraintToQuery(constraints, docIdList);
}

public String toHQL()
{
String docIdCondition;
if (documentId != null)
{
docIdCondition = eq("tf.document.id", DOC_ID_PLACEHOLDER).toString();
}
else
{
docIdCondition = "tf.document.docId in (" + DOC_IDS_LIST_PLACEHOLDER + ")";
}
String obsoleteCondition = eq("tf.obsolete", "0").toString();
String docIdCondition = eq("tf.document.id", DOC_ID_PLACEHOLDER).toString();
String searchCondition = buildSearchCondition();
String stateCondition = buildStateCondition();

@@ -124,9 +159,16 @@ protected String buildStateCondition()
return stateInListCondition;
}

public Query setQueryParameters(Query textFlowQuery, DocumentId documentId, HLocale hLocale)
public Query setQueryParameters(Query textFlowQuery, HLocale hLocale)
{
textFlowQuery.setParameter(DOC_ID_NAMED_PARAM, documentId.getId());
if (documentId != null)
{
textFlowQuery.setParameter(DOC_ID_NAMED_PARAM, documentId.getId());
}
else
{
textFlowQuery.setParameterList(DOC_IDS_LIST_NAMED_PARAM, docIdList);
}
textFlowQuery.setParameter(LOCALE_NAMED_PARAM, hLocale.getId());
if (hasSearch)
{
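For orientation, a minimal usage sketch of the reworked FilterConstraintToQuery API shown above: the document scope is now fixed when the builder is created, so callers no longer pass a DocumentId to setQueryParameters. The wrapper class, method names, and the Session/HLocale parameters below are illustrative assumptions, not part of this commit.

import java.util.Collection;

import org.hibernate.Query;
import org.hibernate.Session;
import org.zanata.model.HLocale;
import org.zanata.search.FilterConstraintToQuery;
import org.zanata.search.FilterConstraints;
import org.zanata.webtrans.shared.model.DocumentId;

class FilterConstraintToQueryUsageSketch
{
   // Single-document scope: the DocumentId is captured at construction time,
   // so it is no longer passed to setQueryParameters(...). The generated HQL
   // constrains tf.document.id against the :docId parameter.
   static Query singleDocument(Session session, FilterConstraints constraints, DocumentId documentId, HLocale hLocale)
   {
      FilterConstraintToQuery toQuery = FilterConstraintToQuery.filterInSingleDocument(constraints, documentId);
      Query query = session.createQuery(toQuery.toHQL());
      return toQuery.setQueryParameters(query, hLocale); // binds :docId, :locale and any search/state parameters
   }

   // Multi-document scope: a non-empty docId collection produces a
   // "tf.document.docId in (:docIdList)" condition instead of the equality condition.
   static Query multipleDocuments(Session session, FilterConstraints constraints, Collection<String> docIds, HLocale hLocale)
   {
      FilterConstraintToQuery toQuery = FilterConstraintToQuery.filterInMultipleDocuments(constraints, docIds);
      Query query = session.createQuery(toQuery.toHQL());
      return toQuery.setQueryParameters(query, hLocale); // binds :docIdList, :locale and any search/state parameters
   }
}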
TextFlowSearchServiceImpl.java
@@ -56,17 +56,17 @@
import org.zanata.model.HProjectIteration;
import org.zanata.model.HTextFlow;
import org.zanata.model.HTextFlowTarget;
import org.zanata.search.FilterConstraintToQuery;
import org.zanata.search.FilterConstraints;
import org.zanata.service.LocaleService;
import org.zanata.service.TextFlowSearchService;
import org.zanata.webtrans.shared.model.DocumentId;
import org.zanata.webtrans.shared.model.WorkspaceId;

import lombok.extern.slf4j.Slf4j;
import com.google.common.base.Function;
import com.google.common.collect.Lists;

import static org.zanata.util.QueryBuilder.select;
import static org.zanata.util.QueryBuilder.and;
import static org.zanata.util.QueryBuilder.or;
import lombok.extern.slf4j.Slf4j;

/**
* @author David Mason, <a href="mailto:damason@redhat.com">damason@redhat.com</a>
@@ -93,7 +93,6 @@ public class TextFlowSearchServiceImpl implements TextFlowSearchService
@In
private FullTextSession session;

// Disabled for now, due to the need for a left join
private static final boolean ENABLE_HQL_SEARCH = true;

@Override
@@ -109,11 +108,11 @@ public List<HTextFlow> findTextFlows(WorkspaceId workspace, List<String> documen
}

/**
* @param workspace
* @param workspace workspace
* @param documentPaths null or empty to search entire project, otherwise
* only results for the given document paths will be returned
* @param constraints
* @return
* @param constraints filter constraints
* @return list of matching text flows
*/
private List<HTextFlow> findTextFlowsByDocumentPaths(WorkspaceId workspace, List<String> documentPaths, FilterConstraints constraints)
{
@@ -126,9 +125,10 @@ private List<HTextFlow> findTextFlowsByDocumentPaths(WorkspaceId workspace, List
// empty targets are required.

// check that locale is valid for the workspace
HLocale hLocale;
try
{
localeServiceImpl.validateLocaleByProjectIteration(localeId, projectSlug, iterationSlug);
hLocale = localeServiceImpl.validateLocaleByProjectIteration(localeId, projectSlug, iterationSlug);
}
catch (ZanataServiceException e)
{
@@ -151,7 +151,7 @@ private List<HTextFlow> findTextFlowsByDocumentPaths(WorkspaceId workspace, List
// decisions is made on which option to use. Remove before signing off on this.
if (ENABLE_HQL_SEARCH)
{
return findTextFlowsWithDatabaseSearch(projectSlug, iterationSlug, localeId, documentPaths, constraints);
return findTextFlowsWithDatabaseSearch(projectSlug, iterationSlug, documentPaths, constraints, hLocale);
}
else
{
@@ -163,108 +163,29 @@ private List<HTextFlow> findTextFlowsByDocumentPaths(WorkspaceId workspace, List
*
* @see org.zanata.dao.TextFlowDAO#getTextFlowByDocumentIdWithConstraint(org.zanata.webtrans.shared.model.DocumentId, org.zanata.model.HLocale, org.zanata.search.FilterConstraints, int, int)
*/
private List<HTextFlow> findTextFlowsWithDatabaseSearch(String projectSlug, String iterationSlug,
LocaleId validatedLocaleId, List<String> documentPaths, FilterConstraints constraints)
private List<HTextFlow> findTextFlowsWithDatabaseSearch(String projectSlug, String iterationSlug, List<String> documentPaths, FilterConstraints constraints, HLocale hLocale)
{
// TODO wrap in method for batching list of documents
// assuming doclist has already been batched before this method call

HLocale loc = localeServiceImpl.getByLocaleId(validatedLocaleId);
Long locId = loc.getId();

ArrayList<String> projectDocStateConstraints = new ArrayList<String>();
projectDocStateConstraints.add("tf.document.projectIteration.project.slug = :project");
projectDocStateConstraints.add("tf.document.projectIteration.slug = :iteration");
projectDocStateConstraints.add("tf.document.obsolete = false");

boolean hasDocumentPaths = documentPaths != null && !documentPaths.isEmpty();
log.info("document paths: {}", documentPaths); //FIXME reduce log level
List<String> docPaths;
if (hasDocumentPaths)
{
projectDocStateConstraints.add("tf.document.docId in ( :doclist )");
}

ArrayList<ContentState> stateList = new ArrayList<ContentState>(2);
boolean includeAllStates = constraints.isIncludeNew() && constraints.isIncludeFuzzy() && constraints.isIncludeApproved();
if (!includeAllStates)
{
// a different approach is required to ensure that text flows with no
// target are returned iff new state is included.
if (constraints.isIncludeNew())
{
// exclude non-matching states (so that flows with no target will match)
projectDocStateConstraints.add("tf.targets[" + locId + "].state not in ( :statelist )");
if (!constraints.isIncludeFuzzy())
{
stateList.add(ContentState.NeedReview);
}
if (!constraints.isIncludeApproved())
{
stateList.add(ContentState.Approved);
}
}
else
{
// include matching states (so that flows with no target will not match)
projectDocStateConstraints.add("tf.targets[" + locId + "].state in ( :statelist )");
if (constraints.isIncludeFuzzy())
{
stateList.add(ContentState.NeedReview);
}
if (constraints.isIncludeApproved())
{
stateList.add(ContentState.Approved);
}
}
}

ArrayList<String> contentCheckList = new ArrayList<String>(12);
if (constraints.isSearchInSource())
{
for (int i = 0; i < 6; i++)
{
contentCheckList.add("tf.content" + i + " like :searchString");
}
}
if (constraints.isSearchInTarget())
{
String contentPrefix = "tf.targets[" + locId + "].content";
for (int i = 0; i < 6; i++)
{
contentCheckList.add(contentPrefix + i + " like :searchString");
}
}

String[] contentChecks = contentCheckList.toArray(new String[contentCheckList.size()]);
String[] projectDocStateChecks = projectDocStateConstraints.toArray(new String[projectDocStateConstraints.size()]);

String queryStr = select("distinct tf").from("HTextFlow tf")
.leftJoin("tf.targets tfts")
.where(and(and(projectDocStateChecks), or(contentChecks)))
.toQueryString();

String searchString = constraints.getSearchString();
searchString = "%" + searchString + "%";

org.hibernate.Query query = session.createQuery(queryStr)
.setParameter("searchString", searchString)
.setParameter("project", projectSlug)
.setParameter("iteration", iterationSlug);
if (hasDocumentPaths)
{
query.setParameterList("doclist", documentPaths);
docPaths = documentPaths;
}
if (!includeAllStates)
else
{
query.setParameterList("statelist", stateList);
List<HDocument> allDocuments = documentDAO.getAllByProjectIteration(projectSlug, iterationSlug);
docPaths = Lists.transform(allDocuments, HDocumentToDocId.FUNCTION);
}

FilterConstraintToQuery toQuery = FilterConstraintToQuery.filterInMultipleDocuments(constraints, docPaths);
String hql = toQuery.toHQL();
log.debug("hql for searching: {}", hql);
org.hibernate.Query query = session.createQuery(hql);
toQuery.setQueryParameters(query, hLocale);
@SuppressWarnings("unchecked")
List<HTextFlow> results = query.list();
if (constraints.isCaseSensitive())
{
results = filterCaseSensitive(results, constraints, locId);
}
return results;
List<HTextFlow> result = query.list();
return result;
}

/**
@@ -520,4 +441,14 @@ private static boolean contentIsValid(Collection<String> contents, FilterConstra
return valid;
}

private static enum HDocumentToDocId implements Function<HDocument, String>
{
FUNCTION;

@Override
public String apply(HDocument input)
{
return input.getDocId();
}
}
}
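HDocumentToDocId above is a stateless Guava Function packaged as an enum singleton; the new findTextFlowsWithDatabaseSearch body uses it with Lists.transform to turn the project iteration's documents into the docId list handed to filterInMultipleDocuments. Below is a self-contained sketch of the same idiom, with a made-up Item type standing in for HDocument; it is illustrative only.

import java.util.Arrays;
import java.util.List;

import com.google.common.base.Function;
import com.google.common.collect.Lists;

class EnumFunctionSketch
{
   // Same idiom as HDocumentToDocId: a stateless Function exposed as an enum singleton
   private static enum ItemToName implements Function<Item, String>
   {
      FUNCTION;

      @Override
      public String apply(Item input)
      {
         return input.getName();
      }
   }

   static class Item
   {
      private final String name;
      Item(String name) { this.name = name; }
      String getName() { return name; }
   }

   public static void main(String[] args)
   {
      List<Item> items = Arrays.asList(new Item("a.pot"), new Item("b.pot"));
      // Lists.transform returns a lazily transformed view, as with docPaths above
      List<String> names = Lists.transform(items, ItemToName.FUNCTION);
      System.out.println(names); // prints [a.pot, b.pot]
   }
}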
