added query parsing (incomplete)

This commit is contained in:
2024-11-06 13:24:54 -05:00
parent 07d6e63457
commit 3ecbf006dd
11 changed files with 929 additions and 163 deletions

View File

@@ -17,7 +17,7 @@
<properties>
<alfresco.sdk.version>5.2.0</alfresco.sdk.version>
<alfresco.platform.version>23.3.0</alfresco.platform.version>
<alfresco.platform.version>7.0.0</alfresco.platform.version>
</properties>
<dependencyManagement>

View File

@@ -0,0 +1,67 @@
package com.inteligr8.alfresco.asie.cache;
import java.io.Serializable;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Collection;
import org.alfresco.repo.cache.SimpleCache;
/**
 * A decorator around an Alfresco {@link SimpleCache} whose values are collections,
 * adding a convenience {@link #add(Serializable, Object)} that lazily creates the
 * collection for a key on first use.
 *
 * <p>NOTE(review): {@link #add(Serializable, Object)} is get-then-put and therefore
 * not atomic across cluster members; callers must wrap it in a cluster-safe lock
 * if concurrent registration is possible — TODO confirm with callers.
 *
 * @param <K> the cache key type
 * @param <V> the element type stored in each collection
 * @param <C> the concrete collection type held as the cache value
 */
public class MultiValueCache<K extends Serializable, V, C extends Collection<V>> implements SimpleCache<K, C> {

    // Delegate cache holding the per-key collections.
    private final SimpleCache<K, C> cache;
    // Concrete collection class instantiated on demand; must expose a public no-arg constructor.
    private final Class<?> collectionType;

    public MultiValueCache(SimpleCache<K, C> cache, Class<?> collectionType) {
        this.cache = cache;
        this.collectionType = collectionType;
    }

    /**
     * Adds a single value to the collection cached under {@code key}, creating the
     * collection reflectively if the key is not yet cached.
     *
     * @param key the cache key; never null
     * @param value the element to add
     * @return whatever {@link Collection#add(Object)} returns for the element
     * @throws UnsupportedOperationException if {@code collectionType} has no public
     *         no-arg constructor or cannot be instantiated
     */
    public boolean add(K key, V value) {
        C c = this.cache.get(key);
        if (c != null)
            return c.add(value);
        try {
            Constructor<?> constructor = this.collectionType.getConstructor();
            // The cast is safe by construction: callers supply a Class for C.
            @SuppressWarnings("unchecked")
            C created = (C) constructor.newInstance();
            this.cache.put(key, created);
            return created.add(value);
        } catch (ReflectiveOperationException e) {
            // ReflectiveOperationException covers NoSuchMethod/InvocationTarget/
            // IllegalAccess/Instantiation — same set the original multi-catch named.
            throw new UnsupportedOperationException(e.getMessage(), e);
        }
    }

    @Override
    public void clear() {
        this.cache.clear();
    }

    @Override
    public C get(K key) {
        return this.cache.get(key);
    }

    /**
     * @return true only if a non-empty collection is cached under {@code key};
     *         an empty cached collection reports false (intentional).
     */
    @Override
    public boolean contains(K key) {
        C c = this.cache.get(key);
        return c != null && !c.isEmpty();
    }

    @Override
    public Collection<K> getKeys() {
        return this.cache.getKeys();
    }

    @Override
    public void put(K key, C value) {
        this.cache.put(key, value);
    }

    @Override
    public void remove(K key) {
        this.cache.remove(key);
    }
}

View File

@@ -0,0 +1,47 @@
package com.inteligr8.alfresco.asie.compute;
import java.util.List;
import java.util.Set;
import org.alfresco.repo.search.impl.parsers.CMISLexer;
import org.alfresco.repo.search.impl.parsers.CMISParser;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.search.SearchParameters.Operator;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.QName;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;
import org.apache.commons.collections4.SetUtils;
import org.springframework.stereotype.Component;
@Component
public class CmisQueryInspector implements QueryInspector {
// Inspects CMIS-dialect queries. Parsing is wired up; AST traversal to extract
// required property values is not yet implemented (this class makes no determination).

    // Query languages parsed with the CMIS grammar.
    private final Set<String> supportedLanguages = SetUtils.unmodifiableSet(
            SearchService.LANGUAGE_CMIS_ALFRESCO,
            SearchService.LANGUAGE_CMIS_STRICT,
            SearchService.LANGUAGE_INDEX_CMIS,
            SearchService.LANGUAGE_SOLR_CMIS);

    @Override
    public Set<String> getSupportedLanguages() {
        return this.supportedLanguages;
    }

    /**
     * Parses the CMIS query and (eventually) extracts the values the given property
     * is required to hold for any result to match.
     *
     * @param cmisQuery the raw CMIS query text; never null
     * @param defaultOperator the operator joining terms with no explicit operator
     * @param property the property whose required values are sought
     * @param dataTypeDef the data type of that property, for value coercion
     * @return null, meaning "no determination" — the AST traversal is not yet implemented
     * @throws RecognitionException if the query does not parse under the CMIS grammar
     */
    @Override
    public List<QueryValue> findRequiredPropertyValues(String cmisQuery, Operator defaultOperator, QName property, DataTypeDefinition dataTypeDef) throws RecognitionException {
        Tree tree = this.parseCmis(cmisQuery, defaultOperator);
        // TODO traverse the CMIS AST (analogous to FtsQueryInspector) and extract
        // required values for the property; until then make no determination.
        return null;
    }

    /**
     * Parses the query with the ANTLR CMIS grammar.
     *
     * @param cmisQuery the raw CMIS query text
     * @param defaultOperator currently unused by the CMIS entry point — TODO confirm
     *        whether the grammar needs a conjunction/disjunction mode like FTS
     * @return the root of the parsed AST
     * @throws RecognitionException if the query does not parse
     */
    protected Tree parseCmis(String cmisQuery, Operator defaultOperator) throws RecognitionException {
        CharStream cs = new ANTLRStringStream(cmisQuery);
        CMISLexer lexer = new CMISLexer(cs);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        CMISParser parser = new CMISParser(tokens);
        CommonTree tree = (CommonTree) parser.query().getTree();
        return tree;
    }
}

View File

@@ -0,0 +1,290 @@
package com.inteligr8.alfresco.asie.compute;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.Period;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import org.alfresco.repo.search.impl.parsers.FTSLexer;
import org.alfresco.repo.search.impl.parsers.FTSParser;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.repository.AssociationRef;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
import org.alfresco.service.cmr.repository.NodeRef;
import org.alfresco.service.cmr.search.SearchParameters.Operator;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;
import org.apache.commons.collections4.SetUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
public class FtsQueryInspector implements QueryInspector {
// Inspects Alfresco FTS / Lucene-dialect queries: parses the query with the ANTLR FTS
// grammar, then walks the AST to find the values a given property is REQUIRED to hold
// (terms reachable only through conjunctions) so callers can, e.g., route to shards.
private final Logger logger = LoggerFactory.getLogger(FtsQueryInspector.class);
// Query language identifiers this inspector handles.
private final Set<String> supportedLanguages = SetUtils.unmodifiableSet(
SearchService.LANGUAGE_FTS_ALFRESCO,
SearchService.LANGUAGE_INDEX_FTS_ALFRESCO,
SearchService.LANGUAGE_SOLR_FTS_ALFRESCO,
SearchService.LANGUAGE_LUCENE);
@Autowired
private NamespaceService namespaceService;
@Override
public Set<String> getSupportedLanguages() {
return this.supportedLanguages;
}
/**
 * Finds the values the given property is required to have for any result to match.
 *
 * Pipeline: parse -> unwrap single-child disjunctions -> collect conjunction terms
 * -> keep only terms on {@code property} -> drop fuzzy (non-exact, non-range) terms
 * -> coerce each surviving term to a QueryValue per the property's data type.
 *
 * @return the required values; empty if none; null when no determination can be
 *         made (the query's top level is a true multi-branch disjunction)
 * @throws RecognitionException if the query does not parse under the FTS grammar
 */
@Override
public List<QueryValue> findRequiredPropertyValues(String ftsQuery, Operator defaultOperator, QName property, DataTypeDefinition dataTypeDef) throws RecognitionException {
Tree tree = this.parseFts(ftsQuery, defaultOperator);
tree = this.bypassSingleTermDisjunctions(tree);
if (tree == null)
return null;
Collection<Tree> trees = this.extractRequiredTerms(tree);
this.logger.trace("Found {} required terms in query: {}", trees.size(), ftsQuery);
this.filterPropertyTerms(trees, property);
this.logger.trace("Found {} required terms for property {} in query: {}", trees.size(), property, ftsQuery);
this.filterOutFuzzyTerms(trees);
this.logger.trace("Found {} required definitive terms for property {} in query: {}", trees.size(), property, ftsQuery);
List<QueryValue> values = new ArrayList<>(trees.size());
for (Tree t : trees)
values.add(this.extractValue(t, dataTypeDef));
return values;
}
/**
 * Parses the query with the ANTLR FTS grammar, configuring the parser so that
 * implicit term joining matches the caller's default operator (AND vs OR).
 */
protected Tree parseFts(String ftsQuery, Operator defaultOperator) throws RecognitionException {
CharStream cs = new ANTLRStringStream(ftsQuery);
FTSLexer lexer = new FTSLexer(cs);
CommonTokenStream tokens = new CommonTokenStream(lexer);
FTSParser parser = new FTSParser(tokens);
parser.setDefaultFieldConjunction(defaultOperator.equals(Operator.AND));
parser.setMode(defaultOperator.equals(Operator.AND) ? FTSParser.Mode.DEFAULT_CONJUNCTION : FTSParser.Mode.DEFAULT_DISJUNCTION);
CommonTree tree = (CommonTree) parser.ftsQuery().getTree();
return tree;
}
/**
 * Unwraps DISJUNCTION nodes that have exactly one child (a degenerate OR is really
 * its sole operand). Returns null when a genuine multi-branch disjunction remains,
 * because no single value can then be "required".
 */
protected Tree bypassSingleTermDisjunctions(Tree tree) {
while ("DISJUNCTION".equals(tree.getText()) && tree.getChildCount() == 1)
tree = tree.getChild(0);
if ("DISJUNCTION".equals(tree.getText()))
return null;
return tree;
}
/**
 * Recursively collects the "DEFAULT" term nodes that are required: children of
 * CONJUNCTION nodes (recursing), never branches of a multi-child DISJUNCTION.
 * Unknown node kinds are logged and skipped.
 */
protected Collection<Tree> extractRequiredTerms(Tree tree) {
while ("DISJUNCTION".equals(tree.getText()) && tree.getChildCount() == 1)
tree = tree.getChild(0);
List<Tree> terms = new LinkedList<>();
switch (tree.getText()) {
case "DISJUNCTION":
// any branch may satisfy the query, so none of its terms is required
break;
case "CONJUNCTION":
for (int c = 0; c < tree.getChildCount(); c++) {
Collection<Tree> subtrees = this.extractRequiredTerms(tree.getChild(c));
if (subtrees == null || subtrees.isEmpty())
continue;
terms.addAll(subtrees);
}
break;
case "DEFAULT":
terms.add(tree);
break;
default:
this.logger.warn("Unexpected/unsupported tree: {}", tree.getText());
}
return terms;
}
/**
 * Removes (in place, via Iterator.remove) every term that is not a reference to
 * {@code property}; keeps a term only when its FIELD_REF local name matches and its
 * PREFIX resolves into the property's namespace.
 *
 * NOTE(review): the FIELD_REF child index (1 for TERM/PHRASE variants, 4 for RANGE)
 * and the FIELD_REF/PREFIX child layout are assumed from the FTSParser tree shape —
 * TODO confirm against the grammar.
 *
 * @return the same (mutated) collection, for chaining
 */
protected Collection<Tree> filterPropertyTerms(Collection<Tree> trees, QName property) {
if (trees.isEmpty())
return trees;
Set<String> prefixes = new HashSet<>(this.namespaceService.getPrefixes(property.getNamespaceURI()));
if (prefixes.isEmpty()) {
this.logger.warn("Unexpected/unsupported namespace: {}", property.getNamespaceURI());
trees.clear();
return trees;
}
Iterator<Tree> i = trees.iterator();
while (i.hasNext()) {
Tree tree = i.next();
if ("DEFAULT".equals(tree.getText()))
tree = tree.getChild(0);
// index of the FIELD_REF child (the children before it hold the term's value(s))
int skip = -1;
switch (tree.getText()) {
case "TERM":
case "PHRASE":
case "EXACT_TERM":
case "EXACT_PHRASE":
skip = 1; // skip the value child
break;
case "RANGE":
skip = 4; // skip the inclusive, start, end, inclusive children
break;
default:
}
if (skip >= 0) {
Tree fieldRef = tree.getChild(skip);
if (!"FIELD_REF".equals(fieldRef.getText())) {
this.logger.warn("Unexpected/unsupported tree: {}", tree.getText());
} else if (!fieldRef.getChild(0).getText().equals(property.getLocalName())) {
this.logger.trace("Found but ignoring property: {}", fieldRef.getChild(0).getText());
} else {
Tree prefix = fieldRef.getChild(1);
if (!"PREFIX".equals(prefix.getText())) {
this.logger.warn("Unexpected/unsupported tree: {}", tree.getText());
} else if (!prefixes.contains(prefix.getChild(0).getText())) {
this.logger.trace("Found but ignoring property: {}:{}", prefix.getChild(0).getText(), property.getLocalName());
} else {
// this will skip the remove()
continue;
}
}
}
i.remove();
}
return trees;
}
/**
 * Removes (in place) every term whose value is not definitive: only EXACT_TERM,
 * EXACT_PHRASE, and RANGE nodes survive; plain/fuzzy terms are dropped.
 *
 * @return the same (mutated) collection, for chaining
 */
protected Collection<Tree> filterOutFuzzyTerms(Collection<Tree> trees) {
if (trees.isEmpty())
return trees;
Iterator<Tree> i = trees.iterator();
while (i.hasNext()) {
Tree tree = i.next();
if ("DEFAULT".equals(tree.getText()))
tree = tree.getChild(0);
switch (tree.getText()) {
case "EXACT_TERM":
case "EXACT_PHRASE":
case "RANGE":
break;
default:
i.remove();
}
}
return trees;
}
/**
 * Coerces a surviving term node into a QueryValue, dispatching on the property's
 * Alfresco data type local name; unrecognized types fall back to a String value.
 */
protected QueryValue extractValue(Tree tree, DataTypeDefinition dataTypeDef) {
if ("DEFAULT".equals(tree.getText()))
tree = tree.getChild(0);
switch (tree.getText()) {
case "RANGE":
return this.extractRangeValue(tree, dataTypeDef);
default:
// single-valued term: fall through to scalar handling below
}
String value = this.unquote(tree.getChild(0).getText());
switch (dataTypeDef.getName().getLocalName()) {
case "boolean":
return new QuerySingleValue<Boolean>(Boolean.parseBoolean(value));
case "double":
return new QuerySingleValue<Double>(Double.parseDouble(value));
case "float":
return new QuerySingleValue<Float>(Float.parseFloat(value));
case "int":
return new QuerySingleValue<Integer>(Integer.parseInt(value));
case "long":
return new QuerySingleValue<Long>(Long.parseLong(value));
case "date":
return new QuerySingleValue<LocalDate>(this.evaluateAsDate(value));
case "datetime":
return new QuerySingleValue<LocalDateTime>(this.evaluateAsDateTime(value));
case "period":
return new QuerySingleValue<Period>(Period.parse(value));
case "qname":
return new QuerySingleValue<QName>(QName.createQName(value, this.namespaceService));
case "noderef":
return new QuerySingleValue<NodeRef>(new NodeRef(value));
case "childassocref":
return new QuerySingleValue<ChildAssociationRef>(new ChildAssociationRef(value));
case "assocref":
return new QuerySingleValue<AssociationRef>(new AssociationRef(value));
case "locale":
return new QuerySingleValue<Locale>(new Locale(value));
default:
return new QuerySingleValue<String>(value);
}
}
/**
 * Coerces a RANGE node (children: inclusivity, start, end, inclusivity — per the
 * FTS tree shape assumed above) into a typed QueryRangeValue.
 *
 * @throws UnsupportedOperationException for data types with no meaningful ordering
 */
protected QueryRangeValue<?> extractRangeValue(Tree tree, DataTypeDefinition dataTypeDef) {
boolean includeStart = "INCLUSIVE".equals(tree.getChild(0).getText());
String start = this.unquote(tree.getChild(1).getText());
String end = this.unquote(tree.getChild(2).getText());
boolean includeEnd = "INCLUSIVE".equals(tree.getChild(3).getText());
switch (dataTypeDef.getName().getLocalName()) {
case "double":
return new QueryRangeValue<Double>(includeStart, Double.parseDouble(start), includeEnd, Double.parseDouble(end));
case "float":
return new QueryRangeValue<Float>(includeStart, Float.parseFloat(start), includeEnd, Float.parseFloat(end));
case "int":
return new QueryRangeValue<Integer>(includeStart, Integer.parseInt(start), includeEnd, Integer.parseInt(end));
case "long":
return new QueryRangeValue<Long>(includeStart, Long.parseLong(start), includeEnd, Long.parseLong(end));
case "date":
return new QueryRangeValue<LocalDate>(includeStart, this.evaluateAsDate(start), includeEnd, this.evaluateAsDate(end));
case "datetime":
return new QueryRangeValue<LocalDateTime>(includeStart, this.evaluateAsDateTime(start), includeEnd, this.evaluateAsDateTime(end));
default:
throw new UnsupportedOperationException("The data type does not make sense for range evaluation: " + dataTypeDef.getName());
}
}
// Supports the literal "now" (case-insensitive) in addition to ISO-8601 dates.
protected LocalDate evaluateAsDate(String str) {
if ("now".equalsIgnoreCase(str)) return LocalDate.now();
else return LocalDate.parse(str);
}
// Supports the literal "now" (case-insensitive) in addition to ISO-8601 date-times.
protected LocalDateTime evaluateAsDateTime(String str) {
if ("now".equalsIgnoreCase(str)) return LocalDateTime.now();
else return LocalDateTime.parse(str);
}
// Strips one matching pair of single or double quotes, if present; otherwise
// returns the input untouched.
protected String unquote(String str) {
if (str.length() < 2) return str;
else if (str.charAt(0) == '\'' && str.charAt(str.length()-1) == '\'') return str.substring(1, str.length()-1);
else if (str.charAt(0) == '\"' && str.charAt(str.length()-1) == '\"') return str.substring(1, str.length()-1);
else return str;
}
}

View File

@@ -0,0 +1,74 @@
package com.inteligr8.alfresco.asie.compute;
import java.util.List;
import java.util.Set;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.search.SearchParameters.Operator;
import org.alfresco.service.namespace.QName;
import org.antlr.runtime.RecognitionException;
/**
 * Contract for language-specific query inspectors that determine which values a
 * given property must hold for a query to match anything.
 */
public interface QueryInspector {

    /** @return the query language identifiers this inspector understands */
    Set<String> getSupportedLanguages();

    /**
     * Extracts the values the given property is required to have for any result of
     * the query to match.
     *
     * @param query the raw query text
     * @param defaultOperator the operator joining terms with no explicit operator
     * @param property the property whose required values are sought
     * @param dataTypeDef the property's data type, used to coerce raw values
     * @return the required values; may be empty; null when no determination is possible
     * @throws RecognitionException if the query does not parse in this language
     */
    List<QueryValue> findRequiredPropertyValues(String query, Operator defaultOperator, QName property, DataTypeDefinition dataTypeDef) throws RecognitionException;

    /** Marker for any value constraint extracted from a query. */
    public interface QueryValue {
    }

    /** A single required value, e.g. from an exact-term or exact-phrase match. */
    public class QuerySingleValue<T> implements QueryValue {

        private final T value;

        public QuerySingleValue(T value) {
            this.value = value;
        }

        /** @return the extracted value */
        public T getValue() {
            return value;
        }

        @Override
        public String toString() {
            return this.value.toString();
        }
    }

    /** A required range of values with independently inclusive/exclusive bounds. */
    public class QueryRangeValue<T> implements QueryValue {

        private final boolean includeStart;
        private final boolean includeEnd;
        private final T start;
        private final T end;

        public QueryRangeValue(boolean includeStart, T start, boolean includeEnd, T end) {
            this.includeStart = includeStart;
            this.start = start;
            this.includeEnd = includeEnd;
            this.end = end;
        }

        /** @return true if the lower bound is part of the range */
        public boolean isIncludeStart() {
            return includeStart;
        }

        /** @return true if the upper bound is part of the range */
        public boolean isIncludeEnd() {
            return includeEnd;
        }

        /** @return the lower bound of the range */
        public T getStart() {
            return start;
        }

        /** @return the upper bound of the range */
        public T getEnd() {
            return end;
        }
    }
}

View File

@@ -0,0 +1,31 @@
package com.inteligr8.alfresco.asie.compute;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.alfresco.service.cmr.search.SearchParameters;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
public class QueryInspectorFactory implements InitializingBean {
// Selects the QueryInspector matching a search's query language.

    // All inspectors discovered in the Spring context.
    @Autowired
    private List<QueryInspector> inspectors;

    // Language identifier -> inspector; populated once during startup.
    // BUGFIX: this was never initialized, so afterPropertiesSet() threw an NPE
    // on the first put().
    private final Map<String, QueryInspector> languageInspectorMap = new HashMap<>();

    /**
     * Indexes every discovered inspector by each language it supports. An inspector
     * registered later for the same language wins (last-write).
     */
    @Override
    public void afterPropertiesSet() throws Exception {
        for (QueryInspector inspector : this.inspectors) {
            for (String language : inspector.getSupportedLanguages())
                this.languageInspectorMap.put(language, inspector);
        }
    }

    /**
     * @param searchParams the search whose language determines the inspector
     * @return the matching inspector, or null if the language is unsupported
     */
    public QueryInspector selectQueryInspector(SearchParameters searchParams) {
        return this.languageInspectorMap.get(searchParams.getLanguage());
    }
}

View File

@@ -8,8 +8,8 @@ import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.OptionalInt;
import java.util.Map.Entry;
import java.util.OptionalInt;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -21,34 +21,15 @@ import org.alfresco.repo.index.shard.ShardInstance;
import org.alfresco.repo.index.shard.ShardMethodEnum;
import org.alfresco.repo.index.shard.ShardRegistry;
import org.alfresco.repo.index.shard.ShardState;
import org.alfresco.repo.search.impl.QueryParserUtils;
import org.alfresco.repo.search.impl.parsers.AlfrescoFunctionEvaluationContext;
import org.alfresco.repo.search.impl.parsers.CMISLexer;
import org.alfresco.repo.search.impl.parsers.FTSLexer;
import org.alfresco.repo.search.impl.parsers.FTSParser;
import org.alfresco.repo.search.impl.parsers.FTSQueryParser;
import org.alfresco.repo.search.impl.querymodel.Conjunction;
import org.alfresco.repo.search.impl.querymodel.Constraint;
import org.alfresco.repo.search.impl.querymodel.Disjunction;
import org.alfresco.repo.search.impl.querymodel.FunctionalConstraint;
import org.alfresco.repo.search.impl.querymodel.QueryEngine;
import org.alfresco.repo.search.impl.querymodel.QueryModelFactory;
import org.alfresco.repo.search.impl.querymodel.QueryOptions;
import org.alfresco.repo.search.impl.querymodel.QueryOptions.Connective;
import org.alfresco.repo.search.impl.querymodel.impl.BaseConstraint;
import org.alfresco.repo.search.impl.querymodel.impl.lucene.LuceneQueryBuilderComponent;
import org.alfresco.repo.lock.JobLockService;
import org.alfresco.service.cmr.attributes.AttributeService;
import org.alfresco.service.cmr.attributes.AttributeService.AttributeQueryCallback;
import org.alfresco.service.cmr.dictionary.DataTypeDefinition;
import org.alfresco.service.cmr.dictionary.DictionaryService;
import org.alfresco.service.cmr.search.SearchParameters;
import org.alfresco.service.cmr.search.SearchParameters.Operator;
import org.alfresco.service.cmr.search.SearchService;
import org.alfresco.service.namespace.NamespaceService;
import org.alfresco.service.namespace.QName;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.tree.CommonTree;
import org.apache.commons.lang3.ObjectUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -59,6 +40,12 @@ import org.springframework.extensions.surf.util.AbstractLifecycleBean;
import org.springframework.stereotype.Component;
import com.inteligr8.alfresco.asie.Constants;
import com.inteligr8.alfresco.asie.cache.MultiValueCache;
import com.inteligr8.alfresco.asie.compute.QueryInspector;
import com.inteligr8.alfresco.asie.compute.QueryInspector.QueryRangeValue;
import com.inteligr8.alfresco.asie.compute.QueryInspector.QuerySingleValue;
import com.inteligr8.alfresco.asie.compute.QueryInspector.QueryValue;
import com.inteligr8.alfresco.asie.compute.QueryInspectorFactory;
import com.inteligr8.alfresco.asie.model.Node;
import com.inteligr8.alfresco.asie.model.ShardSet;
@@ -68,6 +55,8 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg
private final Logger logger = LoggerFactory.getLogger(this.getClass());
private final Pattern coreShardPattern = Pattern.compile("(.+)-[0-9]+");
private final QName shardLock = QName.createQName(Constants.NAMESPACE_ASIE, "shardLock");
@Autowired
private ShardStateService sss;
@@ -79,21 +68,30 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg
private NamespaceService namespaceService;
@Autowired
@Qualifier(Constants.BEAN_SHARD_STATE_CACHE)
private SimpleCache<ShardInstance, ShardState> onlineShardCache;
private DictionaryService dictionaryService;
@Autowired
private QueryInspectorFactory queryInspectorFactory;
@Autowired
private JobLockService jobLockService;
@Autowired
@Qualifier(Constants.BEAN_FLOC_SHARD_NODE_CACHE)
private SimpleCache<Floc, Map<Integer, Set<ShardInstance>>> flocShardNodeCache;
@Autowired
@Qualifier(Constants.BEAN_ONLINE_SHARD_STATE_CACHE)
private SimpleCache<ShardInstance, ShardState> onlineNodeShardStateCache;
@Autowired
@Qualifier(Constants.BEAN_OFFILINE_SHARD_STATE_CACHE)
private SimpleCache<ShardInstance, ShardState> offlineShardCache;
private SimpleCache<ShardInstance, ShardState> offlineNodeShardStateCache;
@Autowired
@Qualifier(Constants.BEAN_CORE_EXPLICIT_CACHE)
private SimpleCache<String, QName> coreExplicitIdCache;
@Autowired
@Qualifier(Constants.BEAN_FLOC_CACHE)
private SimpleCache<Floc, Boolean> flocCache;
@Value("${inteligr8.asie.registerUnknownShardOffline}")
private boolean registerOffline;
@@ -105,6 +103,8 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg
@Override
protected void onBootstrap(ApplicationEvent event) {
String lock = this.jobLockService.getLock(this.shardLock, 2500L, 500L, 10);
try {
this.attrService.getAttributes(new AttributeQueryCallback() {
@Override
public boolean handleAttribute(Long id, Serializable value, Serializable[] keys) {
@@ -113,56 +113,89 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg
ShardState shardNodeState = (ShardState) value;
ShardInstance shardNode = shardNodeState.getShardInstance();
cacheShard(shardNode, shardNodeState, (String) keys[1]);
if (ShardMethodEnum.EXPLICIT_ID.toString().equals(shardNodeState.getPropertyBag().get("shard.method"))) {
String coreName = shardNode.getShard().getFloc().getPropertyBag().get("coreName");
if (coreName != null && !coreExplicitIdCache.contains(coreName)) {
String property = shardNodeState.getPropertyBag().get("shard.key");
QName propertyQname = QName.createQName(property, namespaceService);
logger.debug("Mapping core to explicit ID: {} => {}", coreName, propertyQname);
coreExplicitIdCache.put(coreName, propertyQname);
}
}
return true;
default:
return true;
}
}
}, Constants.ATTR_ASIE_NODES);
}, Constants.ATTR_ASIE_NODE_SHARD);
} finally {
this.jobLockService.releaseLock(lock, this.shardLock);
}
}
@Override
protected void onShutdown(ApplicationEvent event) {
}
protected void cacheShard(ShardInstance shardNode, ShardState shardNodeState, String nodeId) {
SimpleCache<ShardInstance, ShardState> shardCache = this.onlineShardCache;
ShardState cachedShardNodeState = this.onlineShardCache.get(shardNode);
/**
* This is private because it must be wrapped in a cluster-safe lock
*/
private void cacheShard(ShardInstance shardNode, ShardState shardNodeState, String nodeShardId) {
ShardInstance detachedShardNode = this.detach(shardNode);
SimpleCache<ShardInstance, ShardState> shardCache = this.onlineNodeShardStateCache;
ShardState cachedShardNodeState = this.onlineNodeShardStateCache.get(detachedShardNode);
if (cachedShardNodeState == null) {
cachedShardNodeState = this.offlineShardCache.get(shardNode);
shardCache = this.offlineShardCache;
cachedShardNodeState = this.offlineNodeShardStateCache.get(detachedShardNode);
shardCache = this.offlineNodeShardStateCache;
}
Shard shard = shardNode.getShard();
this.putPutAdd(this.flocShardNodeCache, shard.getFloc(), shard.getInstance(), detachedShardNode);
if (cachedShardNodeState == null) {
Boolean online = (Boolean) this.attrService.getAttribute(Constants.ATTR_ASIE_NODES, nodeId, Constants.ATTR_ONLINE);
Boolean online = (Boolean) this.attrService.getAttribute(Constants.ATTR_ASIE_NODE_SHARD, nodeShardId, Constants.ATTR_ONLINE);
if (online != null) {
if (online.booleanValue()) {
this.onlineShardCache.put(shardNode, cachedShardNodeState);
this.onlineNodeShardStateCache.put(detachedShardNode, cachedShardNodeState);
} else {
this.offlineShardCache.put(shardNode, cachedShardNodeState);
this.offlineNodeShardStateCache.put(detachedShardNode, cachedShardNodeState);
}
} else {
if (this.registerOffline) {
this.offlineShardCache.put(shardNode, cachedShardNodeState);
this.offlineNodeShardStateCache.put(detachedShardNode, cachedShardNodeState);
} else {
this.onlineShardCache.put(shardNode, cachedShardNodeState);
this.onlineNodeShardStateCache.put(detachedShardNode, cachedShardNodeState);
}
}
} else if (cachedShardNodeState.getLastIndexedTxId() < shardNodeState.getLastIndexedTxId()) {
shardCache.put(shardNode, shardNodeState);
// update the cached state if the state's last indexes transaction is later
shardCache.put(shardNode, this.detach(shardNodeState));
}
switch (shardNode.getShard().getFloc().getShardMethod()) {
case EXPLICIT_ID:
cacheExplicitShard(shardNode, shardNodeState);
break;
default:
}
}
private void cacheExplicitShard(ShardInstance shardNode, ShardState shardNodeState) {
String coreName = shardNode.getShard().getFloc().getPropertyBag().get("coreName");
if (coreName != null && !this.coreExplicitIdCache.contains(coreName)) {
String property = shardNodeState.getPropertyBag().get("shard.key");
QName propertyQName = QName.createQName(property, this.namespaceService);
this.logger.debug("Mapping core to explicit ID: {} => {}", coreName, propertyQName);
this.coreExplicitIdCache.put(coreName, propertyQName);
}
}
@Override
public void registerShardState(ShardState shardNodeState) {
ShardInstance shardNode = shardNodeState.getShardInstance();
Node node = new Node(shardNode);
this.fixFlocPropertyBag(shardNodeState);
String lock = this.jobLockService.getLock(this.shardLock, 2500L, 500L, 10);
try {
this.cacheShard(shardNode, shardNodeState, node.getId());
this.persistShards();
} finally {
this.jobLockService.releaseLock(lock, this.shardLock);
}
}
@@ -182,25 +215,50 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg
}
protected String extractCoreName(String coreShardName) {
Matcher matcher = coreShardPattern.matcher(coreShardName);
Matcher matcher = this.coreShardPattern.matcher(coreShardName);
if (!matcher.matches())
return null;
return matcher.group(1);
}
@Override
public void registerShardState(ShardState shardNodeState) {
ShardInstance shardNode = shardNodeState.getShardInstance();
Node node = new Node(shardNode);
this.fixFlocPropertyBag(shardNodeState);
this.cacheShard(shardNode, shardNodeState, node.getId());
/**
* This is private because it must be wrapped in a cluster-safe lock
*/
private void persistShards() {
long onlineExpired = System.currentTimeMillis() - this.offlineIdleShardInSeconds * 1000L;
long offlineExpired = System.currentTimeMillis() - this.forgetOfflineShardInSeconds * 1000L;
for (ShardInstance shardNode : this.onlineNodeShardStateCache.getKeys()) {
String nodeShardId = new Node(shardNode).getId() + ";" + shardNode.getShard().getInstance();
ShardState shardNodeState = this.onlineNodeShardStateCache.get(shardNode);
if (shardNodeState.getLastUpdated() < onlineExpired) {
this.logger.warn("Taking shard offline: {}", shardNode);
this.onlineNodeShardStateCache.remove(shardNode);
this.offlineNodeShardStateCache.put(shardNode, shardNodeState);
} else {
this.attrService.setAttribute(shardNodeState, Constants.ATTR_ASIE_NODE_SHARD, nodeShardId, Constants.ATTR_STATE);
this.attrService.setAttribute(Boolean.TRUE, Constants.ATTR_ASIE_NODE_SHARD, nodeShardId, Constants.ATTR_ONLINE);
}
}
for (ShardInstance shardNode : this.offlineNodeShardStateCache.getKeys()) {
String nodeShardId = new Node(shardNode).getId() + ";" + shardNode.getShard().getInstance();
ShardState shardNodeState = this.offlineNodeShardStateCache.get(shardNode);
if (shardNodeState.getLastUpdated() < offlineExpired) {
this.logger.info("Forgetting about already offline shard: {}", shardNode);
this.offlineNodeShardStateCache.remove(shardNode);
} else {
this.attrService.setAttribute(shardNodeState, Constants.ATTR_ASIE_NODE_SHARD, nodeShardId, Constants.ATTR_STATE);
this.attrService.setAttribute(Boolean.FALSE, Constants.ATTR_ASIE_NODE_SHARD, nodeShardId, Constants.ATTR_ONLINE);
}
}
}
@Override
public Map<Floc, Map<Shard, Set<ShardState>>> getFlocs() {
Map<Floc, Map<Shard, Set<ShardState>>> flocs = new HashMap<>();
for (ShardInstance shardNode : this.onlineShardCache.getKeys()) {
for (ShardInstance shardNode : this.onlineNodeShardStateCache.getKeys()) {
Floc floc = shardNode.getShard().getFloc();
Map<Shard, Set<ShardState>> shards = flocs.get(floc);
@@ -211,7 +269,7 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg
if (shardNodeStates == null)
shards.put(shardNode.getShard(), shardNodeStates = new HashSet<>());
ShardState shardNodeState = this.onlineShardCache.get(shardNode);
ShardState shardNodeState = this.onlineNodeShardStateCache.get(shardNode);
if (shardNodeState != null) // in case it was removed during the looping (very rare)
shardNodeStates.add(shardNodeState);
}
@@ -229,20 +287,20 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg
long onlineExpired = System.currentTimeMillis() - this.offlineIdleShardInSeconds * 1000L;
long offlineExpired = System.currentTimeMillis() - this.forgetOfflineShardInSeconds * 1000L;
for (ShardInstance shardNode : this.onlineShardCache.getKeys()) {
ShardState shardNodeState = this.onlineShardCache.get(shardNode);
for (ShardInstance shardNode : this.onlineNodeShardStateCache.getKeys()) {
ShardState shardNodeState = this.onlineNodeShardStateCache.get(shardNode);
if (shardNodeState.getLastUpdated() < onlineExpired) {
this.logger.warn("Taking shard offline: {}", shardNode);
this.onlineShardCache.remove(shardNode);
this.offlineShardCache.put(shardNode, shardNodeState);
this.onlineNodeShardStateCache.remove(shardNode);
this.offlineNodeShardStateCache.put(shardNode, shardNodeState);
}
}
for (ShardInstance shardNode : this.offlineShardCache.getKeys()) {
ShardState shardNodeState = this.offlineShardCache.get(shardNode);
for (ShardInstance shardNode : this.offlineNodeShardStateCache.getKeys()) {
ShardState shardNodeState = this.offlineNodeShardStateCache.get(shardNode);
if (shardNodeState.getLastUpdated() < offlineExpired) {
this.logger.info("Forgetting about already offline shard: {}", shardNode);
this.offlineShardCache.remove(shardNode);
this.offlineNodeShardStateCache.remove(shardNode);
}
}
}
@@ -256,7 +314,7 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg
public Set<Integer> getShardInstanceList(String coreName) {
Set<Integer> shardIds = new HashSet<>();
for (ShardInstance shardNode : this.onlineShardCache.getKeys()) {
for (ShardInstance shardNode : this.onlineNodeShardStateCache.getKeys()) {
shardIds.add(shardNode.getShard().getInstance());
}
@@ -270,94 +328,94 @@ public class SolrShardRegistry extends AbstractLifecycleBean implements ShardReg
@Override
public List<ShardInstance> getIndexSlice(SearchParameters searchParameters) {
for (Floc floc : this.flocCache.getKeys()) {
Set<Integer> shardIds = new HashSet<>();
if (searchParameters.getQuery() == null)
return Collections.emptyList();
List<ShardInstance> bestShards = null;
for (Floc floc : this.flocShardMultiCache.getKeys()) {
List<ShardInstance> shards = new LinkedList<>();
switch (floc.getShardMethod()) {
case EXPLICIT_ID:
String property = floc.getPropertyBag().get("shard.key");
// check filters and other parameters
if (searchParameters.getQuery() != null) {
SearchTerm term = this.extractPropertySearchTeam(searchParameters, property);
if (term != null && term.operator.equals("=")) {
try {
shardIds.add(Integer.parseInt(term.value));
} catch (NumberFormatException nfe) {
// skip
}
QName propertyQName = QName.createQName(property, this.namespaceService);
DataTypeDefinition dtdef = this.dictionaryService.getProperty(propertyQName).getDataType();
QueryInspector inspector = this.queryInspectorFactory.selectQueryInspector(searchParameters);
if (inspector == null)
continue;
Set<Integer> shardIds = new HashSet<>();
List<QueryValue> values = inspector.findRequiredPropertyValues(searchParameters.getQuery(), searchParameters.getDefaultOperator(), propertyQName, dtdef);
for (QueryValue value : values) {
if (value instanceof QuerySingleValue<?>) {
@SuppressWarnings("unchecked")
Number num = ((QuerySingleValue<? extends Number>) value).getValue();
shardIds.add(num.intValue());
} else if (value instanceof QueryRangeValue<?>) {
@SuppressWarnings("unchecked")
QueryRangeValue<? extends Number> num = (QueryRangeValue<? extends Number>) value;
int start = num.getStart().intValue();
if (!num.isIncludeStart())
start++;
int end = num.getStart().intValue();
if (!num.isIncludeEnd())
end--;
for (int shardId = start; shardId <= end; shardId++)
shardIds.add(shardId);
}
}
// shardIds to shardInstances
break;
}
}
searchParameters.get
// TODO Auto-generated method stub
return null;
}
/**
 * Extracts a single property constraint term from the search's query,
 * dispatching on the query language in use.
 *
 * @param searchParameters the search whose query should be inspected
 * @param property the (prefixed) property name to look for
 * @return the matching term, or {@code null} for unsupported languages
 */
private SearchTerm extractPropertySearchTeam(SearchParameters searchParameters, String property) {
    String language = searchParameters.getLanguage();
    // CMIS family of query languages
    if (language.equals(SearchService.LANGUAGE_CMIS_ALFRESCO)
            || language.equals(SearchService.LANGUAGE_CMIS_STRICT)
            || language.equals(SearchService.LANGUAGE_INDEX_CMIS)
            || language.equals(SearchService.LANGUAGE_SOLR_CMIS))
        return this.extractCmisPropertySearchTerm(searchParameters, property, "=");
    // FTS/Lucene family of query languages
    if (language.equals(SearchService.LANGUAGE_FTS_ALFRESCO)
            || language.equals(SearchService.LANGUAGE_INDEX_ALFRESCO)
            || language.equals(SearchService.LANGUAGE_INDEX_FTS_ALFRESCO)
            || language.equals(SearchService.LANGUAGE_LUCENE)
            || language.equals(SearchService.LANGUAGE_SOLR_ALFRESCO)
            || language.equals(SearchService.LANGUAGE_SOLR_FTS_ALFRESCO))
        return this.extractFtsPropertySearchTerm(searchParameters, "=@" + property);
    // any other language: make no determination
    return null;
}
// query execution engine -- presumably for deeper query inspection; its use
// is not visible in this part of the file (TODO confirm)
@Autowired
private QueryEngine queryEngine;

// resolves property definitions (e.g. data types) from the content model
@Autowired
private DictionaryService dictionaryService;
/**
 * Attempts to extract the given field's constraint from an FTS query by
 * parsing it into an ANTLR AST.
 *
 * NOTE(review): incomplete -- the parsed tree ('ftsNode') is never walked and
 * no SearchTerm is returned; the trailing lines reference 'shards' and
 * 'bestShards', which are not in scope here (they belong to getIndexSlice).
 * This method does not compile as-is.
 */
private SearchTerm extractFtsPropertySearchTerm(SearchParameters searchParameters, String field) {
    // TODO include filter and other possible constraints
    if (searchParameters.getQuery() == null)
        return null;
    CharStream cs = new ANTLRStringStream(searchParameters.getQuery());
    FTSLexer lexer = new FTSLexer(cs);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    FTSParser parser = new FTSParser(tokens);
    // honor the search's default operator where the query does not specify one
    parser.setDefaultFieldConjunction(searchParameters.getDefaultFTSOperator().equals(Operator.AND));
    parser.setMode(searchParameters.getDefaultFTSOperator().equals(Operator.AND) ? FTSParser.Mode.DEFAULT_CONJUNCTION : FTSParser.Mode.DEFAULT_DISJUNCTION);
    CommonTree ftsNode = (CommonTree) parser.ftsQuery().getTree();
    // NOTE(review): out-of-scope variables; looks like misplaced merge residue
    if (!shards.isEmpty() && (bestShards == null || shards.size() < bestShards.size()))
        bestShards = shards;
}
/**
 * Attempts to extract the given field's constraint from a CMIS query.
 *
 * NOTE(review): incomplete -- the CMISLexer is constructed without a
 * CharStream (the 'cs' built above is never used), the token stream is fed to
 * an FTSParser rather than a CMIS parser, the parsed tree is never walked,
 * and 'bestShards' is neither in scope nor of the declared return type.
 * This method does not compile as-is.
 */
private SearchTerm extractCmisPropertySearchTerm(SearchParameters searchParameters, String field, String operator) {
    // TODO include filter and other possible constraints
    if (searchParameters.getQuery() == null)
        return null;
    CharStream cs = new ANTLRStringStream(searchParameters.getQuery());
    CMISLexer lexer = new CMISLexer();
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    FTSParser parser = new FTSParser(tokens);
    parser.setDefaultFieldConjunction(searchParameters.getDefaultFTSOperator().equals(Operator.AND));
    parser.setMode(searchParameters.getDefaultFTSOperator().equals(Operator.AND) ? FTSParser.Mode.DEFAULT_CONJUNCTION : FTSParser.Mode.DEFAULT_DISJUNCTION);
    CommonTree ftsNode = (CommonTree) parser.ftsQuery().getTree();
    return bestShards;
}
/**
 * Simple holder for a single parsed query constraint: a field, a comparison
 * operator, and the literal value it is compared against.
 */
private class SearchTerm {

    // prefixed field/property name, e.g. "cm:title"
    private String field;
    // comparison operator, e.g. "="
    private String operator;
    // literal value from the query
    private String value;

    // NOTE(review): stub with no return statement -- does not compile as-is;
    // likely misplaced merge residue from the outer getIndexSlice work
    protected List<ShardInstance> getIndexSlice() {
    }
/**
 * Copies the identifying fields of a shard instance into a brand-new object,
 * so the result is not tied to the cache-managed original.
 */
private ShardInstance detach(ShardInstance shardNode) {
    ShardInstance copy = new ShardInstance();
    copy.setBaseUrl(shardNode.getBaseUrl());
    copy.setHostName(shardNode.getHostName());
    copy.setPort(shardNode.getPort());
    return copy;
}
/**
 * Copies the tracked state of a shard into a brand-new object, detaching it
 * from the cache-managed original.  Note the property bag is shared by
 * reference, not deep-copied.
 */
private ShardState detach(ShardState shardState) {
    ShardState copy = new ShardState();
    copy.setMaster(shardState.isMaster());
    copy.setPropertyBag(shardState.getPropertyBag());
    copy.setLastUpdated(shardState.getLastUpdated());
    copy.setLastIndexedTxId(shardState.getLastIndexedTxId());
    copy.setLastIndexedTxCommitTime(shardState.getLastIndexedTxCommitTime());
    copy.setLastIndexedChangeSetId(shardState.getLastIndexedChangeSetId());
    copy.setLastIndexedChangeSetCommitTime(shardState.getLastIndexedChangeSetCommitTime());
    return copy;
}
/**
 * Adds a value to the set held under {@code mapKey} within the map cached
 * under {@code cacheKey}, creating and registering the map on first use.
 *
 * @param cache the shared cache of maps (keys must be Serializable per SimpleCache)
 * @param cacheKey the cache entry to look up or create
 * @param mapKey the key within the cached map
 * @param mapValue the value to add to that key's set
 * @return true if the set did not already contain the value
 */
private <K1 extends Serializable, K2, V> boolean putPutAdd(SimpleCache<K1, Map<K2, Set<V>>> cache, K1 cacheKey, K2 mapKey, V mapValue) {
    Map<K2, Set<V>> map = cache.get(cacheKey);
    if (map == null) {
        map = new HashMap<>();
        // FIX: the freshly created map was never stored in the cache, so on a
        // cache miss the added value was silently lost
        cache.put(cacheKey, map);
    }
    return this.putAdd(map, mapKey, mapValue);
}
/**
 * Adds a value to the set held under {@code key}, creating and registering
 * the set on first use (equivalent to computeIfAbsent + add).
 *
 * @param map the map of sets to update
 * @param key the key whose set receives the value
 * @param value the value to add
 * @return true if the set did not already contain the value
 */
private <K, V> boolean putAdd(Map<K, Set<V>> map, K key, V value) {
    Set<V> set = map.get(key);
    if (set == null) {
        set = new HashSet<>();
        // FIX: the freshly created set was never inserted into the map, so on
        // a map miss the added value was silently lost
        map.put(key, set);
    }
    return set.add(value);
}
}

View File

@@ -4,10 +4,22 @@ inteligr8.asie.idleShardExpirationInSeconds=${}
# Overrides of alfresco-repository.jar/alfresco/caches.properties
cache.shardStateSharedCache.tx.maxItems=16384
cache.shardStateSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.shardStateSharedCache.maxItems=16384
cache.shardStateSharedCache.timeToLiveSeconds=1800
cache.shardStateSharedCache.maxIdleSeconds=0
cache.shardStateSharedCache.cluster.type=fully-distributed
cache.shardStateSharedCache.backup-count=1
cache.shardStateSharedCache.eviction-policy=LRU
cache.shardStateSharedCache.merge-policy=com.hazelcast.spi.merge.PutIfAbsentMergePolicy
cache.shardStateSharedCache.readBackupData=false
# maxItems needs to be greater than total shards, including HA instances
cache.offlineShardStateSharedCache.tx.maxItems=16384
cache.offlineShardStateSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.offlineShardStateSharedCache.maxItems=16384
cache.offlineShardStateSharedCache.timeToLiveSeconds=1800
cache.offlineShardStateSharedCache.maxIdleSeconds=0
cache.offlineShardStateSharedCache.cluster.type=fully-distributed
@@ -16,9 +28,9 @@ cache.offlineShardStateSharedCache.eviction-policy=LRU
cache.offlineShardStateSharedCache.merge-policy=com.hazelcast.spi.merge.PutIfAbsentMergePolicy
cache.offlineShardStateSharedCache.readBackupData=false
cache.coreExplicitIdSharedCache.tx.maxItems=16384
cache.coreExplicitIdSharedCache.tx.statsEnabled=${caches.tx.statsEnabled}
cache.coreExplicitIdSharedCache.maxItems=16384
cache.coreExplicitIdSharedCache.timeToLiveSeconds=1800
cache.coreExplicitIdSharedCache.maxIdleSeconds=0
cache.coreExplicitIdSharedCache.cluster.type=fully-distributed

View File

@@ -1,3 +0,0 @@
logger.inteligr8-asie.name=com.inteligr8.alfresco.asie
logger.inteligr8-asie.level=INFO

View File

@@ -0,0 +1,146 @@
package com.inteligr8.alfresco.asie;
import java.util.ArrayList;
import java.util.List;
import org.alfresco.repo.search.impl.parsers.FTSLexer;
import org.alfresco.repo.search.impl.parsers.FTSParser;
import org.alfresco.service.cmr.search.SearchParameters.Operator;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.Tree;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.module.SimpleModule;
/**
 * Unit tests documenting the shape of the ANTLR AST produced by the Alfresco
 * FTS parser for various single- and multi-term queries.  These expected tree
 * shapes underpin the query-inspection (shard-narrowing) logic.
 */
public class QueryConstraintUnitTest {

    // mapper with a Tree serializer registered -- handy for dumping parse
    // trees as JSON while debugging; not used by the assertions themselves
    private static final ObjectMapper om = new ObjectMapper();

    @BeforeClass
    public static void init() {
        SimpleModule module = new SimpleModule();
        module.addSerializer(Tree.class, new TreeSerializer());
        om.registerModule(module);
    }

    // "=@prefix:field:value" parses to an EXACT_TERM under a DEFAULT node
    @Test
    public void testSingleExactTerm() throws RecognitionException, JsonProcessingException {
        Tree tree = this.parseFts("=@cm:title:test", Operator.AND);
        tree = this.validateChildren(tree, "DISJUNCTION");
        tree = this.validateChildren(tree, "CONJUNCTION");
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "EXACT_TERM", "test");
        tree = this.validateChildren(tree, "FIELD_REF", "title");
        this.validate(tree, "PREFIX", "cm");
    }

    // "@prefix:field:value" (no '=') parses to a plain TERM
    @Test
    public void testSingleFuzzyTerm() throws RecognitionException, JsonProcessingException {
        Tree tree = this.parseFts("@cm:title:test", Operator.AND);
        tree = this.validateChildren(tree, "DISJUNCTION");
        tree = this.validateChildren(tree, "CONJUNCTION");
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "TERM", "test");
        tree = this.validateChildren(tree, "FIELD_REF", "title");
        this.validate(tree, "PREFIX", "cm");
    }

    // single-quoted values parse to PHRASE nodes (quotes retained in the text)
    @Test
    public void testSingleFuzzyString() throws RecognitionException, JsonProcessingException {
        Tree tree = this.parseFts("@cm:title:'testing'", Operator.AND);
        tree = this.validateChildren(tree, "DISJUNCTION");
        tree = this.validateChildren(tree, "CONJUNCTION");
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "PHRASE", "'testing'");
        tree = this.validateChildren(tree, "FIELD_REF", "title");
        this.validate(tree, "PREFIX", "cm");
    }

    // double-quoted values also parse to PHRASE nodes (quotes retained)
    @Test
    public void testSingleFuzzyStringDoubleQuotes() throws RecognitionException, JsonProcessingException {
        Tree tree = this.parseFts("cm:title:\"testing\"", Operator.AND);
        tree = this.validateChildren(tree, "DISJUNCTION");
        tree = this.validateChildren(tree, "CONJUNCTION");
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "PHRASE", "\"testing\"");
        tree = this.validateChildren(tree, "FIELD_REF", "title");
        this.validate(tree, "PREFIX", "cm");
    }

    // "[a TO b>" parses to a RANGE node carrying inclusivity markers and bounds
    @Test
    public void testSingleRange() throws RecognitionException, JsonProcessingException {
        Tree tree = this.parseFts("@cm:created:[NOW TO '2025-01-01T00:00:00'>", Operator.AND);
        tree = this.validateChildren(tree, "DISJUNCTION");
        tree = this.validateChildren(tree, "CONJUNCTION");
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "RANGE", "INCLUSIVE", "NOW", "'2025-01-01T00:00:00'", "EXCLUSIVE");
        tree = this.validateChildren(tree, "FIELD_REF", "created");
        this.validate(tree, "PREFIX", "cm");
    }

    // an AND of two terms yields one CONJUNCTION with two subtrees
    @Test
    public void testTwoTerms() throws RecognitionException, JsonProcessingException {
        Tree tree = this.parseFts("=@cm:title:test1 AND @cm:author:test2", Operator.AND);
        tree = this.validateChildren(tree, "DISJUNCTION");
        List<Tree> trees = this.validateChildren(tree, "CONJUNCTION", 2);
        tree = trees.get(0);
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "EXACT_TERM", "test1");
        tree = this.validateChildren(tree, "FIELD_REF", "title");
        this.validate(tree, "PREFIX", "cm");
        tree = trees.get(1);
        tree = this.validateChildren(tree, "DEFAULT");
        tree = this.validateChildren(tree, "TERM", "test2");
        tree = this.validateChildren(tree, "FIELD_REF", "author");
        this.validate(tree, "PREFIX", "cm");
    }

    /**
     * Asserts the node's text and that its children are exactly the given
     * leaf values (a terminal check: nothing further to descend into).
     */
    protected void validate(Tree tree, String text, String... extraValues) {
        Assert.assertNotNull(tree);
        Assert.assertEquals(text, tree.getText());
        Assert.assertEquals(extraValues.length, tree.getChildCount());
        for (int c = 0; c < extraValues.length; c++)
            Assert.assertEquals(extraValues[c], tree.getChild(c).getText());
    }

    /**
     * Asserts the node's text and leading leaf children, and returns the one
     * extra trailing child for further descent.
     */
    protected Tree validateChildren(Tree tree, String text, String... extraValues) {
        Assert.assertNotNull(tree);
        Assert.assertEquals(text, tree.getText());
        Assert.assertEquals(extraValues.length + 1, tree.getChildCount());
        for (int c = 0; c < extraValues.length; c++)
            Assert.assertEquals(extraValues[c], tree.getChild(c).getText());
        return tree.getChild(extraValues.length);
    }

    /**
     * Asserts the node's text and exact child count, returning all children.
     */
    protected List<Tree> validateChildren(Tree tree, String text, int count) {
        Assert.assertNotNull(tree);
        Assert.assertEquals(text, tree.getText());
        Assert.assertEquals(count, tree.getChildCount());
        List<Tree> children = new ArrayList<>();
        for (int c = 0; c < tree.getChildCount(); c++)
            children.add(tree.getChild(c));
        return children;
    }

    /**
     * Parses an FTS query using the same lexer/parser configuration the
     * repository uses, honoring the given default operator.
     */
    protected Tree parseFts(String ftsQuery, Operator defaultOperator) throws RecognitionException, JsonProcessingException {
        CharStream cs = new ANTLRStringStream(ftsQuery);
        FTSLexer lexer = new FTSLexer(cs);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        FTSParser parser = new FTSParser(tokens);
        parser.setDefaultFieldConjunction(defaultOperator.equals(Operator.AND));
        parser.setMode(defaultOperator.equals(Operator.AND) ? FTSParser.Mode.DEFAULT_CONJUNCTION : FTSParser.Mode.DEFAULT_DISJUNCTION);
        CommonTree tree = (CommonTree) parser.ftsQuery().getTree();
        return tree;
    }
}

View File

@@ -0,0 +1,44 @@
package com.inteligr8.alfresco.asie;
import java.io.IOException;
import org.antlr.runtime.tree.Tree;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.SerializerProvider;
import com.fasterxml.jackson.databind.ser.std.StdSerializer;
/**
 * Jackson serializer that renders an ANTLR {@link Tree} as a nested JSON
 * object: an optional "text" field plus a "children" array, recursively.
 */
public class TreeSerializer extends StdSerializer<Tree> {

    private static final long serialVersionUID = -2714782538361726878L;

    public TreeSerializer() {
        super(Tree.class);
    }

    public TreeSerializer(Class<Tree> type) {
        super(type);
    }

    public TreeSerializer(JavaType type) {
        super(type);
    }

    @Override
    public void serialize(Tree tree, JsonGenerator generator, SerializerProvider serializers) throws IOException {
        generator.writeStartObject();
        // only emit "text" for nodes that actually carry token text
        String text = tree.getText();
        if (text != null)
            generator.writeStringField("text", text);
        // recurse into children; leaves get no "children" array at all
        int childCount = tree.getChildCount();
        if (childCount > 0) {
            generator.writeArrayFieldStart("children");
            for (int i = 0; i < childCount; i++)
                generator.writeObject(tree.getChild(i));
            generator.writeEndArray();
        }
        generator.writeEndObject();
    }
}