Skip to content

Commit

Permalink
[SOLR-10059] a new approach to tackle appended fq parameters
Browse files Browse the repository at this point in the history
A problem with duplicate parameter appending was the inability to expand macros in shard requests. This fix sanitizes all macros from `fq` parameters in shard requests (as they should have been expanded on the coordinator).
  • Loading branch information
tboeghk committed Feb 14, 2024
1 parent 7f080b9 commit 43b9011
Show file tree
Hide file tree
Showing 5 changed files with 233 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -218,8 +218,9 @@ public void handleRequest(SolrQueryRequest req, SolrQueryResponse rsp) {
Timer.Context timer = metrics.requestTimes.time();
try {
TestInjection.injectLeaderTragedy(req.getCore());
if (pluginInfo != null && pluginInfo.attributes.containsKey(USEPARAM))
if (pluginInfo != null && pluginInfo.attributes.containsKey(USEPARAM)) {
req.getContext().put(USEPARAM, pluginInfo.attributes.get(USEPARAM));
}
SolrPluginUtils.setDefaults(this, req, defaults, appends, invariants);
req.getContext().remove(USEPARAM);
rsp.setHttpCaching(httpCaching);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import java.util.List;
import java.util.Map;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.MultiMapSolrParams;
import org.apache.solr.common.params.SolrParams;
Expand All @@ -33,6 +34,7 @@
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.request.SolrRequestHandler;
import org.apache.solr.request.macro.MacroExpander;
import org.apache.solr.request.macro.MacroSanitizer;
import org.apache.solr.search.QueryParsing;
import org.noggit.JSONParser;
import org.noggit.ObjectBuilder;
Expand Down Expand Up @@ -164,7 +166,11 @@ public static void processParams(
newMap.putAll(MultiMapSolrParams.asMultiMap(invariants));
}

if (!isShard) { // Don't expand macros in shard requests
if (isShard) {
// sanitize all macros from fq parameters as they
// might corrupt handler local params
newMap = MacroSanitizer.sanitize(CommonParams.FQ, newMap);
} else { // Don't expand macros in shard requests
String[] doMacrosStr = newMap.get("expandMacros");
boolean doMacros = true;
if (doMacrosStr != null) {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.request.macro;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

/**
 * Utility for stripping parameter values that still contain un-expanded ${...} macros.
 *
 * <p>Used for shard sub-requests, where macros should already have been expanded by the
 * coordinator node; any value still containing a macro start token would otherwise corrupt
 * handler local params.
 */
public class MacroSanitizer {

  /** Static utility class; not instantiable. */
  private MacroSanitizer() {}

  /**
   * Sanitizes macros for the given parameter in the given params set if present.
   *
   * <p>Every value of {@code param} that contains {@link MacroExpander#MACRO_START} is dropped;
   * all other values and all other parameters are left untouched. The map is modified in place
   * and returned for call-chaining convenience.
   *
   * @param param the parameter whose values we should sanitize (e.g. {@code fq})
   * @param params the parameter set (mutated in place when sanitizing occurs)
   * @return the same parameter set instance, possibly with macro-bearing values removed
   */
  public static Map<String, String[]> sanitize(String param, Map<String, String[]> params) {
    // single lookup instead of containsKey + repeated get
    final String[] values = params.get(param);

    // quick peek into the values to check for macros before allocating anything
    final boolean needsSanitizing =
        values != null
            && Arrays.stream(values).anyMatch(s -> s.contains(MacroExpander.MACRO_START));

    if (needsSanitizing) {
      final String[] sanitized =
          Arrays.stream(values)
              .filter(s -> !s.contains(MacroExpander.MACRO_START))
              .toArray(String[]::new);
      params.put(param, sanitized);
    }

    return params;
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,107 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.handler.component;

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.cloud.AbstractDistribZkTestBase;
import org.apache.solr.cloud.ConfigRequest;
import org.apache.solr.cloud.SolrCloudTestCase;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.params.CommonParams;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Verifies that an {@code fq} parameter contributed via a request handler's {@code appends}
 * configuration, containing a ${...} macro, does not break distributed (sharded) requests.
 * Macros are expanded on the coordinator; shard sub-requests must have them sanitized.
 */
public class SearchHandlerAppendsWithMacrosCloudTest extends SolrCloudTestCase {

  // collection name and cluster topology are randomized per test run
  private static String COLLECTION;
  private static int NUM_SHARDS;
  private static int NUM_REPLICAS;

  @BeforeClass
  public static void setupCluster() throws Exception {

    // decide collection name ...
    COLLECTION = "collection" + (1 + random().nextInt(100));
    // ... and shard/replica/node numbers
    NUM_SHARDS = (2 + random().nextInt(2)); // 2..3
    NUM_REPLICAS = (1 + random().nextInt(2)); // 1..2

    // create and configure cluster
    configureCluster(NUM_SHARDS * NUM_REPLICAS /* nodeCount */)
        .addConfig("conf", configset("cloud-dynamic"))
        .configure();

    // create an empty collection
    CollectionAdminRequest.createCollection(COLLECTION, "conf", NUM_SHARDS, NUM_REPLICAS)
        .processAndWait(cluster.getSolrClient(), DEFAULT_TIMEOUT);
    AbstractDistribZkTestBase.waitForRecoveriesToFinish(
        COLLECTION, ZkStateReader.from(cluster.getSolrClient()), false, true, DEFAULT_TIMEOUT);
  }

  @Test
  public void test() throws Exception {

    // field names
    final String id = "id";
    final String bee_si = "bee_sI";
    final String forage_t = "forage_t";
    final String handlerName = "/custom-select";

    // add custom handlers (the exact custom handler names should not matter);
    // the handler appends an fq containing a ${collapseSort} macro
    cluster
        .getSolrClient()
        .request(
            new ConfigRequest(
                "{\n"
                    + " 'add-requesthandler': {\n"
                    + " 'name' : '"
                    + handlerName
                    + "',\n"
                    + " 'class' : 'org.apache.solr.handler.component.SearchHandler',\n"
                    + " 'appends' : { 'fq' : '{!collapse tag=collapsing field="
                    + bee_si
                    + " sort=\"${collapseSort}\" }' }, \n"
                    + " }\n"
                    + "}"),
            COLLECTION);

    // add some documents
    {
      new UpdateRequest()
          .add(sdoc(id, 1, bee_si, "bumble bee", forage_t, "nectar"))
          .add(sdoc(id, 2, bee_si, "honey bee", forage_t, "propolis"))
          .add(sdoc(id, 3, bee_si, "solitary bee", forage_t, "pollen"))
          .commit(cluster.getSolrClient(), COLLECTION);
    }

    // compose the query, supplying the value the ${collapseSort} macro expands to
    final SolrQuery solrQuery = new SolrQuery(bee_si + ":bee");
    solrQuery.setParam(CommonParams.QT, handlerName);
    solrQuery.setParam(CommonParams.SORT, "id desc");
    solrQuery.setParam("collapseSort", "id asc");

    // make the query
    // the query would break if un-expanded macros reached the shard requests
    final QueryResponse queryResponse =
        new QueryRequest(solrQuery).process(cluster.getSolrClient(), COLLECTION);
    assertNotNull(queryResponse);
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.request.macro;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;

import java.util.HashMap;
import java.util.Map;
import org.apache.solr.common.params.CommonParams;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Test;

/** Unit tests for {@link MacroSanitizer}. */
public class MacroSanitizerTest {

  /** Sanitizing a map with nothing to sanitize must hand back the very same map instance. */
  @Test
  public void shouldReturnSameInstanceWhenNotSanitizing() {
    // given an empty parameter map
    final Map<String, String[]> input = new HashMap<>();

    // when sanitizing
    final Map<String, String[]> result = MacroSanitizer.sanitize(CommonParams.FQ, input);

    // then the identical instance comes back
    assertSame(input, result);
  }

  /** Only fq values containing macros are dropped; plain values and other params survive. */
  @Test
  public void shouldNotSanitizeNonMacros() {
    // given two plain fq values, one macro-bearing fq value, and an unrelated q param
    final String[] fqValues = {
      "bee:up", "look:left", "{!collapse tag=collapsing field=bee sort=${collapseSort}}"
    };
    final Map<String, String[]> input = new HashMap<>();
    input.put(CommonParams.FQ, fqValues);
    input.put("q", new String[] {"bee:honey"});

    // when sanitizing
    final Map<String, String[]> result = MacroSanitizer.sanitize(CommonParams.FQ, input);

    // then both parameters remain, but the macro-bearing fq value is gone
    assertEquals(2, result.size());
    assertEquals(2, result.get("fq").length);
    MatcherAssert.assertThat(result.get("fq"), Matchers.arrayContaining("bee:up", "look:left"));
  }
}

0 comments on commit 43b9011

Please sign in to comment.