From a081ed16c0c3295253408f999354e6bbfa5fe183 Mon Sep 17 00:00:00 2001
From: Nick Clemens
Date: Wed, 1 Nov 2017 15:47:39 +0000
Subject: [PATCH] Bug 19559: Add '-' to list of characters we don't split search terms on

Test plan assumes:
QueryAutoTruncate = automatically
SearchEngine = Elasticsearch

To test:
0 - Apply the unit test patch only
1 - prove t/db_dependent/Koha_SearchEngine_Elasticsearch_Search.t
2 - Should fail
3 - Apply this patch
4 - prove t/db_dependent/Koha_SearchEngine_Elasticsearch_Search.t
5 - Should pass
6 - Search for 'Local-number:"4"' (or a valid biblionumber)
7 - Should get the expected result

Signed-off-by: David Bourgault
Signed-off-by: Julian Maurice
Signed-off-by: Jonathan Druart
---
 Koha/SearchEngine/Elasticsearch/QueryBuilder.pm | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Koha/SearchEngine/Elasticsearch/QueryBuilder.pm b/Koha/SearchEngine/Elasticsearch/QueryBuilder.pm
index cdd0fc6bd1..a021a0cb52 100644
--- a/Koha/SearchEngine/Elasticsearch/QueryBuilder.pm
+++ b/Koha/SearchEngine/Elasticsearch/QueryBuilder.pm
@@ -798,7 +798,7 @@ sub _truncate_terms {
 
     # '"donald duck" title:"the mouse" and peter" get split into
     # ['', '"donald duck"', '', ' ', '', 'title:"the mouse"', '', ' ', 'and', ' ', 'pete']
-    my @tokens = split /((?:\w+:)?"[^"]+"|\s+)/, $query;
+    my @tokens = split /((?:[\w-]+:)?"[^"]+"|\s+)/, $query;
 
     # Filter out empty tokens
     my @words = grep { $_ !~ /^\s*$/ } @tokens;
-- 
2.39.5
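
A minimal, standalone Perl sketch (illustration only, not part of the patch) of the before/after tokenization. The query string is taken from step 6 of the test plan and the two patterns are copied verbatim from the hunk above; everything else is assumed for demonstration.

#!/usr/bin/perl
use strict;
use warnings;

my $query = 'Local-number:"4"';

# Old pattern: \w+ does not match '-', so the match starts after the hyphen
# and the field prefix is torn apart, leaving 'Local-' as a bare token.
my @old_tokens = grep { $_ !~ /^\s*$/ } split /((?:\w+:)?"[^"]+"|\s+)/, $query;

# New pattern: [\w-]+ keeps 'Local-number:"4"' together as one fielded phrase.
my @new_tokens = grep { $_ !~ /^\s*$/ } split /((?:[\w-]+:)?"[^"]+"|\s+)/, $query;

print "old: ", join( ' | ', @old_tokens ), "\n";   # old: Local- | number:"4"
print "new: ", join( ' | ', @new_tokens ), "\n";   # new: Local-number:"4"

Because the rest of _truncate_terms (not shown in the hunk) appends a wildcard to unquoted tokens when QueryAutoTruncate is on, the old split would turn the query into something like 'Local-* number:"4"', which is why the fielded biblionumber search in the test plan fails before the patch.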