summaryrefslogtreecommitdiff
path: root/src/test/java/org/elasticsearch/common
diff options
context:
space:
mode:
Diffstat (limited to 'src/test/java/org/elasticsearch/common')
-rw-r--r--src/test/java/org/elasticsearch/common/BooleansTests.java43
-rw-r--r--src/test/java/org/elasticsearch/common/ParseFieldTests.java74
-rw-r--r--src/test/java/org/elasticsearch/common/StringsTests.java36
-rw-r--r--src/test/java/org/elasticsearch/common/TableTests.java153
-rw-r--r--src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java106
-rw-r--r--src/test/java/org/elasticsearch/common/collect/Iterators2Tests.java50
-rw-r--r--src/test/java/org/elasticsearch/common/compress/CompressedStringTests.java55
-rw-r--r--src/test/java/org/elasticsearch/common/geo/GeoHashTests.java59
-rw-r--r--src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java256
-rw-r--r--src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java196
-rw-r--r--src/test/java/org/elasticsearch/common/hppc/HppcMapsTests.java101
-rw-r--r--src/test/java/org/elasticsearch/common/io/StreamsTests.java91
-rw-r--r--src/test/java/org/elasticsearch/common/io/streams/BytesStreamsTests.java90
-rw-r--r--src/test/java/org/elasticsearch/common/io/streams/HandlesStreamsTests.java80
-rw-r--r--src/test/java/org/elasticsearch/common/joda/DateMathParserTests.java66
-rw-r--r--src/test/java/org/elasticsearch/common/lucene/LuceneTest.java49
-rw-r--r--src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java342
-rw-r--r--src/test/java/org/elasticsearch/common/lucene/search/MatchAllDocsFilterTests.java69
-rw-r--r--src/test/java/org/elasticsearch/common/lucene/search/MoreLikeThisQueryTests.java74
-rw-r--r--src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java66
-rw-r--r--src/test/java/org/elasticsearch/common/lucene/search/TermsFilterTests.java119
-rw-r--r--src/test/java/org/elasticsearch/common/lucene/search/XBooleanFilterLuceneTests.java391
-rw-r--r--src/test/java/org/elasticsearch/common/lucene/search/XBooleanFilterTests.java567
-rw-r--r--src/test/java/org/elasticsearch/common/lucene/store/InputStreamIndexInputTests.java266
-rw-r--r--src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java285
-rw-r--r--src/test/java/org/elasticsearch/common/path/PathTrieTests.java160
-rw-r--r--src/test/java/org/elasticsearch/common/recycler/AbstractRecyclerTests.java92
-rw-r--r--src/test/java/org/elasticsearch/common/recycler/ConcurrentRecyclerTests.java29
-rw-r--r--src/test/java/org/elasticsearch/common/recycler/LockedRecyclerTests.java29
-rw-r--r--src/test/java/org/elasticsearch/common/recycler/NoneRecyclerTests.java29
-rw-r--r--src/test/java/org/elasticsearch/common/recycler/QueueRecyclerTests.java29
-rw-r--r--src/test/java/org/elasticsearch/common/recycler/SoftConcurrentRecyclerTests.java29
-rw-r--r--src/test/java/org/elasticsearch/common/recycler/SoftThreadLocalRecyclerTests.java29
-rw-r--r--src/test/java/org/elasticsearch/common/recycler/ThreadLocalRecyclerTests.java29
-rw-r--r--src/test/java/org/elasticsearch/common/regex/RegexTests.java71
-rw-r--r--src/test/java/org/elasticsearch/common/rounding/RoundingTests.java44
-rw-r--r--src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java94
-rw-r--r--src/test/java/org/elasticsearch/common/settings/ImmutableSettingsTests.java178
-rw-r--r--src/test/java/org/elasticsearch/common/settings/bar/BarTestClass.java24
-rw-r--r--src/test/java/org/elasticsearch/common/settings/foo/FooTestClass.java24
-rw-r--r--src/test/java/org/elasticsearch/common/settings/loader/JsonSettingsLoaderTests.java52
-rw-r--r--src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java52
-rw-r--r--src/test/java/org/elasticsearch/common/settings/loader/test-settings.json10
-rw-r--r--src/test/java/org/elasticsearch/common/settings/loader/test-settings.yml8
-rw-r--r--src/test/java/org/elasticsearch/common/unit/ByteSizeUnitTests.java83
-rw-r--r--src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java98
-rw-r--r--src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java62
-rw-r--r--src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java199
-rw-r--r--src/test/java/org/elasticsearch/common/unit/TimeValueTests.java69
-rw-r--r--src/test/java/org/elasticsearch/common/util/BigArraysTests.java195
-rw-r--r--src/test/java/org/elasticsearch/common/util/ByteUtilsTests.java109
-rw-r--r--src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java64
-rw-r--r--src/test/java/org/elasticsearch/common/util/SlicedDoubleListTests.java120
-rw-r--r--src/test/java/org/elasticsearch/common/util/SlicedLongListTests.java119
-rw-r--r--src/test/java/org/elasticsearch/common/util/SlicedObjectListTests.java147
-rw-r--r--src/test/java/org/elasticsearch/common/util/concurrent/CountDownTest.java105
-rw-r--r--src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java236
-rw-r--r--src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java277
-rw-r--r--src/test/java/org/elasticsearch/common/xcontent/builder/BuilderRawFieldTests.java124
-rw-r--r--src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java156
-rw-r--r--src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java105
-rw-r--r--src/test/java/org/elasticsearch/common/xcontent/support/XContentHelperTests.java68
-rw-r--r--src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java456
63 files changed, 7488 insertions, 0 deletions
diff --git a/src/test/java/org/elasticsearch/common/BooleansTests.java b/src/test/java/org/elasticsearch/common/BooleansTests.java
new file mode 100644
index 0000000..7440d8c
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/BooleansTests.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common;
+
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.hamcrest.Matchers;
+import org.junit.Test;
+
+public class BooleansTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testIsBoolean() {
+ String[] booleans = new String[]{"true", "false", "on", "off", "yes", "no", "0", "1"};
+ String[] notBooleans = new String[]{"11", "00", "sdfsdfsf", "F", "T"};
+
+ for (String b : booleans) {
+ String t = "prefix" + b + "suffix";
+ assertThat("failed to recognize [" + b + "] as boolean", Booleans.isBoolean(t.toCharArray(), "prefix".length(), b.length()), Matchers.equalTo(true));
+ }
+
+ for (String nb : notBooleans) {
+ String t = "prefix" + nb + "suffix";
+ assertThat("recognized [" + nb + "] as boolean", Booleans.isBoolean(t.toCharArray(), "prefix".length(), nb.length()), Matchers.equalTo(false));
+ }
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/ParseFieldTests.java b/src/test/java/org/elasticsearch/common/ParseFieldTests.java
new file mode 100644
index 0000000..d1dca3a
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/ParseFieldTests.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.common;
+
+import org.elasticsearch.ElasticsearchIllegalArgumentException;
+import org.elasticsearch.test.ElasticsearchTestCase;
+
+import java.util.EnumSet;
+
+import static org.hamcrest.CoreMatchers.*;
+
+public class ParseFieldTests extends ElasticsearchTestCase {
+
+ public void testParse() {
+ String[] values = new String[]{"foo_bar", "fooBar"};
+ ParseField field = new ParseField(randomFrom(values));
+ String[] deprecated = new String[]{"barFoo", "bar_foo"};
+ ParseField withDepredcations = field.withDeprecation("Foobar", randomFrom(deprecated));
+ assertThat(field, not(sameInstance(withDepredcations)));
+ assertThat(field.match(randomFrom(values), ParseField.EMPTY_FLAGS), is(true));
+ assertThat(field.match("foo bar", ParseField.EMPTY_FLAGS), is(false));
+ assertThat(field.match(randomFrom(deprecated), ParseField.EMPTY_FLAGS), is(false));
+ assertThat(field.match("barFoo", ParseField.EMPTY_FLAGS), is(false));
+
+
+ assertThat(withDepredcations.match(randomFrom(values), ParseField.EMPTY_FLAGS), is(true));
+ assertThat(withDepredcations.match("foo bar", ParseField.EMPTY_FLAGS), is(false));
+ assertThat(withDepredcations.match(randomFrom(deprecated), ParseField.EMPTY_FLAGS), is(true));
+ assertThat(withDepredcations.match("barFoo", ParseField.EMPTY_FLAGS), is(true));
+
+ // now with strict mode
+ EnumSet<ParseField.Flag> flags = EnumSet.of(ParseField.Flag.STRICT);
+ assertThat(field.match(randomFrom(values), flags), is(true));
+ assertThat(field.match("foo bar", flags), is(false));
+ assertThat(field.match(randomFrom(deprecated), flags), is(false));
+ assertThat(field.match("barFoo", flags), is(false));
+
+
+ assertThat(withDepredcations.match(randomFrom(values), flags), is(true));
+ assertThat(withDepredcations.match("foo bar", flags), is(false));
+ try {
+ withDepredcations.match(randomFrom(deprecated), flags);
+ fail();
+ } catch (ElasticsearchIllegalArgumentException ex) {
+
+ }
+
+ try {
+ withDepredcations.match("barFoo", flags);
+ fail();
+ } catch (ElasticsearchIllegalArgumentException ex) {
+
+ }
+
+
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/StringsTests.java b/src/test/java/org/elasticsearch/common/StringsTests.java
new file mode 100644
index 0000000..e6f75aa
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/StringsTests.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common;
+
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+public class StringsTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testToCamelCase() {
+ assertEquals("foo", Strings.toCamelCase("foo"));
+ assertEquals("fooBar", Strings.toCamelCase("fooBar"));
+ assertEquals("FooBar", Strings.toCamelCase("FooBar"));
+ assertEquals("fooBar", Strings.toCamelCase("foo_bar"));
+ assertEquals("fooBarFooBar", Strings.toCamelCase("foo_bar_foo_bar"));
+ assertEquals("fooBar", Strings.toCamelCase("foo_bar_"));
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/TableTests.java b/src/test/java/org/elasticsearch/common/TableTests.java
new file mode 100644
index 0000000..919e1c4
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/TableTests.java
@@ -0,0 +1,153 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common;
+
+import org.elasticsearch.ElasticsearchIllegalStateException;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import java.util.List;
+import java.util.Map;
+
+public class TableTests extends ElasticsearchTestCase {
+
+ @Test(expected = ElasticsearchIllegalStateException.class)
+ public void testFailOnStartRowWithoutHeader() {
+ Table table = new Table();
+ table.startRow();
+ }
+
+ @Test(expected = ElasticsearchIllegalStateException.class)
+ public void testFailOnEndHeadersWithoutStart() {
+ Table table = new Table();
+ table.endHeaders();
+ }
+
+ @Test(expected = ElasticsearchIllegalStateException.class)
+ public void testFailOnAddCellWithoutHeader() {
+ Table table = new Table();
+ table.addCell("error");
+ }
+
+ @Test(expected = ElasticsearchIllegalStateException.class)
+ public void testFailOnAddCellWithoutRow() {
+ Table table = this.getTableWithHeaders();
+ table.addCell("error");
+ }
+
+ @Test(expected = ElasticsearchIllegalStateException.class)
+ public void testFailOnEndRowWithoutStart() {
+ Table table = this.getTableWithHeaders();
+ table.endRow();
+ }
+
+ @Test(expected = ElasticsearchIllegalStateException.class)
+ public void testFailOnLessCellsThanDeclared() {
+ Table table = this.getTableWithHeaders();
+ table.startRow();
+ table.addCell("foo");
+ table.endRow(true);
+ }
+
+ @Test
+ public void testOnLessCellsThanDeclaredUnchecked() {
+ Table table = this.getTableWithHeaders();
+ table.startRow();
+ table.addCell("foo");
+ table.endRow(false);
+ }
+
+ @Test(expected = ElasticsearchIllegalStateException.class)
+ public void testFailOnMoreCellsThanDeclared() {
+ Table table = this.getTableWithHeaders();
+ table.startRow();
+ table.addCell("foo");
+ table.addCell("bar");
+ table.addCell("foobar");
+ }
+
+ @Test
+ public void testSimple() {
+ Table table = this.getTableWithHeaders();
+ table.startRow();
+ table.addCell("foo1");
+ table.addCell("bar1");
+ table.endRow();
+ table.startRow();
+ table.addCell("foo2");
+ table.addCell("bar2");
+ table.endRow();
+
+ // Check headers
+ List<Table.Cell> headers = table.getHeaders();
+ assertEquals(2, headers.size());
+ assertEquals("foo", headers.get(0).value.toString());
+ assertEquals(2, headers.get(0).attr.size());
+ assertEquals("f", headers.get(0).attr.get("alias"));
+ assertEquals("foo", headers.get(0).attr.get("desc"));
+ assertEquals("bar", headers.get(1).value.toString());
+ assertEquals(2, headers.get(1).attr.size());
+ assertEquals("b", headers.get(1).attr.get("alias"));
+ assertEquals("bar", headers.get(1).attr.get("desc"));
+
+ // Check rows
+ List<List<Table.Cell>> rows = table.getRows();
+ assertEquals(2, rows.size());
+ List<Table.Cell> row = rows.get(0);
+ assertEquals("foo1", row.get(0).value.toString());
+ assertEquals("bar1", row.get(1).value.toString());
+ row = rows.get(1);
+ assertEquals("foo2", row.get(0).value.toString());
+ assertEquals("bar2", row.get(1).value.toString());
+
+ // Check getAsMap
+ Map<String, List<Table.Cell>> map = table.getAsMap();
+ assertEquals(2, map.size());
+ row = map.get("foo");
+ assertEquals("foo1", row.get(0).value.toString());
+ assertEquals("foo2", row.get(1).value.toString());
+ row = map.get("bar");
+ assertEquals("bar1", row.get(0).value.toString());
+ assertEquals("bar2", row.get(1).value.toString());
+
+ // Check getHeaderMap
+ Map<String, Table.Cell> headerMap = table.getHeaderMap();
+ assertEquals(2, headerMap.size());
+ Table.Cell cell = headerMap.get("foo");
+ assertEquals("foo", cell.value.toString());
+ cell = headerMap.get("bar");
+ assertEquals("bar", cell.value.toString());
+
+ // Check findHeaderByName
+ cell = table.findHeaderByName("foo");
+ assertEquals("foo", cell.value.toString());
+ cell = table.findHeaderByName("missing");
+ assertNull(cell);
+ }
+
+ private Table getTableWithHeaders() {
+ Table table = new Table();
+ table.startHeaders();
+ table.addCell("foo", "alias:f;desc:foo");
+ table.addCell("bar", "alias:b;desc:bar");
+ table.endHeaders();
+ return table;
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java b/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java
new file mode 100644
index 0000000..5565cd4
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/breaker/MemoryCircuitBreakerTests.java
@@ -0,0 +1,106 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.breaker;
+
+import org.elasticsearch.common.unit.ByteSizeValue;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicReference;
+
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ * Tests for the Memory Aggregating Circuit Breaker
+ */
+public class MemoryCircuitBreakerTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testThreadedUpdatesToBreaker() throws Exception {
+ final int NUM_THREADS = 5;
+ final int BYTES_PER_THREAD = 1000;
+ final Thread[] threads = new Thread[NUM_THREADS];
+ final AtomicBoolean tripped = new AtomicBoolean(false);
+ final AtomicReference<Throwable> lastException = new AtomicReference<Throwable>(null);
+
+ final MemoryCircuitBreaker breaker = new MemoryCircuitBreaker(new ByteSizeValue((BYTES_PER_THREAD * NUM_THREADS) - 1), 1.0, logger);
+
+ for (int i = 0; i < NUM_THREADS; i++) {
+ threads[i] = new Thread(new Runnable() {
+ @Override
+ public void run() {
+ for (int j = 0; j < BYTES_PER_THREAD; j++) {
+ try {
+ breaker.addEstimateBytesAndMaybeBreak(1L);
+ } catch (CircuitBreakingException e) {
+ if (tripped.get()) {
+ assertThat("tripped too many times", true, equalTo(false));
+ } else {
+ assertThat(tripped.compareAndSet(false, true), equalTo(true));
+ }
+ } catch (Throwable e2) {
+ lastException.set(e2);
+ }
+ }
+ }
+ });
+
+ threads[i].start();
+ }
+
+ for (Thread t : threads) {
+ t.join();
+ }
+
+ assertThat("no other exceptions were thrown", lastException.get(), equalTo(null));
+ assertThat("breaker was tripped exactly once", tripped.get(), equalTo(true));
+ }
+
+ @Test
+ public void testConstantFactor() throws Exception {
+ final MemoryCircuitBreaker breaker = new MemoryCircuitBreaker(new ByteSizeValue(15), 1.6, logger);
+
+ // add only 7 bytes
+ breaker.addWithoutBreaking(7);
+
+ try {
+ // this won't actually add it because it trips the breaker
+ breaker.addEstimateBytesAndMaybeBreak(3);
+ fail("should never reach this");
+ } catch (CircuitBreakingException cbe) {
+ }
+
+ // shouldn't throw an exception
+ breaker.addEstimateBytesAndMaybeBreak(2);
+
+ assertThat(breaker.getUsed(), equalTo(9L));
+
+ // adding 3 more bytes (now at 12)
+ breaker.addWithoutBreaking(3);
+
+ try {
+ // Adding no bytes still breaks
+ breaker.addEstimateBytesAndMaybeBreak(0);
+ fail("should never reach this");
+ } catch (CircuitBreakingException cbe) {
+ }
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/collect/Iterators2Tests.java b/src/test/java/org/elasticsearch/common/collect/Iterators2Tests.java
new file mode 100644
index 0000000..65aa51c
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/collect/Iterators2Tests.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.collect;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Ordering;
+import com.google.common.collect.Sets;
+import org.apache.lucene.util.CollectionUtil;
+import org.elasticsearch.test.ElasticsearchTestCase;
+
+import java.util.Iterator;
+import java.util.List;
+
+public class Iterators2Tests extends ElasticsearchTestCase {
+
+ public void testDeduplicateSorted() {
+ final List<String> list = Lists.newArrayList();
+ for (int i = randomInt(100); i >= 0; --i) {
+ final int frequency = randomIntBetween(1, 10);
+ final String s = randomAsciiOfLength(randomIntBetween(2, 20));
+ for (int j = 0; j < frequency; ++j) {
+ list.add(s);
+ }
+ }
+ CollectionUtil.introSort(list);
+ final List<String> deduplicated = Lists.newArrayList();
+ for (Iterator<String> it = Iterators2.deduplicateSorted(list.iterator(), Ordering.natural()); it.hasNext(); ) {
+ deduplicated.add(it.next());
+ }
+ assertEquals(Lists.newArrayList(Sets.newTreeSet(list)), deduplicated);
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/compress/CompressedStringTests.java b/src/test/java/org/elasticsearch/common/compress/CompressedStringTests.java
new file mode 100644
index 0000000..9b63cbc
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/compress/CompressedStringTests.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.compress;
+
+import org.elasticsearch.common.settings.ImmutableSettings;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;
+
+/**
+ *
+ */
+public class CompressedStringTests extends ElasticsearchTestCase {
+
+ @Test
+ public void simpleTestsLZF() throws IOException {
+ simpleTests("lzf");
+ }
+
+ public void simpleTests(String compressor) throws IOException {
+ CompressorFactory.configure(ImmutableSettings.settingsBuilder().put("compress.default.type", compressor).build());
+ String str = "this is a simple string";
+ CompressedString cstr = new CompressedString(str);
+ assertThat(cstr.string(), equalTo(str));
+ assertThat(new CompressedString(str), equalTo(cstr));
+
+ String str2 = "this is a simple string 2";
+ CompressedString cstr2 = new CompressedString(str2);
+ assertThat(cstr2.string(), not(equalTo(str)));
+ assertThat(new CompressedString(str2), not(equalTo(cstr)));
+ assertThat(new CompressedString(str2), equalTo(cstr2));
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/geo/GeoHashTests.java b/src/test/java/org/elasticsearch/common/geo/GeoHashTests.java
new file mode 100644
index 0000000..55ff0bd
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/geo/GeoHashTests.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.common.geo;
+
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+
+
+/**
+ * Tests for {@link GeoHashUtils}
+ */
+public class GeoHashTests extends ElasticsearchTestCase {
+
+
+ @Test
+ public void testGeohashAsLongRoutines() {
+
+ //Ensure that for all points at all supported levels of precision
+ // that the long encoding of a geohash is compatible with its
+ // String based counterpart
+ for (double lat=-90;lat<90;lat++)
+ {
+ for (double lng=-180;lng<180;lng++)
+ {
+ for(int p=1;p<=12;p++)
+ {
+ long geoAsLong = GeoHashUtils.encodeAsLong(lat,lng,p);
+ String geohash = GeoHashUtils.encode(lat,lng,p);
+
+ String geohashFromLong=GeoHashUtils.toString(geoAsLong);
+ assertEquals(geohash, geohashFromLong);
+ GeoPoint pos=GeoHashUtils.decode(geohash);
+ GeoPoint pos2=GeoHashUtils.decode(geoAsLong);
+ assertEquals(pos, pos2);
+ }
+ }
+
+ }
+ }
+
+
+}
diff --git a/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java b/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
new file mode 100644
index 0000000..31b2900
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java
@@ -0,0 +1,256 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.geo;
+
+import com.spatial4j.core.shape.Shape;
+import com.spatial4j.core.shape.jts.JtsGeometry;
+import com.spatial4j.core.shape.jts.JtsPoint;
+import com.vividsolutions.jts.geom.*;
+import org.elasticsearch.common.geo.builders.ShapeBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+
+/**
+ * Tests for {@link GeoJSONShapeParser}
+ */
+public class GeoJSONShapeParserTests extends ElasticsearchTestCase {
+
+    // shared JTS factory used to build the expected geometries
+    private final static GeometryFactory GEOMETRY_FACTORY = new GeometryFactory();
+
+    // a GeoJSON "Point" should parse into a spatial4j JtsPoint
+    @Test
+    public void testParse_simplePoint() throws IOException {
+        String pointGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Point")
+                .startArray("coordinates").value(100.0).value(0.0).endArray()
+                .endObject().string();
+
+        Point expected = GEOMETRY_FACTORY.createPoint(new Coordinate(100.0, 0.0));
+        assertGeometryEquals(new JtsPoint(expected, ShapeBuilder.SPATIAL_CONTEXT), pointGeoJson);
+    }
+
+    // a GeoJSON "LineString" should parse into a JtsGeometry wrapping a JTS LineString
+    @Test
+    public void testParse_lineString() throws IOException {
+        String lineGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "LineString")
+                .startArray("coordinates")
+                .startArray().value(100.0).value(0.0).endArray()
+                .startArray().value(101.0).value(1.0).endArray()
+                .endArray()
+                .endObject().string();
+
+        List<Coordinate> lineCoordinates = new ArrayList<Coordinate>();
+        lineCoordinates.add(new Coordinate(100, 0));
+        lineCoordinates.add(new Coordinate(101, 1));
+
+        LineString expected = GEOMETRY_FACTORY.createLineString(
+                lineCoordinates.toArray(new Coordinate[lineCoordinates.size()]));
+        assertGeometryEquals(new JtsGeometry(expected, ShapeBuilder.SPATIAL_CONTEXT, false), lineGeoJson);
+    }
+
+    // a "Polygon" with a single (exterior) ring and no holes
+    @Test
+    public void testParse_polygonNoHoles() throws IOException {
+        String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
+                .startArray("coordinates")
+                .startArray()
+                .startArray().value(100.0).value(1.0).endArray()
+                .startArray().value(101.0).value(1.0).endArray()
+                .startArray().value(101.0).value(0.0).endArray()
+                .startArray().value(100.0).value(0.0).endArray()
+                .startArray().value(100.0).value(1.0).endArray()
+                .endArray()
+                .endArray()
+                .endObject().string();
+
+        // NOTE(review): the expected shell starts at a different coordinate and
+        // runs in the opposite order from the JSON input — the parser apparently
+        // normalizes ring orientation; equality is delegated to
+        // ElasticsearchGeoAssertions, which presumably ignores start/winding.
+        List<Coordinate> shellCoordinates = new ArrayList<Coordinate>();
+        shellCoordinates.add(new Coordinate(100, 0));
+        shellCoordinates.add(new Coordinate(101, 0));
+        shellCoordinates.add(new Coordinate(101, 1));
+        shellCoordinates.add(new Coordinate(100, 1));
+        shellCoordinates.add(new Coordinate(100, 0));
+
+        LinearRing shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]));
+        Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, null);
+        assertGeometryEquals(new JtsGeometry(expected, ShapeBuilder.SPATIAL_CONTEXT, false), polygonGeoJson);
+    }
+
+    // a "Polygon" with one exterior ring and one interior ring (hole)
+    @Test
+    public void testParse_polygonWithHole() throws IOException {
+        String polygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "Polygon")
+                .startArray("coordinates")
+                .startArray()
+                .startArray().value(100.0).value(1.0).endArray()
+                .startArray().value(101.0).value(1.0).endArray()
+                .startArray().value(101.0).value(0.0).endArray()
+                .startArray().value(100.0).value(0.0).endArray()
+                .startArray().value(100.0).value(1.0).endArray()
+                .endArray()
+                .startArray()
+                .startArray().value(100.2).value(0.8).endArray()
+                .startArray().value(100.2).value(0.2).endArray()
+                .startArray().value(100.8).value(0.2).endArray()
+                .startArray().value(100.8).value(0.8).endArray()
+                .startArray().value(100.2).value(0.8).endArray()
+                .endArray()
+                .endArray()
+                .endObject().string();
+
+        List<Coordinate> shellCoordinates = new ArrayList<Coordinate>();
+        shellCoordinates.add(new Coordinate(100, 0));
+        shellCoordinates.add(new Coordinate(101, 0));
+        shellCoordinates.add(new Coordinate(101, 1));
+        shellCoordinates.add(new Coordinate(100, 1));
+        shellCoordinates.add(new Coordinate(100, 0));
+
+        List<Coordinate> holeCoordinates = new ArrayList<Coordinate>();
+        holeCoordinates.add(new Coordinate(100.2, 0.2));
+        holeCoordinates.add(new Coordinate(100.8, 0.2));
+        holeCoordinates.add(new Coordinate(100.8, 0.8));
+        holeCoordinates.add(new Coordinate(100.2, 0.8));
+        holeCoordinates.add(new Coordinate(100.2, 0.2));
+
+        LinearRing shell = GEOMETRY_FACTORY.createLinearRing(
+                shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]));
+        LinearRing[] holes = new LinearRing[1];
+        holes[0] = GEOMETRY_FACTORY.createLinearRing(
+                holeCoordinates.toArray(new Coordinate[holeCoordinates.size()]));
+        Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, holes);
+        assertGeometryEquals(new JtsGeometry(expected, ShapeBuilder.SPATIAL_CONTEXT, false), polygonGeoJson);
+    }
+
+    // a GeoJSON "MultiPoint" should parse into a JTS MultiPoint
+    @Test
+    public void testParse_multiPoint() throws IOException {
+        String multiPointGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "MultiPoint")
+                .startArray("coordinates")
+                .startArray().value(100.0).value(0.0).endArray()
+                .startArray().value(101.0).value(1.0).endArray()
+                .endArray()
+                .endObject().string();
+
+        List<Coordinate> multiPointCoordinates = new ArrayList<Coordinate>();
+        multiPointCoordinates.add(new Coordinate(100, 0));
+        multiPointCoordinates.add(new Coordinate(101, 1));
+
+        MultiPoint expected = GEOMETRY_FACTORY.createMultiPoint(
+                multiPointCoordinates.toArray(new Coordinate[multiPointCoordinates.size()]));
+        assertGeometryEquals(new JtsGeometry(expected, ShapeBuilder.SPATIAL_CONTEXT, false), multiPointGeoJson);
+    }
+
+    // a "MultiPolygon" with two members: one plain polygon, one polygon with a hole
+    @Test
+    public void testParse_multiPolygon() throws IOException {
+        String multiPolygonGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "MultiPolygon")
+                .startArray("coordinates")
+                .startArray()
+                .startArray()
+                .startArray().value(102.0).value(2.0).endArray()
+                .startArray().value(103.0).value(2.0).endArray()
+                .startArray().value(103.0).value(3.0).endArray()
+                .startArray().value(102.0).value(3.0).endArray()
+                .startArray().value(102.0).value(2.0).endArray()
+                .endArray()
+                .endArray()
+                .startArray()
+                .startArray()
+                .startArray().value(100.0).value(0.0).endArray()
+                .startArray().value(101.0).value(0.0).endArray()
+                .startArray().value(101.0).value(1.0).endArray()
+                .startArray().value(100.0).value(1.0).endArray()
+                .startArray().value(100.0).value(0.0).endArray()
+                .endArray()
+                .startArray()
+                .startArray().value(100.2).value(0.8).endArray()
+                .startArray().value(100.2).value(0.2).endArray()
+                .startArray().value(100.8).value(0.2).endArray()
+                .startArray().value(100.8).value(0.8).endArray()
+                .startArray().value(100.2).value(0.8).endArray()
+                .endArray()
+                .endArray()
+                .endArray()
+                .endObject().string();
+
+        List<Coordinate> shellCoordinates = new ArrayList<Coordinate>();
+        shellCoordinates.add(new Coordinate(100, 0));
+        shellCoordinates.add(new Coordinate(101, 0));
+        shellCoordinates.add(new Coordinate(101, 1));
+        shellCoordinates.add(new Coordinate(100, 1));
+        shellCoordinates.add(new Coordinate(100, 0));
+
+        List<Coordinate> holeCoordinates = new ArrayList<Coordinate>();
+        holeCoordinates.add(new Coordinate(100.2, 0.2));
+        holeCoordinates.add(new Coordinate(100.8, 0.2));
+        holeCoordinates.add(new Coordinate(100.8, 0.8));
+        holeCoordinates.add(new Coordinate(100.2, 0.8));
+        holeCoordinates.add(new Coordinate(100.2, 0.2));
+
+        LinearRing shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]));
+        LinearRing[] holes = new LinearRing[1];
+        holes[0] = GEOMETRY_FACTORY.createLinearRing(holeCoordinates.toArray(new Coordinate[holeCoordinates.size()]));
+        Polygon withHoles = GEOMETRY_FACTORY.createPolygon(shell, holes);
+
+        // second expected member: the first polygon from the JSON, reused shell list
+        shellCoordinates = new ArrayList<Coordinate>();
+        shellCoordinates.add(new Coordinate(102, 3));
+        shellCoordinates.add(new Coordinate(103, 3));
+        shellCoordinates.add(new Coordinate(103, 2));
+        shellCoordinates.add(new Coordinate(102, 2));
+        shellCoordinates.add(new Coordinate(102, 3));
+
+
+        shell = GEOMETRY_FACTORY.createLinearRing(shellCoordinates.toArray(new Coordinate[shellCoordinates.size()]));
+        Polygon withoutHoles = GEOMETRY_FACTORY.createPolygon(shell, null);
+
+        MultiPolygon expected = GEOMETRY_FACTORY.createMultiPolygon(new Polygon[] {withoutHoles, withHoles});
+
+        assertGeometryEquals(new JtsGeometry(expected, ShapeBuilder.SPATIAL_CONTEXT, false), multiPolygonGeoJson);
+    }
+
+    // unknown members ("crs", "bbox", "bubu", nested objects) must be skipped;
+    // only the top-level "type" and "coordinates" drive the parse
+    @Test
+    public void testThatParserExtractsCorrectTypeAndCoordinatesFromArbitraryJson() throws IOException {
+        String pointGeoJson = XContentFactory.jsonBuilder().startObject()
+                .startObject("crs")
+                    .field("type", "name")
+                    .startObject("properties")
+                        .field("name", "urn:ogc:def:crs:OGC:1.3:CRS84")
+                    .endObject()
+                .endObject()
+                .field("bbox", "foobar")
+                .field("type", "point")
+                .field("bubu", "foobar")
+                .startArray("coordinates").value(100.0).value(0.0).endArray()
+                .startObject("nested").startArray("coordinates").value(200.0).value(0.0).endArray().endObject()
+                .startObject("lala").field("type", "NotAPoint").endObject()
+                .endObject().string();
+
+        Point expected = GEOMETRY_FACTORY.createPoint(new Coordinate(100.0, 0.0));
+        assertGeometryEquals(new JtsPoint(expected, ShapeBuilder.SPATIAL_CONTEXT), pointGeoJson);
+    }
+
+    /**
+     * Parses the given GeoJSON with {@link ShapeBuilder#parse} and asserts the
+     * resulting shape equals the expected one. Geometry comparison is
+     * delegated to {@link ElasticsearchGeoAssertions#assertEquals}.
+     */
+    private void assertGeometryEquals(Shape expected, String geoJson) throws IOException {
+        XContentParser parser = JsonXContent.jsonXContent.createParser(geoJson);
+        parser.nextToken();
+        ElasticsearchGeoAssertions.assertEquals(ShapeBuilder.parse(parser).build(), expected);
+    }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java b/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java
new file mode 100644
index 0000000..9bb1505
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java
@@ -0,0 +1,196 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.geo;
+
+import com.spatial4j.core.shape.Point;
+import com.spatial4j.core.shape.Rectangle;
+import com.spatial4j.core.shape.Shape;
+import com.vividsolutions.jts.geom.Coordinate;
+import com.vividsolutions.jts.geom.LineString;
+import com.vividsolutions.jts.geom.Polygon;
+import org.elasticsearch.common.geo.builders.ShapeBuilder;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import static org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions.assertMultiLineString;
+import static org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions.assertMultiPolygon;
+/**
+ * Tests for {@link ShapeBuilder}
+ */
+public class ShapeBuilderTests extends ElasticsearchTestCase {
+
+    // a point builder yields the exact x/y it was given
+    @Test
+    public void testNewPoint() {
+        Point point = ShapeBuilder.newPoint(-100, 45).build();
+        assertEquals(-100D, point.getX(), 0.0d);
+        assertEquals(45D, point.getY(), 0.0d);
+    }
+
+    // envelope is defined by top-left / bottom-right corners
+    @Test
+    public void testNewRectangle() {
+        Rectangle rectangle = ShapeBuilder.newEnvelope().topLeft(-45, 30).bottomRight(45, -30).build();
+        assertEquals(-45D, rectangle.getMinX(), 0.0d);
+        assertEquals(-30D, rectangle.getMinY(), 0.0d);
+        assertEquals(45D, rectangle.getMaxX(), 0.0d);
+        assertEquals(30D, rectangle.getMaxY(), 0.0d);
+    }
+
+    // polygon builder keeps the exterior ring in insertion order
+    @Test
+    public void testNewPolygon() {
+        Polygon polygon = ShapeBuilder.newPolygon()
+                .point(-45, 30)
+                .point(45, 30)
+                .point(45, -30)
+                .point(-45, -30)
+                .point(-45, 30).toPolygon();
+
+        LineString exterior = polygon.getExteriorRing();
+        assertEquals(exterior.getCoordinateN(0), new Coordinate(-45, 30));
+        assertEquals(exterior.getCoordinateN(1), new Coordinate(45, 30));
+        assertEquals(exterior.getCoordinateN(2), new Coordinate(45, -30));
+        assertEquals(exterior.getCoordinateN(3), new Coordinate(-45, -30));
+    }
+
+    // smoke-tests: building these line strings must not throw
+    @Test
+    public void testLineStringBuilder() {
+        // Building a simple LineString
+        ShapeBuilder.newLineString()
+            .point(-130.0, 55.0)
+            .point(-130.0, -40.0)
+            .point(-15.0, -40.0)
+            .point(-20.0, 50.0)
+            .point(-45.0, 50.0)
+            .point(-45.0, -15.0)
+            .point(-110.0, -15.0)
+            .point(-110.0, 55.0).build();
+
+        // Building a linestring that needs to be wrapped (crosses the dateline)
+        ShapeBuilder.newLineString()
+            .point(100.0, 50.0)
+            .point(110.0, -40.0)
+            .point(240.0, -40.0)
+            .point(230.0, 60.0)
+            .point(200.0, 60.0)
+            .point(200.0, -30.0)
+            .point(130.0, -30.0)
+            .point(130.0, 60.0)
+            .build();
+
+        // Building a lineString on the dateline (west side)
+        ShapeBuilder.newLineString()
+            .point(-180.0, 80.0)
+            .point(-180.0, 40.0)
+            .point(-180.0, -40.0)
+            .point(-180.0, -80.0)
+            .build();
+
+        // Building a lineString on the dateline (east side)
+        ShapeBuilder.newLineString()
+            .point(180.0, 80.0)
+            .point(180.0, 40.0)
+            .point(180.0, -40.0)
+            .point(180.0, -80.0)
+            .build();
+    }
+
+    // smoke-tests: multi-linestrings, with and without dateline wrapping
+    @Test
+    public void testMultiLineString() {
+        ShapeBuilder.newMultiLinestring()
+            .linestring()
+                .point(-100.0, 50.0)
+                .point(50.0, 50.0)
+                .point(50.0, 20.0)
+                .point(-100.0, 20.0)
+            .end()
+            .linestring()
+                .point(-100.0, 20.0)
+                .point(50.0, 20.0)
+                .point(50.0, 0.0)
+                .point(-100.0, 0.0)
+            .end()
+            .build();
+
+
+        // LineString that needs to be wrapped
+        ShapeBuilder.newMultiLinestring()
+            .linestring()
+                .point(150.0, 60.0)
+                .point(200.0, 60.0)
+                .point(200.0, 40.0)
+                .point(150.0, 40.0)
+            .end()
+            .linestring()
+                .point(150.0, 20.0)
+                .point(200.0, 20.0)
+                .point(200.0, 0.0)
+                .point(150.0, 0.0)
+            .end()
+            .build();
+    }
+
+    /**
+     * Building a self-intersecting polygon must throw. The original version
+     * placed fail() inside a {@code catch (Throwable)} block, which swallowed
+     * the AssertionError thrown by fail() itself — so the test could never
+     * fail. AssertionError is now rethrown explicitly.
+     */
+    @Test
+    public void testPolygonSelfIntersection() {
+        try {
+            ShapeBuilder.newPolygon()
+                .point(-40.0, 50.0)
+                .point(40.0, 50.0)
+                .point(-40.0, -50.0)
+                .point(40.0, -50.0)
+                .close().build();
+            fail("expected build() to reject a self-intersecting polygon");
+        } catch (AssertionError e) {
+            // the fail() above (or an assertion inside build()) — do not swallow
+            throw e;
+        } catch (Throwable e) {
+            // expected: self-intersection detected
+        }
+    }
+
+    // circles must build at the dateline and at the poles
+    @Test
+    public void testGeoCircle() {
+        ShapeBuilder.newCircleBuilder().center(0, 0).radius("100m").build();
+        ShapeBuilder.newCircleBuilder().center(+180, 0).radius("100m").build();
+        ShapeBuilder.newCircleBuilder().center(-180, 0).radius("100m").build();
+        ShapeBuilder.newCircleBuilder().center(0, 90).radius("100m").build();
+        ShapeBuilder.newCircleBuilder().center(0, -90).radius("100m").build();
+    }
+
+    // a polygon crossing the dateline is split into a multi-polygon
+    @Test
+    public void testPolygonWrapping() {
+        Shape shape = ShapeBuilder.newPolygon()
+            .point(-150.0, 65.0)
+            .point(-250.0, 65.0)
+            .point(-250.0, -65.0)
+            .point(-150.0, -65.0)
+            .close().build();
+
+        assertMultiPolygon(shape);
+    }
+
+    // a linestring crossing the dateline is split into a multi-linestring
+    @Test
+    public void testLineStringWrapping() {
+        Shape shape = ShapeBuilder.newLineString()
+            .point(-150.0, 65.0)
+            .point(-250.0, 65.0)
+            .point(-250.0, -65.0)
+            .point(-150.0, -65.0)
+            .build();
+
+        assertMultiLineString(shape);
+    }
+
+
+}
diff --git a/src/test/java/org/elasticsearch/common/hppc/HppcMapsTests.java b/src/test/java/org/elasticsearch/common/hppc/HppcMapsTests.java
new file mode 100644
index 0000000..a562131
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/hppc/HppcMapsTests.java
@@ -0,0 +1,101 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.common.hppc;
+
+import com.carrotsearch.hppc.ObjectOpenHashSet;
+import org.elasticsearch.common.collect.HppcMaps;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class HppcMapsTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testIntersection() throws Exception {
+ assumeTrue(ASSERTIONS_ENABLED);
+ ObjectOpenHashSet<String> set1 = ObjectOpenHashSet.from("1", "2", "3");
+ ObjectOpenHashSet<String> set2 = ObjectOpenHashSet.from("1", "2", "3");
+ List<String> values = toList(HppcMaps.intersection(set1, set2));
+ assertThat(values.size(), equalTo(3));
+ assertThat(values.contains("1"), equalTo(true));
+ assertThat(values.contains("2"), equalTo(true));
+ assertThat(values.contains("3"), equalTo(true));
+
+ set1 = ObjectOpenHashSet.from("1", "2", "3");
+ set2 = ObjectOpenHashSet.from("3", "4", "5");
+ values = toList(HppcMaps.intersection(set1, set2));
+ assertThat(values.size(), equalTo(1));
+ assertThat(values.get(0), equalTo("3"));
+
+ set1 = ObjectOpenHashSet.from("1", "2", "3");
+ set2 = ObjectOpenHashSet.from("4", "5", "6");
+ values = toList(HppcMaps.intersection(set1, set2));
+ assertThat(values.size(), equalTo(0));
+
+ set1 = ObjectOpenHashSet.from();
+ set2 = ObjectOpenHashSet.from("3", "4", "5");
+ values = toList(HppcMaps.intersection(set1, set2));
+ assertThat(values.size(), equalTo(0));
+
+ set1 = ObjectOpenHashSet.from("1", "2", "3");
+ set2 = ObjectOpenHashSet.from();
+ values = toList(HppcMaps.intersection(set1, set2));
+ assertThat(values.size(), equalTo(0));
+
+ set1 = ObjectOpenHashSet.from();
+ set2 = ObjectOpenHashSet.from();
+ values = toList(HppcMaps.intersection(set1, set2));
+ assertThat(values.size(), equalTo(0));
+
+ set1 = null;
+ set2 = ObjectOpenHashSet.from();
+ try {
+ toList(HppcMaps.intersection(set1, set2));
+ fail();
+ } catch (AssertionError e) {}
+
+ set1 = ObjectOpenHashSet.from();
+ set2 = null;
+ try {
+ toList(HppcMaps.intersection(set1, set2));
+ fail();
+ } catch (AssertionError e) {}
+
+ set1 = null;
+ set2 = null;
+ try {
+ toList(HppcMaps.intersection(set1, set2));
+ fail();
+ } catch (AssertionError e) {}
+ }
+
+ private List<String> toList(Iterable<String> iterable) {
+ List<String> list = new ArrayList<String>();
+ for (String s : iterable) {
+ list.add(s);
+ }
+ return list;
+ }
+
+
+}
diff --git a/src/test/java/org/elasticsearch/common/io/StreamsTests.java b/src/test/java/org/elasticsearch/common/io/StreamsTests.java
new file mode 100644
index 0000000..cde97c5
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/io/StreamsTests.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.io;
+
+import com.google.common.base.Charsets;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import java.io.*;
+import java.util.Arrays;
+
+import static org.elasticsearch.common.io.Streams.*;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ * Unit tests for {@link org.elasticsearch.common.io.Streams}.
+ */
+public class StreamsTests extends ElasticsearchTestCase {
+
+    // byte copy: returns the number of bytes moved and preserves content
+    @Test
+    public void testCopyFromInputStream() throws IOException {
+        byte[] data = "content".getBytes(Charsets.UTF_8);
+        ByteArrayInputStream source = new ByteArrayInputStream(data);
+        ByteArrayOutputStream sink = new ByteArrayOutputStream(data.length);
+
+        long copied = copy(source, sink);
+
+        assertThat(copied, equalTo((long) data.length));
+        assertThat(Arrays.equals(data, sink.toByteArray()), equalTo(true));
+    }
+
+    // copying straight from a byte[] preserves content
+    @Test
+    public void testCopyFromByteArray() throws IOException {
+        byte[] data = "content".getBytes(Charsets.UTF_8);
+        ByteArrayOutputStream sink = new ByteArrayOutputStream(data.length);
+        copy(data, sink);
+        assertThat(Arrays.equals(data, sink.toByteArray()), equalTo(true));
+    }
+
+    // draining a stream into a byte[] preserves content
+    @Test
+    public void testCopyToByteArray() throws IOException {
+        byte[] data = "content".getBytes(Charsets.UTF_8);
+        ByteArrayInputStream source = new ByteArrayInputStream(data);
+        byte[] drained = copyToByteArray(source);
+        assertThat(Arrays.equals(data, drained), equalTo(true));
+    }
+
+    // char copy: returns the number of chars moved and preserves content
+    @Test
+    public void testCopyFromReader() throws IOException {
+        String text = "content";
+        StringReader source = new StringReader(text);
+        StringWriter sink = new StringWriter();
+
+        int copied = copy(source, sink);
+
+        assertThat(text.length(), equalTo(copied));
+        assertThat(sink.toString(), equalTo(text));
+    }
+
+    // copying straight from a String preserves content
+    @Test
+    public void testCopyFromString() throws IOException {
+        String text = "content";
+        StringWriter sink = new StringWriter();
+        copy(text, sink);
+        assertThat(sink.toString(), equalTo(text));
+    }
+
+    // draining a reader into a String preserves content
+    @Test
+    public void testCopyToString() throws IOException {
+        String text = "content";
+        StringReader source = new StringReader(text);
+        String drained = copyToString(source);
+        assertThat(drained, equalTo(text));
+    }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/io/streams/BytesStreamsTests.java b/src/test/java/org/elasticsearch/common/io/streams/BytesStreamsTests.java
new file mode 100644
index 0000000..3a34b17
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/io/streams/BytesStreamsTests.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.io.streams;
+
+import org.apache.lucene.util.Constants;
+import org.elasticsearch.common.io.stream.BytesStreamInput;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import static org.hamcrest.Matchers.closeTo;
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ *
+ */
+public class BytesStreamsTests extends ElasticsearchTestCase {
+
+    // round-trips every primitive/array/string write through a read and
+    // asserts the values come back intact, in order
+    @Test
+    public void testSimpleStreams() throws Exception {
+        // NOTE(review): unclear why this is restricted to 64-bit JREs — confirm
+        assumeTrue(Constants.JRE_IS_64BIT);
+        BytesStreamOutput out = new BytesStreamOutput();
+        out.writeBoolean(false);
+        out.writeByte((byte) 1);
+        out.writeShort((short) -1);
+        out.writeInt(-1);
+        out.writeVInt(2);
+        out.writeLong(-3);
+        out.writeVLong(4);
+        out.writeFloat(1.1f);
+        out.writeDouble(2.2);
+        int[] intArray = {1, 2, 3};
+        out.writeGenericValue(intArray);
+        long[] longArray = {1, 2, 3};
+        out.writeGenericValue(longArray);
+        float[] floatArray = {1.1f, 2.2f, 3.3f};
+        out.writeGenericValue(floatArray);
+        double[] doubleArray = {1.1, 2.2, 3.3};
+        out.writeGenericValue(doubleArray);
+        out.writeString("hello");
+        out.writeString("goodbye");
+        BytesStreamInput in = new BytesStreamInput(out.bytes().toBytes(), false);
+        assertThat(in.readBoolean(), equalTo(false));
+        assertThat(in.readByte(), equalTo((byte) 1));
+        assertThat(in.readShort(), equalTo((short) -1));
+        assertThat(in.readInt(), equalTo(-1));
+        assertThat(in.readVInt(), equalTo(2));
+        assertThat(in.readLong(), equalTo((long) -3));
+        assertThat(in.readVLong(), equalTo((long) 4));
+        assertThat((double) in.readFloat(), closeTo(1.1, 0.0001));
+        assertThat(in.readDouble(), closeTo(2.2, 0.0001));
+        assertThat(in.readGenericValue(), equalTo((Object)intArray));
+        assertThat(in.readGenericValue(), equalTo((Object)longArray));
+        assertThat(in.readGenericValue(), equalTo((Object)floatArray));
+        assertThat(in.readGenericValue(), equalTo((Object)doubleArray));
+        assertThat(in.readString(), equalTo("hello"));
+        assertThat(in.readString(), equalTo("goodbye"));
+    }
+
+    // verifies the internal buffer growth steps as more bytes are written;
+    // the later sizes (4608, 40320, 90720) pin the current growth policy
+    @Test
+    public void testGrowLogic() throws Exception {
+        assumeTrue(Constants.JRE_IS_64BIT);
+        BytesStreamOutput out = new BytesStreamOutput();
+        out.writeBytes(new byte[BytesStreamOutput.DEFAULT_SIZE - 5]);
+        // use the constant instead of a magic 2048 so the assertion cannot
+        // drift from the write above if the default ever changes
+        assertThat(out.bufferSize(), equalTo(BytesStreamOutput.DEFAULT_SIZE)); // remains the default
+        out.writeBytes(new byte[1 * 1024]);
+        assertThat(out.bufferSize(), equalTo(4608));
+        out.writeBytes(new byte[32 * 1024]);
+        assertThat(out.bufferSize(), equalTo(40320));
+        out.writeBytes(new byte[32 * 1024]);
+        assertThat(out.bufferSize(), equalTo(90720));
+    }
+}
diff --git a/src/test/java/org/elasticsearch/common/io/streams/HandlesStreamsTests.java b/src/test/java/org/elasticsearch/common/io/streams/HandlesStreamsTests.java
new file mode 100644
index 0000000..a28082f
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/io/streams/HandlesStreamsTests.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.io.streams;
+
+import org.elasticsearch.common.io.stream.BytesStreamInput;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.HandlesStreamInput;
+import org.elasticsearch.common.io.stream.HandlesStreamOutput;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.*;
+
+/**
+ *
+ */
+public class HandlesStreamsTests extends ElasticsearchTestCase {
+
+    @Test
+    public void testSharedStringHandles() throws Exception {
+        String test1 = "test1";
+        String test2 = "test2";
+        String test3 = "test3";
+        String test4 = "test4";
+        String test5 = "test5";
+        String test6 = "test6";
+
+        BytesStreamOutput bout = new BytesStreamOutput();
+        HandlesStreamOutput out = new HandlesStreamOutput(bout);
+        // plain writeString: values round-trip but are not de-duplicated
+        out.writeString(test1);
+        out.writeString(test1);
+        out.writeString(test2);
+        out.writeString(test3);
+        // writeSharedString: repeated values presumably go through a handle
+        // table (the sameInstance assertion below relies on this)
+        out.writeSharedString(test4);
+        out.writeSharedString(test4);
+        out.writeSharedString(test5);
+        out.writeSharedString(test6);
+
+        // reads must mirror the writes exactly — the protocol is positional
+        BytesStreamInput bin = new BytesStreamInput(bout.bytes());
+        HandlesStreamInput in = new HandlesStreamInput(bin);
+        String s1 = in.readString();
+        String s2 = in.readString();
+        String s3 = in.readString();
+        String s4 = in.readString();
+        String s5 = in.readSharedString();
+        String s6 = in.readSharedString();
+        String s7 = in.readSharedString();
+        String s8 = in.readSharedString();
+
+        assertThat(s1, equalTo(test1));
+        assertThat(s2, equalTo(test1));
+        assertThat(s3, equalTo(test2));
+        assertThat(s4, equalTo(test3));
+        assertThat(s5, equalTo(test4));
+        assertThat(s6, equalTo(test4));
+        assertThat(s7, equalTo(test5));
+        assertThat(s8, equalTo(test6));
+
+        // equal value written twice via writeString yields two distinct instances...
+        assertThat(s1, not(sameInstance(s2)));
+        // ...while a repeated shared string resolves to the very same instance
+        assertThat(s5, sameInstance(s6));
+    }
+}
diff --git a/src/test/java/org/elasticsearch/common/joda/DateMathParserTests.java b/src/test/java/org/elasticsearch/common/joda/DateMathParserTests.java
new file mode 100644
index 0000000..6c0ab25
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/joda/DateMathParserTests.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.joda;
+
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import java.util.concurrent.TimeUnit;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ */
+public class DateMathParserTests extends ElasticsearchTestCase {
+
+    // date-math expressions relative to "now" (epoch millis 0 in these tests)
+    @Test
+    public void dataMathTests() {
+        DateMathParser parser = new DateMathParser(Joda.forPattern("dateOptionalTime"), TimeUnit.MILLISECONDS);
+
+        // use uppercase L for long literals — lowercase l is easily misread as 1
+        assertThat(parser.parse("now", 0), equalTo(0L));
+        assertThat(parser.parse("now+m", 0), equalTo(TimeUnit.MINUTES.toMillis(1)));
+        assertThat(parser.parse("now+1m", 0), equalTo(TimeUnit.MINUTES.toMillis(1)));
+        assertThat(parser.parse("now+11m", 0), equalTo(TimeUnit.MINUTES.toMillis(11)));
+
+        assertThat(parser.parse("now+1d", 0), equalTo(TimeUnit.DAYS.toMillis(1)));
+
+        assertThat(parser.parse("now+1m+1s", 0), equalTo(TimeUnit.MINUTES.toMillis(1) + TimeUnit.SECONDS.toMillis(1)));
+        assertThat(parser.parse("now+1m-1s", 0), equalTo(TimeUnit.MINUTES.toMillis(1) - TimeUnit.SECONDS.toMillis(1)));
+
+        // "/m" rounds down to the minute; parseRoundCeil rounds up
+        assertThat(parser.parse("now+1m+1s/m", 0), equalTo(TimeUnit.MINUTES.toMillis(1)));
+        assertThat(parser.parseRoundCeil("now+1m+1s/m", 0), equalTo(TimeUnit.MINUTES.toMillis(2)));
+
+        // 1970..1973 spans one leap year (1972), hence 4*365 + 1 days
+        assertThat(parser.parse("now+4y", 0), equalTo(TimeUnit.DAYS.toMillis(4*365 + 1)));
+    }
+
+    // date-math anchored on an absolute date ("date||expression")
+    @Test
+    public void actualDateTests() {
+        DateMathParser parser = new DateMathParser(Joda.forPattern("dateOptionalTime"), TimeUnit.MILLISECONDS);
+
+        assertThat(parser.parse("1970-01-01", 0), equalTo(0L));
+        assertThat(parser.parse("1970-01-01||+1m", 0), equalTo(TimeUnit.MINUTES.toMillis(1)));
+        assertThat(parser.parse("1970-01-01||+1m+1s", 0), equalTo(TimeUnit.MINUTES.toMillis(1) + TimeUnit.SECONDS.toMillis(1)));
+
+        assertThat(parser.parse("2013-01-01||+1y", 0), equalTo(parser.parse("2013-01-01", 0) + TimeUnit.DAYS.toMillis(365)));
+        // "/y" rounds down to the start of the year; parseRoundCeil to the next
+        assertThat(parser.parse("2013-03-03||/y", 0), equalTo(parser.parse("2013-01-01", 0)));
+        assertThat(parser.parseRoundCeil("2013-03-03||/y", 0), equalTo(parser.parse("2014-01-01", 0)));
+    }
+}
diff --git a/src/test/java/org/elasticsearch/common/lucene/LuceneTest.java b/src/test/java/org/elasticsearch/common/lucene/LuceneTest.java
new file mode 100644
index 0000000..0ae9781
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/lucene/LuceneTest.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.common.lucene;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.core.IsEqual.equalTo;
+
+import org.apache.lucene.util.Version;
+import org.elasticsearch.common.logging.ESLogger;
+import org.elasticsearch.common.logging.ESLoggerFactory;
+import org.junit.Test;
+
+/**
+ *
+ */
public class LuceneTest {


    /*
     * simple test that ensures that we bump the version on Upgrade
     */
    @Test
    public void testVersion() {
        ESLogger logger = ESLoggerFactory.getLogger(LuceneTest.class.getName());
        Version[] values = Version.values();
        // LUCENE_CURRENT is the last enum constant and aliases the newest version.
        assertThat(Version.LUCENE_CURRENT, equalTo(values[values.length-1]));
        // Lucene.VERSION must be the newest *concrete* version, i.e. the constant
        // immediately before the LUCENE_CURRENT alias; fails after a Lucene upgrade
        // until Lucene.VERSION is bumped to match.
        assertThat("Latest Lucene Version is not set after upgrade", Lucene.VERSION, equalTo(values[values.length-2]));
        // a null version string falls back to the supplied default
        assertThat(Lucene.parseVersion(null, Lucene.VERSION, null), equalTo(Lucene.VERSION));
        for (int i = 0; i < values.length-1; i++) {
            // this should fail if the lucene version is not mapped as a string in Lucene.java
            // (e.g. enum name LUCENE_42 is rewritten to the string "4.2" before parsing)
            assertThat(Lucene.parseVersion(values[i].name().replaceFirst("^LUCENE_(\\d)(\\d)$", "$1.$2"), Version.LUCENE_CURRENT, logger), equalTo(values[i]));
        }
    }
}
diff --git a/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java b/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java
new file mode 100644
index 0000000..bb7de12
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/lucene/all/SimpleAllTests.java
@@ -0,0 +1,342 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.lucene.all;
+
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.analysis.payloads.PayloadHelper;
+import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
+import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.StoredField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.*;
+import org.apache.lucene.search.*;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ *
+ */
+public class SimpleAllTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testBoostOnEagerTokenizer() throws Exception {
+ AllEntries allEntries = new AllEntries();
+ allEntries.addText("field1", "all", 2.0f);
+ allEntries.addText("field2", "your", 1.0f);
+ allEntries.addText("field1", "boosts", 0.5f);
+ allEntries.reset();
+ // whitespace analyzer's tokenizer reads characters eagerly on the contrary to the standard tokenizer
+ final TokenStream ts = AllTokenStream.allTokenStream("any", allEntries, new WhitespaceAnalyzer(Lucene.VERSION));
+ final CharTermAttribute termAtt = ts.addAttribute(CharTermAttribute.class);
+ final PayloadAttribute payloadAtt = ts.addAttribute(PayloadAttribute.class);
+ ts.reset();
+ for (int i = 0; i < 3; ++i) {
+ assertTrue(ts.incrementToken());
+ final String term;
+ final float boost;
+ switch (i) {
+ case 0:
+ term = "all";
+ boost = 2;
+ break;
+ case 1:
+ term = "your";
+ boost = 1;
+ break;
+ case 2:
+ term = "boosts";
+ boost = 0.5f;
+ break;
+ default:
+ throw new AssertionError();
+ }
+ assertEquals(term, termAtt.toString());
+ final BytesRef payload = payloadAtt.getPayload();
+ if (payload == null || payload.length == 0) {
+ assertEquals(boost, 1f, 0.001f);
+ } else {
+ assertEquals(4, payload.length);
+ final float b = PayloadHelper.decodeFloat(payload.bytes, payload.offset);
+ assertEquals(boost, b, 0.001f);
+ }
+ }
+ assertFalse(ts.incrementToken());
+ }
+
+ @Test
+ public void testAllEntriesRead() throws Exception {
+ AllEntries allEntries = new AllEntries();
+ allEntries.addText("field1", "something", 1.0f);
+ allEntries.addText("field2", "else", 1.0f);
+
+ for (int i = 1; i < 30; i++) {
+ allEntries.reset();
+ char[] data = new char[i];
+ String value = slurpToString(allEntries, data);
+ assertThat("failed for " + i, value, equalTo("something else"));
+ }
+ }
+
+ private String slurpToString(AllEntries allEntries, char[] data) throws IOException {
+ StringBuilder sb = new StringBuilder();
+ while (true) {
+ int read = allEntries.read(data, 0, data.length);
+ if (read == -1) {
+ break;
+ }
+ sb.append(data, 0, read);
+ }
+ return sb.toString();
+ }
+
+ private void assertExplanationScore(IndexSearcher searcher, Query query, ScoreDoc scoreDoc) throws IOException {
+ final Explanation expl = searcher.explain(query, scoreDoc.doc);
+ assertEquals(scoreDoc.score, expl.getValue(), 0.00001f);
+ }
+
+ @Test
+ public void testSimpleAllNoBoost() throws Exception {
+ Directory dir = new RAMDirectory();
+ IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
+
+ Document doc = new Document();
+ doc.add(new Field("_id", "1", StoredField.TYPE));
+ AllEntries allEntries = new AllEntries();
+ allEntries.addText("field1", "something", 1.0f);
+ allEntries.addText("field2", "else", 1.0f);
+ allEntries.reset();
+ doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
+
+ indexWriter.addDocument(doc);
+
+ doc = new Document();
+ doc.add(new Field("_id", "2", StoredField.TYPE));
+ allEntries = new AllEntries();
+ allEntries.addText("field1", "else", 1.0f);
+ allEntries.addText("field2", "something", 1.0f);
+ allEntries.reset();
+ doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
+
+ indexWriter.addDocument(doc);
+
+ IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexSearcher searcher = new IndexSearcher(reader);
+
+ Query query = new AllTermQuery(new Term("_all", "else"));
+ TopDocs docs = searcher.search(query, 10);
+ assertThat(docs.totalHits, equalTo(2));
+ assertThat(docs.scoreDocs[0].doc, equalTo(0));
+ assertExplanationScore(searcher, query, docs.scoreDocs[0]);
+ assertThat(docs.scoreDocs[1].doc, equalTo(1));
+ assertExplanationScore(searcher, query, docs.scoreDocs[1]);
+
+ query = new AllTermQuery(new Term("_all", "something"));
+ docs = searcher.search(query, 10);
+ assertThat(docs.totalHits, equalTo(2));
+ assertThat(docs.scoreDocs[0].doc, equalTo(0));
+ assertExplanationScore(searcher, query, docs.scoreDocs[0]);
+ assertThat(docs.scoreDocs[1].doc, equalTo(1));
+ assertExplanationScore(searcher, query, docs.scoreDocs[1]);
+
+ indexWriter.close();
+ }
+
+ @Test
+ public void testSimpleAllWithBoost() throws Exception {
+ Directory dir = new RAMDirectory();
+ IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
+
+ Document doc = new Document();
+ doc.add(new Field("_id", "1", StoredField.TYPE));
+ AllEntries allEntries = new AllEntries();
+ allEntries.addText("field1", "something", 1.0f);
+ allEntries.addText("field2", "else", 1.0f);
+ allEntries.reset();
+ doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
+
+ indexWriter.addDocument(doc);
+
+ doc = new Document();
+ doc.add(new Field("_id", "2", StoredField.TYPE));
+ allEntries = new AllEntries();
+ allEntries.addText("field1", "else", 2.0f);
+ allEntries.addText("field2", "something", 1.0f);
+ allEntries.reset();
+ doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
+
+ indexWriter.addDocument(doc);
+
+ IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexSearcher searcher = new IndexSearcher(reader);
+
+ // this one is boosted. so the second doc is more relevant
+ Query query = new AllTermQuery(new Term("_all", "else"));
+ TopDocs docs = searcher.search(query, 10);
+ assertThat(docs.totalHits, equalTo(2));
+ assertThat(docs.scoreDocs[0].doc, equalTo(1));
+ assertExplanationScore(searcher, query, docs.scoreDocs[0]);
+ assertThat(docs.scoreDocs[1].doc, equalTo(0));
+ assertExplanationScore(searcher, query, docs.scoreDocs[1]);
+
+ query = new AllTermQuery(new Term("_all", "something"));
+ docs = searcher.search(query, 10);
+ assertThat(docs.totalHits, equalTo(2));
+ assertThat(docs.scoreDocs[0].doc, equalTo(0));
+ assertExplanationScore(searcher, query, docs.scoreDocs[0]);
+ assertThat(docs.scoreDocs[1].doc, equalTo(1));
+ assertExplanationScore(searcher, query, docs.scoreDocs[1]);
+
+ indexWriter.close();
+ }
+
+ @Test
+ public void testMultipleTokensAllNoBoost() throws Exception {
+ Directory dir = new RAMDirectory();
+ IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
+
+ Document doc = new Document();
+ doc.add(new Field("_id", "1", StoredField.TYPE));
+ AllEntries allEntries = new AllEntries();
+ allEntries.addText("field1", "something moo", 1.0f);
+ allEntries.addText("field2", "else koo", 1.0f);
+ allEntries.reset();
+ doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
+
+ indexWriter.addDocument(doc);
+
+ doc = new Document();
+ doc.add(new Field("_id", "2", StoredField.TYPE));
+ allEntries = new AllEntries();
+ allEntries.addText("field1", "else koo", 1.0f);
+ allEntries.addText("field2", "something moo", 1.0f);
+ allEntries.reset();
+ doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
+
+ indexWriter.addDocument(doc);
+
+ IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexSearcher searcher = new IndexSearcher(reader);
+
+ TopDocs docs = searcher.search(new AllTermQuery(new Term("_all", "else")), 10);
+ assertThat(docs.totalHits, equalTo(2));
+ assertThat(docs.scoreDocs[0].doc, equalTo(0));
+ assertThat(docs.scoreDocs[1].doc, equalTo(1));
+
+ docs = searcher.search(new AllTermQuery(new Term("_all", "koo")), 10);
+ assertThat(docs.totalHits, equalTo(2));
+ assertThat(docs.scoreDocs[0].doc, equalTo(0));
+ assertThat(docs.scoreDocs[1].doc, equalTo(1));
+
+ docs = searcher.search(new AllTermQuery(new Term("_all", "something")), 10);
+ assertThat(docs.totalHits, equalTo(2));
+ assertThat(docs.scoreDocs[0].doc, equalTo(0));
+ assertThat(docs.scoreDocs[1].doc, equalTo(1));
+
+ docs = searcher.search(new AllTermQuery(new Term("_all", "moo")), 10);
+ assertThat(docs.totalHits, equalTo(2));
+ assertThat(docs.scoreDocs[0].doc, equalTo(0));
+ assertThat(docs.scoreDocs[1].doc, equalTo(1));
+
+ indexWriter.close();
+ }
+
+ @Test
+ public void testMultipleTokensAllWithBoost() throws Exception {
+ Directory dir = new RAMDirectory();
+ IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
+
+ Document doc = new Document();
+ doc.add(new Field("_id", "1", StoredField.TYPE));
+ AllEntries allEntries = new AllEntries();
+ allEntries.addText("field1", "something moo", 1.0f);
+ allEntries.addText("field2", "else koo", 1.0f);
+ allEntries.reset();
+ doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
+
+ indexWriter.addDocument(doc);
+
+ doc = new Document();
+ doc.add(new Field("_id", "2", StoredField.TYPE));
+ allEntries = new AllEntries();
+ allEntries.addText("field1", "else koo", 2.0f);
+ allEntries.addText("field2", "something moo", 1.0f);
+ allEntries.reset();
+ doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
+
+ indexWriter.addDocument(doc);
+
+ IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexSearcher searcher = new IndexSearcher(reader);
+
+ TopDocs docs = searcher.search(new AllTermQuery(new Term("_all", "else")), 10);
+ assertThat(docs.totalHits, equalTo(2));
+ assertThat(docs.scoreDocs[0].doc, equalTo(1));
+ assertThat(docs.scoreDocs[1].doc, equalTo(0));
+
+ docs = searcher.search(new AllTermQuery(new Term("_all", "koo")), 10);
+ assertThat(docs.totalHits, equalTo(2));
+ assertThat(docs.scoreDocs[0].doc, equalTo(1));
+ assertThat(docs.scoreDocs[1].doc, equalTo(0));
+
+ docs = searcher.search(new AllTermQuery(new Term("_all", "something")), 10);
+ assertThat(docs.totalHits, equalTo(2));
+ assertThat(docs.scoreDocs[0].doc, equalTo(0));
+ assertThat(docs.scoreDocs[1].doc, equalTo(1));
+
+ docs = searcher.search(new AllTermQuery(new Term("_all", "moo")), 10);
+ assertThat(docs.totalHits, equalTo(2));
+ assertThat(docs.scoreDocs[0].doc, equalTo(0));
+ assertThat(docs.scoreDocs[1].doc, equalTo(1));
+
+ indexWriter.close();
+ }
+
+ @Test
+ public void testNoTokensWithKeywordAnalyzer() throws Exception {
+ Directory dir = new RAMDirectory();
+ IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.KEYWORD_ANALYZER));
+
+ Document doc = new Document();
+ doc.add(new Field("_id", "1", StoredField.TYPE));
+ AllEntries allEntries = new AllEntries();
+ allEntries.reset();
+ doc.add(new TextField("_all", AllTokenStream.allTokenStream("_all", allEntries, Lucene.KEYWORD_ANALYZER)));
+
+ indexWriter.addDocument(doc);
+
+ IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexSearcher searcher = new IndexSearcher(reader);
+
+ TopDocs docs = searcher.search(new MatchAllDocsQuery(), 10);
+ assertThat(docs.totalHits, equalTo(1));
+ assertThat(docs.scoreDocs[0].doc, equalTo(0));
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/lucene/search/MatchAllDocsFilterTests.java b/src/test/java/org/elasticsearch/common/lucene/search/MatchAllDocsFilterTests.java
new file mode 100644
index 0000000..0b85525
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/lucene/search/MatchAllDocsFilterTests.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.lucene.search;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RAMDirectory;
+import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ *
+ */
+public class MatchAllDocsFilterTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testMatchAllDocsFilter() throws Exception {
+ Directory dir = new RAMDirectory();
+ IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
+
+ Document document = new Document();
+ document.add(new TextField("_id", "1", Field.Store.YES));
+ document.add(new TextField("text", "lucene", Field.Store.YES));
+ indexWriter.addDocument(document);
+
+ document = new Document();
+ document.add(new TextField("_id", "2", Field.Store.YES));
+ document.add(new TextField("text", "lucene release", Field.Store.YES));
+ indexWriter.addDocument(document);
+
+ IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexSearcher searcher = new IndexSearcher(reader);
+
+ XConstantScoreQuery query = new XConstantScoreQuery(Queries.MATCH_ALL_FILTER);
+ long count = Lucene.count(searcher, query);
+ assertThat(count, equalTo(2l));
+
+ reader.close();
+ indexWriter.close();
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/lucene/search/MoreLikeThisQueryTests.java b/src/test/java/org/elasticsearch/common/lucene/search/MoreLikeThisQueryTests.java
new file mode 100644
index 0000000..c2b51f6
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/lucene/search/MoreLikeThisQueryTests.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.lucene.search;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RAMDirectory;
+import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ *
+ */
+public class MoreLikeThisQueryTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testSimple() throws Exception {
+ Directory dir = new RAMDirectory();
+ IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
+ indexWriter.commit();
+
+
+ Document document = new Document();
+ document.add(new TextField("_id", "1", Field.Store.YES));
+ document.add(new TextField("text", "lucene", Field.Store.YES));
+ indexWriter.addDocument(document);
+
+ document = new Document();
+ document.add(new TextField("_id", "2", Field.Store.YES));
+ document.add(new TextField("text", "lucene release", Field.Store.YES));
+ indexWriter.addDocument(document);
+
+ IndexReader reader = DirectoryReader.open(indexWriter, true);
+ IndexSearcher searcher = new IndexSearcher(reader);
+
+ MoreLikeThisQuery mltQuery = new MoreLikeThisQuery("lucene", new String[]{"text"}, Lucene.STANDARD_ANALYZER);
+ mltQuery.setLikeText("lucene");
+ mltQuery.setMinTermFrequency(1);
+ mltQuery.setMinDocFreq(1);
+ long count = Lucene.count(searcher, mltQuery);
+ assertThat(count, equalTo(2l));
+
+ reader.close();
+ indexWriter.close();
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java b/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java
new file mode 100644
index 0000000..cc2fac4
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/lucene/search/MultiPhrasePrefixQueryTests.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.lucene.search;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.*;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.store.RAMDirectory;
+import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+
+public class MultiPhrasePrefixQueryTests extends ElasticsearchTestCase {
+
+ @Test
+ public void simpleTests() throws Exception {
+ IndexWriter writer = new IndexWriter(new RAMDirectory(), new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
+ Document doc = new Document();
+ doc.add(new Field("field", "aaa bbb ccc ddd", TextField.TYPE_NOT_STORED));
+ writer.addDocument(doc);
+ IndexReader reader = DirectoryReader.open(writer, true);
+ IndexSearcher searcher = new IndexSearcher(reader);
+
+ MultiPhrasePrefixQuery query = new MultiPhrasePrefixQuery();
+ query.add(new Term("field", "aa"));
+ assertThat(Lucene.count(searcher, query), equalTo(1l));
+
+ query = new MultiPhrasePrefixQuery();
+ query.add(new Term("field", "aaa"));
+ query.add(new Term("field", "bb"));
+ assertThat(Lucene.count(searcher, query), equalTo(1l));
+
+ query = new MultiPhrasePrefixQuery();
+ query.setSlop(1);
+ query.add(new Term("field", "aaa"));
+ query.add(new Term("field", "cc"));
+ assertThat(Lucene.count(searcher, query), equalTo(1l));
+
+ query = new MultiPhrasePrefixQuery();
+ query.setSlop(1);
+ query.add(new Term("field", "xxx"));
+ assertThat(Lucene.count(searcher, query), equalTo(0l));
+ }
+} \ No newline at end of file
diff --git a/src/test/java/org/elasticsearch/common/lucene/search/TermsFilterTests.java b/src/test/java/org/elasticsearch/common/lucene/search/TermsFilterTests.java
new file mode 100644
index 0000000..1c84a0f
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/lucene/search/TermsFilterTests.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.lucene.search;
+
+import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.index.*;
+import org.apache.lucene.queries.TermFilter;
+import org.apache.lucene.queries.XTermsFilter;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.FixedBitSet;
+import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.common.lucene.docset.DocIdSets;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
+
+/**
+ */
/**
 * Tests single-term ({@link TermFilter}) and multi-term ({@link XTermsFilter})
 * filters over an index of 100 docs whose field1 values are multiples of 10.
 */
public class TermsFilterTests extends ElasticsearchTestCase {

    @Test
    public void testTermFilter() throws Exception {
        String fieldName = "field1";
        Directory rd = new RAMDirectory();
        IndexWriter w = new IndexWriter(rd, new IndexWriterConfig(Lucene.VERSION, new KeywordAnalyzer()));
        for (int i = 0; i < 100; i++) {
            Document doc = new Document();
            int term = i * 10; //terms are units of 10;
            doc.add(new Field(fieldName, "" + term, StringField.TYPE_NOT_STORED));
            doc.add(new Field("all", "xxx", StringField.TYPE_NOT_STORED));
            w.addDocument(doc);
            if ((i % 40) == 0) {
                // periodic commits create multiple segments, exercised via the wrapper below
                w.commit();
            }
        }
        // flatten the multi-segment index into a single AtomicReader view
        AtomicReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(w, true));
        w.close();

        // "19" is never indexed (only multiples of 10): the filter signals
        // "no matches" by returning a null DocIdSet
        TermFilter tf = new TermFilter(new Term(fieldName, "19"));
        FixedBitSet bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
        assertThat(bits, nullValue());

        // "20" matches exactly one document
        tf = new TermFilter(new Term(fieldName, "20"));
        DocIdSet result = tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
        bits = DocIdSets.toFixedBitSet(result.iterator(), reader.maxDoc());
        assertThat(bits.cardinality(), equalTo(1));

        // every document carries all:xxx, so the filter covers the whole index
        tf = new TermFilter(new Term("all", "xxx"));
        result = tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
        bits = DocIdSets.toFixedBitSet(result.iterator(), reader.maxDoc());
        assertThat(bits.cardinality(), equalTo(100));

        reader.close();
        rd.close();
    }

    @Test
    public void testTermsFilter() throws Exception {
        String fieldName = "field1";
        Directory rd = new RAMDirectory();
        IndexWriter w = new IndexWriter(rd, new IndexWriterConfig(Lucene.VERSION, new KeywordAnalyzer()));
        for (int i = 0; i < 100; i++) {
            Document doc = new Document();
            int term = i * 10; //terms are units of 10;
            doc.add(new Field(fieldName, "" + term, StringField.TYPE_NOT_STORED));
            doc.add(new Field("all", "xxx", StringField.TYPE_NOT_STORED));
            w.addDocument(doc);
            if ((i % 40) == 0) {
                w.commit();
            }
        }
        AtomicReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(w, true));
        w.close();

        // none of the requested terms exist -> null DocIdSet
        XTermsFilter tf = new XTermsFilter(new Term[]{new Term(fieldName, "19")});
        FixedBitSet bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
        assertThat(bits, nullValue());

        // only "20" exists among {19, 20} -> one match
        tf = new XTermsFilter(new Term[]{new Term(fieldName, "19"), new Term(fieldName, "20")});
        bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
        assertThat(bits.cardinality(), equalTo(1));

        // "10" and "20" exist -> two matches
        tf = new XTermsFilter(new Term[]{new Term(fieldName, "19"), new Term(fieldName, "20"), new Term(fieldName, "10")});
        bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
        assertThat(bits.cardinality(), equalTo(2));

        // "00" is not indexed (the first doc's term is "0"), so still two matches
        tf = new XTermsFilter(new Term[]{new Term(fieldName, "19"), new Term(fieldName, "20"), new Term(fieldName, "10"), new Term(fieldName, "00")});
        bits = (FixedBitSet) tf.getDocIdSet(reader.getContext(), reader.getLiveDocs());
        assertThat(bits.cardinality(), equalTo(2));

        reader.close();
        rd.close();
    }
}
diff --git a/src/test/java/org/elasticsearch/common/lucene/search/XBooleanFilterLuceneTests.java b/src/test/java/org/elasticsearch/common/lucene/search/XBooleanFilterLuceneTests.java
new file mode 100644
index 0000000..5aa4424
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/lucene/search/XBooleanFilterLuceneTests.java
@@ -0,0 +1,391 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.lucene.search;
+
+import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.*;
+import org.apache.lucene.queries.FilterClause;
+import org.apache.lucene.queries.XTermsFilter;
+import org.apache.lucene.search.*;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.FixedBitSet;
+import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.core.IsEqual.equalTo;
+
+/**
+ * Tests for {@link XBooleanFilter}, ported from Lucene's own BooleanFilter tests.
+ *
+ * The fixture indexes five documents with "accessRights", "price", "date" and
+ * "inStock" fields; each test builds an {@link XBooleanFilter} over those fields
+ * and asserts how many documents the resulting doc-id set contains.
+ */
+public class XBooleanFilterLuceneTests extends ElasticsearchTestCase {
+
+    private Directory directory;
+    private AtomicReader reader;
+
+    @Override
+    @Before
+    public void setUp() throws Exception {
+        super.setUp();
+        directory = new RAMDirectory();
+        IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(Lucene.VERSION, new WhitespaceAnalyzer(Lucene.VERSION)));
+
+        //Add series of docs with filterable fields : access rights, prices, dates and "in-stock" flags
+        addDoc(writer, "admin guest", "010", "20040101", "Y");
+        addDoc(writer, "guest", "020", "20040101", "Y");
+        addDoc(writer, "guest", "020", "20050101", "Y");
+        addDoc(writer, "admin", "020", "20050101", "Maybe");
+        addDoc(writer, "admin guest", "030", "20050101", "N");
+        // Close the writer exactly once, before opening the reader (the original
+        // code closed it a second time after the reader was opened).
+        writer.close();
+        reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(directory));
+    }
+
+    @Override
+    @After
+    public void tearDown() throws Exception {
+        // Release index resources first so the superclass' resource-leak checks
+        // (run from super.tearDown()) see everything closed.
+        reader.close();
+        directory.close();
+        super.tearDown();
+    }
+
+    /** Indexes one document carrying the four filterable fields used by these tests. */
+    private void addDoc(IndexWriter writer, String accessRights, String price, String date, String inStock) throws IOException {
+        Document doc = new Document();
+        doc.add(new TextField("accessRights", accessRights, Field.Store.YES));
+        doc.add(new TextField("price", price, Field.Store.YES));
+        doc.add(new TextField("date", date, Field.Store.YES));
+        doc.add(new TextField("inStock", inStock, Field.Store.YES));
+        writer.addDocument(doc);
+    }
+
+    /** Inclusive string-range filter over {@code field}. */
+    private Filter getRangeFilter(String field, String lowerPrice, String upperPrice) {
+        return TermRangeFilter.newStringRange(field, lowerPrice, upperPrice, true, true);
+    }
+
+    /** Single-term filter backed by {@link XTermsFilter} (FixedBitSet based). */
+    private Filter getTermsFilter(String field, String text) {
+        return new XTermsFilter(new Term(field, text));
+    }
+
+    /** Single-term filter whose DocIdSet is iterator-based rather than bitset-based. */
+    private Filter getWrappedTermQuery(String field, String text) {
+        return new QueryWrapperFilter(new TermQuery(new Term(field, text)));
+    }
+
+    /** Filter that matches no documents but still returns a non-null (empty) DocIdSet. */
+    private Filter getEmptyFilter() {
+        return new Filter() {
+            @Override
+            public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) {
+                return new FixedBitSet(context.reader().maxDoc());
+            }
+        };
+    }
+
+    /** Filter that signals "no matches" by returning a null DocIdSet. */
+    private Filter getNullDISFilter() {
+        return new Filter() {
+            @Override
+            public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) {
+                return null;
+            }
+        };
+    }
+
+    /** Filter whose DocIdSet is non-null but yields a null iterator. */
+    private Filter getNullDISIFilter() {
+        return new Filter() {
+            @Override
+            public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) {
+                return new DocIdSet() {
+                    @Override
+                    public DocIdSetIterator iterator() {
+                        return null;
+                    }
+
+                    @Override
+                    public boolean isCacheable() {
+                        return true;
+                    }
+                };
+            }
+        };
+    }
+
+    /**
+     * Asserts that {@code filt} matches exactly {@code expected} documents.
+     * A null DocIdSet or a null iterator counts as zero matches.
+     */
+    private void tstFilterCard(String mes, int expected, Filter filt) throws Exception {
+        int actual = 0;
+        DocIdSet docIdSet = filt.getDocIdSet(reader.getContext(), reader.getLiveDocs());
+        if (docIdSet != null) {
+            DocIdSetIterator disi = docIdSet.iterator();
+            if (disi != null) {
+                while (disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
+                    actual++;
+                }
+            }
+        }
+        assertThat(mes, actual, equalTo(expected));
+    }
+
+    @Test
+    public void testShould() throws Exception {
+        XBooleanFilter booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getTermsFilter("price", "030"), BooleanClause.Occur.SHOULD);
+        tstFilterCard("Should retrieves only 1 doc", 1, booleanFilter);
+
+        // same with a real DISI (no OpenBitSetIterator)
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getWrappedTermQuery("price", "030"), BooleanClause.Occur.SHOULD);
+        tstFilterCard("Should retrieves only 1 doc", 1, booleanFilter);
+    }
+
+    @Test
+    public void testShoulds() throws Exception {
+        XBooleanFilter booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getRangeFilter("price", "010", "020"), BooleanClause.Occur.SHOULD);
+        booleanFilter.add(getRangeFilter("price", "020", "030"), BooleanClause.Occur.SHOULD);
+        tstFilterCard("Shoulds are Ored together", 5, booleanFilter);
+    }
+
+    @Test
+    public void testShouldsAndMustNot() throws Exception {
+        XBooleanFilter booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getRangeFilter("price", "010", "020"), BooleanClause.Occur.SHOULD);
+        booleanFilter.add(getRangeFilter("price", "020", "030"), BooleanClause.Occur.SHOULD);
+        booleanFilter.add(getTermsFilter("inStock", "N"), BooleanClause.Occur.MUST_NOT);
+        tstFilterCard("Shoulds Ored but AndNot", 4, booleanFilter);
+
+        booleanFilter.add(getTermsFilter("inStock", "Maybe"), BooleanClause.Occur.MUST_NOT);
+        tstFilterCard("Shoulds Ored but AndNots", 3, booleanFilter);
+
+        // same with a real DISI (no OpenBitSetIterator)
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getRangeFilter("price", "010", "020"), BooleanClause.Occur.SHOULD);
+        booleanFilter.add(getRangeFilter("price", "020", "030"), BooleanClause.Occur.SHOULD);
+        booleanFilter.add(getWrappedTermQuery("inStock", "N"), BooleanClause.Occur.MUST_NOT);
+        tstFilterCard("Shoulds Ored but AndNot", 4, booleanFilter);
+
+        booleanFilter.add(getWrappedTermQuery("inStock", "Maybe"), BooleanClause.Occur.MUST_NOT);
+        tstFilterCard("Shoulds Ored but AndNots", 3, booleanFilter);
+    }
+
+    @Test
+    public void testShouldsAndMust() throws Exception {
+        XBooleanFilter booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getRangeFilter("price", "010", "020"), BooleanClause.Occur.SHOULD);
+        booleanFilter.add(getRangeFilter("price", "020", "030"), BooleanClause.Occur.SHOULD);
+        booleanFilter.add(getTermsFilter("accessRights", "admin"), BooleanClause.Occur.MUST);
+        tstFilterCard("Shoulds Ored but MUST", 3, booleanFilter);
+
+        // same with a real DISI (no OpenBitSetIterator)
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getRangeFilter("price", "010", "020"), BooleanClause.Occur.SHOULD);
+        booleanFilter.add(getRangeFilter("price", "020", "030"), BooleanClause.Occur.SHOULD);
+        booleanFilter.add(getWrappedTermQuery("accessRights", "admin"), BooleanClause.Occur.MUST);
+        tstFilterCard("Shoulds Ored but MUST", 3, booleanFilter);
+    }
+
+    @Test
+    public void testShouldsAndMusts() throws Exception {
+        XBooleanFilter booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getRangeFilter("price", "010", "020"), BooleanClause.Occur.SHOULD);
+        booleanFilter.add(getRangeFilter("price", "020", "030"), BooleanClause.Occur.SHOULD);
+        booleanFilter.add(getTermsFilter("accessRights", "admin"), BooleanClause.Occur.MUST);
+        booleanFilter.add(getRangeFilter("date", "20040101", "20041231"), BooleanClause.Occur.MUST);
+        tstFilterCard("Shoulds Ored but MUSTs ANDED", 1, booleanFilter);
+    }
+
+    @Test
+    public void testShouldsAndMustsAndMustNot() throws Exception {
+        XBooleanFilter booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getRangeFilter("price", "030", "040"), BooleanClause.Occur.SHOULD);
+        booleanFilter.add(getTermsFilter("accessRights", "admin"), BooleanClause.Occur.MUST);
+        booleanFilter.add(getRangeFilter("date", "20050101", "20051231"), BooleanClause.Occur.MUST);
+        booleanFilter.add(getTermsFilter("inStock", "N"), BooleanClause.Occur.MUST_NOT);
+        tstFilterCard("Shoulds Ored but MUSTs ANDED and MustNot", 0, booleanFilter);
+
+        // same with a real DISI (no OpenBitSetIterator)
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getRangeFilter("price", "030", "040"), BooleanClause.Occur.SHOULD);
+        booleanFilter.add(getWrappedTermQuery("accessRights", "admin"), BooleanClause.Occur.MUST);
+        booleanFilter.add(getRangeFilter("date", "20050101", "20051231"), BooleanClause.Occur.MUST);
+        booleanFilter.add(getWrappedTermQuery("inStock", "N"), BooleanClause.Occur.MUST_NOT);
+        tstFilterCard("Shoulds Ored but MUSTs ANDED and MustNot", 0, booleanFilter);
+    }
+
+    @Test
+    public void testJustMust() throws Exception {
+        XBooleanFilter booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getTermsFilter("accessRights", "admin"), BooleanClause.Occur.MUST);
+        tstFilterCard("MUST", 3, booleanFilter);
+
+        // same with a real DISI (no OpenBitSetIterator)
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getWrappedTermQuery("accessRights", "admin"), BooleanClause.Occur.MUST);
+        tstFilterCard("MUST", 3, booleanFilter);
+    }
+
+    @Test
+    public void testJustMustNot() throws Exception {
+        XBooleanFilter booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getTermsFilter("inStock", "N"), BooleanClause.Occur.MUST_NOT);
+        tstFilterCard("MUST_NOT", 4, booleanFilter);
+
+        // same with a real DISI (no OpenBitSetIterator)
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getWrappedTermQuery("inStock", "N"), BooleanClause.Occur.MUST_NOT);
+        tstFilterCard("MUST_NOT", 4, booleanFilter);
+    }
+
+    @Test
+    public void testMustAndMustNot() throws Exception {
+        XBooleanFilter booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getTermsFilter("inStock", "N"), BooleanClause.Occur.MUST);
+        booleanFilter.add(getTermsFilter("price", "030"), BooleanClause.Occur.MUST_NOT);
+        tstFilterCard("MUST_NOT wins over MUST for same docs", 0, booleanFilter);
+
+        // same with a real DISI (no OpenBitSetIterator)
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getWrappedTermQuery("inStock", "N"), BooleanClause.Occur.MUST);
+        booleanFilter.add(getWrappedTermQuery("price", "030"), BooleanClause.Occur.MUST_NOT);
+        tstFilterCard("MUST_NOT wins over MUST for same docs", 0, booleanFilter);
+    }
+
+    @Test
+    public void testEmpty() throws Exception {
+        XBooleanFilter booleanFilter = new XBooleanFilter();
+        tstFilterCard("empty XBooleanFilter returns no results", 0, booleanFilter);
+    }
+
+    @Test
+    public void testCombinedNullDocIdSets() throws Exception {
+        XBooleanFilter booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getTermsFilter("price", "030"), BooleanClause.Occur.MUST);
+        booleanFilter.add(getNullDISFilter(), BooleanClause.Occur.MUST);
+        tstFilterCard("A MUST filter that returns a null DIS should never return documents", 0, booleanFilter);
+
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getTermsFilter("price", "030"), BooleanClause.Occur.MUST);
+        booleanFilter.add(getNullDISIFilter(), BooleanClause.Occur.MUST);
+        tstFilterCard("A MUST filter that returns a null DISI should never return documents", 0, booleanFilter);
+
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getTermsFilter("price", "030"), BooleanClause.Occur.SHOULD);
+        booleanFilter.add(getNullDISFilter(), BooleanClause.Occur.SHOULD);
+        tstFilterCard("A SHOULD filter that returns a null DIS should be invisible", 1, booleanFilter);
+
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getTermsFilter("price", "030"), BooleanClause.Occur.SHOULD);
+        booleanFilter.add(getNullDISIFilter(), BooleanClause.Occur.SHOULD);
+        tstFilterCard("A SHOULD filter that returns a null DISI should be invisible", 1, booleanFilter);
+
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getTermsFilter("price", "030"), BooleanClause.Occur.MUST);
+        booleanFilter.add(getNullDISFilter(), BooleanClause.Occur.MUST_NOT);
+        tstFilterCard("A MUST_NOT filter that returns a null DIS should be invisible", 1, booleanFilter);
+
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getTermsFilter("price", "030"), BooleanClause.Occur.MUST);
+        booleanFilter.add(getNullDISIFilter(), BooleanClause.Occur.MUST_NOT);
+        tstFilterCard("A MUST_NOT filter that returns a null DISI should be invisible", 1, booleanFilter);
+    }
+
+    @Test
+    public void testJustNullDocIdSets() throws Exception {
+        XBooleanFilter booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getNullDISFilter(), BooleanClause.Occur.MUST);
+        tstFilterCard("A MUST filter that returns a null DIS should never return documents", 0, booleanFilter);
+
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getNullDISIFilter(), BooleanClause.Occur.MUST);
+        tstFilterCard("A MUST filter that returns a null DISI should never return documents", 0, booleanFilter);
+
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getNullDISFilter(), BooleanClause.Occur.SHOULD);
+        tstFilterCard("A single SHOULD filter that returns a null DIS should never return documents", 0, booleanFilter);
+
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getNullDISIFilter(), BooleanClause.Occur.SHOULD);
+        tstFilterCard("A single SHOULD filter that returns a null DISI should never return documents", 0, booleanFilter);
+
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getNullDISFilter(), BooleanClause.Occur.MUST_NOT);
+        tstFilterCard("A single MUST_NOT filter that returns a null DIS should be invisible", 5, booleanFilter);
+
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getNullDISIFilter(), BooleanClause.Occur.MUST_NOT);
+        tstFilterCard("A single MUST_NOT filter that returns a null DIS should be invisible", 5, booleanFilter);
+    }
+
+    @Test
+    public void testNonMatchingShouldsAndMusts() throws Exception {
+        XBooleanFilter booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getEmptyFilter(), BooleanClause.Occur.SHOULD);
+        booleanFilter.add(getTermsFilter("accessRights", "admin"), BooleanClause.Occur.MUST);
+        tstFilterCard(">0 shoulds with no matches should return no docs", 0, booleanFilter);
+
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getNullDISFilter(), BooleanClause.Occur.SHOULD);
+        booleanFilter.add(getTermsFilter("accessRights", "admin"), BooleanClause.Occur.MUST);
+        tstFilterCard(">0 shoulds with no matches should return no docs", 0, booleanFilter);
+
+        booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getNullDISIFilter(), BooleanClause.Occur.SHOULD);
+        booleanFilter.add(getTermsFilter("accessRights", "admin"), BooleanClause.Occur.MUST);
+        tstFilterCard(">0 shoulds with no matches should return no docs", 0, booleanFilter);
+    }
+
+    @Test
+    public void testToStringOfBooleanFilterContainingTermsFilter() {
+        XBooleanFilter booleanFilter = new XBooleanFilter();
+        booleanFilter.add(getTermsFilter("inStock", "N"), BooleanClause.Occur.MUST);
+        booleanFilter.add(getTermsFilter("isFragile", "Y"), BooleanClause.Occur.MUST);
+
+        // actual value first, expectation in the matcher (the original had them swapped,
+        // which produces misleading failure messages)
+        assertThat(booleanFilter.toString(), equalTo("BooleanFilter(+inStock:N +isFragile:Y)"));
+    }
+
+    @Test
+    public void testToStringOfWrappedBooleanFilters() {
+        XBooleanFilter orFilter = new XBooleanFilter();
+
+        XBooleanFilter stockFilter = new XBooleanFilter();
+        stockFilter.add(new FilterClause(getTermsFilter("inStock", "Y"), BooleanClause.Occur.MUST));
+        stockFilter.add(new FilterClause(getTermsFilter("barCode", "12345678"), BooleanClause.Occur.MUST));
+
+        orFilter.add(new FilterClause(stockFilter, BooleanClause.Occur.SHOULD));
+
+        XBooleanFilter productPropertyFilter = new XBooleanFilter();
+        productPropertyFilter.add(new FilterClause(getTermsFilter("isHeavy", "N"), BooleanClause.Occur.MUST));
+        productPropertyFilter.add(new FilterClause(getTermsFilter("isDamaged", "Y"), BooleanClause.Occur.MUST));
+
+        orFilter.add(new FilterClause(productPropertyFilter, BooleanClause.Occur.SHOULD));
+
+        XBooleanFilter composedFilter = new XBooleanFilter();
+        composedFilter.add(new FilterClause(orFilter, BooleanClause.Occur.MUST));
+
+        // actual value first, expectation in the matcher (original arguments were swapped)
+        assertThat(
+                composedFilter.toString(),
+                equalTo("BooleanFilter(+BooleanFilter(BooleanFilter(+inStock:Y +barCode:12345678) BooleanFilter(+isHeavy:N +isDamaged:Y)))")
+        );
+    }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/lucene/search/XBooleanFilterTests.java b/src/test/java/org/elasticsearch/common/lucene/search/XBooleanFilterTests.java
new file mode 100644
index 0000000..32c08af
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/lucene/search/XBooleanFilterTests.java
@@ -0,0 +1,567 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.lucene.search;
+
+import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.index.*;
+import org.apache.lucene.queries.FilterClause;
+import org.apache.lucene.queries.TermFilter;
+import org.apache.lucene.search.*;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.FixedBitSet;
+import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.test.ElasticsearchLuceneTestCase;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import static org.apache.lucene.search.BooleanClause.Occur.*;
+import static org.hamcrest.core.IsEqual.equalTo;
+
+/**
+ */
+public class XBooleanFilterTests extends ElasticsearchLuceneTestCase {
+
+ private Directory directory;
+ private AtomicReader reader;
+ private static final char[] distinctValues = new char[] {'a', 'b', 'c', 'd', 'v','z','y'};
+
+ @Before
+ public void setup() throws Exception {
+ super.setUp();
+ // Fixture: three docs over five single-character keyword fields named "0".."4".
+ // Row = document, column = field: doc0 = a,b,c,d,v  doc1 = a,b,c,d,z  doc2 = a,a,a,a,x
+ char[][] documentMatrix = new char[][] {
+ {'a', 'b', 'c', 'd', 'v'},
+ {'a', 'b', 'c', 'd', 'z'},
+ {'a', 'a', 'a', 'a', 'x'}
+ };
+
+ List<Document> documents = new ArrayList<Document>(documentMatrix.length);
+ for (char[] fields : documentMatrix) {
+ Document document = new Document();
+ for (int i = 0; i < fields.length; i++) {
+ document.add(new StringField(Integer.toString(i), String.valueOf(fields[i]), Field.Store.NO));
+ }
+ documents.add(document);
+ }
+ directory = newDirectory();
+ IndexWriter w = new IndexWriter(directory, new IndexWriterConfig(Lucene.VERSION, new KeywordAnalyzer()));
+ w.addDocuments(documents);
+ w.close();
+ // Single atomic view over the index so every test can use one reader context.
+ reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(directory));
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ // Close index resources before super.tearDown() runs its resource-leak checks.
+ reader.close();
+ directory.close();
+ super.tearDown();
+
+ }
+
+ @Test
+ public void testWithTwoClausesOfEachOccur_allFixedBitsetFilters() throws Exception {
+ // Same six clauses (2 MUST, 2 SHOULD, 2 MUST_NOT), all FixedBitSet-backed,
+ // added in three different orders: clause order must not change the result.
+ List<XBooleanFilter> booleanFilters = new ArrayList<XBooleanFilter>();
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(0, 'a', MUST, false), newFilterClause(1, 'b', MUST, false),
+ newFilterClause(2, 'c', SHOULD, false), newFilterClause(3, 'd', SHOULD, false),
+ newFilterClause(4, 'e', MUST_NOT, false), newFilterClause(5, 'f', MUST_NOT, false)
+ ));
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(4, 'e', MUST_NOT, false), newFilterClause(5, 'f', MUST_NOT, false),
+ newFilterClause(0, 'a', MUST, false), newFilterClause(1, 'b', MUST, false),
+ newFilterClause(2, 'c', SHOULD, false), newFilterClause(3, 'd', SHOULD, false)
+ ));
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(2, 'c', SHOULD, false), newFilterClause(3, 'd', SHOULD, false),
+ newFilterClause(4, 'e', MUST_NOT, false), newFilterClause(5, 'f', MUST_NOT, false),
+ newFilterClause(0, 'a', MUST, false), newFilterClause(1, 'b', MUST, false)
+ ));
+
+ for (XBooleanFilter booleanFilter : booleanFilters) {
+ FixedBitSet result = new FixedBitSet(reader.maxDoc());
+ result.or(booleanFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
+ // Docs 0 and 1 satisfy the MUST clauses; doc 2 ('a','a','a','a','x') does not.
+ assertThat(result.cardinality(), equalTo(2));
+ assertThat(result.get(0), equalTo(true));
+ assertThat(result.get(1), equalTo(true));
+ assertThat(result.get(2), equalTo(false));
+ }
+ }
+
+ @Test
+ public void testWithTwoClausesOfEachOccur_allBitsBasedFilters() throws Exception {
+ // Same clause mix as the FixedBitSet variant, but every clause uses the
+ // slower Bits-backed filter (last argument true); results must be identical.
+ List<XBooleanFilter> booleanFilters = new ArrayList<XBooleanFilter>();
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(0, 'a', MUST, true), newFilterClause(1, 'b', MUST, true),
+ newFilterClause(2, 'c', SHOULD, true), newFilterClause(3, 'd', SHOULD, true),
+ newFilterClause(4, 'e', MUST_NOT, true), newFilterClause(5, 'f', MUST_NOT, true)
+ ));
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(4, 'e', MUST_NOT, true), newFilterClause(5, 'f', MUST_NOT, true),
+ newFilterClause(0, 'a', MUST, true), newFilterClause(1, 'b', MUST, true),
+ newFilterClause(2, 'c', SHOULD, true), newFilterClause(3, 'd', SHOULD, true)
+ ));
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(2, 'c', SHOULD, true), newFilterClause(3, 'd', SHOULD, true),
+ newFilterClause(4, 'e', MUST_NOT, true), newFilterClause(5, 'f', MUST_NOT, true),
+ newFilterClause(0, 'a', MUST, true), newFilterClause(1, 'b', MUST, true)
+ ));
+
+ for (XBooleanFilter booleanFilter : booleanFilters) {
+ FixedBitSet result = new FixedBitSet(reader.maxDoc());
+ result.or(booleanFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
+ // Same expectation as the fast-filter variant: docs 0 and 1 match.
+ assertThat(result.cardinality(), equalTo(2));
+ assertThat(result.get(0), equalTo(true));
+ assertThat(result.get(1), equalTo(true));
+ assertThat(result.get(2), equalTo(false));
+ }
+ }
+
+ @Test
+ public void testWithTwoClausesOfEachOccur_allFilterTypes() throws Exception {
+ // Mixed fast (FixedBitSet) and slow (Bits-backed) clauses in every occur
+ // category, in several orders; then the same with fast/slow roles swapped.
+ List<XBooleanFilter> booleanFilters = new ArrayList<XBooleanFilter>();
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(0, 'a', MUST, true), newFilterClause(1, 'b', MUST, false),
+ newFilterClause(2, 'c', SHOULD, true), newFilterClause(3, 'd', SHOULD, false),
+ newFilterClause(4, 'e', MUST_NOT, true), newFilterClause(5, 'f', MUST_NOT, false)
+ ));
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(4, 'e', MUST_NOT, true), newFilterClause(5, 'f', MUST_NOT, false),
+ newFilterClause(0, 'a', MUST, true), newFilterClause(1, 'b', MUST, false),
+ newFilterClause(2, 'c', SHOULD, true), newFilterClause(3, 'd', SHOULD, false)
+ ));
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(2, 'c', SHOULD, true), newFilterClause(3, 'd', SHOULD, false),
+ newFilterClause(4, 'e', MUST_NOT, true), newFilterClause(5, 'f', MUST_NOT, false),
+ newFilterClause(0, 'a', MUST, true), newFilterClause(1, 'b', MUST, false)
+ ));
+
+ for (XBooleanFilter booleanFilter : booleanFilters) {
+ FixedBitSet result = new FixedBitSet(reader.maxDoc());
+ result.or(booleanFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
+ assertThat(result.cardinality(), equalTo(2));
+ assertThat(result.get(0), equalTo(true));
+ assertThat(result.get(1), equalTo(true));
+ assertThat(result.get(2), equalTo(false));
+ }
+
+ // Same clause combinations with the fast/slow assignment inverted.
+ booleanFilters.clear();
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(0, 'a', MUST, false), newFilterClause(1, 'b', MUST, true),
+ newFilterClause(2, 'c', SHOULD, false), newFilterClause(3, 'd', SHOULD, true),
+ newFilterClause(4, 'e', MUST_NOT, false), newFilterClause(5, 'f', MUST_NOT, true)
+ ));
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(4, 'e', MUST_NOT, false), newFilterClause(5, 'f', MUST_NOT, true),
+ newFilterClause(0, 'a', MUST, false), newFilterClause(1, 'b', MUST, true),
+ newFilterClause(2, 'c', SHOULD, false), newFilterClause(3, 'd', SHOULD, true)
+ ));
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(2, 'c', SHOULD, false), newFilterClause(3, 'd', SHOULD, true),
+ newFilterClause(4, 'e', MUST_NOT, false), newFilterClause(5, 'f', MUST_NOT, true),
+ newFilterClause(0, 'a', MUST, false), newFilterClause(1, 'b', MUST, true)
+ ));
+
+ for (XBooleanFilter booleanFilter : booleanFilters) {
+ FixedBitSet result = new FixedBitSet(reader.maxDoc());
+ result.or(booleanFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
+ assertThat(result.cardinality(), equalTo(2));
+ assertThat(result.get(0), equalTo(true));
+ assertThat(result.get(1), equalTo(true));
+ assertThat(result.get(2), equalTo(false));
+ }
+ }
+
+ @Test
+ public void testWithTwoClausesOfEachOccur_singleClauseOptimisation() throws Exception {
+ // Single-clause filters, one per occur type, exercising the single-clause
+ // fast path. First: a lone MUST clause.
+ List<XBooleanFilter> booleanFilters = new ArrayList<XBooleanFilter>();
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(1, 'b', MUST, true)
+ ));
+
+ for (XBooleanFilter booleanFilter : booleanFilters) {
+ FixedBitSet result = new FixedBitSet(reader.maxDoc());
+ result.or(booleanFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
+ assertThat(result.cardinality(), equalTo(2));
+ assertThat(result.get(0), equalTo(true));
+ assertThat(result.get(1), equalTo(true));
+ assertThat(result.get(2), equalTo(false));
+ }
+
+ // A lone MUST_NOT clause: field "1" is never 'c', so all three docs match.
+ booleanFilters.clear();
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(1, 'c', MUST_NOT, true)
+ ));
+ for (XBooleanFilter booleanFilter : booleanFilters) {
+ FixedBitSet result = new FixedBitSet(reader.maxDoc());
+ result.or(booleanFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
+ assertThat(result.cardinality(), equalTo(3));
+ assertThat(result.get(0), equalTo(true));
+ assertThat(result.get(1), equalTo(true));
+ assertThat(result.get(2), equalTo(true));
+ }
+
+ // A lone SHOULD clause.
+ booleanFilters.clear();
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(2, 'c', SHOULD, true)
+ ));
+ for (XBooleanFilter booleanFilter : booleanFilters) {
+ FixedBitSet result = new FixedBitSet(reader.maxDoc());
+ result.or(booleanFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
+ assertThat(result.cardinality(), equalTo(2));
+ assertThat(result.get(0), equalTo(true));
+ assertThat(result.get(1), equalTo(true));
+ assertThat(result.get(2), equalTo(false));
+ }
+ }
+
+ @Test
+ public void testOnlyShouldClauses() throws Exception {
+ // SHOULD-only filters in slow/slow, fast/fast and mixed combinations;
+ // field "1" is 'b' or 'a' in every doc, so all three docs must match.
+ List<XBooleanFilter> booleanFilters = new ArrayList<XBooleanFilter>();
+ // 2 slow filters
+ // This case caused: https://github.com/elasticsearch/elasticsearch/issues/2826
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(1, 'a', SHOULD, true),
+ newFilterClause(1, 'b', SHOULD, true)
+ ));
+ // 2 fast filters
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(1, 'a', SHOULD, false),
+ newFilterClause(1, 'b', SHOULD, false)
+ ));
+ // 1 fast filters, 1 slow filter
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(1, 'a', SHOULD, true),
+ newFilterClause(1, 'b', SHOULD, false)
+ ));
+
+ for (XBooleanFilter booleanFilter : booleanFilters) {
+ FixedBitSet result = new FixedBitSet(reader.maxDoc());
+ result.or(booleanFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
+ assertThat(result.cardinality(), equalTo(3));
+ assertThat(result.get(0), equalTo(true));
+ assertThat(result.get(1), equalTo(true));
+ assertThat(result.get(2), equalTo(true));
+ }
+ }
+
+ @Test
+ public void testOnlyMustClauses() throws Exception {
+ // MUST-only filters (slow/slow, fast/fast, mixed); field "3" is 'd' only
+ // in docs 0 and 1, so exactly those two docs must match in every case.
+ List<XBooleanFilter> booleanFilters = new ArrayList<XBooleanFilter>();
+ // Slow filters
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(3, 'd', MUST, true),
+ newFilterClause(3, 'd', MUST, true)
+ ));
+ // 2 fast filters
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(3, 'd', MUST, false),
+ newFilterClause(3, 'd', MUST, false)
+ ));
+ // 1 fast filters, 1 slow filter
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(3, 'd', MUST, true),
+ newFilterClause(3, 'd', MUST, false)
+ ));
+ for (XBooleanFilter booleanFilter : booleanFilters) {
+ FixedBitSet result = new FixedBitSet(reader.maxDoc());
+ result.or(booleanFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
+ assertThat(result.cardinality(), equalTo(2));
+ assertThat(result.get(0), equalTo(true));
+ assertThat(result.get(1), equalTo(true));
+ assertThat(result.get(2), equalTo(false));
+ }
+ }
+
+ @Test
+ public void testOnlyMustNotClauses() throws Exception {
+ // MUST_NOT-only filters (slow/slow, fast/fast, mixed); field "1" is 'a'
+ // only in doc 2, so docs 0 and 1 must survive the exclusion in every case.
+ List<XBooleanFilter> booleanFilters = new ArrayList<XBooleanFilter>();
+ // Slow filters
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(1, 'a', MUST_NOT, true),
+ newFilterClause(1, 'a', MUST_NOT, true)
+ ));
+ // 2 fast filters
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(1, 'a', MUST_NOT, false),
+ newFilterClause(1, 'a', MUST_NOT, false)
+ ));
+ // 1 fast filters, 1 slow filter
+ booleanFilters.add(createBooleanFilter(
+ newFilterClause(1, 'a', MUST_NOT, true),
+ newFilterClause(1, 'a', MUST_NOT, false)
+ ));
+ for (XBooleanFilter booleanFilter : booleanFilters) {
+ FixedBitSet result = new FixedBitSet(reader.maxDoc());
+ result.or(booleanFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
+ assertThat(result.cardinality(), equalTo(2));
+ assertThat(result.get(0), equalTo(true));
+ assertThat(result.get(1), equalTo(true));
+ assertThat(result.get(2), equalTo(false));
+ }
+ }
+
+ @Test
+ public void testNonMatchingSlowShouldWithMatchingMust() throws Exception {
+ // A matching MUST combined with a slow SHOULD that matches nothing:
+ // with at least one SHOULD present, some SHOULD must match, so the
+ // whole filter yields no DocIdSet (null).
+ XBooleanFilter booleanFilter = createBooleanFilter(
+ newFilterClause(0, 'a', MUST, false),
+ newFilterClause(0, 'b', SHOULD, true)
+ );
+
+ DocIdSet docIdSet = booleanFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs());
+ assertThat(docIdSet, equalTo(null));
+ }
+
+ @Test
+ public void testSlowShouldClause_atLeastOneShouldMustMatch() throws Exception {
+ // MUST on field "0" == 'a' (all docs) plus a slow SHOULD on field "1" == 'a'
+ // (only doc 2): only doc 2 satisfies both.
+ XBooleanFilter booleanFilter = createBooleanFilter(
+ newFilterClause(0, 'a', MUST, false),
+ newFilterClause(1, 'a', SHOULD, true)
+ );
+
+ FixedBitSet result = new FixedBitSet(reader.maxDoc());
+ result.or(booleanFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
+ assertThat(result.cardinality(), equalTo(1));
+ assertThat(result.get(0), equalTo(false));
+ assertThat(result.get(1), equalTo(false));
+ assertThat(result.get(2), equalTo(true));
+
+ // Adding a second slow SHOULD on field "4" == 'z' brings doc 1 in as well.
+ booleanFilter = createBooleanFilter(
+ newFilterClause(0, 'a', MUST, false),
+ newFilterClause(1, 'a', SHOULD, true),
+ newFilterClause(4, 'z', SHOULD, true)
+ );
+
+ result = new FixedBitSet(reader.maxDoc());
+ result.or(booleanFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
+ assertThat(result.cardinality(), equalTo(2));
+ assertThat(result.get(0), equalTo(false));
+ assertThat(result.get(1), equalTo(true));
+ assertThat(result.get(2), equalTo(true));
+ }
+
+ @Test
+ // See issue: https://github.com/elasticsearch/elasticsearch/issues/4130
+ public void testOneFastMustNotOneFastShouldAndOneSlowShould() throws Exception {
+ // Excludes doc 0 (field "4" == 'v'); the SHOULD clauses ('z' fast, 'x' slow)
+ // select docs 1 and 2 respectively.
+ XBooleanFilter booleanFilter = createBooleanFilter(
+ newFilterClause(4, 'v', MUST_NOT, false),
+ newFilterClause(4, 'z', SHOULD, false),
+ newFilterClause(4, 'x', SHOULD, true)
+ );
+
+ FixedBitSet result = new FixedBitSet(reader.maxDoc());
+ result.or(booleanFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
+ assertThat(result.cardinality(), equalTo(2));
+ assertThat(result.get(0), equalTo(false));
+ assertThat(result.get(1), equalTo(true));
+ assertThat(result.get(2), equalTo(true));
+ }
+
+ @Test
+ // SHOULD-only filter mixing a fast (TermFilter) and a slow (field-cache)
+ // clause on the same field; matches of either clause are returned.
+ public void testOneFastShouldClauseAndOneSlowShouldClause() throws Exception {
+ XBooleanFilter booleanFilter = createBooleanFilter(
+ newFilterClause(4, 'z', SHOULD, false),
+ newFilterClause(4, 'x', SHOULD, true)
+ );
+
+ FixedBitSet result = new FixedBitSet(reader.maxDoc());
+ result.or(booleanFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
+ assertThat(result.cardinality(), equalTo(2));
+ assertThat(result.get(0), equalTo(false));
+ assertThat(result.get(1), equalTo(true));
+ assertThat(result.get(2), equalTo(true));
+ }
+
+ @Test
+ // Like the previous test, but additionally constrained by a MUST clause;
+ // the same two documents satisfy both the MUST and a SHOULD here.
+ public void testOneMustClauseOneFastShouldClauseAndOneSlowShouldClause() throws Exception {
+ XBooleanFilter booleanFilter = createBooleanFilter(
+ newFilterClause(0, 'a', MUST, false),
+ newFilterClause(4, 'z', SHOULD, false),
+ newFilterClause(4, 'x', SHOULD, true)
+ );
+
+ FixedBitSet result = new FixedBitSet(reader.maxDoc());
+ result.or(booleanFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs()).iterator());
+ assertThat(result.cardinality(), equalTo(2));
+ assertThat(result.get(0), equalTo(false));
+ assertThat(result.get(1), equalTo(true));
+ assertThat(result.get(2), equalTo(true));
+ }
+
+ /**
+ * Builds a clause over field {@code field} matching the single-character
+ * term {@code character}. When {@code slowerBitsBackedFilter} is true the
+ * clause is backed by a field-cache terms filter (the "slow" variant used
+ * throughout these tests); otherwise a plain TermFilter is used.
+ */
+ private static FilterClause newFilterClause(int field, char character, BooleanClause.Occur occur, boolean slowerBitsBackedFilter) {
+ Filter filter;
+ if (slowerBitsBackedFilter) {
+ filter = new PrettyPrintFieldCacheTermsFilter(String.valueOf(field), String.valueOf(character));
+ } else {
+ Term term = new Term(String.valueOf(field), String.valueOf(character));
+ filter = new TermFilter(term);
+ }
+ return new FilterClause(filter, occur);
+ }
+
+ /** Builds an XBooleanFilter containing the given clauses, in order. */
+ private static XBooleanFilter createBooleanFilter(FilterClause... clauses) {
+ XBooleanFilter result = new XBooleanFilter();
+ for (int i = 0; i < clauses.length; i++) {
+ result.add(clauses[i]);
+ }
+ return result;
+ }
+
+ @Test
+ // Randomized equivalence test: an XBooleanFilter built from random clauses
+ // must select exactly the same documents as the equivalent BooleanQuery.
+ public void testRandom() throws IOException {
+ int iterations = atLeast(400); // don't worry, this is fast!
+ for (int iter = 0; iter < iterations; iter++) {
+ int numClauses = 1 + random().nextInt(10);
+ FilterClause[] clauses = new FilterClause[numClauses];
+ BooleanQuery topLevel = new BooleanQuery();
+ // SHOULD clauses go into a sub-query that is a MUST of the top level,
+ // mirroring the "at least one SHOULD must match" filter semantics.
+ BooleanQuery orQuery = new BooleanQuery();
+ boolean hasMust = false;
+ boolean hasShould = false;
+ boolean hasMustNot = false;
+ for(int i = 0; i < numClauses; i++) {
+ int field = random().nextInt(5);
+ char value = distinctValues[random().nextInt(distinctValues.length)];
+ // 50% MUST, 40% SHOULD, 10% MUST_NOT; rarely use an EmptyFilter
+ // (mirrored by MatchNoDocsQuery) to exercise empty-clause handling.
+ switch(random().nextInt(10)) {
+ case 9:
+ case 8:
+ case 7:
+ case 6:
+ case 5:
+ hasMust = true;
+ if (rarely()) {
+ clauses[i] = new FilterClause(new EmptyFilter(), MUST);
+ topLevel.add(new BooleanClause(new MatchNoDocsQuery(), MUST));
+ } else {
+ clauses[i] = newFilterClause(field, value, MUST, random().nextBoolean());
+ topLevel.add(new BooleanClause(new TermQuery(new Term(String.valueOf(field), String.valueOf(value))), MUST));
+ }
+ break;
+ case 4:
+ case 3:
+ case 2:
+ case 1:
+ hasShould = true;
+ if (rarely()) {
+ clauses[i] = new FilterClause(new EmptyFilter(), SHOULD);
+ orQuery.add(new BooleanClause(new MatchNoDocsQuery(), SHOULD));
+ } else {
+ clauses[i] = newFilterClause(field, value, SHOULD, random().nextBoolean());
+ orQuery.add(new BooleanClause(new TermQuery(new Term(String.valueOf(field), String.valueOf(value))), SHOULD));
+ }
+ break;
+ case 0:
+ hasMustNot = true;
+ if (rarely()) {
+ clauses[i] = new FilterClause(new EmptyFilter(), MUST_NOT);
+ topLevel.add(new BooleanClause(new MatchNoDocsQuery(), MUST_NOT));
+ } else {
+ clauses[i] = newFilterClause(field, value, MUST_NOT, random().nextBoolean());
+ topLevel.add(new BooleanClause(new TermQuery(new Term(String.valueOf(field), String.valueOf(value))), MUST_NOT));
+ }
+ break;
+
+ }
+ }
+ if (orQuery.getClauses().length > 0) {
+ topLevel.add(new BooleanClause(orQuery, MUST));
+ }
+ if (hasMustNot && !hasMust && !hasShould) { // pure negative
+ // A query with only MUST_NOT clauses matches nothing on its own;
+ // add MatchAllDocsQuery to mirror the filter's pure-negative behavior.
+ topLevel.add(new BooleanClause(new MatchAllDocsQuery(), MUST));
+ }
+ XBooleanFilter booleanFilter = createBooleanFilter(clauses);
+
+ FixedBitSet leftResult = new FixedBitSet(reader.maxDoc());
+ FixedBitSet rightResult = new FixedBitSet(reader.maxDoc());
+ DocIdSet left = booleanFilter.getDocIdSet(reader.getContext(), reader.getLiveDocs());
+ DocIdSet right = new QueryWrapperFilter(topLevel).getDocIdSet(reader.getContext(), reader.getLiveDocs());
+ // A null DocIdSet, a null iterator and an exhausted iterator all mean
+ // "no documents"; either side may use any of those representations.
+ if (left == null || right == null) {
+ if (left == null && right != null) {
+ assertThat(errorMsg(clauses, topLevel), (right.iterator() == null ? DocIdSetIterator.NO_MORE_DOCS : right.iterator().nextDoc()), equalTo(DocIdSetIterator.NO_MORE_DOCS));
+ }
+ if (left != null && right == null) {
+ assertThat(errorMsg(clauses, topLevel), (left.iterator() == null ? DocIdSetIterator.NO_MORE_DOCS : left.iterator().nextDoc()), equalTo(DocIdSetIterator.NO_MORE_DOCS));
+ }
+ } else {
+ DocIdSetIterator leftIter = left.iterator();
+ DocIdSetIterator rightIter = right.iterator();
+ if (leftIter != null) {
+ leftResult.or(leftIter);
+ }
+
+ if (rightIter != null) {
+ rightResult.or(rightIter);
+ }
+
+ // Filter and reference query must match exactly the same documents.
+ assertThat(leftResult.cardinality(), equalTo(rightResult.cardinality()));
+ for (int i = 0; i < reader.maxDoc(); i++) {
+ assertThat(errorMsg(clauses, topLevel) + " -- failed at index " + i, leftResult.get(i), equalTo(rightResult.get(i)));
+ }
+ }
+ }
+ }
+
+ /** Describes the reference query and the filter clauses, for assertion messages. */
+ private String errorMsg(FilterClause[] clauses, BooleanQuery query) {
+ StringBuilder msg = new StringBuilder();
+ msg.append(query.toString()).append(" vs. ").append(Arrays.toString(clauses));
+ return msg.toString();
+ }
+
+
+ /**
+ * A FieldCacheTermsFilter whose toString() marks it as the "slow" filter
+ * variant, making randomized-test failure messages readable.
+ */
+ public static final class PrettyPrintFieldCacheTermsFilter extends FieldCacheTermsFilter {
+
+ // Kept only for toString(); the superclass does not expose them.
+ private final String value;
+ private final String field;
+
+ public PrettyPrintFieldCacheTermsFilter(String field, String value) {
+ super(field, value);
+ this.field = field;
+ this.value = value;
+ }
+
+ @Override
+ public String toString() {
+ return "SLOW(" + field + ":" + value + ")";
+ }
+ }
+
+ /**
+ * A filter that randomly reports "no documents" in one of the two legal
+ * ways: a null DocIdSet, or a DocIdSet whose iterator() is null. Used by
+ * testRandom() to exercise XBooleanFilter's empty-clause handling.
+ *
+ * Declared static: it only calls the inherited static random() helper, so
+ * it does not need the hidden reference to the enclosing test instance
+ * that a non-static inner class would carry.
+ */
+ public static final class EmptyFilter extends Filter {
+
+ @Override
+ public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
+ return random().nextBoolean() ? new Empty() : null;
+ }
+
+ /** A DocIdSet whose iterator() is null, i.e. matches no documents. */
+ private static class Empty extends DocIdSet {
+
+ @Override
+ public DocIdSetIterator iterator() throws IOException {
+ return null;
+ }
+ }
+ }
+
+}
+
diff --git a/src/test/java/org/elasticsearch/common/lucene/store/InputStreamIndexInputTests.java b/src/test/java/org/elasticsearch/common/lucene/store/InputStreamIndexInputTests.java
new file mode 100644
index 0000000..315b93e
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/lucene/store/InputStreamIndexInputTests.java
@@ -0,0 +1,266 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.lucene.store;
+
+import org.apache.lucene.store.IOContext;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.store.IndexOutput;
+import org.apache.lucene.store.RAMDirectory;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.lessThan;
+
+/**
+ * Tests for {@link InputStreamIndexInput}, which exposes a bounded window of
+ * an {@link IndexInput} as an InputStream. Every test writes six bytes --
+ * three 1s followed by three 2s -- to a file in a RAMDirectory and reads them
+ * back through windows of various sizes.
+ *
+ * NOTE(review): the RAMDirectory and IndexInput instances are never closed;
+ * harmless for an in-memory directory, but worth tidying up.
+ */
+public class InputStreamIndexInputTests extends ElasticsearchTestCase {
+
+ @Test
+ // Limit 1, single-byte read(): each stream wraps exactly one byte of the
+ // underlying input; a window opened at EOF reads nothing.
+ public void testSingleReadSingleByteLimit() throws IOException {
+ RAMDirectory dir = new RAMDirectory();
+ IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
+ for (int i = 0; i < 3; i++) {
+ output.writeByte((byte) 1);
+ }
+ for (int i = 0; i < 3; i++) {
+ output.writeByte((byte) 2);
+ }
+
+ output.close();
+
+ IndexInput input = dir.openInput("test", IOContext.DEFAULT);
+
+ for (int i = 0; i < 3; i++) {
+ InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
+ assertThat(input.getFilePointer(), lessThan(input.length()));
+ assertThat(is.actualSizeToRead(), equalTo(1l));
+ assertThat(is.read(), equalTo(1));
+ assertThat(is.read(), equalTo(-1));
+ }
+
+ for (int i = 0; i < 3; i++) {
+ InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
+ assertThat(input.getFilePointer(), lessThan(input.length()));
+ assertThat(is.actualSizeToRead(), equalTo(1l));
+ assertThat(is.read(), equalTo(2));
+ assertThat(is.read(), equalTo(-1));
+ }
+
+ assertThat(input.getFilePointer(), equalTo(input.length()));
+ InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
+ assertThat(is.actualSizeToRead(), equalTo(0l));
+ assertThat(is.read(), equalTo(-1));
+ }
+
+ @Test
+ // Limit 1, bulk read(byte[]): each stream still yields exactly one byte.
+ public void testReadMultiSingleByteLimit1() throws IOException {
+ RAMDirectory dir = new RAMDirectory();
+ IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
+ for (int i = 0; i < 3; i++) {
+ output.writeByte((byte) 1);
+ }
+ for (int i = 0; i < 3; i++) {
+ output.writeByte((byte) 2);
+ }
+
+ output.close();
+
+ IndexInput input = dir.openInput("test", IOContext.DEFAULT);
+
+ byte[] read = new byte[2];
+
+ for (int i = 0; i < 3; i++) {
+ assertThat(input.getFilePointer(), lessThan(input.length()));
+ InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
+ assertThat(is.actualSizeToRead(), equalTo(1l));
+ assertThat(is.read(read), equalTo(1));
+ assertThat(read[0], equalTo((byte) 1));
+ }
+
+ for (int i = 0; i < 3; i++) {
+ assertThat(input.getFilePointer(), lessThan(input.length()));
+ InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
+ assertThat(is.actualSizeToRead(), equalTo(1l));
+ assertThat(is.read(read), equalTo(1));
+ assertThat(read[0], equalTo((byte) 2));
+ }
+
+ assertThat(input.getFilePointer(), equalTo(input.length()));
+ InputStreamIndexInput is = new InputStreamIndexInput(input, 1);
+ assertThat(is.actualSizeToRead(), equalTo(0l));
+ assertThat(is.read(read), equalTo(-1));
+ }
+
+ @Test
+ // Limit 2, single-byte read(): consecutive streams cover the file two
+ // bytes at a time and share the underlying file pointer.
+ public void testSingleReadTwoBytesLimit() throws IOException {
+ RAMDirectory dir = new RAMDirectory();
+ IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
+ for (int i = 0; i < 3; i++) {
+ output.writeByte((byte) 1);
+ }
+ for (int i = 0; i < 3; i++) {
+ output.writeByte((byte) 2);
+ }
+
+ output.close();
+
+ IndexInput input = dir.openInput("test", IOContext.DEFAULT);
+
+ assertThat(input.getFilePointer(), lessThan(input.length()));
+ InputStreamIndexInput is = new InputStreamIndexInput(input, 2);
+ assertThat(is.actualSizeToRead(), equalTo(2l));
+ assertThat(is.read(), equalTo(1));
+ assertThat(is.read(), equalTo(1));
+ assertThat(is.read(), equalTo(-1));
+
+ assertThat(input.getFilePointer(), lessThan(input.length()));
+ is = new InputStreamIndexInput(input, 2);
+ assertThat(is.actualSizeToRead(), equalTo(2l));
+ assertThat(is.read(), equalTo(1));
+ assertThat(is.read(), equalTo(2));
+ assertThat(is.read(), equalTo(-1));
+
+ assertThat(input.getFilePointer(), lessThan(input.length()));
+ is = new InputStreamIndexInput(input, 2);
+ assertThat(is.actualSizeToRead(), equalTo(2l));
+ assertThat(is.read(), equalTo(2));
+ assertThat(is.read(), equalTo(2));
+ assertThat(is.read(), equalTo(-1));
+
+ assertThat(input.getFilePointer(), equalTo(input.length()));
+ is = new InputStreamIndexInput(input, 2);
+ assertThat(is.actualSizeToRead(), equalTo(0l));
+ assertThat(is.read(), equalTo(-1));
+ }
+
+ @Test
+ // Limit 2, bulk read(byte[]): same windows as above, read two at a time.
+ public void testReadMultiTwoBytesLimit1() throws IOException {
+ RAMDirectory dir = new RAMDirectory();
+ IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
+ for (int i = 0; i < 3; i++) {
+ output.writeByte((byte) 1);
+ }
+ for (int i = 0; i < 3; i++) {
+ output.writeByte((byte) 2);
+ }
+
+ output.close();
+
+ IndexInput input = dir.openInput("test", IOContext.DEFAULT);
+
+ byte[] read = new byte[2];
+
+ assertThat(input.getFilePointer(), lessThan(input.length()));
+ InputStreamIndexInput is = new InputStreamIndexInput(input, 2);
+ assertThat(is.actualSizeToRead(), equalTo(2l));
+ assertThat(is.read(read), equalTo(2));
+ assertThat(read[0], equalTo((byte) 1));
+ assertThat(read[1], equalTo((byte) 1));
+
+ assertThat(input.getFilePointer(), lessThan(input.length()));
+ is = new InputStreamIndexInput(input, 2);
+ assertThat(is.actualSizeToRead(), equalTo(2l));
+ assertThat(is.read(read), equalTo(2));
+ assertThat(read[0], equalTo((byte) 1));
+ assertThat(read[1], equalTo((byte) 2));
+
+ assertThat(input.getFilePointer(), lessThan(input.length()));
+ is = new InputStreamIndexInput(input, 2);
+ assertThat(is.actualSizeToRead(), equalTo(2l));
+ assertThat(is.read(read), equalTo(2));
+ assertThat(read[0], equalTo((byte) 2));
+ assertThat(read[1], equalTo((byte) 2));
+
+ assertThat(input.getFilePointer(), equalTo(input.length()));
+ is = new InputStreamIndexInput(input, 2);
+ assertThat(is.actualSizeToRead(), equalTo(0l));
+ assertThat(is.read(read), equalTo(-1));
+ }
+
+ @Test
+ // Limit 4 on a 6-byte file: the second window is truncated to the two
+ // remaining bytes (actualSizeToRead == 2).
+ public void testReadMultiFourBytesLimit() throws IOException {
+ RAMDirectory dir = new RAMDirectory();
+ IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
+ for (int i = 0; i < 3; i++) {
+ output.writeByte((byte) 1);
+ }
+ for (int i = 0; i < 3; i++) {
+ output.writeByte((byte) 2);
+ }
+
+ output.close();
+
+ IndexInput input = dir.openInput("test", IOContext.DEFAULT);
+
+ byte[] read = new byte[4];
+
+ assertThat(input.getFilePointer(), lessThan(input.length()));
+ InputStreamIndexInput is = new InputStreamIndexInput(input, 4);
+ assertThat(is.actualSizeToRead(), equalTo(4l));
+ assertThat(is.read(read), equalTo(4));
+ assertThat(read[0], equalTo((byte) 1));
+ assertThat(read[1], equalTo((byte) 1));
+ assertThat(read[2], equalTo((byte) 1));
+ assertThat(read[3], equalTo((byte) 2));
+
+ assertThat(input.getFilePointer(), lessThan(input.length()));
+ is = new InputStreamIndexInput(input, 4);
+ assertThat(is.actualSizeToRead(), equalTo(2l));
+ assertThat(is.read(read), equalTo(2));
+ assertThat(read[0], equalTo((byte) 2));
+ assertThat(read[1], equalTo((byte) 2));
+
+ assertThat(input.getFilePointer(), equalTo(input.length()));
+ is = new InputStreamIndexInput(input, 4);
+ assertThat(is.actualSizeToRead(), equalTo(0l));
+ assertThat(is.read(read), equalTo(-1));
+ }
+
+ @Test
+ // mark()/reset() replays the bytes read since the mark.
+ // NOTE(review): name looks like a typo for "testMarkReset".
+ public void testMarkRest() throws Exception {
+ RAMDirectory dir = new RAMDirectory();
+ IndexOutput output = dir.createOutput("test", IOContext.DEFAULT);
+ for (int i = 0; i < 3; i++) {
+ output.writeByte((byte) 1);
+ }
+ for (int i = 0; i < 3; i++) {
+ output.writeByte((byte) 2);
+ }
+
+ output.close();
+
+ IndexInput input = dir.openInput("test", IOContext.DEFAULT);
+ InputStreamIndexInput is = new InputStreamIndexInput(input, 4);
+ assertThat(is.markSupported(), equalTo(true));
+ assertThat(is.read(), equalTo(1));
+ assertThat(is.read(), equalTo(1));
+ is.mark(0);
+ assertThat(is.read(), equalTo(1));
+ assertThat(is.read(), equalTo(2));
+ is.reset();
+ assertThat(is.read(), equalTo(1));
+ assertThat(is.read(), equalTo(2));
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java b/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java
new file mode 100644
index 0000000..8556058
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/lucene/uid/VersionsTests.java
@@ -0,0 +1,285 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.common.lucene.uid;
+
+import com.google.common.collect.ImmutableMap;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
+import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
+import org.apache.lucene.document.*;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.index.*;
+import org.apache.lucene.index.FieldInfo.IndexOptions;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.Numbers;
+import org.elasticsearch.common.lucene.Lucene;
+import org.elasticsearch.index.mapper.internal.UidFieldMapper;
+import org.elasticsearch.index.mapper.internal.VersionFieldMapper;
+import org.elasticsearch.index.merge.Merges;
+import org.elasticsearch.index.merge.policy.IndexUpgraderMergePolicy;
+import org.elasticsearch.test.ElasticsearchLuceneTestCase;
+import org.hamcrest.MatcherAssert;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.*;
+
+/**
+ * Tests for {@link Versions}: resolving a document's version by uid term,
+ * covering missing docs, docs without a version, nested documents, the
+ * legacy payload-based version encoding, and segment upgrades through
+ * IndexUpgraderMergePolicy.
+ */
+public class VersionsTests extends ElasticsearchLuceneTestCase {
+
+ public static DirectoryReader reopen(DirectoryReader reader) throws IOException {
+ return reopen(reader, true);
+ }
+
+ /**
+ * Reopens the reader if the index changed, closing the old one. Returns
+ * the new reader, or null when the index did not change -- in which case
+ * {@code newReaderExpected} must have been false.
+ */
+ public static DirectoryReader reopen(DirectoryReader reader, boolean newReaderExpected) throws IOException {
+ DirectoryReader newReader = DirectoryReader.openIfChanged(reader);
+ if (newReader != null) {
+ reader.close();
+ } else {
+ assertFalse(newReaderExpected);
+ }
+ return newReader;
+ }
+ @Test
+ public void testVersions() throws Exception {
+ Directory dir = newDirectory();
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
+ DirectoryReader directoryReader = DirectoryReader.open(writer, true);
+ // empty index: uid not found at all
+ MatcherAssert.assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(Versions.NOT_FOUND));
+
+ // uid present but no version doc value -> NOT_SET
+ Document doc = new Document();
+ doc.add(new Field(UidFieldMapper.NAME, "1", UidFieldMapper.Defaults.FIELD_TYPE));
+ writer.addDocument(doc);
+ directoryReader = reopen(directoryReader);
+ assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(Versions.NOT_SET));
+ assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(Versions.NOT_SET));
+
+ doc = new Document();
+ doc.add(new Field(UidFieldMapper.NAME, "1", UidFieldMapper.Defaults.FIELD_TYPE));
+ doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, 1));
+ writer.updateDocument(new Term(UidFieldMapper.NAME, "1"), doc);
+ directoryReader = reopen(directoryReader);
+ assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(1l));
+ assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(1l));
+
+ doc = new Document();
+ Field uid = new Field(UidFieldMapper.NAME, "1", UidFieldMapper.Defaults.FIELD_TYPE);
+ Field version = new NumericDocValuesField(VersionFieldMapper.NAME, 2);
+ doc.add(uid);
+ doc.add(version);
+ writer.updateDocument(new Term(UidFieldMapper.NAME, "1"), doc);
+ directoryReader = reopen(directoryReader);
+ assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(2l));
+ assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(2l));
+
+ // test reuse of uid field
+ doc = new Document();
+ version.setLongValue(3);
+ doc.add(uid);
+ doc.add(version);
+ writer.updateDocument(new Term(UidFieldMapper.NAME, "1"), doc);
+
+ directoryReader = reopen(directoryReader);
+ assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(3l));
+ assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(3l));
+
+ // deleted doc reports NOT_FOUND again
+ writer.deleteDocuments(new Term(UidFieldMapper.NAME, "1"));
+ directoryReader = reopen(directoryReader);
+ assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(Versions.NOT_FOUND));
+ assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), nullValue());
+ directoryReader.close();
+ writer.close();
+ dir.close();
+ }
+
+ @Test
+ // The version lives on the root (last) document of a nested block and is
+ // resolved through the block's shared uid.
+ public void testNestedDocuments() throws IOException {
+ Directory dir = newDirectory();
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
+
+ List<Document> docs = new ArrayList<Document>();
+ for (int i = 0; i < 4; ++i) {
+ // Nested
+ Document doc = new Document();
+ doc.add(new Field(UidFieldMapper.NAME, "1", UidFieldMapper.Defaults.NESTED_FIELD_TYPE));
+ docs.add(doc);
+ }
+ // Root
+ Document doc = new Document();
+ doc.add(new Field(UidFieldMapper.NAME, "1", UidFieldMapper.Defaults.FIELD_TYPE));
+ NumericDocValuesField version = new NumericDocValuesField(VersionFieldMapper.NAME, 5L);
+ doc.add(version);
+ docs.add(doc);
+
+ writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs);
+ DirectoryReader directoryReader = DirectoryReader.open(writer, true);
+ assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(5l));
+ assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(5l));
+
+ // repeated updates: the latest block wins
+ version.setLongValue(6L);
+ writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs);
+ version.setLongValue(7L);
+ writer.updateDocuments(new Term(UidFieldMapper.NAME, "1"), docs);
+ directoryReader = reopen(directoryReader);
+ assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(7l));
+ assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")).version, equalTo(7l));
+
+ writer.deleteDocuments(new Term(UidFieldMapper.NAME, "1"));
+ directoryReader = reopen(directoryReader);
+ assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(Versions.NOT_FOUND));
+ assertThat(Versions.loadDocIdAndVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), nullValue());
+ directoryReader.close();
+ writer.close();
+ dir.close();
+ }
+
+ @Test
+ // Versions encoded the old way -- as a payload on the uid term -- must
+ // still be readable (see UidField below).
+ public void testBackwardCompatibility() throws IOException {
+ Directory dir = newDirectory();
+ IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(Lucene.VERSION, Lucene.STANDARD_ANALYZER));
+
+ DirectoryReader directoryReader = DirectoryReader.open(writer, true);
+ MatcherAssert.assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(Versions.NOT_FOUND));
+
+ Document doc = new Document();
+ UidField uidAndVersion = new UidField("1", 1L);
+ doc.add(uidAndVersion);
+ writer.addDocument(doc);
+
+ // the field instance is reused; the token stream reads uid/version lazily
+ uidAndVersion.uid = "2";
+ uidAndVersion.version = 2;
+ writer.addDocument(doc);
+ writer.commit();
+
+ directoryReader = reopen(directoryReader);
+ assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "1")), equalTo(1l));
+ assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "2")), equalTo(2l));
+ assertThat(Versions.loadVersion(directoryReader, new Term(UidFieldMapper.NAME, "3")), equalTo(Versions.NOT_FOUND));
+ directoryReader.close();
+ writer.close();
+ dir.close();
+ }
+
+ // This is how versions used to be encoded
+ private static class UidField extends Field {
+ private static final FieldType FIELD_TYPE = new FieldType();
+ static {
+ FIELD_TYPE.setTokenized(true);
+ FIELD_TYPE.setIndexed(true);
+ FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
+ FIELD_TYPE.setStored(true);
+ FIELD_TYPE.freeze();
+ }
+ // Mutable so a single field instance can be reused across documents.
+ String uid;
+ long version;
+ UidField(String uid, long version) {
+ super(UidFieldMapper.NAME, uid, FIELD_TYPE);
+ this.uid = uid;
+ this.version = version;
+ }
+ // Emits a single token (the uid) carrying the version as its payload.
+ @Override
+ public TokenStream tokenStream(Analyzer analyzer) throws IOException {
+ return new TokenStream() {
+ // Starts exhausted: reset() flips this to false, after which
+ // exactly one token is produced (per the TokenStream contract,
+ // reset() is called before consumption).
+ boolean finished = true;
+ final CharTermAttribute term = addAttribute(CharTermAttribute.class);
+ final PayloadAttribute payload = addAttribute(PayloadAttribute.class);
+ @Override
+ public boolean incrementToken() throws IOException {
+ if (finished) {
+ return false;
+ }
+ term.setEmpty().append(uid);
+ payload.setPayload(new BytesRef(Numbers.longToBytes(version)));
+ finished = true;
+ return true;
+ }
+ @Override
+ public void reset() throws IOException {
+ finished = false;
+ }
+ };
+ }
+ }
+
+ @Test
+ // Merging segments written with mixed layouts (no version, legacy payload
+ // version, doc-values version) through IndexUpgraderMergePolicy must leave
+ // every document with a readable numeric doc-values version.
+ public void testMergingOldIndices() throws Exception {
+ final IndexWriterConfig iwConf = new IndexWriterConfig(Lucene.VERSION, new KeywordAnalyzer());
+ iwConf.setMergePolicy(new IndexUpgraderMergePolicy(iwConf.getMergePolicy()));
+ final Directory dir = newDirectory();
+ final IndexWriter iw = new IndexWriter(dir, iwConf);
+
+ // 1st segment, no _version
+ Document document = new Document();
+ // Add a dummy field (enough to trigger #3237)
+ document.add(new StringField("a", "b", Store.NO));
+ StringField uid = new StringField(UidFieldMapper.NAME, "1", Store.YES);
+ document.add(uid);
+ iw.addDocument(document);
+ uid.setStringValue("2");
+ iw.addDocument(document);
+ iw.commit();
+
+ // 2nd segment, old layout
+ document = new Document();
+ UidField uidAndVersion = new UidField("3", 3L);
+ document.add(uidAndVersion);
+ iw.addDocument(document);
+ uidAndVersion.uid = "4";
+ uidAndVersion.version = 4L;
+ iw.addDocument(document);
+ iw.commit();
+
+ // 3rd segment new layout
+ document = new Document();
+ uid.setStringValue("5");
+ Field version = new NumericDocValuesField(VersionFieldMapper.NAME, 5L);
+ document.add(uid);
+ document.add(version);
+ iw.addDocument(document);
+ uid.setStringValue("6");
+ version.setLongValue(6L);
+ iw.addDocument(document);
+ iw.commit();
+
+ final Map<String, Long> expectedVersions = ImmutableMap.<String, Long>builder()
+ .put("1", 0L).put("2", 0L).put("3", 0L).put("4", 4L).put("5", 5L).put("6", 6L).build();
+
+ // Force merge and check versions
+ Merges.forceMerge(iw, 1);
+ final AtomicReader ir = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(iw.getDirectory()));
+ final NumericDocValues versions = ir.getNumericDocValues(VersionFieldMapper.NAME);
+ assertThat(versions, notNullValue());
+ for (int i = 0; i < ir.maxDoc(); ++i) {
+ final String uidValue = ir.document(i).get(UidFieldMapper.NAME);
+ final long expectedVersion = expectedVersions.get(uidValue);
+ assertThat(versions.get(i), equalTo(expectedVersion));
+ }
+
+ // close() releases the directory's write lock
+ iw.close();
+ assertThat(IndexWriter.isLocked(iw.getDirectory()), is(false));
+ ir.close();
+ dir.close();
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/path/PathTrieTests.java b/src/test/java/org/elasticsearch/common/path/PathTrieTests.java
new file mode 100644
index 0000000..6b60a45
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/path/PathTrieTests.java
@@ -0,0 +1,160 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.path;
+
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import java.util.Map;
+
+import static com.google.common.collect.Maps.newHashMap;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
+
+/**
+ * Tests for {@link PathTrie}: inserting and retrieving values for paths made
+ * of literal segments, {@code *} wildcards and {@code {name}} named
+ * parameters, which are captured into the params map on retrieval.
+ */
+public class PathTrieTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testPath() {
+ PathTrie<String> trie = new PathTrie<String>();
+ trie.insert("/a/b/c", "walla");
+ trie.insert("a/d/g", "kuku");
+ trie.insert("x/b/c", "lala");
+ trie.insert("a/x/*", "one");
+ trie.insert("a/b/*", "two");
+ trie.insert("*/*/x", "three");
+ trie.insert("{index}/insert/{docId}", "bingo");
+
+ assertThat(trie.retrieve("a/b/c"), equalTo("walla"));
+ assertThat(trie.retrieve("a/d/g"), equalTo("kuku"));
+ assertThat(trie.retrieve("x/b/c"), equalTo("lala"));
+ assertThat(trie.retrieve("a/x/b"), equalTo("one"));
+ assertThat(trie.retrieve("a/b/d"), equalTo("two"));
+
+ // no match for shorter or longer paths than any inserted pattern
+ assertThat(trie.retrieve("a/b"), nullValue());
+ assertThat(trie.retrieve("a/b/c/d"), nullValue());
+ assertThat(trie.retrieve("g/t/x"), equalTo("three"));
+
+ // {index} and {docId} are captured into the params map
+ Map<String, String> params = newHashMap();
+ assertThat(trie.retrieve("index1/insert/12", params), equalTo("bingo"));
+ assertThat(params.size(), equalTo(2));
+ assertThat(params.get("index"), equalTo("index1"));
+ assertThat(params.get("docId"), equalTo("12"));
+ }
+
+ @Test
+ public void testEmptyPath() {
+ PathTrie<String> trie = new PathTrie<String>();
+ trie.insert("/", "walla");
+ assertThat(trie.retrieve(""), equalTo("walla"));
+ }
+
+ @Test
+ public void testDifferentNamesOnDifferentPath() {
+ PathTrie<String> trie = new PathTrie<String>();
+ trie.insert("/a/{type}", "test1");
+ trie.insert("/b/{name}", "test2");
+
+ Map<String, String> params = newHashMap();
+ assertThat(trie.retrieve("/a/test", params), equalTo("test1"));
+ assertThat(params.get("type"), equalTo("test"));
+
+ params.clear();
+ assertThat(trie.retrieve("/b/testX", params), equalTo("test2"));
+ assertThat(params.get("name"), equalTo("testX"));
+ }
+
+ @Test
+ public void testSameNameOnDifferentPath() {
+ PathTrie<String> trie = new PathTrie<String>();
+ trie.insert("/a/c/{name}", "test1");
+ trie.insert("/b/{name}", "test2");
+
+ Map<String, String> params = newHashMap();
+ assertThat(trie.retrieve("/a/c/test", params), equalTo("test1"));
+ assertThat(params.get("name"), equalTo("test"));
+
+ params.clear();
+ assertThat(trie.retrieve("/b/testX", params), equalTo("test2"));
+ assertThat(params.get("name"), equalTo("testX"));
+ }
+
+ @Test
+ // A literal segment takes precedence over a wildcard at the same position.
+ public void testPreferNonWildcardExecution() {
+ PathTrie<String> trie = new PathTrie<String>();
+ trie.insert("{test}", "test1");
+ trie.insert("b", "test2");
+ trie.insert("{test}/a", "test3");
+ trie.insert("b/a", "test4");
+
+ Map<String, String> params = newHashMap();
+ assertThat(trie.retrieve("/b", params), equalTo("test2"));
+ assertThat(trie.retrieve("/b/a", params), equalTo("test4"));
+ }
+
+ @Test
+ // Concrete middle segments ("_y") disambiguate otherwise identical
+ // all-parameter patterns.
+ public void testSamePathConcreteResolution() {
+ PathTrie<String> trie = new PathTrie<String>();
+ trie.insert("{x}/{y}/{z}", "test1");
+ trie.insert("{x}/_y/{k}", "test2");
+
+ Map<String, String> params = newHashMap();
+ assertThat(trie.retrieve("/a/b/c", params), equalTo("test1"));
+ assertThat(params.get("x"), equalTo("a"));
+ assertThat(params.get("y"), equalTo("b"));
+ assertThat(params.get("z"), equalTo("c"));
+ params.clear();
+ assertThat(trie.retrieve("/a/_y/c", params), equalTo("test2"));
+ assertThat(params.get("x"), equalTo("a"));
+ assertThat(params.get("k"), equalTo("c"));
+ }
+
+ @Test
+ // A literal "*" in the looked-up path is captured verbatim by named
+ // parameters and still matches "*" trie segments.
+ public void testNamedWildcardAndLookupWithWildcard() {
+ PathTrie<String> trie = new PathTrie<String>();
+ trie.insert("x/{test}", "test1");
+ trie.insert("{test}/a", "test2");
+ trie.insert("/{test}", "test3");
+ trie.insert("/{test}/_endpoint", "test4");
+ trie.insert("/*/{test}/_endpoint", "test5");
+
+ Map<String, String> params = newHashMap();
+ assertThat(trie.retrieve("/x/*", params), equalTo("test1"));
+ assertThat(params.get("test"), equalTo("*"));
+
+ params = newHashMap();
+ assertThat(trie.retrieve("/b/a", params), equalTo("test2"));
+ assertThat(params.get("test"), equalTo("b"));
+
+ params = newHashMap();
+ assertThat(trie.retrieve("/*", params), equalTo("test3"));
+ assertThat(params.get("test"), equalTo("*"));
+
+ params = newHashMap();
+ assertThat(trie.retrieve("/*/_endpoint", params), equalTo("test4"));
+ assertThat(params.get("test"), equalTo("*"));
+
+ params = newHashMap();
+ assertThat(trie.retrieve("a/*/_endpoint", params), equalTo("test5"));
+ assertThat(params.get("test"), equalTo("*"));
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/recycler/AbstractRecyclerTests.java b/src/test/java/org/elasticsearch/common/recycler/AbstractRecyclerTests.java
new file mode 100644
index 0000000..74c491b
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/recycler/AbstractRecyclerTests.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.recycler;
+
+import org.elasticsearch.ElasticsearchIllegalStateException;
+import org.elasticsearch.test.ElasticsearchTestCase;
+
+import java.util.Arrays;
+
+public abstract class AbstractRecyclerTests extends ElasticsearchTestCase {
+
+ protected static final Recycler.C<byte[]> RECYCLER_C = new Recycler.C<byte[]>() {
+
+ @Override
+ public byte[] newInstance(int sizing) {
+ return new byte[10];
+ }
+
+ @Override
+ public void clear(byte[] value) {
+ Arrays.fill(value, (byte) 0);
+ }
+
+ };
+
+ protected abstract Recycler<byte[]> newRecycler();
+
+ public void testReuse() {
+ Recycler<byte[]> r = newRecycler();
+ Recycler.V<byte[]> o = r.obtain();
+ assertFalse(o.isRecycled());
+ final byte[] b1 = o.v();
+ o.release();
+ o = r.obtain();
+ final byte[] b2 = o.v();
+ if (o.isRecycled()) {
+ assertSame(b1, b2);
+ } else {
+ assertNotSame(b1, b2);
+ }
+ o.release();
+ r.close();
+ }
+
+ public void testClear() {
+ Recycler<byte[]> r = newRecycler();
+ Recycler.V<byte[]> o = r.obtain();
+ getRandom().nextBytes(o.v());
+ o.release();
+ o = r.obtain();
+ for (int i = 0; i < o.v().length; ++i) {
+ assertEquals(0, o.v()[i]);
+ }
+ o.release();
+ r.close();
+ }
+
+ public void testDoubleRelease() {
+ final Recycler<byte[]> r = newRecycler();
+ final Recycler.V<byte[]> v1 = r.obtain();
+ v1.release();
+ try {
+ v1.release();
+ } catch (ElasticsearchIllegalStateException e) {
+ // impl has protection against double release: ok
+ return;
+ }
+ // otherwise ensure that the impl may not be returned twice
+ final Recycler.V<byte[]> v2 = r.obtain();
+ final Recycler.V<byte[]> v3 = r.obtain();
+ assertNotSame(v2.v(), v3.v());
+ r.close();
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/recycler/ConcurrentRecyclerTests.java b/src/test/java/org/elasticsearch/common/recycler/ConcurrentRecyclerTests.java
new file mode 100644
index 0000000..758041d
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/recycler/ConcurrentRecyclerTests.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.recycler;
+
+public class ConcurrentRecyclerTests extends AbstractRecyclerTests {
+
+ @Override
+ protected Recycler<byte[]> newRecycler() {
+ return Recyclers.concurrent(Recyclers.dequeFactory(RECYCLER_C, randomIntBetween(5, 10)), randomIntBetween(1,5));
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/recycler/LockedRecyclerTests.java b/src/test/java/org/elasticsearch/common/recycler/LockedRecyclerTests.java
new file mode 100644
index 0000000..9ffdf7a
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/recycler/LockedRecyclerTests.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.recycler;
+
+public class LockedRecyclerTests extends AbstractRecyclerTests {
+
+ @Override
+ protected Recycler<byte[]> newRecycler() {
+ return Recyclers.locked(Recyclers.deque(RECYCLER_C, randomIntBetween(5, 10)));
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/recycler/NoneRecyclerTests.java b/src/test/java/org/elasticsearch/common/recycler/NoneRecyclerTests.java
new file mode 100644
index 0000000..a60c0ba
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/recycler/NoneRecyclerTests.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.recycler;
+
+public class NoneRecyclerTests extends AbstractRecyclerTests {
+
+ @Override
+ protected Recycler<byte[]> newRecycler() {
+ return Recyclers.none(RECYCLER_C);
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/recycler/QueueRecyclerTests.java b/src/test/java/org/elasticsearch/common/recycler/QueueRecyclerTests.java
new file mode 100644
index 0000000..f693c30
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/recycler/QueueRecyclerTests.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.recycler;
+
+public class QueueRecyclerTests extends AbstractRecyclerTests {
+
+ @Override
+ protected Recycler<byte[]> newRecycler() {
+ return Recyclers.concurrentDeque(RECYCLER_C, randomIntBetween(5, 10));
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/recycler/SoftConcurrentRecyclerTests.java b/src/test/java/org/elasticsearch/common/recycler/SoftConcurrentRecyclerTests.java
new file mode 100644
index 0000000..0320ff5
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/recycler/SoftConcurrentRecyclerTests.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.recycler;
+
+public class SoftConcurrentRecyclerTests extends AbstractRecyclerTests {
+
+ @Override
+ protected Recycler<byte[]> newRecycler() {
+ return Recyclers.concurrent(Recyclers.softFactory(Recyclers.dequeFactory(RECYCLER_C, randomIntBetween(5, 10))), randomIntBetween(1, 5));
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/recycler/SoftThreadLocalRecyclerTests.java b/src/test/java/org/elasticsearch/common/recycler/SoftThreadLocalRecyclerTests.java
new file mode 100644
index 0000000..2a5d253
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/recycler/SoftThreadLocalRecyclerTests.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.recycler;
+
+public class SoftThreadLocalRecyclerTests extends AbstractRecyclerTests {
+
+ @Override
+ protected Recycler<byte[]> newRecycler() {
+ return Recyclers.threadLocal(Recyclers.softFactory(Recyclers.dequeFactory(RECYCLER_C, 10)));
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/recycler/ThreadLocalRecyclerTests.java b/src/test/java/org/elasticsearch/common/recycler/ThreadLocalRecyclerTests.java
new file mode 100644
index 0000000..5ab6892
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/recycler/ThreadLocalRecyclerTests.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.recycler;
+
+public class ThreadLocalRecyclerTests extends AbstractRecyclerTests {
+
+ @Override
+ protected Recycler<byte[]> newRecycler() {
+ return Recyclers.threadLocal(Recyclers.dequeFactory(RECYCLER_C, 10));
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/regex/RegexTests.java b/src/test/java/org/elasticsearch/common/regex/RegexTests.java
new file mode 100644
index 0000000..380bf90
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/regex/RegexTests.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.common.regex;
+
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import java.util.Random;
+import java.util.regex.Pattern;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class RegexTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testFlags() {
+ String[] supportedFlags = new String[]{"CASE_INSENSITIVE", "MULTILINE", "DOTALL", "UNICODE_CASE", "CANON_EQ", "UNIX_LINES",
+ "LITERAL", "COMMENTS", "UNICODE_CHAR_CLASS"};
+ int[] flags = new int[]{Pattern.CASE_INSENSITIVE, Pattern.MULTILINE, Pattern.DOTALL, Pattern.UNICODE_CASE, Pattern.CANON_EQ,
+ Pattern.UNIX_LINES, Pattern.LITERAL, Pattern.COMMENTS, Regex.UNICODE_CHARACTER_CLASS};
+ Random random = getRandom();
+ int num = 10 + random.nextInt(100);
+ for (int i = 0; i < num; i++) {
+ int numFlags = random.nextInt(flags.length + 1);
+ int current = 0;
+ StringBuilder builder = new StringBuilder();
+ for (int j = 0; j < numFlags; j++) {
+ int index = random.nextInt(flags.length);
+ current |= flags[index];
+ builder.append(supportedFlags[index]);
+ if (j < numFlags - 1) {
+ builder.append("|");
+ }
+ }
+ String flagsToString = Regex.flagsToString(current);
+ assertThat(Regex.flagsFromString(builder.toString()), equalTo(current));
+ assertThat(Regex.flagsFromString(builder.toString()), equalTo(Regex.flagsFromString(flagsToString)));
+ Pattern.compile("\\w\\d{1,2}", current); // accepts the flags?
+ }
+ }
+
+ @Test(timeout = 1000)
+ public void testDoubleWildcardMatch() {
+ assertTrue(Regex.simpleMatch("ddd", "ddd"));
+ assertTrue(Regex.simpleMatch("d*d*d", "dadd"));
+ assertTrue(Regex.simpleMatch("**ddd", "dddd"));
+ assertFalse(Regex.simpleMatch("**ddd", "fff"));
+ assertTrue(Regex.simpleMatch("fff*ddd", "fffabcddd"));
+ assertTrue(Regex.simpleMatch("fff**ddd", "fffabcddd"));
+ assertFalse(Regex.simpleMatch("fff**ddd", "fffabcdd"));
+ assertTrue(Regex.simpleMatch("fff*******ddd", "fffabcddd"));
+ assertFalse(Regex.simpleMatch("fff******ddd", "fffabcdd"));
+ }
+
+} \ No newline at end of file
diff --git a/src/test/java/org/elasticsearch/common/rounding/RoundingTests.java b/src/test/java/org/elasticsearch/common/rounding/RoundingTests.java
new file mode 100644
index 0000000..cd77bfc
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/rounding/RoundingTests.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.rounding;
+
+import org.elasticsearch.test.ElasticsearchTestCase;
+
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.lessThanOrEqualTo;
+
+
+
+public class RoundingTests extends ElasticsearchTestCase {
+
+ public void testInterval() {
+ final long interval = randomIntBetween(1, 100);
+ Rounding.Interval rounding = new Rounding.Interval(interval);
+ for (int i = 0; i < 1000; ++i) {
+ long l = Math.max(randomLong(), Long.MIN_VALUE + interval);
+ final long r = rounding.round(l);
+ String message = "round(" + l + ", interval=" + interval + ") = " + r;
+ assertEquals(message, 0, r % interval);
+ assertThat(message, r, lessThanOrEqualTo(l));
+ assertThat(message, r + interval, greaterThan(l));
+ }
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java b/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java
new file mode 100644
index 0000000..f57f4e2
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/rounding/TimeZoneRoundingTests.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.rounding;
+
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.joda.time.DateTimeZone;
+import org.joda.time.format.ISODateTimeFormat;
+import org.junit.Test;
+
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ */
+public class TimeZoneRoundingTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testUTCMonthRounding() {
+ TimeZoneRounding tzRounding = TimeZoneRounding.builder(DateTimeUnit.MONTH_OF_YEAR).build();
+ assertThat(tzRounding.round(utc("2009-02-03T01:01:01")), equalTo(utc("2009-02-01T00:00:00.000Z")));
+ assertThat(tzRounding.nextRoundingValue(utc("2009-02-01T00:00:00.000Z")), equalTo(utc("2009-03-01T00:00:00.000Z")));
+
+ tzRounding = TimeZoneRounding.builder(DateTimeUnit.WEEK_OF_WEEKYEAR).build();
+ assertThat(tzRounding.round(utc("2012-01-10T01:01:01")), equalTo(utc("2012-01-09T00:00:00.000Z")));
+ assertThat(tzRounding.nextRoundingValue(utc("2012-01-09T00:00:00.000Z")), equalTo(utc("2012-01-16T00:00:00.000Z")));
+
+ tzRounding = TimeZoneRounding.builder(DateTimeUnit.WEEK_OF_WEEKYEAR).postOffset(-TimeValue.timeValueHours(24).millis()).build();
+ assertThat(tzRounding.round(utc("2012-01-10T01:01:01")), equalTo(utc("2012-01-08T00:00:00.000Z")));
+ assertThat(tzRounding.nextRoundingValue(utc("2012-01-08T00:00:00.000Z")), equalTo(utc("2012-01-15T00:00:00.000Z")));
+ }
+
+ @Test
+ public void testDayTimeZoneRounding() {
+ TimeZoneRounding tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).preZone(DateTimeZone.forOffsetHours(-2)).build();
+ assertThat(tzRounding.round(0), equalTo(0l - TimeValue.timeValueHours(24).millis()));
+ assertThat(tzRounding.nextRoundingValue(0l - TimeValue.timeValueHours(24).millis()), equalTo(0l));
+
+ tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).preZone(DateTimeZone.forOffsetHours(-2)).postZone(DateTimeZone.forOffsetHours(-2)).build();
+ assertThat(tzRounding.round(0), equalTo(0l - TimeValue.timeValueHours(26).millis()));
+ assertThat(tzRounding.nextRoundingValue(0l - TimeValue.timeValueHours(26).millis()), equalTo(-TimeValue.timeValueHours(2).millis()));
+
+ tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).preZone(DateTimeZone.forOffsetHours(-2)).build();
+ assertThat(tzRounding.round(utc("2009-02-03T01:01:01")), equalTo(utc("2009-02-02T00:00:00")));
+ assertThat(tzRounding.nextRoundingValue(utc("2009-02-02T00:00:00")), equalTo(utc("2009-02-03T00:00:00")));
+
+ tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).preZone(DateTimeZone.forOffsetHours(-2)).postZone(DateTimeZone.forOffsetHours(-2)).build();
+ assertThat(tzRounding.round(utc("2009-02-03T01:01:01")), equalTo(time("2009-02-02T00:00:00", DateTimeZone.forOffsetHours(+2))));
+ assertThat(tzRounding.nextRoundingValue(time("2009-02-02T00:00:00", DateTimeZone.forOffsetHours(+2))), equalTo(time("2009-02-03T00:00:00", DateTimeZone.forOffsetHours(+2))));
+ }
+
+ @Test
+ public void testTimeTimeZoneRounding() {
+ TimeZoneRounding tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).preZone(DateTimeZone.forOffsetHours(-2)).build();
+ assertThat(tzRounding.round(0), equalTo(0l));
+ assertThat(tzRounding.nextRoundingValue(0l), equalTo(TimeValue.timeValueHours(1l).getMillis()));
+
+ tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).preZone(DateTimeZone.forOffsetHours(-2)).postZone(DateTimeZone.forOffsetHours(-2)).build();
+ assertThat(tzRounding.round(0), equalTo(0l - TimeValue.timeValueHours(2).millis()));
+ assertThat(tzRounding.nextRoundingValue(0l - TimeValue.timeValueHours(2).millis()), equalTo(0l - TimeValue.timeValueHours(1).millis()));
+
+ tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).preZone(DateTimeZone.forOffsetHours(-2)).build();
+ assertThat(tzRounding.round(utc("2009-02-03T01:01:01")), equalTo(utc("2009-02-03T01:00:00")));
+ assertThat(tzRounding.nextRoundingValue(utc("2009-02-03T01:00:00")), equalTo(utc("2009-02-03T02:00:00")));
+
+ tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).preZone(DateTimeZone.forOffsetHours(-2)).postZone(DateTimeZone.forOffsetHours(-2)).build();
+ assertThat(tzRounding.round(utc("2009-02-03T01:01:01")), equalTo(time("2009-02-03T01:00:00", DateTimeZone.forOffsetHours(+2))));
+ assertThat(tzRounding.nextRoundingValue(time("2009-02-03T01:00:00", DateTimeZone.forOffsetHours(+2))), equalTo(time("2009-02-03T02:00:00", DateTimeZone.forOffsetHours(+2))));
+ }
+
+ private long utc(String time) {
+ return time(time, DateTimeZone.UTC);
+ }
+
+ private long time(String time, DateTimeZone zone) {
+ return ISODateTimeFormat.dateOptionalTimeParser().withZone(zone).parseMillis(time);
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/settings/ImmutableSettingsTests.java b/src/test/java/org/elasticsearch/common/settings/ImmutableSettingsTests.java
new file mode 100644
index 0000000..845f7e5
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/settings/ImmutableSettingsTests.java
@@ -0,0 +1,178 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.settings;
+
+import org.elasticsearch.common.settings.bar.BarTestClass;
+import org.elasticsearch.common.settings.foo.FooTestClass;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.hamcrest.Matchers;
+import org.junit.Test;
+
+import java.util.List;
+import java.util.Map;
+
+import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
+import static org.hamcrest.Matchers.*;
+
+/**
+ */
+public class ImmutableSettingsTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testGetAsClass() {
+ Settings settings = settingsBuilder()
+ .put("test.class", "bar")
+ .put("test.class.package", "org.elasticsearch.common.settings.bar")
+ .build();
+
+ // Assert that defaultClazz is loaded if setting is not specified
+ assertThat(settings.getAsClass("no.settings", FooTestClass.class, "org.elasticsearch.common.settings.", "TestClass").getName(),
+ equalTo(FooTestClass.class.getName()));
+
+ // Assert that correct class is loaded if setting contain name without package
+ assertThat(settings.getAsClass("test.class", FooTestClass.class, "org.elasticsearch.common.settings.", "TestClass").getName(),
+ equalTo(BarTestClass.class.getName()));
+
+ // Assert that class cannot be loaded if wrong packagePrefix is specified
+ try {
+ settings.getAsClass("test.class", FooTestClass.class, "com.example.elasticsearch.test.unit..common.settings.", "TestClass");
+ fail("Class with wrong package name shouldn't be loaded");
+ } catch (NoClassSettingsException ex) {
+ // Ignore
+ }
+
+ // Assert that package name in settings is getting correctly applied
+ assertThat(settings.getAsClass("test.class.package", FooTestClass.class, "com.example.elasticsearch.test.unit.common.settings.", "TestClass").getName(),
+ equalTo(BarTestClass.class.getName()));
+
+ }
+
+ @Test
+ public void testLoadFromDelimitedString() {
+ Settings settings = settingsBuilder()
+ .loadFromDelimitedString("key1=value1;key2=value2", ';')
+ .build();
+ assertThat(settings.get("key1"), equalTo("value1"));
+ assertThat(settings.get("key2"), equalTo("value2"));
+ assertThat(settings.getAsMap().size(), equalTo(2));
+ assertThat(settings.toDelimitedString(';'), equalTo("key1=value1;key2=value2;"));
+
+ settings = settingsBuilder()
+ .loadFromDelimitedString("key1=value1;key2=value2;", ';')
+ .build();
+ assertThat(settings.get("key1"), equalTo("value1"));
+ assertThat(settings.get("key2"), equalTo("value2"));
+ assertThat(settings.getAsMap().size(), equalTo(2));
+ assertThat(settings.toDelimitedString(';'), equalTo("key1=value1;key2=value2;"));
+ }
+
+ @Test(expected = NoClassSettingsException.class)
+ public void testThatAllClassNotFoundExceptionsAreCaught() {
+ // this should be nGram in order to really work, but it should for sure not throw a NoClassDefFoundError
+ Settings settings = settingsBuilder().put("type", "ngram").build();
+ settings.getAsClass("type", null, "org.elasticsearch.index.analysis.", "TokenFilterFactory");
+ }
+
+ @Test
+ public void testReplacePropertiesPlaceholderSystemProperty() {
+ System.setProperty("sysProp1", "sysVal1");
+ try {
+ Settings settings = settingsBuilder()
+ .put("setting1", "${sysProp1}")
+ .replacePropertyPlaceholders()
+ .build();
+ assertThat(settings.get("setting1"), equalTo("sysVal1"));
+ } finally {
+ System.clearProperty("sysProp1");
+ }
+
+ Settings settings = settingsBuilder()
+ .put("setting1", "${sysProp1:defaultVal1}")
+ .replacePropertyPlaceholders()
+ .build();
+ assertThat(settings.get("setting1"), equalTo("defaultVal1"));
+
+ settings = settingsBuilder()
+ .put("setting1", "${sysProp1:}")
+ .replacePropertyPlaceholders()
+ .build();
+ assertThat(settings.get("setting1"), is(nullValue()));
+ }
+
+ @Test
+ public void testReplacePropertiesPlaceholderIgnoreEnvUnset() {
+ Settings settings = settingsBuilder()
+ .put("setting1", "${env.UNSET_ENV_VAR}")
+ .replacePropertyPlaceholders()
+ .build();
+ assertThat(settings.get("setting1"), is(nullValue()));
+ }
+
+ @Test
+ public void testUnFlattenedSettings() {
+ Settings settings = settingsBuilder()
+ .put("foo", "abc")
+ .put("bar", "def")
+ .put("baz.foo", "ghi")
+ .put("baz.bar", "jkl")
+ .putArray("baz.arr", "a", "b", "c")
+ .build();
+ Map<String, Object> map = settings.getAsStructuredMap();
+ assertThat(map.keySet(), Matchers.<String>hasSize(3));
+ assertThat(map, allOf(
+ Matchers.<String, Object>hasEntry("foo", "abc"),
+ Matchers.<String, Object>hasEntry("bar", "def")));
+
+ @SuppressWarnings("unchecked") Map<String, Object> bazMap = (Map<String, Object>) map.get("baz");
+ assertThat(bazMap.keySet(), Matchers.<String>hasSize(3));
+ assertThat(bazMap, allOf(
+ Matchers.<String, Object>hasEntry("foo", "ghi"),
+ Matchers.<String, Object>hasEntry("bar", "jkl")));
+ @SuppressWarnings("unchecked") List<String> bazArr = (List<String>) bazMap.get("arr");
+ assertThat(bazArr, contains("a", "b", "c"));
+
+ }
+
+ @Test
+ public void testFallbackToFlattenedSettings() {
+ Settings settings = settingsBuilder()
+ .put("foo", "abc")
+ .put("foo.bar", "def")
+ .put("foo.baz", "ghi").build();
+ Map<String, Object> map = settings.getAsStructuredMap();
+ assertThat(map.keySet(), Matchers.<String>hasSize(3));
+ assertThat(map, allOf(
+ Matchers.<String, Object>hasEntry("foo", "abc"),
+ Matchers.<String, Object>hasEntry("foo.bar", "def"),
+ Matchers.<String, Object>hasEntry("foo.baz", "ghi")));
+
+ settings = settingsBuilder()
+ .put("foo.bar", "def")
+ .put("foo", "abc")
+ .put("foo.baz", "ghi")
+ .build();
+ map = settings.getAsStructuredMap();
+ assertThat(map.keySet(), Matchers.<String>hasSize(3));
+ assertThat(map, allOf(
+ Matchers.<String, Object>hasEntry("foo", "abc"),
+ Matchers.<String, Object>hasEntry("foo.bar", "def"),
+ Matchers.<String, Object>hasEntry("foo.baz", "ghi")));
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/settings/bar/BarTestClass.java b/src/test/java/org/elasticsearch/common/settings/bar/BarTestClass.java
new file mode 100644
index 0000000..d4d5d14
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/settings/bar/BarTestClass.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.settings.bar;
+
+// used in ImmutableSettingsTests
+public class BarTestClass {
+}
diff --git a/src/test/java/org/elasticsearch/common/settings/foo/FooTestClass.java b/src/test/java/org/elasticsearch/common/settings/foo/FooTestClass.java
new file mode 100644
index 0000000..36f1527
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/settings/foo/FooTestClass.java
@@ -0,0 +1,24 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.settings.foo;
+
+// used in ImmutableSettingsTest
+public class FooTestClass {
+}
diff --git a/src/test/java/org/elasticsearch/common/settings/loader/JsonSettingsLoaderTests.java b/src/test/java/org/elasticsearch/common/settings/loader/JsonSettingsLoaderTests.java
new file mode 100644
index 0000000..c237b96
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/settings/loader/JsonSettingsLoaderTests.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.settings.loader;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ *
+ */
+public class JsonSettingsLoaderTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testSimpleJsonSettings() throws Exception {
+ Settings settings = settingsBuilder()
+ .loadFromClasspath("org/elasticsearch/common/settings/loader/test-settings.json")
+ .build();
+
+ assertThat(settings.get("test1.value1"), equalTo("value1"));
+ assertThat(settings.get("test1.test2.value2"), equalTo("value2"));
+ assertThat(settings.getAsInt("test1.test2.value3", -1), equalTo(2));
+
+ // check array
+ assertThat(settings.get("test1.test3.0"), equalTo("test3-1"));
+ assertThat(settings.get("test1.test3.1"), equalTo("test3-2"));
+ assertThat(settings.getAsArray("test1.test3").length, equalTo(2));
+ assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1"));
+ assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2"));
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java b/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java
new file mode 100644
index 0000000..d541d15
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/settings/loader/YamlSettingsLoaderTests.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.settings.loader;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ *
+ */
+public class YamlSettingsLoaderTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testSimpleYamlSettings() throws Exception {
+ Settings settings = settingsBuilder()
+ .loadFromClasspath("org/elasticsearch/common/settings/loader/test-settings.yml")
+ .build();
+
+ assertThat(settings.get("test1.value1"), equalTo("value1"));
+ assertThat(settings.get("test1.test2.value2"), equalTo("value2"));
+ assertThat(settings.getAsInt("test1.test2.value3", -1), equalTo(2));
+
+ // check array
+ assertThat(settings.get("test1.test3.0"), equalTo("test3-1"));
+ assertThat(settings.get("test1.test3.1"), equalTo("test3-2"));
+ assertThat(settings.getAsArray("test1.test3").length, equalTo(2));
+ assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1"));
+ assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2"));
+ }
+} \ No newline at end of file
diff --git a/src/test/java/org/elasticsearch/common/settings/loader/test-settings.json b/src/test/java/org/elasticsearch/common/settings/loader/test-settings.json
new file mode 100644
index 0000000..7190648
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/settings/loader/test-settings.json
@@ -0,0 +1,10 @@
+{
+ test1:{
+ value1:"value1",
+ test2:{
+ value2:"value2",
+ value3:2
+ },
+ test3:["test3-1", "test3-2"]
+ }
+} \ No newline at end of file
diff --git a/src/test/java/org/elasticsearch/common/settings/loader/test-settings.yml b/src/test/java/org/elasticsearch/common/settings/loader/test-settings.yml
new file mode 100644
index 0000000..b533ae0
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/settings/loader/test-settings.yml
@@ -0,0 +1,8 @@
+test1:
+ value1: value1
+ test2:
+ value2: value2
+ value3: 2
+ test3:
+ - test3-1
+ - test3-2
diff --git a/src/test/java/org/elasticsearch/common/unit/ByteSizeUnitTests.java b/src/test/java/org/elasticsearch/common/unit/ByteSizeUnitTests.java
new file mode 100644
index 0000000..8b39e4e
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/unit/ByteSizeUnitTests.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.unit;
+
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import static org.elasticsearch.common.unit.ByteSizeUnit.*;
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ *
+ */
+public class ByteSizeUnitTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testBytes() {
+ assertThat(BYTES.toBytes(1), equalTo(1l));
+ assertThat(BYTES.toKB(1024), equalTo(1l));
+ assertThat(BYTES.toMB(1024 * 1024), equalTo(1l));
+ assertThat(BYTES.toGB(1024 * 1024 * 1024), equalTo(1l));
+ }
+
+ @Test
+ public void testKB() {
+ assertThat(KB.toBytes(1), equalTo(1024l));
+ assertThat(KB.toKB(1), equalTo(1l));
+ assertThat(KB.toMB(1024), equalTo(1l));
+ assertThat(KB.toGB(1024 * 1024), equalTo(1l));
+ }
+
+ @Test
+ public void testMB() {
+ assertThat(MB.toBytes(1), equalTo(1024l * 1024));
+ assertThat(MB.toKB(1), equalTo(1024l));
+ assertThat(MB.toMB(1), equalTo(1l));
+ assertThat(MB.toGB(1024), equalTo(1l));
+ }
+
+ @Test
+ public void testGB() {
+ assertThat(GB.toBytes(1), equalTo(1024l * 1024 * 1024));
+ assertThat(GB.toKB(1), equalTo(1024l * 1024));
+ assertThat(GB.toMB(1), equalTo(1024l));
+ assertThat(GB.toGB(1), equalTo(1l));
+ }
+
+ @Test
+ public void testTB() {
+ assertThat(TB.toBytes(1), equalTo(1024l * 1024 * 1024 * 1024));
+ assertThat(TB.toKB(1), equalTo(1024l * 1024 * 1024));
+ assertThat(TB.toMB(1), equalTo(1024l * 1024));
+ assertThat(TB.toGB(1), equalTo(1024l));
+ assertThat(TB.toTB(1), equalTo(1l));
+ }
+
+ @Test
+ public void testPB() {
+ assertThat(PB.toBytes(1), equalTo(1024l * 1024 * 1024 * 1024 * 1024));
+ assertThat(PB.toKB(1), equalTo(1024l * 1024 * 1024 * 1024));
+ assertThat(PB.toMB(1), equalTo(1024l * 1024 * 1024));
+ assertThat(PB.toGB(1), equalTo(1024l * 1024));
+ assertThat(PB.toTB(1), equalTo(1024l));
+ assertThat(PB.toPB(1), equalTo(1l));
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java b/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java
new file mode 100644
index 0000000..0522f87
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.unit;
+
+import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.hamcrest.MatcherAssert;
+import org.junit.Test;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+
+/**
+ *
+ */
+public class ByteSizeValueTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testActualPeta() {
+ MatcherAssert.assertThat(new ByteSizeValue(4, ByteSizeUnit.PB).bytes(), equalTo(4503599627370496l));
+ }
+
+ @Test
+ public void testActualTera() {
+ MatcherAssert.assertThat(new ByteSizeValue(4, ByteSizeUnit.TB).bytes(), equalTo(4398046511104l));
+ }
+
+ @Test
+ public void testActual() {
+ MatcherAssert.assertThat(new ByteSizeValue(4, ByteSizeUnit.GB).bytes(), equalTo(4294967296l));
+ }
+
+ @Test
+ public void testSimple() {
+ assertThat(ByteSizeUnit.BYTES.toBytes(10), is(new ByteSizeValue(10, ByteSizeUnit.BYTES).bytes()));
+ assertThat(ByteSizeUnit.KB.toKB(10), is(new ByteSizeValue(10, ByteSizeUnit.KB).kb()));
+ assertThat(ByteSizeUnit.MB.toMB(10), is(new ByteSizeValue(10, ByteSizeUnit.MB).mb()));
+ assertThat(ByteSizeUnit.GB.toGB(10), is(new ByteSizeValue(10, ByteSizeUnit.GB).gb()));
+ assertThat(ByteSizeUnit.TB.toTB(10), is(new ByteSizeValue(10, ByteSizeUnit.TB).tb()));
+ assertThat(ByteSizeUnit.PB.toPB(10), is(new ByteSizeValue(10, ByteSizeUnit.PB).pb()));
+ }
+
+ @Test
+ public void testToString() {
+ assertThat("10b", is(new ByteSizeValue(10, ByteSizeUnit.BYTES).toString()));
+ assertThat("1.5kb", is(new ByteSizeValue((long) (1024 * 1.5), ByteSizeUnit.BYTES).toString()));
+ assertThat("1.5mb", is(new ByteSizeValue((long) (1024 * 1.5), ByteSizeUnit.KB).toString()));
+ assertThat("1.5gb", is(new ByteSizeValue((long) (1024 * 1.5), ByteSizeUnit.MB).toString()));
+ assertThat("1.5tb", is(new ByteSizeValue((long) (1024 * 1.5), ByteSizeUnit.GB).toString()));
+ assertThat("1.5pb", is(new ByteSizeValue((long) (1024 * 1.5), ByteSizeUnit.TB).toString()));
+ assertThat("1536pb", is(new ByteSizeValue((long) (1024 * 1.5), ByteSizeUnit.PB).toString()));
+ }
+
+ @Test
+ public void testParsing() {
+ assertThat(ByteSizeValue.parseBytesSizeValue("42pb").toString(), is("42pb"));
+ assertThat(ByteSizeValue.parseBytesSizeValue("42P").toString(), is("42pb"));
+ assertThat(ByteSizeValue.parseBytesSizeValue("42PB").toString(), is("42pb"));
+ assertThat(ByteSizeValue.parseBytesSizeValue("54tb").toString(), is("54tb"));
+ assertThat(ByteSizeValue.parseBytesSizeValue("54T").toString(), is("54tb"));
+ assertThat(ByteSizeValue.parseBytesSizeValue("54TB").toString(), is("54tb"));
+ assertThat(ByteSizeValue.parseBytesSizeValue("12gb").toString(), is("12gb"));
+ assertThat(ByteSizeValue.parseBytesSizeValue("12G").toString(), is("12gb"));
+ assertThat(ByteSizeValue.parseBytesSizeValue("12GB").toString(), is("12gb"));
+ assertThat(ByteSizeValue.parseBytesSizeValue("12M").toString(), is("12mb"));
+ assertThat(ByteSizeValue.parseBytesSizeValue("1b").toString(), is("1b"));
+ assertThat(ByteSizeValue.parseBytesSizeValue("23kb").toString(), is("23kb"));
+ assertThat(ByteSizeValue.parseBytesSizeValue("23k").toString(), is("23kb"));
+ assertThat(ByteSizeValue.parseBytesSizeValue("23").toString(), is("23b"));
+ }
+
+ @Test(expected = ElasticsearchParseException.class)
+ public void testFailOnEmptyParsing() {
+ assertThat(ByteSizeValue.parseBytesSizeValue("").toString(), is("23kb"));
+ }
+
+ @Test(expected = ElasticsearchParseException.class)
+ public void testFailOnEmptyNumberParsing() {
+ assertThat(ByteSizeValue.parseBytesSizeValue("g").toString(), is("23b"));
+ }
+} \ No newline at end of file
diff --git a/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java b/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java
new file mode 100644
index 0000000..d84107e
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/unit/DistanceUnitTests.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.unit;
+
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import static org.hamcrest.Matchers.closeTo;
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ *
+ */
+public class DistanceUnitTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testSimpleDistanceUnit() {
+ assertThat(DistanceUnit.KILOMETERS.convert(10, DistanceUnit.MILES), closeTo(16.09344, 0.001));
+ assertThat(DistanceUnit.MILES.convert(10, DistanceUnit.MILES), closeTo(10, 0.001));
+ assertThat(DistanceUnit.MILES.convert(10, DistanceUnit.KILOMETERS), closeTo(6.21371192, 0.001));
+ assertThat(DistanceUnit.KILOMETERS.convert(10, DistanceUnit.KILOMETERS), closeTo(10, 0.001));
+ assertThat(DistanceUnit.KILOMETERS.convert(10, DistanceUnit.METERS), closeTo(0.01, 0.00001));
+ assertThat(DistanceUnit.KILOMETERS.convert(1000,DistanceUnit.METERS), closeTo(1, 0.001));
+ assertThat(DistanceUnit.METERS.convert(1, DistanceUnit.KILOMETERS), closeTo(1000, 0.001));
+ }
+
+ @Test
+ public void testDistanceUnitParsing() {
+ assertThat(DistanceUnit.Distance.parseDistance("50km").unit, equalTo(DistanceUnit.KILOMETERS));
+ assertThat(DistanceUnit.Distance.parseDistance("500m").unit, equalTo(DistanceUnit.METERS));
+ assertThat(DistanceUnit.Distance.parseDistance("51mi").unit, equalTo(DistanceUnit.MILES));
+ assertThat(DistanceUnit.Distance.parseDistance("52yd").unit, equalTo(DistanceUnit.YARD));
+ assertThat(DistanceUnit.Distance.parseDistance("12in").unit, equalTo(DistanceUnit.INCH));
+ assertThat(DistanceUnit.Distance.parseDistance("23mm").unit, equalTo(DistanceUnit.MILLIMETERS));
+ assertThat(DistanceUnit.Distance.parseDistance("23cm").unit, equalTo(DistanceUnit.CENTIMETERS));
+
+ double testValue = 12345.678;
+ for (DistanceUnit unit : DistanceUnit.values()) {
+ assertThat("Unit can be parsed from '" + unit.toString() + "'", DistanceUnit.fromString(unit.toString()), equalTo(unit));
+ assertThat("Unit can be parsed from '" + testValue + unit.toString() + "'", DistanceUnit.fromString(unit.toString()), equalTo(unit));
+ assertThat("Value can be parsed from '" + testValue + unit.toString() + "'", DistanceUnit.Distance.parseDistance(unit.toString(testValue)).value, equalTo(testValue));
+ }
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java b/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java
new file mode 100644
index 0000000..448d052
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java
@@ -0,0 +1,199 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.common.unit;
+
+import org.elasticsearch.common.xcontent.XContent;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.hamcrest.CoreMatchers.*;
+import static org.hamcrest.number.IsCloseTo.closeTo;
+
+public class FuzzinessTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testNumerics() {
+ String[] options = new String[]{"1.0", "1", "1.000000"};
+ assertThat(Fuzziness.build(randomFrom(options)).asByte(), equalTo((byte) 1));
+ assertThat(Fuzziness.build(randomFrom(options)).asInt(), equalTo(1));
+ assertThat(Fuzziness.build(randomFrom(options)).asFloat(), equalTo(1f));
+ assertThat(Fuzziness.build(randomFrom(options)).asDouble(), equalTo(1d));
+ assertThat(Fuzziness.build(randomFrom(options)).asLong(), equalTo(1l));
+ assertThat(Fuzziness.build(randomFrom(options)).asShort(), equalTo((short) 1));
+ }
+
+ @Test
+ public void testParseFromXContent() throws IOException {
+ final int iters = atLeast(10);
+ for (int i = 0; i < iters; i++) {
+ {
+ XContent xcontent = XContentType.JSON.xContent();
+ float floatValue = randomFloat();
+ String json = jsonBuilder().startObject()
+ .field(Fuzziness.X_FIELD_NAME, floatValue)
+ .endObject().string();
+ XContentParser parser = xcontent.createParser(json);
+ assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
+ assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
+ assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_NUMBER));
+ Fuzziness parse = Fuzziness.parse(parser);
+ assertThat(parse.asFloat(), equalTo(floatValue));
+ assertThat(parse.asDouble(), closeTo((double) floatValue, 0.000001));
+ assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT));
+ }
+
+ {
+ XContent xcontent = XContentType.JSON.xContent();
+ Integer intValue = frequently() ? randomIntBetween(0, 2) : randomIntBetween(0, 100);
+ Float floatRep = randomFloat();
+ Number value = intValue;
+ if (randomBoolean()) {
+ value = new Float(floatRep += intValue);
+ }
+ String json = jsonBuilder().startObject()
+ .field(Fuzziness.X_FIELD_NAME, randomBoolean() ? value.toString() : value)
+ .endObject().string();
+ XContentParser parser = xcontent.createParser(json);
+ assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
+ assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
+ assertThat(parser.nextToken(), anyOf(equalTo(XContentParser.Token.VALUE_NUMBER), equalTo(XContentParser.Token.VALUE_STRING)));
+ Fuzziness parse = Fuzziness.parse(parser);
+ assertThat(parse.asInt(), equalTo(intValue));
+ assertThat((int) parse.asShort(), equalTo(intValue));
+ assertThat((int) parse.asByte(), equalTo(intValue));
+ assertThat(parse.asLong(), equalTo((long) intValue));
+ if (value.intValue() >= 1) {
+ assertThat(parse.asDistance(), equalTo(Math.min(2, intValue)));
+ }
+ assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT));
+ if (intValue.equals(value)) {
+ switch (intValue) {
+ case 1:
+ assertThat(parse, sameInstance(Fuzziness.ONE));
+ break;
+ case 2:
+ assertThat(parse, sameInstance(Fuzziness.TWO));
+ break;
+ case 0:
+ assertThat(parse, sameInstance(Fuzziness.ZERO));
+ break;
+ default:
+ break;
+ }
+ }
+ }
+ {
+ XContent xcontent = XContentType.JSON.xContent();
+ String json = jsonBuilder().startObject()
+ .field(Fuzziness.X_FIELD_NAME, randomBoolean() ? "AUTO" : "auto")
+ .endObject().string();
+ if (randomBoolean()) {
+ json = Fuzziness.AUTO.toXContent(jsonBuilder().startObject(), null).endObject().string();
+ }
+ XContentParser parser = xcontent.createParser(json);
+ assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
+ assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
+ assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_STRING));
+ Fuzziness parse = Fuzziness.parse(parser);
+ assertThat(parse, sameInstance(Fuzziness.AUTO));
+ assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT));
+ }
+
+ {
+ String[] values = new String[]{"d", "H", "ms", "s", "S", "w"};
+ String actual = randomIntBetween(1, 3) + randomFrom(values);
+ XContent xcontent = XContentType.JSON.xContent();
+ String json = jsonBuilder().startObject()
+ .field(Fuzziness.X_FIELD_NAME, actual)
+ .endObject().string();
+ XContentParser parser = xcontent.createParser(json);
+ assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
+ assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
+ assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_STRING));
+ Fuzziness parse = Fuzziness.parse(parser);
+ assertThat(parse.asTimeValue(), equalTo(TimeValue.parseTimeValue(actual, null)));
+ assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT));
+ }
+ }
+
+ }
+
+ @Test
+ public void testAuto() {
+ final int codePoints = randomIntBetween(0, 10);
+ String string = randomRealisticUnicodeOfCodepointLength(codePoints);
+ if (codePoints <= 2) {
+ assertThat(Fuzziness.AUTO.asDistance(string), equalTo(0));
+ assertThat(Fuzziness.fromSimilarity(Fuzziness.AUTO.asSimilarity(string)).asDistance(string), equalTo(0));
+ } else if (codePoints > 5) {
+ assertThat(Fuzziness.AUTO.asDistance(string), equalTo(2));
+ assertThat(Fuzziness.fromSimilarity(Fuzziness.AUTO.asSimilarity(string)).asDistance(string), equalTo(2));
+ } else {
+ assertThat(Fuzziness.AUTO.asDistance(string), equalTo(1));
+ assertThat(Fuzziness.fromSimilarity(Fuzziness.AUTO.asSimilarity(string)).asDistance(string), equalTo(1));
+ }
+ assertThat(Fuzziness.AUTO.asByte(), equalTo((byte) 1));
+ assertThat(Fuzziness.AUTO.asInt(), equalTo(1));
+ assertThat(Fuzziness.AUTO.asFloat(), equalTo(1f));
+ assertThat(Fuzziness.AUTO.asDouble(), equalTo(1d));
+ assertThat(Fuzziness.AUTO.asLong(), equalTo(1l));
+ assertThat(Fuzziness.AUTO.asShort(), equalTo((short) 1));
+ assertThat(Fuzziness.AUTO.asTimeValue(), equalTo(TimeValue.parseTimeValue("1", TimeValue.timeValueMillis(1))));
+
+ }
+
+ @Test
+ public void testAsDistance() {
+ final int iters = atLeast(10);
+ for (int i = 0; i < iters; i++) {
+ Integer integer = Integer.valueOf(randomIntBetween(0, 10));
+ String value = "" + (randomBoolean() ? integer.intValue() : integer.floatValue());
+ assertThat(Fuzziness.build(value).asDistance(), equalTo(Math.min(2, integer.intValue())));
+ }
+ }
+
+ @Test
+ public void testSimilarityToDistance() {
+ assertThat(Fuzziness.fromSimilarity(0.5f).asDistance("ab"), equalTo(1));
+ assertThat(Fuzziness.fromSimilarity(0.66f).asDistance("abcefg"), equalTo(2));
+ assertThat(Fuzziness.fromSimilarity(0.8f).asDistance("ab"), equalTo(0));
+ assertThat(Fuzziness.fromSimilarity(0.8f).asDistance("abcefg"), equalTo(1));
+ assertThat((double) Fuzziness.ONE.asSimilarity("abcefg"), closeTo(0.8f, 0.05));
+ assertThat((double) Fuzziness.TWO.asSimilarity("abcefg"), closeTo(0.66f, 0.05));
+ assertThat((double) Fuzziness.ONE.asSimilarity("ab"), closeTo(0.5f, 0.05));
+
+ int iters = atLeast(100);
+ for (int i = 0; i < iters; i++) {
+ Fuzziness fuzziness = Fuzziness.fromEdits(between(1, 2));
+ String string = rarely() ? randomRealisticUnicodeOfLengthBetween(2, 4) :
+ randomRealisticUnicodeOfLengthBetween(4, 10);
+ float similarity = fuzziness.asSimilarity(string);
+ if (similarity != 0.0f) {
+ Fuzziness similarityBased = Fuzziness.build(similarity);
+ assertThat((double) similarityBased.asSimilarity(string), closeTo(similarity, 0.05));
+ assertThat(similarityBased.asDistance(string), equalTo(Math.min(2, fuzziness.asDistance(string))));
+ }
+ }
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java b/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java
new file mode 100644
index 0000000..6ca424a
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/unit/TimeValueTests.java
@@ -0,0 +1,69 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.unit;
+
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.joda.time.PeriodType;
+import org.junit.Test;
+
+import java.util.concurrent.TimeUnit;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.lessThan;
+
+/**
+ *
+ */
+public class TimeValueTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testSimple() {
+ assertThat(TimeUnit.MILLISECONDS.toMillis(10), equalTo(new TimeValue(10, TimeUnit.MILLISECONDS).millis()));
+ assertThat(TimeUnit.MICROSECONDS.toMicros(10), equalTo(new TimeValue(10, TimeUnit.MICROSECONDS).micros()));
+ assertThat(TimeUnit.SECONDS.toSeconds(10), equalTo(new TimeValue(10, TimeUnit.SECONDS).seconds()));
+ assertThat(TimeUnit.MINUTES.toMinutes(10), equalTo(new TimeValue(10, TimeUnit.MINUTES).minutes()));
+ assertThat(TimeUnit.HOURS.toHours(10), equalTo(new TimeValue(10, TimeUnit.HOURS).hours()));
+ assertThat(TimeUnit.DAYS.toDays(10), equalTo(new TimeValue(10, TimeUnit.DAYS).days()));
+ }
+
+ @Test
+ public void testToString() {
+ assertThat("10ms", equalTo(new TimeValue(10, TimeUnit.MILLISECONDS).toString()));
+ assertThat("1.5s", equalTo(new TimeValue(1533, TimeUnit.MILLISECONDS).toString()));
+ assertThat("1.5m", equalTo(new TimeValue(90, TimeUnit.SECONDS).toString()));
+ assertThat("1.5h", equalTo(new TimeValue(90, TimeUnit.MINUTES).toString()));
+ assertThat("1.5d", equalTo(new TimeValue(36, TimeUnit.HOURS).toString()));
+ assertThat("1000d", equalTo(new TimeValue(1000, TimeUnit.DAYS).toString()));
+ }
+
+ @Test
+ public void testFormat() {
+ assertThat(new TimeValue(1025, TimeUnit.MILLISECONDS).format(PeriodType.dayTime()), equalTo("1 second and 25 milliseconds"));
+ assertThat(new TimeValue(1, TimeUnit.MINUTES).format(PeriodType.dayTime()), equalTo("1 minute"));
+ assertThat(new TimeValue(65, TimeUnit.MINUTES).format(PeriodType.dayTime()), equalTo("1 hour and 5 minutes"));
+ assertThat(new TimeValue(24 * 600 + 85, TimeUnit.MINUTES).format(PeriodType.dayTime()), equalTo("241 hours and 25 minutes"));
+ }
+
+ @Test
+ public void testMinusOne() {
+ assertThat(new TimeValue(-1).nanos(), lessThan(0l));
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/util/BigArraysTests.java b/src/test/java/org/elasticsearch/common/util/BigArraysTests.java
new file mode 100644
index 0000000..514caad
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/util/BigArraysTests.java
@@ -0,0 +1,195 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.util;
+
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.cache.recycler.MockPageCacheRecycler;
+import org.elasticsearch.cache.recycler.PageCacheRecycler;
+import org.elasticsearch.common.settings.ImmutableSettings;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.elasticsearch.threadpool.ThreadPool;
+
+import java.util.Arrays;
+
+public class BigArraysTests extends ElasticsearchTestCase {
+
+ // Randomly returns null (unpooled allocation path) or a mock recycler (pooled
+ // path), so each test covers both BigArrays code paths across runs.
+ // NOTE(review): the ThreadPool created here is presumably torn down by the
+ // mock recycler / test framework -- confirm, otherwise threads leak per test.
+ public static PageCacheRecycler randomCacheRecycler() {
+ return randomBoolean() ? null : new MockPageCacheRecycler(ImmutableSettings.EMPTY, new ThreadPool());
+ }
+
+ // Grows a ByteArray one element at a time via BigArrays.grow and checks
+ // contents against a plain byte[] reference.
+ public void testByteArrayGrowth() {
+ final int totalLen = randomIntBetween(1, 4000000);
+ final int startLen = randomIntBetween(1, randomBoolean() ? 1000 : totalLen);
+ ByteArray array = BigArrays.newByteArray(startLen, randomCacheRecycler(), randomBoolean());
+ byte[] ref = new byte[totalLen];
+ for (int i = 0; i < totalLen; ++i) {
+ ref[i] = randomByte();
+ array = BigArrays.grow(array, i + 1);
+ array.set(i, ref[i]);
+ }
+ for (int i = 0; i < totalLen; ++i) {
+ assertEquals(ref[i], array.get(i));
+ }
+ array.release();
+ }
+
+ // Same incremental-growth check for IntArray.
+ public void testIntArrayGrowth() {
+ final int totalLen = randomIntBetween(1, 1000000);
+ final int startLen = randomIntBetween(1, randomBoolean() ? 1000 : totalLen);
+ IntArray array = BigArrays.newIntArray(startLen, randomCacheRecycler(), randomBoolean());
+ int[] ref = new int[totalLen];
+ for (int i = 0; i < totalLen; ++i) {
+ ref[i] = randomInt();
+ array = BigArrays.grow(array, i + 1);
+ array.set(i, ref[i]);
+ }
+ for (int i = 0; i < totalLen; ++i) {
+ assertEquals(ref[i], array.get(i));
+ }
+ array.release();
+ }
+
+ // Same incremental-growth check for LongArray.
+ public void testLongArrayGrowth() {
+ final int totalLen = randomIntBetween(1, 1000000);
+ final int startLen = randomIntBetween(1, randomBoolean() ? 1000 : totalLen);
+ LongArray array = BigArrays.newLongArray(startLen, randomCacheRecycler(), randomBoolean());
+ long[] ref = new long[totalLen];
+ for (int i = 0; i < totalLen; ++i) {
+ ref[i] = randomLong();
+ array = BigArrays.grow(array, i + 1);
+ array.set(i, ref[i]);
+ }
+ for (int i = 0; i < totalLen; ++i) {
+ assertEquals(ref[i], array.get(i));
+ }
+ array.release();
+ }
+
+ // Same incremental-growth check for DoubleArray (tolerance on compare).
+ public void testDoubleArrayGrowth() {
+ final int totalLen = randomIntBetween(1, 1000000);
+ final int startLen = randomIntBetween(1, randomBoolean() ? 1000 : totalLen);
+ DoubleArray array = BigArrays.newDoubleArray(startLen, randomCacheRecycler(), randomBoolean());
+ double[] ref = new double[totalLen];
+ for (int i = 0; i < totalLen; ++i) {
+ ref[i] = randomDouble();
+ array = BigArrays.grow(array, i + 1);
+ array.set(i, ref[i]);
+ }
+ for (int i = 0; i < totalLen; ++i) {
+ assertEquals(ref[i], array.get(i), 0.001d);
+ }
+ array.release();
+ }
+
+ // Same incremental-growth check for ObjectArray; uses assertSame so identity
+ // (not just equality) of stored references is preserved.
+ public void testObjectArrayGrowth() {
+ final int totalLen = randomIntBetween(1, 1000000);
+ final int startLen = randomIntBetween(1, randomBoolean() ? 1000 : totalLen);
+ ObjectArray<Object> array = BigArrays.newObjectArray(startLen, randomCacheRecycler());
+ final Object[] pool = new Object[100];
+ for (int i = 0; i < pool.length; ++i) {
+ pool[i] = new Object();
+ }
+ Object[] ref = new Object[totalLen];
+ for (int i = 0; i < totalLen; ++i) {
+ ref[i] = randomFrom(pool);
+ array = BigArrays.grow(array, i + 1);
+ array.set(i, ref[i]);
+ }
+ for (int i = 0; i < totalLen; ++i) {
+ assertSame(ref[i], array.get(i));
+ }
+ array.release();
+ }
+
+ // Checks DoubleArray.fill against Arrays.fill over a random range that is
+ // either page-local or likely spans multiple pages.
+ public void testDoubleArrayFill() {
+ final int len = randomIntBetween(1, 100000);
+ final int fromIndex = randomIntBetween(0, len - 1);
+ final int toIndex = randomBoolean()
+ ? Math.min(fromIndex + randomInt(100), len) // single page
+ : randomIntBetween(fromIndex, len); // likely multiple pages
+ final DoubleArray array2 = BigArrays.newDoubleArray(len, randomCacheRecycler(), randomBoolean());
+ final double[] array1 = new double[len];
+ for (int i = 0; i < len; ++i) {
+ array1[i] = randomDouble();
+ array2.set(i, array1[i]);
+ }
+ final double rand = randomDouble();
+ Arrays.fill(array1, fromIndex, toIndex, rand);
+ array2.fill(fromIndex, toIndex, rand);
+ for (int i = 0; i < len; ++i) {
+ assertEquals(array1[i], array2.get(i), 0.001d);
+ }
+ array2.release();
+ }
+
+ // Same fill check for LongArray (exact compare).
+ public void testLongArrayFill() {
+ final int len = randomIntBetween(1, 100000);
+ final int fromIndex = randomIntBetween(0, len - 1);
+ final int toIndex = randomBoolean()
+ ? Math.min(fromIndex + randomInt(100), len) // single page
+ : randomIntBetween(fromIndex, len); // likely multiple pages
+ final LongArray array2 = BigArrays.newLongArray(len, randomCacheRecycler(), randomBoolean());
+ final long[] array1 = new long[len];
+ for (int i = 0; i < len; ++i) {
+ array1[i] = randomLong();
+ array2.set(i, array1[i]);
+ }
+ final long rand = randomLong();
+ Arrays.fill(array1, fromIndex, toIndex, rand);
+ array2.fill(fromIndex, toIndex, rand);
+ for (int i = 0; i < len; ++i) {
+ assertEquals(array1[i], array2.get(i));
+ }
+ array2.release();
+ }
+
+ // Bulk get(offset, len, BytesRef) must match a BytesRef view over the
+ // reference array at 1000 random offsets/lengths.
+ public void testByteArrayBulkGet() {
+ final byte[] array1 = new byte[randomIntBetween(1, 4000000)];
+ getRandom().nextBytes(array1);
+ final ByteArray array2 = BigArrays.newByteArray(array1.length, randomCacheRecycler(), randomBoolean());
+ for (int i = 0; i < array1.length; ++i) {
+ array2.set(i, array1[i]);
+ }
+ final BytesRef ref = new BytesRef();
+ for (int i = 0; i < 1000; ++i) {
+ final int offset = randomInt(array1.length - 1);
+ final int len = randomInt(Math.min(randomBoolean() ? 10 : Integer.MAX_VALUE, array1.length - offset));
+ array2.get(offset, len, ref);
+ assertEquals(new BytesRef(array1, offset, len), ref);
+ }
+ array2.release();
+ }
+
+ // Bulk set in random-sized chunks (sometimes larger than a page, to cross
+ // page boundaries), then verify element-wise.
+ public void testByteArrayBulkSet() {
+ final byte[] array1 = new byte[randomIntBetween(1, 4000000)];
+ getRandom().nextBytes(array1);
+ final ByteArray array2 = BigArrays.newByteArray(array1.length, randomCacheRecycler(), randomBoolean());
+ for (int i = 0; i < array1.length; ) {
+ final int len = Math.min(array1.length - i, randomBoolean() ? randomInt(10) : randomInt(3 * BigArrays.BYTE_PAGE_SIZE));
+ array2.set(i, array1, i, len);
+ i += len;
+ }
+ for (int i = 0; i < array1.length; ++i) {
+ assertEquals(array1[i], array2.get(i));
+ }
+ array2.release();
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/util/ByteUtilsTests.java b/src/test/java/org/elasticsearch/common/util/ByteUtilsTests.java
new file mode 100644
index 0000000..7331962
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/util/ByteUtilsTests.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.util;
+
+import org.apache.lucene.store.ByteArrayDataInput;
+import org.apache.lucene.store.ByteArrayDataOutput;
+import org.elasticsearch.test.ElasticsearchTestCase;
+
+import java.io.IOException;
+
+public class ByteUtilsTests extends ElasticsearchTestCase {
+
+ // Round-trip helper: encode then decode must return the input. Takes a
+ // parameter, so despite the test* name it is presumably not picked up as a
+ // test case by the runner (only the no-arg overload below is) -- confirm.
+ public void testZigZag(long l) {
+ assertEquals(l, ByteUtils.zigZagDecode(ByteUtils.zigZagEncode(l)));
+ }
+
+ // Round-trips boundary values and random longs; also checks that zig-zag
+ // encoding of small magnitudes (positive or negative) is non-negative.
+ public void testZigZag() {
+ testZigZag(0);
+ testZigZag(1);
+ testZigZag(-1);
+ testZigZag(Long.MAX_VALUE);
+ testZigZag(Long.MIN_VALUE);
+ for (int i = 0; i < 1000; ++i) {
+ testZigZag(randomLong());
+ assertTrue(ByteUtils.zigZagEncode(randomInt(1000)) >= 0);
+ assertTrue(ByteUtils.zigZagEncode(-randomInt(1000)) >= 0);
+ }
+ }
+
+ // Little-endian float write/read round-trip over a packed byte[].
+ public void testFloat() throws IOException {
+ final float[] data = new float[atLeast(1000)];
+ final byte[] encoded = new byte[data.length * 4];
+ for (int i = 0; i < data.length; ++i) {
+ data[i] = randomFloat();
+ ByteUtils.writeFloatLE(data[i], encoded, i * 4);
+ }
+ for (int i = 0; i < data.length; ++i) {
+ assertEquals(data[i], ByteUtils.readFloatLE(encoded, i * 4), Float.MIN_VALUE);
+ }
+ }
+
+ // Little-endian double write/read round-trip over a packed byte[].
+ public void testDouble() throws IOException {
+ final double[] data = new double[atLeast(1000)];
+ final byte[] encoded = new byte[data.length * 8];
+ for (int i = 0; i < data.length; ++i) {
+ data[i] = randomDouble();
+ ByteUtils.writeDoubleLE(data[i], encoded, i * 8);
+ }
+ for (int i = 0; i < data.length; ++i) {
+ assertEquals(data[i], ByteUtils.readDoubleLE(encoded, i * 8), Double.MIN_VALUE);
+ }
+ }
+
+ // Variable-length long round-trip; mixes boundary values, small values and
+ // random longs, and asserts negative values take the maximum encoded width.
+ public void testVLong() throws IOException {
+ final long[] data = new long[atLeast(1000)];
+ for (int i = 0; i < data.length; ++i) {
+ switch (randomInt(4)) {
+ case 0:
+ data[i] = 0;
+ break;
+ case 1:
+ data[i] = Long.MAX_VALUE;
+ break;
+ case 2:
+ data[i] = Long.MIN_VALUE;
+ break;
+ case 3:
+ data[i] = randomInt(1 << randomIntBetween(2,30));
+ break;
+ case 4:
+ data[i] = randomLong();
+ break;
+ default:
+ throw new AssertionError();
+ }
+ }
+ final byte[] encoded = new byte[ByteUtils.MAX_BYTES_VLONG * data.length];
+ ByteArrayDataOutput out = new ByteArrayDataOutput(encoded);
+ for (int i = 0; i < data.length; ++i) {
+ final int pos = out.getPosition();
+ ByteUtils.writeVLong(out, data[i]);
+ if (data[i] < 0) {
+ assertEquals(ByteUtils.MAX_BYTES_VLONG, out.getPosition() - pos);
+ }
+ }
+ final ByteArrayDataInput in = new ByteArrayDataInput(encoded);
+ for (int i = 0; i < data.length; ++i) {
+ assertEquals(data[i], ByteUtils.readVLong(in));
+ }
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java b/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java
new file mode 100644
index 0000000..c541c3e
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.util;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Iterables;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+
+public class CollectionUtilsTests extends ElasticsearchTestCase {
+
+ // Rotating an empty list by any distance yields an empty list.
+ @Test
+ public void rotateEmpty() {
+ assertTrue(CollectionUtils.rotate(ImmutableList.of(), randomInt()).isEmpty());
+ }
+
+ // Rotation must preserve size and element set, be deterministic for a given
+ // distance, and be undone by rotating back with the negated distance.
+ @Test
+ public void rotate() {
+ final int iters = scaledRandomIntBetween(10, 100);
+ for (int k = 0; k < iters; ++k) {
+ final int size = randomIntBetween(1, 100);
+ final int distance = randomInt();
+ List<Object> list = new ArrayList<Object>();
+ for (int i = 0; i < size; ++i) {
+ list.add(new Object());
+ }
+ final List<Object> rotated = CollectionUtils.rotate(list, distance);
+ // check content is the same
+ assertEquals(rotated.size(), list.size());
+ assertEquals(Iterables.size(rotated), list.size());
+ assertEquals(new HashSet<Object>(rotated), new HashSet<Object>(list));
+ // check stability
+ for (int j = randomInt(4); j >= 0; --j) {
+ assertEquals(rotated, CollectionUtils.rotate(list, distance));
+ }
+ // reverse
+ // skipped for Integer.MIN_VALUE: -Integer.MIN_VALUE overflows back to
+ // Integer.MIN_VALUE, so the inverse rotation cannot be expressed.
+ if (distance != Integer.MIN_VALUE) {
+ assertEquals(list, CollectionUtils.rotate(CollectionUtils.rotate(list, distance), -distance));
+ }
+ }
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/util/SlicedDoubleListTests.java b/src/test/java/org/elasticsearch/common/util/SlicedDoubleListTests.java
new file mode 100644
index 0000000..e17725b
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/util/SlicedDoubleListTests.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.common.util;
+
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ * Tests for {@link SlicedDoubleList}
+ */
+public class SlicedDoubleListTests extends ElasticsearchTestCase {
+
+ // Both constructors must expose consistent length/offset/size and keep the
+ // backing values array at the given capacity.
+ @Test
+ public void testCapacity() {
+ SlicedDoubleList list = new SlicedDoubleList(5);
+ assertThat(list.length, equalTo(5));
+ assertThat(list.offset, equalTo(0));
+ assertThat(list.values.length, equalTo(5));
+ assertThat(list.size(), equalTo(5));
+
+
+ list = new SlicedDoubleList(new double[10], 5, 5);
+ assertThat(list.length, equalTo(5));
+ assertThat(list.offset, equalTo(5));
+ assertThat(list.size(), equalTo(5));
+ assertThat(list.values.length, equalTo(10));
+ }
+
+ // Grows the backing array past its initial capacity and verifies the values
+ // are visible through the iterator, get(i), and the raw values array alike.
+ @Test
+ public void testGrow() {
+ SlicedDoubleList list = new SlicedDoubleList(5);
+ list.length = 1000;
+ for (int i = 0; i < list.length; i++) {
+ list.grow(i+1);
+ list.values[i] = ((double)i);
+ }
+ int expected = 0;
+ for (Double d : list) {
+ assertThat((double)expected++, equalTo(d));
+ }
+
+ for (int i = 0; i < list.length; i++) {
+ assertThat((double)i, equalTo(list.get(i)));
+ }
+
+ int count = 0;
+ for (int i = list.offset; i < list.offset+list.length; i++) {
+ assertThat((double)count++, equalTo(list.values[i]));
+ }
+ }
+
+ // Values repeat every 100 entries, so 99.0 first appears at 99 and last at
+ // 999; 100.0 never occurs.
+ @Test
+ public void testIndexOf() {
+ SlicedDoubleList list = new SlicedDoubleList(5);
+ list.length = 1000;
+ for (int i = 0; i < list.length; i++) {
+ list.grow(i+1);
+ list.values[i] = ((double)i%100);
+ }
+
+ assertThat(999, equalTo(list.lastIndexOf(99.0d)));
+ assertThat(99, equalTo(list.indexOf(99.0d)));
+
+ assertThat(-1, equalTo(list.lastIndexOf(100.0d)));
+ assertThat(-1, equalTo(list.indexOf(100.0d)));
+ }
+
+ // NOTE(review): no @Test annotation; presumably the runner still picks this
+ // up via the test* naming convention -- confirm it actually executes.
+ public void testIsEmpty() {
+ SlicedDoubleList list = new SlicedDoubleList(5);
+ assertThat(false, equalTo(list.isEmpty()));
+ list.length = 0;
+ assertThat(true, equalTo(list.isEmpty()));
+ }
+
+ // The list view is read-only: set and add must throw.
+ @Test
+ public void testSet() {
+ SlicedDoubleList list = new SlicedDoubleList(5);
+ try {
+ list.set(0, (double)4);
+ fail();
+ } catch (UnsupportedOperationException ex) {
+ }
+ try {
+ list.add((double)4);
+ fail();
+ } catch (UnsupportedOperationException ex) {
+ }
+ }
+
+ // toString must render like a regular java.util.List of doubles.
+ @Test
+ public void testToString() {
+ SlicedDoubleList list = new SlicedDoubleList(5);
+ assertThat("[0.0, 0.0, 0.0, 0.0, 0.0]", equalTo(list.toString()));
+ for (int i = 0; i < list.length; i++) {
+ list.grow(i+1);
+ list.values[i] = ((double)i);
+ }
+ assertThat("[0.0, 1.0, 2.0, 3.0, 4.0]", equalTo(list.toString()));
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/util/SlicedLongListTests.java b/src/test/java/org/elasticsearch/common/util/SlicedLongListTests.java
new file mode 100644
index 0000000..2669501
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/util/SlicedLongListTests.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.common.util;
+
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ * Tests for {@link SlicedLongList}
+ */
+public class SlicedLongListTests extends ElasticsearchTestCase {
+
+ // Both constructors must expose consistent length/offset/size and keep the
+ // backing values array at the given capacity.
+ @Test
+ public void testCapacity() {
+ SlicedLongList list = new SlicedLongList(5);
+ assertThat(list.length, equalTo(5));
+ assertThat(list.offset, equalTo(0));
+ assertThat(list.values.length, equalTo(5));
+ assertThat(list.size(), equalTo(5));
+
+ list = new SlicedLongList(new long[10], 5, 5);
+ assertThat(list.length, equalTo(5));
+ assertThat(list.offset, equalTo(5));
+ assertThat(list.size(), equalTo(5));
+ assertThat(list.values.length, equalTo(10));
+ }
+
+ // Grows the backing array past its initial capacity and verifies the values
+ // are visible through the iterator, get(i), and the raw values array alike.
+ @Test
+ public void testGrow() {
+ SlicedLongList list = new SlicedLongList(5);
+ list.length = 1000;
+ for (int i = 0; i < list.length; i++) {
+ list.grow(i+1);
+ list.values[i] = ((long)i);
+ }
+ int expected = 0;
+ for (Long d : list) {
+ assertThat((long)expected++, equalTo(d));
+ }
+
+ for (int i = 0; i < list.length; i++) {
+ assertThat((long)i, equalTo(list.get(i)));
+ }
+
+ int count = 0;
+ for (int i = list.offset; i < list.offset+list.length; i++) {
+ assertThat((long)count++, equalTo(list.values[i]));
+ }
+ }
+
+ // The list view is read-only: set and add must throw.
+ @Test
+ public void testSet() {
+ SlicedLongList list = new SlicedLongList(5);
+ try {
+ list.set(0, (long)4);
+ fail();
+ } catch (UnsupportedOperationException ex) {
+ }
+ try {
+ list.add((long)4);
+ fail();
+ } catch (UnsupportedOperationException ex) {
+ }
+ }
+
+ // Values repeat every 100 entries, so 99 first appears at 99 and last at
+ // 999; 100 never occurs.
+ @Test
+ public void testIndexOf() {
+ SlicedLongList list = new SlicedLongList(5);
+ list.length = 1000;
+ for (int i = 0; i < list.length; i++) {
+ list.grow(i+1);
+ list.values[i] = ((long)i%100);
+ }
+
+ assertThat(999, equalTo(list.lastIndexOf(99l)));
+ assertThat(99, equalTo(list.indexOf(99l)));
+
+ assertThat(-1, equalTo(list.lastIndexOf(100l)));
+ assertThat(-1, equalTo(list.indexOf(100l)));
+ }
+
+ // NOTE(review): no @Test annotation; presumably the runner still picks this
+ // up via the test* naming convention -- confirm it actually executes.
+ public void testIsEmpty() {
+ SlicedLongList list = new SlicedLongList(5);
+ assertThat(false, equalTo(list.isEmpty()));
+ list.length = 0;
+ assertThat(true, equalTo(list.isEmpty()));
+ }
+
+ // toString must render like a regular java.util.List of longs.
+ @Test
+ public void testToString() {
+ SlicedLongList list = new SlicedLongList(5);
+ assertThat("[0, 0, 0, 0, 0]", equalTo(list.toString()));
+ for (int i = 0; i < list.length; i++) {
+ list.grow(i+1);
+ list.values[i] = ((long)i);
+ }
+ assertThat("[0, 1, 2, 3, 4]", equalTo(list.toString()));
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/util/SlicedObjectListTests.java b/src/test/java/org/elasticsearch/common/util/SlicedObjectListTests.java
new file mode 100644
index 0000000..7e98073
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/util/SlicedObjectListTests.java
@@ -0,0 +1,147 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.common.util;
+
+import org.apache.lucene.util.ArrayUtil;
+import org.apache.lucene.util.RamUsageEstimator;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import static org.hamcrest.Matchers.equalTo;
+/**
+ * Tests for {@link SlicedObjectList}
+ */
+public class SlicedObjectListTests extends ElasticsearchTestCase {
+
+ // Minimal concrete SlicedObjectList used to exercise the abstract base; the
+ // only abstract piece supplied is grow().
+ public class TestList extends SlicedObjectList<Double> {
+
+ public TestList(int capactiy) {
+ this(new Double[capactiy], 0, capactiy);
+ }
+
+ public TestList(Double[] values, int offset, int length) {
+ super(values, offset, length);
+ }
+
+ public TestList(Double[] values) {
+ super(values);
+ }
+
+ // Oversizes via Lucene's ArrayUtil and copies the old contents over.
+ // Only valid for offset == 0, which the assert below enforces.
+ @Override
+ public void grow(int newLength) {
+ assertThat(offset, equalTo(0)); // NOTE: senseless if offset != 0
+ if (values.length >= newLength) {
+ return;
+ }
+ final Double[] current = values;
+ values = new Double[ArrayUtil.oversize(newLength, RamUsageEstimator.NUM_BYTES_OBJECT_REF)];
+ System.arraycopy(current, 0, values, 0, current.length);
+
+ }
+
+ }
+ // Both constructors must expose consistent length/offset/size and keep the
+ // backing values array at the given capacity.
+ @Test
+ public void testCapacity() {
+ TestList list = new TestList(5);
+ assertThat(list.length, equalTo(5));
+ assertThat(list.offset, equalTo(0));
+ assertThat(list.values.length, equalTo(5));
+ assertThat(list.size(), equalTo(5));
+
+
+ list = new TestList(new Double[10], 5, 5);
+ assertThat(list.length, equalTo(5));
+ assertThat(list.offset, equalTo(5));
+ assertThat(list.size(), equalTo(5));
+ assertThat(list.values.length, equalTo(10));
+ }
+
+ // Grows the backing array past its initial capacity and verifies the values
+ // are visible through the iterator, get(i), and the raw values array alike.
+ @Test
+ public void testGrow() {
+ TestList list = new TestList(5);
+ list.length = 1000;
+ for (int i = 0; i < list.length; i++) {
+ list.grow(i+1);
+ list.values[i] = ((double)i);
+ }
+ int expected = 0;
+ for (Double d : list) {
+ assertThat((double)expected++, equalTo(d));
+ }
+
+ for (int i = 0; i < list.length; i++) {
+ assertThat((double)i, equalTo(list.get(i)));
+ }
+
+ int count = 0;
+ for (int i = list.offset; i < list.offset+list.length; i++) {
+ assertThat((double)count++, equalTo(list.values[i]));
+ }
+ }
+
+ // Values repeat every 100 entries, so 99.0 first appears at 99 and last at
+ // 999; 100.0 never occurs.
+ @Test
+ public void testIndexOf() {
+ TestList list = new TestList(5);
+ list.length = 1000;
+ for (int i = 0; i < list.length; i++) {
+ list.grow(i+1);
+ list.values[i] = ((double)i%100);
+ }
+
+ assertThat(999, equalTo(list.lastIndexOf(99.0d)));
+ assertThat(99, equalTo(list.indexOf(99.0d)));
+
+ assertThat(-1, equalTo(list.lastIndexOf(100.0d)));
+ assertThat(-1, equalTo(list.indexOf(100.0d)));
+ }
+
+ // NOTE(review): no @Test annotation; presumably the runner still picks this
+ // up via the test* naming convention -- confirm it actually executes.
+ public void testIsEmpty() {
+ TestList list = new TestList(5);
+ assertThat(false, equalTo(list.isEmpty()));
+ list.length = 0;
+ assertThat(true, equalTo(list.isEmpty()));
+ }
+
+ // The list view is read-only: set and add must throw.
+ @Test
+ public void testSet() {
+ TestList list = new TestList(5);
+ try {
+ list.set(0, (double)4);
+ fail();
+ } catch (UnsupportedOperationException ex) {
+ }
+ try {
+ list.add((double)4);
+ fail();
+ } catch (UnsupportedOperationException ex) {
+ }
+ }
+
+ // toString must render like a regular java.util.List; unset slots are null.
+ @Test
+ public void testToString() {
+ TestList list = new TestList(5);
+ assertThat("[null, null, null, null, null]", equalTo(list.toString()));
+ for (int i = 0; i < list.length; i++) {
+ list.grow(i+1);
+ list.values[i] = ((double)i);
+ }
+ assertThat("[0.0, 1.0, 2.0, 3.0, 4.0]", equalTo(list.toString()));
+ }
+
+}
diff --git a/src/test/java/org/elasticsearch/common/util/concurrent/CountDownTest.java b/src/test/java/org/elasticsearch/common/util/concurrent/CountDownTest.java
new file mode 100644
index 0000000..8997969
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/util/concurrent/CountDownTest.java
@@ -0,0 +1,105 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.util.concurrent;
+
+import com.carrotsearch.randomizedtesting.annotations.Repeat;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.hamcrest.Matchers;
+import org.junit.Test;
+
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+
+
+public class CountDownTest extends ElasticsearchTestCase {
+
+ // Races several threads against one CountDown: exactly one caller (a
+ // countDown() that hits zero, or a successful fastForward()) may "win" and
+ // increment the counter, so count must end up exactly 1.
+ @Test @Repeat(iterations = 1000)
+ public void testConcurrent() throws InterruptedException {
+ final AtomicInteger count = new AtomicInteger(0);
+ final CountDown countDown = new CountDown(atLeast(10));
+ Thread[] threads = new Thread[atLeast(3)];
+ final CountDownLatch latch = new CountDownLatch(1);
+ for (int i = 0; i < threads.length; i++) {
+ threads[i] = new Thread() {
+
+ public void run() {
+ try {
+ latch.await();
+ // NOTE(review): cause is dropped here -- new RuntimeException()
+ // without the InterruptedException makes failures hard to trace.
+ } catch (InterruptedException e) {
+ throw new RuntimeException();
+ }
+ while (true) {
+ if(frequently()) {
+ if (countDown.isCountedDown()) {
+ break;
+ }
+ }
+ if (countDown.countDown()) {
+ count.incrementAndGet();
+ break;
+ }
+ }
+ }
+ };
+ threads[i].start();
+ }
+ // release all workers at once to maximize contention
+ latch.countDown();
+ Thread.yield();
+ if (rarely()) {
+ // fastForward from the main thread competes with the workers; at most
+ // one of fastForward/countDown-to-zero may report success.
+ if (countDown.fastForward()) {
+ count.incrementAndGet();
+ }
+ assertThat(countDown.isCountedDown(), equalTo(true));
+ assertThat(countDown.fastForward(), equalTo(false));
+
+ }
+
+ for (Thread thread : threads) {
+ thread.join();
+ }
+ assertThat(countDown.isCountedDown(), equalTo(true));
+ assertThat(count.get(), Matchers.equalTo(1));
+ }
+
+ // Single-threaded sanity: countDown() returns true exactly when the count
+ // reaches zero, and fastForward() short-circuits to the counted-down state.
+ @Test
+ public void testSingleThreaded() {
+ int atLeast = atLeast(10);
+ final CountDown countDown = new CountDown(atLeast);
+ while(!countDown.isCountedDown()) {
+ atLeast--;
+ if (countDown.countDown()) {
+ assertThat(atLeast, equalTo(0));
+ assertThat(countDown.isCountedDown(), equalTo(true));
+ assertThat(countDown.fastForward(), equalTo(false));
+ break;
+ }
+ if (rarely()) {
+ assertThat(countDown.fastForward(), equalTo(true));
+ assertThat(countDown.isCountedDown(), equalTo(true));
+ assertThat(countDown.fastForward(), equalTo(false));
+ }
+ assertThat(atLeast, greaterThan(0));
+ }
+
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java b/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java
new file mode 100644
index 0000000..d4d46db
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java
@@ -0,0 +1,236 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.util.concurrent;
+
+import com.google.common.base.Predicate;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.lessThan;
+
+/**
+ */
+public class EsExecutorsTests extends ElasticsearchTestCase {
+
+ // Picks any TimeUnit uniformly at random.
+ private TimeUnit randomTimeUnit() {
+ return TimeUnit.values()[between(0, TimeUnit.values().length - 1)];
+ }
+
+ // A fixed(1,1) executor has one thread and a one-slot queue; a third task
+ // with isForceExecution() == true must still be accepted and run instead of
+ // being rejected.
+ @Test
+ public void testFixedForcedExecution() throws Exception {
+ EsThreadPoolExecutor executor = EsExecutors.newFixed(1, 1, EsExecutors.daemonThreadFactory("test"));
+ final CountDownLatch wait = new CountDownLatch(1);
+
+ // task 1 blocks the single worker thread until 'wait' is released
+ final CountDownLatch exec1Wait = new CountDownLatch(1);
+ final AtomicBoolean executed1 = new AtomicBoolean();
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ wait.await();
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ executed1.set(true);
+ exec1Wait.countDown();
+ }
+ });
+
+ // task 2 occupies the single queue slot
+ final CountDownLatch exec2Wait = new CountDownLatch(1);
+ final AtomicBoolean executed2 = new AtomicBoolean();
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ executed2.set(true);
+ exec2Wait.countDown();
+ }
+ });
+
+ // task 3 would normally be rejected, but forces execution
+ final AtomicBoolean executed3 = new AtomicBoolean();
+ final CountDownLatch exec3Wait = new CountDownLatch(1);
+ executor.execute(new AbstractRunnable() {
+ @Override
+ public void run() {
+ executed3.set(true);
+ exec3Wait.countDown();
+ }
+
+ @Override
+ public boolean isForceExecution() {
+ return true;
+ }
+ });
+
+ wait.countDown();
+
+ exec1Wait.await();
+ exec2Wait.await();
+ exec3Wait.await();
+
+ assertThat(executed1.get(), equalTo(true));
+ assertThat(executed2.get(), equalTo(true));
+ assertThat(executed3.get(), equalTo(true));
+
+ executor.shutdownNow();
+ }
+
+ // Same saturation setup, but the third task does NOT force execution and so
+ // must be rejected with EsRejectedExecutionException and never run.
+ @Test
+ public void testFixedRejected() throws Exception {
+ EsThreadPoolExecutor executor = EsExecutors.newFixed(1, 1, EsExecutors.daemonThreadFactory("test"));
+ final CountDownLatch wait = new CountDownLatch(1);
+
+ // task 1 blocks the single worker thread until 'wait' is released
+ final CountDownLatch exec1Wait = new CountDownLatch(1);
+ final AtomicBoolean executed1 = new AtomicBoolean();
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ wait.await();
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ executed1.set(true);
+ exec1Wait.countDown();
+ }
+ });
+
+ // task 2 occupies the single queue slot
+ final CountDownLatch exec2Wait = new CountDownLatch(1);
+ final AtomicBoolean executed2 = new AtomicBoolean();
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ executed2.set(true);
+ exec2Wait.countDown();
+ }
+ });
+
+ final AtomicBoolean executed3 = new AtomicBoolean();
+ try {
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ executed3.set(true);
+ }
+ });
+ fail("should be rejected...");
+ } catch (EsRejectedExecutionException e) {
+ // all is well
+ }
+
+ wait.countDown();
+
+ exec1Wait.await();
+ exec2Wait.await();
+
+ assertThat(executed1.get(), equalTo(true));
+ assertThat(executed2.get(), equalTo(true));
+ assertThat(executed3.get(), equalTo(false));
+
+ executor.shutdownNow();
+ }
+
+ // A scaling executor must spawn threads up to max when saturated with
+ // blocked tasks; the barrier holds all workers (plus this thread) in step.
+ @Test
+ public void testScaleUp() throws Exception {
+ final int min = between(1, 3);
+ final int max = between(min + 1, 6);
+ final ThreadBarrier barrier = new ThreadBarrier(max + 1);
+
+ ThreadPoolExecutor pool = EsExecutors.newScaling(min, max, between(1, 100), randomTimeUnit(), EsExecutors.daemonThreadFactory("test"));
+ assertThat("Min property", pool.getCorePoolSize(), equalTo(min));
+ assertThat("Max property", pool.getMaximumPoolSize(), equalTo(max));
+
+ for (int i = 0; i < max; ++i) {
+ final CountDownLatch latch = new CountDownLatch(1);
+ pool.execute(new Runnable() {
+ public void run() {
+ latch.countDown();
+ try {
+ barrier.await();
+ barrier.await();
+ } catch (Throwable e) {
+ barrier.reset(e);
+ }
+ }
+ });
+
+ //wait until thread executes this task
+ //otherwise, a task might be queued
+ latch.await();
+ }
+
+ barrier.await();
+ assertThat("wrong pool size", pool.getPoolSize(), equalTo(max));
+ assertThat("wrong active size", pool.getActiveCount(), equalTo(max));
+ barrier.await();
+ pool.shutdown();
+ }
+
+ // After the burst completes, an idle scaling executor (short keep-alive in
+ // milliseconds) must shrink below max again; awaitBusy polls for that.
+ @Test
+ public void testScaleDown() throws Exception {
+ final int min = between(1, 3);
+ final int max = between(min + 1, 6);
+ final ThreadBarrier barrier = new ThreadBarrier(max + 1);
+
+ final ThreadPoolExecutor pool = EsExecutors.newScaling(min, max, between(1, 100), TimeUnit.MILLISECONDS, EsExecutors.daemonThreadFactory("test"));
+ assertThat("Min property", pool.getCorePoolSize(), equalTo(min));
+ assertThat("Max property", pool.getMaximumPoolSize(), equalTo(max));
+
+ for (int i = 0; i < max; ++i) {
+ final CountDownLatch latch = new CountDownLatch(1);
+ pool.execute(new Runnable() {
+ public void run() {
+ latch.countDown();
+ try {
+ barrier.await();
+ barrier.await();
+ } catch (Throwable e) {
+ barrier.reset(e);
+ }
+ }
+ });
+
+ //wait until thread executes this task
+ //otherwise, a task might be queued
+ latch.await();
+ }
+
+ barrier.await();
+ assertThat("wrong pool size", pool.getPoolSize(), equalTo(max));
+ assertThat("wrong active size", pool.getActiveCount(), equalTo(max));
+ barrier.await();
+ awaitBusy(new Predicate<Object>() {
+ public boolean apply(Object o) {
+ return pool.getActiveCount() == 0 && pool.getPoolSize() < max;
+ }
+ });
+ //assertThat("not all tasks completed", pool.getCompletedTaskCount(), equalTo((long) max));
+ assertThat("wrong active count", pool.getActiveCount(), equalTo(0));
+ //assertThat("wrong pool size. ", min, equalTo(pool.getPoolSize())); //BUG in ThreadPool - Bug ID: 6458662
+ //assertThat("idle threads didn't stay above min (" + pool.getPoolSize() + ")", pool.getPoolSize(), greaterThan(0));
+ assertThat("idle threads didn't shrink below max. (" + pool.getPoolSize() + ")", pool.getPoolSize(), lessThan(max));
+ pool.shutdown();
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java b/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java
new file mode 100644
index 0000000..bd17217
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java
@@ -0,0 +1,277 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.common.util.concurrent;
+
+import org.elasticsearch.common.Priority;
+import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.*;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ *
+ */
+public class PrioritizedExecutorsTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testPriorityQueue() throws Exception {
+ PriorityBlockingQueue<Priority> queue = new PriorityBlockingQueue<Priority>();
+ queue.add(Priority.LANGUID);
+ queue.add(Priority.NORMAL);
+ queue.add(Priority.HIGH);
+ queue.add(Priority.LOW);
+ queue.add(Priority.URGENT);
+
+ assertThat(queue.poll(), equalTo(Priority.URGENT));
+ assertThat(queue.poll(), equalTo(Priority.HIGH));
+ assertThat(queue.poll(), equalTo(Priority.NORMAL));
+ assertThat(queue.poll(), equalTo(Priority.LOW));
+ assertThat(queue.poll(), equalTo(Priority.LANGUID));
+ }
+
+ @Test
+ public void testSubmitPrioritizedExecutorWithRunnables() throws Exception {
+ ExecutorService executor = EsExecutors.newSinglePrioritizing(Executors.defaultThreadFactory());
+ List<Integer> results = new ArrayList<Integer>(7);
+ CountDownLatch awaitingLatch = new CountDownLatch(1);
+ CountDownLatch finishedLatch = new CountDownLatch(7);
+ executor.submit(new AwaitingJob(awaitingLatch));
+ executor.submit(new Job(6, Priority.LANGUID, results, finishedLatch));
+ executor.submit(new Job(4, Priority.LOW, results, finishedLatch));
+ executor.submit(new Job(1, Priority.HIGH, results, finishedLatch));
+ executor.submit(new Job(5, Priority.LOW, results, finishedLatch)); // will execute after the first LOW (fifo)
+ executor.submit(new Job(0, Priority.URGENT, results, finishedLatch));
+ executor.submit(new Job(3, Priority.NORMAL, results, finishedLatch));
+ executor.submit(new Job(2, Priority.HIGH, results, finishedLatch)); // will execute after the first HIGH (fifo)
+ awaitingLatch.countDown();
+ finishedLatch.await();
+
+ assertThat(results.size(), equalTo(7));
+ assertThat(results.get(0), equalTo(0));
+ assertThat(results.get(1), equalTo(1));
+ assertThat(results.get(2), equalTo(2));
+ assertThat(results.get(3), equalTo(3));
+ assertThat(results.get(4), equalTo(4));
+ assertThat(results.get(5), equalTo(5));
+ assertThat(results.get(6), equalTo(6));
+ }
+
+ @Test
+ public void testExecutePrioritizedExecutorWithRunnables() throws Exception {
+ ExecutorService executor = EsExecutors.newSinglePrioritizing(Executors.defaultThreadFactory());
+ List<Integer> results = new ArrayList<Integer>(7);
+ CountDownLatch awaitingLatch = new CountDownLatch(1);
+ CountDownLatch finishedLatch = new CountDownLatch(7);
+ executor.execute(new AwaitingJob(awaitingLatch));
+ executor.execute(new Job(6, Priority.LANGUID, results, finishedLatch));
+ executor.execute(new Job(4, Priority.LOW, results, finishedLatch));
+ executor.execute(new Job(1, Priority.HIGH, results, finishedLatch));
+ executor.execute(new Job(5, Priority.LOW, results, finishedLatch)); // will execute after the first LOW (fifo)
+ executor.execute(new Job(0, Priority.URGENT, results, finishedLatch));
+ executor.execute(new Job(3, Priority.NORMAL, results, finishedLatch));
+ executor.execute(new Job(2, Priority.HIGH, results, finishedLatch)); // will execute after the first HIGH (fifo)
+ awaitingLatch.countDown();
+ finishedLatch.await();
+
+ assertThat(results.size(), equalTo(7));
+ assertThat(results.get(0), equalTo(0));
+ assertThat(results.get(1), equalTo(1));
+ assertThat(results.get(2), equalTo(2));
+ assertThat(results.get(3), equalTo(3));
+ assertThat(results.get(4), equalTo(4));
+ assertThat(results.get(5), equalTo(5));
+ assertThat(results.get(6), equalTo(6));
+ }
+
+ @Test
+ public void testSubmitPrioritizedExecutorWithCallables() throws Exception {
+ ExecutorService executor = EsExecutors.newSinglePrioritizing(Executors.defaultThreadFactory());
+ List<Integer> results = new ArrayList<Integer>(7);
+ CountDownLatch awaitingLatch = new CountDownLatch(1);
+ CountDownLatch finishedLatch = new CountDownLatch(7);
+ executor.submit(new AwaitingJob(awaitingLatch));
+ executor.submit(new CallableJob(6, Priority.LANGUID, results, finishedLatch));
+ executor.submit(new CallableJob(4, Priority.LOW, results, finishedLatch));
+ executor.submit(new CallableJob(1, Priority.HIGH, results, finishedLatch));
+ executor.submit(new CallableJob(5, Priority.LOW, results, finishedLatch)); // will execute after the first LOW (fifo)
+ executor.submit(new CallableJob(0, Priority.URGENT, results, finishedLatch));
+ executor.submit(new CallableJob(3, Priority.NORMAL, results, finishedLatch));
+ executor.submit(new CallableJob(2, Priority.HIGH, results, finishedLatch)); // will execute after the first HIGH (fifo)
+ awaitingLatch.countDown();
+ finishedLatch.await();
+
+ assertThat(results.size(), equalTo(7));
+ assertThat(results.get(0), equalTo(0));
+ assertThat(results.get(1), equalTo(1));
+ assertThat(results.get(2), equalTo(2));
+ assertThat(results.get(3), equalTo(3));
+ assertThat(results.get(4), equalTo(4));
+ assertThat(results.get(5), equalTo(5));
+ assertThat(results.get(6), equalTo(6));
+ }
+
+ @Test
+ public void testSubmitPrioritizedExecutorWithMixed() throws Exception {
+ ExecutorService executor = EsExecutors.newSinglePrioritizing(Executors.defaultThreadFactory());
+ List<Integer> results = new ArrayList<Integer>(7);
+ CountDownLatch awaitingLatch = new CountDownLatch(1);
+ CountDownLatch finishedLatch = new CountDownLatch(7);
+ executor.submit(new AwaitingJob(awaitingLatch));
+ executor.submit(new CallableJob(6, Priority.LANGUID, results, finishedLatch));
+ executor.submit(new Job(4, Priority.LOW, results, finishedLatch));
+ executor.submit(new CallableJob(1, Priority.HIGH, results, finishedLatch));
+ executor.submit(new Job(5, Priority.LOW, results, finishedLatch)); // will execute after the first LOW (fifo)
+ executor.submit(new CallableJob(0, Priority.URGENT, results, finishedLatch));
+ executor.submit(new Job(3, Priority.NORMAL, results, finishedLatch));
+ executor.submit(new CallableJob(2, Priority.HIGH, results, finishedLatch)); // will execute after the first HIGH (fifo)
+ awaitingLatch.countDown();
+ finishedLatch.await();
+
+ assertThat(results.size(), equalTo(7));
+ assertThat(results.get(0), equalTo(0));
+ assertThat(results.get(1), equalTo(1));
+ assertThat(results.get(2), equalTo(2));
+ assertThat(results.get(3), equalTo(3));
+ assertThat(results.get(4), equalTo(4));
+ assertThat(results.get(5), equalTo(5));
+ assertThat(results.get(6), equalTo(6));
+ }
+
+ @Test
+ public void testTimeout() throws Exception {
+ ScheduledExecutorService timer = Executors.newSingleThreadScheduledExecutor();
+ PrioritizedEsThreadPoolExecutor executor = EsExecutors.newSinglePrioritizing(Executors.defaultThreadFactory());
+ final CountDownLatch block = new CountDownLatch(1);
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ block.await();
+ } catch (InterruptedException e) {
+ fail();
+ }
+ }
+
+ @Override
+ public String toString() {
+ return "the blocking";
+ }
+ });
+
+ final AtomicBoolean executeCalled = new AtomicBoolean();
+ final CountDownLatch timedOut = new CountDownLatch(1);
+ executor.execute(new Runnable() {
+ @Override
+ public void run() {
+ executeCalled.set(true);
+ }
+
+ @Override
+ public String toString() {
+ return "the waiting";
+ }
+ }, timer, TimeValue.timeValueMillis(100) /* enough timeout to catch them in the pending list... */, new Runnable() {
+ @Override
+ public void run() {
+ timedOut.countDown();
+ }
+ }
+ );
+
+ PrioritizedEsThreadPoolExecutor.Pending[] pending = executor.getPending();
+ assertThat(pending.length, equalTo(1));
+ assertThat(pending[0].task.toString(), equalTo("the waiting"));
+
+ assertThat(timedOut.await(2, TimeUnit.SECONDS), equalTo(true));
+ block.countDown();
+ Thread.sleep(100); // sleep a bit to double check that execute on the timed out update task is not called...
+ assertThat(executeCalled.get(), equalTo(false));
+
+ timer.shutdownNow();
+ executor.shutdownNow();
+ }
+
+ static class AwaitingJob extends PrioritizedRunnable {
+
+ private final CountDownLatch latch;
+
+ private AwaitingJob(CountDownLatch latch) {
+ super(Priority.URGENT);
+ this.latch = latch;
+ }
+
+ @Override
+ public void run() {
+ try {
+ latch.await();
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ }
+ }
+ }
+
+ static class Job extends PrioritizedRunnable {
+
+ private final int result;
+ private final List<Integer> results;
+ private final CountDownLatch latch;
+
+ Job(int result, Priority priority, List<Integer> results, CountDownLatch latch) {
+ super(priority);
+ this.result = result;
+ this.results = results;
+ this.latch = latch;
+ }
+
+ @Override
+ public void run() {
+ results.add(result);
+ latch.countDown();
+ }
+ }
+
+ static class CallableJob extends PrioritizedCallable<Integer> {
+
+ private final int result;
+ private final List<Integer> results;
+ private final CountDownLatch latch;
+
+ CallableJob(int result, Priority priority, List<Integer> results, CountDownLatch latch) {
+ super(priority);
+ this.result = result;
+ this.results = results;
+ this.latch = latch;
+ }
+
+ @Override
+ public Integer call() throws Exception {
+ results.add(result);
+ latch.countDown();
+ return result;
+ }
+
+ }
+}
diff --git a/src/test/java/org/elasticsearch/common/xcontent/builder/BuilderRawFieldTests.java b/src/test/java/org/elasticsearch/common/xcontent/builder/BuilderRawFieldTests.java
new file mode 100644
index 0000000..c8911c6
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/xcontent/builder/BuilderRawFieldTests.java
@@ -0,0 +1,124 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.xcontent.builder;
+
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ *
+ */
+/**
+ * Verifies that {@link XContentBuilder#rawField} embeds pre-encoded payloads
+ * (objects, numbers, plain strings) that round-trip cleanly through the
+ * parser, for each of JSON, SMILE and YAML.
+ */
+public class BuilderRawFieldTests extends ElasticsearchTestCase {
+
+    @Test
+    public void testJsonRawField() throws IOException {
+        testRawField(XContentType.JSON);
+    }
+
+    @Test
+    public void testSmileRawField() throws IOException {
+        testRawField(XContentType.SMILE);
+    }
+
+    @Test
+    public void testYamlRawField() throws IOException {
+        testRawField(XContentType.YAML);
+    }
+
+    private void testRawField(XContentType type) throws IOException {
+        // Build a document interleaving regular string fields with raw payloads.
+        XContentBuilder builder = XContentFactory.contentBuilder(type);
+        builder.startObject();
+        builder.field("field1", "value1");
+        builder.rawField("_source", XContentFactory.contentBuilder(type).startObject().field("s_field", "s_value").endObject().bytes());
+        builder.field("field2", "value2");
+        builder.rawField("payload_i", new BytesArray(Long.toString(1)));
+        builder.field("field3", "value3");
+        builder.rawField("payload_d", new BytesArray(Double.toString(1.1)));
+        builder.field("field4", "value4");
+        builder.rawField("payload_s", new BytesArray("test"));
+        builder.field("field5", "value5");
+        builder.endObject();
+
+        // Parse it back and verify every field appears, in order.
+        XContentParser parser = XContentFactory.xContent(type).createParser(builder.bytes());
+        assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
+        assertStringField(parser, "field1", "value1");
+
+        // the raw "_source" sub-object
+        assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
+        assertThat(parser.currentName(), equalTo("_source"));
+        assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
+        assertStringField(parser, "s_field", "s_value");
+        assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT));
+
+        assertStringField(parser, "field2", "value2");
+
+        // the raw integer payload
+        assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
+        assertThat(parser.currentName(), equalTo("payload_i"));
+        assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_NUMBER));
+        assertThat(parser.numberType(), equalTo(XContentParser.NumberType.INT));
+        assertThat(parser.longValue(), equalTo(1l));
+
+        assertStringField(parser, "field3", "value3");
+
+        // the raw double payload
+        assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
+        assertThat(parser.currentName(), equalTo("payload_d"));
+        assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_NUMBER));
+        assertThat(parser.numberType(), equalTo(XContentParser.NumberType.DOUBLE));
+        assertThat(parser.doubleValue(), equalTo(1.1d));
+
+        assertStringField(parser, "field4", "value4");
+        // the raw string payload parses as a plain string value
+        assertStringField(parser, "payload_s", "test");
+        assertStringField(parser, "field5", "value5");
+
+        assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT));
+    }
+
+    /** Consumes one field-name / string-value token pair and verifies both. */
+    private static void assertStringField(XContentParser parser, String name, String value) throws IOException {
+        assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
+        assertThat(parser.currentName(), equalTo(name));
+        assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_STRING));
+        assertThat(parser.text(), equalTo(value));
+    }
+}
diff --git a/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java b/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java
new file mode 100644
index 0000000..67bb99f
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java
@@ -0,0 +1,156 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.xcontent.builder;
+
+import com.google.common.collect.Lists;
+import org.elasticsearch.common.io.FastCharArrayWriter;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentGenerator;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import java.util.*;
+
+import static org.elasticsearch.common.xcontent.XContentBuilder.FieldCaseConversion.CAMELCASE;
+import static org.elasticsearch.common.xcontent.XContentBuilder.FieldCaseConversion.UNDERSCORE;
+import static org.hamcrest.Matchers.equalTo;
+
+/**
+ *
+ */
+/**
+ * Unit tests for {@link XContentBuilder}: trailing line feed, generator reuse,
+ * raw byte interleaving, field case conversion, and date/calendar conversion.
+ */
+public class XContentBuilderTests extends ElasticsearchTestCase {
+
+    @Test
+    public void testPrettyWithLfAtEnd() throws Exception {
+        FastCharArrayWriter writer = new FastCharArrayWriter();
+        XContentGenerator generator = XContentFactory.xContent(XContentType.JSON).createGenerator(writer);
+        generator.usePrettyPrint();
+        generator.usePrintLineFeedAtEnd();
+
+        generator.writeStartObject();
+        generator.writeStringField("test", "value");
+        generator.writeEndObject();
+        generator.flush();
+
+        generator.close();
+        // double close, and check there is no error...
+        generator.close();
+
+        // closing must have appended the trailing line feed
+        assertThat(writer.unsafeCharArray()[writer.size() - 1], equalTo('\n'));
+    }
+
+    @Test
+    public void verifyReuseJsonGenerator() throws Exception {
+        FastCharArrayWriter writer = new FastCharArrayWriter();
+        XContentGenerator generator = XContentFactory.xContent(XContentType.JSON).createGenerator(writer);
+        generator.writeStartObject();
+        generator.writeStringField("test", "value");
+        generator.writeEndObject();
+        generator.flush();
+
+        assertThat(writer.toStringTrim(), equalTo("{\"test\":\"value\"}"));
+
+        // try again...
+        writer.reset();
+        generator.writeStartObject();
+        generator.writeStringField("test", "value");
+        generator.writeEndObject();
+        generator.flush();
+        // we get a space at the start here since it thinks we are not in the root object (fine, we will ignore it in the real code we use)
+        assertThat(writer.toStringTrim(), equalTo("{\"test\":\"value\"}"));
+    }
+
+    @Test
+    public void testSimpleGenerator() throws Exception {
+        XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
+        builder.startObject().field("test", "value").endObject();
+        assertThat(builder.string(), equalTo("{\"test\":\"value\"}"));
+
+        // a fresh builder produces the exact same output
+        builder = XContentFactory.contentBuilder(XContentType.JSON);
+        builder.startObject().field("test", "value").endObject();
+        assertThat(builder.string(), equalTo("{\"test\":\"value\"}"));
+    }
+
+    @Test
+    public void testOverloadedList() throws Exception {
+        // a List value must serialize as a JSON array
+        XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
+        builder.startObject().field("test", Lists.newArrayList("1", "2")).endObject();
+        assertThat(builder.string(), equalTo("{\"test\":[\"1\",\"2\"]}"));
+    }
+
+    @Test
+    public void testWritingBinaryToStream() throws Exception {
+        BytesStreamOutput bos = new BytesStreamOutput();
+
+        XContentGenerator gen = XContentFactory.xContent(XContentType.JSON).createGenerator(bos);
+        gen.writeStartObject();
+        gen.writeStringField("name", "something");
+        gen.flush();
+        // inject raw (non-generator) bytes directly into the underlying stream
+        bos.write(", source : { test : \"value\" }".getBytes("UTF8"));
+        gen.writeStringField("name2", "something2");
+        gen.writeEndObject();
+        gen.close();
+
+        byte[] data = bos.bytes().toBytes();
+        String sData = new String(data, "UTF8");
+        // verify the generated fields and the raw injected bytes all made it
+        // into the output (previously this test only printed to stdout)
+        assertThat(sData.contains("something"), equalTo(true));
+        assertThat(sData.contains(", source : { test : \"value\" }"), equalTo(true));
+        assertThat(sData.contains("something2"), equalTo(true));
+    }
+
+    @Test
+    public void testFieldCaseConversion() throws Exception {
+        // underscore -> camel case
+        XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON).fieldCaseConversion(CAMELCASE);
+        builder.startObject().field("test_name", "value").endObject();
+        assertThat(builder.string(), equalTo("{\"testName\":\"value\"}"));
+
+        // camel case -> underscore
+        builder = XContentFactory.contentBuilder(XContentType.JSON).fieldCaseConversion(UNDERSCORE);
+        builder.startObject().field("testName", "value").endObject();
+        assertThat(builder.string(), equalTo("{\"test_name\":\"value\"}"));
+    }
+
+    @Test
+    public void testDateTypesConversion() throws Exception {
+        // expected renderings come from the builder's own default date printer
+        Date date = new Date();
+        String expectedDate = XContentBuilder.defaultDatePrinter.print(date.getTime());
+        Calendar calendar = new GregorianCalendar(TimeZone.getTimeZone("UTC"), Locale.ROOT);
+        String expectedCalendar = XContentBuilder.defaultDatePrinter.print(calendar.getTimeInMillis());
+
+        // Date and Calendar passed as explicit field values
+        XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
+        builder.startObject().field("date", date).endObject();
+        assertThat(builder.string(), equalTo("{\"date\":\"" + expectedDate + "\"}"));
+
+        builder = XContentFactory.contentBuilder(XContentType.JSON);
+        builder.startObject().field("calendar", calendar).endObject();
+        assertThat(builder.string(), equalTo("{\"calendar\":\"" + expectedCalendar + "\"}"));
+
+        // Date and Calendar passed inside a map
+        builder = XContentFactory.contentBuilder(XContentType.JSON);
+        Map<String, Object> map = new HashMap<String, Object>();
+        map.put("date", date);
+        builder.map(map);
+        assertThat(builder.string(), equalTo("{\"date\":\"" + expectedDate + "\"}"));
+
+        builder = XContentFactory.contentBuilder(XContentType.JSON);
+        map = new HashMap<String, Object>();
+        map.put("calendar", calendar);
+        builder.map(map);
+        assertThat(builder.string(), equalTo("{\"calendar\":\"" + expectedCalendar + "\"}"));
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java b/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java
new file mode 100644
index 0000000..0a57adf
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java
@@ -0,0 +1,105 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.xcontent.smile;
+
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentGenerator;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
+
+/**
+ *
+ */
+/**
+ * Writes the same document through the JSON and SMILE generators and verifies
+ * that parsing both back produces an identical token stream.
+ */
+public class JsonVsSmileTests extends ElasticsearchTestCase {
+
+    @Test
+    public void compareParsingTokens() throws IOException {
+        BytesStreamOutput xsonOs = new BytesStreamOutput();
+        XContentGenerator xsonGen = XContentFactory.xContent(XContentType.SMILE).createGenerator(xsonOs);
+
+        BytesStreamOutput jsonOs = new BytesStreamOutput();
+        XContentGenerator jsonGen = XContentFactory.xContent(XContentType.JSON).createGenerator(jsonOs);
+
+        // emit the identical structure through both generators:
+        // { "test": "value", "arr": [1, null] }
+        xsonGen.writeStartObject();
+        jsonGen.writeStartObject();
+
+        xsonGen.writeStringField("test", "value");
+        jsonGen.writeStringField("test", "value");
+
+        xsonGen.writeArrayFieldStart("arr");
+        jsonGen.writeArrayFieldStart("arr");
+        xsonGen.writeNumber(1);
+        jsonGen.writeNumber(1);
+        xsonGen.writeNull();
+        jsonGen.writeNull();
+        xsonGen.writeEndArray();
+        jsonGen.writeEndArray();
+
+        xsonGen.writeEndObject();
+        jsonGen.writeEndObject();
+
+        xsonGen.close();
+        jsonGen.close();
+
+        verifySameTokens(XContentFactory.xContent(XContentType.JSON).createParser(jsonOs.bytes().toBytes()), XContentFactory.xContent(XContentType.SMILE).createParser(xsonOs.bytes().toBytes()));
+    }
+
+    /**
+     * Walks both parsers in lock step until exhaustion, asserting that every
+     * token (and its name/text/number payload where applicable) matches.
+     */
+    private void verifySameTokens(XContentParser parser1, XContentParser parser2) throws IOException {
+        while (true) {
+            XContentParser.Token token1 = parser1.nextToken();
+            XContentParser.Token token2 = parser2.nextToken();
+            if (token1 == null) {
+                // both streams must end together
+                assertThat(token2, nullValue());
+                return;
+            }
+            assertThat(token1, equalTo(token2));
+            switch (token1) {
+                case FIELD_NAME:
+                    assertThat(parser1.currentName(), equalTo(parser2.currentName()));
+                    break;
+                case VALUE_STRING:
+                    assertThat(parser1.text(), equalTo(parser2.text()));
+                    break;
+                case VALUE_NUMBER:
+                    assertThat(parser1.numberType(), equalTo(parser2.numberType()));
+                    assertThat(parser1.numberValue(), equalTo(parser2.numberValue()));
+                    break;
+            }
+        }
+    }
+}
diff --git a/src/test/java/org/elasticsearch/common/xcontent/support/XContentHelperTests.java b/src/test/java/org/elasticsearch/common/xcontent/support/XContentHelperTests.java
new file mode 100644
index 0000000..0f9e4ba
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/xcontent/support/XContentHelperTests.java
@@ -0,0 +1,68 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.xcontent.support;
+
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.hamcrest.Matchers;
+import org.junit.Test;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/** Tests for {@link XContentHelper#mergeDefaults} map-merging behavior. */
+public class XContentHelperTests extends ElasticsearchTestCase {
+
+    /** Builds a map from alternating key/value arguments. */
+    Map<String, Object> getMap(Object... keyValues) {
+        Map<String, Object> map = new HashMap<String, Object>();
+        for (int i = 0; i < keyValues.length; i += 2) {
+            map.put((String) keyValues[i], keyValues[i + 1]);
+        }
+        return map;
+    }
+
+    /** Wraps the key/value pairs in a single-entry map keyed by {@code name}. */
+    Map<String, Object> getNamedMap(String name, Object... keyValues) {
+        Map<String, Object> wrapper = new HashMap<String, Object>(1);
+        wrapper.put(name, getMap(keyValues));
+        return wrapper;
+    }
+
+    /** Shorthand for a fixed-size list of the given values. */
+    List<Object> getList(Object... values) {
+        return Arrays.asList(values);
+    }
+
+    @Test
+    public void testMergingListValuesAreMapsOfOne() {
+        // Lists whose elements are single-entry maps merge by key: entries
+        // present in both lists get their inner maps merged (content wins on
+        // conflicts), while defaults-only entries are appended at the end.
+        Map<String, Object> defaults = getMap("test", getList(getNamedMap("name1", "t1", "1"), getNamedMap("name2", "t2", "2")));
+        Map<String, Object> content = getMap("test", getList(getNamedMap("name2", "t3", "3"), getNamedMap("name4", "t4", "4")));
+        Map<String, Object> expected = getMap("test",
+                getList(getNamedMap("name2", "t2", "2", "t3", "3"), getNamedMap("name4", "t4", "4"), getNamedMap("name1", "t1", "1")));
+
+        XContentHelper.mergeDefaults(content, defaults);
+
+        assertThat(content, Matchers.equalTo(expected));
+    }
+}
diff --git a/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java b/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java
new file mode 100644
index 0000000..e2fba5a
--- /dev/null
+++ b/src/test/java/org/elasticsearch/common/xcontent/support/XContentMapValuesTests.java
@@ -0,0 +1,456 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.xcontent.support;
+
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.test.ElasticsearchTestCase;
+import org.hamcrest.Matchers;
+import org.junit.Test;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.hamcrest.Matchers.*;
+import static org.hamcrest.core.IsEqual.equalTo;
+
+/**
+ */
+public class XContentMapValuesTests extends ElasticsearchTestCase {
+
+ @Test
+ public void testFilter() throws Exception {
+ XContentBuilder builder = XContentFactory.jsonBuilder().startObject()
+ .field("test1", "value1")
+ .field("test2", "value2")
+ .field("something_else", "value3")
+ .endObject();
+
+ Map<String, Object> source = XContentFactory.xContent(XContentType.JSON).createParser(builder.string()).mapAndClose();
+ Map<String, Object> filter = XContentMapValues.filter(source, new String[]{"test1"}, Strings.EMPTY_ARRAY);
+ assertThat(filter.size(), equalTo(1));
+ assertThat(filter.get("test1").toString(), equalTo("value1"));
+
+ filter = XContentMapValues.filter(source, new String[]{"test*"}, Strings.EMPTY_ARRAY);
+ assertThat(filter.size(), equalTo(2));
+ assertThat(filter.get("test1").toString(), equalTo("value1"));
+ assertThat(filter.get("test2").toString(), equalTo("value2"));
+
+ filter = XContentMapValues.filter(source, Strings.EMPTY_ARRAY, new String[]{"test1"});
+ assertThat(filter.size(), equalTo(2));
+ assertThat(filter.get("test2").toString(), equalTo("value2"));
+ assertThat(filter.get("something_else").toString(), equalTo("value3"));
+
+ // more complex object...
+ builder = XContentFactory.jsonBuilder().startObject()
+ .startObject("path1")
+ .startArray("path2")
+ .startObject().field("test", "value1").endObject()
+ .startObject().field("test", "value2").endObject()
+ .endArray()
+ .endObject()
+ .field("test1", "value1")
+ .endObject();
+
+ source = XContentFactory.xContent(XContentType.JSON).createParser(builder.string()).mapAndClose();
+ filter = XContentMapValues.filter(source, new String[]{"path1"}, Strings.EMPTY_ARRAY);
+ assertThat(filter.size(), equalTo(1));
+
+ filter = XContentMapValues.filter(source, new String[]{"path1*"}, Strings.EMPTY_ARRAY);
+ assertThat(filter.get("path1"), equalTo(source.get("path1")));
+ assertThat(filter.containsKey("test1"), equalTo(false));
+
+ filter = XContentMapValues.filter(source, new String[]{"test1*"}, Strings.EMPTY_ARRAY);
+ assertThat(filter.get("test1"), equalTo(source.get("test1")));
+ assertThat(filter.containsKey("path1"), equalTo(false));
+
+ filter = XContentMapValues.filter(source, new String[]{"path1.path2.*"}, Strings.EMPTY_ARRAY);
+ assertThat(filter.get("path1"), equalTo(source.get("path1")));
+ assertThat(filter.containsKey("test1"), equalTo(false));
+ }
+
+ @SuppressWarnings({"unchecked"})
+ @Test
+ public void testExtractValue() throws Exception {
+ XContentBuilder builder = XContentFactory.jsonBuilder().startObject()
+ .field("test", "value")
+ .endObject();
+
+ Map<String, Object> map = XContentFactory.xContent(XContentType.JSON).createParser(builder.string()).mapAndClose();
+ assertThat(XContentMapValues.extractValue("test", map).toString(), equalTo("value"));
+ assertThat(XContentMapValues.extractValue("test.me", map), nullValue());
+ assertThat(XContentMapValues.extractValue("something.else.2", map), nullValue());
+
+ builder = XContentFactory.jsonBuilder().startObject()
+ .startObject("path1").startObject("path2").field("test", "value").endObject().endObject()
+ .endObject();
+
+ map = XContentFactory.xContent(XContentType.JSON).createParser(builder.string()).mapAndClose();
+ assertThat(XContentMapValues.extractValue("path1.path2.test", map).toString(), equalTo("value"));
+ assertThat(XContentMapValues.extractValue("path1.path2.test_me", map), nullValue());
+ assertThat(XContentMapValues.extractValue("path1.non_path2.test", map), nullValue());
+
+ Object extValue = XContentMapValues.extractValue("path1.path2", map);
+ assertThat(extValue, instanceOf(Map.class));
+ Map<String, Object> extMapValue = (Map<String, Object>) extValue;
+ assertThat(extMapValue, hasEntry("test", (Object) "value"));
+
+ extValue = XContentMapValues.extractValue("path1", map);
+ assertThat(extValue, instanceOf(Map.class));
+ extMapValue = (Map<String, Object>) extValue;
+ assertThat(extMapValue.containsKey("path2"), equalTo(true));
+
+ // lists
+ builder = XContentFactory.jsonBuilder().startObject()
+ .startObject("path1").field("test", "value1", "value2").endObject()
+ .endObject();
+
+ map = XContentFactory.xContent(XContentType.JSON).createParser(builder.string()).mapAndClose();
+
+ extValue = XContentMapValues.extractValue("path1.test", map);
+ assertThat(extValue, instanceOf(List.class));
+
+ List extListValue = (List) extValue;
+ assertThat(extListValue.size(), equalTo(2));
+
+ builder = XContentFactory.jsonBuilder().startObject()
+ .startObject("path1")
+ .startArray("path2")
+ .startObject().field("test", "value1").endObject()
+ .startObject().field("test", "value2").endObject()
+ .endArray()
+ .endObject()
+ .endObject();
+
+ map = XContentFactory.xContent(XContentType.JSON).createParser(builder.string()).mapAndClose();
+
+ extValue = XContentMapValues.extractValue("path1.path2.test", map);
+ assertThat(extValue, instanceOf(List.class));
+
+ extListValue = (List) extValue;
+ assertThat(extListValue.size(), equalTo(2));
+ assertThat(extListValue.get(0).toString(), equalTo("value1"));
+ assertThat(extListValue.get(1).toString(), equalTo("value2"));
+
+ // fields with . in them
+ builder = XContentFactory.jsonBuilder().startObject()
+ .field("xxx.yyy", "value")
+ .endObject();
+ map = XContentFactory.xContent(XContentType.JSON).createParser(builder.string()).mapAndClose();
+ assertThat(XContentMapValues.extractValue("xxx.yyy", map).toString(), equalTo("value"));
+
+ builder = XContentFactory.jsonBuilder().startObject()
+ .startObject("path1.xxx").startObject("path2.yyy").field("test", "value").endObject().endObject()
+ .endObject();
+
+ map = XContentFactory.xContent(XContentType.JSON).createParser(builder.string()).mapAndClose();
+ assertThat(XContentMapValues.extractValue("path1.xxx.path2.yyy.test", map).toString(), equalTo("value"));
+ }
+
+ @SuppressWarnings({"unchecked"})
+ @Test
+ public void testExtractRawValue() throws Exception {
+ XContentBuilder builder = XContentFactory.jsonBuilder().startObject()
+ .field("test", "value")
+ .endObject();
+
+ Map<String, Object> map = XContentFactory.xContent(XContentType.JSON).createParser(builder.string()).mapAndClose();
+ assertThat(XContentMapValues.extractRawValues("test", map).get(0).toString(), equalTo("value"));
+
+ builder = XContentFactory.jsonBuilder().startObject()
+ .field("test.me", "value")
+ .endObject();
+
+ map = XContentFactory.xContent(XContentType.JSON).createParser(builder.string()).mapAndClose();
+ assertThat(XContentMapValues.extractRawValues("test.me", map).get(0).toString(), equalTo("value"));
+
+ builder = XContentFactory.jsonBuilder().startObject()
+ .startObject("path1").startObject("path2").field("test", "value").endObject().endObject()
+ .endObject();
+
+ map = XContentFactory.xContent(XContentType.JSON).createParser(builder.string()).mapAndClose();
+ assertThat(XContentMapValues.extractRawValues("path1.path2.test", map).get(0).toString(), equalTo("value"));
+
+ builder = XContentFactory.jsonBuilder().startObject()
+ .startObject("path1.xxx").startObject("path2.yyy").field("test", "value").endObject().endObject()
+ .endObject();
+
+ map = XContentFactory.xContent(XContentType.JSON).createParser(builder.string()).mapAndClose();
+ assertThat(XContentMapValues.extractRawValues("path1.xxx.path2.yyy.test", map).get(0).toString(), equalTo("value"));
+ }
+
+ @Test
+ public void prefixedNamesFilteringTest() {
+ Map<String, Object> map = new HashMap<String, Object>();
+ map.put("obj", "value");
+ map.put("obj_name", "value_name");
+ Map<String, Object> filterdMap = XContentMapValues.filter(map, new String[]{"obj_name"}, Strings.EMPTY_ARRAY);
+ assertThat(filterdMap.size(), equalTo(1));
+ assertThat((String) filterdMap.get("obj_name"), equalTo("value_name"));
+ }
+
+
+ @Test
+ @SuppressWarnings("unchecked")
+ public void nestedFilteringTest() {
+ Map<String, Object> map = new HashMap<String, Object>();
+ map.put("field", "value");
+ map.put("array",
+ Arrays.asList(
+ 1,
+ new HashMap<String, Object>() {{
+ put("nested", 2);
+ put("nested_2", 3);
+ }}));
+ Map<String, Object> falteredMap = XContentMapValues.filter(map, new String[]{"array.nested"}, Strings.EMPTY_ARRAY);
+ assertThat(falteredMap.size(), equalTo(1));
+
+ // Selecting members of objects within arrays (ex. [ 1, { nested: "value"} ]) always returns all values in the array (1 in the ex)
+ // this is expected behavior as this types of objects are not supported in ES
+ assertThat((Integer) ((List) falteredMap.get("array")).get(0), equalTo(1));
+ assertThat(((Map<String, Object>) ((List) falteredMap.get("array")).get(1)).size(), equalTo(1));
+ assertThat((Integer) ((Map<String, Object>) ((List) falteredMap.get("array")).get(1)).get("nested"), equalTo(2));
+
+ falteredMap = XContentMapValues.filter(map, new String[]{"array.*"}, Strings.EMPTY_ARRAY);
+ assertThat(falteredMap.size(), equalTo(1));
+ assertThat((Integer) ((List) falteredMap.get("array")).get(0), equalTo(1));
+ assertThat(((Map<String, Object>) ((List) falteredMap.get("array")).get(1)).size(), equalTo(2));
+
+ map.clear();
+ map.put("field", "value");
+ map.put("obj",
+ new HashMap<String, Object>() {{
+ put("field", "value");
+ put("field2", "value2");
+ }});
+ falteredMap = XContentMapValues.filter(map, new String[]{"obj.field"}, Strings.EMPTY_ARRAY);
+ assertThat(falteredMap.size(), equalTo(1));
+ assertThat(((Map<String, Object>) falteredMap.get("obj")).size(), equalTo(1));
+ assertThat((String) ((Map<String, Object>) falteredMap.get("obj")).get("field"), equalTo("value"));
+
+ falteredMap = XContentMapValues.filter(map, new String[]{"obj.*"}, Strings.EMPTY_ARRAY);
+ assertThat(falteredMap.size(), equalTo(1));
+ assertThat(((Map<String, Object>) falteredMap.get("obj")).size(), equalTo(2));
+ assertThat((String) ((Map<String, Object>) falteredMap.get("obj")).get("field"), equalTo("value"));
+ assertThat((String) ((Map<String, Object>) falteredMap.get("obj")).get("field2"), equalTo("value2"));
+
+ }
+
+ @SuppressWarnings("unchecked")
+ @Test
+ public void completeObjectFilteringTest() {
+ Map<String, Object> map = new HashMap<String, Object>();
+ map.put("field", "value");
+ map.put("obj",
+ new HashMap<String, Object>() {{
+ put("field", "value");
+ put("field2", "value2");
+ }});
+ map.put("array",
+ Arrays.asList(
+ 1,
+ new HashMap<String, Object>() {{
+ put("field", "value");
+ put("field2", "value2");
+ }}));
+
+ Map<String, Object> filteredMap = XContentMapValues.filter(map, new String[]{"obj"}, Strings.EMPTY_ARRAY);
+ assertThat(filteredMap.size(), equalTo(1));
+ assertThat(((Map<String, Object>) filteredMap.get("obj")).size(), equalTo(2));
+ assertThat(((Map<String, Object>) filteredMap.get("obj")).get("field").toString(), equalTo("value"));
+ assertThat(((Map<String, Object>) filteredMap.get("obj")).get("field2").toString(), equalTo("value2"));
+
+
+ filteredMap = XContentMapValues.filter(map, new String[]{"obj"}, new String[]{"*.field2"});
+ assertThat(filteredMap.size(), equalTo(1));
+ assertThat(((Map<String, Object>) filteredMap.get("obj")).size(), equalTo(1));
+ assertThat(((Map<String, Object>) filteredMap.get("obj")).get("field").toString(), equalTo("value"));
+
+
+ filteredMap = XContentMapValues.filter(map, new String[]{"array"}, new String[]{});
+ assertThat(filteredMap.size(), equalTo(1));
+ assertThat(((List) filteredMap.get("array")).size(), equalTo(2));
+ assertThat((Integer) ((List) filteredMap.get("array")).get(0), equalTo(1));
+ assertThat(((Map<String, Object>) ((List) filteredMap.get("array")).get(1)).size(), equalTo(2));
+
+ filteredMap = XContentMapValues.filter(map, new String[]{"array"}, new String[]{"*.field2"});
+ assertThat(filteredMap.size(), equalTo(1));
+ assertThat(((List) filteredMap.get("array")).size(), equalTo(2));
+ assertThat((Integer) ((List) filteredMap.get("array")).get(0), equalTo(1));
+ assertThat(((Map<String, Object>) ((List) filteredMap.get("array")).get(1)).size(), equalTo(1));
+ assertThat(((Map<String, Object>) ((List) filteredMap.get("array")).get(1)).get("field").toString(), equalTo("value"));
+ }
+
+ @SuppressWarnings("unchecked")
+ @Test
+ public void filterIncludesUsingStarPrefix() {
+ Map<String, Object> map = new HashMap<String, Object>();
+ map.put("field", "value");
+ map.put("obj",
+ new HashMap<String, Object>() {{
+ put("field", "value");
+ put("field2", "value2");
+ }});
+ map.put("n_obj",
+ new HashMap<String, Object>() {{
+ put("n_field", "value");
+ put("n_field2", "value2");
+ }});
+
+ Map<String, Object> filteredMap = XContentMapValues.filter(map, new String[]{"*.field2"}, Strings.EMPTY_ARRAY);
+ assertThat(filteredMap.size(), equalTo(1));
+ assertThat(filteredMap, hasKey("obj"));
+ assertThat(((Map<String, Object>) filteredMap.get("obj")).size(), equalTo(1));
+ assertThat(((Map<String, Object>) filteredMap.get("obj")), hasKey("field2"));
+
+ // only objects
+ filteredMap = XContentMapValues.filter(map, new String[]{"*.*"}, Strings.EMPTY_ARRAY);
+ assertThat(filteredMap.size(), equalTo(2));
+ assertThat(filteredMap, hasKey("obj"));
+ assertThat(((Map<String, Object>) filteredMap.get("obj")).size(), equalTo(2));
+ assertThat(filteredMap, hasKey("n_obj"));
+ assertThat(((Map<String, Object>) filteredMap.get("n_obj")).size(), equalTo(2));
+
+
+ filteredMap = XContentMapValues.filter(map, new String[]{"*"}, new String[]{"*.*2"});
+ assertThat(filteredMap.size(), equalTo(3));
+ assertThat(filteredMap, hasKey("field"));
+ assertThat(filteredMap, hasKey("obj"));
+ assertThat(((Map) filteredMap.get("obj")).size(), equalTo(1));
+ assertThat(((Map<String, Object>) filteredMap.get("obj")), hasKey("field"));
+ assertThat(filteredMap, hasKey("n_obj"));
+ assertThat(((Map<String, Object>) filteredMap.get("n_obj")).size(), equalTo(1));
+ assertThat(((Map<String, Object>) filteredMap.get("n_obj")), hasKey("n_field"));
+
+ }
+
+ @Test
+ public void filterWithEmptyIncludesExcludes() {
+ Map<String, Object> map = new HashMap<String, Object>();
+ map.put("field", "value");
+ Map<String, Object> filteredMap = XContentMapValues.filter(map, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY);
+ assertThat(filteredMap.size(), equalTo(1));
+ assertThat(filteredMap.get("field").toString(), equalTo("value"));
+
+ }
+
+ @SuppressWarnings({"unchecked"})
+ @Test
+ public void testThatFilterIncludesEmptyObjectWhenUsingIncludes() throws Exception {
+ XContentBuilder builder = XContentFactory.jsonBuilder().startObject()
+ .startObject("obj")
+ .endObject()
+ .endObject();
+
+ Tuple<XContentType, Map<String, Object>> mapTuple = XContentHelper.convertToMap(builder.bytes(), true);
+ Map<String, Object> filteredSource = XContentMapValues.filter(mapTuple.v2(), new String[]{"obj"}, Strings.EMPTY_ARRAY);
+
+ assertThat(mapTuple.v2(), equalTo(filteredSource));
+ }
+
+ @Test
+ public void testThatFilterIncludesEmptyObjectWhenUsingExcludes() throws Exception {
+ XContentBuilder builder = XContentFactory.jsonBuilder().startObject()
+ .startObject("obj")
+ .endObject()
+ .endObject();
+
+ Tuple<XContentType, Map<String, Object>> mapTuple = XContentHelper.convertToMap(builder.bytes(), true);
+ Map<String, Object> filteredSource = XContentMapValues.filter(mapTuple.v2(), Strings.EMPTY_ARRAY, new String[]{"nonExistingField"});
+
+ assertThat(mapTuple.v2(), equalTo(filteredSource));
+ }
+
+ @Test
+ public void testNotOmittingObjectsWithExcludedProperties() throws Exception {
+ XContentBuilder builder = XContentFactory.jsonBuilder().startObject()
+ .startObject("obj")
+ .field("f1", "v1")
+ .endObject()
+ .endObject();
+
+ Tuple<XContentType, Map<String, Object>> mapTuple = XContentHelper.convertToMap(builder.bytes(), true);
+ Map<String, Object> filteredSource = XContentMapValues.filter(mapTuple.v2(), Strings.EMPTY_ARRAY, new String[]{"obj.f1"});
+
+ assertThat(filteredSource.size(), equalTo(1));
+ assertThat(filteredSource, hasKey("obj"));
+ assertThat(((Map) filteredSource.get("obj")).size(), equalTo(0));
+ }
+
+ @SuppressWarnings({"unchecked"})
+ @Test
+ public void testNotOmittingObjectWithNestedExcludedObject() throws Exception {
+ XContentBuilder builder = XContentFactory.jsonBuilder().startObject()
+ .startObject("obj1")
+ .startObject("obj2")
+ .startObject("obj3")
+ .endObject()
+ .endObject()
+ .endObject()
+ .endObject();
+
+ // implicit include
+ Tuple<XContentType, Map<String, Object>> mapTuple = XContentHelper.convertToMap(builder.bytes(), true);
+ Map<String, Object> filteredSource = XContentMapValues.filter(mapTuple.v2(), Strings.EMPTY_ARRAY, new String[]{"*.obj2"});
+
+ assertThat(filteredSource.size(), equalTo(1));
+ assertThat(filteredSource, hasKey("obj1"));
+ assertThat(((Map) filteredSource.get("obj1")).size(), Matchers.equalTo(0));
+
+ // explicit include
+ filteredSource = XContentMapValues.filter(mapTuple.v2(), new String[]{"obj1"}, new String[]{"*.obj2"});
+ assertThat(filteredSource.size(), equalTo(1));
+ assertThat(filteredSource, hasKey("obj1"));
+ assertThat(((Map) filteredSource.get("obj1")).size(), Matchers.equalTo(0));
+
+ // wild card include
+ filteredSource = XContentMapValues.filter(mapTuple.v2(), new String[]{"*.obj2"}, new String[]{"*.obj3"});
+ assertThat(filteredSource.size(), equalTo(1));
+ assertThat(filteredSource, hasKey("obj1"));
+ assertThat(((Map<String, Object>) filteredSource.get("obj1")), hasKey("obj2"));
+ assertThat(((Map) ((Map) filteredSource.get("obj1")).get("obj2")).size(), Matchers.equalTo(0));
+ }
+
+ @SuppressWarnings({"unchecked"})
+ @Test
+ public void testIncludingObjectWithNestedIncludedObject() throws Exception {
+ XContentBuilder builder = XContentFactory.jsonBuilder().startObject()
+ .startObject("obj1")
+ .startObject("obj2")
+ .endObject()
+ .endObject()
+ .endObject();
+
+ Tuple<XContentType, Map<String, Object>> mapTuple = XContentHelper.convertToMap(builder.bytes(), true);
+ Map<String, Object> filteredSource = XContentMapValues.filter(mapTuple.v2(), new String[]{"*.obj2"}, Strings.EMPTY_ARRAY);
+
+ assertThat(filteredSource.size(), equalTo(1));
+ assertThat(filteredSource, hasKey("obj1"));
+ assertThat(((Map) filteredSource.get("obj1")).size(), equalTo(1));
+ assertThat(((Map<String, Object>) filteredSource.get("obj1")), hasKey("obj2"));
+ assertThat(((Map) ((Map) filteredSource.get("obj1")).get("obj2")).size(), equalTo(0));
+ }
+}