author     Robert Newson <rnewson@apache.org>  2022-12-05 21:20:06 +0000
committer  Robert Newson <rnewson@apache.org>  2022-12-21 19:42:27 +0000
commit     05f42060c94b144728da68ce095111d913846f47 (patch)
tree       0aead43c1fd52ee1dc3f81bc01ba72a92446b0d6
parent     ca86f86950bdadf573aaf231778fba81f8f1e236 (diff)
download   couchdb-05f42060c94b144728da68ce095111d913846f47.tar.gz
Import nouveau java application
-rw-r--r--  .gitignore  2
-rw-r--r--  Makefile  16
-rw-r--r--  java/nouveau/.github/workflows/codeql-analysis.yml  70
-rw-r--r--  java/nouveau/.github/workflows/dependency-review.yml  20
-rw-r--r--  java/nouveau/.github/workflows/maven-publish.yml  34
-rw-r--r--  java/nouveau/.github/workflows/maven.yml  26
-rw-r--r--  java/nouveau/.gitignore  7
-rw-r--r--  java/nouveau/LICENSE  201
-rw-r--r--  java/nouveau/README.md  105
-rw-r--r--  java/nouveau/TODO  27
-rw-r--r--  java/nouveau/nouveau.yaml  16
-rw-r--r--  java/nouveau/pom.xml  226
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/NouveauApplication.java  87
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/NouveauApplicationConfiguration.java  73
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/AnalyzeRequest.java  51
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/AnalyzeResponse.java  45
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/DocumentDeleteRequest.java  69
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/DocumentUpdateRequest.java  108
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/IndexDefinition.java  70
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/IndexInfo.java  54
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/SearchHit.java  64
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/SearchRequest.java  149
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/SearchResults.java  84
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/AnalyzerFactory.java  162
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/DocumentFactory.java  50
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/FileAlreadyExistsExceptionMapper.java  35
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/FileNotFoundExceptionMapper.java  35
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/IndexManager.java  467
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/NouveauQueryParser.java  120
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/NumericRangeQueryProcessor.java  78
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ParallelSearcherFactory.java  40
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/QueryParser.java  24
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/QueryParserException.java  22
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/UpdatesOutOfOrderException.java  24
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/UpdatesOutOfOrderExceptionMapper.java  33
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/BytesRefDeserializer.java  44
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/BytesRefSerializer.java  40
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/DoubleRangeDeserializer.java  48
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/DoubleRangeSerializer.java  44
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/FieldDocDeserializer.java  72
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/FieldDocSerializer.java  72
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/IndexableFieldDeserializer.java  116
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/IndexableFieldSerializer.java  84
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/LuceneModule.java  49
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/SupportedType.java  82
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/TotalHitsDeserializer.java  45
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/health/AnalyzeHealthCheck.java  45
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/health/IndexManagerHealthCheck.java  59
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/resources/AnalyzeResource.java  76
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/resources/IndexResource.java  110
-rw-r--r--  java/nouveau/src/main/java/org/apache/couchdb/nouveau/resources/SearchResource.java  249
-rw-r--r--  java/nouveau/src/main/resources/banner.txt  7
-rw-r--r--  java/nouveau/src/test/java/org/apache/couchdb/nouveau/IntegrationTest.java  125
-rw-r--r--  java/nouveau/src/test/java/org/apache/couchdb/nouveau/api/DocumentUpdateRequestTest.java  66
-rw-r--r--  java/nouveau/src/test/java/org/apache/couchdb/nouveau/api/SearchRequestTest.java  50
-rw-r--r--  java/nouveau/src/test/java/org/apache/couchdb/nouveau/core/AnalyzerFactoryTest.java  256
-rw-r--r--  java/nouveau/src/test/java/org/apache/couchdb/nouveau/core/IndexManagerTest.java  58
-rw-r--r--  java/nouveau/src/test/java/org/apache/couchdb/nouveau/core/ser/LuceneModuleTest.java  118
-rw-r--r--  java/nouveau/src/test/resources/fixtures/DocumentUpdateRequest.json  22
-rw-r--r--  java/nouveau/src/test/resources/fixtures/SearchRequest.json  17
-rw-r--r--  java/nouveau/src/test/resources/test-nouveau.yaml  3
61 files changed, 4650 insertions, 1 deletions
diff --git a/.gitignore b/.gitignore
index 816ece6d4..4fdaaea30 100644
--- a/.gitignore
+++ b/.gitignore
@@ -14,7 +14,7 @@
.rebar/
.eunit/
cover/
-core
+/core
debian/
log
apache-couchdb-*/
diff --git a/Makefile b/Makefile
index 34562d1b9..b25d0ec18 100644
--- a/Makefile
+++ b/Makefile
@@ -509,3 +509,19 @@ derived:
@echo "ON_TAG: $(ON_TAG)"
@echo "REL_TAG: $(REL_TAG)"
@echo "SUB_VSN: $(SUB_VSN)"
+
+################################################################################
+# Nouveau
+################################################################################
+
+.PHONY: nouveau
+nouveau:
+ @cd java/nouveau && mvn
+
+.PHONY: nouveau-clean
+nouveau-clean:
+ @cd java/nouveau && mvn clean
+
+.PHONY: nouveau-start
+nouveau-start: nouveau
+ @cd java/nouveau && java -jar target/nouveau-*.jar server nouveau.yaml
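The three targets above are thin wrappers around Maven; a minimal usage sketch from the repository root, assuming Maven and a JDK 11+ are on the PATH (bare `mvn` runs the pom's default goal, `package`):

```sh
# Build the shaded nouveau jar under java/nouveau/target.
make nouveau

# Remove Maven build artifacts.
make nouveau-clean

# Build if necessary, then start the Dropwizard server with the bundled nouveau.yaml.
make nouveau-start
```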
diff --git a/java/nouveau/.github/workflows/codeql-analysis.yml b/java/nouveau/.github/workflows/codeql-analysis.yml
new file mode 100644
index 000000000..66f990502
--- /dev/null
+++ b/java/nouveau/.github/workflows/codeql-analysis.yml
@@ -0,0 +1,70 @@
+# For most projects, this workflow file will not need changing; you simply need
+# to commit it to your repository.
+#
+# You may wish to alter this file to override the set of languages analyzed,
+# or to provide custom queries or build logic.
+#
+# ******** NOTE ********
+# We have attempted to detect the languages in your repository. Please check
+# the `language` matrix defined below to confirm you have the correct set of
+# supported CodeQL languages.
+#
+name: "CodeQL"
+
+on:
+ push:
+ branches: [ main ]
+ pull_request:
+ # The branches below must be a subset of the branches above
+ branches: [ main ]
+ schedule:
+ - cron: '26 16 * * 3'
+
+jobs:
+ analyze:
+ name: Analyze
+ runs-on: ubuntu-latest
+ permissions:
+ actions: read
+ contents: read
+ security-events: write
+
+ strategy:
+ fail-fast: false
+ matrix:
+ language: [ 'java' ]
+ # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
+ # Learn more about CodeQL language support at https://git.io/codeql-language-support
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v3
+
+ # Initializes the CodeQL tools for scanning.
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v2
+ with:
+ languages: ${{ matrix.language }}
+ # If you wish to specify custom queries, you can do so here or in a config file.
+ # By default, queries listed here will override any specified in a config file.
+ # Prefix the list here with "+" to use these queries and those in the config file.
+ # queries: ./path/to/local/query, your-org/your-repo/queries@main
+
+ # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
+ # If this step fails, then you should remove it and run the build manually (see below)
+ - name: Autobuild
+ uses: github/codeql-action/autobuild@v2
+
+ # ℹ️ Command-line programs to run using the OS shell.
+ # 📚 https://git.io/JvXDl
+
+ # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
+ # and modify them (or add more) to build your code if your project
+ # uses a compiled language
+
+ #- run: |
+ # make bootstrap
+ # make release
+
+ - name: Perform CodeQL Analysis
+ uses: github/codeql-action/analyze@v2
diff --git a/java/nouveau/.github/workflows/dependency-review.yml b/java/nouveau/.github/workflows/dependency-review.yml
new file mode 100644
index 000000000..0e72a00ef
--- /dev/null
+++ b/java/nouveau/.github/workflows/dependency-review.yml
@@ -0,0 +1,20 @@
+# Dependency Review Action
+#
+# This Action will scan dependency manifest files that change as part of a Pull Request, surfacing known-vulnerable versions of the packages declared or updated in the PR. Once installed, if the workflow run is marked as required, PRs introducing known-vulnerable packages will be blocked from merging.
+#
+# Source repository: https://github.com/actions/dependency-review-action
+# Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement
+name: 'Dependency Review'
+on: [pull_request]
+
+permissions:
+ contents: read
+
+jobs:
+ dependency-review:
+ runs-on: ubuntu-latest
+ steps:
+ - name: 'Checkout Repository'
+ uses: actions/checkout@v3
+ - name: 'Dependency Review'
+ uses: actions/dependency-review-action@v1
diff --git a/java/nouveau/.github/workflows/maven-publish.yml b/java/nouveau/.github/workflows/maven-publish.yml
new file mode 100644
index 000000000..dab69fef7
--- /dev/null
+++ b/java/nouveau/.github/workflows/maven-publish.yml
@@ -0,0 +1,34 @@
+# This workflow will build a package using Maven and then publish it to GitHub packages when a release is created
+# For more information see: https://github.com/actions/setup-java/blob/main/docs/advanced-usage.md#apache-maven-with-a-settings-path
+
+name: Maven Package
+
+on:
+ release:
+ types: [created]
+
+jobs:
+ build:
+
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ packages: write
+
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up JDK 11
+ uses: actions/setup-java@v3
+ with:
+ java-version: '11'
+ distribution: 'temurin'
+ server-id: github # Value of the distributionManagement/repository/id field of the pom.xml
+ settings-path: ${{ github.workspace }} # location for the settings.xml file
+
+ - name: Build with Maven
+ run: mvn -B package --file pom.xml
+
+ - name: Publish to GitHub Packages Apache Maven
+ run: mvn deploy -s $GITHUB_WORKSPACE/settings.xml
+ env:
+ GITHUB_TOKEN: ${{ github.token }}
diff --git a/java/nouveau/.github/workflows/maven.yml b/java/nouveau/.github/workflows/maven.yml
new file mode 100644
index 000000000..4bf9282a6
--- /dev/null
+++ b/java/nouveau/.github/workflows/maven.yml
@@ -0,0 +1,26 @@
+# This workflow will build a Java project with Maven, and cache/restore any dependencies to improve the workflow execution time
+# For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven
+
+name: Java CI with Maven
+
+on:
+ push:
+ branches: [ main ]
+ pull_request:
+ branches: [ main ]
+
+jobs:
+ build:
+
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up JDK 11
+ uses: actions/setup-java@v3
+ with:
+ java-version: '11'
+ distribution: 'temurin'
+ cache: maven
+ - name: Build with Maven
+ run: mvn -B package --file pom.xml
diff --git a/java/nouveau/.gitignore b/java/nouveau/.gitignore
new file mode 100644
index 000000000..89034c41f
--- /dev/null
+++ b/java/nouveau/.gitignore
@@ -0,0 +1,7 @@
+*~
+.classpath
+.project
+.settings/
+target/
+.vscode/
+dependency-reduced-pom.xml
diff --git a/java/nouveau/LICENSE b/java/nouveau/LICENSE
new file mode 100644
index 000000000..261eeb9e9
--- /dev/null
+++ b/java/nouveau/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/java/nouveau/README.md b/java/nouveau/README.md
new file mode 100644
index 000000000..cd56a8de3
--- /dev/null
+++ b/java/nouveau/README.md
@@ -0,0 +1,105 @@
+# nouveau
+Lucene 9 + Dropwizard = Maybe a good search option for Apache CouchDB?
+
+Nouveau is an experimental search extension for CouchDB 3.x.
+
+## What works?
+
+* you can define a default analyzer and different analyzers by field name.
+* sorting on text and numbers
+* classic lucene query syntax
+* count and range facets
+* cursor support for paginating efficiently through large results sets
+* indexes automatically deleted if database is deleted (as long as nouveau is running!)
+* integration with ken
+* update=false
+* support for stale=ok
+* integration with mango
+
+## What doesn't work yet?
+
+* include_docs=true
+* No support for results grouping
+* No support to configure stop words for analyzers
+
+## Why is this better than dreyfus/clouseau?
+
+* No scalang (or Scala!)
+* Supports any version of Java that Lucene 9 supports
+* memory-mapped I/O for performance
+* direct I/O used for segment merging (so we don't evict useful data from disk cache)
+* It's new and shiny.
+
+## Erlang side
+
+You'll need to run a fork of couchdb: https://github.com/rnewson/couchdb-nouveau
+
+## Getting started
+
+Build Nouveau with;
+
+`mvn package`
+
+Run Nouveau with;
+
+`java -jar target/nouveau-*.jar server nouveau.yaml`
+
+Now run CouchDB using the 'nouveau' branch of my fork at https://github.com/rnewson/couchdb-nouveau;
+
+`make && dev/run --admin=foo:bar`
+
+Make a database with some data and an index definition;
+
+```
+#!/bin/sh
+
+URL="http://foo:bar@127.0.0.1:15984/foo"
+
+curl -X DELETE "$URL"
+curl -X PUT "$URL?n=3&q=16"
+
+curl -X PUT "$URL/_design/foo" -d '{"nouveau":{"bar":{"default_analyzer":"standard", "field_analyzers":{"foo":"english"}, "index":"function(doc) { index(\"foo\", \"bar\", \"string\"); index(\"foo\", \"bar\", \"stored_string\"); }"}}}'
+
+# curl "$URL/_index" -Hcontent-type:application/json -d '{"type":"nouveau", "index": {"fields": [{"name": "bar", "type":"number"}]}}'
+
+for I in {1..100}; do
+ DOCID=$RANDOM
+ DOCID=$[ $DOCID % 100000 ]
+ BAR=$RANDOM
+ BAR=$[ $BAR % 100000 ]
+ curl -X PUT "$URL/doc$DOCID" -d "{\"bar\": $BAR}"
+done
+```
+
+In order not to collide with `dreyfus` I've hooked Nouveau in with some uglier paths for now;
+
+`curl 'foo:bar@localhost:15984/foo/_design/foo/_nouveau/bar?q=*:*'`
+
+This will cause Nouveau to build indexes for each copy (N) and each
+shard range (Q) and then perform a search and return the results. Most
+of the query syntax works, as does sorting on strings and numbers
+(`sort=["fieldnamehere<string>"]` or `sort=["fieldnamehere<number>"]`,
+defaulting to number).
+
+## Facet support
+
+Counts for string fields and ranges for numeric fields;
+
+```
+curl 'foo:bar@localhost:15984/foo/_design/foo/_nouveau/bar?q=*:*&limit=1&ranges={"bar":[{"label":"cheap","min":0,"max":100}]}&counts=["foo"]' -g
+```
+
+## Index function
+
+To ease migration, nouveau index functions can call the `index` function exactly as it exists in dreyfus, but it also supports a new style; a short sketch follows the table below.
+
+| Arguments | Effect
+| :------------------------------------------------- | :-----
+| index("foo", "bar"); | adds a TextField.
+| index("foo", "bar", {"store":true}); | adds a TextField and a StoredField.
+| index("foo", "bar", {"store":true, "facet":true}); | adds a TextField, a StoredField and a SortedSetDocValuesField.
+| index("foo", "bar", "text"); | adds a TextField.
+| index("foo", "bar", "string"); | adds a StringField.
+| index("foo", "bar", "stored_string"); | adds a StoredField.
+| index("foo", "bar", "sorted_set_dv"); | adds a SortedSetDocValuesField.
+| index("foo", "bar", "string", true); | adds a TextField with Store.YES
diff --git a/java/nouveau/TODO b/java/nouveau/TODO
new file mode 100644
index 000000000..4e814048c
--- /dev/null
+++ b/java/nouveau/TODO
@@ -0,0 +1,27 @@
+targeted dreyfus feature parity
+
+* pagination (bookmark I guess)
+* grouping
+* faceting
+* partitioned db support
+* ken integration
+* delete indexes on db deletion
+
+not targeted
+
+* stale=ok
+* highlighting
+* drilldown
+
+After reaching dreyfus parity, nouveau will diverge;
+
+* no javascript eval
+* ddoc will require an index schema, mapping couchdb document fields to lucene fields
+* spatial-extras and spatial3d support
+
+interim ideas
+
+* append type to field name, so `index("foo", 12.0)` becomes `new DoublePoint("foo<number>", 12.0)`
+* set a special Map to setPointsConfigMap() which examines that suffix and returns a PointsConfig for <number>
+
+* in nouveau branch of couchdb, remove dreyfus entirely and put nouveau at _search_analyze, _search, \ No newline at end of file
diff --git a/java/nouveau/nouveau.yaml b/java/nouveau/nouveau.yaml
new file mode 100644
index 000000000..a6bd00f6e
--- /dev/null
+++ b/java/nouveau/nouveau.yaml
@@ -0,0 +1,16 @@
+maxIndexesOpen: 100
+commitIntervalSeconds: 30
+idleSeconds: 60
+
+
+
+server:
+ applicationConnectors:
+ - type: h2c
+ port: 8080
+ maxConcurrentStreams: 1024
+ initialStreamRecvWindow: 65535
+ gzip:
+ includedMethods:
+ - GET
+ - POST
diff --git a/java/nouveau/pom.xml b/java/nouveau/pom.xml
new file mode 100644
index 000000000..2dbf72976
--- /dev/null
+++ b/java/nouveau/pom.xml
@@ -0,0 +1,226 @@
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.couchdb</groupId>
+ <artifactId>nouveau</artifactId>
+ <version>0.2.0-SNAPSHOT</version>
+ <name>${project.artifactId}</name>
+ <description>Full-text indexing for CouchDB</description>
+ <inceptionYear>2022</inceptionYear>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+ <maven.compiler.source>11</maven.compiler.source>
+ <maven.compiler.target>11</maven.compiler.target>
+ <argLine>-Duser.language=en -Duser.region=US -Duser.timezone=UTC</argLine>
+ <dropwizard.version>2.1.4</dropwizard.version>
+ <lucene.version>9.4.2</lucene.version>
+ <slf4j.version>1.7.32</slf4j.version>
+ <junit5.version>5.8.2</junit5.version>
+ </properties>
+
+ <dependencyManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.junit</groupId>
+ <artifactId>junit-bom</artifactId>
+ <version>${junit5.version}</version>
+ <type>pom</type>
+ <scope>import</scope>
+ </dependency>
+ <dependency>
+ <groupId>io.dropwizard</groupId>
+ <artifactId>dropwizard-dependencies</artifactId>
+ <version>${dropwizard.version}</version>
+ <type>pom</type>
+ <scope>import</scope>
+ </dependency>
+ </dependencies>
+ </dependencyManagement>
+
+ <dependencies>
+ <!-- Lucene -->
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-core</artifactId>
+ <version>${lucene.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-grouping</artifactId>
+ <version>${lucene.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-queryparser</artifactId>
+ <version>${lucene.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-analysis-common</artifactId>
+ <version>${lucene.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-analysis-stempel</artifactId>
+ <version>${lucene.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-analysis-smartcn</artifactId>
+ <version>${lucene.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-analysis-kuromoji</artifactId>
+ <version>${lucene.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-facet</artifactId>
+ <version>${lucene.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-spatial-extras</artifactId>
+ <version>${lucene.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.lucene</groupId>
+ <artifactId>lucene-misc</artifactId>
+ <version>${lucene.version}</version>
+ </dependency>
+
+ <!-- Dropwizard -->
+ <dependency>
+ <groupId>io.dropwizard</groupId>
+ <artifactId>dropwizard-core</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>io.dropwizard</groupId>
+ <artifactId>dropwizard-http2</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>com.fasterxml.jackson.module</groupId>
+ <artifactId>jackson-module-afterburner</artifactId>
+ </dependency>
+
+ <!-- Test -->
+ <dependency>
+ <groupId>io.dropwizard</groupId>
+ <artifactId>dropwizard-testing</artifactId>
+ <scope>test</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>org.junit.jupiter</groupId>
+ <artifactId>junit-jupiter</artifactId>
+ <version>${junit5.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.junit.jupiter</groupId>
+ <artifactId>junit-jupiter-engine</artifactId>
+ <version>${junit5.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.assertj</groupId>
+ <artifactId>assertj-core</artifactId>
+ <version>3.22.0</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+
+ <scm>
+ <developerConnection>scm:git:git@github.com:rnewson/nouveau.git</developerConnection>
+ <tag>HEAD</tag>
+ </scm>
+
+ <build>
+ <defaultGoal>package</defaultGoal>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-release-plugin</artifactId>
+ <version>3.0.0-M5</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-shade-plugin</artifactId>
+ <version>2.3</version>
+ <configuration>
+ <createDependencyReducedPom>true</createDependencyReducedPom>
+ <filters>
+ <filter>
+ <artifact>*:*</artifact>
+ <excludes>
+ <exclude>META-INF/*.SF</exclude>
+ <exclude>META-INF/*.DSA</exclude>
+ <exclude>META-INF/*.RSA</exclude>
+ </excludes>
+ </filter>
+ </filters>
+ </configuration>
+ <executions>
+ <execution>
+ <phase>package</phase>
+ <goals>
+ <goal>shade</goal>
+ </goals>
+ <configuration>
+ <transformers>
+ <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
+ <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+ <mainClass>org.apache.couchdb.nouveau.NouveauApplication</mainClass>
+ </transformer>
+ </transformers>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-jar-plugin</artifactId>
+ <version>2.4</version>
+ <configuration>
+ <archive>
+ <manifest>
+ <addDefaultImplementationEntries>true</addDefaultImplementationEntries>
+ </manifest>
+ </archive>
+ </configuration>
+ </plugin>
+ <plugin>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <version>2.22.2</version>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-javadoc-plugin</artifactId>
+ <version>3.4.0</version>
+ <configuration>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+
+</project>
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/NouveauApplication.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/NouveauApplication.java
new file mode 100644
index 000000000..f5a92fced
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/NouveauApplication.java
@@ -0,0 +1,87 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau;
+
+import java.util.concurrent.ExecutorService;
+
+import org.apache.couchdb.nouveau.core.AnalyzerFactory;
+import org.apache.couchdb.nouveau.core.DocumentFactory;
+import org.apache.couchdb.nouveau.core.FileAlreadyExistsExceptionMapper;
+import org.apache.couchdb.nouveau.core.FileNotFoundExceptionMapper;
+import org.apache.couchdb.nouveau.core.IndexManager;
+import org.apache.couchdb.nouveau.core.ParallelSearcherFactory;
+import org.apache.couchdb.nouveau.core.UpdatesOutOfOrderExceptionMapper;
+import org.apache.couchdb.nouveau.core.ser.LuceneModule;
+import org.apache.couchdb.nouveau.health.AnalyzeHealthCheck;
+import org.apache.couchdb.nouveau.health.IndexManagerHealthCheck;
+import org.apache.couchdb.nouveau.resources.AnalyzeResource;
+import org.apache.couchdb.nouveau.resources.IndexResource;
+import org.apache.couchdb.nouveau.resources.SearchResource;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.module.afterburner.AfterburnerModule;
+
+import io.dropwizard.Application;
+import io.dropwizard.setup.Environment;
+
+public class NouveauApplication extends Application<NouveauApplicationConfiguration> {
+
+ public static void main(String[] args) throws Exception {
+ new NouveauApplication().run(args);
+ }
+
+ @Override
+ public String getName() {
+ return "Nouveau";
+ }
+
+ @Override
+ public void run(NouveauApplicationConfiguration configuration, Environment environment) throws Exception {
+ final DocumentFactory documentFactory = new DocumentFactory();
+ final AnalyzerFactory analyzerFactory = new AnalyzerFactory();
+
+ final ExecutorService searchExecutor =
+ environment.lifecycle().executorService("searches").build();
+
+ final ParallelSearcherFactory searcherFactory = new ParallelSearcherFactory();
+ searcherFactory.setExecutor(searchExecutor);
+
+ final ObjectMapper objectMapper = environment.getObjectMapper();
+ objectMapper.registerModule(new AfterburnerModule());
+ objectMapper.registerModule(new LuceneModule());
+
+ final IndexManager indexManager = new IndexManager();
+ indexManager.setRootDir(configuration.getRootDir());
+ indexManager.setMaxIndexesOpen(configuration.getMaxIndexesOpen());
+ indexManager.setCommitIntervalSeconds(configuration.getCommitIntervalSeconds());
+ indexManager.setIdleSeconds(configuration.getIdleSeconds());
+ indexManager.setAnalyzerFactory(analyzerFactory);
+ indexManager.setObjectMapper(objectMapper);
+ indexManager.setSearcherFactory(searcherFactory);
+ environment.lifecycle().manage(indexManager);
+
+ environment.jersey().register(new FileNotFoundExceptionMapper());
+ environment.jersey().register(new FileAlreadyExistsExceptionMapper());
+ environment.jersey().register(new UpdatesOutOfOrderExceptionMapper());
+
+ final AnalyzeResource analyzeResource = new AnalyzeResource(analyzerFactory);
+ environment.jersey().register(analyzeResource);
+ environment.jersey().register(new IndexResource(indexManager, documentFactory));
+ environment.jersey().register(new SearchResource(indexManager));
+
+ // health checks
+ environment.healthChecks().register("analyzeResource", new AnalyzeHealthCheck(analyzeResource));
+ environment.healthChecks().register("indexManager", new IndexManagerHealthCheck(indexManager));
+ }
+
+}
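Once the server is running, the two health checks registered in run() are reachable through Dropwizard's admin connector; a quick probe, assuming the stock Dropwizard 2.x admin port of 8081 (nouveau.yaml only overrides the application connector):

```sh
# Expect JSON entries for "analyzeResource" and "indexManager",
# the names registered in NouveauApplication.run() above.
curl -s http://localhost:8081/healthcheck
```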
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/NouveauApplicationConfiguration.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/NouveauApplicationConfiguration.java
new file mode 100644
index 000000000..7d95bacd4
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/NouveauApplicationConfiguration.java
@@ -0,0 +1,73 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau;
+
+import java.nio.file.Path;
+
+import javax.validation.constraints.Min;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import io.dropwizard.Configuration;
+
+public class NouveauApplicationConfiguration extends Configuration {
+
+ @Min(10)
+ private int maxIndexesOpen = -1;
+
+ @Min(10)
+ private int commitIntervalSeconds = -1;
+
+ @Min(30)
+ private int idleSeconds = -1;
+
+ private Path rootDir = Path.of("target/indexes");
+
+ @JsonProperty
+ public void setMaxIndexesOpen(int maxIndexesOpen) {
+ this.maxIndexesOpen = maxIndexesOpen;
+ }
+
+ public int getMaxIndexesOpen() {
+ return maxIndexesOpen;
+ }
+
+ @JsonProperty
+ public void setCommitIntervalSeconds(int commitIntervalSeconds) {
+ this.commitIntervalSeconds = commitIntervalSeconds;
+ }
+
+ public int getCommitIntervalSeconds() {
+ return commitIntervalSeconds;
+ }
+
+ @JsonProperty
+ public void setIdleSeconds(int idleSeconds) {
+ this.idleSeconds = idleSeconds;
+ }
+
+ public int getIdleSeconds() {
+ return idleSeconds;
+ }
+
+ @JsonProperty
+ public void setRootDir(Path rootDir) {
+ this.rootDir = rootDir;
+ }
+
+ public Path getRootDir() {
+ return rootDir;
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/AnalyzeRequest.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/AnalyzeRequest.java
new file mode 100644
index 000000000..5f3fcd1e9
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/AnalyzeRequest.java
@@ -0,0 +1,51 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.api;
+
+import javax.validation.constraints.NotEmpty;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import io.dropwizard.jackson.JsonSnakeCase;
+
+@JsonSnakeCase
+public class AnalyzeRequest {
+
+ @NotEmpty
+ private String analyzer;
+
+ @NotEmpty
+ private String text;
+
+ @SuppressWarnings("unused")
+ public AnalyzeRequest() {
+ // Jackson deserialization
+ }
+
+ public AnalyzeRequest(final String analyzer, final String text) {
+ this.analyzer = analyzer;
+ this.text = text;
+ }
+
+ @JsonProperty
+ public String getAnalyzer() {
+ return analyzer;
+ }
+
+ @JsonProperty
+ public String getText() {
+ return text;
+ }
+
+}
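For orientation, a request body matching the two validated fields of AnalyzeRequest; the body is hypothetical, and the URL path is defined by AnalyzeResource (not part of this excerpt), so no endpoint is assumed.

```sh
# Hypothetical body only; both fields are @NotEmpty, and "english" is an
# analyzer name the README's example already uses.
cat > analyze-request.json <<'EOF'
{
  "analyzer": "english",
  "text": "The quick brown fox jumps over the lazy dog"
}
EOF
```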
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/AnalyzeResponse.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/AnalyzeResponse.java
new file mode 100644
index 000000000..6cc006c7a
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/AnalyzeResponse.java
@@ -0,0 +1,45 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.api;
+
+import java.util.List;
+
+import javax.validation.constraints.NotEmpty;
+import javax.validation.constraints.NotNull;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import io.dropwizard.jackson.JsonSnakeCase;
+
+@JsonSnakeCase
+public class AnalyzeResponse {
+
+ @NotNull
+ private List<@NotEmpty String> tokens;
+
+ @SuppressWarnings("unused")
+ public AnalyzeResponse() {
+ // Jackson deserialization
+ }
+
+ public AnalyzeResponse(List<String> tokens) {
+ this.tokens = tokens;
+ }
+
+ @JsonProperty
+ public List<String> getTokens() {
+ return tokens;
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/DocumentDeleteRequest.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/DocumentDeleteRequest.java
new file mode 100644
index 000000000..d8c8332d4
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/DocumentDeleteRequest.java
@@ -0,0 +1,69 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.api;
+
+import javax.validation.constraints.Min;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import io.dropwizard.jackson.JsonSnakeCase;
+
+@JsonSnakeCase
+public class DocumentDeleteRequest {
+
+ @Min(1)
+ private long seq;
+
+ @SuppressWarnings("unused")
+ public DocumentDeleteRequest() {
+ // Jackson deserialization
+ }
+
+ public DocumentDeleteRequest(long seq) {
+ this.seq = seq;
+ }
+
+ @JsonProperty
+ public long getSeq() {
+ return seq;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + (int) (seq ^ (seq >>> 32));
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+ if (getClass() != obj.getClass())
+ return false;
+ DocumentDeleteRequest other = (DocumentDeleteRequest) obj;
+ if (seq != other.seq)
+ return false;
+ return true;
+ }
+
+ @Override
+ public String toString() {
+ return "DocumentDeleteRequest [seq=" + seq + "]";
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/DocumentUpdateRequest.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/DocumentUpdateRequest.java
new file mode 100644
index 000000000..d312b9d7f
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/DocumentUpdateRequest.java
@@ -0,0 +1,108 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.api;
+
+import java.util.Collection;
+
+import javax.validation.constraints.Min;
+import javax.validation.constraints.NotEmpty;
+import javax.validation.constraints.NotNull;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import org.apache.lucene.index.IndexableField;
+
+import io.dropwizard.jackson.JsonSnakeCase;
+
+@JsonSnakeCase
+public class DocumentUpdateRequest {
+
+ @Min(1)
+ private long seq;
+
+ private String partition;
+
+ @NotEmpty
+ private Collection<@NotNull IndexableField> fields;
+
+ @SuppressWarnings("unused")
+ public DocumentUpdateRequest() {
+ // Jackson deserialization
+ }
+
+ public DocumentUpdateRequest(long seq, String partition, Collection<IndexableField> fields) {
+ this.seq = seq;
+ this.partition = partition;
+ this.fields = fields;
+ }
+
+ @JsonProperty
+ public long getSeq() {
+ return seq;
+ }
+
+ @JsonProperty
+ public String getPartition() {
+ return partition;
+ }
+
+ public boolean hasPartition() {
+ return partition != null;
+ }
+
+ @JsonProperty
+ public Collection<IndexableField> getFields() {
+ return fields;
+ }
+
+ @Override
+ public int hashCode() {
+ final int prime = 31;
+ int result = 1;
+ result = prime * result + (int) (seq ^ (seq >>> 32));
+ result = prime * result + ((partition == null) ? 0 : partition.hashCode());
+ result = prime * result + ((fields == null) ? 0 : fields.hashCode());
+ return result;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj)
+ return true;
+ if (obj == null)
+ return false;
+ if (getClass() != obj.getClass())
+ return false;
+ DocumentUpdateRequest other = (DocumentUpdateRequest) obj;
+ if (seq != other.seq)
+ return false;
+ if (partition == null) {
+ if (other.partition != null)
+ return false;
+ } else if (!partition.equals(other.partition))
+ return false;
+ if (fields == null) {
+ if (other.fields != null)
+ return false;
+ } else if (!fields.equals(other.fields))
+ return false;
+ return true;
+ }
+
+ @Override
+ public String toString() {
+ return "DocumentUpdateRequest [seq=" + seq + ", partition=" + partition + ", fields=" + fields + "]";
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/IndexDefinition.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/IndexDefinition.java
new file mode 100644
index 000000000..265a8c021
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/IndexDefinition.java
@@ -0,0 +1,70 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.api;
+
+import java.util.Map;
+
+import javax.validation.constraints.NotEmpty;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import io.dropwizard.jackson.JsonSnakeCase;
+
+@JsonSnakeCase
+public class IndexDefinition {
+
+ @NotEmpty
+ private String defaultAnalyzer;
+
+ private Map<@NotEmpty String, @NotEmpty String> fieldAnalyzers;
+
+ @SuppressWarnings("unused")
+ public IndexDefinition() {
+ // Jackson deserialization
+ }
+
+ public IndexDefinition(final String defaultAnalyzer, final Map<String, String> fieldAnalyzers) {
+ this.defaultAnalyzer = defaultAnalyzer;
+ this.fieldAnalyzers = fieldAnalyzers;
+ }
+
+ @JsonProperty
+ public String getDefaultAnalyzer() {
+ return defaultAnalyzer;
+ }
+
+ public void setDefaultAnalyzer(String defaultAnalyzer) {
+ this.defaultAnalyzer = defaultAnalyzer;
+ }
+
+ @JsonProperty
+ public Map<String, String> getFieldAnalyzers() {
+ return fieldAnalyzers;
+ }
+
+ public void setFieldAnalyzers(Map<String, String> fieldAnalyzers) {
+ this.fieldAnalyzers = fieldAnalyzers;
+ }
+
+ public boolean hasFieldAnalyzers() {
+ return fieldAnalyzers != null && !fieldAnalyzers.isEmpty();
+ }
+
+ @Override
+ public String toString() {
+ return "IndexDefinition [defaultAnalyzer=" + defaultAnalyzer + ", fieldAnalyzers=" + fieldAnalyzers
+ + "]";
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/IndexInfo.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/IndexInfo.java
new file mode 100644
index 000000000..2bfa096a6
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/IndexInfo.java
@@ -0,0 +1,54 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.api;
+
+import javax.validation.constraints.NotNull;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import io.dropwizard.jackson.JsonSnakeCase;
+
+@JsonSnakeCase
+public class IndexInfo {
+
+ @NotNull
+ private Long updateSeq;
+
+ @NotNull
+ private Integer numDocs;
+
+ public IndexInfo() {
+ }
+
+ public IndexInfo(final Long updateSeq, final Integer numDocs) {
+ this.updateSeq = updateSeq;
+ this.numDocs = numDocs;
+ }
+
+ @JsonProperty
+ public Integer getNumDocs() {
+ return numDocs;
+ }
+
+ @JsonProperty
+ public Long getUpdateSeq() {
+ return updateSeq;
+ }
+
+ @Override
+ public String toString() {
+ return "IndexInfo [numDocs=" + numDocs + ", updateSeq=" + updateSeq + "]";
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/SearchHit.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/SearchHit.java
new file mode 100644
index 000000000..aa8b064bb
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/SearchHit.java
@@ -0,0 +1,64 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.api;
+
+import java.util.Collection;
+
+import javax.validation.constraints.NotEmpty;
+import javax.validation.constraints.NotNull;
+
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.search.FieldDoc;
+
+import io.dropwizard.jackson.JsonSnakeCase;
+
+@JsonSnakeCase
+public class SearchHit {
+
+ @NotEmpty
+ private String id;
+
+ @NotNull
+ private FieldDoc order;
+
+ @NotNull
+ private Collection<@NotNull IndexableField> fields;
+
+ public SearchHit() {
+ }
+
+ public SearchHit(final String id, final FieldDoc order, final Collection<IndexableField> fields) {
+ this.id = id;
+ this.order = order;
+ this.fields = fields;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public FieldDoc getOrder() {
+ return order;
+ }
+
+ public Collection<IndexableField> getFields() {
+ return fields;
+ }
+
+ @Override
+ public String toString() {
+ return "SearchHit [id=" + id + ", order=" + order + ", fields=" + fields + "]";
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/SearchRequest.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/SearchRequest.java
new file mode 100644
index 000000000..2de199ce2
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/SearchRequest.java
@@ -0,0 +1,149 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.api;
+
+import java.util.List;
+import java.util.Map;
+
+import javax.validation.constraints.Max;
+import javax.validation.constraints.Min;
+import javax.validation.constraints.NotEmpty;
+import javax.validation.constraints.NotNull;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import org.apache.lucene.facet.range.DoubleRange;
+import org.apache.lucene.search.FieldDoc;
+
+import io.dropwizard.jackson.JsonSnakeCase;
+
+@JsonSnakeCase
+public class SearchRequest {
+
+ @NotNull
+ private String query;
+
+ private String partition;
+
+ @Min(1)
+ @Max(200)
+ private int limit = 25;
+
+ private List<@NotEmpty String> sort;
+
+ private List<@NotEmpty String> counts;
+
+ private Map<@NotEmpty String, List<@NotNull DoubleRange>> ranges;
+
+ private FieldDoc after;
+
+ @Min(1)
+ @Max(100)
+ private int topN = 10;
+
+ @SuppressWarnings("unused")
+ public SearchRequest() {
+ // Jackson deserialization
+ }
+
+ public void setQuery(final String query) {
+ this.query = query;
+ }
+
+ @JsonProperty
+ public String getQuery() {
+ return query;
+ }
+
+ public void setPartition(final String partition) {
+ this.partition = partition;
+ }
+
+ @JsonProperty
+ public String getPartition() {
+ return partition;
+ }
+
+ public boolean hasPartition() {
+ return partition != null;
+ }
+
+ public void setLimit(final int limit) {
+ this.limit = limit;
+ }
+
+ @JsonProperty
+ public int getLimit() {
+ return limit;
+ }
+
+ public boolean hasSort() {
+ return sort != null;
+ }
+
+ public void setSort(final List<String> sort) {
+ this.sort = sort;
+ }
+
+ @JsonProperty
+ public List<String> getSort() {
+ return sort;
+ }
+
+ public boolean hasCounts() {
+ return counts != null;
+ }
+
+ public void setCounts(final List<String> counts) {
+ this.counts = counts;
+ }
+
+ @JsonProperty
+ public List<String> getCounts() {
+ return counts;
+ }
+
+ public boolean hasRanges() {
+ return ranges != null;
+ }
+
+ public void setRanges(final Map<String, List<DoubleRange>> ranges) {
+ this.ranges = ranges;
+ }
+
+ @JsonProperty
+ public Map<String, List<DoubleRange>> getRanges() {
+ return ranges;
+ }
+
+ public void setTopN(final int topN) {
+ this.topN = topN;
+ }
+
+ @JsonProperty
+ public int getTopN() {
+ return topN;
+ }
+
+ public void setAfter(final FieldDoc after) {
+ this.after = after;
+ }
+
+ @JsonProperty
+ public FieldDoc getAfter() {
+ return after;
+ }
+
+ @Override
+ public String toString() {
+ return "SearchRequest [query=" + query + ", sort=" + sort + ", limit=" + limit + ", after=" + after + ", counts=" + counts + ", ranges=" + ranges + "]";
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/SearchResults.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/SearchResults.java
new file mode 100644
index 000000000..931adac81
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/api/SearchResults.java
@@ -0,0 +1,84 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.api;
+
+import java.util.List;
+import java.util.Map;
+
+import javax.validation.constraints.NotNull;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import io.dropwizard.jackson.JsonSnakeCase;
+
+import org.apache.lucene.search.TotalHits;
+
+@JsonSnakeCase
+public class SearchResults {
+
+ @NotNull
+ private TotalHits totalHits;
+
+ @NotNull
+ private List<@NotNull SearchHit> hits;
+
+ private Map<@NotNull String, Map<@NotNull String, Number>> counts;
+
+ private Map<@NotNull String, Map<@NotNull String, Number>> ranges;
+
+ public SearchResults() {
+ }
+
+ public void setTotalHits(final TotalHits totalHits) {
+ this.totalHits = totalHits;
+ }
+
+ @JsonProperty
+ public TotalHits getTotalHits() {
+ return totalHits;
+ }
+
+ public void setHits(final List<SearchHit> hits) {
+ this.hits = hits;
+ }
+
+ @JsonProperty
+ public List<SearchHit> getHits() {
+ return hits;
+ }
+
+ public void setCounts(final Map<String, Map<String, Number>> counts) {
+ this.counts = counts;
+ }
+
+ @JsonProperty
+ public Map<String, Map<String, Number>> getCounts() {
+ return counts;
+ }
+
+ public void setRanges(final Map<String, Map<String, Number>> ranges) {
+ this.ranges = ranges;
+ }
+
+ @JsonProperty
+ public Map<String, Map<String, Number>> getRanges() {
+ return ranges;
+ }
+
+ @Override
+ public String toString() {
+ return "SearchResults [hits=" + hits + ", totalHits=" + totalHits + ", counts=" + counts + ", ranges=" + ranges + "]";
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/AnalyzerFactory.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/AnalyzerFactory.java
new file mode 100644
index 000000000..0ad6c0311
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/AnalyzerFactory.java
@@ -0,0 +1,162 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Response.Status;
+
+import org.apache.couchdb.nouveau.api.IndexDefinition;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.ar.ArabicAnalyzer;
+import org.apache.lucene.analysis.bg.BulgarianAnalyzer;
+import org.apache.lucene.analysis.ca.CatalanAnalyzer;
+import org.apache.lucene.analysis.cjk.CJKAnalyzer;
+import org.apache.lucene.analysis.classic.ClassicAnalyzer;
+import org.apache.lucene.analysis.cn.smart.SmartChineseAnalyzer;
+import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.analysis.core.SimpleAnalyzer;
+import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.analysis.cz.CzechAnalyzer;
+import org.apache.lucene.analysis.da.DanishAnalyzer;
+import org.apache.lucene.analysis.de.GermanAnalyzer;
+import org.apache.lucene.analysis.email.UAX29URLEmailAnalyzer;
+import org.apache.lucene.analysis.en.EnglishAnalyzer;
+import org.apache.lucene.analysis.es.SpanishAnalyzer;
+import org.apache.lucene.analysis.eu.BasqueAnalyzer;
+import org.apache.lucene.analysis.fa.PersianAnalyzer;
+import org.apache.lucene.analysis.fi.FinnishAnalyzer;
+import org.apache.lucene.analysis.fr.FrenchAnalyzer;
+import org.apache.lucene.analysis.ga.IrishAnalyzer;
+import org.apache.lucene.analysis.gl.GalicianAnalyzer;
+import org.apache.lucene.analysis.hi.HindiAnalyzer;
+import org.apache.lucene.analysis.hu.HungarianAnalyzer;
+import org.apache.lucene.analysis.hy.ArmenianAnalyzer;
+import org.apache.lucene.analysis.id.IndonesianAnalyzer;
+import org.apache.lucene.analysis.it.ItalianAnalyzer;
+import org.apache.lucene.analysis.ja.JapaneseAnalyzer;
+import org.apache.lucene.analysis.lv.LatvianAnalyzer;
+import org.apache.lucene.analysis.miscellaneous.PerFieldAnalyzerWrapper;
+import org.apache.lucene.analysis.nl.DutchAnalyzer;
+import org.apache.lucene.analysis.no.NorwegianAnalyzer;
+import org.apache.lucene.analysis.pl.PolishAnalyzer;
+import org.apache.lucene.analysis.pt.PortugueseAnalyzer;
+import org.apache.lucene.analysis.ro.RomanianAnalyzer;
+import org.apache.lucene.analysis.ru.RussianAnalyzer;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.analysis.sv.SwedishAnalyzer;
+import org.apache.lucene.analysis.th.ThaiAnalyzer;
+import org.apache.lucene.analysis.tr.TurkishAnalyzer;
+
+public class AnalyzerFactory {
+
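+ // Builds the analyzer for an index definition: the default analyzer, wrapped
+ // per-field when field-specific analyzers are configured.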
+ public Analyzer fromDefinition(final IndexDefinition indexDefinition) {
+ final Analyzer defaultAnalyzer = newAnalyzer(indexDefinition.getDefaultAnalyzer());
+ if (!indexDefinition.hasFieldAnalyzers()) {
+ return defaultAnalyzer;
+ }
+ final Map<String, Analyzer> fieldAnalyzers = new HashMap<String, Analyzer>();
+ for (Map.Entry<String, String> entry : indexDefinition.getFieldAnalyzers().entrySet()) {
+ fieldAnalyzers.put(entry.getKey(), newAnalyzer(entry.getValue()));
+ }
+ return new PerFieldAnalyzerWrapper(defaultAnalyzer, fieldAnalyzers);
+ }
+
+ public Analyzer newAnalyzer(final String name) {
+ switch(name) {
+ case "keyword":
+ return new KeywordAnalyzer();
+ case "simple":
+ return new SimpleAnalyzer();
+ case "whitespace":
+ return new WhitespaceAnalyzer();
+ case "arabic":
+ return new ArabicAnalyzer();
+ case "bulgarian":
+ return new BulgarianAnalyzer();
+ case "catalan":
+ return new CatalanAnalyzer();
+ case "cjk":
+ return new CJKAnalyzer();
+ case "chinese":
+ return new SmartChineseAnalyzer();
+ case "czech":
+ return new CzechAnalyzer();
+ case "danish":
+ return new DanishAnalyzer();
+ case "german":
+ return new GermanAnalyzer();
+ case "english":
+ return new EnglishAnalyzer();
+ case "spanish":
+ return new SpanishAnalyzer();
+ case "basque":
+ return new BasqueAnalyzer();
+ case "persian":
+ return new PersianAnalyzer();
+ case "finnish":
+ return new FinnishAnalyzer();
+ case "french":
+ return new FrenchAnalyzer();
+ case "irish":
+ return new IrishAnalyzer();
+ case "galician":
+ return new GalicianAnalyzer();
+ case "hindi":
+ return new HindiAnalyzer();
+ case "hungarian":
+ return new HungarianAnalyzer();
+ case "armenian":
+ return new ArmenianAnalyzer();
+ case "indonesian":
+ return new IndonesianAnalyzer();
+ case "italian":
+ return new ItalianAnalyzer();
+ case "japanese":
+ return new JapaneseAnalyzer();
+ case "latvian":
+ return new LatvianAnalyzer();
+ case "dutch":
+ return new DutchAnalyzer();
+ case "norwegian":
+ return new NorwegianAnalyzer();
+ case "polish":
+ return new PolishAnalyzer();
+ case "portugese":
+ return new PortugueseAnalyzer();
+ case "romanian":
+ return new RomanianAnalyzer();
+ case "russian":
+ return new RussianAnalyzer();
+ case "classic":
+ return new ClassicAnalyzer();
+ case "standard":
+ return new StandardAnalyzer();
+ case "email":
+ return new UAX29URLEmailAnalyzer();
+ case "swedish":
+ return new SwedishAnalyzer();
+ case "thai":
+ return new ThaiAnalyzer();
+ case "turkish":
+ return new TurkishAnalyzer();
+ default:
+ throw new WebApplicationException(name + " is not a valid analyzer name", Status.BAD_REQUEST);
+ }
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/DocumentFactory.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/DocumentFactory.java
new file mode 100644
index 000000000..904a215c0
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/DocumentFactory.java
@@ -0,0 +1,50 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core;
+
+import java.io.IOException;
+
+import org.apache.couchdb.nouveau.api.DocumentUpdateRequest;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.SortedDocValuesField;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.util.BytesRef;
+
+public class DocumentFactory {
+
+ public Document build(final String docId, final DocumentUpdateRequest request) throws IOException {
+ final Document result = new Document();
+
+ // id
+ result.add(new StringField("_id", docId, Store.YES));
+ result.add(new SortedDocValuesField("_id", new BytesRef(docId)));
+
+ // partition (optional)
+ if (request.hasPartition()) {
+ result.add(new StringField("_partition", request.getPartition(), Store.NO));
+ }
+
+ for (IndexableField field : request.getFields()) {
+ // Underscore-prefix is reserved.
+ if (field.name().startsWith("_")) {
+ continue;
+ }
+ result.add(field);
+ }
+
+ return result;
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/FileAlreadyExistsExceptionMapper.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/FileAlreadyExistsExceptionMapper.java
new file mode 100644
index 000000000..e9ef3ed0c
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/FileAlreadyExistsExceptionMapper.java
@@ -0,0 +1,35 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core;
+
+import java.nio.file.FileAlreadyExistsException;
+
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.Status;
+import javax.ws.rs.ext.ExceptionMapper;
+
+import io.dropwizard.jersey.errors.ErrorMessage;
+
+public class FileAlreadyExistsExceptionMapper implements ExceptionMapper<FileAlreadyExistsException> {
+
+ @Override
+ public Response toResponse(final FileAlreadyExistsException exception) {
+ return Response.status(Status.EXPECTATION_FAILED)
+ .type(MediaType.APPLICATION_JSON_TYPE)
+ .entity(new ErrorMessage(Status.EXPECTATION_FAILED.getStatusCode(), "Index already exists"))
+ .build();
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/FileNotFoundExceptionMapper.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/FileNotFoundExceptionMapper.java
new file mode 100644
index 000000000..84e692df7
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/FileNotFoundExceptionMapper.java
@@ -0,0 +1,35 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core;
+
+import java.io.FileNotFoundException;
+
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.Status;
+import javax.ws.rs.ext.ExceptionMapper;
+
+import io.dropwizard.jersey.errors.ErrorMessage;
+
+public class FileNotFoundExceptionMapper implements ExceptionMapper<FileNotFoundException> {
+
+ @Override
+ public Response toResponse(final FileNotFoundException exception) {
+ return Response.status(Status.NOT_FOUND)
+ .type(MediaType.APPLICATION_JSON_TYPE)
+ .entity(new ErrorMessage(Status.NOT_FOUND.getStatusCode(), "Index does not exist"))
+ .build();
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/IndexManager.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/IndexManager.java
new file mode 100644
index 000000000..5ab636f63
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/IndexManager.java
@@ -0,0 +1,467 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core;
+
+import java.io.IOException;
+import java.nio.file.FileAlreadyExistsException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.time.Duration;
+import java.util.Collections;
+import java.util.Map;
+import java.util.concurrent.CompletionException;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.stream.Stream;
+
+import javax.validation.constraints.Min;
+import javax.validation.constraints.NotEmpty;
+import javax.validation.constraints.NotNull;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Response.Status;
+
+import org.apache.couchdb.nouveau.api.IndexDefinition;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.misc.store.DirectIODirectory;
+import org.apache.lucene.search.SearcherFactory;
+import org.apache.lucene.search.SearcherManager;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.store.LockObtainFailedException;
+import org.apache.lucene.util.IOUtils;
+import org.checkerframework.checker.nullness.qual.NonNull;
+import org.checkerframework.checker.nullness.qual.Nullable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.github.benmanes.caffeine.cache.CacheLoader;
+import com.github.benmanes.caffeine.cache.Caffeine;
+import com.github.benmanes.caffeine.cache.LoadingCache;
+import com.github.benmanes.caffeine.cache.RemovalCause;
+import com.github.benmanes.caffeine.cache.RemovalListener;
+import com.github.benmanes.caffeine.cache.Scheduler;
+
+import io.dropwizard.lifecycle.Managed;
+
+public class IndexManager implements Managed {
+
+ private static final int RETRY_LIMIT = 500;
+ private static final int RETRY_SLEEP_MS = 5;
+ private static final Logger LOGGER = LoggerFactory.getLogger(IndexManager.class);
+
+ public class Index {
+ private static final String DEFAULT_FIELD = "default";
+ private final String name;
+ private IndexWriter writer;
+ private SearcherManager searcherManager;
+ private Analyzer analyzer;
+ private final AtomicBoolean deleteOnClose = new AtomicBoolean();
+ private final AtomicLong updateSeq = new AtomicLong();
+
+ // The write lock is to ensure there are no readers/searchers when
+ // we want to close the index.
+ private ReentrantReadWriteLock rwl = new ReentrantReadWriteLock();
+ private Lock rl = rwl.readLock();
+ private Lock wl = rwl.writeLock();
+
+ private Index(
+ String name,
+ IndexWriter writer,
+ SearcherManager searcherManager,
+ Analyzer analyzer,
+ long updateSeq) {
+ this.name = name;
+ this.writer = writer;
+ this.searcherManager = searcherManager;
+ this.analyzer = analyzer;
+ this.updateSeq.set(updateSeq);
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public IndexWriter getWriter() {
+ return writer;
+ }
+
+ public SearcherManager getSearcherManager() {
+ return searcherManager;
+ }
+
+ public QueryParser getQueryParser() {
+ return new NouveauQueryParser(DEFAULT_FIELD, analyzer);
+ }
+
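+ // Records the current update_seq as commit user data and commits the writer;
+ // returns false when there was nothing new to commit.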
+ public boolean commit() throws IOException {
+ rl.lock();
+ try {
+ writer.setLiveCommitData(generateCommitData().entrySet());
+ return writer.commit() != -1;
+ } finally {
+ rl.unlock();
+ }
+ }
+
+ public long getUpdateSeq() throws IOException {
+ return updateSeq.get();
+ }
+
+ public void incrementUpdateSeq(final long updateSeq) throws IOException {
+ final long newSeq = this.updateSeq.accumulateAndGet(updateSeq, (a, b) -> Math.max(a, b));
+ if (newSeq != updateSeq) {
+ throw new UpdatesOutOfOrderException();
+ }
+ }
+
+ public void close() throws IOException {
+ wl.lock();
+ try {
+ if (writer == null) {
+ // Already closed.
+ return;
+ }
+
+ // Close searcher manager
+ if (searcherManager != null) {
+ try {
+ searcherManager.close();
+ } catch (IOException e) {
+ LOGGER.info(this + " threw exception when closing searcherManager.", e);
+ } finally {
+ searcherManager = null;
+ }
+ }
+
+ if (deleteOnClose.get()) {
+ try {
+ // No need to commit in this case.
+ writer.rollback();
+ } catch (IOException e) {
+ LOGGER.info(this + " threw exception when rolling back writer.", e);
+ } finally {
+ writer = null;
+ }
+ IOUtils.rm(indexRootPath(name));
+ } else {
+ try {
+ writer.setLiveCommitData(generateCommitData().entrySet());
+ writer.close();
+ LOGGER.info("{} closed.", this);
+ } finally {
+ writer = null;
+ }
+ }
+ } finally {
+ wl.unlock();
+ }
+ }
+
+ private Map<String, String> generateCommitData() {
+ return Collections.singletonMap("update_seq", Long.toString(updateSeq.get()));
+ }
+
+ @Override
+ public String toString() {
+ return "Index [name=" + name + "]";
+ }
+ }
+
+ private class IndexLoader implements CacheLoader<String, Index> {
+
+ @Override
+ public @Nullable Index load(@NonNull String name) throws Exception {
+ return openExistingIndex(name);
+ }
+
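+ // Called on Caffeine's refreshAfterWrite interval: commits the index
+ // periodically and reopens it if the commit fails.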
+ @Override
+ public @Nullable Index reload(@NonNull String name, @NonNull Index index) throws Exception {
+ try {
+ if (index.commit()) {
+ LOGGER.info("{} committed.", index);
+ }
+ } catch (final IOException e) {
+ LOGGER.error(index + " threw exception when committing.", e);
+ index.close();
+ return openExistingIndex(name);
+ }
+ return index;
+ }
+
+ }
+
+ private static class IndexCloser implements RemovalListener<String, Index> {
+
+ @Override
+ public void onRemoval(String name, Index index, RemovalCause cause) {
+ try {
+ index.close();
+ } catch (IOException e) {
+ LOGGER.error(index + " threw exception when closing", e);
+ }
+ }
+ }
+
+ private static final IndexCloser INDEX_CLOSER = new IndexCloser();
+
+
+ @Min(1)
+ private int maxIndexesOpen;
+
+ @Min(1)
+ private int commitIntervalSeconds;
+
+ @Min(1)
+ private int idleSeconds;
+
+ @NotEmpty
+ private Path rootDir;
+
+ @NotNull
+ private AnalyzerFactory analyzerFactory;
+
+ @NotNull
+ private ObjectMapper objectMapper;
+
+ private SearcherFactory searcherFactory;
+
+ private LoadingCache<String, Index> cache;
+
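+ // Returns the named index with its read lock held; callers must call
+ // release() when finished. Retries briefly while the index is concurrently closing.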
+ public Index acquire(final String name) throws IOException {
+ for (int i = 0; i < RETRY_LIMIT; i++) {
+ final Index result = getFromCache(name);
+
+ // Check if we're in the middle of closing.
+ result.rl.lock();
+ if (result.writer != null) {
+ return result;
+ }
+ result.rl.unlock();
+
+ // Retry after a short sleep.
+ try {
+ Thread.sleep(RETRY_SLEEP_MS);
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ break;
+ }
+ }
+ throw new IOException("Failed to acquire " + name);
+ }
+
+ public void release(final Index index) throws IOException {
+ index.rl.unlock();
+ }
+
+ public void create(final String name, IndexDefinition indexDefinition) throws IOException {
+ createNewIndex(name, indexDefinition);
+ }
+
+ public void deleteAll(final String path) throws IOException {
+ final Path rootPath = indexRootPath(path);
+ if (!rootPath.toFile().exists()) {
+ return;
+ }
+ Stream<Path> stream = Files.find(rootPath, 100,
+ (p, attr) -> attr.isDirectory() && isIndex(p));
+ try {
+ stream.forEach((p) -> {
+ try {
+ deleteIndex(rootDir.relativize(p).toString());
+ } catch (Exception e) {
+ LOGGER.error("I/O exception deleting " + p, e);
+ }
+ });
+ } finally {
+ stream.close();
+ }
+ }
+
+ private void deleteIndex(final String name) throws IOException {
+ final Index index = acquire(name);
+ try {
+ index.deleteOnClose.set(true);
+ cache.invalidate(name);
+ } finally {
+ release(index);
+ }
+ }
+
+ @JsonProperty
+ public int getMaxIndexesOpen() {
+ return maxIndexesOpen;
+ }
+
+ public void setMaxIndexesOpen(int maxIndexesOpen) {
+ this.maxIndexesOpen = maxIndexesOpen;
+ }
+
+ @JsonProperty
+ public int getCommitIntervalSeconds() {
+ return commitIntervalSeconds;
+ }
+
+ public void setCommitIntervalSeconds(int commitIntervalSeconds) {
+ this.commitIntervalSeconds = commitIntervalSeconds;
+ }
+
+ @JsonProperty
+ public int getIdleSeconds() {
+ return idleSeconds;
+ }
+
+ public void setIdleSeconds(int idleSeconds) {
+ this.idleSeconds = idleSeconds;
+ }
+
+ @JsonProperty
+ public Path getRootDir() {
+ return rootDir;
+ }
+
+ public void setRootDir(Path rootDir) {
+ this.rootDir = rootDir;
+ }
+
+ public void setAnalyzerFactory(final AnalyzerFactory analyzerFactory) {
+ this.analyzerFactory = analyzerFactory;
+ }
+
+ public void setObjectMapper(final ObjectMapper objectMapper) {
+ this.objectMapper = objectMapper;
+ }
+
+ public void setSearcherFactory(final SearcherFactory searcherFactory) {
+ this.searcherFactory = searcherFactory;
+ }
+
+ @Override
+ public void start() throws IOException {
+ cache = Caffeine.newBuilder()
+ .initialCapacity(maxIndexesOpen)
+ .maximumSize(maxIndexesOpen)
+ .expireAfterAccess(Duration.ofSeconds(idleSeconds))
+ .expireAfterWrite(Duration.ofSeconds(idleSeconds))
+ .refreshAfterWrite(Duration.ofSeconds(commitIntervalSeconds))
+ .scheduler(Scheduler.systemScheduler())
+ .removalListener(INDEX_CLOSER)
+ .evictionListener(INDEX_CLOSER)
+ .build(new IndexLoader());
+ }
+
+ @Override
+ public void stop() {
+ cache.invalidateAll();
+ }
+
+ private Index getFromCache(final String name) throws IOException {
+ try {
+ return cache.get(name);
+ } catch (CompletionException e) {
+ if (e.getCause() instanceof IOException) {
+ throw (IOException) e.getCause();
+ }
+ throw e;
+ }
+ }
+
+ private void createNewIndex(final String name, final IndexDefinition indexDefinition) throws IOException {
+ // Validate index definition
+ analyzerFactory.fromDefinition(indexDefinition);
+
+ // Persist definition
+ final Path path = indexDefinitionPath(name);
+ if (Files.exists(path)) {
+ throw new FileAlreadyExistsException(name + " already exists");
+ }
+ Files.createDirectories(path.getParent());
+ objectMapper.writeValue(path.toFile(), indexDefinition);
+ }
+
+ private Index openExistingIndex(final String name) throws IOException {
+ final IndexDefinition indexDefinition = objectMapper.readValue(indexDefinitionPath(name).toFile(), IndexDefinition.class);
+ final Analyzer analyzer = analyzerFactory.fromDefinition(indexDefinition);
+ final Path path = indexPath(name);
+ final Directory dir = directory(path);
+ final IndexWriter writer = newWriter(dir, analyzer);
+ final SearcherManager searcherManager = new SearcherManager(writer, searcherFactory);
+ final long updateSeq = getUpdateSeq(writer);
+ return new Index(name, writer, searcherManager, analyzer, updateSeq);
+ }
+
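+ // Reads the update_seq persisted as commit user data by commit()/close(); 0 for a new index.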
+ private long getUpdateSeq(final IndexWriter writer) throws IOException {
+ final Iterable<Map.Entry<String, String>> commitData = writer.getLiveCommitData();
+ if (commitData == null) {
+ return 0L;
+ }
+ for (Map.Entry<String, String> entry : commitData) {
+ if (entry.getKey().equals("update_seq")) {
+ return Long.parseLong(entry.getValue());
+ }
+ }
+ return 0L;
+ }
+
+ private IndexWriter newWriter(final Directory dir, final Analyzer analyzer) throws IOException {
+ LockObtainFailedException exceptionThrown = null;
+ for (int i = 0; i < RETRY_LIMIT; i++) {
+ try {
+ final IndexWriterConfig config = new IndexWriterConfig(analyzer);
+ config.setCommitOnClose(true);
+ config.setUseCompoundFile(false);
+ return new IndexWriter(dir, config);
+ } catch (LockObtainFailedException e) {
+ exceptionThrown = e;
+ try {
+ Thread.sleep(RETRY_SLEEP_MS);
+ } catch (InterruptedException e1) {
+ Thread.currentThread().interrupt();
+ break;
+ }
+ }
+ }
+ throw exceptionThrown;
+ }
+
+ private boolean isIndex(final Path path) {
+ return path.resolve("index_definition.json").toFile().exists();
+ }
+
+ private Path indexDefinitionPath(final String name) {
+ return indexRootPath(name).resolve("index_definition.json");
+ }
+
+ private Path indexPath(final String name) {
+ return indexRootPath(name).resolve("index");
+ }
+
+ private Path indexRootPath(final String name) {
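+ // Normalize the resolved path and confirm it is still under rootDir so an
+ // index name cannot escape the index root.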
+ final Path result = rootDir.resolve(name).normalize();
+ if (result.startsWith(rootDir)) {
+ return result;
+ }
+ throw new WebApplicationException(name + " attempts to escape from index root directory",
+ Status.BAD_REQUEST);
+ }
+
+ private Directory directory(final Path path) throws IOException {
+ return new DirectIODirectory(FSDirectory.open(path));
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/NouveauQueryParser.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/NouveauQueryParser.java
new file mode 100644
index 000000000..e5fa80718
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/NouveauQueryParser.java
@@ -0,0 +1,120 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core;
+
+import org.apache.couchdb.nouveau.api.SearchRequest;
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
+import org.apache.lucene.queryparser.flexible.core.QueryParserHelper;
+import org.apache.lucene.queryparser.flexible.core.config.QueryConfigHandler;
+import org.apache.lucene.queryparser.flexible.core.processors.NoChildOptimizationQueryNodeProcessor;
+import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorPipeline;
+import org.apache.lucene.queryparser.flexible.core.processors.RemoveDeletedQueryNodesProcessor;
+import org.apache.lucene.queryparser.flexible.standard.builders.StandardQueryTreeBuilder;
+import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler;
+import org.apache.lucene.queryparser.flexible.standard.config.StandardQueryConfigHandler.ConfigurationKeys;
+import org.apache.lucene.queryparser.flexible.standard.parser.StandardSyntaxParser;
+import org.apache.lucene.queryparser.flexible.standard.processors.AllowLeadingWildcardProcessor;
+import org.apache.lucene.queryparser.flexible.standard.processors.AnalyzerQueryNodeProcessor;
+import org.apache.lucene.queryparser.flexible.standard.processors.BooleanQuery2ModifierNodeProcessor;
+import org.apache.lucene.queryparser.flexible.standard.processors.BooleanSingleChildOptimizationQueryNodeProcessor;
+import org.apache.lucene.queryparser.flexible.standard.processors.BoostQueryNodeProcessor;
+import org.apache.lucene.queryparser.flexible.standard.processors.DefaultPhraseSlopQueryNodeProcessor;
+import org.apache.lucene.queryparser.flexible.standard.processors.FuzzyQueryNodeProcessor;
+import org.apache.lucene.queryparser.flexible.standard.processors.IntervalQueryNodeProcessor;
+import org.apache.lucene.queryparser.flexible.standard.processors.MatchAllDocsQueryNodeProcessor;
+import org.apache.lucene.queryparser.flexible.standard.processors.MultiFieldQueryNodeProcessor;
+import org.apache.lucene.queryparser.flexible.standard.processors.MultiTermRewriteMethodProcessor;
+import org.apache.lucene.queryparser.flexible.standard.processors.OpenRangeQueryNodeProcessor;
+import org.apache.lucene.queryparser.flexible.standard.processors.PhraseSlopQueryNodeProcessor;
+import org.apache.lucene.queryparser.flexible.standard.processors.PointQueryNodeProcessor;
+import org.apache.lucene.queryparser.flexible.standard.processors.RegexpQueryNodeProcessor;
+import org.apache.lucene.queryparser.flexible.standard.processors.RemoveEmptyNonLeafQueryNodeProcessor;
+import org.apache.lucene.queryparser.flexible.standard.processors.TermRangeQueryNodeProcessor;
+import org.apache.lucene.queryparser.flexible.standard.processors.WildcardQueryNodeProcessor;
+import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+
+public class NouveauQueryParser extends QueryParserHelper implements QueryParser {
+
+ private static class NouveauQueryNodeProcessorPipeline extends QueryNodeProcessorPipeline {
+
+ public NouveauQueryNodeProcessorPipeline(QueryConfigHandler queryConfig) {
+ super(queryConfig);
+
+ add(new WildcardQueryNodeProcessor());
+ add(new MultiFieldQueryNodeProcessor());
+ add(new FuzzyQueryNodeProcessor());
+ add(new RegexpQueryNodeProcessor());
+ add(new MatchAllDocsQueryNodeProcessor());
+ add(new OpenRangeQueryNodeProcessor());
+ add(new PointQueryNodeProcessor());
+ add(new NumericRangeQueryProcessor());
+ add(new TermRangeQueryNodeProcessor());
+ add(new AllowLeadingWildcardProcessor());
+ add(new AnalyzerQueryNodeProcessor());
+ add(new PhraseSlopQueryNodeProcessor());
+ add(new BooleanQuery2ModifierNodeProcessor());
+ add(new NoChildOptimizationQueryNodeProcessor());
+ add(new RemoveDeletedQueryNodesProcessor());
+ add(new RemoveEmptyNonLeafQueryNodeProcessor());
+ add(new BooleanSingleChildOptimizationQueryNodeProcessor());
+ add(new DefaultPhraseSlopQueryNodeProcessor());
+ add(new BoostQueryNodeProcessor());
+ add(new MultiTermRewriteMethodProcessor());
+ add(new IntervalQueryNodeProcessor());
+ }
+
+ }
+
+ private final String defaultField;
+
+ public NouveauQueryParser(final String defaultField, final Analyzer analyzer) {
+ super(
+ new StandardQueryConfigHandler(),
+ new StandardSyntaxParser(),
+ new NouveauQueryNodeProcessorPipeline(null),
+ new StandardQueryTreeBuilder());
+ setEnablePositionIncrements(true);
+ this.setAnalyzer(analyzer);
+ this.defaultField = defaultField;
+ }
+
+ public void setAnalyzer(Analyzer analyzer) {
+ getQueryConfigHandler().set(ConfigurationKeys.ANALYZER, analyzer);
+ }
+
+ public void setEnablePositionIncrements(boolean enabled) {
+ getQueryConfigHandler().set(ConfigurationKeys.ENABLE_POSITION_INCREMENTS, enabled);
+ }
+
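+ // Parses the query string against the default field; for partitioned requests
+ // the parsed query is combined with a required _partition term.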
+ public Query parse(SearchRequest searchRequest) throws QueryParserException {
+ try {
+ final Query q = (Query) parse(searchRequest.getQuery(), defaultField);
+ if (searchRequest.hasPartition()) {
+ final BooleanQuery.Builder builder = new BooleanQuery.Builder();
+ builder.add(new TermQuery(new Term("_partition", searchRequest.getPartition())), Occur.MUST);
+ builder.add(q, Occur.MUST);
+ return builder.build();
+ }
+ return q;
+ } catch (QueryNodeException e) {
+ throw new QueryParserException(e);
+ }
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/NumericRangeQueryProcessor.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/NumericRangeQueryProcessor.java
new file mode 100644
index 000000000..8618e1721
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/NumericRangeQueryProcessor.java
@@ -0,0 +1,78 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core;
+
+import java.text.NumberFormat;
+import java.text.ParsePosition;
+import java.util.List;
+
+import org.apache.lucene.queryparser.flexible.core.QueryNodeException;
+import org.apache.lucene.queryparser.flexible.core.nodes.FieldQueryNode;
+import org.apache.lucene.queryparser.flexible.core.nodes.QueryNode;
+import org.apache.lucene.queryparser.flexible.core.processors.QueryNodeProcessorImpl;
+import org.apache.lucene.queryparser.flexible.standard.config.PointsConfig;
+import org.apache.lucene.queryparser.flexible.standard.nodes.PointQueryNode;
+import org.apache.lucene.queryparser.flexible.standard.nodes.PointRangeQueryNode;
+import org.apache.lucene.queryparser.flexible.standard.nodes.TermRangeQueryNode;
+
+public class NumericRangeQueryProcessor extends QueryNodeProcessorImpl {
+
+ // TODO don't like that this is locale-dependent.
+ private final NumberFormat decimalFormat = NumberFormat.getInstance();
+ private final PointsConfig doublePointsConfig = new PointsConfig(decimalFormat, Double.class);
+
+ @Override
+ protected QueryNode preProcessNode(QueryNode node) throws QueryNodeException {
+ return node;
+ }
+
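+ // Rewrites term ranges whose bounds parse as numbers into double point range
+ // queries; non-numeric ranges are left for the term range processor.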
+ @Override
+ protected QueryNode postProcessNode(QueryNode node) throws QueryNodeException {
+ if (node instanceof TermRangeQueryNode) {
+ final TermRangeQueryNode rangeNode = (TermRangeQueryNode) node;
+ final Number lowerValue = toNumber(rangeNode.getLowerBound());
+ final Number upperValue = toNumber(rangeNode.getUpperBound());
+ if (lowerValue != null && upperValue != null) {
+ return new PointRangeQueryNode(
+ toPointQueryNode(rangeNode.getField(), lowerValue),
+ toPointQueryNode(rangeNode.getField(), upperValue),
+ rangeNode.isLowerInclusive(),
+ rangeNode.isUpperInclusive(),
+ doublePointsConfig);
+ }
+ }
+ return node;
+ }
+
+ @Override
+ protected List<QueryNode> setChildrenOrder(List<QueryNode> children) throws QueryNodeException {
+ return children;
+ }
+
+ private Number toNumber(final FieldQueryNode node) {
+ switch (node.getTextAsString()) {
+ case "Infinity":
+ return Double.POSITIVE_INFINITY;
+ case "-Infinity":
+ return Double.NEGATIVE_INFINITY;
+ default:
+ // parse() returns null for non-numeric bounds; propagate null so the
+ // caller leaves the node as a term range.
+ final Number result = decimalFormat.parse(node.getTextAsString(), new ParsePosition(0));
+ return result == null ? null : result.doubleValue();
+ }
+ }
+
+ private PointQueryNode toPointQueryNode(final CharSequence field, final Number value) {
+ return new PointQueryNode(field, value, decimalFormat);
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ParallelSearcherFactory.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ParallelSearcherFactory.java
new file mode 100644
index 000000000..bd31801fd
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ParallelSearcherFactory.java
@@ -0,0 +1,40 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core;
+
+import java.io.IOException;
+import java.util.concurrent.Executor;
+
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.SearcherFactory;
+
+public class ParallelSearcherFactory extends SearcherFactory {
+
+ private Executor executor;
+
+ public Executor getExecutor() {
+ return executor;
+ }
+
+ public void setExecutor(Executor executor) {
+ this.executor = executor;
+ }
+
+ @Override
+ public IndexSearcher newSearcher(final IndexReader reader, final IndexReader previousReader) throws IOException {
+ return new IndexSearcher(reader, executor);
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/QueryParser.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/QueryParser.java
new file mode 100644
index 000000000..d13d1acd5
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/QueryParser.java
@@ -0,0 +1,24 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core;
+
+import org.apache.lucene.search.Query;
+
+import org.apache.couchdb.nouveau.api.SearchRequest;
+
+public interface QueryParser {
+
+ public Query parse(SearchRequest searchRequest) throws QueryParserException;
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/QueryParserException.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/QueryParserException.java
new file mode 100644
index 000000000..7400146f1
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/QueryParserException.java
@@ -0,0 +1,22 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core;
+
+public class QueryParserException extends Exception {
+
+ public QueryParserException(Throwable originalThrowable) {
+ super(originalThrowable);
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/UpdatesOutOfOrderException.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/UpdatesOutOfOrderException.java
new file mode 100644
index 000000000..3b89f41d2
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/UpdatesOutOfOrderException.java
@@ -0,0 +1,24 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core;
+
+import java.io.IOException;
+
+public class UpdatesOutOfOrderException extends IOException {
+
+ public UpdatesOutOfOrderException() {
+ super("Updates applied in the wrong order");
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/UpdatesOutOfOrderExceptionMapper.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/UpdatesOutOfOrderExceptionMapper.java
new file mode 100644
index 000000000..3a9042924
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/UpdatesOutOfOrderExceptionMapper.java
@@ -0,0 +1,33 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core;
+
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.Status;
+import javax.ws.rs.ext.ExceptionMapper;
+
+import io.dropwizard.jersey.errors.ErrorMessage;
+
+public class UpdatesOutOfOrderExceptionMapper implements ExceptionMapper<UpdatesOutOfOrderException> {
+
+ @Override
+ public Response toResponse(final UpdatesOutOfOrderException exception) {
+ return Response.status(Status.BAD_REQUEST)
+ .type(MediaType.APPLICATION_JSON_TYPE)
+ .entity(new ErrorMessage(Status.BAD_REQUEST.getStatusCode(), exception.getMessage()))
+ .build();
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/BytesRefDeserializer.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/BytesRefDeserializer.java
new file mode 100644
index 000000000..2c56c31f2
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/BytesRefDeserializer.java
@@ -0,0 +1,44 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core.ser;
+
+import java.io.IOException;
+
+import org.apache.lucene.util.BytesRef;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
+
+public class BytesRefDeserializer extends StdDeserializer<BytesRef> {
+
+
+ public BytesRefDeserializer() {
+ this(null);
+ }
+
+ public BytesRefDeserializer(Class<?> vc) {
+ super(vc);
+ }
+
+ @Override
+ public BytesRef deserialize(final JsonParser parser, final DeserializationContext context)
+ throws IOException, JsonProcessingException {
+ JsonNode node = parser.getCodec().readTree(parser);
+ return new BytesRef(node.binaryValue());
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/BytesRefSerializer.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/BytesRefSerializer.java
new file mode 100644
index 000000000..84850614b
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/BytesRefSerializer.java
@@ -0,0 +1,40 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core.ser;
+
+import java.io.IOException;
+
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+
+import org.apache.lucene.util.BytesRef;
+
+public class BytesRefSerializer extends StdSerializer<BytesRef> {
+
+ public BytesRefSerializer() {
+ this(null);
+ }
+
+ public BytesRefSerializer(Class<BytesRef> vc) {
+ super(vc);
+ }
+
+ @Override
+ public void serialize(final BytesRef bytesRef, final JsonGenerator gen, final SerializerProvider provider)
+ throws IOException {
+ gen.writeBinary(bytesRef.bytes, bytesRef.offset, bytesRef.length);
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/DoubleRangeDeserializer.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/DoubleRangeDeserializer.java
new file mode 100644
index 000000000..264002b33
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/DoubleRangeDeserializer.java
@@ -0,0 +1,48 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core.ser;
+
+import java.io.IOException;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
+
+import org.apache.lucene.facet.range.DoubleRange;
+
+class DoubleRangeDeserializer extends StdDeserializer<DoubleRange> {
+
+ public DoubleRangeDeserializer() {
+ this(null);
+ }
+
+ public DoubleRangeDeserializer(Class<?> vc) {
+ super(vc);
+ }
+
+ @Override
+ public DoubleRange deserialize(final JsonParser parser, final DeserializationContext context)
+ throws IOException, JsonProcessingException {
+ JsonNode node = parser.getCodec().readTree(parser);
+ final String label = node.get("label").asText();
+ final double min = node.get("min").asDouble();
+ final boolean minInc = node.has("inclusive_min") ? node.get("inclusive_min").asBoolean() : true;
+ final double max = node.get("max").asDouble();
+ final boolean maxInc = node.has("inclusive_max") ? node.get("inclusive_max").asBoolean() : true;
+ return new DoubleRange(label, min, minInc, max, maxInc);
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/DoubleRangeSerializer.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/DoubleRangeSerializer.java
new file mode 100644
index 000000000..f7aeee263
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/DoubleRangeSerializer.java
@@ -0,0 +1,44 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core.ser;
+
+import java.io.IOException;
+
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+
+import org.apache.lucene.facet.range.DoubleRange;
+
+class DoubleRangeSerializer extends StdSerializer<DoubleRange> {
+
+ public DoubleRangeSerializer() {
+ this(null);
+ }
+
+ public DoubleRangeSerializer(Class<DoubleRange> vc) {
+ super(vc);
+ }
+
+ @Override
+ public void serialize(final DoubleRange doubleRange, final JsonGenerator gen, final SerializerProvider provider)
+ throws IOException {
+ gen.writeStartObject();
+ gen.writeStringField("label", doubleRange.label);
+ gen.writeNumberField("min", doubleRange.min);
+ gen.writeNumberField("max", doubleRange.max);
+ gen.writeEndObject();
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/FieldDocDeserializer.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/FieldDocDeserializer.java
new file mode 100644
index 000000000..51fb8699f
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/FieldDocDeserializer.java
@@ -0,0 +1,72 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core.ser;
+
+import java.io.IOException;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+
+import org.apache.lucene.search.FieldDoc;
+import org.apache.lucene.util.BytesRef;
+
+public class FieldDocDeserializer extends StdDeserializer<FieldDoc> {
+
+ public FieldDocDeserializer() {
+ this(null);
+ }
+
+ public FieldDocDeserializer(Class<?> vc) {
+ super(vc);
+ }
+
+ @Override
+ public FieldDoc deserialize(final JsonParser parser, final DeserializationContext context)
+ throws IOException, JsonProcessingException {
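+ // Rebuilds a FieldDoc (the search "after" bookmark) from the typed array
+ // written by FieldDocSerializer.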
+ ArrayNode fieldNode = (ArrayNode) parser.getCodec().readTree(parser);
+ final Object[] fields = new Object[fieldNode.size()];
+ for (int i = 0; i < fields.length; i++) {
+ final JsonNode field = fieldNode.get(i);
+ switch (field.get("type").asText()) {
+ case "string":
+ fields[i] = field.get("value").asText();
+ break;
+ case "bytes":
+ fields[i] = new BytesRef(field.get("value").binaryValue());
+ break;
+ case "float":
+ fields[i] = field.get("value").floatValue();
+ break;
+ case "double":
+ fields[i] = field.get("value").doubleValue();
+ break;
+ case "int":
+ fields[i] = field.get("value").intValue();
+ break;
+ case "long":
+ fields[i] = field.get("value").longValue();
+ break;
+ default:
+ throw new IOException("Unsupported field value: " + field);
+ }
+ }
+ // TODO .doc should be Long.MAX_VALUE if we invert the sort
+ return new FieldDoc(0, Float.NaN, fields);
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/FieldDocSerializer.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/FieldDocSerializer.java
new file mode 100644
index 000000000..29f86ee11
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/FieldDocSerializer.java
@@ -0,0 +1,72 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core.ser;
+
+import java.io.IOException;
+
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+
+import org.apache.lucene.search.FieldDoc;
+import org.apache.lucene.util.BytesRef;
+
+public class FieldDocSerializer extends StdSerializer<FieldDoc> {
+
+ public FieldDocSerializer() {
+ this(null);
+ }
+
+ public FieldDocSerializer(Class<FieldDoc> vc) {
+ super(vc);
+ }
+
+ @Override
+ public void serialize(final FieldDoc fieldDoc, final JsonGenerator gen, final SerializerProvider provider)
+ throws IOException {
+ // We ignore fieldDoc.score as it will be in the fields array if we're sorting by relevance.
+ // We ignore fieldDoc.doc as _id is always the last field and is unique.
+ gen.writeStartArray();
+ // Preserve type information for correct deserialization of the cursor.
+ for (final Object o : fieldDoc.fields) {
+ gen.writeStartObject();
+ if (o instanceof String) {
+ gen.writeStringField("type", "string");
+ gen.writeStringField("value", (String) o);
+ } else if (o instanceof BytesRef) {
+ final BytesRef bytesRef = (BytesRef) o;
+ gen.writeStringField("type", "bytes");
+ gen.writeFieldName("value");
+ gen.writeBinary(bytesRef.bytes, bytesRef.offset, bytesRef.length);
+ } else if (o instanceof Float) {
+ gen.writeStringField("type", "float");
+ gen.writeNumberField("value", (Float) o);
+ } else if (o instanceof Double) {
+ gen.writeStringField("type", "double");
+ gen.writeNumberField("value", (Double) o);
+ } else if (o instanceof Integer) {
+ gen.writeStringField("type", "int");
+ gen.writeNumberField("value", (Integer) o);
+ } else if (o instanceof Long) {
+ gen.writeStringField("type", "long");
+ gen.writeNumberField("value", (Long) o);
+ } else {
+ throw new IOException(o.getClass() + " not supported");
+ }
+ gen.writeEndObject();
+ }
+ gen.writeEndArray();
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/IndexableFieldDeserializer.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/IndexableFieldDeserializer.java
new file mode 100644
index 000000000..a20f9c398
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/IndexableFieldDeserializer.java
@@ -0,0 +1,116 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core.ser;
+
+import java.io.IOException;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
+
+import org.apache.lucene.document.BinaryDocValuesField;
+import org.apache.lucene.document.DoubleDocValuesField;
+import org.apache.lucene.document.DoublePoint;
+import org.apache.lucene.document.FloatDocValuesField;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.FloatPoint;
+import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.LatLonDocValuesField;
+import org.apache.lucene.document.LatLonPoint;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.SortedDocValuesField;
+import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.document.SortedSetDocValuesField;
+import org.apache.lucene.document.StoredField;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.document.XYDocValuesField;
+import org.apache.lucene.document.XYPointField;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.util.BytesRef;
+
+class IndexableFieldDeserializer extends StdDeserializer<IndexableField> {
+
+ public IndexableFieldDeserializer() {
+ this(null);
+ }
+
+ public IndexableFieldDeserializer(Class<?> vc) {
+ super(vc);
+ }
+
+ @Override
+ public IndexableField deserialize(final JsonParser parser, final DeserializationContext context)
+ throws IOException, JsonProcessingException {
+ JsonNode node = parser.getCodec().readTree(parser);
+
+ final SupportedType type = SupportedType.valueOf(node.get("@type").asText());
+ final String name = node.get("name").asText();
+
+ switch (type) {
+ case binary_dv:
+ return new BinaryDocValuesField(name, bytesRef(node));
+ case double_point:
+ return new DoublePoint(name, node.get("value").doubleValue());
+ case float_dv:
+ return new FloatDocValuesField(name, node.get("value").floatValue());
+ case float_point:
+ return new FloatPoint(name, node.get("value").floatValue());
+ case latlon_dv:
+ return new LatLonDocValuesField(name, node.get("lat").doubleValue(), node.get("lon").doubleValue());
+ case latlon_point:
+ return new LatLonPoint(name, node.get("lat").doubleValue(), node.get("lon").doubleValue());
+ case int_point:
+ return new IntPoint(name, node.get("value").intValue());
+ case long_point:
+ return new LongPoint(name, node.get("value").longValue());
+ case xy_dv:
+ return new XYDocValuesField(name, node.get("x").floatValue(), node.get("y").floatValue());
+ case xy_point:
+ return new XYPointField(name, node.get("x").floatValue(), node.get("y").floatValue());
+ case string:
+ return new StringField(name, node.get("value").asText(),
+ node.get("stored").asBoolean() ? Store.YES : Store.NO);
+ case text:
+ return new TextField(name, node.get("value").asText(),
+ node.get("stored").asBoolean() ? Store.YES : Store.NO);
+ case stored_double:
+ return new StoredField(name, node.get("value").asDouble());
+ case stored_string:
+ return new StoredField(name, node.get("value").asText());
+ case stored_binary:
+ return new StoredField(name, bytesRef(node));
+ case sorted_set_dv:
+ return new SortedSetDocValuesField(name, bytesRef(node));
+ case sorted_dv:
+ return new SortedDocValuesField(name, bytesRef(node));
+ case sorted_numeric_dv:
+ return new SortedNumericDocValuesField(name, node.get("value").longValue());
+ case double_dv:
+ return new DoubleDocValuesField(name, node.get("value").asDouble());
+ }
+ throw new IOException(type + " is not a supported field type");
+ }
+
+ private BytesRef bytesRef(final JsonNode node) throws IOException {
+ final JsonNode value = node.get("value");
+ if (node.has("encoded") && node.get("encoded").asBoolean()) {
+ return new BytesRef(value.binaryValue());
+ }
+ return new BytesRef(value.asText());
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/IndexableFieldSerializer.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/IndexableFieldSerializer.java
new file mode 100644
index 000000000..c4520919b
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/IndexableFieldSerializer.java
@@ -0,0 +1,84 @@
+package org.apache.couchdb.nouveau.core.ser;
+
+import java.io.IOException;
+
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.databind.SerializerProvider;
+import com.fasterxml.jackson.databind.ser.std.StdSerializer;
+
+import org.apache.lucene.geo.GeoEncodingUtils;
+import org.apache.lucene.geo.XYEncodingUtils;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.util.BytesRef;
+
+class IndexableFieldSerializer extends StdSerializer<IndexableField> {
+
+ public IndexableFieldSerializer() {
+ this(null);
+ }
+
+ public IndexableFieldSerializer(Class<IndexableField> vc) {
+ super(vc);
+ }
+
+ @Override
+ public void serialize(final IndexableField field, final JsonGenerator gen, final SerializerProvider provider)
+ throws IOException {
+ final SupportedType type = SupportedType.fromField(field);
+ gen.writeStartObject();
+ gen.writeStringField("@type", type.toString());
+ gen.writeStringField("name", field.name());
+ switch (type) {
+ case double_dv:
+ case double_point:
+ case stored_double:
+ gen.writeNumberField("value", field.numericValue().doubleValue());
+ break;
+ case float_dv:
+ case float_point:
+ gen.writeNumberField("value", field.numericValue().floatValue());
+ break;
+ case int_point:
+ gen.writeNumberField("value", field.numericValue().intValue());
+ break;
+ case latlon_dv:
+ case latlon_point: {
+ final long value = (Long) field.numericValue();
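+ // The packed long holds the encoded latitude in the high 32 bits and the encoded longitude in the low 32 bits.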
+ gen.writeNumberField("lat", GeoEncodingUtils.decodeLatitude((int) (value >> 32)));
+ gen.writeNumberField("lon", GeoEncodingUtils.decodeLongitude((int) (value & 0xFFFFFFFF)));
+ break;
+ }
+ case long_point:
+ case sorted_numeric_dv:
+ gen.writeNumberField("value", field.numericValue().longValue());
+ break;
+ case binary_dv:
+ case sorted_dv:
+ case sorted_set_dv:
+ case stored_binary: {
+ final BytesRef bytesRef = field.binaryValue();
+ gen.writeFieldName("value");
+ gen.writeBinary(bytesRef.bytes, bytesRef.offset, bytesRef.length);
+ gen.writeBooleanField("encoded", true);
+ break;
+ }
+ case stored_string:
+ gen.writeStringField("value", field.stringValue());
+ break;
+ case string:
+ case text:
+ gen.writeStringField("value", field.stringValue());
+ gen.writeBooleanField("stored", field.fieldType().stored());
+ break;
+ case xy_dv:
+ case xy_point: {
+ final BytesRef bytesRef = field.binaryValue();
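+ // Decode x from the first four bytes of the binary value and y from the next four.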
+ gen.writeNumberField("x", XYEncodingUtils.decode(bytesRef.bytes, 0));
+ gen.writeNumberField("y", XYEncodingUtils.decode(bytesRef.bytes, Integer.BYTES));
+ break;
+ }
+ }
+ gen.writeEndObject();
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/LuceneModule.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/LuceneModule.java
new file mode 100644
index 000000000..f78cfb02e
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/LuceneModule.java
@@ -0,0 +1,49 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core.ser;
+
+import com.fasterxml.jackson.core.Version;
+import com.fasterxml.jackson.databind.module.SimpleModule;
+
+import org.apache.lucene.facet.range.DoubleRange;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.search.FieldDoc;
+import org.apache.lucene.search.TotalHits;
+
+public class LuceneModule extends SimpleModule {
+
+ public LuceneModule() {
+ super("lucene", Version.unknownVersion());
+
+ // IndexableField
+ addSerializer(IndexableField.class, new IndexableFieldSerializer());
+ addDeserializer(IndexableField.class, new IndexableFieldDeserializer());
+
+ // DoubleRange
+ addSerializer(DoubleRange.class, new DoubleRangeSerializer());
+ addDeserializer(DoubleRange.class, new DoubleRangeDeserializer());
+
+ // FieldDoc
+ addSerializer(FieldDoc.class, new FieldDocSerializer());
+ addDeserializer(FieldDoc.class, new FieldDocDeserializer());
+
+ // TotalHits
+ addDeserializer(TotalHits.class, new TotalHitsDeserializer());
+
+ // BytesRef - disabled until I'm sure I need it.
+ // addSerializer(BytesRef.class, new BytesRefSerializer());
+ // addDeserializer(BytesRef.class, new BytesRefDeserializer());
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/SupportedType.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/SupportedType.java
new file mode 100644
index 000000000..9f652df26
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/SupportedType.java
@@ -0,0 +1,82 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core.ser;
+
+import org.apache.lucene.document.BinaryDocValuesField;
+import org.apache.lucene.document.DoubleDocValuesField;
+import org.apache.lucene.document.DoublePoint;
+import org.apache.lucene.document.FloatDocValuesField;
+import org.apache.lucene.document.FloatPoint;
+import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.LatLonDocValuesField;
+import org.apache.lucene.document.LatLonPoint;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.document.SortedDocValuesField;
+import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.document.SortedSetDocValuesField;
+import org.apache.lucene.document.StoredField;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.document.XYDocValuesField;
+import org.apache.lucene.document.XYPointField;
+import org.apache.lucene.index.IndexableField;
+
+enum SupportedType {
+
+ binary_dv(BinaryDocValuesField.class),
+ double_dv(DoubleDocValuesField.class),
+ double_point(DoublePoint.class),
+ float_dv(FloatDocValuesField.class),
+ float_point(FloatPoint.class),
+ int_point(IntPoint.class),
+ latlon_dv(LatLonDocValuesField.class),
+ latlon_point(LatLonPoint.class),
+ long_point(LongPoint.class),
+ sorted_dv(SortedDocValuesField.class),
+ sorted_numeric_dv(SortedNumericDocValuesField.class),
+ sorted_set_dv(SortedSetDocValuesField.class),
+ stored_binary(StoredField.class),
+ stored_double(StoredField.class),
+ stored_string(StoredField.class),
+ string(StringField.class),
+ text(TextField.class),
+ xy_dv(XYDocValuesField.class),
+ xy_point(XYPointField.class);
+
+ private final Class<? extends IndexableField> clazz;
+
+ private SupportedType(final Class<? extends IndexableField> clazz) {
+ this.clazz = clazz;
+ }
+
+ public static SupportedType fromField(final IndexableField field) {
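+ // StoredField backs several logical types, so inspect the stored value to pick the right one.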
+ if (field instanceof StoredField) {
+ final StoredField storedField = (StoredField) field;
+ if (storedField.numericValue() != null) {
+ return stored_double;
+ } else if (storedField.stringValue() != null) {
+ return stored_string;
+ } else if (storedField.binaryValue() != null) {
+ return stored_binary;
+ }
+ }
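+ // Otherwise match on the concrete field class.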
+ for (final SupportedType t : SupportedType.values()) {
+ if (t.clazz.isAssignableFrom(field.getClass())) {
+ return t;
+ }
+ }
+ throw new IllegalArgumentException(field + " is not a supported type");
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/TotalHitsDeserializer.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/TotalHitsDeserializer.java
new file mode 100644
index 000000000..eb4976018
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/core/ser/TotalHitsDeserializer.java
@@ -0,0 +1,45 @@
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core.ser;
+
+import java.io.IOException;
+
+import org.apache.lucene.search.TotalHits;
+import org.apache.lucene.search.TotalHits.Relation;
+
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.DeserializationContext;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.deser.std.StdDeserializer;
+
+public class TotalHitsDeserializer extends StdDeserializer<TotalHits> {
+
+ public TotalHitsDeserializer() {
+ this(null);
+ }
+
+ public TotalHitsDeserializer(Class<?> vc) {
+ super(vc);
+ }
+
+ @Override
+ public TotalHits deserialize(final JsonParser parser, final DeserializationContext context)
+ throws IOException, JsonProcessingException {
+ JsonNode node = parser.getCodec().readTree(parser);
+ final long value = node.get("value").asLong();
+ final Relation relation = Relation.valueOf(node.get("relation").asText());
+ return new TotalHits(value, relation);
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/health/AnalyzeHealthCheck.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/health/AnalyzeHealthCheck.java
new file mode 100644
index 000000000..75d402b43
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/health/AnalyzeHealthCheck.java
@@ -0,0 +1,45 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.health;
+
+import java.util.Arrays;
+import java.util.List;
+
+import org.apache.couchdb.nouveau.api.AnalyzeRequest;
+import org.apache.couchdb.nouveau.api.AnalyzeResponse;
+import org.apache.couchdb.nouveau.resources.AnalyzeResource;
+import com.codahale.metrics.health.HealthCheck;
+
+public class AnalyzeHealthCheck extends HealthCheck {
+
+ private AnalyzeResource analyzeResource;
+
+ public AnalyzeHealthCheck(final AnalyzeResource analyzeResource) {
+ this.analyzeResource = analyzeResource;
+ }
+
+ @Override
+ protected Result check() throws Exception {
+ final AnalyzeRequest request = new AnalyzeRequest("standard", "hello there");
+ final AnalyzeResponse response = analyzeResource.analyzeText(request);
+ final List<String> expected = Arrays.asList("hello", "there");
+ final List<String> actual = response.getTokens();
+ if (expected.equals(actual)) {
+ return Result.healthy();
+ } else {
+ return Result.unhealthy("Expected '%s' but got '%s'", expected, actual);
+ }
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/health/IndexManagerHealthCheck.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/health/IndexManagerHealthCheck.java
new file mode 100644
index 000000000..98ac5988e
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/health/IndexManagerHealthCheck.java
@@ -0,0 +1,59 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.health;
+
+import java.io.IOException;
+
+import org.apache.couchdb.nouveau.api.IndexDefinition;
+import org.apache.couchdb.nouveau.core.IndexManager;
+import org.apache.couchdb.nouveau.core.IndexManager.Index;
+import com.codahale.metrics.health.HealthCheck;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.IndexWriter;
+
+public class IndexManagerHealthCheck extends HealthCheck {
+
+ private IndexManager indexManager;
+
+ public IndexManagerHealthCheck(final IndexManager indexManager) {
+ this.indexManager = indexManager;
+ }
+
+ @Override
+ protected Result check() throws Exception {
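+ // Exercise a full create, write, commit, and delete cycle against a scratch index.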
+ final String name = "_____test";
+ try {
+ indexManager.deleteAll(name);
+ } catch (IOException e) {
+ // Ignored, index might not exist yet.
+ }
+
+ indexManager.create(name, new IndexDefinition("standard", null));
+ final Index index = indexManager.acquire(name);
+ try {
+ final IndexWriter writer = index.getWriter();
+ try {
+ writer.addDocument(new Document());
+ writer.commit();
+ return Result.healthy();
+ } finally {
+ indexManager.deleteAll(name);
+ }
+ } finally {
+ indexManager.release(index);
+ }
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/resources/AnalyzeResource.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/resources/AnalyzeResource.java
new file mode 100644
index 000000000..60e8c8ca3
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/resources/AnalyzeResource.java
@@ -0,0 +1,76 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.resources;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.validation.Valid;
+import javax.validation.constraints.NotNull;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response.Status;
+
+import org.apache.couchdb.nouveau.api.AnalyzeRequest;
+import org.apache.couchdb.nouveau.api.AnalyzeResponse;
+import org.apache.couchdb.nouveau.core.AnalyzerFactory;
+import com.codahale.metrics.annotation.Timed;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
+
+@Path("/analyze")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public class AnalyzeResource {
+
+ private final AnalyzerFactory analyzerFactory;
+
+ public AnalyzeResource(AnalyzerFactory analyzerFactory) {
+ this.analyzerFactory = analyzerFactory;
+ }
+
+ @POST
+ @Timed
+ public AnalyzeResponse analyzeText(@NotNull @Valid AnalyzeRequest analyzeRequest) throws IOException {
+ final Analyzer analyzer;
+ try {
+ analyzer = analyzerFactory.newAnalyzer(analyzeRequest.getAnalyzer());
+ } catch (IllegalArgumentException e) {
+ throw new WebApplicationException(analyzeRequest.getAnalyzer() + " not a valid analyzer",
+ Status.BAD_REQUEST);
+ }
+ return new AnalyzeResponse(tokenize(analyzer, analyzeRequest.getText()));
+ }
+
+ private List<String> tokenize(final Analyzer analyzer, final String text) throws IOException {
+ final List<String> result = new ArrayList<String>(10);
+ try (final TokenStream tokenStream = analyzer.tokenStream("default", text)) {
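+ // The TokenStream contract requires reset() before iteration and end() when consumption is complete.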
+ tokenStream.reset();
+ while (tokenStream.incrementToken()) {
+ final CharTermAttribute term = tokenStream.getAttribute(CharTermAttribute.class);
+ result.add(term.toString());
+ }
+ tokenStream.end();
+ }
+ return result;
+ }
+
+}
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/resources/IndexResource.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/resources/IndexResource.java
new file mode 100644
index 000000000..cd10226db
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/resources/IndexResource.java
@@ -0,0 +1,110 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.resources;
+
+import java.io.IOException;
+
+import javax.validation.Valid;
+import javax.validation.constraints.NotNull;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.core.MediaType;
+
+import org.apache.couchdb.nouveau.api.DocumentDeleteRequest;
+import org.apache.couchdb.nouveau.api.DocumentUpdateRequest;
+import org.apache.couchdb.nouveau.api.IndexDefinition;
+import org.apache.couchdb.nouveau.api.IndexInfo;
+import org.apache.couchdb.nouveau.core.DocumentFactory;
+import org.apache.couchdb.nouveau.core.IndexManager;
+import org.apache.couchdb.nouveau.core.IndexManager.Index;
+import com.codahale.metrics.annotation.Timed;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.TermQuery;
+
+@Path("/index/{name}")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public class IndexResource {
+
+ private final IndexManager indexManager;
+ private final DocumentFactory documentFactory;
+
+ public IndexResource(final IndexManager indexManager, final DocumentFactory documentFactory) {
+ this.indexManager = indexManager;
+ this.documentFactory = documentFactory;
+ }
+
+ @GET
+ @SuppressWarnings("resource")
+ public IndexInfo indexInfo(@PathParam("name") String name) throws IOException {
+ final long updateSeq;
+ final int numDocs;
+ final Index index = indexManager.acquire(name);
+ try {
+ updateSeq = index.getUpdateSeq();
+ numDocs = index.getWriter().getDocStats().numDocs;
+ } finally {
+ indexManager.release(index);
+ }
+ return new IndexInfo(updateSeq, numDocs);
+ }
+
+ @DELETE
+ public void deletePath(@PathParam("name") String path) throws IOException {
+ indexManager.deleteAll(path);
+ }
+
+ @PUT
+ public void createIndex(@PathParam("name") String name, @NotNull @Valid IndexDefinition indexDefinition) throws IOException {
+ indexManager.create(name, indexDefinition);
+ }
+
+ @DELETE
+ @Timed
+ @Path("/doc/{docId}")
+ public void deleteDoc(@PathParam("name") String name, @PathParam("docId") String docId, @NotNull @Valid final DocumentDeleteRequest request) throws IOException {
+ final Index index = indexManager.acquire(name);
+ try {
+ final IndexWriter writer = index.getWriter();
+ writer.deleteDocuments(new TermQuery(new Term("_id", docId)));
+ index.incrementUpdateSeq(request.getSeq());
+ } finally {
+ indexManager.release(index);
+ }
+ }
+
+ @PUT
+ @Timed
+ @Path("/doc/{docId}")
+ public void updateDoc(@PathParam("name") String name, @PathParam("docId") String docId, @NotNull @Valid final DocumentUpdateRequest request) throws IOException {
+ final Index index = indexManager.acquire(name);
+ try {
+ final IndexWriter writer = index.getWriter();
+ final Document doc = documentFactory.build(docId, request);
+ writer.updateDocument(new Term("_id", docId), doc);
+ index.incrementUpdateSeq(request.getSeq());
+ } finally {
+ indexManager.release(index);
+ }
+ }
+
+} \ No newline at end of file
diff --git a/java/nouveau/src/main/java/org/apache/couchdb/nouveau/resources/SearchResource.java b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/resources/SearchResource.java
new file mode 100644
index 000000000..dd78e861c
--- /dev/null
+++ b/java/nouveau/src/main/java/org/apache/couchdb/nouveau/resources/SearchResource.java
@@ -0,0 +1,249 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.resources;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.validation.Valid;
+import javax.validation.constraints.NotNull;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response.Status;
+
+import org.apache.couchdb.nouveau.api.SearchHit;
+import org.apache.couchdb.nouveau.api.SearchRequest;
+import org.apache.couchdb.nouveau.api.SearchResults;
+import org.apache.couchdb.nouveau.core.IndexManager;
+import org.apache.couchdb.nouveau.core.IndexManager.Index;
+import org.apache.couchdb.nouveau.core.QueryParserException;
+import com.codahale.metrics.annotation.Timed;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.facet.FacetResult;
+import org.apache.lucene.facet.Facets;
+import org.apache.lucene.facet.FacetsCollector;
+import org.apache.lucene.facet.FacetsCollectorManager;
+import org.apache.lucene.facet.LabelAndValue;
+import org.apache.lucene.facet.StringDocValuesReaderState;
+import org.apache.lucene.facet.StringValueFacetCounts;
+import org.apache.lucene.facet.range.DoubleRange;
+import org.apache.lucene.facet.range.DoubleRangeFacetCounts;
+import org.apache.lucene.index.IndexableField;
+import org.apache.lucene.search.CollectorManager;
+import org.apache.lucene.search.FieldDoc;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MultiCollectorManager;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.SearcherManager;
+import org.apache.lucene.search.Sort;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.TopFieldCollector;
+
+@Path("/index/{name}")
+@Consumes(MediaType.APPLICATION_JSON)
+@Produces(MediaType.APPLICATION_JSON)
+public class SearchResource {
+
+ private static final DoubleRange[] EMPTY_DOUBLE_RANGE_ARRAY = new DoubleRange[0];
+ private static final Sort DEFAULT_SORT = new Sort(SortField.FIELD_SCORE,
+ new SortField("_id", SortField.Type.STRING));
+ private static final Pattern SORT_FIELD_RE = Pattern.compile("^([-+])?([\\.\\w]+)(?:<(\\w+)>)?$");
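+ // e.g. "-foo<string>" sorts descending on foo as a string; the type suffix is optional and defaults to double.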
+ private final IndexManager indexManager;
+
+ public SearchResource(final IndexManager indexManager) {
+ this.indexManager = indexManager;
+ }
+
+ @POST
+ @Timed
+ @Path("/search")
+ public SearchResults searchIndex(@PathParam("name") String name, @NotNull @Valid SearchRequest searchRequest)
+ throws IOException, QueryParserException {
+ final Index index = indexManager.acquire(name);
+ try {
+ final Query query = index.getQueryParser().parse(searchRequest);
+
+ // Construct CollectorManagers.
+ final MultiCollectorManager cm;
+ final CollectorManager<?, ? extends TopDocs> hits = hitCollector(searchRequest);
+
+ final SearcherManager searcherManager = index.getSearcherManager();
+ searcherManager.maybeRefreshBlocking();
+
+ final IndexSearcher searcher = searcherManager.acquire();
+ try {
+ if (searchRequest.hasCounts() || searchRequest.hasRanges()) {
+ cm = new MultiCollectorManager(hits, new FacetsCollectorManager());
+ } else {
+ cm = new MultiCollectorManager(hits);
+ }
+ final Object[] reduces = searcher.search(query, cm);
+ return toSearchResults(searchRequest, searcher, reduces);
+ } catch (IllegalStateException e) {
+ throw new WebApplicationException(e.getMessage(), e, Status.BAD_REQUEST);
+ } finally {
+ searcherManager.release(searcher);
+ }
+ } finally {
+ indexManager.release(index);
+ }
+ }
+
+ private CollectorManager<?, ? extends TopDocs> hitCollector(final SearchRequest searchRequest) {
+ final Sort sort = toSort(searchRequest);
+
+ final FieldDoc after = searchRequest.getAfter();
+ if (after != null) {
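+ // The deserialized cursor has a placeholder doc id; pin it to the extreme value for the sort direction so paging neither skips nor repeats hits.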
+ if (getLastSortField(sort).getReverse()) {
+ after.doc = 0;
+ } else {
+ after.doc = Integer.MAX_VALUE;
+ }
+ }
+
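+ // The final argument is the total-hits threshold: counts are exact up to that many hits and a lower bound beyond it.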
+ return TopFieldCollector.createSharedManager(
+ sort,
+ searchRequest.getLimit(),
+ after,
+ 1000);
+ }
+
+ private SortField getLastSortField(final Sort sort) {
+ final SortField[] sortFields = sort.getSort();
+ return sortFields[sortFields.length - 1];
+ }
+
+ private SearchResults toSearchResults(final SearchRequest searchRequest, final IndexSearcher searcher,
+ final Object[] reduces) throws IOException {
+ final SearchResults result = new SearchResults();
+ collectHits(searcher, (TopDocs) reduces[0], result);
+ if (reduces.length == 2) {
+ collectFacets(searchRequest, searcher, (FacetsCollector) reduces[1], result);
+ }
+ return result;
+ }
+
+ private void collectHits(final IndexSearcher searcher, final TopDocs topDocs, final SearchResults searchResults)
+ throws IOException {
+ final List<SearchHit> hits = new ArrayList<SearchHit>(topDocs.scoreDocs.length);
+
+ for (final ScoreDoc scoreDoc : topDocs.scoreDocs) {
+ final Document doc = searcher.doc(scoreDoc.doc);
+
+ final List<IndexableField> fields = new ArrayList<IndexableField>(doc.getFields());
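+ // Strip the internal _id field; it is returned separately as the hit id.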
+ for (IndexableField field : doc.getFields()) {
+ if (field.name().equals("_id")) {
+ fields.remove(field);
+ }
+ }
+
+ hits.add(new SearchHit(doc.get("_id"), (FieldDoc) scoreDoc, fields));
+ }
+
+ searchResults.setTotalHits(topDocs.totalHits);
+ searchResults.setHits(hits);
+ }
+
+ private void collectFacets(final SearchRequest searchRequest, final IndexSearcher searcher,
+ final FacetsCollector fc, final SearchResults searchResults) throws IOException {
+ if (searchRequest.hasCounts()) {
+ final Map<String, Map<String, Number>> countsMap = new HashMap<String, Map<String, Number>>(
+ searchRequest.getCounts().size());
+ for (final String field : searchRequest.getCounts()) {
+ final StringDocValuesReaderState state = new StringDocValuesReaderState(searcher.getIndexReader(),
+ field);
+ final StringValueFacetCounts counts = new StringValueFacetCounts(state, fc);
+ countsMap.put(field, collectFacets(counts, searchRequest.getTopN(), field));
+ }
+ searchResults.setCounts(countsMap);
+ }
+
+ if (searchRequest.hasRanges()) {
+ final Map<String, Map<String, Number>> rangesMap = new HashMap<String, Map<String, Number>>(
+ searchRequest.getRanges().size());
+ for (final Entry<String, List<DoubleRange>> entry : searchRequest.getRanges().entrySet()) {
+ final DoubleRangeFacetCounts counts = new DoubleRangeFacetCounts(entry.getKey(), fc,
+ entry.getValue().toArray(EMPTY_DOUBLE_RANGE_ARRAY));
+ rangesMap.put(entry.getKey(), collectFacets(counts, searchRequest.getTopN(), entry.getKey()));
+ }
+ searchResults.setRanges(rangesMap);
+ }
+ }
+
+ private Map<String, Number> collectFacets(final Facets facets, final int topN, final String dim)
+ throws IOException {
+ final FacetResult topChildren = facets.getTopChildren(topN, dim);
+ final Map<String, Number> result = new HashMap<String, Number>(topChildren.childCount);
+ for (final LabelAndValue lv : topChildren.labelValues) {
+ result.put(lv.label, lv.value);
+ }
+ return result;
+ }
+
+ // Ensure _id is the final sort field so pagination is stable.
+ private Sort toSort(final SearchRequest searchRequest) {
+ if (!searchRequest.hasSort()) {
+ return DEFAULT_SORT;
+ }
+
+ final List<String> sort = new ArrayList<String>(searchRequest.getSort());
+ final String last = sort.get(sort.size() - 1);
+ // Append _id field if not already present.
+ switch (last) {
+ case "-_id<string>":
+ case "_id<string>":
+ break;
+ default:
+ sort.add("_id<string>");
+ }
+ return convertSort(sort);
+ }
+
+ private Sort convertSort(final List<String> sort) {
+ final SortField[] fields = new SortField[sort.size()];
+ for (int i = 0; i < sort.size(); i++) {
+ fields[i] = convertSortField(sort.get(i));
+ }
+ return new Sort(fields);
+ }
+
+ private SortField convertSortField(final String sortString) {
+ final Matcher m = SORT_FIELD_RE.matcher(sortString);
+ if (!m.matches()) {
+ throw new WebApplicationException(
+ sortString + " is not a valid sort parameter", Status.BAD_REQUEST);
+ }
+ final boolean reverse = "-".equals(m.group(1));
+ SortField.Type type = SortField.Type.DOUBLE;
+ if ("string".equals(m.group(3))) {
+ type = SortField.Type.STRING;
+ }
+ return new SortField(m.group(2), type, reverse);
+ }
+
+}
diff --git a/java/nouveau/src/main/resources/banner.txt b/java/nouveau/src/main/resources/banner.txt
new file mode 100644
index 000000000..3575b3984
--- /dev/null
+++ b/java/nouveau/src/main/resources/banner.txt
@@ -0,0 +1,7 @@
+ .-.
+ / |
+ /\ | .-._.) ( ) .-..-. .-. ) (
+ / \ |( )( )( / ./.-'_( | ( )
+ .-' / \| `-' `--': \_/ (__.' `-'-'`--':
+(__.' `.
+
diff --git a/java/nouveau/src/test/java/org/apache/couchdb/nouveau/IntegrationTest.java b/java/nouveau/src/test/java/org/apache/couchdb/nouveau/IntegrationTest.java
new file mode 100644
index 000000000..c353fe7da
--- /dev/null
+++ b/java/nouveau/src/test/java/org/apache/couchdb/nouveau/IntegrationTest.java
@@ -0,0 +1,125 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import java.util.List;
+import java.util.Map;
+
+import javax.ws.rs.client.Entity;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+
+import org.apache.couchdb.nouveau.api.DocumentUpdateRequest;
+import org.apache.couchdb.nouveau.api.IndexDefinition;
+import org.apache.couchdb.nouveau.api.SearchRequest;
+import org.apache.couchdb.nouveau.api.SearchResults;
+
+import org.apache.lucene.document.DoubleDocValuesField;
+import org.apache.lucene.document.DoublePoint;
+import org.apache.lucene.document.SortedSetDocValuesField;
+import org.apache.lucene.facet.range.DoubleRange;
+import org.apache.lucene.search.FieldDoc;
+import org.apache.lucene.search.TotalHits;
+import org.apache.lucene.search.TotalHits.Relation;
+import org.apache.lucene.util.BytesRef;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+
+import io.dropwizard.configuration.ResourceConfigurationSourceProvider;
+import io.dropwizard.testing.junit5.DropwizardAppExtension;
+import io.dropwizard.testing.junit5.DropwizardExtensionsSupport;
+
+@ExtendWith(DropwizardExtensionsSupport.class)
+public class IntegrationTest {
+
+ private static final String CONFIG = "test-nouveau.yaml";
+
+ static final DropwizardAppExtension<NouveauApplicationConfiguration> APP = new DropwizardAppExtension<>(
+ NouveauApplication.class, CONFIG,
+ new ResourceConfigurationSourceProvider()
+ );
+
+ @Test
+ public void indexTest() {
+ final String url = "http://localhost:" + APP.getLocalPort();
+ final String indexName = "foo";
+ final IndexDefinition indexDefinition = new IndexDefinition("standard", null);
+
+ // Clean up.
+ Response response =
+ APP.client().target(String.format("%s/index/%s", url, indexName))
+ .request()
+ .delete();
+
+ // Create index.
+ response =
+ APP.client().target(String.format("%s/index/%s", url, indexName))
+ .request()
+ .put(Entity.entity(indexDefinition, MediaType.APPLICATION_JSON_TYPE));
+
+ assertThat(response).extracting(Response::getStatus)
+ .isEqualTo(Response.Status.NO_CONTENT.getStatusCode());
+
+ // Populate index
+ for (int i = 0; i < 10; i++) {
+ final DocumentUpdateRequest docUpdate = new DocumentUpdateRequest(i + 1, null,
+ List.of(
+ new DoublePoint("foo", i),
+ new DoubleDocValuesField("baz", i),
+ new SortedSetDocValuesField("bar", new BytesRef("baz"))));
+ response =
+ APP.client().target(String.format("%s/index/%s/doc/doc%d", url, indexName, i))
+ .request()
+ .put(Entity.entity(docUpdate, MediaType.APPLICATION_JSON_TYPE));
+ assertThat(response).extracting(Response::getStatus)
+ .isEqualTo(Response.Status.NO_CONTENT.getStatusCode());
+ }
+
+ // Search index
+ final SearchRequest searchRequest = new SearchRequest();
+ searchRequest.setQuery("*:*");
+ searchRequest.setLimit(10);
+ searchRequest.setCounts(List.of("bar"));
+ searchRequest.setRanges(Map.of("baz", List.of(new DoubleRange("0 to 100 inc", 0.0, true, 100.0, true))));
+ searchRequest.setTopN(2);
+ searchRequest.setAfter(new FieldDoc(0, Float.NaN, new Object[]{1.0f, new BytesRef("a")}));
+
+ response =
+ APP.client().target(String.format("%s/index/%s/search", url, indexName))
+ .request()
+ .post(Entity.entity(searchRequest, MediaType.APPLICATION_JSON_TYPE));
+
+ assertThat(response).extracting(Response::getStatus).isEqualTo(Response.Status.OK.getStatusCode());
+ final SearchResults results = response.readEntity(SearchResults.class);
+ assertThat(results.getTotalHits()).isEqualTo(new TotalHits(10, Relation.EQUAL_TO));
+ assertThat(results.getCounts().size()).isEqualTo(1);
+ assertThat(results.getCounts().get("bar").get("baz")).isEqualTo(10);
+ assertThat(results.getRanges().get("baz").get("0 to 100 inc")).isEqualTo(1);
+ }
+
+ @Test
+ public void healthCheckShouldSucceed() {
+ final Response healthCheckResponse =
+ APP.client().target("http://localhost:" + APP.getAdminPort() + "/healthcheck")
+ .request()
+ .get();
+
+ assertThat(healthCheckResponse)
+ .extracting(Response::getStatus)
+ .isEqualTo(Response.Status.OK.getStatusCode());
+ }
+
+}
diff --git a/java/nouveau/src/test/java/org/apache/couchdb/nouveau/api/DocumentUpdateRequestTest.java b/java/nouveau/src/test/java/org/apache/couchdb/nouveau/api/DocumentUpdateRequestTest.java
new file mode 100644
index 000000000..35b813392
--- /dev/null
+++ b/java/nouveau/src/test/java/org/apache/couchdb/nouveau/api/DocumentUpdateRequestTest.java
@@ -0,0 +1,66 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.api;
+
+import static io.dropwizard.testing.FixtureHelpers.fixture;
+import static org.assertj.core.api.Assertions.assertThat;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.couchdb.nouveau.core.ser.LuceneModule;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import org.apache.lucene.document.DoublePoint;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.index.IndexableField;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+public class DocumentUpdateRequestTest {
+
+ private static ObjectMapper mapper;
+
+ @BeforeAll
+ public static void setupMapper() {
+ mapper = new ObjectMapper();
+ mapper.registerModule(new LuceneModule());
+ }
+
+ @Test
+ public void testSerialisation() throws Exception {
+ DocumentUpdateRequest request = asObject();
+ final String expected = mapper.writeValueAsString(
+ mapper.readValue(fixture("fixtures/DocumentUpdateRequest.json"), DocumentUpdateRequest.class));
+ assertThat(mapper.writeValueAsString(request)).isEqualTo(expected);
+ }
+
+ @Test
+ public void testDeserialisation() throws Exception {
+ DocumentUpdateRequest request = asObject();
+ assertThat(mapper.readValue(fixture("fixtures/DocumentUpdateRequest.json"), DocumentUpdateRequest.class).toString())
+ .isEqualTo(request.toString());
+ }
+
+ private DocumentUpdateRequest asObject() {
+ final List<IndexableField> fields = new ArrayList<IndexableField>();
+ fields.add(new StringField("stringfoo", "bar", Store.YES));
+ fields.add(new TextField("textfoo", "hello there", Store.YES));
+ fields.add(new DoublePoint("doublefoo", 12));
+ return new DocumentUpdateRequest(12, null, fields);
+ }
+
+}
diff --git a/java/nouveau/src/test/java/org/apache/couchdb/nouveau/api/SearchRequestTest.java b/java/nouveau/src/test/java/org/apache/couchdb/nouveau/api/SearchRequestTest.java
new file mode 100644
index 000000000..9544b6f6e
--- /dev/null
+++ b/java/nouveau/src/test/java/org/apache/couchdb/nouveau/api/SearchRequestTest.java
@@ -0,0 +1,50 @@
+package org.apache.couchdb.nouveau.api;
+
+import static io.dropwizard.testing.FixtureHelpers.fixture;
+import static org.assertj.core.api.Assertions.assertThat;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.couchdb.nouveau.core.ser.LuceneModule;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import org.apache.lucene.facet.range.DoubleRange;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+public class SearchRequestTest {
+
+ private static ObjectMapper mapper;
+
+ @BeforeAll
+ public static void setupMapper() {
+ mapper = new ObjectMapper();
+ mapper.registerModule(new LuceneModule());
+ }
+
+ @Test
+ public void testSerialisation() throws Exception {
+ SearchRequest request = asObject();
+ final String expected = mapper.writeValueAsString(
+ mapper.readValue(fixture("fixtures/SearchRequest.json"), SearchRequest.class));
+ assertThat(mapper.writeValueAsString(request)).isEqualTo(expected);
+ }
+
+ @Test
+ public void testDeserialisation() throws Exception {
+ SearchRequest request = asObject();
+ assertThat(mapper.readValue(fixture("fixtures/SearchRequest.json"), SearchRequest.class).toString())
+ .isEqualTo(request.toString());
+ }
+
+ private SearchRequest asObject() {
+ final SearchRequest result = new SearchRequest();
+ result.setQuery("*:*");
+ result.setLimit(10);
+ result.setCounts(List.of("bar"));
+ result.setRanges(Map.of("foo", List.of(new DoubleRange("0 to 100 inc", 0.0, true, 100.0, true))));
+ return result;
+ }
+
+}
diff --git a/java/nouveau/src/test/java/org/apache/couchdb/nouveau/core/AnalyzerFactoryTest.java b/java/nouveau/src/test/java/org/apache/couchdb/nouveau/core/AnalyzerFactoryTest.java
new file mode 100644
index 000000000..43bdb4e14
--- /dev/null
+++ b/java/nouveau/src/test/java/org/apache/couchdb/nouveau/core/AnalyzerFactoryTest.java
@@ -0,0 +1,256 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.ar.ArabicAnalyzer;
+import org.apache.lucene.analysis.bg.BulgarianAnalyzer;
+import org.apache.lucene.analysis.ca.CatalanAnalyzer;
+import org.apache.lucene.analysis.cjk.CJKAnalyzer;
+import org.apache.lucene.analysis.classic.ClassicAnalyzer;
+import org.apache.lucene.analysis.cn.smart.SmartChineseAnalyzer;
+import org.apache.lucene.analysis.core.KeywordAnalyzer;
+import org.apache.lucene.analysis.core.SimpleAnalyzer;
+import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
+import org.apache.lucene.analysis.cz.CzechAnalyzer;
+import org.apache.lucene.analysis.da.DanishAnalyzer;
+import org.apache.lucene.analysis.de.GermanAnalyzer;
+import org.apache.lucene.analysis.email.UAX29URLEmailAnalyzer;
+import org.apache.lucene.analysis.en.EnglishAnalyzer;
+import org.apache.lucene.analysis.es.SpanishAnalyzer;
+import org.apache.lucene.analysis.eu.BasqueAnalyzer;
+import org.apache.lucene.analysis.fa.PersianAnalyzer;
+import org.apache.lucene.analysis.fi.FinnishAnalyzer;
+import org.apache.lucene.analysis.fr.FrenchAnalyzer;
+import org.apache.lucene.analysis.ga.IrishAnalyzer;
+import org.apache.lucene.analysis.gl.GalicianAnalyzer;
+import org.apache.lucene.analysis.hi.HindiAnalyzer;
+import org.apache.lucene.analysis.hu.HungarianAnalyzer;
+import org.apache.lucene.analysis.hy.ArmenianAnalyzer;
+import org.apache.lucene.analysis.id.IndonesianAnalyzer;
+import org.apache.lucene.analysis.it.ItalianAnalyzer;
+import org.apache.lucene.analysis.ja.JapaneseAnalyzer;
+import org.apache.lucene.analysis.lv.LatvianAnalyzer;
+import org.apache.lucene.analysis.nl.DutchAnalyzer;
+import org.apache.lucene.analysis.no.NorwegianAnalyzer;
+import org.apache.lucene.analysis.pl.PolishAnalyzer;
+import org.apache.lucene.analysis.pt.PortugueseAnalyzer;
+import org.apache.lucene.analysis.ro.RomanianAnalyzer;
+import org.apache.lucene.analysis.ru.RussianAnalyzer;
+import org.apache.lucene.analysis.standard.StandardAnalyzer;
+import org.apache.lucene.analysis.sv.SwedishAnalyzer;
+import org.apache.lucene.analysis.th.ThaiAnalyzer;
+import org.apache.lucene.analysis.tr.TurkishAnalyzer;
+import org.junit.jupiter.api.Test;
+
+public class AnalyzerFactoryTest {
+
+ @Test
+ public void testkeyword() throws Exception {
+ assertAnalyzer("keyword", KeywordAnalyzer.class);
+ }
+
+ @Test
+ public void testsimple() throws Exception {
+ assertAnalyzer("simple", SimpleAnalyzer.class);
+ }
+
+ @Test
+ public void testwhitespace() throws Exception {
+ assertAnalyzer("whitespace", WhitespaceAnalyzer.class);
+ }
+
+ @Test
+ public void testarabic() throws Exception {
+ assertAnalyzer("arabic", ArabicAnalyzer.class);
+ }
+
+ @Test
+ public void testbulgarian() throws Exception {
+ assertAnalyzer("bulgarian", BulgarianAnalyzer.class);
+ }
+
+ @Test
+ public void testcatalan() throws Exception {
+ assertAnalyzer("catalan", CatalanAnalyzer.class);
+ }
+
+ @Test
+ public void testcjk() throws Exception {
+ assertAnalyzer("cjk", CJKAnalyzer.class);
+ }
+
+ @Test
+ public void testchinese() throws Exception {
+ assertAnalyzer("chinese", SmartChineseAnalyzer.class);
+ }
+
+ @Test
+ public void testczech() throws Exception {
+ assertAnalyzer("czech", CzechAnalyzer.class);
+ }
+
+ @Test
+ public void testdanish() throws Exception {
+ assertAnalyzer("danish", DanishAnalyzer.class);
+ }
+
+ @Test
+ public void testgerman() throws Exception {
+ assertAnalyzer("german", GermanAnalyzer.class);
+ }
+
+ @Test
+ public void testenglish() throws Exception {
+ assertAnalyzer("english", EnglishAnalyzer.class);
+ }
+
+ @Test
+ public void testspanish() throws Exception {
+ assertAnalyzer("spanish", SpanishAnalyzer.class);
+ }
+
+ @Test
+ public void testbasque() throws Exception {
+ assertAnalyzer("basque", BasqueAnalyzer.class);
+ }
+
+ @Test
+ public void testpersian() throws Exception {
+ assertAnalyzer("persian", PersianAnalyzer.class);
+ }
+
+ @Test
+ public void testfinnish() throws Exception {
+ assertAnalyzer("finnish", FinnishAnalyzer.class);
+ }
+
+ @Test
+ public void testfrench() throws Exception {
+ assertAnalyzer("french", FrenchAnalyzer.class);
+ }
+
+ @Test
+ public void testirish() throws Exception {
+ assertAnalyzer("irish", IrishAnalyzer.class);
+ }
+
+ @Test
+ public void testgalician() throws Exception {
+ assertAnalyzer("galician", GalicianAnalyzer.class);
+ }
+
+ @Test
+ public void testhindi() throws Exception {
+ assertAnalyzer("hindi", HindiAnalyzer.class);
+ }
+
+ @Test
+ public void testhungarian() throws Exception {
+ assertAnalyzer("hungarian", HungarianAnalyzer.class);
+ }
+
+ @Test
+ public void testarmenian() throws Exception {
+ assertAnalyzer("armenian", ArmenianAnalyzer.class);
+ }
+
+ @Test
+ public void testindonesian() throws Exception {
+ assertAnalyzer("indonesian", IndonesianAnalyzer.class);
+ }
+
+ @Test
+ public void testitalian() throws Exception {
+ assertAnalyzer("italian", ItalianAnalyzer.class);
+ }
+
+ @Test
+ public void testjapanese() throws Exception {
+ assertAnalyzer("japanese", JapaneseAnalyzer.class);
+ }
+
+ @Test
+ public void testlatvian() throws Exception {
+ assertAnalyzer("latvian", LatvianAnalyzer.class);
+ }
+
+ @Test
+ public void testdutch() throws Exception {
+ assertAnalyzer("dutch", DutchAnalyzer.class);
+ }
+
+ @Test
+ public void testnorwegian() throws Exception {
+ assertAnalyzer("norwegian", NorwegianAnalyzer.class);
+ }
+
+ @Test
+ public void testpolish() throws Exception {
+ assertAnalyzer("polish", PolishAnalyzer.class);
+ }
+
+ @Test
+ public void testportugese() throws Exception {
+ assertAnalyzer("portugese", PortugueseAnalyzer.class);
+ }
+
+ @Test
+ public void testromanian() throws Exception {
+ assertAnalyzer("romanian", RomanianAnalyzer.class);
+ }
+
+ @Test
+ public void testrussian() throws Exception {
+ assertAnalyzer("russian", RussianAnalyzer.class);
+ }
+
+ @Test
+ public void testclassic() throws Exception {
+ assertAnalyzer("classic", ClassicAnalyzer.class);
+ }
+
+ @Test
+ public void teststandard() throws Exception {
+ assertAnalyzer("standard", StandardAnalyzer.class);
+ }
+
+ @Test
+ public void testemail() throws Exception {
+ assertAnalyzer("email", UAX29URLEmailAnalyzer.class);
+ }
+
+ @Test
+ public void testswedish() throws Exception {
+ assertAnalyzer("swedish", SwedishAnalyzer.class);
+ }
+
+ @Test
+ public void testthai() throws Exception {
+ assertAnalyzer("thai", ThaiAnalyzer.class);
+ }
+
+ @Test
+ public void testturkish() throws Exception {
+ assertAnalyzer("turkish", TurkishAnalyzer.class);
+ }
+
+ private void assertAnalyzer(final String name, final Class<? extends Analyzer> clazz) throws Exception {
+ final AnalyzerFactory factory = new AnalyzerFactory();
+ assertThat(factory.newAnalyzer(name)).isInstanceOf(clazz);
+ }
+
+}
diff --git a/java/nouveau/src/test/java/org/apache/couchdb/nouveau/core/IndexManagerTest.java b/java/nouveau/src/test/java/org/apache/couchdb/nouveau/core/IndexManagerTest.java
new file mode 100644
index 000000000..29de49a74
--- /dev/null
+++ b/java/nouveau/src/test/java/org/apache/couchdb/nouveau/core/IndexManagerTest.java
@@ -0,0 +1,58 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core;
+
+import java.nio.file.Path;
+
+import org.apache.couchdb.nouveau.api.IndexDefinition;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.api.io.TempDir;
+
+import io.dropwizard.testing.junit5.DropwizardExtensionsSupport;
+
+@ExtendWith(DropwizardExtensionsSupport.class)
+public class IndexManagerTest {
+
+ @TempDir
+ static Path tempDir;
+
+ private IndexManager manager;
+
+ @BeforeEach
+ public void setup() throws Exception {
+ manager = new IndexManager();
+ manager.setAnalyzerFactory(new AnalyzerFactory());
+ manager.setCommitIntervalSeconds(5);
+ manager.setObjectMapper(new ObjectMapper());
+ manager.setRootDir(tempDir);
+ manager.start();
+ }
+
+ @AfterEach
+ public void cleanup() throws Exception {
+ manager.stop();
+ }
+
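+ // Smoke test: creating an index named "foo" with the standard analyzer should complete without throwing.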
+ @Test
+ public void testCreate() throws Exception {
+ final IndexDefinition def = new IndexDefinition("standard", null);
+ manager.create("foo", def);
+ }
+
+}
diff --git a/java/nouveau/src/test/java/org/apache/couchdb/nouveau/core/ser/LuceneModuleTest.java b/java/nouveau/src/test/java/org/apache/couchdb/nouveau/core/ser/LuceneModuleTest.java
new file mode 100644
index 000000000..30b45c46d
--- /dev/null
+++ b/java/nouveau/src/test/java/org/apache/couchdb/nouveau/core/ser/LuceneModuleTest.java
@@ -0,0 +1,118 @@
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package org.apache.couchdb.nouveau.core.ser;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+import org.apache.lucene.document.DoublePoint;
+import org.apache.lucene.document.Field.Store;
+import org.apache.lucene.document.StoredField;
+import org.apache.lucene.document.StringField;
+import org.apache.lucene.document.TextField;
+import org.apache.lucene.search.FieldDoc;
+import org.apache.lucene.util.BytesRef;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+
+public class LuceneModuleTest {
+
+ private static ObjectMapper mapper;
+
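+ // One shared ObjectMapper for every test, with LuceneModule registered so the
+ // Lucene field and FieldDoc types under test can be serialized and deserialized.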
+ @BeforeAll
+ public static void setupMapper() {
+ mapper = new ObjectMapper();
+ mapper.registerModule(new LuceneModule());
+ }
+
+ @Test
+ public void testSerializeStringFieldStoreYES() throws Exception {
+ final String expected = "{\"@type\":\"string\",\"name\":\"foo\",\"value\":\"bar\",\"stored\":true}";
+ final String actual = mapper.writeValueAsString(new StringField("foo", "bar", Store.YES));
+ assertEquals(expected, actual);
+ }
+
+ @Test
+ public void testSerializeStringFieldStoreNO() throws Exception {
+ final String expected = "{\"@type\":\"string\",\"name\":\"foo\",\"value\":\"bar\",\"stored\":false}";
+ final String actual = mapper.writeValueAsString(new StringField("foo", "bar", Store.NO));
+ assertEquals(expected, actual);
+ }
+
+ @Test
+ public void testSerializeTextFieldStoreYES() throws Exception {
+ final String expected = "{\"@type\":\"text\",\"name\":\"foo\",\"value\":\"bar\",\"stored\":true}";
+ final String actual = mapper.writeValueAsString(new TextField("foo", "bar", Store.YES));
+ assertEquals(expected, actual);
+ }
+
+ @Test
+ public void testSerializeTextFieldStoreNO() throws Exception {
+ final String expected = "{\"@type\":\"text\",\"name\":\"foo\",\"value\":\"bar\",\"stored\":false}";
+ final String actual = mapper.writeValueAsString(new TextField("foo", "bar", Store.NO));
+ assertEquals(expected, actual);
+ }
+
+ @Test
+ public void testSerializeDoublePoint() throws Exception {
+ final String expected = "{\"@type\":\"double_point\",\"name\":\"foo\",\"value\":12.5}";
+ final String actual = mapper.writeValueAsString(new DoublePoint("foo", 12.5));
+ assertEquals(expected, actual);
+ }
+
+ @Test
+ public void testSerializeStoredFieldString() throws Exception {
+ final String expected = "{\"@type\":\"stored_string\",\"name\":\"foo\",\"value\":\"bar\"}";
+ final String actual = mapper.writeValueAsString(new StoredField("foo", "bar"));
+ assertEquals(expected, actual);
+ }
+
+ @Test
+ public void testSerializeStoredFieldDouble() throws Exception {
+ final String expected = "{\"@type\":\"stored_double\",\"name\":\"foo\",\"value\":12.5}";
+ final String actual = mapper.writeValueAsString(new StoredField("foo", 12.5));
+ assertEquals(expected, actual);
+ }
+
+ @Test
+ public void testSerializeStoredFieldBinary() throws Exception {
+ final String expected = "{\"@type\":\"stored_binary\",\"name\":\"foo\",\"value\":\"YmFy\",\"encoded\":true}";
+ final String actual = mapper.writeValueAsString(new StoredField("foo", new BytesRef("bar")));
+ assertEquals(expected, actual);
+ }
+
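+ // Round-trip test: serialize a FieldDoc's sort values to JSON, read them back,
+ // and verify each value comes back as the same Java type it started as.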
+ @Test
+ public void testSerializeFieldDoc() throws Exception {
+ final FieldDoc fieldDoc = new FieldDoc(1, 2.0f, new Object[] {
+ Float.valueOf(1),
+ Double.valueOf(2),
+ Integer.valueOf(3),
+ Long.valueOf(4),
+ "foo",
+ new BytesRef("bar") });
+
+ final String expected = "[{\"type\":\"float\",\"value\":1.0},{\"type\":\"double\",\"value\":2.0},{\"type\":\"int\",\"value\":3},{\"type\":\"long\",\"value\":4},{\"type\":\"string\",\"value\":\"foo\"},{\"type\":\"bytes\",\"value\":\"YmFy\"}]";
+ final String actual = mapper.writeValueAsString(fieldDoc);
+ assertEquals(expected, actual);
+
+ final FieldDoc fieldDoc2 = mapper.readValue(expected, FieldDoc.class);
+
+ for (int i = 0; i < fieldDoc.fields.length; i++) {
+ assertThat(fieldDoc.fields[i].getClass()).isEqualTo(fieldDoc2.fields[i].getClass());
+ }
+ }
+
+}
diff --git a/java/nouveau/src/test/resources/fixtures/DocumentUpdateRequest.json b/java/nouveau/src/test/resources/fixtures/DocumentUpdateRequest.json
new file mode 100644
index 000000000..121c66858
--- /dev/null
+++ b/java/nouveau/src/test/resources/fixtures/DocumentUpdateRequest.json
@@ -0,0 +1,22 @@
+{
+ "seq": 12,
+ "fields": [
+ {
+ "@type": "string",
+ "name": "stringfoo",
+ "value": "bar",
+ "stored": true
+ },
+ {
+ "@type": "text",
+ "name": "textfoo",
+ "value": "hello there",
+ "stored": true
+ },
+ {
+ "@type": "double_point",
+ "name": "doublefoo",
+ "value": 12
+ }
+ ]
+}
diff --git a/java/nouveau/src/test/resources/fixtures/SearchRequest.json b/java/nouveau/src/test/resources/fixtures/SearchRequest.json
new file mode 100644
index 000000000..c588cc16b
--- /dev/null
+++ b/java/nouveau/src/test/resources/fixtures/SearchRequest.json
@@ -0,0 +1,17 @@
+{
+ "query": "*:*",
+ "limit": 10,
+ "sort": null,
+ "counts": [
+ "bar"
+ ],
+ "ranges": {
+ "foo": [
+ {
+ "label": "0 to 100 inc",
+ "min": 0.0,
+ "max": 100.0
+ }
+ ]
+ }
+}
\ No newline at end of file
diff --git a/java/nouveau/src/test/resources/test-nouveau.yaml b/java/nouveau/src/test/resources/test-nouveau.yaml
new file mode 100644
index 000000000..f5d0a0bf8
--- /dev/null
+++ b/java/nouveau/src/test/resources/test-nouveau.yaml
@@ -0,0 +1,3 @@
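+# Minimal test configuration: a cap on concurrently open indexes plus commit and idle intervals (in seconds).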
+maxIndexesOpen: 10
+commitIntervalSeconds: 30
+idleSeconds: 60