Rebase from origin
Signed-off-by: Zelin Hao <[email protected]>
zelinh committed Dec 16, 2022
2 parents 3223dfe + c0eea5c commit 914b9b7
Showing 36 changed files with 1,719 additions and 264 deletions.
29 changes: 29 additions & 0 deletions .github/workflows/auto-release.yml
@@ -0,0 +1,29 @@
name: Releases

on:
  push:
    tags:
      - '*'

jobs:

  build:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - name: GitHub App token
        id: github_app_token
        uses: tibdex/[email protected]
        with:
          app_id: ${{ secrets.APP_ID }}
          private_key: ${{ secrets.APP_PRIVATE_KEY }}
          installation_id: 22958780
      - name: Get tag
        id: tag
        uses: dawidd6/action-get-tag@v1
      - uses: actions/checkout@v2
      - uses: ncipollo/release-action@v1
        with:
          github_token: ${{ steps.github_app_token.outputs.token }}
          bodyFile: release-notes/opensearch.release-notes-${{steps.tag.outputs.tag}}.md
5 changes: 4 additions & 1 deletion CHANGELOG.md
@@ -13,8 +13,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Reject bulk requests with invalid actions ([#5299](https://github.com/opensearch-project/OpenSearch/issues/5299))
- Add max_shard_size parameter for shrink API ([#5229](https://github.com/opensearch-project/OpenSearch/pull/5229))
- Added jackson dependency to server ([#5366](https://github.com/opensearch-project/OpenSearch/pull/5366))
- Added experimental extensions to main ([#5347](https://github.com/opensearch-project/OpenSearch/pull/5347))
- Adding support to register settings dynamically ([#5495](https://github.com/opensearch-project/OpenSearch/pull/5495))
- Adding auto release workflow ([#5582](https://github.com/opensearch-project/OpenSearch/pull/5582))
- Added experimental support for extensions ([#5347](https://github.com/opensearch-project/OpenSearch/pull/5347)), ([#5518](https://github.com/opensearch-project/OpenSearch/pull/5518))
- Add CI bundle pattern to distribution download ([#5348](https://github.com/opensearch-project/OpenSearch/pull/5348))

### Dependencies
@@ -35,6 +36,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Pre conditions check before updating weighted routing metadata ([#4955](https://github.com/opensearch-project/OpenSearch/pull/4955))

### Deprecated
- Refactor fuzziness interface on query builders ([#5433](https://github.com/opensearch-project/OpenSearch/pull/5433))

### Removed
### Fixed
- Fix 1.x compatibility bug with stored Tasks ([#5412](https://github.com/opensearch-project/OpenSearch/pull/5412))
@@ -37,6 +37,7 @@
import org.opensearch.action.index.IndexRequestBuilder;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.unit.Fuzziness;
import org.opensearch.common.util.set.Sets;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
@@ -1024,7 +1025,7 @@ public void testFuzzyFieldLevelBoosting() throws InterruptedException, Execution

SearchResponse searchResponse = client().prepareSearch(idx)
.setExplain(true)
.setQuery(multiMatchQuery("foo").field("title", 100).field("body").fuzziness(0))
.setQuery(multiMatchQuery("foo").field("title", 100).field("body").fuzziness(Fuzziness.ZERO))
.get();
SearchHit[] hits = searchResponse.getHits().getHits();
assertNotEquals("both documents should be on different shards", hits[0].getShard().getShardId(), hits[1].getShard().getShardId());
@@ -49,6 +49,7 @@
import org.opensearch.common.regex.Regex;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.time.DateFormatter;
import org.opensearch.common.unit.Fuzziness;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentType;
@@ -762,21 +763,21 @@ public void testMatchQueryFuzzy() throws Exception {
client().prepareIndex("test").setId("2").setSource("text", "Unity")
);

SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("text", "uniy").fuzziness("0")).get();
SearchResponse searchResponse = client().prepareSearch().setQuery(matchQuery("text", "uniy").fuzziness(Fuzziness.ZERO)).get();
assertHitCount(searchResponse, 0L);

searchResponse = client().prepareSearch().setQuery(matchQuery("text", "uniy").fuzziness("1")).get();
searchResponse = client().prepareSearch().setQuery(matchQuery("text", "uniy").fuzziness(Fuzziness.ONE)).get();
assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "2");

searchResponse = client().prepareSearch().setQuery(matchQuery("text", "uniy").fuzziness("AUTO")).get();
searchResponse = client().prepareSearch().setQuery(matchQuery("text", "uniy").fuzziness(Fuzziness.AUTO)).get();
assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "2");

searchResponse = client().prepareSearch().setQuery(matchQuery("text", "uniy").fuzziness("AUTO:5,7")).get();
searchResponse = client().prepareSearch().setQuery(matchQuery("text", "uniy").fuzziness(Fuzziness.customAuto(5, 7))).get();
assertHitCount(searchResponse, 0L);

searchResponse = client().prepareSearch().setQuery(matchQuery("text", "unify").fuzziness("AUTO:5,7")).get();
searchResponse = client().prepareSearch().setQuery(matchQuery("text", "unify").fuzziness(Fuzziness.customAuto(5, 7))).get();
assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "2");
}
10 changes: 10 additions & 0 deletions server/src/main/java/org/opensearch/common/unit/Fuzziness.java
@@ -139,6 +139,16 @@ public static Fuzziness build(Object fuzziness) {
return new Fuzziness(string);
}

/**
* Creates a {@link Fuzziness} instance from lowDistance and highDistance,
* where the edit distance is 0 for strings shorter than lowDistance,
* 1 for strings whose length is between lowDistance and highDistance (inclusive),
* and 2 for strings longer than highDistance.
*/
public static Fuzziness customAuto(int lowDistance, int highDistance) {
return new Fuzziness("AUTO", lowDistance, highDistance);
}

private static Fuzziness parseCustomAuto(final String string) {
assert string.toUpperCase(Locale.ROOT).startsWith(AUTO.asString() + ":");
String[] fuzzinessLimit = string.substring(AUTO.asString().length() + 1).split(",");
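For illustration (not part of the diff), the new factory method can be exercised from a query builder the same way the updated tests above do; the index and field names are placeholders:

// Sketch only: assumes an index "test" with a text field "text", as in the updated tests.
// Per the Javadoc above, terms shorter than 5 characters allow 0 edits, lengths 5-7 allow 1 edit,
// and longer terms allow 2 edits.
SearchResponse response = client().prepareSearch("test")
    .setQuery(matchQuery("text", "unify").fuzziness(Fuzziness.customAuto(5, 7)))
    .get();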
@@ -15,62 +15,58 @@
import org.opensearch.transport.TransportRequest;

import java.io.IOException;
import java.util.List;
import java.util.Objects;

/**
* PluginRequest to initialize plugin
* InitializeExtensionRequest to initialize plugin
*
* @opensearch.internal
*/
public class PluginRequest extends TransportRequest {
public class InitializeExtensionRequest extends TransportRequest {
private final DiscoveryNode sourceNode;
/*
* TODO change DiscoveryNode to Extension information
*/
private final List<DiscoveryExtensionNode> extensions;
private final DiscoveryExtensionNode extension;

public PluginRequest(DiscoveryNode sourceNode, List<DiscoveryExtensionNode> extensions) {
public InitializeExtensionRequest(DiscoveryNode sourceNode, DiscoveryExtensionNode extension) {
this.sourceNode = sourceNode;
this.extensions = extensions;
this.extension = extension;
}

public PluginRequest(StreamInput in) throws IOException {
public InitializeExtensionRequest(StreamInput in) throws IOException {
super(in);
sourceNode = new DiscoveryNode(in);
extensions = in.readList(DiscoveryExtensionNode::new);
extension = new DiscoveryExtensionNode(in);
}

@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
sourceNode.writeTo(out);
out.writeList(extensions);
}

public List<DiscoveryExtensionNode> getExtensions() {
return extensions;
extension.writeTo(out);
}

public DiscoveryNode getSourceNode() {
return sourceNode;
}

public DiscoveryExtensionNode getExtension() {
return extension;
}

@Override
public String toString() {
return "PluginRequest{" + "sourceNode=" + sourceNode + ", extensions=" + extensions + '}';
return "InitializeExtensionsRequest{" + "sourceNode=" + sourceNode + ", extension=" + extension + '}';
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
PluginRequest that = (PluginRequest) o;
return Objects.equals(sourceNode, that.sourceNode) && Objects.equals(extensions, that.extensions);
InitializeExtensionRequest that = (InitializeExtensionRequest) o;
return Objects.equals(sourceNode, that.sourceNode) && Objects.equals(extension, that.extension);
}

@Override
public int hashCode() {
return Objects.hash(sourceNode, extensions);
return Objects.hash(sourceNode, extension);
}
}
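As a rough sketch (not part of this diff), the renamed request round-trips through the stream protocol like any other TransportRequest; sourceNode and extensionNode are assumed to be pre-built DiscoveryNode and DiscoveryExtensionNode instances:

// Illustrative only: 'sourceNode' and 'extensionNode' are assumed to already exist.
InitializeExtensionRequest request = new InitializeExtensionRequest(sourceNode, extensionNode);
try (BytesStreamOutput out = new BytesStreamOutput()) {
    request.writeTo(out);   // writes the source node and the single extension
    try (StreamInput in = out.bytes().streamInput()) {
        InitializeExtensionRequest copy = new InitializeExtensionRequest(in);
        assert copy.getExtension().equals(request.getExtension());
        assert copy.getSourceNode().equals(request.getSourceNode());
    }
}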
@@ -44,14 +44,14 @@
*
* @opensearch.internal
*/
public class PluginResponse extends TransportResponse {
public class InitializeExtensionResponse extends TransportResponse {
private String name;

public PluginResponse(String name) {
public InitializeExtensionResponse(String name) {
this.name = name;
}

public PluginResponse(StreamInput in) throws IOException {
public InitializeExtensionResponse(StreamInput in) throws IOException {
name = in.readString();
}

@@ -77,7 +77,7 @@ public String toString() {
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
PluginResponse that = (PluginResponse) o;
InitializeExtensionResponse that = (InitializeExtensionResponse) o;
return Objects.equals(name, that.name);
}

@@ -0,0 +1,68 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

package org.opensearch.extensions;

import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.transport.TransportResponse;
import java.io.IOException;
import java.util.Objects;

/**
* Generic boolean response indicating the status of some previous request sent to the SDK
*
* @opensearch.internal
*/
public class AcknowledgedResponse extends TransportResponse {

private final boolean status;

/**
* @param status Boolean indicating the status of the parse request sent to the SDK
*/
public AcknowledgedResponse(boolean status) {
this.status = status;
}

public AcknowledgedResponse(StreamInput in) throws IOException {
super(in);
this.status = in.readBoolean();
}

@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeBoolean(status);
}

@Override
public String toString() {
return "AcknowledgedResponse{" + "status=" + this.status + "}";
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AcknowledgedResponse that = (AcknowledgedResponse) o;
return Objects.equals(this.status, that.status);
}

@Override
public int hashCode() {
return Objects.hash(status);
}

/**
* Returns a boolean indicating the success of the request sent to the SDK
*/
public boolean getStatus() {
return this.status;
}

}
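A minimal usage sketch (the surrounding handler is hypothetical, not part of this change):

// Hypothetical caller: check whether the SDK acknowledged the previous request.
AcknowledgedResponse response = new AcknowledgedResponse(true);
if (response.getStatus()) {
    // the extension acknowledged the request
} else {
    // the extension reported a failure; the caller would retry or surface an error
}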
@@ -0,0 +1,45 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/

package org.opensearch.extensions;

import java.net.UnknownHostException;
import org.opensearch.cluster.node.DiscoveryNode;

/**
* Reference to a method that transports a parse request to an extension. By convention, this method takes
* a category class used to identify the reader defined within the JVM that the extension is running on.
* Additionally, this method takes in the extension's corresponding DiscoveryNode and a byte array (context) that the
* extension's reader will be applied to.
*
* By convention, the extension's reader is a constructor that takes a StreamInput argument for most classes, or a static
* method for things like enums. A typical implementation looks like:
* <pre><code>
* public MyClass(final StreamInput in) throws IOException {
* this.someValue = in.readVInt();
* this.someMap = in.readMapOfLists(StreamInput::readString, StreamInput::readString);
* }
* </code></pre>
*
* @opensearch.internal
*/
@FunctionalInterface
public interface ExtensionReader {

/**
* Transports category class, and StreamInput (context), to the extension identified by the Discovery Node
*
* @param extensionNode Discovery Node identifying the Extension
* @param categoryClass Super class that the reader extends
* @param context Some context to transport
* @throws UnknownHostException if the extension node host IP address could not be determined
*/
void parse(DiscoveryNode extensionNode, Class categoryClass, Object context) throws UnknownHostException;

}
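Because the interface is functional, an implementation can be supplied as a lambda. The body below only logs the call; the transport helper a real reader would use is not shown in this diff:

// Hypothetical wiring: a real implementation would forward categoryClass and context
// to the extension node over the transport layer instead of just logging.
ExtensionReader reader = (extensionNode, categoryClass, context) -> {
    System.out.println("parse request for " + categoryClass.getName() + " -> " + extensionNode.getId());
};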