Skip to content

update to elasticsearch 1.2.1 #7

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Merged 2 commits on Jun 18, 2014
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ ES version Plugin Release date Command
0.90.5 1.3.0 Oct 16, 2013 ./bin/plugin --install termlist --url http://bit.ly/1bzHfIl
0.90.7 1.4.0 Dec 20, 2013 ./bin/plugin --install termlist --url http://bit.ly/1c70ICf
1.0.0.RC1 1.0.0.RC1.1 Jan 16, 2014 ./bin/plugin --install termlist --url http://bit.ly/1dSIzoW
1.2.1         1.2.1       Jun 18, 2014
============= =========== ================= ===========================================================

Do not forget to restart the node after installing.
Expand Down
4 changes: 2 additions & 2 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

<groupId>org.xbib.elasticsearch.plugin</groupId>
<artifactId>elasticsearch-index-termlist</artifactId>
<version>1.0.0.RC1.1</version>
<version>1.2.1</version>

<packaging>jar</packaging>

Expand Down Expand Up @@ -44,7 +44,7 @@
<properties>
<github.global.server>github</github.global.server>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<elasticsearch.version>1.0.0.RC1</elasticsearch.version>
<elasticsearch.version>1.2.1</elasticsearch.version>
</properties>

<dependencies>
Expand Down
Original file line number Diff line number Diff line change
@@ -1,13 +1,12 @@

package org.xbib.elasticsearch.action.termlist;

import java.io.IOException;

import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest;
import org.elasticsearch.action.support.broadcast.BroadcastOperationThreading;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;

public class TermlistRequest extends BroadcastOperationRequest<TermlistRequest> {

private String field;
Expand All @@ -23,7 +22,6 @@ public class TermlistRequest extends BroadcastOperationRequest<TermlistRequest>

public TermlistRequest(String... indices) {
super(indices);
operationThreading(BroadcastOperationThreading.THREAD_PER_SHARD);
}

public void setField(String field) {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,13 +1,20 @@

package org.xbib.elasticsearch.action.termlist;

import java.io.IOException;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicReferenceArray;

import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiFields;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
Expand All @@ -27,14 +34,6 @@
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

import java.io.IOException;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicReferenceArray;

import static org.elasticsearch.common.collect.Lists.newLinkedList;

/**
Expand Down Expand Up @@ -173,7 +172,7 @@ protected ShardTermlistResponse shardOperation(ShardTermlistRequest request) thr
} catch (IOException ex) {
throw new ElasticsearchException(ex.getMessage(), ex);
} finally {
searcher.release();
searcher.close();
}
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,23 +1,20 @@

package org.xbib.elasticsearch.rest.action.termlist;

import java.io.IOException;
import java.util.Map;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.BytesRestResponse;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.rest.XContentRestResponse;
import org.elasticsearch.rest.XContentThrowableRestResponse;
import org.elasticsearch.rest.action.support.RestXContentBuilder;

import org.elasticsearch.rest.RestResponse;
import org.elasticsearch.rest.action.support.RestBuilderListener;
import org.xbib.elasticsearch.action.termlist.TermInfo;
import org.xbib.elasticsearch.action.termlist.TermlistAction;
import org.xbib.elasticsearch.action.termlist.TermlistRequest;
Expand All @@ -44,38 +41,27 @@ public void handleRequest(final RestRequest request, final RestChannel channel)
termlistRequest.setSize(request.paramAsInt("size", 0));
termlistRequest.setWithDocFreq(request.paramAsBoolean("docfreqs", false));
termlistRequest.setWithTotalFreq(request.paramAsBoolean("totalfreqs", false));
client.execute(TermlistAction.INSTANCE, termlistRequest, new ActionListener<TermlistResponse>() {
client.execute(TermlistAction.INSTANCE, termlistRequest, new RestBuilderListener<TermlistResponse>(channel) {

public void onResponse(TermlistResponse response) {
try {
XContentBuilder builder = RestXContentBuilder.restContentBuilder(request);
builder.startObject();
buildBroadcastShardsHeader(builder, response);
builder.startArray("terms");
for (Map.Entry<String,TermInfo> t : response.getTermlist().entrySet()) {
builder.startObject().field("name", t.getKey());
if (t.getValue().getDocFreq() != null) {
builder.field("docfreq", t.getValue().getDocFreq());
}
if (t.getValue().getTotalFreq() != null) {
builder.field("totalfreq", t.getValue().getTotalFreq());
}
builder.endObject();
@Override
public RestResponse buildResponse(TermlistResponse response, XContentBuilder builder) throws Exception {
builder.startObject();
buildBroadcastShardsHeader(builder, response);
builder.startArray("terms");
for (Map.Entry<String,TermInfo> t : response.getTermlist().entrySet()) {
builder.startObject().field("name", t.getKey());
if (t.getValue().getDocFreq() != null) {
builder.field("docfreq", t.getValue().getDocFreq());
}
builder.endArray().endObject();
channel.sendResponse(new XContentRestResponse(request, OK, builder));
} catch (Exception e) {
onFailure(e);
}
}

public void onFailure(Throwable e) {
try {
channel.sendResponse(new XContentThrowableRestResponse(request, e));
} catch (IOException e1) {
logger.error("Failed to send failure response", e1);
if (t.getValue().getTotalFreq() != null) {
builder.field("totalfreq", t.getValue().getTotalFreq());
}
builder.endObject();
}
}
builder.endArray().endObject();

return new BytesRestResponse(OK, builder);
}
});
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ public void stopNode() {
@Test
public void assertPluginLoaded() {
NodesInfoResponse nodesInfoResponse = client.admin().cluster().prepareNodesInfo()
.clear().setPlugin(true).get();
.clear().setPlugins(true).get();
logger.info("{}", nodesInfoResponse);
assertEquals(nodesInfoResponse.getNodes().length, 1);
assertNotNull(nodesInfoResponse.getNodes()[0].getPlugins().getInfos());
Expand Down