Skip to content

Add support for an ingest pipeline and resource file loading for configuration. #51

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 7 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next Next commit
Merged in PR#48 before additional 5.1.x changes.
  • Loading branch information
Ray Elenteny committed Jan 20, 2017
commit b1c3d8963f43c072a778efb1c977d76361547b89
23 changes: 19 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,8 @@ If you want to see this in action, go to the `samples/` directory and read the r

| Metrics-elasticsearch-reporter | elasticsearch | Release date |
|-----------------------------------|---------------------|:------------:|
| 2.3.0-SNAPSHOT | 2.3.0 -> master | NONE |
| 5.1.1-SNAPSHOT | 5.0.0 -> 5.1.x | master |
| 2.3.0 | 2.3.0 -> 2.4.x | TBD |
| 2.2.0 | 2.2.0 -> 2.2.x | 2016-02-10 |
| 2.0 | 1.0.0 -> 1.7.x | 2014-02-16 |
| 1.0 | 0.90.7 -> 0.90.x | 2014-02-05 |
Expand All @@ -27,7 +28,7 @@ You can simply add a dependency in your `pom.xml` (or whatever dependency resolu
<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>metrics-elasticsearch-reporter</artifactId>
<version>2.2.0</version>
<version>2.3.0</version>
</dependency>
```

Expand Down Expand Up @@ -90,7 +91,7 @@ public class PagerNotifier implements Notifier {
}
```

Add a percolation
Add a percolation _(elasticsearch < 5.0)_

```
curl http://localhost:9200/metrics/.percolator/http-monitor -X PUT -d '{
Expand All @@ -105,6 +106,21 @@ curl http://localhost:9200/metrics/.percolator/http-monitor -X PUT -d '{
}'
```

Add a percolation _(elasticsearch >= 5.0)_

```
curl http://localhost:9200/metrics/queries/http-monitor -X PUT -d '{
"query" : {
"bool" : {
"must": [
{ "term": { "name" : "incoming-http-requests" } },
{ "range": { "m1_rate": { "to" : "10" } } }
]
}
}
}'
```

## JSON Format of metrics

This is what the serialized metrics look like in Elasticsearch
Expand Down Expand Up @@ -194,4 +210,3 @@ This is how the serialized metrics looks like in elasticsearch
## Next steps

* Integration with Kibana would be awesome

48 changes: 34 additions & 14 deletions pom.xml
Original file line number Diff line number Diff line change
@@ -1,15 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

<groupId>org.elasticsearch</groupId>
<artifactId>metrics-elasticsearch-reporter</artifactId>
<version>2.3.0-SNAPSHOT</version>
<version>5.1.1-SNAPSHOT</version>

<properties>
<lucene.version>5.5.0</lucene.version>
<elasticsearch.version>2.3.1</elasticsearch.version>
<lucene.version>6.3.0</lucene.version>
<elasticsearch.version>5.1.1</elasticsearch.version>
<jackson.version>2.7.3</jackson.version>
<randomized.testrunner.version>2.3.3</randomized.testrunner.version>
<log4j.version>2.7</log4j.version>
<slf4j.version>1.7.2</slf4j.version>
</properties>

<modelVersion>4.0.0</modelVersion>
Expand Down Expand Up @@ -96,18 +98,35 @@
<artifactId>jackson-module-afterburner</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>${log4j.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>${log4j.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-slf4j-impl</artifactId>
<version>${log4j.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-test-framework</artifactId>
<version>${lucene.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
<groupId>org.elasticsearch.test</groupId>
<artifactId>framework</artifactId>
<version>${elasticsearch.version}</version>
<scope>test</scope>
<type>test-jar</type>
</dependency>
<dependency>
<groupId>org.elasticsearch</groupId>
Expand All @@ -116,17 +135,18 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.carrotsearch.randomizedtesting</groupId>
<artifactId>randomizedtesting-runner</artifactId>
<version>${randomized.testrunner.version}</version>
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>transport-netty4-client</artifactId>
<version>${elasticsearch.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<version>1.7.19</version>
<groupId>org.elasticsearch.plugin</groupId>
<artifactId>percolator-client</artifactId>
<version>${elasticsearch.version}</version>
<scope>test</scope>
</dependency>

<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
Expand All @@ -147,4 +167,4 @@
</dependency>
</dependencies>

</project>
</project>
58 changes: 40 additions & 18 deletions src/main/java/org/elasticsearch/metrics/ElasticsearchReporter.java
Original file line number Diff line number Diff line change
Expand Up @@ -22,11 +22,13 @@
import com.codahale.metrics.Timer;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.module.afterburner.AfterburnerModule;

import org.elasticsearch.metrics.percolation.Notifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Expand Down Expand Up @@ -249,7 +251,7 @@ public ElasticsearchReporter(MetricRegistry registry, String[] hosts, int timeou
this.notifier = percolationNotifier;
}
if (timestampFieldname == null || timestampFieldname.trim().length() == 0) {
LOGGER.error("Timestampfieldname {} is not valid, using default @timestamp", timestampFieldname);
LOGGER.error("Timestampfieldname {} is not valid, using default @timestamp", timestampFieldname);
timestampFieldname = "@timestamp";
}

Expand Down Expand Up @@ -340,7 +342,7 @@ public void report(SortedMap<String, Gauge> gauges,
}
}
}
// catch the exception to make sure we do not interrupt the live application
// catch the exception to make sure we do not interrupt the live application
} catch (IOException e) {
LOGGER.error("Couldnt report to elasticsearch server", e);
}
Expand All @@ -350,28 +352,40 @@ public void report(SortedMap<String, Gauge> gauges,
* Execute a percolation request for the specified metric
*/
private List<String> getPercolationMatches(JsonMetric jsonMetric) throws IOException {
HttpURLConnection connection = openConnection("/" + currentIndexName + "/" + jsonMetric.type() + "/_percolate", "POST");
HttpURLConnection connection = openConnection("/" + currentIndexName + "/_search", "POST");
if (connection == null) {
LOGGER.error("Could not connect to any configured elasticsearch instances for percolation: {}", Arrays.asList(hosts));
return Collections.emptyList();
}

Map<String, Object> data = new HashMap<>(1);
data.put("doc", jsonMetric);
objectMapper.writeValue(connection.getOutputStream(), data);
JsonGenerator json = new JsonFactory().createGenerator(connection.getOutputStream());
json.setCodec(objectMapper);
json.writeStartObject();
json.writeObjectFieldStart("query");
json.writeObjectFieldStart("percolate");
json.writeStringField("field", "query");
json.writeStringField("document_type", jsonMetric.type());
json.writeObjectField("document", jsonMetric);
json.writeEndObject();
json.writeEndObject();
json.writeEndObject();
json.flush();

closeConnection(connection);

if (connection.getResponseCode() != 200) {
throw new RuntimeException("Error percolating " + jsonMetric);
}

Map<String, Object> input = objectMapper.readValue(connection.getInputStream(), new TypeReference<Map<String, Object>>() {});
List<String> matches = new ArrayList<>();
if (input.containsKey("matches") && input.get("matches") instanceof List) {
List<Map<String, String>> foundMatches = (List<Map<String, String>>) input.get("matches");
for (Map<String, String> entry : foundMatches) {
if (entry.containsKey("_id")) {
matches.add(entry.get("_id"));
JsonNode input = objectMapper.readTree(connection.getInputStream());
if (input.has("hits")) {
JsonNode hits = input.get("hits");
if (hits.has("hits") && hits.get("hits").isArray()) {
for (JsonNode entry : (ArrayNode) hits.get("hits")) {
if (entry.has("_id")) {
matches.add(entry.get("_id").asText());
}
}
}
}
Expand Down Expand Up @@ -478,7 +492,7 @@ private void checkForIndexTemplate() {
LOGGER.error("Error adding metrics template to elasticsearch");
return;
}

LOGGER.info("Creating metrics template matching index pattern of {}", index + "*");
JsonGenerator json = new JsonFactory().createGenerator(putTemplateConnection.getOutputStream());
json.writeStartObject();
json.writeStringField("template", index + "*");
Expand All @@ -490,8 +504,16 @@ private void checkForIndexTemplate() {
json.writeEndObject();
json.writeObjectFieldStart("properties");
json.writeObjectFieldStart("name");
json.writeObjectField("type", "string");
json.writeObjectField("index", "not_analyzed");
json.writeObjectField("type", "keyword");
json.writeEndObject();
json.writeEndObject();
json.writeEndObject();

// Percolator: Elasticsearch 5.0+ changed how percolation queries are handled; they now need to be part of the index mapping.
json.writeObjectFieldStart("queries");
json.writeObjectFieldStart("properties");
json.writeObjectFieldStart("query");
json.writeStringField("type", "percolator");
json.writeEndObject();
json.writeEndObject();
json.writeEndObject();
Expand All @@ -502,12 +524,12 @@ private void checkForIndexTemplate() {

putTemplateConnection.disconnect();
if (putTemplateConnection.getResponseCode() != 200) {
LOGGER.error("Error adding metrics template to elasticsearch: {}/{}" + putTemplateConnection.getResponseCode(), putTemplateConnection.getResponseMessage());
LOGGER.error("Error adding metrics template to elasticsearch: {}/{}", putTemplateConnection.getResponseCode(), putTemplateConnection.getResponseMessage());
}
}
checkedForIndexTemplate = true;
} catch (IOException e) {
LOGGER.error("Error when checking/adding metrics template to elasticsearch", e);
}
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -41,26 +41,31 @@
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.metrics.percolation.Notifier;
import org.elasticsearch.node.Node;
import org.elasticsearch.percolator.PercolatorPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.transport.Netty4Plugin;
import org.joda.time.format.ISODateTimeFormat;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import static com.codahale.metrics.MetricRegistry.name;
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.hasSize;

@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST)
public class ElasticsearchReporterTest extends ESIntegTestCase {

private ElasticsearchReporter elasticsearchReporter;
Expand All @@ -71,12 +76,22 @@ public class ElasticsearchReporterTest extends ESIntegTestCase {

@Override
protected Settings nodeSettings(int nodeOrdinal) {
return settingsBuilder()
return Settings.builder()
.put(super.nodeSettings(nodeOrdinal))
.put(Node.HTTP_ENABLED, true)
.put("http.type", "netty4")
.put("http.enabled", "true")
.put("http.port", "9200-9300")
.build();
}

@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
plugins.add(Netty4Plugin.class);
plugins.add(PercolatorPlugin.class);
return plugins;
}

@Before
public void setup() throws IOException {
elasticsearchReporter = createElasticsearchReporterBuilder().build();
Expand Down Expand Up @@ -110,8 +125,8 @@ public void testThatMappingFromTemplateIsApplied() throws Exception {
assertThat(indexMetaData.getMappings().containsKey("counter"), is(true));
Map<String, Object> properties = getAsMap(indexMetaData.mapping("counter").sourceAsMap(), "properties");
Map<String, Object> mapping = getAsMap(properties, "name");
assertThat(mapping, hasKey("index"));
assertThat(mapping.get("index").toString(), is("not_analyzed"));
assertThat(mapping, hasKey("type"));
assertThat(mapping.get("type").toString(), is("keyword"));
}

@SuppressWarnings("unchecked")
Expand Down Expand Up @@ -294,8 +309,8 @@ public boolean matches(String name, Metric metric) {
.must(QueryBuilders.rangeQuery("count").gte(20))
.must(QueryBuilders.termQuery("name", prefix + ".foo"))
);
String json = String.format("{ \"query\" : %s }", queryBuilder.buildAsBytes().toUtf8());
client().prepareIndex(indexWithDate, ".percolator", "myName").setRefresh(true).setSource(json).execute().actionGet();
String json = String.format("{ \"query\" : %s }", queryBuilder);
client().prepareIndex(indexWithDate, "queries", "myName").setSource(json).execute().actionGet();

evictions.inc(1);
reportAndRefresh();
Expand Down Expand Up @@ -404,4 +419,4 @@ private ElasticsearchReporter.Builder createElasticsearchReporterBuilder() {
.index(index)
.additionalFields(additionalFields);
}
}
}