Update to 7.9.1
* Add support for department numbers in index names
* Add Kibana saved objects (maps, index patterns, dashboards)
* Work around a temporary Kibana bug when importing the Logstash pipeline (elastic/kibana#76611)
dadoonet committed Sep 10, 2020
commit ee1f140 (1 parent: c735a3d)
Showing 14 changed files with 113 additions and 19 deletions.
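The net effect: each BANO address is now routed to one index per French department (bano-01, bano-02, ... bano-95, plus overseas departments) instead of a single bano index, both in the Logstash pipeline and in the cloud ingest pipeline. Once data is loaded, the resulting indices can be listed from Kibana Dev Tools (an illustrative check in the same console syntax as script.txt, not part of the commit):

GET _cat/indices/bano-*?v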
2 changes: 1 addition & 1 deletion .env
@@ -1,3 +1,3 @@
-ELASTIC_VERSION=7.6.0
+ELASTIC_VERSION=7.9.1
ELASTIC_PASSWORD=changeme
INJECTOR_VERSION=7.0
21 changes: 21 additions & 0 deletions cloud/ingest-bano.json
@@ -15,6 +15,21 @@
        ]
      }
    },
+   {
+     "gsub": {
+       "field": "log.file.path",
+       "pattern": "\\/bano-data\\/bano-",
+       "replacement": "",
+       "target_field": "dept"
+     }
+   },
+   {
+     "gsub": {
+       "field": "dept",
+       "pattern": "\\.csv",
+       "replacement": ""
+     }
+   },
    {
      "convert": {
        "field": "location.lat",
@@ -39,6 +54,12 @@
          "message"
        ]
      }
-   }
+   },
+   {
+     "set": {
+       "field": "_index",
+       "value": "bano-{{dept}}"
+     }
+   }
  ]
}
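A quick way to sanity-check the new processors is the simulate API. The sketch below is illustrative and not part of the commit: the sample document is made up, and the full pipeline contains more processors than this hunk shows, so additional fields may be required:

POST _ingest/pipeline/bano/_simulate
{
  "docs": [
    {
      "_source": {
        "log": { "file": { "path": "/bano-data/bano-95.csv" } },
        "location": { "lat": "49.05", "lon": "2.06" },
        "message": "9500001,1,rue de l'Exemple,95000,Cergy,OSM,49.05,2.06"
      }
    }
  ]
}

If everything works, the two gsub processors leave dept set to "95" and the set processor routes the document to the bano-95 index.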
4 changes: 2 additions & 2 deletions cloud/template-bano.json
@@ -66,10 +66,10 @@
        }
      }
    },
-   "region": {
+   "id": {
      "type": "keyword"
    },
-   "id": {
+   "dept": {
      "type": "keyword"
    },
    "source": {
1 change: 1 addition & 0 deletions docker-compose.yml
@@ -28,6 +28,7 @@ services:
    environment:
      - ELASTICSEARCH_USERNAME=elastic
      - ELASTICSEARCH_PASSWORD=$ELASTIC_PASSWORD
+     - XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY=AZERTYUIOPAZERTYUIOPAZERTYUIOP01
    links:
      - elasticsearch
    ports:
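Note: Kibana requires xpack.encryptedSavedObjects.encryptionKey to be at least 32 characters; without one, Kibana 7.x falls back to a randomly generated key at startup. A hard-coded dummy key is fine for this demo but should not be reused outside of it.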
6 changes: 5 additions & 1 deletion download.sh
@@ -5,7 +5,11 @@ echo "Download all BANO data"
export SOURCE_DIR=~/Documents/Elasticsearch/Talks/postal_addresses/demo/

DEPTS=95
-for i in {1..19} $(seq 21 $DEPTS) {971..974} {976..976} ; do
+for i in {1..7} {10..19} $(seq 21 $DEPTS) {971..974} {976..976} ; do
  DEPT=$(printf %02d $i)
  $SOURCE_DIR/download_region.sh $DEPT
done
+
+# We need to manually download 08 and 09 departments as there is an issue on Linux
+$SOURCE_DIR/download_region_manual.sh 08
+$SOURCE_DIR/download_region_manual.sh 09
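(download_region_manual.sh, added below, presumably sidesteps the Linux issue by taking the already zero-padded department number and fetching the file directly.)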
23 changes: 23 additions & 0 deletions download_region_manual.sh
@@ -0,0 +1,23 @@
#!/usr/bin/env bash

echo "Downloading BANO region $1"

DATASOURCE_DIR=~/Documents/Elasticsearch/Talks/postal_addresses/demo/bano-data

import_region () {
  export REGION=$1
  FILE=$DATASOURCE_DIR/bano-$REGION.csv
  URL=http://bano.openstreetmap.fr/data/bano-$REGION.csv
  # We import the region from OpenStreetMap if not available yet
  if [ ! -e $FILE ] ; then
    echo "Fetching $FILE from $URL"
    wget $URL -P $DATASOURCE_DIR
  fi
}

if [ ! -e $DATASOURCE_DIR ] ; then
  echo "Creating $DATASOURCE_DIR dir"
  mkdir $DATASOURCE_DIR
fi

import_region $1
2 changes: 1 addition & 1 deletion filebeat-config/filebeat.yml
@@ -1,7 +1,7 @@
filebeat.inputs:
- type: log
  paths:
-   - /bano-data/bano-95.csv
+   - /bano-data/bano-*.csv

output.logstash:
  hosts: ["logstash:5044"]
6 changes: 6 additions & 0 deletions kibana-config/bano.ndjson

Large diffs are not rendered by default.

9 changes: 5 additions & 4 deletions logstash-config/pipeline/bano.conf
@@ -10,11 +10,11 @@ input {
}
filter {
  elasticsearch {
-   hosts => ["http://elasticsearch:9200"]
-   user => "elastic"
-   password => "changeme"
+   hosts => ["https://9294b959a68c4e64b8a51f1b88105ab9.europe-west1.gcp.cloud.es.io:9243"]
+   user => "bano"
+   password => "banobano"
    query_template => "/usr/share/logstash/config/search-by-geo.json"
-   index => "bano"
+   index => "bano-*"
    fields => {
      "address" => "[address_new]"
      "location" => "[address][location]"
@@ -34,6 +34,7 @@ filter {
output {
  # stdout { codec => rubydebug }
  elasticsearch {
+   manage_template => false
    hosts => ["http://elasticsearch:9200"]
    user => "elastic"
    password => "changeme"
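Two things worth noting here: the elasticsearch filter now looks up addresses in the cloud cluster across all the per-department bano-* indices, while the output still writes to the local cluster; and manage_template => false stops Logstash from installing its default index template, so the bano template that the setup scripts PUT explicitly stays in charge.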
10 changes: 8 additions & 2 deletions logstash-config/pipeline/beats.conf
@@ -9,27 +9,33 @@ filter {
    columns => [
      "id","number","street_name","zipcode","city","source","latitude","longitude"
    ]
-   remove_field => [ "message", "host", "@timestamp", "@version", "input", "log", "ecs", "agent", "tags" ]
+   remove_field => [ "message", "host", "@timestamp", "@version", "input", "ecs", "agent", "tags" ]
  }
  mutate {
+   gsub => [
+     "[log][file][path]", "\/bano-data\/bano-", "",
+     "[log][file][path]", "\.csv", ""
+   ]
    convert => { "longitude" => "float" }
    convert => { "latitude" => "float" }
  }
  mutate {
    rename => {
+     "[log][file][path]" => "dept"
      "longitude" => "[location][lon]"
      "latitude" => "[location][lat]"
      "number" => "[address][number]"
      "street_name" => "[address][street_name]"
      "zipcode" => "[address][zipcode]"
      "city" => "[address][city]"
    }
+   remove_field => [ "log" ]
  }
}
output {
  elasticsearch {
    hosts => ["http://elasticsearch:9200"]
-   index => "bano"
+   index => "bano-%{[dept]}"
    user => "elastic"
    password => "changeme"
    document_id => "%{[id]}"
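To trace the new routing: for an event read from /bano-data/bano-95.csv, the gsub pair strips the directory prefix and the .csv suffix so [log][file][path] becomes "95", the rename turns that into dept, and the output then writes the event to the bano-95 index via index => "bano-%{[dept]}".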
2 changes: 2 additions & 0 deletions logstash-config/pipeline/load_pipeline.sh
@@ -10,6 +10,8 @@ generate_data()
{
cat <<EOF
{
+  "id": "$1",
+  "description": "Pipeline $1 for bano demo",
  "pipeline": "$message"
}
EOF
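The added id and description fields are presumably the workaround for the Kibana issue referenced in the commit message (elastic/kibana#76611): the pipeline document sent to Kibana now carries them explicitly instead of relying on Kibana to fill them in.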
23 changes: 16 additions & 7 deletions script.txt
@@ -137,6 +137,9 @@ PUT _template/bano
      "id": {
        "type": "keyword"
      },
+     "dept": {
+       "type": "keyword"
+     },
      "source": {
        "type": "keyword"
      },
@@ -161,39 +164,45 @@ filter {
    columns => [
      "id","number","street_name","zipcode","city","source","latitude","longitude"
    ]
-   remove_field => [ "message", "host", "@timestamp", "@version", "input", "log", "ecs", "agent", "tags" ]
+   remove_field => [ "message", "host", "@timestamp", "@version", "input", "ecs", "agent", "tags" ]
  }
  mutate {
+   gsub => [
+     "[log][file][path]", "\/bano-data\/bano-", "",
+     "[log][file][path]", "\.csv", ""
+   ]
    convert => { "longitude" => "float" }
    convert => { "latitude" => "float" }
  }
  mutate {
    rename => {
+     "[log][file][path]" => "dept"
      "longitude" => "[location][lon]"
      "latitude" => "[location][lat]"
      "number" => "[address][number]"
      "street_name" => "[address][street_name]"
      "zipcode" => "[address][zipcode]"
      "city" => "[address][city]"
    }
+   remove_field => [ "log" ]
  }
}
output {
  elasticsearch {
    hosts => ["http://elasticsearch:9200"]
-   index => "bano"
+   index => "bano-%{[dept]}"
    user => "elastic"
    password => "changeme"
    document_id => "%{[id]}"
  }
}

# Step 4-2: start filebeat and check how addresses are coming
-GET bano/_count
-GET bano/_search?size=0&track_total_hits=true
+GET bano*/_count
+GET bano*/_search?size=0&track_total_hits=true

# Step 5-1: search for addresses
-GET bano/_search?track_total_hits=true
+GET bano*/_search?track_total_hits=true
{
"size": 1,
"query": {
@@ -210,7 +219,7 @@ GET bano/_search?track_total_hits=true
}

# Step 5-2: search by geo point
-GET bano/_search?track_total_hits=true
+GET bano*/_search?track_total_hits=true
{
"size": 1,
"sort": [
Expand All @@ -226,7 +235,7 @@ GET bano/_search?track_total_hits=true
}

# Step 5-3: search by points with some optimization
-GET bano/_search?track_total_hits=true
+GET bano*/_search?track_total_hits=true
{
"size": 1,
"query": {
15 changes: 15 additions & 0 deletions setup-cloud.sh
@@ -14,9 +14,24 @@ done
echo -ne '\n'
echo Elasticsearch is now up.

+
+echo Kibana $ELASTIC_VERSION must be available on $KIBANA_URL
+echo -ne "Waiting for kibana"
+
+until curl -u elastic:$CLOUD_PASSWORD -s "$KIBANA_URL/app/home#/" | grep "<title>Elastic</title>" > /dev/null; do
+  sleep 1
+  echo -ne '.'
+done
+
+echo -ne '\n'
+echo Kibana is now up.
+
echo Defining bano ingest pipeline
curl -XPUT "$CLOUD_URL/_ingest/pipeline/bano" -u elastic:$CLOUD_PASSWORD -H 'Content-Type: application/json' -d'@cloud/ingest-bano.json' ; echo

echo Defining bano index template
curl -XPUT "$CLOUD_URL/_template/bano" -u elastic:$CLOUD_PASSWORD -H 'Content-Type: application/json' -d'@cloud/template-bano.json' ; echo

+echo Installing Kibana Objects
+curl -XPOST "$KIBANA_URL/api/saved_objects/_import?overwrite=true" -H "kbn-xsrf: true" --form file=@kibana-config/bano.ndjson -u elastic:$CLOUD_PASSWORD ; echo
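Once the script has run, the installed pieces can be verified from Dev Tools (an illustrative check, not part of the commit):

GET _ingest/pipeline/bano
GET _template/bano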

8 changes: 7 additions & 1 deletion setup.sh
@@ -6,7 +6,7 @@ echo docker-compose down -v
echo docker-compose up
echo -ne "Waiting for kibana"

-until curl -s "http://localhost:5601/login" | grep "Loading Kibana" > /dev/null; do
+until curl -s "http://localhost:5601/login" | grep "<title>Elastic</title>" > /dev/null; do
  sleep 1
  echo -ne '.'
done
@@ -41,6 +41,12 @@ curl -XDELETE http://localhost:9200/banotest -u elastic:$ELASTIC_PASSWORD ; echo
echo Removing existing person data
curl -XDELETE http://localhost:9200/person -u elastic:$ELASTIC_PASSWORD ; echo

+echo Installing Kibana Objects
+curl -XPOST "http://localhost:5601/api/saved_objects/_import?overwrite=true" -H "kbn-xsrf: true" --form file=@kibana-config/bano.ndjson -u elastic:$ELASTIC_PASSWORD ; echo
+
+echo Defining bano ingest pipeline
+curl -XPUT "http://localhost:9200/_ingest/pipeline/bano" -u elastic:$ELASTIC_PASSWORD -H 'Content-Type: application/json' -d'@cloud/ingest-bano.json' ; echo
+
echo Injecting person dataset
injector/injector.sh
