ES-REST-API¶
index¶
- index-manage
$ PUT /<index> # new index
$ GET /<index> # index info
$ DELETE /<index> # delete index
-
匹配语句
GET <index>/_search
{
  "from": 0,
  "size": 10,
  "query": {
    "match_phrase": { "<field-name>": "**" }
  }
}
GET <index>/_search
{
  "query": {
    "bool": {
      "must": [
        { "match_phrase": { "text": "St Peter Fm" } },
        { "match_phrase": { "text": "Cape Basin" } }
      ]
    }
  }
}
GET <index>/_search
{
  "query": {
    "multi_match": {
      "query": "quick brown fox",
      "type": "phrase",
      "fields": [ "subject", "message" ]
    }
  }
}
# 查询数据
GET /<index>/_count
{
  "query": {
    "match_phrase": { "dimension.keyword": "" }
  }
}
# 查询indices and store.size
GET /_cat/indices/_all?v&s=store.size
-
manage index
# list indexes
curl -X GET 'http://localhost:9200/_cat/indices?v'
# or
curl -v "localhost:9200/_cat/indices"
# delete
curl -X DELETE 'http://localhost:9200/examples'
# back up
curl -XPOST --header 'Content-Type: application/json' http://localhost:9200/_reindex -d '{
  "source": {
    "index": "someexamples"
  },
  "dest": {
    "index": "someexamples_copy"
  }
}'
POST /_reindex
{
"source": {
"index": "al_document_range"
},
"dest": {
"index": "bak_al_document_range"
}
}
# list all docs
curl -X GET 'http://localhost:9200/elasticsearch_query_examples/_search'
# index a document (PUT with explicit id 1)
curl -XPUT --header 'Content-Type: application/json' http://localhost:9200/elasticsearch_query_examples/_doc/1 -d '{
"value1" : "value2"
}'
# force a synced flush
curl -XPOST 'localhost:9200/_flush/synced'
# change shards to balance cluster
curl -XPUT -H 'Content-Type: application/json' localhost:9200/_cluster/settings -d '{
"transient" :{
"cluster.routing.allocation.cluster_concurrent_rebalance" : 2
}
}';echo
# change size of the search queue
curl -XPUT -H 'Content-Type: application/json' localhost:9200/_cluster/settings -d '{
"transient" :{
"threadpool.search.queue_size" : 2000
}
}';echo
# Clear the cache on a node
curl -XPOST 'http://localhost:9200/_cache/clear'
-
Search after 查询
GET <index>/_search
{
  "size": 10,
  "query": { "match_all": {} },
  "sort": [
    { "@timestamp": { "order": "desc" }, "_id": { "order": "asc" } }
  ]
}
GET <index>/_search
{
  "size": 10,
  "query": { "match_all": {} },
  "search_after": [ 1626334973086, "1210495356" ],
  "sort": [
    { "@timestamp": { "order": "desc" }, "_id": { "order": "asc" } }
  ]
}
-
查询分片情况
GET /_cat/indices/th_fund_announce
-
Scroll 查询
POST <index>/_search?scroll=3m
-
查询唯一值
GET /<index>/_search
{
  "size": 0,
  "aggs": {
    "langs": {
      "terms": { "field": "cninfo_file.F008V.keyword", "size": 500 }
    }
  },
  "_source": [ "cninfo_file.F008V" ]
}
-
查询删除
POST /<index>/_delete_by_query
{
"query": {
"match_phrase": {
"dimension.keyword": ""
}
}
}
- 自定义analyser: 以 ":true," 作为分隔符分词, 并定义 field 类型为 text, 注意与 keyword 的区别
PUT /tag-test
{
"settings":
{
"default_pipeline": "my_timestamp_pipeline",
"analysis":
{
"analyzer":
{
"douhao":
{
"type": "pattern",
"pattern": ":true,"
}
}
}
},
"mappings":
{
"_doc":
{
"properties":
{
"tags":
{
"type": "text",
"analyzer": "douhao",
"search_analyzer": "douhao"
},
"fileId":
{
"type": "text"
}
}
}
}
}
- 查询分词效果
GET /tag-test/_analyze
{
"analyzer": "douhao",
"text": "{\"GC0000263E_Y0Y2020010120201231_适用\":true,\"GC0000332E_Y0Y2020010120201231_已完成股权分置改革\":true,\"GC0000767E_Y0Y2020010120201231_适用\":true,\"GC0000789E_Y0Y2020010120201231_适用\":true,\"GC0000797E_Y0Y2020010120201231_适用\":true,\"GC0001083E_Y0Y2020010120201231_不适用\":true,\"GC0001085E_Y0Y2020010120201231_不适用\":true,\"GC0001087E_Y0Y2020010120201231_不适用\":true,\"GC0001089E_Y0Y2020010120201231_适用\":true,\"GC0001091E_Y0Y2020010120201231_适用\":true,\"GC0001093E_Y0Y2020010120201231_不适用\":true,\"GC0001190E_Y0Y2020010120201231_不适用\":true,\"GC0001228E_Y0Y2020010120201231_不适用\":true,\"GC0001634E_Y0Y2020010120201231_否\":true,\"GC0003091E_Y0Y2020010120201231_否\":true,\"GC0003124E_Y0Y2020010120201231_否\":true,\"GC0003240E_Y0Y2020010120201231_是\":true}"
}
- regex search
GET tag-test/_search
{
"query": {
"regexp": {
"tags": ".*gc0000263e_.*231_适.*?"
}
}
}
- es 排序
排序索引添加,影响shard
PUT /du666_log_01
{
"settings" : {
"index" : {
"sort.field" : "@timestamp",
"sort.order" : "desc"
}
},
"mappings": {
"_doc": {
"properties": {
"@timestamp": {
"type": "date"
}
}
}
}
}
pipeline¶
GET _ingest/pipeline/my_timestamp_pipeline
# create
PUT _ingest/pipeline/my_timestamp_pipeline
{
"description" : "Adds a field to a document with the time of ingestion",
"processors" : [
{
"set" : {
"field" : "@timestamp",
"value" : "{{_ingest.timestamp}}"
}
}
]
}