Part 1: Sync the database data into ES

1. First, work out the query conditions in the database and get the SQL statement running there.

2. Put the SQL statement into the Logstash sync_tanle.cfg configuration file.

3. Change the index name.

Complete sync_tanle.cfg file:

input {
  jdbc {
    # MySQL JDBC connection settings
    jdbc_connection_string => "jdbc:mysql://192.168.230.1:3306/ddd?useUnicode=true&characterEncoding=UTF-8&serverTimezone=GMT%2B8"
    jdbc_user => "root"
    jdbc_password => "123123"

    # MySQL JDBC driver jar; the path must be correct, otherwise Logstash reports "com.mysql.cj.jdbc.Driver could not be loaded"
    jdbc_driver_library => "/opt/logstash-7.7.0/config/sync/mysql-connector-java-8.0.13.jar"
    jdbc_driver_class => "com.mysql.cj.jdbc.Driver"
    jdbc_paging_enabled => true
    jdbc_page_size => "50000"

    jdbc_default_timezone => "Asia/Shanghai"
    # A SQL file can be referenced here, or the SQL statement can be written inline, as below:
    statement => "SELECT
        p.id,
        p.`name`,
        p.brand_name,
        p.product_category_name,
        p.publish_status,
        p.new_status,
        p.sub_title,
        ps.sku_code,
        ps.price,
        ps.pic,
        ps.sale,
        ps.sp_data
      FROM
        pms_product p
        LEFT JOIN pms_sku_stock ps
          ON p.id = ps.product_id
      GROUP BY p.`name`"

    # Works like crontab; schedules the sync, e.g. once per minute (minute hour day-of-month month day-of-week)
    schedule => "* * * * *"

    # Whether to track the value of a specific column. If true, tracking_column names the column to track; otherwise the timestamp of the last run is tracked by default.
    use_column_value => true

    # Required when use_column_value is true: the database column to track. It must be increasing, typically the MySQL primary key.
    tracking_column => "create_time"

    tracking_column_type => "timestamp"

    last_run_metadata_path => "area_logstash_capital_bill_last_id"

    # Whether to clear the record stored at last_run_metadata_path. If true, every run starts from scratch and re-queries all database records.
    clean_run => false

    # Whether to lowercase column names
    #lowercase_column_names => false
  }
}

filter {
  date {
    match => [ "create_time", "yyyy-MM-dd HH:mm:ss" ]
    timezone => "Asia/Shanghai"
  }
}

output {
  elasticsearch {
    # ES host (the virtual machine's IP)
    hosts => ["192.168.230.134:9200"]
    # Custom index name, analogous to a database name; it must match indexName in @Document(indexName = "jq_product") on the entity class
    index => "jq_product"
    # The id field of the joined database rows becomes the document id in the index
    document_id => "%{id}"
    template_overwrite => true
  }

  stdout {
    codec => json_lines
  }
}

4. Start the sync, from the /opt/logstash-7.7.0/bin/ directory:

[root@localhost bin]# ./logstash -f /opt/logstash-7.7.0/config/sync/sync_tanle.cfg
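Optionally, the configuration file can be syntax-checked first with Logstash's --config.test_and_exit flag, so mistakes are caught before a full run:

[root@localhost bin]# ./logstash -f /opt/logstash-7.7.0/config/sync/sync_tanle.cfg --config.test_and_exit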

Note: if you need to run the sync again, first look up the existing Logstash process and kill it:

[root@localhost bin]# ps -ef | grep logstash

[root@localhost bin]# kill -9 5388

5. Result:
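A quick way to verify the sync, reusing the host and index name from the config above, is to ask ES how many documents the index now holds; the count should roughly match the number of rows the SQL statement returns:

[root@localhost bin]# curl "http://192.168.230.134:9200/jq_product/_count?pretty"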

Part 2: Create a Spring Boot service project in IDEA

Configure the dependencies used in pom.xml:

<spring-boot.version>2.3.4.RELEASE</spring-boot.version>
<spring-cloud.version>Hoxton.RELEASE</spring-cloud.version>
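These two version properties are normally consumed through a dependencyManagement import; a sketch of the usual layout, assuming nothing beyond the two properties above is special about this pom:

<dependencyManagement>
    <dependencies>
        <!-- Assumed: standard BOM imports that pin the Spring Boot and Spring Cloud versions -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-dependencies</artifactId>
            <version>${spring-boot.version}</version>
            <type>pom</type>
            <scope>import</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.cloud</groupId>
            <artifactId>spring-cloud-dependencies</artifactId>
            <version>${spring-cloud.version}</version>
            <type>pom</type>
            <scope>import</scope>
        </dependency>
    </dependencies>
</dependencyManagement>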

Elasticsearch dependency:

<!-- Dependency that provides the ElasticsearchRestTemplate -->
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-data-elasticsearch</artifactId>
</dependency>
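The application.yml below also registers with a Eureka server, so the project presumably needs the Eureka client starter as well; a minimal sketch, assuming the Hoxton.RELEASE Spring Cloud BOM above is in effect (this starter is not shown in the original pom):

<!-- Assumed: Eureka client starter, needed for the eureka.client settings in application.yml -->
<dependency>
    <groupId>org.springframework.cloud</groupId>
    <artifactId>spring-cloud-starter-netflix-eureka-client</artifactId>
</dependency>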

Configuration in the application.yml file:

# application name
spring:
  application:
    name: jq-search
  # Elasticsearch connection
  elasticsearch:
    rest:
      uris: http://192.168.230.134:9200
mybatis-plus:
  configuration:
    log-impl: org.apache.ibatis.logging.stdout.StdOutImpl
# port
server:
  port: 5053
# Eureka client: register with the Eureka server
eureka:
  client:
    service-url:
      defaultZone: http://localhost:5051/eureka

Add exclude = {DataSourceAutoConfiguration.class} on the Spring Boot startup class, because this service does not connect to a relational database; it reads its values from ES.

@SpringBootApplication(exclude= {DataSourceAutoConfiguration.class})
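For completeness, here is a minimal sketch of what the startup class looks like with that exclusion (the class name SearchApplication is illustrative, not taken from the original project):

package com.jq;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;

// DataSourceAutoConfiguration is excluded because no relational database is configured;
// all data is read from Elasticsearch instead.
@SpringBootApplication(exclude = {DataSourceAutoConfiguration.class})
public class SearchApplication {
    public static void main(String[] args) {
        SpringApplication.run(SearchApplication.class, args);
    }
}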

Create the index (document) class:

package com.jq.model;

import lombok.Data;
import org.springframework.data.annotation.Id;
import org.springframework.data.elasticsearch.annotations.Document;
import org.springframework.data.elasticsearch.annotations.Field;
import org.springframework.data.elasticsearch.annotations.FieldType;

@Data
@Document(indexName = "jq_product")
public class SerchModel {
    // matches the document_id (%{id}) written by Logstash
    @Id
    private Long id;
    @Field(type = FieldType.Text, analyzer = "ik_max_word")
    private String name;
    @Field(type = FieldType.Text)
    private String brand_name;
    @Field(type = FieldType.Text)
    private String product_category_name;
    @Field(type = FieldType.Text)
    private String publish_status;
    @Field(type = FieldType.Text)
    private String new_status;
    @Field(type = FieldType.Text, analyzer = "ik_max_word")
    private String sub_title;
    @Field(type = FieldType.Text)
    private String sku_code;
    @Field(type = FieldType.Text)
    private String price;
    @Field(type = FieldType.Text)
    private String pic;
    @Field(type = FieldType.Text)
    private String sale;
    @Field(type = FieldType.Text)
    private String sp_data;
}

Create a controller class for a simple test:

package com.jq.controller;

import com.jq.model.SerchModel;
import org.elasticsearch.index.query.QueryBuilders;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.elasticsearch.core.ElasticsearchRestTemplate;
import org.springframework.data.elasticsearch.core.SearchHit;
import org.springframework.data.elasticsearch.core.SearchHits;
import org.springframework.data.elasticsearch.core.query.NativeSearchQuery;
import org.springframework.data.elasticsearch.core.query.NativeSearchQueryBuilder;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.util.List;
import java.util.stream.Collectors;

@RestController
@CrossOrigin
@RequestMapping("search")
public class SearchController {

    @Autowired
    private ElasticsearchRestTemplate elasticsearchRestTemplate;

    @GetMapping()
    public Object getSerch(String name) {
        // build pagination (first page, up to 999 hits)
        Pageable pageable = PageRequest.of(0, 999);
        NativeSearchQueryBuilder builder = new NativeSearchQueryBuilder();
        // query_string query: matches the keyword against the analyzed text fields
        NativeSearchQuery query = builder.withQuery(QueryBuilders.queryStringQuery(name))
                .withPageable(pageable)
                .build();
        SearchHits<SerchModel> search = elasticsearchRestTemplate.search(query, SerchModel.class);
        // collect the hit contents into a list so they can be serialized as JSON
        List<SerchModel> result = search.get()
                .map(SearchHit::getContent)
                .collect(Collectors.toList());
        return result;
    }

}

Test it:
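With the service running on port 5053, a request such as the following should return the matching documents as a JSON array (the keyword is only an example):

curl "http://localhost:5053/search?name=test"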
