跳转至

0. 环境

# 当前内存:2G
# 启动logstash后内存:2.54G
# logstash大概使用内存:0.54G

# tomcat业务机:10.0.8.3
# MySQL服务器:10.0.8.5
# kafka服务器:10.0.8.2
# es服务器:10.0.8.5

1. 上传logstash的deb包

# 10.0.8.3
cd /home/ubuntu
ls

2. 安装logstash

dpkg -i logstash-7.16.2-amd64.deb

3. 修改配置文件

# 设置logstash进行日志收集,并发送到kafka

#1.复制配置文件
cd /etc/logstash
cp logstash-sample.conf conf.d/logstash-to-kafka.conf

#2.修改配置文件
cd conf.d
vim logstash-to-kafka.conf
# 写入如下内容,
#1. input:接收tomcat访问日志;
#2. output:发送给kafka服务器
# Logstash pipeline: tail Tomcat access logs and forward each event to Kafka.
# Matches the working run shown in the transcript below (Logstash 7.16.2, Kafka 2.5.1).
input {
  file {
    type => "tomcat-log"    # user-defined tag; intended to become the index name downstream
    path => "/opt/tomcat/logs/tomcat_access_log.*.log" # glob over the Tomcat access-log files
    start_position => "beginning"
    # assumes Tomcat writes its access log lines as JSON — confirmed by the
    # parsed fields (clientip, AccessTime, status, ...) in the sample output
    codec => json
  }
}

output {
  kafka {
    bootstrap_servers => "10.0.8.2:9092"
    topic_id => "chupeng"   # user-defined Kafka topic name!
    compression_type => "snappy"    # producer compression type
    codec => json
  }

stdout {
  codec => rubydebug    # echo collected events to the console for debugging; disable for production runs!
}

}

4. 测试启动logstash

/usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash-to-kafka.conf -t
root@k8s-master:/etc/logstash/conf.d# /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash-to-kafka.conf -t
Using JAVA_HOME defined java: /usr/lib/jvm/java-11-openjdk-amd64
WARNING: Using JAVA_HOME while Logstash distribution comes with a bundled JDK.
DEPRECATION: The use of JAVA_HOME is now deprecated and will be removed starting from 8.0. Please configure LS_JAVA_HOME instead.
OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
WARNING: Could not find logstash.yml which is typically located in $LS_HOME/config or /etc/logstash. You can specify the path using --path.settings. Continuing using the defaults
Could not find log4j2 configuration at path /usr/share/logstash/config/log4j2.properties. Using default config which logs errors to the console
[WARN ] 2022-01-02 18:20:21.941 [main] runner - The use of JAVA_HOME has been deprecated. Logstash 8.0 and later ignores JAVA_HOME and uses the bundled JDK. Running Logstash with the bundled JDK is recommended. The bundled JDK has been verified to work with each specific version of Logstash, and generally provides best performance and reliability. If you have compelling reasons for using your own JDK (organizational-specific compliance requirements, for example), you can configure LS_JAVA_HOME to use that version instead.
[INFO ] 2022-01-02 18:20:21.947 [main] runner - Starting Logstash {"logstash.version"=>"7.16.2", "jruby.version"=>"jruby 9.2.20.1 (2.5.8) 2021-11-30 2a2962fbd1 OpenJDK 64-Bit Server VM 11.0.13+8-Ubuntu-0ubuntu1.20.04 on 11.0.13+8-Ubuntu-0ubuntu1.20.04 +indy +jit [linux-x86_64]"}
[INFO ] 2022-01-02 18:20:21.980 [main] settings - Creating directory {:setting=>"path.queue", :path=>"/usr/share/logstash/data/queue"}
[INFO ] 2022-01-02 18:20:21.989 [main] settings - Creating directory {:setting=>"path.dead_letter_queue", :path=>"/usr/share/logstash/data/dead_letter_queue"}
[WARN ] 2022-01-02 18:20:22.259 [LogStash::Runner] multilocal - Ignoring the 'pipelines.yml' file because modules or command line options are specified
[INFO ] 2022-01-02 18:20:23.045 [LogStash::Runner] Reflections - Reflections took 97 ms to scan 1 urls, producing 119 keys and 417 values
[WARN ] 2022-01-02 18:20:23.832 [LogStash::Runner] json - Relying on default value of `pipeline.ecs_compatibility`, which may change in a future major release of Logstash. To avoid unexpected changes when upgrading Logstash, please explicitly declare your desired ECS Compatibility mode.
[WARN ] 2022-01-02 18:20:23.926 [LogStash::Runner] file - Relying on default value of `pipeline.ecs_compatibility`, which may change in a future major release of Logstash. To avoid unexpected changes when upgrading Logstash, please explicitly declare your desired ECS Compatibility mode.
[WARN ] 2022-01-02 18:20:24.090 [LogStash::Runner] json - Relying on default value of `pipeline.ecs_compatibility`, which may change in a future major release of Logstash. To avoid unexpected changes when upgrading Logstash, please explicitly declare your desired ECS Compatibility mode.
Configuration OK    # 出现这个,则说明ok!!!
[INFO ] 2022-01-02 18:20:24.262 [LogStash::Runner] runner - Using config.test_and_exit mode. Config Validation Result: OK. Exiting Logstash
root@k8s-master:/etc/logstash/conf.d#

5. 正式启动logstash

# 上一步测试启动没有报错,则正式启动测试!
/usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash-to-kafka.conf
# 如果提示连接不到 kafka服务器主机名:9092 的话,则添加host解析即可解决
echo "10.0.8.3 k8s-master" >>/etc/hosts
echo "10.0.8.2 zabbix" >> /etc/hosts
root@k8s-master:/etc/logstash/conf.d# /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash-to-kafka.conf
Using JAVA_HOME defined java: /usr/lib/jvm/java-11-openjdk-amd64
WARNING: Using JAVA_HOME while Logstash distribution comes with a bundled JDK.
DEPRECATION: The use of JAVA_HOME is now deprecated and will be removed starting from 8.0. Please configure LS_JAVA_HOME instead.
OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
WARNING: Could not find logstash.yml which is typically located in $LS_HOME/config or /etc/logstash. You can specify the path using --path.settings. Continuing using the defaults
Could not find log4j2 configuration at path /usr/share/logstash/config/log4j2.properties. Using default config which logs errors to the console
[WARN ] 2022-01-02 18:23:03.575 [main] runner - The use of JAVA_HOME has been deprecated. Logstash 8.0 and later ignores JAVA_HOME and uses the bundled JDK. Running Logstash with the bundled JDK is recommended. The bundled JDK has been verified to work with each specific version of Logstash, and generally provides best performance and reliability. If you have compelling reasons for using your own JDK (organizational-specific compliance requirements, for example), you can configure LS_JAVA_HOME to use that version instead.
[INFO ] 2022-01-02 18:23:03.580 [main] runner - Starting Logstash {"logstash.version"=>"7.16.2", "jruby.version"=>"jruby 9.2.20.1 (2.5.8) 2021-11-30 2a2962fbd1 OpenJDK 64-Bit Server VM 11.0.13+8-Ubuntu-0ubuntu1.20.04 on 11.0.13+8-Ubuntu-0ubuntu1.20.04 +indy +jit [linux-x86_64]"}
[WARN ] 2022-01-02 18:23:03.955 [LogStash::Runner] multilocal - Ignoring the 'pipelines.yml' file because modules or command line options are specified
[INFO ] 2022-01-02 18:23:05.329 [Api Webserver] agent - Successfully started Logstash API endpoint {:port=>9600, :ssl_enabled=>false}
[INFO ] 2022-01-02 18:23:06.149 [Converge PipelineAction::Create<main>] Reflections - Reflections took 89 ms to scan 1 urls, producing 119 keys and 417 values
[WARN ] 2022-01-02 18:23:07.021 [Converge PipelineAction::Create<main>] json - Relying on default value of `pipeline.ecs_compatibility`, which may change in a future major release of Logstash. To avoid unexpected changes when upgrading Logstash, please explicitly declare your desired ECS Compatibility mode.
[WARN ] 2022-01-02 18:23:07.088 [Converge PipelineAction::Create<main>] file - Relying on default value of `pipeline.ecs_compatibility`, which may change in a future major release of Logstash. To avoid unexpected changes when upgrading Logstash, please explicitly declare your desired ECS Compatibility mode.
[WARN ] 2022-01-02 18:23:07.179 [Converge PipelineAction::Create<main>] json - Relying on default value of `pipeline.ecs_compatibility`, which may change in a future major release of Logstash. To avoid unexpected changes when upgrading Logstash, please explicitly declare your desired ECS Compatibility mode.
[INFO ] 2022-01-02 18:23:07.575 [[main]-pipeline-manager] ProducerConfig - ProducerConfig values:
        acks = 1
        batch.size = 16384
        bootstrap.servers = [10.0.8.2:9092]
        buffer.memory = 33554432
        client.dns.lookup = default
        client.id = producer-1
        compression.type = snappy
        connections.max.idle.ms = 540000
        delivery.timeout.ms = 120000
        enable.idempotence = false
        interceptor.classes = []
        key.serializer = class org.apache.kafka.common.serialization.StringSerializer
        linger.ms = 0
        max.block.ms = 60000
        max.in.flight.requests.per.connection = 5
        max.request.size = 1048576
        metadata.max.age.ms = 300000
        metadata.max.idle.ms = 300000
        metric.reporters = []
        metrics.num.samples = 2
        metrics.recording.level = INFO
        metrics.sample.window.ms = 30000
        partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner
        receive.buffer.bytes = 32768
        reconnect.backoff.max.ms = 50
        reconnect.backoff.ms = 50
        request.timeout.ms = 40000
        retries = 2147483647
        retry.backoff.ms = 100
        sasl.client.callback.handler.class = null
        sasl.jaas.config = null
        sasl.kerberos.kinit.cmd = /usr/bin/kinit
        sasl.kerberos.min.time.before.relogin = 60000
        sasl.kerberos.service.name = null
        sasl.kerberos.ticket.renew.jitter = 0.05
        sasl.kerberos.ticket.renew.window.factor = 0.8
        sasl.login.callback.handler.class = null
        sasl.login.class = null
        sasl.login.refresh.buffer.seconds = 300
        sasl.login.refresh.min.period.seconds = 60
        sasl.login.refresh.window.factor = 0.8
        sasl.login.refresh.window.jitter = 0.05
        sasl.mechanism = GSSAPI
        security.protocol = PLAINTEXT
        security.providers = null
        send.buffer.bytes = 131072
        ssl.cipher.suites = null
        ssl.enabled.protocols = [TLSv1.2]
        ssl.endpoint.identification.algorithm = https
        ssl.key.password = null
        ssl.keymanager.algorithm = SunX509
        ssl.keystore.location = null
        ssl.keystore.password = null
        ssl.keystore.type = JKS
        ssl.protocol = TLSv1.2
        ssl.provider = null
        ssl.secure.random.implementation = null
        ssl.trustmanager.algorithm = PKIX
        ssl.truststore.location = null
        ssl.truststore.password = null
        ssl.truststore.type = JKS
        transaction.timeout.ms = 60000
        transactional.id = null
        value.serializer = class org.apache.kafka.common.serialization.StringSerializer

[INFO ] 2022-01-02 18:23:07.657 [[main]-pipeline-manager] AppInfoParser - Kafka version: 2.5.1
[INFO ] 2022-01-02 18:23:07.658 [[main]-pipeline-manager] AppInfoParser - Kafka commitId: 0efa8fb0f4c73d92
[INFO ] 2022-01-02 18:23:07.658 [[main]-pipeline-manager] AppInfoParser - Kafka startTimeMs: 1641118987649
[INFO ] 2022-01-02 18:23:07.816 [[main]-pipeline-manager] javapipeline - Starting pipeline {:pipeline_id=>"main", "pipeline.workers"=>2, "pipeline.batch.size"=>125, "pipeline.batch.delay"=>50, "pipeline.max_inflight"=>250, "pipeline.sources"=>["/etc/logstash/conf.d/logstash-to-kafka.conf"], :thread=>"#<Thread:0x12f711da run>"}
[INFO ] 2022-01-02 18:23:08.528 [kafka-producer-network-thread | producer-1] Metadata - [Producer clientId=producer-1] Cluster ID: YTU7Aoo5RnuWtVAFKtTk8A
[INFO ] 2022-01-02 18:23:08.879 [[main]-pipeline-manager] javapipeline - Pipeline Java execution initialization time {"seconds"=>1.06}
[INFO ] 2022-01-02 18:23:08.951 [[main]-pipeline-manager] file - No sincedb_path set, generating one based on the "path" setting {:sincedb_path=>"/usr/share/logstash/data/plugins/inputs/file/.sincedb_63e4cd45b7a102d5934fdc2523551437", :path=>["/opt/tomcat/logs/tomcat_access_log.*.log"]}
[INFO ] 2022-01-02 18:23:08.983 [[main]-pipeline-manager] javapipeline - Pipeline started {"pipeline.id"=>"main"}
[INFO ] 2022-01-02 18:23:09.051 [Agent thread] agent - Pipelines running {:count=>1, :running_pipelines=>[:main], :non_running_pipelines=>[]}
[INFO ] 2022-01-02 18:23:09.085 [[main]<file] observingtail - START, creating Discoverer, Watch with file and sincedb collections

# 启动后,窗口会hang在这里!

6. 刷新tomcat页面

# 10.0.8.3:8080
# 访问页面,或者刷新后,在上一步的窗口中得到如下logstash收集到的日志信息!!!
[WARN ] 2022-01-02 18:23:56.496 [[main]<file] json - Relying on default value of `pipeline.ecs_compatibility`, which may change in a future major release of Logstash. To avoid unexpected changes when upgrading Logstash, please explicitly declare your desired ECS Compatibility mode.
[WARN ] 2022-01-02 18:23:57.155 [kafka-producer-network-thread | producer-1] NetworkClient - [Producer clientId=producer-1] Error while fetching metadata with correlation id 3 : {chupeng=LEADER_NOT_AVAILABLE}
[WARN ] 2022-01-02 18:23:57.334 [kafka-producer-network-thread | producer-1] NetworkClient - [Producer clientId=producer-1] Error while fetching metadata with correlation id 4 : {chupeng=LEADER_NOT_AVAILABLE}
{
         "@version" => "1",
         "clientip" => "10.0.8.5",
     "AgentVersion" => "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.45 Safari/537.36",
       "@timestamp" => 2022-01-02T10:23:56.656Z,
       "ClientUser" => "-",
    "authenticated" => "-",
       "AccessTime" => "[02/Jan/2022:18:23:52 +0800]",
           "method" => "GET / HTTP/1.1",
           "status" => "200",
             "type" => "tomcat-log",
          "partner" => "-",
     "Query?string" => "",
             "host" => "k8s-master",
             "path" => "/opt/tomcat/logs/tomcat_access_log.2022-01-02.log",
        "SendBytes" => "11156"
}

7. 将logstash放到后台运行

# 先ctrl+c取消logstash运行,然后
# 后台运行
cd /tmp
nohup /usr/share/logstash/bin/logstash -f /etc/logstash/conf.d/logstash-to-kafka.conf &

#确认一下
jobs

至此,业务服务器部署完毕!!!

kafka服务器

1. 查看topic列表

# 说明:较新版本(2.2 及更高版本)的 Kafka 不再需要 ZooKeeper 连接参数,即 --zookeeper localhost:2181。
# 改用 Kafka Broker 的 --bootstrap-server localhost:9092 来替代 --zookeeper localhost:2181。
# 10.0.8.2
/usr/local/kafka/bin/kafka-topics.sh --list --bootstrap-server localhost:9092
root@zabbix:~# /usr/local/kafka/bin/kafka-topics.sh --list --bootstrap-server localhost:9092
__consumer_offsets
chupeng

2. 查看某个topic详细信息

/usr/local/kafka/bin/kafka-topics.sh --describe --bootstrap-server localhost:9092  --topic chupeng
root@zabbix:~# /usr/local/kafka/bin/kafka-topics.sh --describe --bootstrap-server localhost:9092  --topic chupeng
Topic: chupeng  TopicId: 5RjEv4T2Rnix7F0uslYJfQ PartitionCount: 1       ReplicationFactor: 1    Configs: segment.bytes=1073741824
        Topic: chupeng  Partition: 0    Leader: 0       Replicas: 0     Isr: 0

最后更新: 2022-02-19 13:05:46