Site update: 2019-05-15 15:56:58
filebeat/Dockerfile | 16 (new file)

@@ -0,0 +1,16 @@
FROM ehlxr/alpine

LABEL maintainer="ehlxr <ehlxr.me@gmail.com>"

ENV FILEBEAT_VERSION=6.5.4

COPY filebeat.yml /filebeat.yml

RUN apk add libc6-compat \
    && wget https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-${FILEBEAT_VERSION}-linux-x86_64.tar.gz \
    && tar xzvf filebeat-*.tar.gz \
    && mv filebeat-*/filebeat /usr/local/bin/ \
    && rm -rf filebeat-* \
    && rm -rf /var/cache/apk/*

CMD ["filebeat", "-c", "filebeat.yml"]
filebeat/filebeat.yml | 146 (new file)

@@ -0,0 +1,146 @@
################### Filebeat Configuration Example #########################

############################# Filebeat ######################################
filebeat:
  prospectors:
    -
      # Paths that should be crawled and fetched. Glob based paths.
      # To fetch all ".log" files from a specific level of subdirectories
      # /var/log/*/*.log can be used.
      # For each file found under this path, a harvester is started.
      # Make sure no file is defined twice, as this can lead to unexpected behaviour.
      paths:
        - /data/logs/*.log

      # Configure the file encoding for reading files with international characters
      # following the W3C recommendation for HTML5 (http://www.w3.org/TR/encoding).
      # Some sample encodings:
      #   plain, utf-8, utf-16be-bom, utf-16be, utf-16le, big5, gb18030, gbk,
      #    hz-gb-2312, euc-kr, euc-jp, iso-2022-jp, shift-jis, ...
      encoding: plain

      # Type of the files. Based on this the way the file is read is decided.
      # The different types cannot be mixed in one prospector
      #
      # Possible options are:
      # * log: Reads every line of the log file (default)
      # * stdin: Reads the standard in
      input_type: log

      # Exclude lines. A list of regular expressions to match. It drops the lines that are
      # matching any regular expression from the list. The include_lines is called before
      # exclude_lines. By default, no lines are dropped.
      # exclude_lines: ["^DBG"]

      # Include lines. A list of regular expressions to match. It exports the lines that are
      # matching any regular expression from the list. The include_lines is called before
      # exclude_lines. By default, all the lines are exported.
      # include_lines: ["^ERR", "^WARN"]

      # Exclude files. A list of regular expressions to match. Filebeat drops the files that
      # are matching any regular expression from the list. By default, no files are dropped.
      # exclude_files: [".gz$"]

      # Type to be published in the 'type' field. For Elasticsearch output,
      # the type defines the document type these entries should be stored
      # in. Default: log
      document_type: service_log

      # Scan frequency in seconds.
      # How often these files should be checked for changes. In case it is set
      # to 0s, it is done as often as possible. Default: 10s
      scan_frequency: 10s

      # Multiline can be used for log messages spanning multiple lines. This is common
      # for Java stack traces or C line continuations.
      multiline:

        # The regexp Pattern that has to be matched. The example pattern matches all lines starting with [
        #pattern: ^\[
        pattern: ^20

        # Defines if the pattern set under pattern should be negated or not. Default is false.
        negate: true

        # Match can be set to "after" or "before". It is used to define if lines should be appended to a pattern
        # that was (not) matched before or after, or as long as a pattern is not matched based on negate.
        # Note: "after" is the equivalent of "previous" and "before" is the equivalent of "next" in Logstash.
        match: after

        # The maximum number of lines that are combined to one event.
        # In case there are more than max_lines, the additional lines are discarded.
        # Default is 500
        max_lines: 500

        # After the defined timeout, a multiline event is sent even if no new pattern was found to start a new event.
        # Default is 5s.
        timeout: 5s
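        # Illustrative example (not part of the original commit): with
        # pattern ^20, negate: true and match: after, only lines starting
        # with "20" (a 20xx timestamp) begin a new event, so these three
        # physical lines would be merged into a single event:
        #   2019-05-15 15:56:58 ERROR request failed
        #   java.lang.NullPointerException
        #       at com.example.Foo.bar(Foo.java:42)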

      # Setting tail_files to true means filebeat starts reading new files at the end
      # instead of the beginning. If this is used in combination with log rotation
      # this can mean that the first entries of a new file are skipped.
      tail_files: false

#================================ General =====================================

# The name of the shipper that publishes the network data. It can be used to group
# all the transactions sent by a single shipper in the web interface.
#name: doctor-service

# The tags of the shipper are included in their own field with each
# transaction published.
tags: ["gateway", "proxy"]

# Optional fields that you can specify to add additional information to the
# output.
fields:
  namespace: ${NAMESPACE:DEV}

############################# Output ##########################################

# Configure what outputs to use when sending the data collected by the beat.
# Multiple outputs may be used.
output:

  ### Logstash as output
  #logstash:
    # The Logstash hosts
    #hosts: ["logstash:5043"]

    # Number of workers per Logstash host.
    #worker: 2

    # index configures '@metadata.beat' field to be used by Logstash for
    # indexing. The default index name depends on each beat.
    # For Packetbeat, the default is set to packetbeat, for Topbeat to
    # topbeat and for Filebeat to filebeat.
    #index: filebeat

#-------------------------- Elasticsearch output ------------------------------
output.elasticsearch:
  # Array of hosts to connect to.
  hosts: ["elasticsearch.g-laikang-sh-qa:9200"]

  # Optional protocol and basic auth credentials.
  #protocol: "https"
  #username: "elastic"
  #password: "changeme"

############################# Logging #########################################

# There are three options for the log output: syslog, file, stderr.
# On Windows systems, the log output is sent to the file output by default;
# on all other systems it is sent to syslog by default.
logging:

  # Send all logging output to syslog. On Windows default is false, otherwise
  # default is true.
  to_syslog: false

  # Write all logging output to files. Beats automatically rotate files if rotateeverybytes
  # limit is reached.
  to_files: false

  # Sets log level. The default log level is error.
  # Available log levels are: critical, error, warning, info, debug
  level: error
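As a quick way to exercise this configuration, the commands below are a sketch (not part of the commit) using Filebeat's built-in checks from inside the image; the image tag is the assumed one from the build example above, and the output check only succeeds from a network where the configured Elasticsearch host resolves.

# Parse and validate the config baked into the image; fields.namespace
# falls back to DEV because of the ${NAMESPACE:DEV} default above.
docker run --rm ehlxr/filebeat:6.5.4 filebeat test config -c /filebeat.yml

# Check connectivity to the configured Elasticsearch output (only works
# where elasticsearch.g-laikang-sh-qa is resolvable).
docker run --rm -e NAMESPACE=QA ehlxr/filebeat:6.5.4 filebeat test output -c /filebeat.yml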