revision
@@ -30,7 +30,8 @@ Notify Influxdb.
 
 # nginx - telegraf
 
-(note: using prometheus requires the same steps)
-https://faun.pub/total-nginx-monitoring-with-application-performance-and-a-bit-more-using-8fc6d731051b
+sources:
+
+- https://faun.pub/total-nginx-monitoring-with-application-performance-and-a-bit-more-using-8fc6d731051b
+- https://www.influxdata.com/blog/telegraf-correlate-log-metrics-data-performance-bottlenecks/
 
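The articles above cover the full nginx → Telegraf → InfluxDB pipeline. As a quick end-to-end check that data is actually arriving, a minimal Flux sketch (assuming the bucket is named "telegraf", as in the configuration below) lists the measurements currently stored in it:

import "influxdata/influxdb/schema"

// List every measurement in the bucket; once both Telegraf inputs are running,
// "nginx" (stub_status) and "nginx_access-logv3" (tailed access log) should appear.
schema.measurements(bucket: "telegraf")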
@@ -1,9 +1,3 @@
-server_names_hash_bucket_size 64;
-log_format telegraf-log '$remote_addr - $remote_user [$time_local]'
-    '"$request" $status $body_bytes_sent'
-    '"$http_referer" "$http_user_agent"'
-    '"$request_time" "$upstream_connect_time"'
-    '$http_host "xxx" "yyy"';
-    #'"$geoip_city" "$geoip_city_country_code"';
-access_log /var/log/nginx/access-file.log telegraf-log;
+log_format vhosts ' $remote_addr - $remote_user [$time_local] "$request" $status $body_bytes_sent $host "$http_referer" "$http_user_agent"';
+access_log /var/log/nginx/vhosts-access.log vhosts;
@@ -1,13 +1,16 @@
 [[inputs.nginx]]
   urls = ["http://wansrv01:8080/stub_status"]
   response_timeout = "5s"
 
 [[inputs.tail]]
-  files = ["/var/log/nginx/access-file.log"]
+  files = ["/var/log/nginx/vhosts-access.log"]
   from_beginning = true
-  name_override = "nginx_access_log"
-  grok_patterns = ["%{CUSTOM_LOG_FORMAT}"]
+  name_override = "nginx_access-logv3"
+  grok_patterns = ["%{CUSTOM_LOG}"]
   grok_custom_patterns = '''
-CUSTOM_LOG_FORMAT %{CLIENT:client_ip} %{NOTSPACE:ident} %{NOTSPACE:auth} \[%{HTTPDATE:ts:ts-httpd}\]"(?:%{WORD:verb:tag} %{NOTSPACE:request}(?: HTTP/%{NUMBER:http_version:float})?|%{DATA})" %{NUMBER:resp_code:tag} (?:%{NUMBER:resp_bytes:int}|-)%{QS:referrer} %{QS:agent}%{QS:request_time} %{QS:upstream_connect_time}%{NOTSPACE:http_host:tag} %{QS:geoip_city} %{QS:country_code}
+CUSTOM_LOG %{COMMON_LOG_FORMAT} %{NOTSPACE:nginx_host} "%{DATA:referrer}" "%{DATA:agent}"
   '''
   data_format = "grok"
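The [[inputs.tail]] change parses the simpler vhosts format with Telegraf's built-in COMMON_LOG_FORMAT pattern plus the $host value as nginx_host, and renames the measurement to nginx_access-logv3 (which is what all of the Flux queries below reference). The [[inputs.nginx]] block is unchanged and keeps scraping stub_status counters into their own measurement (named "nginx" by Telegraf). A minimal sketch of a request-rate query over that measurement, assuming the default measurement name and the "telegraf" bucket (the -1h range is just a placeholder):

from(bucket: "telegraf")
  |> range(start: -1h)
  // stub_status "requests" is a monotonically increasing counter,
  // so take a per-second derivative to turn it into a rate
  |> filter(fn: (r) => r["_measurement"] == "nginx" and r["_field"] == "requests")
  |> derivative(unit: 1s, nonNegative: true)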
influxdb/by_nginx_client_ip.flux (new file)
@@ -0,0 +1,27 @@

////////////////////////////////////////////////////////////////////
v = {
    "timeRangeStart": 2023-06-16T14:44:00Z,
    "timeRangeStop": 2023-06-18T14:44:00Z
}

bucket = "telegraf"

////////////////////////////////////////////////////////////////////
hosts = ${httpHost:json}

hostsLength = hosts |> length()
measurement = "nginx_access-logv3"
from(bucket: "${bucket}")
  |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
  |> filter(fn: (r) => r["_measurement"] == measurement and (r["_field"] == "client_ip" or r["_field"] == "nginx_host"))
  |> pivot(rowKey: ["_time"], columnKey: ["_field"], valueColumn: "_value")
  |> filter(fn: (r) => if hostsLength > 0 then contains(value: r["nginx_host"], set: hosts) else true)
  |> duplicate(column: "client_ip", as: "_value")
  |> group(columns: ["client_ip"])
  |> count()
  |> group()
  |> sort(columns: ["_value"], desc: true)
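The ${httpHost:json} / length() lines look like dashboard-variable interpolation that is resolved before the query reaches InfluxDB. For a panel that only needs the heaviest clients, a hedged variation without the host filter (same assumed bucket and measurement; the -24h range is a placeholder) simply caps the sorted output:

measurement = "nginx_access-logv3"

from(bucket: "telegraf")
  |> range(start: -24h)
  |> filter(fn: (r) => r["_measurement"] == measurement and r["_field"] == "client_ip")
  |> duplicate(column: "_value", as: "client_ip")
  |> group(columns: ["client_ip"])
  |> count()
  |> group()
  |> sort(columns: ["_value"], desc: true)
  // keep only the ten busiest client IPs
  |> limit(n: 10)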
influxdb/by_nginx_host.flux (new file)
@@ -0,0 +1,26 @@

////////////////////////////////////////////////////////////////////
v = {
    "timeRangeStart": 2023-06-15T14:44:00Z,
    "timeRangeStop": 2023-06-18T14:44:00Z
}
httpHosts = ["git.limbosolutions.com"]
bucket = "telegraf"

////////////////////////////////////////////////////////////////////

measurement = "nginx_access-logv3"
from(bucket: "${bucket}")
  |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
  |> filter(fn: (r) => r["_measurement"] == measurement and r["_field"] == "nginx_host")
  |> duplicate(column: "_value", as: "nginx_host")
  |> group(columns: ["nginx_host"])
  |> count()
  |> group()
  |> sort(columns: ["_value"], desc: true)
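by_nginx_host.flux produces one total per vhost for the whole range. To graph traffic per vhost over time instead, a sketch with aggregateWindow (same assumed bucket and measurement; the -24h range and 1h window are placeholders) counts access-log entries per hour:

measurement = "nginx_access-logv3"

from(bucket: "telegraf")
  |> range(start: -24h)
  |> filter(fn: (r) => r["_measurement"] == measurement and r["_field"] == "nginx_host")
  |> duplicate(column: "_value", as: "nginx_host")
  // one series per vhost, counting log lines in hourly windows
  |> group(columns: ["nginx_host"])
  |> aggregateWindow(every: 1h, fn: count, createEmpty: false)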
influxdb/by_nginx_request.flux (new file)
@@ -0,0 +1,30 @@

////////////////////////////////////////////////////////////////////
v = {
    "timeRangeStart": 2023-06-16T14:44:00Z,
    "timeRangeStop": 2023-06-18T14:44:00Z
}

bucket = "telegraf"

////////////////////////////////////////////////////////////////////
hosts = ${httpHost:json}
hostsLength = hosts |> length()
measurement = "nginx_access-logv3"
from(bucket: "${bucket}")
  |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
  |> filter(fn: (r) => r["_measurement"] == measurement and (r["_field"] == "request" or r["_field"] == "nginx_host"))
  |> pivot(rowKey: ["_time"], columnKey: ["_field"], valueColumn: "_value")
  |> filter(fn: (r) => if hostsLength > 0 then contains(value: r["nginx_host"], set: hosts) else true)
  |> group()
  |> duplicate(column: "request", as: "_value")
  |> group(columns: ["request"])
  |> count()
  |> group()
  |> sort(columns: ["_value"], desc: true)
influxdb/by_nginx_resp_code.flux (new file)
@@ -0,0 +1,23 @@

////////////////////////////////////////////////////////////////////
v = {
    "timeRangeStart": 2023-06-16T14:44:00Z,
    "timeRangeStop": 2023-06-18T14:44:00Z
}

bucket = "telegraf"

////////////////////////////////////////////////////////////////////
hosts = ${httpHost:json}

hostsLength = hosts |> length()
measurement = "nginx_access-logv3"
from(bucket: "${bucket}")
  |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
  |> filter(fn: (r) => r["_measurement"] == measurement and r["_field"] == "nginx_host")
  |> filter(fn: (r) => if hostsLength > 0 then contains(value: r["_value"], set: hosts) else true)
  |> group(columns: ["resp_code"])
  |> count()
  |> group()
  |> sort(columns: ["_value"], desc: true)
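resp_code (like verb) is emitted as a tag by Telegraf's built-in COMMON_LOG_FORMAT grok pattern, which is why the query can group on it directly without pivoting. A hedged variation that keeps only 4xx/5xx responses and breaks them down per vhost (same assumed bucket and measurement; the -24h range is a placeholder):

measurement = "nginx_access-logv3"

from(bucket: "telegraf")
  |> range(start: -24h)
  |> filter(fn: (r) => r["_measurement"] == measurement and r["_field"] == "nginx_host")
  // resp_code is a tag, so it is available on every row
  |> filter(fn: (r) => r["resp_code"] =~ /^[45]/)
  |> duplicate(column: "_value", as: "nginx_host")
  |> group(columns: ["resp_code", "nginx_host"])
  |> count()
  |> group()
  |> sort(columns: ["_value"], desc: true)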
influxdb/by_nginx_verb.flux (new file)
@@ -0,0 +1,23 @@

////////////////////////////////////////////////////////////////////
v = {
    "timeRangeStart": 2023-06-16T14:44:00Z,
    "timeRangeStop": 2023-06-18T14:44:00Z
}

bucket = "telegraf"

////////////////////////////////////////////////////////////////////
hosts = ${httpHost:json}

hostsLength = hosts |> length()
measurement = "nginx_access-logv3"
from(bucket: "${bucket}")
  |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
  |> filter(fn: (r) => r["_measurement"] == measurement and r["_field"] == "nginx_host")
  |> filter(fn: (r) => if hostsLength > 0 then contains(value: r["_value"], set: hosts) else true)
  |> group(columns: ["verb"])
  |> count()
  |> group()
  |> sort(columns: ["_value"], desc: true)
influxdb/get_nginx_hosts.flux (new file; likely the query behind the ${httpHost} dashboard variable used above)
@@ -0,0 +1,17 @@
////////////////////////////////////////////////////////////////////
v = {
    "timeRangeStart": 2023-06-15T14:44:00Z,
    "timeRangeStop": 2023-06-18T14:44:00Z
}
httpHosts = ["git.limbosolutions.com"]
bucket = "telegraf"

////////////////////////////////////////////////////////////////////

measurement = "nginx_access-logv3"
from(bucket: "${bucket}")
  |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
  |> filter(fn: (r) => r["_measurement"] == measurement and r["_field"] == "nginx_host")
  |> keep(columns: ["_value"])
  |> distinct()
influxdb/testess.flux (new file)
@@ -0,0 +1,19 @@

////////////////////////////////////////////////////////////////////
v = {
    "timeRangeStart": 2023-06-15T14:44:00Z,
    "timeRangeStop": 2023-06-18T14:44:00Z
}
httpHosts = ["git.limbosolutions.com"]
bucket = "telegraf"

////////////////////////////////////////////////////////////////////

measurement = "nginx_access-logv3"
from(bucket: "${bucket}")
  |> range(start: v.timeRangeStart, stop: v.timeRangeStop)
  |> filter(fn: (r) => r["_measurement"] == measurement and r["_field"] == "nginx_host" and r._value == "frontdoor-f7fje4hfd0hpc3h9.z01.azurefd.net")

A sample line from the new vhosts access log (a malformed request that nginx answered with 400; note the $host value, cloud.limbosolutions.com, between the byte count and the referrer):

66.240.205.34 - - [18/Jun/2023:10:01:01 +0000] "H\x00\x00\x00tj\xA8\x9E#D\x98+\xCA\xF0\xA7\xBBl\xC5\x19\xD7\x8D\xB6\x18\xEDJ\x1En\xC1\xF9xu[l\xF0E\x1D-j\xEC\xD4xL\xC9r\xC9\x15\x10u\xE0%\x86Rtg\x05fv\x86]%\xCC\x80\x0C\xE8\xCF\xAE\x00\xB5\xC0f\xC8\x8DD\xC5\x09\xF4" 400 157 cloud.limbosolutions.com "-" "-"
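Probes like this one end up as 400s in the log; a hedged sketch (same assumed bucket and measurement; the -7d range is a placeholder) counts 400 responses per client IP, again relying on resp_code arriving as a tag:

measurement = "nginx_access-logv3"

from(bucket: "telegraf")
  |> range(start: -7d)
  |> filter(fn: (r) => r["_measurement"] == measurement and r["_field"] == "client_ip")
  // resp_code comes through as a tag, so no pivot is needed
  |> filter(fn: (r) => r["resp_code"] == "400")
  |> duplicate(column: "_value", as: "client_ip")
  |> group(columns: ["client_ip"])
  |> count()
  |> group()
  |> sort(columns: ["_value"], desc: true)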