On Windows, Nginx logs can be processed with batch (BAT) or PowerShell scripts. Below are several common log-processing scripts: a batch script for log rotation, followed by PowerShell scripts for access-log analysis, compression and archiving, and real-time monitoring.
@echo off
:: Nginx log rotation script (run as Administrator; assumes Nginx is installed as a Windows service named "nginx")

:: Set variables
set NGINX_PATH=C:\nginx
set LOG_PATH=%NGINX_PATH%\logs
:: Build a yyyyMMdd date stamp.
:: Note: %date% is locale-dependent; the offsets below assume the yyyy/MM/dd short date format.
set LOG_DATE=%date:~0,4%%date:~5,2%%date:~8,2%

:: Stop the Nginx service so the log files can be renamed
net stop nginx

:: Rename the current log files with the date stamp
rename "%LOG_PATH%\access.log" "access_%LOG_DATE%.log"
rename "%LOG_PATH%\error.log" "error_%LOG_DATE%.log"

:: Start the Nginx service again (fresh access.log and error.log are created)
net start nginx

:: Delete rotated log files older than 30 days
forfiles /p "%LOG_PATH%" /m "access_*.log" /d -30 /c "cmd /c del @path"
forfiles /p "%LOG_PATH%" /m "error_*.log" /d -30 /c "cmd /c del @path"

echo Log rotation finished
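To run the rotation script automatically, it can be registered as a daily scheduled task. The following is a minimal PowerShell sketch using the built-in ScheduledTasks cmdlets (Windows 8 / Server 2012 or later); the task name and the path where the batch file is saved (C:\nginx\rotate_logs.bat) are hypothetical placeholders.

# Register a daily scheduled task for the rotation script (run from an elevated PowerShell)
$action  = New-ScheduledTaskAction -Execute "C:\nginx\rotate_logs.bat"   # hypothetical path of the batch script above
$trigger = New-ScheduledTaskTrigger -Daily -At "00:00"
Register-ScheduledTask -TaskName "NginxLogRotate" -Action $action -Trigger $trigger -RunLevel Highest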
# Nginx access log analysis script
$logPath = "C:\nginx\logs\access.log"
$outputPath = "C:\nginx\logs\access_report_$(Get-Date -Format 'yyyyMMdd').txt"

# Top 10 client IPs by request count (takes the first IP on each line, which is the client IP in the default log format)
$topIPs = Get-Content $logPath | Select-String -Pattern '(\d+\.\d+\.\d+\.\d+)' |
    ForEach-Object { $_.Matches[0].Groups[1].Value } |
    Group-Object |
    Sort-Object -Property Count -Descending |
    Select-Object -First 10

# Top 10 requested URLs by request count
$topURLs = Get-Content $logPath | Select-String -Pattern '"(GET|POST) (.+?) HTTP' |
    ForEach-Object { $_.Matches[0].Groups[2].Value } |
    Group-Object |
    Sort-Object -Property Count -Descending |
    Select-Object -First 10

# Write the report
"=== Nginx Access Log Report ===" | Out-File $outputPath
"Generated: $(Get-Date)" | Out-File $outputPath -Append
"" | Out-File $outputPath -Append
"=== Top 10 client IPs ===" | Out-File $outputPath -Append
$topIPs | Format-Table Count, Name -AutoSize | Out-File $outputPath -Append
"" | Out-File $outputPath -Append
"=== Top 10 requested URLs ===" | Out-File $outputPath -Append
$topURLs | Format-Table Count, Name -AutoSize | Out-File $outputPath -Append

Write-Host "Analysis finished; report saved to $outputPath"
# Nginx log compression and archiving script
$logPath = "C:\nginx\logs"
$backupPath = "C:\nginx\logs\backup"
$daysToKeep = 30

# Create the backup directory if it does not exist
if (!(Test-Path $backupPath)) {
    New-Item -ItemType Directory -Path $backupPath | Out-Null
}

# Collect the rotated log files to archive (the active access.log and error.log are excluded)
# Note: -Include only takes effect when the path contains a wildcard, hence "$logPath\*"
$logFiles = Get-ChildItem -Path "$logPath\*" -Include "access_*.log", "error_*.log" -Exclude "access.log", "error.log"

foreach ($file in $logFiles) {
    $zipName = "$backupPath\$($file.BaseName).zip"
    # Compress the log file
    Compress-Archive -Path $file.FullName -DestinationPath $zipName -CompressionLevel Optimal
    # Delete the original only after the archive has been created
    if (Test-Path $zipName) {
        Remove-Item $file.FullName
        Write-Host "Compressed and archived: $($file.Name)"
    }
}

# Delete expired backup archives
Get-ChildItem -Path $backupPath -Filter "*.zip" |
    Where-Object { $_.LastWriteTime -lt (Get-Date).AddDays(-$daysToKeep) } |
    Remove-Item -Force

Write-Host "Log compression and archiving finished"
# Nginx real-time log monitoring script
$logFile = "C:\nginx\logs\access.log"
$keywords = @("500", "404", "error", "warning")  # keywords to watch for

# Clear the screen
Clear-Host

# Record the initial size of the log file
$initialSize = (Get-Item $logFile).Length

# Poll the log file continuously
while ($true) {
    Start-Sleep -Seconds 1
    # Check whether the file has grown
    $currentSize = (Get-Item $logFile).Length
    if ($currentSize -lt $initialSize) {
        # The file was rotated or truncated; start reading from the beginning again
        $initialSize = 0
    }
    if ($currentSize -gt $initialSize) {
        # Read only the newly appended bytes (share the file so Nginx can keep writing)
        $stream = [System.IO.File]::Open($logFile, [System.IO.FileMode]::Open, [System.IO.FileAccess]::Read, [System.IO.FileShare]::ReadWrite)
        $stream.Seek($initialSize, [System.IO.SeekOrigin]::Begin) | Out-Null
        $reader = New-Object System.IO.StreamReader($stream)
        $newContent = $reader.ReadToEnd()
        $reader.Close()
        $stream.Close()
        # Scan the new content for the monitored keywords
        foreach ($keyword in $keywords) {
            if ($newContent -match $keyword) {
                $lines = $newContent -split "`n"
                foreach ($line in $lines) {
                    if ($line -match $keyword) {
                        Write-Host "$(Get-Date -Format 'yyyy-MM-dd HH:mm:ss') - keyword '$keyword' found: $line" -ForegroundColor Red
                    }
                }
            }
        }
        $initialSize = $currentSize
    }
}
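For simpler cases, the polling loop above can be replaced with PowerShell's built-in file tailing: Get-Content -Wait blocks and streams new lines as they are appended. A minimal sketch, with the keyword pattern mirroring the list above:

# Tail the access log and highlight lines containing any of the monitored keywords
Get-Content -Path "C:\nginx\logs\access.log" -Wait -Tail 0 |
    Where-Object { $_ -match '500|404|error|warning' } |
    ForEach-Object { Write-Host "$(Get-Date -Format 'yyyy-MM-dd HH:mm:ss') - $_" -ForegroundColor Red }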
For more advanced requirements, consider dedicated log-processing tools such as Logstash or Fluentd.