# coding: utf8
# 使用说明:
# 1. 默认参数启动
# python loggenerator.py
# 2. 自定义参数启动
# KEY1=VALUE1 KEY2=VALUE2 python loggenerator.py
# 支持参数:
# TYPE: 日志类型,默认json,可选txt
# MAX: 日志总量,默认10000000
# SPEED: 每秒生成日志量,默认15000
# OUTPUT: 日志文件,默认logs/sum.log
# MAXSIZE: 日志文件大小,默认50*1024*1024 (50MB),超过此大小将会进行轮转
import os
import gzip
import shutil
from random import random
import time
import json
def logCompressor(filepath):
    """Coroutine that rotates and gzips *filepath* once it exceeds MAXSIZE.

    Prime it with ``.send(None)``; every subsequent ``.send(n)`` with a
    truthy ``n`` checks the current file size and yields ``'200'`` when the
    file was rotated (moved aside and compressed) or ``'0'`` when nothing
    was done.  Sending a falsy value terminates the coroutine.
    """
    offset = 0  # rotation counter embedded in the archive file name
    # Environment variables are always strings; coerce so the size
    # comparison below cannot raise TypeError when MAXSIZE is set.
    maxsize = int(os.getenv('MAXSIZE', 50 * 1024 * 1024))
    r = ''
    while True:
        n = yield r
        if not n:
            return
        size = os.path.getsize(filepath)
        if size >= maxsize:
            # Move the live file aside under a random temp name so the
            # writer can reopen a fresh file immediately.
            tmpfile = "%s%d_tmp" % (filepath, int(random() * 1e17))
            shutil.move(filepath, tmpfile)
            archive = "%s-%s.%d.log.gz" % (
                filepath.split('.')[0],
                time.strftime("%Y.%m.%d", time.localtime()),
                offset)
            # Stream-compress; the 'with' closes both handles (the
            # original leaked both file objects).
            with open(tmpfile, 'rb') as src, \
                    gzip.open(archive, 'wb', compresslevel=9) as dst:
                shutil.copyfileobj(src, dst)
            os.remove(tmpfile)
            offset += 1
            r = '200'
        else:
            r = '0'
def logGenerator(c, maxline, speed, filepath, logtype):
    """Write fake log lines to *filepath* at roughly *speed* lines/second.

    c        -- compressor coroutine (primed here with .send(None)); it is
                sent the running line count after every write and may
                answer '200' to signal the file was rotated away.
    maxline  -- total number of lines to emit (int or numeric str --
                environment values arrive as strings, so coerce).
    speed    -- target lines per second (int or numeric str).
    filepath -- target log file; missing parent directories are created.
    logtype  -- 'json' for JSON records, anything else for plain text.
    """
    # Coerce env-supplied strings so the loop comparisons cannot raise
    # TypeError on Python 3 (original bug).
    maxline = int(maxline)
    speed = int(speed)
    dirname = os.path.dirname(filepath)
    if dirname and not os.path.exists(dirname):
        # makedirs handles nested paths; os.mkdir failed when more than
        # one directory level was missing.
        os.makedirs(dirname)
    fb = open(filepath, 'a+')
    c.send(None)  # prime the compressor coroutine
    n = 0
    try:
        while n < maxline:
            start = time.time()
            s = 0  # lines written in the current one-second window
            # Also stop at maxline exactly; the original overshot the
            # total by up to a full second's batch.
            while s < speed and n < maxline:
                if logtype == 'json':
                    m = json.dumps({
                        "level": "INFO",
                        "date": time.strftime("%Y.%m.%d %H:%M:%S", time.localtime()),
                        "message": "time:%s, nothing to do!" % time.time(),
                        "business": "logGenerator:19",
                        "service": "loggenerator",
                        "hostname": "fluentd1"
                    })
                else:
                    m = '%s [INFO] [logGenerator:19] - time:%s, nothing to do!' % (
                        time.strftime("%Y.%m.%d %H:%M:%S", time.localtime()), time.time())
                fb.write(m + "\n")
                n += 1
                s += 1
                # Flush so the compressor's os.path.getsize check sees the
                # real on-disk size rather than a stale buffered one.
                fb.flush()
                r = c.send(n)
                if r == '200':
                    # File was rotated away; start a fresh one.
                    fb.close()
                    fb = open(filepath, 'w+')
            if n >= maxline:
                break  # done -- no need to sleep out the final second
            elapsed = time.time() - start
            if elapsed < 1:
                # Batch finished early: sleep the remainder of the second
                # to hold the write rate at *speed* lines/second.
                time.sleep(1 - elapsed)
    finally:
        fb.close()  # original leaked the handle
        c.close()
if __name__ == "__main__":
maxline = os.getenv('MAX', 10000000)
speed = os.getenv('SPEED', 15000)
logfile = os.getenv('OUTPUT', 'logs/sum.log')
logtype = os.getenv('TYPE', 'json')
c = logCompressor(logfile)
logGenerator(c, maxline, speed, logfile, logtype)