From 2c77c5dcebaa1cdbb11e645ea96439c593fa06fe Mon Sep 17 00:00:00 2001
From: Martin Langhoff
Date: Wed, 1 Apr 2015 14:37:55 -0400
Subject: [PATCH] watchlog: minor indentation cleanup

some code blocks had an extra space. no code change.
---
 watchlog | 66 +++++++++++++++++++++++++++++++++---------------------------------
 1 file changed, 33 insertions(+), 33 deletions(-)

diff --git a/watchlog b/watchlog
index 3dc2149..1a60a5c 100755
--- a/watchlog
+++ b/watchlog
@@ -197,12 +197,12 @@ class LogWatcher(object):
 
 class Parser:
     def __init__(self, fpath, keepdays):
-         self.committime = int(time.time())
-         self.datestamp = self._getdatestamp()
-         self.keepdays = keepdays
-         self.conn = sqlite3.connect(fpath)
-         self.cur = self.conn.cursor()
-         self.cur.execute("""CREATE TABLE IF NOT EXISTS logentries
+        self.committime = int(time.time())
+        self.datestamp = self._getdatestamp()
+        self.keepdays = keepdays
+        self.conn = sqlite3.connect(fpath)
+        self.cur = self.conn.cursor()
+        self.cur.execute("""CREATE TABLE IF NOT EXISTS logentries
                          (timestamp INTEGER,
                           hostname TEXT,
                           hits INTEGER,
@@ -212,40 +212,40 @@ class Parser:
                           pview_50th_us INTEGER,
                           pview_80th_us INTEGER,
                           unique_users INTEGER )""")
-         self.cur.execute("""CREATE INDEX IF NOT EXISTS logentries_primary_idx
+        self.cur.execute("""CREATE INDEX IF NOT EXISTS logentries_primary_idx
                           ON logentries (timestamp,hostname)""")
 
     def parseline(self, line):
-         # sample stats line:
-         # Mar 23 19:57:34 rl01-3-v1552 www_stats: stats {"hits":1,"pviews":0,"pview_avg_us":0,"boundary_epoch":1427140620.0,"unique_users":0,"pview_80th_us":0,"pview_50th_us":0,"ajax_hits":0}
-         try:
-             parts = re.split('\s+', line, 6)
-             hostname = parts[3]
-             stats = json.loads(parts[6])
-             if stats['hits'] > 0:
-                 stats['hostname'] = hostname
-                 # cast a few values to int
-                 stats['boundary_epoch'] = int(stats['boundary_epoch'])
-                 stats['pview_avg_us'] = int(stats['pview_avg_us'])
-                 stats['pview_80th_us'] = int(stats['pview_80th_us'])
-                 stats['pview_50th_us'] = int(stats['pview_50th_us'])
-                 # workaround a buglet in compresslog v0.7
-                 # that gives us a list containing just one int for unique users :-/
-                 if isinstance(stats['unique_users'], list):
-                     stats['unique_users'] = stats['unique_users'][0]
-                 self.cur.execute("""INSERT INTO logentries VALUES(:boundary_epoch, :hostname, :hits, :ajax_hits,
+        # sample stats line:
+        # Mar 23 19:57:34 rl01-3-v1552 www_stats: stats {"hits":1,"pviews":0,"pview_avg_us":0,"boundary_epoch":1427140620.0,"unique_users":0,"pview_80th_us":0,"pview_50th_us":0,"ajax_hits":0}
+        try:
+            parts = re.split('\s+', line, 6)
+            hostname = parts[3]
+            stats = json.loads(parts[6])
+            if stats['hits'] > 0:
+                stats['hostname'] = hostname
+                # cast a few values to int
+                stats['boundary_epoch'] = int(stats['boundary_epoch'])
+                stats['pview_avg_us'] = int(stats['pview_avg_us'])
+                stats['pview_80th_us'] = int(stats['pview_80th_us'])
+                stats['pview_50th_us'] = int(stats['pview_50th_us'])
+                # workaround a buglet in compresslog v0.7
+                # that gives us a list containing just one int for unique users :-/
+                if isinstance(stats['unique_users'], list):
+                    stats['unique_users'] = stats['unique_users'][0]
+                self.cur.execute("""INSERT INTO logentries VALUES(:boundary_epoch, :hostname, :hits, :ajax_hits,
                                                    :pviews, :pview_avg_us, :pview_50th_us, :pview_80th_us,
                                                    :unique_users)""",
                                  stats)
-         except:
-             e = sys.exc_info()[0]
-             sys.stderr.write('Caught exception %s while processing %s\n' % (e, line))
+        except:
+            e = sys.exc_info()[0]
+            sys.stderr.write('Caught exception %s while processing %s\n' % (e, line))
 
-         # TODO: if this turns out to be a bottleneck
-         # we can defer commits to every N inserts
-         # (and add a commit on exit, adding sighandlers)
-         self.maybe_commit()
-         self.maybe_gc()
+        # TODO: if this turns out to be a bottleneck
+        # we can defer commits to every N inserts
+        # (and add a commit on exit, adding sighandlers)
+        self.maybe_commit()
+        self.maybe_gc()
 
     def maybe_commit(self):
         now = int(time.time())
-- 
2.11.4.GIT