Enabled logging in wsgi app.

Changed log type to RotatingFileHandler and changed logging file to
/var/tmp/wesstats.log. Added support for multiple tables, although
still broken and incomplete. Refactored some long string catenations
into a cleaner style.
Gregory Shikhman 2009-09-02 05:29:20 +00:00
parent 6027446186
commit 67e90827f7
5 changed files with 24 additions and 19 deletions

View File

@@ -40,8 +40,8 @@ sys.path.append('/srv/www/html/wesstats')
os.environ['PYTHON_EGG_CACHE'] = '/srv/www/html/wesstats/python-eggs'
#6.[Optional]If you want to enable logging you need to initialize logging. You also need to setup logger handlers in you production.ini. When done uncomment next two lines.
#from paste.script.util.logging_config import fileConfig
#fileConfig('/srv/www/html/wesstats/prod.apache.ini')
from paste.script.util.logging_config import fileConfig
fileConfig('/srv/www/html/wesstats/prod.apache.ini')
#7. Load you application production.ini file.
from paste.deploy import loadapp
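For context, the two uncommented lines slot into the standard Pylons mod_wsgi entry script roughly as follows. The paths are the ones used above; the final loadapp call and the module-level name "application" (which mod_wsgi looks for) are the usual pattern for step 7 rather than a quote of this file, so treat this as a sketch:

import os
import sys

sys.path.append('/srv/www/html/wesstats')
os.environ['PYTHON_EGG_CACHE'] = '/srv/www/html/wesstats/python-eggs'

# Configure the handlers declared in the ini before the app is loaded, so
# startup messages are captured as well.
from paste.script.util.logging_config import fileConfig
fileConfig('/srv/www/html/wesstats/prod.apache.ini')

# mod_wsgi serves whatever module-level callable is named "application".
from paste.deploy import loadapp
application = loadapp('config:/srv/www/html/wesstats/prod.apache.ini')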

View File

@@ -36,20 +36,21 @@ def is_valid_date(date):
def is_valid_level(lev):
return lev.isdigit()
def scaled_query(curs,query,threshold,evaluator):
def scaled_query(curs,tbl,query,threshold,evaluator):
s_time = time.time()
#list of all the sample sizes
curs.execute("SELECT TABLE_NAME FROM information_schema.tables WHERE `TABLE_NAME` REGEXP '^"+configuration.DB_TABLE_PREFIX+"SMPL'")
query = "SELECT TABLE_NAME FROM information_schema.tables WHERE TABLE_NAME REGEXP '^%s%sSMPL'" % (configuration.DB_TABLE_PREFIX,tbl)
curs.execute(query)
results = curs.fetchall()
sizes = []
for result in results:
sizes.append(int(result[0][len(configuration.DB_TABLE_PREFIX+"SMPL"):]))
sizes.append(int(result[0][len(configuration.DB_TABLE_PREFIX+"SMPL"+tbl):]))
sizes.sort()
#print sizes
#try query on all the sample sizes in increasing order until we get one that meets the threshold
for size in sizes:
tblname = configuration.DB_TABLE_PREFIX+"SMPL"+str(size)
nquery = query.replace("GAMES",tblname)
tblname = "%sSMPL%s%d" % (configuration.DB_TABLE_PREFIX,tbl,size)
nquery = query.replace(tbl,tblname)
curs.execute(nquery)
results = curs.fetchall()
length = evaluator(results)
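Two things in the new scaled_query look like the "still broken" parts mentioned in the commit message: the table-name lookup is assigned to the parameter name query, clobbering the caller's chart query before the loop tries to rewrite it, and the per-size table name is built as PREFIX + "SMPL" + tbl + size while the lookup regexp expects PREFIX + tbl + "SMPL" + size. A sketch of the apparent intent, assuming the latter naming convention throughout and assuming the not-shown tail of the function returns the first sampled result set whose evaluator score reaches the threshold (configuration is the app's existing settings module already used in helperlib):

def scaled_query(curs, tbl, query, threshold, evaluator):
    # Find every sample table derived from tbl, e.g. PREFIX + tbl + "SMPL1000".
    lookup = ("SELECT TABLE_NAME FROM information_schema.tables "
              "WHERE TABLE_NAME REGEXP '^%s%sSMPL'"
              % (configuration.DB_TABLE_PREFIX, tbl))
    curs.execute(lookup)
    stem = configuration.DB_TABLE_PREFIX + tbl + "SMPL"
    # The numeric suffix after the stem is the sample size.
    sizes = sorted(int(row[0][len(stem):]) for row in curs.fetchall())
    # Try the samples in increasing order until one is judged representative.
    for size in sizes:
        tblname = "%s%sSMPL%d" % (configuration.DB_TABLE_PREFIX, tbl, size)
        curs.execute(query.replace(tbl, tblname))
        results = curs.fetchall()
        if evaluator(results) >= threshold:
            return results
    # No sample met the threshold; fall back to the full table.
    curs.execute(query)
    return curs.fetchall()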

View File

@@ -92,7 +92,7 @@ keys = generic
# If you create additional loggers, add them as a key to [loggers]
[logger_root]
level = INFO
handlers = console
handlers = filelog
[logger_wesstats]
level = DEBUG
@@ -123,10 +123,10 @@ level = NOTSET
formatter = generic
[handler_filelog]
class=FileHandler
class=handlers.RotatingFileHandler
level=NOTSET
formatter=generic
args=('wesstats.log', 'w')
args=('/var/tmp/wesstats.log', 'a',1024*1024,3)
# If you create additional formatters, add them as a key to [formatters]
[formatter_generic]
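The args tuple maps directly onto RotatingFileHandler(filename, mode, maxBytes, backupCount), so logging now goes to /var/tmp/wesstats.log, rotates at roughly 1 MiB, and keeps three rotated copies (wesstats.log.1 through .3). The plain-Python equivalent of the handler section looks roughly like this; the format string is only a stand-in, since [formatter_generic] is not shown in this hunk:

import logging
from logging.handlers import RotatingFileHandler

# Same parameters as args=('/var/tmp/wesstats.log', 'a', 1024*1024, 3) above.
handler = RotatingFileHandler('/var/tmp/wesstats.log', mode='a',
                              maxBytes=1024 * 1024, backupCount=3)
handler.setLevel(logging.NOTSET)
handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s'))
logging.getLogger().addHandler(handler)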

View File

@@ -47,15 +47,18 @@ class LineGraphController(BaseController):
conn = MySQLdb.connect(configuration.DB_HOSTNAME,configuration.DB_USERNAME,configuration.DB_PASSWORD,configuration.DB_NAME,use_unicode=True)
curs = conn.cursor()
curs.execute("SELECT title,xdata,ydata,xlabel,ylabel,filters,y_xform FROM _wsviews WHERE url = %s", (self.url,))
curs.execute("SELECT title,xdata,ydata,xlabel,ylabel,filters,y_xform,tbl FROM _wsviews WHERE url = %s", (self.url,))
view_data = curs.fetchall()[0]
log.debug("line chart request, here is SQL data for this view:")
log.debug(view_data)
tbl = view_data[7]
#fetch the relevant filters for this template and their possible values
available_filters = view_data[5].split(',')
fdata = dict()
for filter in available_filters:
curs.execute("SELECT DISTINCT "+filter+" FROM GAMES_SP")
curs.execute("SELECT DISTINCT " + filter + " FROM " + tbl)
#curs.fetchall() returns a list of lists, we convert this to a plain list for ease of handling
raw_fdata = curs.fetchall()
fdata[filter] = []
@@ -117,10 +120,10 @@ class LineGraphController(BaseController):
y_group_str += y_data[i] + ","
y_data_str = y_data_str[0:len(y_data_str)-1]
y_group_str = y_group_str[0:len(y_group_str)-1]
query = "SELECT CAST(timestamp as DATE)," + y_data_str + " FROM GAMES_SP " + filters + " GROUP BY CAST(timestamp as DATE)," + y_group_str
query = "SELECT CAST(timestamp as DATE),%s FROM %s %s GROUP BY CAST(timestamp as DATE),%s" % (y_data_str,tbl,filters,y_group_str)
log.debug("SQL query:")
log.debug(query)
results = helperlib.scaled_query(curs,query,100,evaluators.simple_eval)
results = helperlib.scaled_query(curs,tbl,query,100,evaluators.simple_eval)
#log.debug("query result:")
#log.debug(results)
data = LineGraphController.reformat_data(self,results)
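Note that the %s placeholder in the _wsviews lookup is the only parameterized piece; DB-API placeholders can only bind values, so the table name from the view row and the filter column names have to be spliced into the SQL as strings, both in the SELECT DISTINCT filter loop and in the final GROUP BY query. If the _wsviews rows are ever editable by untrusted users, a cheap safeguard is to validate identifiers before interpolating them. A small defensive sketch that is not part of this commit (safe_ident is a hypothetical helper):

import re

_IDENT = re.compile(r'^[A-Za-z_][A-Za-z0-9_]*$')

def safe_ident(name):
    # Column and table names cannot be bound with %s placeholders, so check
    # them before building the query by string concatenation.
    if not _IDENT.match(name):
        raise ValueError("unsafe SQL identifier: %r" % name)
    return name

# e.g. curs.execute("SELECT DISTINCT " + safe_ident(filter) + " FROM " + safe_ident(tbl))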

View File

@@ -37,15 +37,16 @@ class PieGraphController(BaseController):
conn = MySQLdb.connect(configuration.DB_HOSTNAME,configuration.DB_USERNAME,configuration.DB_PASSWORD,configuration.DB_NAME,use_unicode=True)
curs = conn.cursor()
curs.execute("SELECT title,xdata,ydata,xlabel,ylabel,filters,y_xform FROM _wsviews WHERE url = %s", (self.url,))
curs.execute("SELECT title,xdata,ydata,xlabel,ylabel,filters,y_xform,tbl FROM _wsviews WHERE url = %s", (self.url,))
view_data = curs.fetchall()[0]
log.debug("pie chart request, here is SQL data for this view:")
log.info("pie chart request, here is SQL data for this view:")
log.debug(view_data)
tbl = view_data[7]
#fetch the relevant filters for this template and their possible values
available_filters = view_data[5].split(',')
fdata = dict()
for filter in available_filters:
curs.execute("SELECT DISTINCT "+filter+" FROM GAMES_SP")
curs.execute("SELECT DISTINCT "+filter+" FROM "+tbl)
#curs.fetchall() returns a list of lists, we convert this to a plain list for ease of handling
raw_fdata = curs.fetchall()
fdata[filter] = []
@@ -77,10 +78,10 @@ class PieGraphController(BaseController):
for i in range(len(y_data)):
y_data_str += y_xforms[i] + "(" + y_data[i] + "),"
y_data_str = y_data_str[0:len(y_data_str)-1]
query = "SELECT "+view_data[1]+","+y_data_str+" FROM GAMES_SP "+filters+" GROUP BY "+view_data[1]
query = "SELECT %s,%s FROM %s %s GROUP BY %s" % (view_data[1],y_data_str,tbl,filters,view_data[1])
log.debug("SQL query:")
log.debug(query)
results = helperlib.scaled_query(curs,query,100,evaluators.count_eval)
results = helperlib.scaled_query(curs,tbl,query,100,evaluators.count_eval)
log.debug("query result:")
log.debug(results)
#generate JS datafields here because genshi templating can't emit JS...
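Both controllers now hand scaled_query the table name along with an evaluator and a threshold of 100; the evaluator reduces a fetched result set to a number that scaled_query compares against the threshold when deciding whether a sample table is big enough. The evaluators module itself is not part of this diff, so the following is only a hypothetical illustration of the shape those two functions take:

# Hypothetical sketches; the real evaluators module is not shown in this commit.
def simple_eval(results):
    # Plain row count of the sampled result set.
    return len(results)

def count_eval(results):
    # Sum of per-group counts (e.g. a COUNT(*) column), so a handful of large
    # groups can still satisfy the threshold.
    return sum(row[1] for row in results)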