
moin 1.9.0~rc2
hitcounts.py
# -*- coding: iso-8859-1 -*-
"""
    MoinMoin - Hitcount Statistics

    This macro creates a hitcount chart from the data in "event.log".

    TODO: refactor to use a class, this code is ugly.
          A lot of code here is duplicated in stats.useragents.
          Maybe both can use the same base class, maybe some parts are useful to other code.

    @copyright: 2002-2004 Juergen Hermann <jh@web.de>,
                2007 MoinMoin:ThomasWaldmann
    @license: GNU GPL, see COPYING for details.
"""

_debug = 0

import time

from MoinMoin import caching, wikiutil, logfile
from MoinMoin.Page import Page
from MoinMoin.logfile import eventlog

# this is a CONSTANT used for on-disk caching, it must NOT be configurable and
# not depend on request.user!
DATE_FMT = '%04d-%02d-%02d' # % (y, m, d)

def linkto(pagename, request, params=''):
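    """ Return an <img> tag whose src renders the 'hitcounts' chart for pagename
        (via ?action=chart&type=hitcounts).

        Falls back to the text() table when no chart_options are configured,
        and renders the image directly via draw() when _debug is set.
    """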
    _ = request.getText

    if not request.cfg.chart_options:
        return text(pagename, request, params)

    if _debug:
        return draw(pagename, request)

    page = Page(request, pagename)

    # Create escaped query string from dict and params
    querystr = {'action': 'chart', 'type': 'hitcounts'}
    querystr = wikiutil.makeQueryString(querystr)
    querystr = wikiutil.escape(querystr)
    if params:
        querystr += '&amp;' + params

    data = {'url': page.url(request, querystr)}
    data.update(request.cfg.chart_options)
    result = ('<img src="%(url)s" width="%(width)d" height="%(height)d"'
              ' alt="hitcounts chart">') % data

    return result


def get_data(pagename, request, filterpage=None):
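    """ Return three parallel lists (days, views, edits) read from event.log.

        Results are cached (per page if filterpage is given, wiki-wide
        otherwise); only events newer than the cached timestamp are read
        from the log, then the cache is updated.
    """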
    cache_days, cache_views, cache_edits = [], [], []
    cache_date = 0

    # Get results from cache
    if filterpage:
        arena = Page(request, pagename)
        cache = caching.CacheEntry(request, arena, 'hitcounts', scope='item', use_pickle=True)
    else:
        arena = 'charts'
        cache = caching.CacheEntry(request, arena, 'hitcounts', scope='wiki', use_pickle=True)

    if cache.exists():
        try:
            cache_date, cache_days, cache_views, cache_edits = cache.content()
        except:
            cache.remove() # cache gone bad

    # Get new results from the log
    log = eventlog.EventLog(request)
    try:
        new_date = log.date()
    except logfile.LogMissing:
        new_date = None

    # prepare data
    days = []
    views = []
    edits = []
    ratchet_day = None
    ratchet_time = None
    if new_date is not None:
        log.set_filter(['VIEWPAGE', 'SAVEPAGE'])
        latest = None
        for event in log.reverse():
            # remember the timestamp of the newest processed event for the
            # cache (don't use event_log.date() for that)
            if latest is None:
                latest = event[0]
            event_usecs = event[0]
            if event_usecs <= cache_date:
                break
            eventpage = event[2].get('pagename', '')
            if filterpage and eventpage != filterpage:
                continue
            event_secs = wikiutil.version2timestamp(event_usecs)
            time_tuple = time.gmtime(event_secs) # must be UTC
            day = tuple(time_tuple[0:3])
            if day != ratchet_day:
                # new day
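                # walking newest-first: pad zero-count rows for any days
                # without events between the last processed day and this one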
                while ratchet_time:
                    ratchet_time -= 86400 # seconds per day
                    rday = tuple(time.gmtime(ratchet_time)[0:3]) # must be UTC
                    if rday <= day:
                        break
                    days.append(DATE_FMT % rday)
                    views.append(0)
                    edits.append(0)
                days.append(DATE_FMT % day)
                views.append(0)
                edits.append(0)
                ratchet_day = day
                ratchet_time = event_secs
            if event[1] == 'VIEWPAGE':
                views[-1] += 1
            elif event[1] == 'SAVEPAGE':
                edits[-1] += 1

        days.reverse()
        views.reverse()
        edits.reverse()

    # if the first new day is the same as the last cached day, merge them
    if cache_days and days and days[0] == cache_days[-1]:
        cache_edits[-1] += edits[0]
        cache_views[-1] += views[0]
        days, views, edits = days[1:], views[1:], edits[1:]

    # Update and save the cache
    cache_days.extend(days)
    cache_views.extend(views)
    cache_edits.extend(edits)
    if new_date is not None:
        cache.update((latest, cache_days, cache_views, cache_edits))

    return cache_days, cache_views, cache_edits


def text(pagename, request, params=''):
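    """ Return hit statistics as a rendered DataBrowserWidget table.

        An optional page filter is taken from params ('page=...') or from
        the request's 'page' value.  If there are more than 30 days of
        data, rows are averaged over buckets of roughly len(days)/30 days.
    """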
    from MoinMoin.util.dataset import TupleDataset, Column
    from MoinMoin.widget.browser import DataBrowserWidget
    _ = request.getText

    # check params
    filterpage = None
    if params.startswith('page='):
        filterpage = wikiutil.url_unquote(params[len('page='):])

    if request and request.values and 'page' in request.values:
        filterpage = request.values['page']

    days, views, edits = get_data(pagename, request, filterpage)

    hits = TupleDataset()
    hits.columns = [Column('day', label=_("Date"), align='left'),
                    Column('views', label=_("Views/day"), align='right'),
                    Column('edits', label=_("Edits/day"), align='right'),
                    ]

    maxentries = 30

    if maxentries < len(days):
        step = float(len(days)) / maxentries
    else:
        step = 1

    sv = 0.0
    se = 0.0
    sd = 0.0
    cnt = 0

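    # walk the days newest-first, summing views/edits into buckets of ~step
    # days and emitting one averaged row per bucket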
    for i in xrange(len(days)-1, -1, -1):
        d, v, e = days[i], views[i], edits[i]
        # sum up views and edits to step days
        sd += 1
        cnt += 1
        sv += v
        se += e
        if cnt >= step:
            cnt -= step
            hits.addRow((d, "%.1f" % (sv/sd), "%.1f" % (se/sd)))
            sv = 0.0
            se = 0.0
            sd = 0.0

    table = DataBrowserWidget(request)
    table.setData(hits)
    return table.render(method="GET")


def draw(pagename, request):
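    """ Render the hitcounts chart as a GIF image onto the request.

        Views and edits are drawn as line charts via gdchart; edit counts
        are scaled up by a power of ten so both curves share one y axis.
    """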
    import shutil, cStringIO
    from MoinMoin.stats.chart import Chart, ChartData, Color

    _ = request.getText

    # check params
    filterpage = None
    if request and request.values and 'page' in request.values:
        filterpage = request.values['page']

    days, views, edits = get_data(pagename, request, filterpage)

    import math

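    # scale factor: bring edits up to the same order of magnitude as views,
    # using the nearest lower power of ten of the views/edits ratio
    # (falls back to 1 if there are no views or edits yet)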
    try:
        scalefactor = float(max(views)) / max(edits)
    except (ZeroDivisionError, ValueError):
        scalefactor = 1.0
    else:
        scalefactor = int(10 ** math.floor(math.log10(scalefactor)))

    # scale edits up
    edits = [x * scalefactor for x in edits]

    # create image
    image = cStringIO.StringIO()
    c = Chart()
    c.addData(ChartData(views, color='green'))
    c.addData(ChartData(edits, color='red'))
    chart_title = ''
    if request.cfg.sitename:
        chart_title = "%s: " % request.cfg.sitename
    chart_title = chart_title + _('Page hits and edits')
    if filterpage:
        chart_title = _("%(chart_title)s for %(filterpage)s") % {
            'chart_title': chart_title,
            'filterpage': filterpage,
        }
    chart_title = "%s\n%sx%d" % (chart_title, _("green=view\nred=edit"), scalefactor)
    c.option(
        title=chart_title.encode('iso-8859-1', 'replace'), # gdchart can't do utf-8
        xtitle=(_('date') + ' (Server)').encode('iso-8859-1', 'replace'),
        ytitle=_('# of hits').encode('iso-8859-1', 'replace'),
        title_font=c.GDC_GIANT,
        #thumblabel = 'THUMB', thumbnail = 1, thumbval = 10,
        #ytitle_color = Color('green'),
        #yaxis2 = 1,
        #ytitle2 = '# of edits',
        #ytitle2_color = Color('red'),
        #ylabel2_color = Color('black'),
        #interpolations = 0,
        threed_depth=1.0,
        requested_yinterval=1.0,
        stack_type=c.GDC_STACK_BESIDE
    )
    c.draw(c.GDC_LINE,
        (request.cfg.chart_options['width'], request.cfg.chart_options['height']),
        image, days)

    request.content_type = 'image/gif'
    request.content_length = len(image.getvalue())

    # copy the image
    image.reset()
    shutil.copyfileobj(image, request, 8192)