Back to index

moin  1.9.0~rc2
Classes | Functions | Variables
MoinMoin.security.antispam Namespace Reference

Classes

class  Error
class  WikirpcError
class  SecurityPolicy

Functions

def makelist
def getblacklist

Variables

tuple logging = log.getLogger(__name__)

Function Documentation

def MoinMoin.security.antispam.getblacklist (   request,
  pagename,
  do_update 
)
Get blacklist, possibly downloading new copy

@param request: current request (request instance)
@param pagename: bad content page name (unicode)
@param do_update: if true, try to refresh the local BadContent copy from the antispam master wiki when it is stale
@rtype: tuple
@return: (modification timestamp of the local BadContent page, list of blacklisted regular expressions)

Definition at line 53 of file antispam.py.

00053 
00054 def getblacklist(request, pagename, do_update):
00055     """ Get blacklist, possibly downloading new copy
00056 
00057     @param request: current request (request instance)
00058     @param pagename: bad content page name (unicode)
00059     @rtype: list
00060     @return: list of blacklisted regular expressions
00061     """
00062     from MoinMoin.PageEditor import PageEditor
00063     p = PageEditor(request, pagename, uid_override="Antispam subsystem")
00064     mymtime = wikiutil.version2timestamp(p.mtime_usecs())
00065     if do_update:
00066         tooold = time.time() - 1800
00067         failure = caching.CacheEntry(request, "antispam", "failure", scope='wiki')
00068         fail_time = failure.mtime() # only update if no failure in last hour
00069         if (mymtime < tooold) and (fail_time < tooold):
00070             logging.info("%d *BadContent too old, have to check for an update..." % tooold)
00071             import xmlrpclib
00072             import socket
00073 
00074             timeout = 15 # time out for reaching the master server via xmlrpc
00075             old_timeout = socket.getdefaulttimeout()
00076             socket.setdefaulttimeout(timeout)
00077 
00078             master_url = request.cfg.antispam_master_url
00079             master = xmlrpclib.ServerProxy(master_url)
00080             try:
00081                 # Get BadContent info
00082                 master.putClientInfo('ANTISPAM-CHECK', request.url)
00083                 response = master.getPageInfo(pagename)
00084 
00085                 # It seems that response is always a dict
00086                 if isinstance(response, dict) and 'faultCode' in response:
00087                     raise WikirpcError("failed to get BadContent information",
00088                                        response)
00089 
00090                 # Compare date against local BadContent copy
00091                 masterdate = response['lastModified']
00092 
00093                 if isinstance(masterdate, datetime.datetime):
00094                     # for python 2.5
00095                     mydate = datetime.datetime(*tuple(time.gmtime(mymtime))[0:6])
00096                 else:
00097                     # for python <= 2.4.x
00098                     mydate = xmlrpclib.DateTime(tuple(time.gmtime(mymtime)))
00099 
00100                 logging.debug("master: %s mine: %s" % (masterdate, mydate))
00101                 if mydate < masterdate:
00102                     # Get new copy and save
00103                     logging.info("Fetching page from %s..." % master_url)
00104                     master.putClientInfo('ANTISPAM-FETCH', request.url)
00105                     response = master.getPage(pagename)
00106                     if isinstance(response, dict) and 'faultCode' in response:
00107                         raise WikirpcError("failed to get BadContent data", response)
00108                     p._write_file(response)
00109                     mymtime = wikiutil.version2timestamp(p.mtime_usecs())
00110                 else:
00111                     failure.update("") # we didn't get a modified version, this avoids
00112                                        # permanent polling for every save when there
00113                                        # is no updated master page
00114 
00115             except (socket.error, xmlrpclib.ProtocolError), err:
00116                 logging.error('Timeout / socket / protocol error when accessing %s: %s' % (master_url, str(err)))
00117                 # update cache to wait before the next try
00118                 failure.update("")
00119 
00120             except (xmlrpclib.Fault, ), err:
00121                 logging.error('Fault on %s: %s' % (master_url, str(err)))
00122                 # update cache to wait before the next try
00123                 failure.update("")
00124 
00125             except Error, err:
00126                 # In case of Error, we log the error and use the local BadContent copy.
00127                 logging.error(str(err))
00128 
00129             # set back socket timeout
00130             socket.setdefaulttimeout(old_timeout)
00131 
00132     blacklist = p.get_raw_body()
00133     return mymtime, makelist(blacklist)
00134 

Here is the call graph for this function:

Here is the caller graph for this function:

Split text into lines, strip them, skip # comments 

Definition at line 41 of file antispam.py.

def makelist(text):
    """ Split text into lines, strip them, skip # comments """
    # drop ' # ' rest-of-line comments, then whitespace, keeping only
    # non-empty lines that are not full-line '#' comments
    stripped = (raw.split(' # ', 1)[0].strip() for raw in text.splitlines())
    return [entry for entry in stripped if entry and not entry.startswith('#')]

Here is the caller graph for this function:


Variable Documentation

Definition at line 12 of file antispam.py.