/agent/crawl: added base.py and mailcrawl.py. Also added a README.txt for the doctests and modified tests.py in /agent to recognize the new files.

git-svn-id: svn://svn.cy55.de/Zope3/src/cybertools/trunk@2488 fd906abe-77d9-0310-91a1-e0d9ade77398
scrat 2008-03-30 22:36:25 +00:00
parent 13e146d774
commit c3e078ca31
4 changed files with 138 additions and 0 deletions

36
agent/crawl/README.txt Normal file

@@ -0,0 +1,36 @@
================================================
Agents for Job Execution and Communication Tasks
================================================
Agents collect information and transfer it, e.g. to a loops server.

  ($Id: README.txt 2413 2008-02-23 14:07:15Z helmutm $)

This package does not depend on zope or the other loops packages
but represents a standalone application.

We need a reactor for working with Twisted; in order not to block
testing when running the reactor we use reactor.iterate() calls
wrapped in a ``tester`` object.

  >>> from cybertools.agent.tests import tester


Crawler
=======

The agent uses Twisted's cooperative multitasking model.

Crawler is the base class for all derived crawlers like the filesystem
crawler and the mail crawler. The SampleCrawler returns a deferred whose
callback has already been fired, so it returns at once.

``collect()`` returns a deferred that must be supplied with a callback
method (and in most cases also an errback method).

  >>> from cybertools.agent.crawl.base import SampleCrawler
  >>> from twisted.internet import defer

  >>> crawler = SampleCrawler()
  >>> deferred = crawler.collect()
  SampleCrawler is collecting.
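
A caller normally attaches a callback (and, as noted above, usually also an
errback) to the returned deferred. A minimal sketch of that step, not part of
the original doctest, using an illustrative ``report`` handler; since the
deferred returned by the SampleCrawler has already fired, the callback runs
immediately:

  >>> def report(result):
  ...     print 'Got %i resources.' % len(result)

  >>> dummy = deferred.addCallback(report)
  Got 0 resources.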

54
agent/crawl/base.py Normal file

@@ -0,0 +1,54 @@
#
# Copyright (c) 2008 Helmut Merz helmutm@cy55.de
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
"""
Crawl base and sample classes.
$Id: base.py
"""
from zope.interface import implements
from cybertools.agent.base.agent import Agent
from cybertools.agent.interfaces import ICrawler
from cybertools.agent.components import agents
from twisted.internet.defer import succeed
class Crawler(object):
implements(ICrawler)
def __init__(self):
pass
def collect(self, filter=None):
d = defer.succeed([])
return d
class SampleCrawler(Crawler):
def collect(self, filter=None):
print 'SampleCrawler is collecting.'
d = succeed([])
return d
agents.register(SampleCrawler, Agent, name='crawl.sample')
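
The README notes that in most cases an errback should be supplied as well; the following is a minimal usage sketch of that pattern, relying only on Twisted's standard Deferred API (the handler names and messages are illustrative, not part of this commit):

# Usage sketch (illustrative, not part of the commit): attach both a
# callback and an errback to the deferred returned by collect().
from cybertools.agent.crawl.base import SampleCrawler

def gotResources(resources):
    print 'collected %i resources' % len(resources)

def collectFailed(failure):
    print 'collection failed: %s' % failure.getErrorMessage()

crawler = SampleCrawler()
d = crawler.collect()
d.addCallbacks(gotResources, collectFailed)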

47
agent/crawl/mailcrawl.py Normal file

@@ -0,0 +1,47 @@
#
# Copyright (c) 2008 Helmut Merz helmutm@cy55.de
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
"""
Crawl base and sample classes.
$Id: base.py
"""
from zope.interface import implements
from cybertools.agent.agent import Agent
from cybertools.agent.interfaces import ICrawler
from cybertools.agent.crawl.base import Crawler as BaseCrawler
from cybertools.agent.components import agents
from twisted.internet.defer import succeed
class MailCrawler(Crawler):
def __init__(self, params):
self.params = params
def collect(self, filter=None):
print 'MailCrawler is collecting.'
d = succeed([])
return d
#would it make sense to register this one at the AdapterFactory?
#or should it also just serve as base class for OutlookCrawler
#KMailCrawler etc. ?
#agents.register(MailCrawler, Agent, name='crawl.mail')
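
The trailing comment leaves open whether MailCrawler should be registered itself or only serve as a base class for concrete crawlers such as OutlookCrawler or KMailCrawler. Under the latter assumption, a subclass could look roughly like this (the OutlookCrawler sketched here is hypothetical and not part of this commit):

# Hypothetical subclass sketch; not part of this commit.
from twisted.internet.defer import succeed

from cybertools.agent.crawl.mailcrawl import MailCrawler

class OutlookCrawler(MailCrawler):

    def collect(self, filter=None):
        # A real implementation would query the Outlook mail store here
        # and fire the deferred with the collected mail resources.
        print 'OutlookCrawler is collecting.'
        return succeed([])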

agent/tests.py

@@ -43,6 +43,7 @@ def test_suite():
        unittest.makeSuite(Test),
        DocFileSuite('README.txt', optionflags=flags),
        DocFileSuite('core/README.txt', optionflags=flags),
        DocFileSuite('crawl/README.txt', optionflags=flags),
        #DocFileSuite('transport/httpput.txt', optionflags=flags),
        ))
    return testSuite