
=====================================================
loops.agent.crawl.filesystem - The Filesystem Crawler
=====================================================

($Id$)

>>> import os
>>> from time import time

>>> from loops.agent.tests import tester, baseDir
>>> from loops.agent.core import Agent
>>> from loops.agent.crawl.filesystem import CrawlingJob

>>> agent = Agent()
>>> scheduler = agent.scheduler

We create a crawling job that should scan the data subdirectory
of the testing directory in the loops.agent package.

>>> dirname = os.path.join(baseDir, 'testing', 'data')
>>> crawlJob = CrawlingJob(directory=dirname)
The result of the crawling process should be transferred using
the dummy transporter from the testing package; this just prints
an informative message with the contents of the files to be
transferred.

>>> from loops.agent.testing import transport
>>> transporter = transport.Transporter(agent)
>>> transportJob = transporter.createJob()
>>> crawlJob.successors.append(transportJob)
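
The successors attribute behaves like a plain list, so the crawling job
is not limited to a single follow-up job: further jobs could be appended
in the same way, each of them presumably receiving the crawling result.
A minimal sketch (purely illustrative, not executed as part of this test,
and assuming the same dummy Transporter API as above) might look like this:

  # hypothetical: chain a second dummy transport job after the crawl
  secondTransportJob = transporter.createJob()
  crawlJob.successors.append(secondTransportJob)
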
We are now ready to schedule the job and let the reactor execute it.

>>> startTime = scheduler.schedule(crawlJob)
>>> tester.iterate()
Metadata: {'path': '...data...subdir...file2.txt'}
Transferring: Data from file2.txt
Metadata: {'path': '...data...file1.txt'}
Transferring: Data from file1.txt
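
The same pattern would apply to any other directory to be watched; a
sketch (again not run as part of this test, using a purely hypothetical
target directory) would only differ in the path handed to the crawling job:

  # hypothetical: crawl another directory with the same agent setup
  otherDir = os.path.join(baseDir, 'testing', 'more-data')
  otherJob = CrawlingJob(directory=otherDir)
  otherJob.successors.append(transporter.createJob())
  startTime = scheduler.schedule(otherJob)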