# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def flush_events(self):
    """Delete every event recorded for this crawler."""
    Event.delete(self)
def emit_warning(self, message, type=None, *args):
    """Log a warning and persist it as a warning-level crawler event.

    :param message: warning text; treated as a %-style format string
        when positional ``args`` are supplied.
    :param type: optional error/category label stored on the event.
        (Name shadows the builtin but is kept for keyword-call
        compatibility with existing callers.)
    :param args: values interpolated into ``message`` via ``%``.
    :return: whatever ``Event.save`` returns for the stored event.
    """
    # Truthiness check instead of len(); empty args means the message
    # is already fully formatted.
    if args:
        message = message % args
    self.log.warning(message)
    return Event.save(self.crawler,
                      self.stage,
                      Event.LEVEL_WARNING,
                      self.run_id,
                      error=type,
                      message=message)
def events(name):
    """Render the paginated event listing for a single crawler.

    Query string parameters:
      ``page``       1-based page number (defaults to 1; non-numeric
                     values fall back to 1 instead of raising).
      ``run_id``     restrict events to one run.
      ``level``      restrict events to one severity level.
      ``stage_name`` restrict events to one stage (takes precedence
                     over ``run_id``).
    """
    crawler = get_crawler(name)
    try:
        page = int(request.args.get('page', 1))
    except (TypeError, ValueError):
        # A garbage ?page= value used to bubble up as a 500; default
        # to the first page instead.
        page = 1
    start = (max(1, page) - 1) * PAGE_SIZE
    end = start + PAGE_SIZE
    run_id = request.args.get('run_id')
    level = request.args.get('level')
    stage_name = request.args.get('stage_name')
    if stage_name:
        events = Event.get_stage_events(crawler, stage_name, start, end, level)
    elif run_id:
        events = Event.get_run_events(crawler, run_id, start, end, level)
    else:
        events = Event.get_crawler_events(crawler, start, end, level)
    total = len(events)
    # Integer ceiling division; avoids the float round-trip of
    # math.ceil(float(total) / PAGE_SIZE) and is exact for any size.
    pages = -(-total // PAGE_SIZE)
    return render_template('events.html',
                           crawler=crawler,
                           results=events,
                           page=page,
                           pages=pages)
def crawler_stages(crawler):
    """See the number of executions of each stage."""
    summaries = []
    for stage in crawler:
        summary = Event.get_stage_counts(crawler, stage)
        summary.update(total_ops=stage.op_count, stage=stage)
        summaries.append(summary)
    return summaries
def crawlers_index():
    """Generate a list of all crawlers, sorted alphabetically, with op
    counts."""
    listing = []
    for crawler in manager:
        entry = Event.get_counts(crawler)
        entry.update({
            'last_active': crawler.last_run,
            'total_ops': crawler.op_count,
            'running': crawler.is_running,
            'crawler': crawler,
        })
        listing.append(entry)
    return listing
def index():
    """Render the crawler overview page, alphabetically, with op counts.

    The per-crawler listing was a byte-for-byte duplicate of
    ``crawlers_index``; delegate to it so the logic lives in one place.
    """
    return render_template('index.html', crawlers=crawlers_index())
def emit_exception(self, exc):
    """Log *exc* with its traceback and store it as an error event."""
    self.log.exception(exc)
    error_name = exc.__class__.__name__
    return Event.save(self.crawler, self.stage, Event.LEVEL_ERROR,
                      self.run_id, error=error_name, message=str(exc))
def emit_exception(self, exc):
    """Record an exception as an error-level event after logging it."""
    self.log.exception(exc)
    return Event.save(
        self.crawler,
        self.stage,
        Event.LEVEL_ERROR,
        self.run_id,
        error=exc.__class__.__name__,
        message=str(exc),
    )
def flush(self):
    """Delete all run-time data generated by this crawler."""
    # Same sequence as before: queue, then events, then crawl state.
    for wipe in (Queue.flush, Event.delete, Crawl.flush):
        wipe(self)