def test():
    requestHandler = BaseRequestHandler()
    jobs = []
    st = time.time()
    # fan out 100 concurrent requests
    for i in xrange(100):
        jobs.append(gevent.spawn(requestHandler.handle, {'url': 'http://baidu.com'}))
    for job in jobs:
        print job.get()  # .get() blocks until that greenlet has finished
    gevent.joinall(jobs)  # returns immediately here: every job was already waited on above
    print time.time() - st
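A minimal, self-contained sketch of the same spawn/joinall fan-out; the fetch worker and example.com URLs below are placeholders, not part of the snippet above. It also shows that results can be read from Greenlet.value once joinall has returned, instead of blocking on get() per job.

import time
import gevent

def fetch(url):
    # placeholder worker; real code would issue the HTTP request here
    gevent.sleep(0.1)
    return 'fetched %s' % url

def run_batch():
    start = time.time()
    jobs = [gevent.spawn(fetch, 'http://example.com/%d' % i) for i in range(100)]
    gevent.joinall(jobs)                   # block until every greenlet has finished
    results = [job.value for job in jobs]  # .value is safe to read after joinall
    print('%d results in %.2f s' % (len(results), time.time() - start))

if __name__ == '__main__':
    run_batch()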
def leave_network(self):
    """
    Create a spider to tell close peers we won't be available to respond
    to requests.
    """
    self.save()
    threads = []
    for node in self:
        threads.append(gevent.spawn(self.protocol.rpc_leaving, node))
    if threads:
        log("%s: Telling peers we're leaving the network." % self.network)
        gevent.joinall(threads)
def poll_supervisors(self):
    tasks = [spawn(supervisor.refresh)
             for supervisor in self.supervisors.values()]
    joinall(tasks)
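A hedged sketch of the same refresh fan-out, adding joinall's optional timeout and raise_error arguments; the Supervisor class here is a stand-in, not the one used above.

import gevent

class Supervisor(object):
    """Stand-in supervisor; refresh() just sleeps and returns its name."""
    def __init__(self, name):
        self.name = name

    def refresh(self):
        gevent.sleep(0.05)
        return self.name

def poll(supervisors):
    tasks = [gevent.spawn(s.refresh) for s in supervisors]
    # timeout bounds the wait; raise_error=True re-raises the first greenlet failure
    gevent.joinall(tasks, timeout=2, raise_error=True)
    return [t.value for t in tasks if t.ready()]

if __name__ == '__main__':
    print(poll([Supervisor('a'), Supervisor('b')]))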
            # (fragment) inner loop of the per-node worker _run(i, coin): draw successive coin bits
            b = next(coin)
            print '[', i, '] bit[%d]:' % r, b
            r += 1
        print '[', i, '] done'

    # one greenlet per node, each started after a random delay
    ts = []
    for i in range(N):
        bc = makeBroadcast(i)
        recv = buffers[i].get
        coin = shared_coin_dummy(i, N, t, bc, recv)
        th = Greenlet(_run, i, coin)
        th.start_later(random.random() * maxdelay)
        ts.append(th)
    try:
        gevent.joinall(ts)
    except gevent.hub.LoopExit:
        # the hub ran out of runnable greenlets; the test treats this as a normal stop
        pass
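A runnable sketch of the start_later pattern used above, with a placeholder worker in place of the shared-coin logic; the node count and delays are arbitrary.

import random
import gevent
from gevent import Greenlet

def worker(i):
    # placeholder body; the test above drew bits from a shared coin here
    gevent.sleep(random.random() * 0.1)
    print('[%d] done' % i)

def run(n=4, maxdelay=0.2):
    ts = []
    for i in range(n):
        th = Greenlet(worker, i)
        th.start_later(random.random() * maxdelay)  # stagger the start times
        ts.append(th)
    try:
        gevent.joinall(ts)
    except gevent.hub.LoopExit:
        # raised when the hub has nothing left to schedule; treated as a clean stop, as above
        pass

if __name__ == '__main__':
    run()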
def crawl_worker(queue_verification, queue_persistence):
    """
    Worker that checks the availability of crawled proxies.
    :param queue_verification: queue of proxies awaiting verification
    :param queue_persistence: queue of verified proxies awaiting persistence
    :return:
    """
    while True:
        spawns = list()
        for i in range(config.COROUTINE_NUM):
            proxy = queue_verification.get()
            # check each proxy over both schemes concurrently
            spawns.append(gevent.spawn(availability.check.crawl_handle, 'http', proxy, queue_persistence))
            spawns.append(gevent.spawn(availability.check.crawl_handle, 'https', proxy, queue_persistence))
        gevent.joinall(spawns)
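A self-contained sketch of the same batch shape, assuming a gevent queue of proxy strings and a placeholder check function in place of availability.check.crawl_handle.

import gevent
from gevent.queue import Queue

def check(scheme, proxy, out_q):
    # stand-in availability check; real code would make a request through the proxy
    gevent.sleep(0.01)
    out_q.put((scheme, proxy))

def verify_batch(in_q, out_q, batch_size=4):
    spawns = []
    for _ in range(batch_size):
        proxy = in_q.get()  # blocks cooperatively until a proxy is available
        spawns.append(gevent.spawn(check, 'http', proxy, out_q))
        spawns.append(gevent.spawn(check, 'https', proxy, out_q))
    gevent.joinall(spawns)  # finish the whole batch before pulling the next one

if __name__ == '__main__':
    in_q, out_q = Queue(), Queue()
    for p in ('1.2.3.4:80', '5.6.7.8:8080', '9.9.9.9:3128', '7.7.7.7:8000'):
        in_q.put(p)
    verify_batch(in_q, out_q)
    print(out_q.qsize(), 'checks finished')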
def __init__(self, game_id, boxscore=None, playbyplays=None):
    self.game_id = game_id
    self._boxscore = boxscore
    self._playbyplay = playbyplays
    global _async_fetch
    if not self._boxscore or not self._playbyplay:
        api = Api()
        if not _async_fetch:
            # fetch the three endpoints sequentially
            self._boxscore = api.GetBoxscore(game_id)
            self._playbyplay = api.GetPlayByPlay(game_id)
            self._boxscore_summary = api.GetBoxscoreSummary(game_id)
        else:
            # fetch all three endpoints concurrently
            box_job = gevent.spawn(api.GetBoxscore, game_id)
            pbp_job = gevent.spawn(api.GetPlayByPlay, game_id)
            bs_job = gevent.spawn(api.GetBoxscoreSummary, game_id)
            gevent.joinall([box_job, pbp_job, bs_job])
            self._boxscore = box_job.value
            self._playbyplay = pbp_job.value
            self._boxscore_summary = bs_job.value
    self._matchups = None
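A sketch of the same concurrent-fetch-then-read-.value pattern with a stand-in Api class and a placeholder game id; note that .value is None if a greenlet raised, while .get() would re-raise the exception.

import gevent

class Api(object):
    """Stand-in for the real stats client; each call just sleeps and echoes."""
    def get_boxscore(self, game_id):
        gevent.sleep(0.1)
        return {'boxscore': game_id}

    def get_playbyplay(self, game_id):
        gevent.sleep(0.1)
        return {'playbyplay': game_id}

def fetch_game(game_id):
    api = Api()
    box_job = gevent.spawn(api.get_boxscore, game_id)
    pbp_job = gevent.spawn(api.get_playbyplay, game_id)
    gevent.joinall([box_job, pbp_job])  # both calls run concurrently
    # .value is None if a greenlet raised; .get() would re-raise the exception instead
    return box_job.value, pbp_job.value

if __name__ == '__main__':
    print(fetch_game('example-game-id'))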
def handle(self, source, address):
    init_data = source.recv(BUFFER_SIZE)
    try:
        if len(init_data) > 3 and init_data[:3] == b'GET':
            # answer plain HTTP probes directly
            source.sendall(b'HTTP/1.1 200 OK\r\n' + format_date_time(time.time()).encode() + b'\r\n\r\nOK')
            return
        else:
            dest = create_connection(self.tcp_service)
    except IOError as ex:
        sys.stderr.write('Error on create connection: {}'.format(ex))
        return
    # pump bytes in both directions until either side closes
    forwarders = (
        gevent.spawn(forward, source, dest, self),
        gevent.spawn(forward, dest, source, self),
    )
    gevent.joinall(forwarders)
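A sketch of the two-directional pump joined with joinall; to stay self-contained it swaps the sockets for gevent queues and a hypothetical pump function, so only the concurrency shape matches the handler above.

import gevent
from gevent.queue import Queue

def pump(src, dst, label):
    # copy items from src to dst until a None sentinel arrives
    while True:
        item = src.get()
        if item is None:
            break
        dst.put('%s:%s' % (label, item))

def relay(client_in, client_out, server_in, server_out):
    forwarders = (
        gevent.spawn(pump, client_in, server_out, 'c2s'),
        gevent.spawn(pump, server_in, client_out, 's2c'),
    )
    gevent.joinall(forwarders)  # returns only after both directions have shut down

if __name__ == '__main__':
    a_in, a_out, b_in, b_out = Queue(), Queue(), Queue(), Queue()
    a_in.put('hello')
    a_in.put(None)
    b_in.put('world')
    b_in.put(None)
    relay(a_in, a_out, b_in, b_out)
    print(b_out.get(), a_out.get())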
def run(self):
    while True:
        self.proxies.clear()
        str = u'IPProxyPool----->>>>>>>>beginning'
        sys.stdout.write(str + "\r\n")
        sys.stdout.flush()
        proxylist = sqlhelper.select()
        myip = getMyIP()
        # re-validate every stored proxy concurrently
        spawns = []
        for proxy in proxylist:
            spawns.append(
                gevent.spawn(detect_from_db, myip, proxy, self.proxies)
            )
        gevent.joinall(spawns)
        self.db_proxy_num.value = len(self.proxies)
        str = u'IPProxyPool----->>>>>>>>db exists ip:%d' % len(self.proxies)
        if len(self.proxies) < MINNUM:
            str += u'\r\nIPProxyPool----->>>>>>>>now ip num < MINNUM,start crawling...'
            sys.stdout.write(str + "\r\n")
            sys.stdout.flush()
            self.crawl_pool.map(self.crawl, parserList)
        else:
            str += u'\r\nIPProxyPool----->>>>>>>>now ip num meet the requirement,wait UPDATE_TIME...'
            sys.stdout.write(str + "\r\n")
            sys.stdout.flush()
        time.sleep(UPDATE_TIME)
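A sketch of bounding crawl concurrency with gevent.pool.Pool, assuming crawl_pool above is such a pool; the crawl function and parser names are placeholders.

import gevent
from gevent.pool import Pool

def crawl(parser_name):
    # stand-in crawler; real code would fetch and parse a proxy listing page
    gevent.sleep(0.05)
    return 'crawled %s' % parser_name

if __name__ == '__main__':
    pool = Pool(5)  # at most five crawlers run at once
    parsers = ['site_%d' % i for i in range(12)]
    for result in pool.map(crawl, parsers):
        print(result)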
    buffers = map(lambda _: Queue(1), range(N))
    ts = []
    for i in range(N):
        bc = makeBroadcast(i)
        recv = buffers[i].get
        input_clone = [MonitoredInt() for _ in range(N)]
        for j in range(N):
            greenletPacker(Greenlet(modifyMonitoredInt, input_clone[j]),
                           'random_delay_acs.modifyMonitoredInt', (N, t, inputs)).start_later(maxdelay * random.random())
        th = greenletPacker(Greenlet(acs, i, N, t, input_clone, bc, recv), 'random_delay_acs.acs', (N, t, inputs))
        th.start()  # start_later(random.random() * maxdelay) is not necessary here
        ts.append(th)
    try:
        gevent.joinall(ts)
        break  # leave the enclosing loop once every greenlet has joined
    except gevent.hub.LoopExit:  # manual fix for early stop
        print "End"
def get_entity(expr, attr='Id,AA.AuId,AA.AfId,J.JId,C.CId,RId,F.FId,CC', thl=[]):
    t_l = []
    q = Queue.Queue()
    entities = []
    pe_l = []
    try:
        # launch two fetcher threads and take the first answer that lands on the queue
        for i in xrange(2):
            t_l.append(threading.Thread(target=get_entity_one, args=(expr, q, i, attr)))
            t_l[i].start()
        gevent.joinall(thl)
        ans = q.get()
        entities = json.loads(ans)['entities']
    except:
        # fall back to a direct synchronous request
        para['expr'] = expr
        para['attributes'] = attr
        ans = get_ans_http(para)
        entities = json.loads(ans)['entities']
    for en in entities:
        pe = Paper_Entity(en)
        pe_l.append(pe)
    return pe_l