# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_method_error_request():
    """An unsupported HTTP method (PUT) must raise ``InvalidRequestMethod``.

    If ``fetch()`` unexpectedly succeeds, the fallback assertion on an empty
    ``response.html`` fails the test, so a silently-accepted PUT is still
    reported. Catching ``InvalidRequestMethod`` specifically (instead of a
    broad ``Exception`` plus an ``isinstance`` assert) lets any *other*
    exception propagate and fail the test with its real traceback.
    """
    try:
        request = Request("https://httpbin.org/", method="PUT")
        response = asyncio.get_event_loop().run_until_complete(request.fetch())
        # Reached only when no exception was raised: flag unexpected success.
        assert response.html == ""
    except InvalidRequestMethod:
        # Expected failure mode for a disallowed method.
        pass
async def make_post_request(sem, callback):
    """POST the module-level ``params`` to httpbin, guarded by *sem*.

    Returns whatever ``Request.fetch_callback`` yields (callback result
    and response).
    """
    post_headers = {"Content-Type": "application/json"}
    post_request = Request(
        "https://httpbin.org/post",
        method="POST",
        headers=post_headers,
        data=params,
        callback=callback,
    )
    result = await post_request.fetch_callback(sem)
    return result
def test_request_config():
    """End-to-end check of GET/POST requests, callback results and metadata."""
    assert str(Request("https://httpbin.org/")) == ""
    loop = asyncio.get_event_loop()

    # GET: the callback result and the metadata must round-trip.
    _, response = loop.run_until_complete(make_get_request(sem=sem, callback=hello))
    assert response.callback_result == "hello ruia"
    assert response.metadata == {"hello": "ruia"}
    json_result = loop.run_until_complete(response.json())
    assert json_result["args"]["name"] == "ruia"

    # POST: httpbin echoes the form body back under "data".
    _, response = loop.run_until_complete(make_post_request(sem=sem, callback=None))
    json_result = loop.run_until_complete(response.json())
    assert json_result["data"] == "name=ruia"
async def request_example():
    """Fetch httpbin's /get endpoint as JSON and verify the echoed request."""
    req = Request(
        url="https://httpbin.org/get",
        method="GET",
        res_type="json",
        params={"name": "ruia"},
        headers={"User-Agent": "Python3.6"},
    )
    resp = await req.fetch()
    # httpbin echoes both the query string and the request headers.
    assert resp.html["args"]["name"] == "ruia"
    assert resp.html["headers"]["User-Agent"] == "Python3.6"
async def parse(self, response):
    """Re-request the response URL, delegating item parsing to ``parse_item``."""
    follow_up = Request(
        url=response.url,
        callback=self.parse_item,
        headers=self.headers,
        request_config=self.request_config,
        **self.kwargs,
    )
    yield follow_up
async def timeout_request(sem):
    """Issue a GET configured to time out (0.1s) with a single retry."""
    config = {"RETRIES": 1, "DELAY": 1, "TIMEOUT": 0.1}
    req = Request(
        "https://httpbin.org/get",
        method="GET",
        metadata={"hello": "ruia"},
        encoding="utf-8",
        request_config=config,
        params=params,
        callback=hi,
    )
    return await req.fetch_callback(sem)
# Created by howie.hu at 2019/1/28.
import asyncio
from lxml import etree
from ruia import Request
async def hello(response):
    """Request callback used by the module-level fixture; ignores *response*."""
    greeting = "hello ruia"
    return greeting
# Module-level fixtures shared by the tests below: a semaphore capping
# concurrent fetches at 3, the query parameters that httpbin echoes back,
# and a GET request whose callback result is inspected by test_response.
sem = asyncio.Semaphore(3)
params = {"name": "ruia"}
request = Request(
    "https://httpbin.org/get",
    method="GET",
    metadata={"hello": "ruia"},
    params=params,
    callback=hello,
)
# NOTE(review): this fetch executes at import time (network I/O on import);
# consider moving it into a test fixture so importing the module stays cheap.
_, response = asyncio.get_event_loop().run_until_complete(request.fetch_callback(sem))
def test_response():
    """Smoke-test that the fetched response exposes its basic attributes.

    Accessing each attribute raises if the response object is malformed;
    no value assertions are made (matching the original test's behavior).
    """
    for attribute in ("url", "method", "encoding", "html", "metadata", "cookies"):
        getattr(response, attribute)
async def parse(self, response):
    """Queue item requests for the first two Hacker News listing pages."""
    self.mongo_db = MotorBase().get_db("ruia_test")
    start_urls = (
        "https://news.ycombinator.com/news?p=1",
        "https://news.ycombinator.com/news?p=2",
    )
    for page_index, page_url in enumerate(start_urls):
        # The page index travels in metadata so parse_item can order results.
        yield Request(page_url, callback=self.parse_item, metadata={"index": page_index})
async def get_proxy_ip(valid: int = 1) -> str:
    """Fetch one proxy address from the remote proxy service.

    Args:
        valid: validity flag forwarded to the proxy API. (Bug fix: the
            original hard-coded ``1`` in the payload, silently ignoring
            this parameter.)

    Returns:
        The proxy string reported by the service, or '' when the request
        fails or the payload lacks the expected keys.
    """
    # random_server = ['http://0.0.0.0:8662/']
    proxy_server = CONFIG.REMOTE_SERVER.get('proxy_server')
    # proxy_server = random_server[random.randint(0, 1)]
    kwargs = {
        'json': {
            "act_id": 1704,
            "version": "1.0",
            "data": {
                "valid": valid  # use the parameter instead of a literal 1
            }
        }
    }
    res = await Request(url=proxy_server, method='POST', res_type='json', **kwargs).fetch()
    proxy = ''
    if res.status == 200:
        # Defensive lookup: a missing 'info' key previously raised
        # AttributeError; defaults keep the '' contract of the annotation.
        proxy = (res.html.get('info') or {}).get('proxy', '')
    return proxy
#!/usr/bin/env python
"""
Created by howie.hu at 2018/11/22.
"""
import asyncio
import async_timeout
import pyppeteer
from typing import Optional
from ruia import Request
from ruia.response import Response
from ruia_pyppeteer.response import PyppeteerResponse
class PyppeteerRequest(Request):
def __init__(
self,
url: str,
method: str = "GET",
*,
callback=None,
encoding: Optional[str] = None,
headers: dict = None,
metadata: dict = None,
request_config: dict = None,
request_session=None,
load_js: bool = True,
pyppeteer_args: list = None,
pyppeteer_launch_options: dict = None,
pyppeteer_page_options: dict = None,
pyppeteer_viewport: dict = None,