# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def _create_doc_event(cls, source):
    """Build a search.Document for *source*, or return None if it can't be indexed.

    A source is indexable only if it has Facebook info, a location, and a
    latitude (the search API's NumberFields cannot hold None).

    Args:
        source: an entity with fb_info, graph_id, name, latitude, longitude,
            and num_real_events attributes. (NOTE(review): presumably a
            datastore Source model — confirm against caller.)

    Returns:
        A search.Document ready for indexing, or None if the source lacks
        the data required to index it.
    """
    fb_info = source.fb_info
    if not fb_info:
        return None
    # TODO(lambert): find a way to index no-location sources.
    # As of now, the lat/long number fields cannot be None.
    # In what radius/location should no-location sources show up
    # and how do we want to return them
    # Perhaps a separate index that is combined at search-time?
    if fb_info.get('location', None) is None:
        return None
    if not source.latitude:
        return None
    # Map the free-text country name to an ISO 3166 alpha-2 code when possible.
    country = fb_info['location'].get('country', '').upper()
    if country in iso3166.countries_by_name:
        country_code = iso3166.countries_by_name[country].alpha2
    else:
        country_code = None
    doc_event = search.Document(
        doc_id=source.graph_id,
        fields=[
            search.TextField(name='name', value=source.name),
            search.TextField(name='description', value=fb_info.get('general_info', '')),
            search.NumberField(name='like_count', value=fb_info['likes']),
            search.TextField(name='category', value=fb_info['category']),
            search.TextField(name='category_list', value=', '.join(str(x['name']) for x in fb_info.get('category_list', []))),
            search.NumberField(name='latitude', value=source.latitude),
            search.NumberField(name='longitude', value=source.longitude),
            #search.TextField(name='categories', value=' '.join(source.auto_categories)),
            search.TextField(name='country', value=country_code),
            search.NumberField(name='num_real_events', value=source.num_real_events or 0),
        ],
    )
    return doc_event
def fetch_country_pages():
    """Fetch the map of URLs for the listings for each country"""
    listing_url = BASE_URL + 'allco.htm'
    soup = BeautifulSoup(requests.get(listing_url).content, 'html.parser')
    # Not naming things makes me a sad panda and parsing like this exist
    links = soup.findAll('table')[5].findAll('a')
    result = {}
    for link in links:
        name = link.text.upper()
        # Only keep countries we can resolve to an ISO 3166 alpha-2 code.
        if name in iso3166.countries_by_name:
            alpha2 = iso3166.countries_by_name[name].alpha2
            result[alpha2] = BASE_URL + link.attrs['href']
    return result