def get_links(browser, page, logger, media, element):
    links = []
    try:
        # Get image links in scope from hashtag, location and other pages
        link_elems = element.find_elements_by_xpath('//a[starts-with(@href, "/p/")]')
        sleep(2)
        if link_elems:
            for link_elem in link_elems:
                try:
                    post_href = link_elem.get_attribute("href")
                    post_elem = element.find_elements_by_xpath(
                        "//a[@href='/p/" + post_href.split("/")[-2] + "/']/child::div"
                    )
                    if len(post_elem) == 1 and "Photo" in media:
                        # Single photo
                        links.append(post_href)
                    if len(post_elem) == 2:
                        # Carousel or Video
                        post_category = element.find_element_by_xpath(
                            "//a[@href='/p/"
                        except Exception as err:
                            print('Image check error: {}'.format(err))
                            self.logFile.write('Image check error: {}\n'.format(err))

                        if self.do_comment and user_name not in self.dont_include \
                                and checked_img and commenting:
                            if temp_comments:
                                # Use clarifai related comments only!
                                comments = temp_comments
                            elif is_video:
                                comments = self.comments + self.video_comments
                            else:
                                comments = self.comments + self.photo_comments
                            commented += comment_image(self.browser, comments)
                        else:
                            print('--> Not commented')
                            sleep(1)
                    else:
                        already_liked += 1
                else:
                    print('--> Image not liked: {}'.format(reason))
                    inap_img += 1
            except NoSuchElementException as err:
                print('Invalid Page: {}'.format(err))
                self.logFile.write('Invalid Page: {}\n'.format(err))

            print('')
            self.logFile.write('\n')

        if liked_img < amount:
            print('-------------')
posts_count = get_number_of_posts(browser)
attempt = 0

if posts_count is not None and amount > posts_count:
    logger.info(
        "You have requested to get {} posts from {}'s profile page BUT"
        " there are only {} posts available :D".format(amount, person, posts_count)
    )
    amount = posts_count

while len(links) < amount:
    initial_links = links
    browser.execute_script("window.scrollTo(0, document.body.scrollHeight);")
    # update server calls after a scroll request
    update_activity(browser, state=None)
    sleep(0.66)

    # `extend` or `+=` would mutate the list in place, so `initial_links`
    # (bound to the same object) would change as well; rebinding with `+`
    # keeps the previous snapshot intact (an explicit `copy()` would work
    # too); see the short illustration after this loop
    main_elem = browser.find_element_by_tag_name("article")
    links = links + get_links(browser, person, logger, media, main_elem)
    links = sorted(set(links), key=links.index)

    if len(links) == len(initial_links):
        if attempt >= 7:
            logger.info(
                "There are possibly fewer posts than {} in {}'s profile "
                "page!".format(amount, person)
            )
            break
        else:
            attempt += 1
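
# A minimal standalone illustration (not InstaPy code) of the aliasing the
# comment above warns about: `+=` extends the existing list object, so every
# name bound to it sees the change, while `+` builds a new list and leaves
# the earlier snapshot untouched.
links = ["/p/aaa/", "/p/bbb/"]
initial_links = links            # both names refer to the same list object
links += ["/p/ccc/"]             # in-place extend: initial_links changes too
assert initial_links == ["/p/aaa/", "/p/bbb/", "/p/ccc/"]

links = ["/p/aaa/", "/p/bbb/"]
initial_links = links
links = links + ["/p/ccc/"]      # rebinding to a new list: snapshot preserved
assert initial_links == ["/p/aaa/", "/p/bbb/"]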
                                                self.video_comments)
                                else:
                                    comments = (self.comments +
                                                self.photo_comments)
                                commented += comment_image(self.browser,
                                                           user_name,
                                                           comments,
                                                           self.blacklist,
                                                           self.logger,
                                                           self.logfolder)
                            else:
                                self.logger.info(disapproval_reason)
                        else:
                            self.logger.info('--> Not commented')
                            sleep(1)
                    else:
                        already_liked += 1
                else:
                    self.logger.info(
                        '--> Image not liked: {}'.format(reason.encode('utf-8')))
                    inap_img += 1
            except NoSuchElementException as err:
                self.logger.error('Invalid Page: {}'.format(err))

        if liked_img < amount:
            self.logger.info('-------------')
            self.logger.info("--> Given amount not fulfilled, "
                             "image pool reached its end\n")
            button_change = True
        elif following_status in post_action_text_fail:
            button_change = False
        else:
            logger.error(
                "Hey! Last {} could not be verified due to an unexpected "
                "failure!".format(action)
            )
            return False, "unexpected"

        if button_change:
            break
        else:
            if retry_count == 1:
                reload_webpage(browser)
                sleep(4)

            elif retry_count == 2:
                # handle it!
                # try to do the action one more time!
                click_visibly(browser, follow_button)

                if action == "unfollow":
                    confirm_unfollow(browser)

                sleep(4)

            elif retry_count == 3:
                logger.warning(
                    "Last {0} could not be verified."
                    "\t~'{1}' might be temporarily blocked "
                    "from {0}ing\n".format(action, username)
                )
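
# A self-contained sketch of the retry pattern above (hypothetical helper
# names, not InstaPy's actual verify_action): poll the UI state after an
# action and escalate per attempt: first refresh the page, then redo the
# action, then give up so the caller can report a possible block.
import time


def verify_with_retries(read_status, redo_action, refresh, expected,
                        max_retries=3, wait=4):
    """Return True once read_status() reports `expected`, retrying a few times."""
    for retry_count in range(1, max_retries + 1):
        if read_status() == expected:
            return True
        if retry_count == 1:
            refresh()         # a stale page may only need a reload
        elif retry_count == 2:
            redo_action()     # maybe the first click never registered
        time.sleep(wait)
    return False              # still unverified: likely blocked or broken


# e.g. (illustrative names): verify_with_retries(lambda: follow_button.text,
#                                                press_follow, browser.refresh,
#                                                expected="Following")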
nap = 1.5
put_sleep = 0

try:
    while filtered_links in range(1, amount):  # i.e. 1 <= filtered_links < amount
        if sc_rolled > 100:
            logger.info("Scrolled too much! ~ sleeping a bit :>")
            sleep(600)
            sc_rolled = 0

        for i in range(3):
            browser.execute_script(
                "window.scrollTo(0, document.body.scrollHeight);"
            )
            update_activity(browser, state=None)
            sc_rolled += 1
            # without this nap, Instagram on a slow connection registers only
            # one scroll instead of the several scroll commands that were sent
            sleep(nap)

        sleep(3)
        links.extend(get_links(browser, location, logger, media, main_elem))

        links_all = links  # uniqify links while preserving order
        s = set()
        links = []
        for i in links_all:
            if i not in s:
                s.add(i)
                links.append(i)

        if len(links) == filtered_links:
            try_again += 1
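
# Aside (not InstaPy code): the order-preserving de-duplication above can be
# written more compactly. Since Python 3.7, dict keys keep insertion order,
# so this one-liner behaves the same as the seen-set loop.
links = ["/p/a/", "/p/b/", "/p/a/", "/p/c/", "/p/b/"]
links = list(dict.fromkeys(links))
assert links == ["/p/a/", "/p/b/", "/p/c/"]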
    def shuffle_actions(self, actions):
        if len(actions) <= 1:
            sleep(7 * 60)
            return actions

        old_order = actions[:]
        random.shuffle(actions)

        # avoid opening the new round with the action that closed the last
        # one: rotate left by one if the shuffle put it first again
        if actions[0] == old_order[-1]:
            actions = actions[1:] + actions[0:1]

        return actions
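
# A quick standalone check (illustrative, outside the class) of the rotation
# guard: after reshuffling, the first action of the new round never repeats
# the action that ended the previous round, provided the actions are unique.
import random

actions = ["like", "comment", "follow", "unfollow"]
previous_last = actions[-1]
random.shuffle(actions)
if actions[0] == previous_last:
    actions = actions[1:] + actions[0:1]
assert actions[0] != previous_last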
        if sleep_counter >= sleep_after and sleep_delay not in [0, None]:
            delay_random = random.randint(
                ceil(sleep_delay * 0.85), ceil(sleep_delay * 1.14)
            )
            logger.info(
                "Unfollowed {} new users ~sleeping about {}\n".format(
                    sleep_counter,
                    "{} seconds".format(delay_random)
                    if delay_random < 60
                    else "{} minutes".format(
                        truncate_float(delay_random / 60, 2)
                    ),
                )
            )
            sleep(delay_random)
            sleep_counter = 0
            sleep_after = random.randint(8, 12)
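
        # For a sense of the jitter above (illustrative numbers, not from the
        # source): with sleep_delay = 600 the pause is drawn uniformly between
        # ceil(600 * 0.85) = 510 and ceil(600 * 1.14) = 684 seconds, roughly
        # 8.5 to 11.4 minutes, after which the counter and threshold reset.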
        if person not in dont_include:
            logger.info(
                "Ongoing Unfollow [{}/{}]: now unfollowing '{}'...".format(
                    unfollowNum + 1, amount, person.encode("utf-8")
                )
            )

            person_id = (
                automatedFollowedPool["all"][person]["id"]
                if person in automatedFollowedPool["all"].keys()
                else False
            )