Add the filter argument.

This commit is contained in:
Lukáš Kucharczyk 2020-06-28 21:45:14 +02:00
parent 2ab1331c92
commit 9864ad2617
1 changed file with 4 additions and 1 deletion

View File

@ -6,12 +6,15 @@ from bs4 import BeautifulSoup
if __name__ == '__main__':
    # CLI: fetch a page and print absolute URLs for every link whose href
    # matches a user-supplied regular expression (defaults to "looks like a file name").
    parser = argparse.ArgumentParser(description="Dig out links from a website.")
    parser.add_argument('site', type=str, help="Website that you want to scrape for links.")
    # Raw string: '\.' is an invalid escape sequence in a plain literal
    # (DeprecationWarning today, SyntaxWarning from Python 3.12). Same pattern, now explicit.
    parser.add_argument('-f', '--filter', type=str, default=r'\..*$',
                        help="Only return filenames matching this regular expression.")
    args = parser.parse_args()

    h = httplib2.Http('.cache')  # '.cache' is an on-disk HTTP cache directory
    response, content = h.request(args.site)
    # NOTE(review): no parser argument — bs4 picks the "best" installed parser,
    # which can vary between machines; consider BeautifulSoup(content, 'html.parser').
    s = BeautifulSoup(content)

    # Find only hrefs that look like file names (match the filter regex).
    links = s.find_all(href=re.compile(args.filter))
    # find_all returns a ResultSet, never the integer 0, so the old
    # `links == 0` comparison was dead code; emptiness check is sufficient.
    if not links:
        exit("No filenames found with the given filter.")
    for link in links:
        # hrefs are assumed to be relative to args.site — TODO confirm for absolute hrefs
        print(args.site + link['href'])