"""Scrape all absolute HTTP(S) links from a webpage, write them to
README.txt (one per line), and bundle that file into links.zip."""

import os
import zipfile

import requests
from bs4 import BeautifulSoup

# URL of the webpage to scrape.
url = 'http://example.com'

# Fetch the page; fail fast on HTTP errors (4xx/5xx) instead of
# silently parsing an error page.
response = requests.get(url)
response.raise_for_status()
soup = BeautifulSoup(response.text, 'html.parser')

# Collect every absolute link on the page. Relative hrefs (and anchors
# with no href at all) are skipped by the startswith('http') guard.
links = []
for link in soup.find_all('a'):
    href = link.get('href')
    if href and href.startswith('http'):
        links.append(href)

# Write one link per line to README.txt.
with open('README.txt', 'w') as f:
    for link in links:
        f.write("%s\n" % link)

# Bundle README.txt into a compressed zip archive.
with zipfile.ZipFile('links.zip', 'w', zipfile.ZIP_DEFLATED) as zip_file:
    zip_file.write('README.txt')
    # NOTE: archiving the linked resources themselves would require
    # downloading each URL's content first — deliberately not done here.