import requests
from bs4 import BeautifulSoup

# URL pattern for the paginated post listing and how many pages to fetch.
URL_PATTERN = "https://niggasin.space/user/6311/posts?p={}"
TOTAL_PAGES = 5
OUTPUT_FILE = "all_posts.txt"
# Without a timeout, requests.get can block forever on a stalled connection.
REQUEST_TIMEOUT = 30


def fetch_page_posts(session: requests.Session, page_num: int) -> list[str]:
    """Fetch one listing page and return the text of every post body on it.

    Raises requests.HTTPError on a non-2xx response so failures are not
    silently parsed as empty pages.
    """
    url = URL_PATTERN.format(page_num)
    response = session.get(url, timeout=REQUEST_TIMEOUT)
    response.raise_for_status()  # fail loudly on 4xx/5xx instead of scraping an error page
    soup = BeautifulSoup(response.content, "html.parser")
    posts = []
    for block in soup.find_all("div", class_="post-block"):
        post_body = block.find("div", class_="post-body")
        # Some post blocks may lack a body div; skip those rather than crash.
        if post_body:
            posts.append(post_body.get_text())
    return posts


def write_posts(posts: list[str], path: str) -> None:
    """Write each post to *path*, numbered and separated by a blank line."""
    with open(path, "w", encoding="utf-8") as f:
        for i, post in enumerate(posts, start=1):
            f.write(f"[post {i}]\n{post}\n\n")


def main() -> None:
    """Scrape all pages sequentially and dump the collected posts to disk."""
    all_posts = []
    # A Session reuses the TCP connection across the sequential page fetches.
    with requests.Session() as session:
        for page_num in range(1, TOTAL_PAGES + 1):
            all_posts.extend(fetch_page_posts(session, page_num))
    write_posts(all_posts, OUTPUT_FILE)


if __name__ == "__main__":
    main()