import sys
from playwright.sync_api import sync_playwright
from bs4 import BeautifulSoup
import time
import json
import os

# --- Command-line argument parsing ---
# Usage: python script.py "search query" number_of_pages
# The final argument must be the page count; everything before it is the query.
if len(sys.argv) < 3:
    print(" Usage: python script.py \"search query\" number_of_pages")
    sys.exit(1)

args = sys.argv[1:]
if args[-1].isdigit():
    total_pages = int(args.pop())
else:
    print(" Please specify the number of pages at the end.")
    sys.exit(1)

# Join the remaining words with "-" to form the AliExpress search slug
# (e.g. "usb cable" -> "usb-cable").
user_input = "-".join(args)

products_data = []  # accumulates one dict per scraped product card
base_url = f"https://www.aliexpress.us/w/wholesale-{user_input}.html"

def _absolutize(url):
    """Make a scraped URL absolute: prefix protocol-relative '//...' URLs with
    'https:'; leave already-absolute or empty URLs untouched.

    (The original code prepended 'https:' unconditionally, which yields
    'https:https://...' for absolute URLs and a bare 'https:' when the
    attribute is missing.)
    """
    return "https:" + url if url.startswith("//") else url


with sync_playwright() as p:
    # A persistent context keeps cookies/session data between runs in
    # ./browser-data, which helps avoid repeated bot challenges.
    browser = p.chromium.launch_persistent_context(
        user_data_dir=os.getcwd() + '/browser-data', headless=True
    )
    page = browser.new_page()

    for page_number in range(1, total_pages + 1):
        print(f"Scraping page {page_number}...")
        page.goto(f"{base_url}?page={page_number}")
        page.wait_for_selector('#card-list')

        # Scroll down repeatedly so lazy-loaded product cards render.
        for _ in range(12):
            page.evaluate("window.scrollBy(0, window.innerHeight)")
            time.sleep(1)

        html = page.inner_html("#card-list")
        soup = BeautifulSoup(html, "lxml")

        # NOTE(review): the class names below look auto-generated/obfuscated
        # and will likely break when AliExpress redeploys — verify selectors.
        names = [el.text.strip() for el in soup.find_all('h3', {"class": "lq_jl"})]
        prices = [el.text.strip() for el in soup.find_all("div", {"class": "lq_j3"})]
        solds = [el.text.strip() for el in soup.find_all("span", {"class": "lq_jg"})]
        # Fall back to data-src for lazily-loaded images; empty entries stay
        # empty (list length is preserved so the zip below stays aligned).
        images = [_absolutize(img.get('src') or img.get('data-src') or '')
                  for img in soup.find_all("img", {"class": "l9_be"})]
        store_names = [el.text.strip() for el in soup.find_all("span", {"class": "io_ip"})]
        store_links = [_absolutize(a['href'])
                       for a in soup.find_all("a", {"class": "io_ir"})]
        product_links = [_absolutize(a['href'])
                         for a in soup.find_all("a", {"class": "lq_b io_it search-card-item"})]

        # zip() truncates to the shortest list, silently dropping cards with
        # missing fields — best-effort scraping behavior, kept as-is.
        for name, price, sold, image, store, store_link, product_link in zip(
                names, prices, solds, images, store_names, store_links, product_links):
            products_data.append({
                "Product Name": name,
                "Price": price,
                "Sold": sold,
                "Image URL": image,
                "Store Name": store,
                "Store Link": store_link,
                "Product Link": product_link,
            })

    browser.close()

# Persist all scraped products as pretty-printed, UTF-8 JSON named after the query.
output_file = f"{user_input}_products.json"
serialized = json.dumps(products_data, ensure_ascii=False, indent=4)
with open(output_file, "w", encoding="utf-8") as out_fh:
    out_fh.write(serialized)

# print(f"\n✅ Extracted {len(products_data)} products and saved them in {output_file}")