Skip to content

Commit

Permalink
Fix #15041: Update download tests after /all refactor
Browse files Browse the repository at this point in the history
  • Loading branch information
robhudson committed Sep 3, 2024
1 parent 6ec135c commit 72f17ec
Showing 1 changed file with 24 additions and 8 deletions.
32 changes: 24 additions & 8 deletions tests/functional/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

import pytest
import requests
from bs4 import BeautifulSoup
from pyquery import PyQuery as pq

TIMEOUT = 60

Expand Down Expand Up @@ -81,17 +81,33 @@ def pytest_generate_tests(metafunc):
if markexpr == "download":
    base_url = metafunc.config.getoption("base_url")
    if "download_path" in metafunc.fixturenames:
        # Collect every download link on the /download/thanks/ page.
        doc = get_web_page(f"{base_url}/en-US/firefox/download/thanks/")
        urls = [a.attrib["href"] for a in doc("ul.download-list a")]
        # Bug 1266682: skip Play Store (and App Store) links to avoid
        # store rate limiting in automation.
        skip_urls = ("https://play.google.com", "https://apps.apple.com")
        urls = [url for url in urls if not any(s in url for s in skip_urls)]
        assert urls
        metafunc.parametrize("download_path", urls)

    elif "download_path_l10n" in metafunc.fixturenames:
        # Walk the /firefox/all/ hierarchy: product -> platform -> language,
        # collecting the final download links at the leaf pages.
        urls = []
        doc = get_web_page(f"{base_url}/en-US/firefox/all/")
        product_urls = [a.attrib["href"] for a in doc("ul.c-product-list a")]
        # If a product url links outside of /firefox/all/ ignore it (e.g. TestFlight).
        product_urls = [url for url in product_urls if url.startswith("/en-US/firefox/all/")]
        for product_url in product_urls:
            product_doc = get_web_page(f"{base_url}{product_url}")
            platform_urls = [a.attrib["href"] for a in product_doc("ul.c-platform-list a")]
            for platform_url in platform_urls:
                platform_doc = get_web_page(f"{base_url}{platform_url}")
                lang_urls = [a.attrib["href"] for a in platform_doc("ul.c-lang-list a")]
                for lang_url in lang_urls:
                    lang_doc = get_web_page(f"{base_url}{lang_url}")
                    download_urls = [a.attrib["href"] for a in lang_doc("a.download-link")]
                    # Only check links to download.mozilla.org.
                    urls.extend(
                        link for link in download_urls if link.startswith("https://download.mozilla.org/")
                    )
        assert urls
        metafunc.parametrize("download_path_l10n", urls)

Expand All @@ -102,4 +118,4 @@ def get_web_page(url):
except requests.RequestException:
# retry
r = requests.get(url, timeout=TIMEOUT, headers={"accept-language": "en"})
return BeautifulSoup(r.content, "html.parser")
return pq(r.content)

0 comments on commit 72f17ec

Please sign in to comment.