Diffstat (limited to 'test/test_digikey.py')
-rw-r--r--  test/test_digikey.py | 46
1 file changed, 32 insertions(+), 14 deletions(-)
diff --git a/test/test_digikey.py b/test/test_digikey.py
index 733dcf7..2b3cd12 100644
--- a/test/test_digikey.py
+++ b/test/test_digikey.py
@@ -3,7 +3,8 @@ from itertools import groupby
from pathlib import Path
import pytest
-import requests
+from selenium import webdriver
+from selenium.webdriver.common.keys import Keys
import ee.digikey as dk
@@ -12,6 +13,8 @@ static_copies = basedir / "digikey" / "static-copies" # type: Path
digikey = dk.Digikey()
client = dk.DigikeyClient(digikey, cache_dir=basedir / "digikey" / "http_cache", on_download=print)
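+# Selenium Chrome driver shared by the tests below; created lazily in cache_file().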
+driver: webdriver.Chrome = None
+force_refresh = False # Set to True to always fetch the updated html files
@pytest.mark.digikey
@@ -41,12 +44,12 @@ def test_digikey_1(tmpdir):
@pytest.mark.digikey
def test_digikey_2():
- content = cache_file("https://www.digikey.com/products/en?lang=en&site=us&keywords=TCR2LF&pageSize=500", "TCR2LF")
+ content = cache_file("https://www.digikey.com/products/en?lang=en&site=us&&pageSize=500", "TCR2LF")
res = client.parse_string(content)
assert res.response_type == dk.SearchResponseTypes.MANY
[print("dpn={}, mpn={}".format(p.part_number, p.mpn)) for p in res.products]
- assert len(res.products) == 28
+ assert len(res.products) > 10
for mpn, parts in groupby(sorted(res.products, key=lambda p: p.mpn), lambda p: p.mpn):
parts = list(parts)
@@ -67,32 +70,47 @@ def test_digikey_2():
@pytest.mark.digikey
def test_digikey_3():
- content = cache_file("https://www.digikey.com/products/en?lang=en&site=us&pageSize=10", "S1MTR")
+ content = cache_file("https://www.digikey.com/products/en?lang=en&site=us&pageSize=10", "RS1MTR")
res = client.parse_string(content)
assert res.response_type == dk.SearchResponseTypes.MANY
[print("dpn={}, mpn={}".format(p.part_number, p.mpn)) for p in res.products]
- assert len(res.products) == 1
- p = res.products[0]
- assert p.part_number == "1655-1506-1-ND"
- assert p.mpn == "S1MTR"
- assert p.url == "/product-detail/en/smc-diode-solutions/S1MTR/1655-1506-1-ND/6022951"
+ assert len(res.products) > 0
+
+    # Look up the expected product by its Digi-Key part number instead of relying on result order.
+    p = next((p for p in res.products if p.part_number == "1655-1501-1-ND"), None)
+    assert p is not None
+    assert p.mpn == "RS1MTR"
+ assert p.url == "/product-detail/en/smc-diode-solutions/RS1MTR/1655-1501-1-ND/6022946"
def cache_file(url, keyword):
path = static_copies / "search-{}.html".format(keyword)
+ if force_refresh and path.is_file():
+ path.unlink()
+
if not path.is_file():
path.parent.mkdir(parents=True, exist_ok=True)
- res = requests.get(url, params=dict(keywords=keyword))
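+    # Append the search keyword to the URL as a URL-encoded query parameter.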
+ from urllib.parse import quote
+ url = url + "&keywords=" + quote(keyword)
+ print("GET {}".format(url))
- assert res.status_code == 200
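+    # Download the page with a real Chrome browser; a single instance is started on first use and reused.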
+ global driver
+ if driver is None:
+ options = webdriver.ChromeOptions()
+        driver = webdriver.Chrome(options=options)
- with open(path, "w") as f:
- f.write(res.text)
+ driver.get(url)
+
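+    # Cache the rendered page so later runs can parse it without re-downloading.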
+ with open(str(path), "w") as f:
+ f.write(driver.page_source)
assert path.stat().st_size > 0
- with open(path, "r") as f:
+ with open(str(path), "r") as f:
content = f.read()
return content
+
+def teardown_module():
+    # Close the shared Chrome instance once the tests in this module have finished.
+    if driver is not None:
+        driver.quit()