author     Trygve Laugstøl <trygvis@inamo.no>   2019-10-25 13:33:07 +0200
committer  Trygve Laugstøl <trygvis@inamo.no>   2019-10-25 13:33:07 +0200
commit     d49e748f4605f26d5b65caea05773ea8a2dd0eb0 (patch)
tree       b2edbef56e6116066ae1402d81dd6f3051cb78e5
parent     1f1d918acf653457fef33d2c7784dd0c71ab513a (diff)
digikey search: Updating expressions for exact part match.
Also formatting the output a little better.
-rw-r--r--  src/ee/digikey/__init__.py      |  4
-rw-r--r--  src/ee/digikey/search_parts.py  | 12
2 files changed, 8 insertions(+), 8 deletions(-)
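The __init__.py hunks below loosen two XPath expressions: instead of requiring a <tr> with class "exactPart" and a <table> with id "exactPartList", the parser now matches any element carrying that class or id, so it keeps working if Digikey changes the surrounding tags. A minimal sketch of the difference using lxml; the markup here is made up for illustration, not Digikey's actual page structure:

    from lxml import html

    # Hypothetical fragment: the exact-part hit is no longer a <tr> and the
    # container is no longer a <table>.
    doc = html.fromstring("""
    <div id="exactPartList">
      <div class="exactPart">
        <span><a href="/product-detail/x">PART-1</a></span>
      </div>
    </div>
    """)

    # Old, tag-specific expressions miss the elements entirely:
    print(len(doc.xpath(".//tr[@class='exactPart']")))      # 0
    print(len(doc.xpath("//table[@id='exactPartList']")))   # 0

    # New expressions match on class/id alone, regardless of tag:
    print(len(doc.xpath(".//*[@class='exactPart']")))       # 1
    print(len(doc.xpath("//*[@id='exactPartList']")))       # 1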
diff --git a/src/ee/digikey/__init__.py b/src/ee/digikey/__init__.py
index b993c31..cc57771 100644
--- a/src/ee/digikey/__init__.py
+++ b/src/ee/digikey/__init__.py
@@ -511,7 +511,7 @@ class DigikeyParser(object):
         return len(products)

     def _handle_exact_part_list(self, origin_url, tree: html, res: DigikeySearchResponse):
-        products = tree.xpath(".//tr[@class='exactPart']")
+        products = tree.xpath(".//*[@class='exactPart']")

         for product in products:
             a = _first((a for a in product.xpath(".//td/span/a[@href]")))
@@ -537,7 +537,7 @@ class DigikeyParser(object):
         if count:
             product_table = _first(tree.xpath("//table[@id='productTable']"))
-            exact_part_list = _first(tree.xpath("//table[@id='exactPartList']"))
+            exact_part_list = _first(tree.xpath("//*[@id='exactPartList']"))

             if product_table is not None:
                 res = DigikeySearchResponse(count, SearchResponseTypes.MANY)
diff --git a/src/ee/digikey/search_parts.py b/src/ee/digikey/search_parts.py
index 1f1184a..07459b1 100644
--- a/src/ee/digikey/search_parts.py
+++ b/src/ee/digikey/search_parts.py
@@ -124,13 +124,13 @@ class QueryEngine(object):
             filtered_products = [p for p in response.products if get_field(p) == pn]

             if len(filtered_products) == 0:
-                print("No items matched the query.", file=self.log)
+                print("No items matched the query.\n", file=self.log)
                 result = "not-found"
             else:
                 part = sorted(filtered_products, key=lambda p: p.part_number)[0]
-                print("Found {} matching products, but their facts are the same so picked ``{}`` for more info.".format(
-                    len(filtered_products), part.part_number), file=self.log)
+                print("Found {} matching products, but their facts are the same so picked ``{}`` for more info.\n".
+                      format(len(filtered_products), part.part_number), file=self.log)

                 page = self.client.get_for_product_url(part.url, part.part_number)
                 response = self.parser.parse_string(self.client.baseurl, page)
@@ -140,8 +140,8 @@ class QueryEngine(object):
                     self._collect_categories(product)
                     result = "found"
                 else:
-                    print("Unable to narrow down the part, got {} new products. Giving up.".format(
-                        len(response.products)), file=self.log)
+                    print("Unable to narrow down the part, got {} new products. Giving up.\n".
+                          format(len(response.products)), file=self.log)
                     result = "many"

         elif response.response_type == SearchResponseTypes.TOO_MANY:
@@ -153,7 +153,7 @@ class QueryEngine(object):
         if out_part.uri not in self.uri_idx:
             self.out_parts.add(out_part)

-        print("\nResult: {}".format(result), file=self.log)
+        print("Result: {}".format(result), file=self.log)
         print("", file=self.log)

         return response.response_type
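The search_parts.py edits only change the log layout: each message now ends in "\n" (print() appends its own newline, so every entry is followed by a blank line), and the leading "\n" moves off the final "Result:" line. A small stand-alone sketch of the resulting spacing, writing to sys.stdout in place of self.log:

    import sys

    log = sys.stdout  # stand-in for self.log

    # The trailing "\n" in the message plus print()'s own newline
    # leaves a blank line after each entry.
    print("No items matched the query.\n", file=log)
    print("Result: {}".format("not-found"), file=log)
    print("", file=log)

    # Output:
    # No items matched the query.
    #
    # Result: not-found
    #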