Diffstat (limited to 'src')
-rw-r--r--  src/ee/bom/doit.py                        71
-rw-r--r--  src/ee/digikey/doit.py                    91
-rw-r--r--  src/ee/doit.py                             6
-rw-r--r--  src/ee/ds/__init__.py                     22
-rw-r--r--  src/ee/tools/digikey_download_facts.py     1
5 files changed, 148 insertions, 43 deletions
diff --git a/src/ee/bom/doit.py b/src/ee/bom/doit.py
index bbb9241..521b8dd 100644
--- a/src/ee/bom/doit.py
+++ b/src/ee/bom/doit.py
@@ -5,7 +5,7 @@ from typing import Mapping, Union
from configclass import Config
from namedlist import namedlist
-from ee.ds import DataSetManager, DataSet
+from ee.ds import DataSetManager, DataSet, create_message
logger = logging.getLogger(__name__)
@@ -19,15 +19,20 @@ def change_data_sets_for_task(task, _callable):
def output_data_set_for_task(task):
- return _data_sets[task][0]
+ try:
+ return _data_sets[task][0]
+ except KeyError:
+ raise KeyError("No such task registered in this module: {}".format(task))
def input_data_sets_for_task(task):
- return _data_sets[task][1]
+ try:
+ return _data_sets[task][1]
+ except KeyError:
+ raise KeyError("No such task registered in this module: {}".format(task))
-_config_template = Config({
-})
+_config_template = Config({})
_config = None # type: Mapping[str, str]
@@ -79,9 +84,7 @@ def task_bom():
raise Exception("Missing ref")
if not mpn:
- output.create_object("message", "bom-{}".format(ref)). \
- set("level", "error"). \
- set("message", "Missing required field 'mpn'")
+ create_message(output, "Missing required field 'mpn'", "error")
continue
if ref in bom_components:
@@ -101,7 +104,7 @@ def task_bom():
_data_sets[task_bom] = ["bom", ["components"]]
-def order_csv(count: int, style: str, output_file: Path, data_sets):
+def order_csv(count: int, style: str, output_file: Path, data_sets) -> DataSet:
ds = _dsm.load_data_sets(data_sets)
out = DataSet()
@@ -114,26 +117,48 @@ def order_csv(count: int, style: str, output_file: Path, data_sets):
for c in [o for o in ds.items() if o.object_type.name == "bom-component"]:
ref = c.get("ref")
mpn = c.get("mpn")
- digikey_pn = c.get("digikey-pn")
+
+ dk_part = None
if style == "digikey":
- dpn = digikey_pn.strip() if digikey_pn else ""
+ dk_parts = [o.get_req("part-number") for o in ds.items() if
+ o.object_type.name == "component-to-part-mapping" and
+ o.has("seller", "ref", "part-number") and
+ o.get("seller") == "digikey" and
+ o.get("ref") == ref]
+
+ # The actual DK part should depend on the price breaks for the different packagings, but we don't have that
+ # info here. Luckily the DK store proposes cheaper packagings when ordering so that is something.
+
+ dk_parts = set(dk_parts)
+
+ if len(dk_parts) == 0:
+ create_message(out, "No part for component: ref={}, mpn={}".format(ref, mpn), "error")
+ continue
+ elif len(dk_parts) > 1:
+ create_message(out, "Multiple parts for component: ref={}, mpn={}. Don't know which one to select.".
+ format(ref, mpn), "error")
+ continue
+ else:
+ dk_part = next(iter(dk_parts))
- # TODO: implement part resolution
- # if len(dpn) == 0:
- # raise Exception("Missing digikey-pn for ref={}".format(ref))
+ dk_part = dk_part.strip()
+
+ if len(dk_part) == 0:
+ raise Exception("Missing digikey part number for ref={}".format(ref))
if mpn in parts:
part = parts[mpn]
- if part.digikey_pn != digikey_pn:
- raise Exception("Bad data, inconsistent digikey-pn for mpn '{}'. Original digikey-pn='{}', new "
- "digikey-pn='{}'".format(mpn, part.digikey_pn, digikey_pn))
+ if dk_part:
+ if part.digikey_pn != dk_part:
+ raise Exception("Bad data, inconsistent digikey-pn for mpn '{}'. First digikey part number='{}', "
+ "new digikey part number='{}'".format(mpn, part.digikey_pn, dk_part))
part.cnt += 1
part.refs.append(ref)
else:
- parts[mpn] = Part(mpn=mpn, cnt=1, refs=[ref], digikey_pn=digikey_pn)
+ parts[mpn] = Part(mpn=mpn, cnt=1, refs=[ref], digikey_pn=dk_part)
mpn_field = "MPN"
count_field = "Count"
@@ -163,10 +188,14 @@ def order_csv(count: int, style: str, output_file: Path, data_sets):
include_extra_fields=include_extra_fields)
-def create_task_order_csv(*, style: str = None, output_file: Union[str, Path], data_sets, count: int = 1):
+def create_task_order_csv(*, style: str = None, output_file: Union[str, Path], out_data_set, data_sets, count: int = 1):
+ def action():
+ with _dsm.create_rw(out_data_set, clean=True):
+ order_csv(count, style, Path(output_file), data_sets)
+
return {
"name": "order-{}".format(count) if not style else "order-{}-{}".format(style, count),
- "actions": [(order_csv, [count, style, Path(output_file), data_sets])],
+ "actions": [action],
"file_dep": [_dsm.cookie_for_ds(ds) for ds in data_sets],
- "targets": [output_file],
+ "targets": [_dsm.cookie_for_ds(out_data_set), output_file],
}
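
With this change, create_task_order_csv takes an out_data_set name in addition to the CSV path: the generated doit action opens that data set around the order_csv call, and the data set's cookie joins the CSV file in the task's targets. A minimal sketch of how a dodo file might register the task after this commit; the data-set names and output path are illustrative assumptions, and the module's DataSetManager is assumed to be configured elsewhere:

# dodo.py -- illustrative sketch only, not part of this commit
from pathlib import Path

import ee.bom.doit as bom_doit


def task_order_csv():
    # Assumed data-set names; a real project supplies its own.
    yield bom_doit.create_task_order_csv(
        style="digikey",
        output_file=Path("order-digikey-1.csv"),
        out_data_set="order-digikey-1",   # new required argument
        data_sets=["bom", "digikey-resolved-parts"],
        count=1)
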
diff --git a/src/ee/digikey/doit.py b/src/ee/digikey/doit.py
index 74259bb..2b30d9b 100644
--- a/src/ee/digikey/doit.py
+++ b/src/ee/digikey/doit.py
@@ -1,4 +1,6 @@
import logging
+from itertools import groupby
+from typing import List
import ee.digikey as dk
from ee.doit import DoItConfig
@@ -11,38 +13,48 @@ doit_config = DoItConfig()
def resolve_schematic_components(output: DataSet, in_ds: DataSet):
def find(field, value):
- return [o for o in output.items() if o.object_type.name == "digikey-part-stub" and o.get(field) == value]
-
- def save(p: dk.DigikeyProduct):
- logger.info("Found part, dpn={}, mpn={}".format(p.part_number, p.mpn))
-
- return output.create_object("digikey-part-stub", p.part_number, replace=True). \
- set("part-number", p.part_number). \
- set("mpn", p.mpn). \
- set("description", p.description). \
- set("quantity-available", p.quantity_available). \
- set("url", p.url)
+ return [o for o in output.items() if
+ o.object_type.name == "component-to-part-mapping" and o.get(field) == value]
+
+ def save(refs: List[str], p: dk.DigikeyProduct):
+ logger.info("Found part, dpn={}, mpn={}, refs={}".format(p.part_number, p.mpn, ", ".join(refs)))
+
+ for ref in refs:
+ # Make the key a bit long so we can have several parts that match the MPN from a single seller
+ key = "digikey-{}-{}".format(ref, p.part_number)
+ output.create_object("component-to-part-mapping", key, replace=True). \
+ set("seller", "digikey"). \
+ set("ref", ref). \
+ set("part-number", p.part_number). \
+ set("mpn", p.mpn). \
+ set("description", p.description). \
+ set("quantity-available", p.quantity_available). \
+ set("url", p.url)
digikey = dk.Digikey()
client = dk.DigikeyClient(digikey, on_download=logger.info)
- components = [o for o in in_ds.items() if o.object_type.name == "component"]
+ components = [(o.get_req("mpn"), o.get_req("ref")) for o in in_ds.items() if
+ o.object_type.name == "component" and o.has("mpn", "ref")]
+ components = sorted(components, key=lambda c: c[0])
+
+ for mpn, components in groupby(components, key=lambda c: c[0]):
+ references = [c[1] for c in components]
- for mpn in sorted({c.get("mpn") for c in components if c.get("mpn")}):
# TODO: support searching by value and digikey part number directly. Priority should be "digikey", "mpn" and
# "value", first field present should be used.
dk_components = find("mpn", mpn)
if len(dk_components):
- logger.info("Already resolved {} to {}".format(mpn, ", ".join([c.get("mpn") for c in dk_components])))
+ logger.info("Already resolved {} to {}".format(mpn, ", ".join(sorted(set([c.get("part-number") for c in dk_components])))))
continue
logger.info("Looking up {}".format(mpn))
response = client.search(mpn)
if response.response_type == dk.SearchResponseTypes.SINGLE:
- save(response.products[0])
+ save(references, response.products[0])
elif response.response_type == dk.SearchResponseTypes.MANY:
# A search for "FOO" might return products "FOO" and "FOOZ" so we pick out the ones with the matching mpn
# This will often be more than one product, but digikey shows the same part in different packagings.
@@ -60,7 +72,7 @@ def resolve_schematic_components(output: DataSet, in_ds: DataSet):
logger.info("Got multiple hits ({})".format(len(viable_products)))
- save(part)
+ save(references, part)
elif response.response_type == dk.SearchResponseTypes.TOO_MANY:
logger.warning("to many matches")
elif response.response_type == dk.SearchResponseTypes.NO_MATCHES:
@@ -85,3 +97,50 @@ def task_digikey_resolve_schematic_components():
doit_config.set_data_sets_for(task_digikey_resolve_schematic_components,
"digikey-resolved-parts", "components")
+
+
+def download_part_facts(output: DataSet, in_ds: DataSet):
+ digikey = dk.Digikey()
+ client = dk.DigikeyClient(digikey, on_download=logger.info)
+
+ parts = [o for o in in_ds.items()
+ if o.object_type.name == "component-to-part-mapping" and o.get("seller") == "digikey"]
+
+ for pn in sorted({part.get("part-number") for part in parts}):
+ logger.info("Downloading facts for {}".format(pn))
+
+ response = client.search(pn)
+
+ if response.response_type == dk.SearchResponseTypes.SINGLE:
+ product = response.products[0]
+
+ o = output.create_object("digikey-part", pn, replace=True). \
+ set("part-number", product.part_number). \
+ set("url", product.url). \
+ set("mpn", product.mpn)
+
+ for a in product.attributes:
+ key = "{}/{}".format(a.attribute_type.id, a.attribute_type.label)
+ key = key.replace("%", "_")
+ value = a.value.replace("%", "%%")
+ o.set(key, value)
+
+
+def task_digikey_fetch_full_part_facts():
+ out_data_set, in_data_sets = doit_config.data_sets_for(task_digikey_fetch_full_part_facts)
+
+ def action():
+ in_ds = doit_config.dsm.load_data_sets(in_data_sets)
+
+ with doit_config.dsm.create_rw(out_data_set, clean=False) as output:
+ download_part_facts(output, in_ds)
+
+ return dict(
+ file_dep=[doit_config.dsm.cookie_for_ds(ds) for ds in in_data_sets],
+ actions=[action],
+ targets=[doit_config.dsm.cookie_for_ds(out_data_set)],
+ )
+
+
+doit_config.set_data_sets_for(task_digikey_fetch_full_part_facts,
+ "digikey-parts", "digikey-resolved-parts")
diff --git a/src/ee/doit.py b/src/ee/doit.py
index 014af05..dc89fae 100644
--- a/src/ee/doit.py
+++ b/src/ee/doit.py
@@ -35,5 +35,11 @@ class DoItConfig(object):
def data_sets_for(self, task):
return self._data_sets[task]
+ def out_data_set_for(self, task):
+ return self.data_sets_for(task)[0]
+
+ def input_data_sets_for(self, task):
+ return self.data_sets_for(task)[1]
+
def set_data_sets_for(self, task, out_dataset: str, *in_datasets: str):
self._data_sets[task] = [out_dataset, in_datasets]
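
The two new accessors are small conveniences over data_sets_for: the first name passed to set_data_sets_for is the output data set, the rest are the inputs (stored as a tuple). An illustration with a placeholder task function:

from ee.doit import DoItConfig


def task_example():
    pass  # placeholder task function, for illustration only


config = DoItConfig()
config.set_data_sets_for(task_example, "digikey-parts", "digikey-resolved-parts")

print(config.out_data_set_for(task_example))     # digikey-parts
print(config.input_data_sets_for(task_example))  # ('digikey-resolved-parts',)
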
diff --git a/src/ee/ds/__init__.py b/src/ee/ds/__init__.py
index 14bbb91..e48cab6 100644
--- a/src/ee/ds/__init__.py
+++ b/src/ee/ds/__init__.py
@@ -49,6 +49,9 @@ class ObjectType(object):
self._fields.append(field)
return len(self._fields) - 1
+ def has(self, *keys: str):
+ return all([key in self._fields for key in keys])
+
class Object(object):
def __init__(self, ds: "DataSet", ot: ObjectType, key: str):
@@ -77,6 +80,9 @@ class Object(object):
for k in other._ot.fields:
self.set(k, other.get(k))
+ def has(self, *keys: str):
+ return self.object_type.has(*keys)
+
def get(self, key: str) -> Optional[str]:
idx = self._ot.index_of(key)
return self._data[idx] if idx is not None and idx < len(self._data) else None
@@ -293,7 +299,7 @@ class DataSetManager(object):
ds_dir = Path(_path) if Path(_path).is_absolute() else self._basedir / _path
ds_dir = ds_dir if ds_dir.is_dir() else ds_dir.parent
- logger.debug("Loading DS from '{}'".format(ds_dir))
+ # logger.debug("Loading DS from '{}'".format(ds_dir))
self._load_ini(ds_dir / "data-set.ini")
@@ -304,12 +310,12 @@ class DataSetManager(object):
continue
ot = ot_path.name
- logger.debug(" Loading type '{}'".format(ot))
+ # logger.debug(" Loading type '{}'".format(ot))
for o_path in ot_path.glob("*.ini"):
count += 1
key = o_path.name[:-4]
- logger.debug(" Loading key '{}'".format(key))
+ # logger.debug(" Loading key '{}'".format(key))
ini = self._load_ini(o_path)
o = ds.create_object(ot, key)
for k, v in ini.items("values"):
@@ -318,13 +324,13 @@ class DataSetManager(object):
if freeze:
ds.freeze()
- logger.debug("Loaded {} items".format(count))
+ # logger.debug("Loaded {} items".format(count))
return ds
def store(self, ds: DataSet, ds_name: str):
ds_dir = self._basedir / ds_name
items = list(ds.items())
- logger.info("Storing DS '{}' with {} objects to {}".format(ds_name, len(items), ds_dir))
+ # logger.info("Storing DS '{}' with {} objects to {}".format(ds_name, len(items), ds_dir))
os.makedirs(ds_dir, exist_ok=True)
ini = self._blank_ini()
@@ -428,3 +434,9 @@ class LazyRwDataSet(object):
def __exit__(self, *args):
self._dsm.store(self._ds, self._name)
return False
+
+
+def create_message(data_set: DataSet, message: str, level: str):
+ return data_set.create_object("message", "message-{}".format(str(hash(message)))). \
+ set("message", message). \
+ set("level", level)
diff --git a/src/ee/tools/digikey_download_facts.py b/src/ee/tools/digikey_download_facts.py
index 923b996..ee4cf20 100644
--- a/src/ee/tools/digikey_download_facts.py
+++ b/src/ee/tools/digikey_download_facts.py
@@ -1,5 +1,4 @@
import argparse
-from itertools import *
from functools import total_ordering
import ee.digikey as dk