Diffstat (limited to 'src')
-rw-r--r--  src/ee/bom/doit.py     | 80
-rw-r--r--  src/ee/ds/__init__.py  | 14
2 files changed, 68 insertions, 26 deletions
diff --git a/src/ee/bom/doit.py b/src/ee/bom/doit.py
index f793611..bbb9241 100644
--- a/src/ee/bom/doit.py
+++ b/src/ee/bom/doit.py
@@ -53,6 +53,8 @@ class BomComponent(object):
 
 def task_bom():
     """
+    Creates 'bom-component' from 'component'.
+
     Takes all schematic components, filters out all virtual/non-
     physical components (like power flags and ground components) and
     creates 'bom-component' objects.
@@ -99,40 +101,72 @@ def task_bom():
 _data_sets[task_bom] = ["bom", ["components"]]
 
 
-def order_csv(count: int, group_by_mpn: bool, output_file: Path, data_sets):
+def order_csv(count: int, style: str, output_file: Path, data_sets):
     ds = _dsm.load_data_sets(data_sets)
 
     out = DataSet()
 
-    if group_by_mpn:
-        parts = {}
+    parts = {}
+
+    # noinspection PyPep8Naming
+    Part = namedlist("Part", "mpn, cnt, refs, digikey_pn")
+
+    for c in [o for o in ds.items() if o.object_type.name == "bom-component"]:
+        ref = c.get("ref")
+        mpn = c.get("mpn")
+        digikey_pn = c.get("digikey-pn")
+
+        if style == "digikey":
+            dpn = digikey_pn.strip() if digikey_pn else ""
+
+            # TODO: implement part resolution
+            # if len(dpn) == 0:
+            #     raise Exception("Missing digikey-pn for ref={}".format(ref))
+
+        if mpn in parts:
+            part = parts[mpn]
+
+            if part.digikey_pn != digikey_pn:
+                raise Exception("Bad data, inconsistent digikey-pn for mpn '{}'. Original digikey-pn='{}', new "
+                                "digikey-pn='{}'".format(mpn, part.digikey_pn, digikey_pn))
+
+            part.cnt += 1
+            part.refs.append(ref)
+        else:
+            parts[mpn] = Part(mpn=mpn, cnt=1, refs=[ref], digikey_pn=digikey_pn)
+
+    mpn_field = "MPN"
+    count_field = "Count"
+    refs_field = "References"
+
+    if style == "digikey":
+        count_field = "Quantity"
+        refs_field = "Customer Reference"
+
+    for part in sorted(parts.values(), key=lambda p: p.mpn):
+        o = out.create_object("row", part.mpn). \
+            set(mpn_field, part.mpn). \
+            set(count_field, part.cnt * count). \
+            set(refs_field, ",".join(part.refs))
 
-        Part = namedlist("Part", "mpn, cnt, refs")
+        if style == "digikey":
+            o.set("Digi-Key Part Number", part.digikey_pn)
 
-        for c in [o for o in ds.items() if o.object_type.name == "bom-component"]:
-            ref = c.get("ref")
-            mpn = c.get("mpn")
-            if mpn in parts:
-                parts[mpn].cnt += 1
-                parts[mpn].refs.append(ref)
-            else:
-                parts[mpn] = Part(mpn=mpn, cnt=1, refs=[ref])
+    fields = None
+    include_extra_fields = True
 
-        for part in sorted(parts.values(), key=lambda p: p.mpn):
-            out.create_object("row", part.mpn). \
-                set("MPN", part.mpn). \
-                set("Count", part.cnt * count). \
-                set("References", ",".join(part.refs))
+    if style == "digikey":
+        fields = ["Digi-Key Part Number", refs_field, count_field, mpn_field]
+        include_extra_fields = False
 
-        _dsm.store_csv(output_file, out, "row", order_by="MPN")
-    else:
-        raise Exception("Not implemented")
+    _dsm.store_csv(output_file, out, "row", order_by=mpn_field, fields=fields,
+                   include_extra_fields=include_extra_fields)
 
 
-def create_task_order_csv(output_file: Union[str, Path], data_sets, count: int = 1):
+def create_task_order_csv(*, style: str = None, output_file: Union[str, Path], data_sets, count: int = 1):
     return {
-        "name": "order-{}".format(count),
-        "actions": [(order_csv, [count, True, Path(output_file), data_sets])],
+        "name": "order-{}".format(count) if not style else "order-{}-{}".format(style, count),
+        "actions": [(order_csv, [count, style, Path(output_file), data_sets])],
         "file_dep": [_dsm.cookie_for_ds(ds) for ds in data_sets],
         "targets": [output_file],
     }
diff --git a/src/ee/ds/__init__.py b/src/ee/ds/__init__.py
index 030113b..f71a7c6 100644
--- a/src/ee/ds/__init__.py
+++ b/src/ee/ds/__init__.py
@@ -347,7 +347,8 @@ class DataSetManager(object):
 
     # noinspection PyMethodMayBeStatic
     def store_csv(self, path: Union[str, Path], ds: DataSet, object_type: str,
-                  order_by: Union[str, Iterable[str]] = None):
+                  order_by: Union[str, Iterable[str]] = None, fields: List[str] = None,
+                  include_extra_fields: bool = True):
         items = [o for o in ds.items() if o.object_type.name == object_type]
 
         if order_by:
@@ -359,10 +360,17 @@ class DataSetManager(object):
             raise Exception("Unsupported order_by")
 
         with open(path, "w") as f:
-            w = csv.writer(f)
+            w = csv.writer(f, lineterminator=os.linesep)
 
             if len(items):
-                header = ds.get_object_type(object_type).fields
+
+                if fields is not None:
+                    header = list(fields)
+
+                    if include_extra_fields:
+                        header.append(set(ds.get_object_type(object_type).fields) - set(header))
+                else:
+                    header = ds.get_object_type(object_type).fields
                 w.writerow(header)
 
                 for o in items:
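Usage note: the sketch below (not part of this commit) illustrates how the new keyword-only
create_task_order_csv() signature might be driven from a project's dodo.py. The data set name
"bom", the output paths, and the counts are illustrative placeholders, not values taken from
this repository.

    from ee.bom.doit import create_task_order_csv

    def task_order():
        # Yields one sub-task per count. With style="digikey" the sub-task is named
        # e.g. "order-digikey-1" and the CSV gets Digi-Key column headers
        # ("Digi-Key Part Number", "Customer Reference", "Quantity", "MPN").
        for count in (1, 10):
            yield create_task_order_csv(style="digikey",
                                        output_file="order/digikey-{}.csv".format(count),
                                        data_sets=["bom"],
                                        count=count)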