-rw-r--r--  demo/doit/dodo.py             | 12
-rw-r--r--  demo/doit/ee/digikey-100.csv  |  4
-rw-r--r--  demo/doit/ee/digikey.csv      |  4
-rw-r--r--  demo/doit/ee/order-100.csv    |  4
-rw-r--r--  src/ee/bom/doit.py            | 80
-rw-r--r--  src/ee/ds/__init__.py         | 14
6 files changed, 85 insertions, 33 deletions
diff --git a/demo/doit/dodo.py b/demo/doit/dodo.py
index c5372fc..145c0a7 100644
--- a/demo/doit/dodo.py
+++ b/demo/doit/dodo.py
@@ -33,7 +33,13 @@ ee.bom.doit.init(
 def task_orders():
     data_sets = [ee.bom.doit.output_data_set_for_task(task_bom)]
+    yield create_task_order_csv(
+        output_file="ee/order.csv",
+        data_sets=data_sets)
+
     for size in [1, 100]:
-        yield create_task_order_csv("ee/order-{}.csv".format(size) if size != 1 else "ee/order.csv",
-                                    data_sets,
-                                    count=size)
+        yield create_task_order_csv(
+            output_file="ee/digikey-{}.csv".format(size) if size != 1 else "ee/digikey.csv",
+            style="digikey",
+            data_sets=data_sets,
+            count=size)
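Note: task_orders above follows doit's sub-task convention: a task_* generator yields one dict per output file, and each dict carries its own name, actions and targets. A minimal, self-contained sketch of the same pattern (independent of the ee helpers; the write_order action and file names are illustrative only):

    from pathlib import Path

    def write_order(count, style, output_file):
        # Stand-in action; the real pipeline calls ee's order_csv here.
        Path(output_file).write_text("MPN,Count,References\n")

    def task_orders():
        # Plain order list, always quantity 1.
        yield {
            "name": "order-1",
            "actions": [(write_order, [1, None, "ee/order.csv"])],
            "targets": ["ee/order.csv"],
        }
        # Digi-Key style exports for 1 and 100 boards, addressable as
        # orders:order-digikey-1 and orders:order-digikey-100.
        for size in [1, 100]:
            out = "ee/digikey-{}.csv".format(size) if size != 1 else "ee/digikey.csv"
            yield {
                "name": "order-digikey-{}".format(size),
                "actions": [(write_order, [size, "digikey", out])],
                "targets": [out],
            }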
diff --git a/demo/doit/ee/digikey-100.csv b/demo/doit/ee/digikey-100.csv
new file mode 100644
index 0000000..bad6173
--- /dev/null
+++ b/demo/doit/ee/digikey-100.csv
@@ -0,0 +1,4 @@
+Digi-Key Part Number,Customer Reference,Quantity,MPN
+,BT1,100,BC2AAPC
+,"R2,R1",200,RMCF1206JT10K0
+,C1,100,UCA2G100MPD1TD
diff --git a/demo/doit/ee/digikey.csv b/demo/doit/ee/digikey.csv
new file mode 100644
index 0000000..a813638
--- /dev/null
+++ b/demo/doit/ee/digikey.csv
@@ -0,0 +1,4 @@
+Digi-Key Part Number,Customer Reference,Quantity,MPN
+,BT1,1,BC2AAPC
+,"R2,R1",2,RMCF1206JT10K0
+,C1,1,UCA2G100MPD1TD
diff --git a/demo/doit/ee/order-100.csv b/demo/doit/ee/order-100.csv
deleted file mode 100644
index 4f5b439..0000000
--- a/demo/doit/ee/order-100.csv
+++ /dev/null
@@ -1,4 +0,0 @@
-MPN,Count,References
-BC2AAPC,100,BT1
-RMCF1206JT10K0,200,"R2,R1"
-UCA2G100MPD1TD,100,C1
diff --git a/src/ee/bom/doit.py b/src/ee/bom/doit.py
index f793611..bbb9241 100644
--- a/src/ee/bom/doit.py
+++ b/src/ee/bom/doit.py
@@ -53,6 +53,8 @@ class BomComponent(object):
 def task_bom():
     """
+    Creates 'bom-component' from 'component'.
+
     Takes all schematic components, filters out all virtual/non-
     physical components (like power flags and ground components) and
     creates 'bom-component' objects.
@@ -99,40 +101,72 @@ def task_bom():
 _data_sets[task_bom] = ["bom", ["components"]]
-def order_csv(count: int, group_by_mpn: bool, output_file: Path, data_sets):
+def order_csv(count: int, style: str, output_file: Path, data_sets):
     ds = _dsm.load_data_sets(data_sets)
     out = DataSet()
-    if group_by_mpn:
-        parts = {}
+    parts = {}
+
+    # noinspection PyPep8Naming
+    Part = namedlist("Part", "mpn, cnt, refs, digikey_pn")
+
+    for c in [o for o in ds.items() if o.object_type.name == "bom-component"]:
+        ref = c.get("ref")
+        mpn = c.get("mpn")
+        digikey_pn = c.get("digikey-pn")
+
+        if style == "digikey":
+            dpn = digikey_pn.strip() if digikey_pn else ""
+
+            # TODO: implement part resolution
+            # if len(dpn) == 0:
+            #     raise Exception("Missing digikey-pn for ref={}".format(ref))
+
+        if mpn in parts:
+            part = parts[mpn]
+
+            if part.digikey_pn != digikey_pn:
+                raise Exception("Bad data, inconsistent digikey-pn for mpn '{}'. Original digikey-pn='{}', new "
+                                "digikey-pn='{}'".format(mpn, part.digikey_pn, digikey_pn))
+
+            part.cnt += 1
+            part.refs.append(ref)
+        else:
+            parts[mpn] = Part(mpn=mpn, cnt=1, refs=[ref], digikey_pn=digikey_pn)
+
+    mpn_field = "MPN"
+    count_field = "Count"
+    refs_field = "References"
+
+    if style == "digikey":
+        count_field = "Quantity"
+        refs_field = "Customer Reference"
+
+    for part in sorted(parts.values(), key=lambda p: p.mpn):
+        o = out.create_object("row", part.mpn). \
+            set(mpn_field, part.mpn). \
+            set(count_field, part.cnt * count). \
+            set(refs_field, ",".join(part.refs))
-        Part = namedlist("Part", "mpn, cnt, refs")
+        if style == "digikey":
+            o.set("Digi-Key Part Number", part.digikey_pn)
-        for c in [o for o in ds.items() if o.object_type.name == "bom-component"]:
-            ref = c.get("ref")
-            mpn = c.get("mpn")
-            if mpn in parts:
-                parts[mpn].cnt += 1
-                parts[mpn].refs.append(ref)
-            else:
-                parts[mpn] = Part(mpn=mpn, cnt=1, refs=[ref])
+    fields = None
+    include_extra_fields = True
-        for part in sorted(parts.values(), key=lambda p: p.mpn):
-            out.create_object("row", part.mpn). \
-                set("MPN", part.mpn). \
-                set("Count", part.cnt * count). \
-                set("References", ",".join(part.refs))
+    if style == "digikey":
+        fields = ["Digi-Key Part Number", refs_field, count_field, mpn_field]
+        include_extra_fields = False
-        _dsm.store_csv(output_file, out, "row", order_by="MPN")
-    else:
-        raise Exception("Not implemented")
+    _dsm.store_csv(output_file, out, "row", order_by=mpn_field, fields=fields,
+                   include_extra_fields=include_extra_fields)
-def create_task_order_csv(output_file: Union[str, Path], data_sets, count: int = 1):
+def create_task_order_csv(*, style: str = None, output_file: Union[str, Path], data_sets, count: int = 1):
     return {
-        "name": "order-{}".format(count),
-        "actions": [(order_csv, [count, True, Path(output_file), data_sets])],
+        "name": "order-{}".format(count) if not style else "order-{}-{}".format(style, count),
+        "actions": [(order_csv, [count, style, Path(output_file), data_sets])],
         "file_dep": [_dsm.cookie_for_ds(ds) for ds in data_sets],
         "targets": [output_file],
     }
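Note: the rewritten order_csv leans on namedlist (a third-party, mutable counterpart to collections.namedtuple), which is what lets part.cnt += 1 and part.refs.append(ref) update a grouped part in place. A stripped-down sketch of that grouping step, using literal dicts in place of the loaded data set:

    from namedlist import namedlist

    # Mutable record; collections.namedtuple would not allow cnt += 1.
    Part = namedlist("Part", "mpn, cnt, refs, digikey_pn")

    # Illustrative rows; the real code reads 'bom-component' objects from the data set.
    components = [
        {"ref": "BT1", "mpn": "BC2AAPC", "digikey-pn": ""},
        {"ref": "R1", "mpn": "RMCF1206JT10K0", "digikey-pn": ""},
        {"ref": "R2", "mpn": "RMCF1206JT10K0", "digikey-pn": ""},
    ]

    parts = {}
    for c in components:
        mpn = c["mpn"]
        if mpn in parts:
            part = parts[mpn]
            if part.digikey_pn != c["digikey-pn"]:
                raise Exception("Inconsistent digikey-pn for mpn '{}'".format(mpn))
            part.cnt += 1
            part.refs.append(c["ref"])
        else:
            parts[mpn] = Part(mpn=mpn, cnt=1, refs=[c["ref"]], digikey_pn=c["digikey-pn"])

    # Multiply cnt by the requested board count to get the order quantity.
    for part in sorted(parts.values(), key=lambda p: p.mpn):
        print(part.mpn, part.cnt * 100, ",".join(part.refs))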
diff --git a/src/ee/ds/__init__.py b/src/ee/ds/__init__.py
index 030113b..f71a7c6 100644
--- a/src/ee/ds/__init__.py
+++ b/src/ee/ds/__init__.py
@@ -347,7 +347,8 @@ class DataSetManager(object):
     # noinspection PyMethodMayBeStatic
     def store_csv(self, path: Union[str, Path], ds: DataSet, object_type: str,
-                  order_by: Union[str, Iterable[str]] = None):
+                  order_by: Union[str, Iterable[str]] = None, fields: List[str] = None,
+                  include_extra_fields: bool = True):
         items = [o for o in ds.items() if o.object_type.name == object_type]
         if order_by:
@@ -359,10 +360,17 @@ class DataSetManager(object):
                 raise Exception("Unsupported order_by")
         with open(path, "w") as f:
-            w = csv.writer(f)
+            w = csv.writer(f, lineterminator=os.linesep)
             if len(items):
-                header = ds.get_object_type(object_type).fields
+
+                if fields is not None:
+                    header = list(fields)
+
+                    if include_extra_fields:
+                        header.extend(f for f in ds.get_object_type(object_type).fields if f not in header)
+                else:
+                    header = ds.get_object_type(object_type).fields
                 w.writerow(header)
                 for o in items:
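Note: the store_csv change does two things: it pins the CSV line terminator to os.linesep, and it lets callers pass an explicit column list (fields) plus an include_extra_fields switch that appends any remaining object-type fields after the requested ones. A self-contained sketch of the intended header handling, using plain dict rows instead of data-set objects:

    import csv
    import os

    def store_csv(path, rows, all_fields, fields=None, include_extra_fields=True):
        if fields is not None:
            header = list(fields)
            if include_extra_fields:
                # Append the known fields that were not explicitly requested.
                header.extend(f for f in all_fields if f not in header)
        else:
            header = list(all_fields)

        with open(path, "w", newline="") as f:
            w = csv.writer(f, lineterminator=os.linesep)
            w.writerow(header)
            for row in rows:
                w.writerow([row.get(field, "") for field in header])

    # Digi-Key export: fixed column order, no extra columns.
    store_csv("digikey.csv",
              [{"MPN": "BC2AAPC", "Quantity": 1, "Customer Reference": "BT1",
                "Digi-Key Part Number": ""}],
              all_fields=["MPN", "Quantity", "Customer Reference", "Digi-Key Part Number"],
              fields=["Digi-Key Part Number", "Customer Reference", "Quantity", "MPN"],
              include_extra_fields=False)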