author     Trygve Laugstøl <trygvis@inamo.no>   2018-07-28 21:59:29 +0200
committer  Trygve Laugstøl <trygvis@inamo.no>   2018-07-28 21:59:29 +0200
commit     de9e6739543fd6a589b80184e5452a6a5cdb1196 (patch)
tree       14caec7b165dfa0c0b35e0a94eb47b2f6a58e2be /src/ee
parent     44c436ce849f9155706109e767fe7b1666172f7e (diff)
o Dumping MpnBomComponent, wtf.
o Renaming bom.csv to order.csv.
Diffstat (limited to 'src/ee')
-rw-r--r--  src/ee/bom/doit.py     | 133
-rw-r--r--  src/ee/doit.py         |  16
-rw-r--r--  src/ee/ds/__init__.py  | 138
-rw-r--r--  src/ee/kicad/doit.py   | 125
4 files changed, 248 insertions(+), 164 deletions(-)
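
The common thread in the two doit modules below is the shape of the module-level _data_sets registry: each task used to map to a plain list of input data sets, and now maps to an [output, [inputs]] pair with small accessor helpers. A minimal sketch of the two shapes (names taken from the diffs below):

    # Before: only the inputs were registered; the output data set name
    # was hard-coded inside each task function.
    _data_sets[task_bom] = ["components"]

    # After: [output, [inputs]] -- both ends are now rewirable.
    _data_sets[task_bom] = ["bom", ["components"]]

    out_name = output_data_set_for_task(task_bom)    # "bom"
    in_names = input_data_sets_for_task(task_bom)    # ["components"]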
diff --git a/src/ee/bom/doit.py b/src/ee/bom/doit.py
index d7407d8..f793611 100644
--- a/src/ee/bom/doit.py
+++ b/src/ee/bom/doit.py
@@ -1,13 +1,31 @@
-from typing import Mapping
+import logging
+from pathlib import Path
+from typing import Mapping, Union
from configclass import Config
+from namedlist import namedlist
-from ee.ds import DataSetManager
+from ee.ds import DataSetManager, DataSet
+
+logger = logging.getLogger(__name__)
_dsm = None # type: DataSetManager
_data_sets = {}
+
+def change_data_sets_for_task(task, _callable):
+ _data_sets[task][1] = _callable(_data_sets[task][1])
+
+
+def output_data_set_for_task(task):
+ return _data_sets[task][0]
+
+
+def input_data_sets_for_task(task):
+ return _data_sets[task][1]
+
+
_config_template = Config({
})
@@ -28,62 +46,48 @@ class BomComponent(object):
self.mpn = mpn
def to_object(self, ds):
- return ds.create_object("bom-component", self.ref).\
- set("ref", self.ref).\
- set("mpn", self.mpn)
-
-
-class MpnBomComponent(object):
- def __init__(self, ref, mpn):
- self.ref = ref
- self.mpn = mpn
- self.count = 1
-
- def to_object(self, ds):
- return ds.create_object("mpn-bom-component", self.ref).\
- set("ref", self.ref).\
+ return ds.create_object("bom-component", self.ref). \
+ set("ref", self.ref). \
set("mpn", self.mpn)
def task_bom():
- out_data_set = "bom"
- in_data_sets = _data_sets[task_bom]
+ """
+ Takes all schematic components, filters out all
+ virtual/non-physical components (like power flags and ground
+ components) and creates 'bom-component' objects.
+ """
- def action(count=1):
- with _dsm.create_ro(in_data_sets) as in_ds:
- with _dsm.create_rw(out_data_set, clean=True) as output:
- components = [o for o in in_ds.items() if o.object_type.name == "component"]
+ out_data_set = _data_sets[task_bom][0]
+ in_data_sets = _data_sets[task_bom][1]
- bom_components = {}
- mpn_bom_components = {}
+ def action():
+ in_ds = _dsm.load_data_sets(in_data_sets)
- for c in components:
- ref = c.get("ref")
- mpn = c.get("mpn")
+ with _dsm.create_rw(out_data_set, clean=True) as output:
+ components = [o for o in in_ds.items() if o.object_type.name == "component"]
- if not ref:
- raise Exception("Missing ref")
+ bom_components = {}
- if not mpn:
- output.create_object("message", "bom-{}".format(ref)). \
- set("level", "error"). \
- set("message", "Missing required field 'mpn'")
- continue
+ for c in components:
+ ref = c.get("ref")
+ mpn = c.get("mpn")
- if ref in bom_components:
- raise Exception("Duplicate ref '{}'".format("ref"))
+ if not ref:
+ raise Exception("Missing ref")
- bom_components[ref] = BomComponent(ref, mpn)
+ if not mpn:
+ output.create_object("message", "bom-{}".format(ref)). \
+ set("level", "error"). \
+ set("message", "Missing required field 'mpn'")
+ continue
- mpn_bom_component = mpn_bom_components[ref]
- if not mpn_bom_component:
- mpn_bom_component = MpnBomComponent(ref, mpn)
- mpn_bom_components[ref] = mpn_bom_component
+ if ref in bom_components:
+ raise Exception("Duplicate ref '{}'".format("ref"))
- mpn_bom_component.count += 1
+ bom_components[ref] = BomComponent(ref, mpn)
- [c.to_object(output) for c in bom_components.values()]
- [c.to_object(output) for c in mpn_bom_components.values()]
+ [c.to_object(output) for c in bom_components.values()]
return {
"file_dep": [_dsm.cookie_for_ds(ds) for ds in in_data_sets],
@@ -92,4 +96,43 @@ def task_bom():
}
-_data_sets[task_bom] = ["components"]
+_data_sets[task_bom] = ["bom", ["components"]]
+
+
+def order_csv(count: int, group_by_mpn: bool, output_file: Path, data_sets):
+ ds = _dsm.load_data_sets(data_sets)
+
+ out = DataSet()
+
+ if group_by_mpn:
+ parts = {}
+
+ Part = namedlist("Part", "mpn, cnt, refs")
+
+ for c in [o for o in ds.items() if o.object_type.name == "bom-component"]:
+ ref = c.get("ref")
+ mpn = c.get("mpn")
+ if mpn in parts:
+ parts[mpn].cnt += 1
+ parts[mpn].refs.append(ref)
+ else:
+ parts[mpn] = Part(mpn=mpn, cnt=1, refs=[ref])
+
+ for part in sorted(parts.values(), key=lambda p: p.mpn):
+ out.create_object("row", part.mpn). \
+ set("MPN", part.mpn). \
+ set("Count", part.cnt * count). \
+ set("References", ",".join(part.refs))
+
+ _dsm.store_csv(output_file, out, "row", order_by="MPN")
+ else:
+ raise Exception("Not implemented")
+
+
+def create_task_order_csv(output_file: Union[str, Path], data_sets, count: int = 1):
+ return {
+ "name": "order-{}".format(count),
+ "actions": [(order_csv, [count, True, Path(output_file), data_sets])],
+ "file_dep": [_dsm.cookie_for_ds(ds) for ds in data_sets],
+ "targets": [output_file],
+ }
diff --git a/src/ee/doit.py b/src/ee/doit.py
new file mode 100644
index 0000000..87a6601
--- /dev/null
+++ b/src/ee/doit.py
@@ -0,0 +1,16 @@
+import logging
+
+from doit import get_var
+
+
+def configure_logging():
+ log_level = get_var("log-level", None)
+
+ if log_level:
+ ee_logger = logging.getLogger("ee")
+ formatter = logging.Formatter("%(levelname)s: %(message)s")
+ ch = logging.StreamHandler()
+ ch.setFormatter(formatter)
+ ee_logger.addHandler(ch)
+
+ ee_logger.setLevel(log_level)
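
configure_logging centralises the logger setup that ee.kicad.doit.init used to perform unconditionally; the level now comes from a doit command-line variable. A sketch of the intended use:

    # dodo.py
    from ee.doit import configure_logging

    # Picks up the `log-level` variable from the doit invocation, e.g.
    #   doit log-level=DEBUG
    # If the variable is absent, no handler is installed and the "ee"
    # logger keeps its default configuration.
    configure_logging()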
diff --git a/src/ee/ds/__init__.py b/src/ee/ds/__init__.py
index 5899d28..030113b 100644
--- a/src/ee/ds/__init__.py
+++ b/src/ee/ds/__init__.py
@@ -5,7 +5,7 @@ import os
import shutil
from functools import total_ordering
from pathlib import Path
-from typing import MutableMapping, Optional, List, Tuple, Union, Iterator
+from typing import MutableMapping, Optional, List, Tuple, Union, Iterator, Iterable
logger = logging.getLogger(__name__)
@@ -14,7 +14,7 @@ logger = logging.getLogger(__name__)
class ObjectType(object):
def __init__(self, name: str):
self._name = name
- self._fields = []
+ self._fields = [] # type: List[str]
self._objects = {}
def __eq__(self, o) -> bool:
@@ -42,7 +42,7 @@ class ObjectType(object):
def index_of(self, field: str, create: bool = False) -> Optional[int]:
try:
return self._fields.index(field)
- except ValueError as e:
+ except ValueError:
if not create:
return None
@@ -81,6 +81,13 @@ class Object(object):
idx = self._ot.index_of(key)
return self._data[idx] if idx is not None and idx < len(self._data) else None
+ def get_req(self, key: str) -> str:
+ idx = self._ot.index_of(key)
+ if idx is not None and idx < len(self._data):
+ return self._data[idx]
+ else:
+ raise Exception("No such field: {}".format(key))
+
def get_all(self, *keys: str) -> Optional[List[str]]:
values = []
for key in keys:
@@ -92,18 +99,11 @@ class Object(object):
class DataSet(object):
- def __init__(self, name: Optional[str] = None):
- self._name = name
+ def __init__(self):
self._object_types = {} # type: MutableMapping[str, ObjectType]
self._objects_by_type = {} # type: MutableMapping[ObjectType, MutableMapping[str, Object]]
self._frozen = False
- @property
- def name(self):
- if not self._name:
- raise Exception("Unnamed data set")
- return self._name
-
def __len__(self):
return sum((len(objects) for objects in self._objects_by_type.values()))
@@ -151,11 +151,12 @@ class DataSet(object):
return o
def get_object_type(self, object_type: str) -> ObjectType:
- ot, objects = self._check_object_type(object_type, False)
+ t = self._check_object_type(object_type, False)
- if not ot:
+ if not t:
raise Exception("No such object type: {}".format(object_type))
+ ot, objects = t
return ot
def get_object(self, object_type: str, key: str) -> Object:
@@ -192,7 +193,7 @@ class DataSet(object):
yield o
def merge(self, other: "DataSet") -> "DataSet":
- ds = DataSet(self._name)
+ ds = DataSet()
for objects in self._objects_by_type.values():
for o in objects.values():
ds.create_object(o.object_type.name, o.key)._set_from_object(o)
@@ -203,6 +204,14 @@ class DataSet(object):
return ds
+ def import_object(self, other: Object) -> Object:
+ o = self._check_object(other.object_type.name, other.key, create=True)
+
+ for k in other.object_type.fields:
+ o.set(k, other.get(k))
+
+ return o
+
class DataSetManager(object):
def __init__(self, basedir: Union[Path, str]):
@@ -218,8 +227,15 @@ class DataSetManager(object):
def create_rw(self, name, clean: bool) -> "LazyRwDataSet":
return LazyRwDataSet(self, name, clean)
- def create_ro(self, inputs: List[str]) -> "LazyRoDataSet":
- return LazyRoDataSet(self, inputs)
+ def load_data_sets(self, inputs: List[str], freeze: bool = True) -> DataSet:
+ ds = DataSet()
+ for name in inputs:
+ ds = ds.merge(self.load(name, freeze=True))
+
+ if freeze:
+ ds.freeze()
+
+ return ds
def add_ds(self, ds_type: str, name: str, object_type: str, path: str = None):
if ds_type == "csv":
@@ -233,21 +249,21 @@ class DataSetManager(object):
def ds_type(self, name: str):
return "csv" if name in self._csv else "ini-dir"
- def load(self, name, freeze=False) -> DataSet:
+ def load(self, path, freeze=False) -> DataSet:
try:
- object_type, path = self._csv[name]
+ object_type, path = self._csv[path]
if not freeze:
raise Exception("CSV data sources must be frozen")
- return DataSetManager._load_csv(name, object_type, path, freeze)
+ return DataSetManager._load_csv(object_type, path, freeze)
except KeyError:
- return self._load_ini_dir(name, freeze)
+ return self._load_ini_dir(path, freeze)
@staticmethod
- def _load_csv(name: str, object_type: str, path: Path, freeze: bool) -> DataSet:
- logger.info("Loading CSV file {}".format(path))
- ds = DataSet(name)
+ def _load_csv(object_type: str, path: Path, freeze: bool) -> DataSet:
+ logger.debug("Loading CSV file {}".format(path))
+ ds = DataSet()
with open(str(path), newline='') as f:
r = csv.reader(f)
@@ -263,31 +279,33 @@ class DataSetManager(object):
for idx, value in zip(range(0, min(len(row), len(header))), row):
o.set(header[idx], value)
+ if freeze:
+ ds.freeze()
+
logger.debug("Loaded {} objects".format(len(ds)))
return ds
- def _load_ini_dir(self, name: str, freeze: bool) -> DataSet:
- ds_dir = Path(name) if Path(name).is_absolute() else self._basedir / name
+ def _load_ini_dir(self, _path: str, freeze: bool) -> DataSet:
+ ds_dir = Path(_path) if Path(_path).is_absolute() else self._basedir / _path
ds_dir = ds_dir if ds_dir.is_dir() else ds_dir.parent
- logger.info("Loading DS from '{}'".format(ds_dir))
+ logger.debug("Loading DS from '{}'".format(ds_dir))
- ini = self._load_ini(ds_dir / "data-set.ini")
- name = ini.get("data-set", "name")
+ self._load_ini(ds_dir / "data-set.ini")
- ds = DataSet(name)
+ ds = DataSet()
count = 0
for ot_path in ds_dir.glob("*"):
if not ot_path.is_dir():
continue
ot = ot_path.name
- logger.info(" Loading type '{}'".format(ot))
+ logger.debug(" Loading type '{}'".format(ot))
for o_path in ot_path.glob("*.ini"):
count += 1
key = o_path.name[:-4]
- logger.info(" Loading key '{}'".format(key))
+ logger.debug(" Loading key '{}'".format(key))
ini = self._load_ini(o_path)
o = ds.create_object(ot, key)
for k, v in ini.items("values"):
@@ -296,18 +314,18 @@ class DataSetManager(object):
if freeze:
ds.freeze()
- logger.info("Loaded {} items".format(count))
+ logger.debug("Loaded {} items".format(count))
return ds
- def store(self, ds: DataSet):
- ds_dir = self._basedir / ds.name
+ def store(self, ds: DataSet, ds_name: str):
+ ds_dir = self._basedir / ds_name
items = list(ds.items())
- logger.info("Storing DS '{}' with {} objects to {}".format(ds.name, len(items), ds_dir))
+ logger.info("Storing DS '{}' with {} objects to {}".format(ds_name, len(items), ds_dir))
os.makedirs(ds_dir, exist_ok=True)
ini = self._blank_ini()
ini.add_section("data-set")
- ini.set("data-set", "name", ds.name)
+ ini.set("data-set", "name", ds_name)
self._store_ini(ini, ds_dir / "data-set.ini")
for o in items:
@@ -327,6 +345,30 @@ class DataSetManager(object):
ini.set("values", k, str(v))
self._store_ini(ini, ot_dir / "{}.ini".format(key))
+ # noinspection PyMethodMayBeStatic
+ def store_csv(self, path: Union[str, Path], ds: DataSet, object_type: str,
+ order_by: Union[str, Iterable[str]] = None):
+ items = [o for o in ds.items() if o.object_type.name == object_type]
+
+ if order_by:
+ if isinstance(order_by, str):
+ items = sorted(items, key=lambda o: o.get_req(order_by))
+ elif isinstance(order_by, Iterable):
+ items = sorted(items, key=lambda o: [o.get_req(ob) for ob in order_by])
+ else:
+ raise Exception("Unsupported order_by")
+
+ with open(path, "w") as f:
+ w = csv.writer(f)
+
+ if len(items):
+ header = ds.get_object_type(object_type).fields
+ w.writerow(header)
+
+ for o in items:
+ row = [o.get(k) for k in header]
+ w.writerow(row)
+
@staticmethod
def _blank_ini():
return configparser.ConfigParser(interpolation=None)
@@ -350,26 +392,6 @@ class DataSetManager(object):
shutil.rmtree(self._basedir / name)
-class LazyRoDataSet(object):
- def __init__(self, dsm: DataSetManager, inputs):
- self._dsm = dsm
- self._inputs = inputs
-
- def __enter__(self) -> DataSet:
- # logger.info("enter: name={}, inputs={}".format(self._name, self._inputs))
- ds = DataSet()
- for name in self._inputs:
- ds = ds.merge(self._dsm.load(name, freeze=True))
-
- ds.freeze()
-
- self._ds = ds
- return ds
-
- def __exit__(self, *args):
- return False
-
-
class LazyRwDataSet(object):
def __init__(self, dsm: DataSetManager, name, clean):
self._dsm = dsm
@@ -384,10 +406,10 @@ class LazyRwDataSet(object):
raise IOError("DataSet already exists: {}, cookie={}".format(self._name, cookie))
self._dsm.remove(self._name)
- ds = DataSet(self._name)
+ ds = DataSet()
self._ds = ds
return ds
def __exit__(self, *args):
- self._dsm.store(self._ds)
+ self._dsm.store(self._ds, self._name)
return False
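
load_data_sets replaces the LazyRoDataSet context manager with a plain function that merges and (by default) freezes the named inputs, and store_csv is the write-side counterpart used by order_csv above. A minimal round-trip sketch, assuming a basedir that already contains a stored "bom" data set:

    from ee.ds import DataSetManager

    dsm = DataSetManager("ee")   # basedir is an assumption

    # Merge the named data sets into one read-only DataSet (freeze=True
    # is the default).
    ds = dsm.load_data_sets(["bom"])

    # Write every "bom-component" object as a CSV row; the header is the
    # object type's field list. order_by accepts one field name or an
    # iterable of names; get_req raises if a field is missing.
    dsm.store_csv("bom.csv", ds, "bom-component", order_by=("mpn", "ref"))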
diff --git a/src/ee/kicad/doit.py b/src/ee/kicad/doit.py
index 30ca40d..bfbf0a1 100644
--- a/src/ee/kicad/doit.py
+++ b/src/ee/kicad/doit.py
@@ -26,24 +26,24 @@ _data_sets = {}
def change_data_sets_for_task(task, _callable):
- _data_sets[task] = _callable(_data_sets[task])
+ _data_sets[task][1] = _callable(_data_sets[task][1])
-def init(data_set_manager: DataSetManager, **kwargs):
- global _config
- _config = _config_template.make(kwargs)
+def output_data_set_for_task(task):
+ return _data_sets[task][0]
+
- ee_logger = logging.getLogger("ee")
- formatter = logging.Formatter("%(levelname)s: %(message)s")
- ch = logging.StreamHandler()
- ch.setFormatter(formatter)
- ee_logger.addHandler(ch)
+def input_data_sets_for_task(task):
+ return _data_sets[task][1]
- ee_logger.setLevel(logging.DEBUG)
+def init(data_set_manager: DataSetManager, **kwargs):
global _dsm
_dsm = data_set_manager
+ global _config
+ _config = _config_template.make(kwargs)
+
def task_kicad_gerber():
kicad_pcb = _config["kicad_pcb"]
@@ -84,8 +84,7 @@ def task_kicad_gerber():
def task_kicad_sch_to_data_set():
- out_data_set = "kicad-sch"
- in_data_sets = _data_sets[task_kicad_sch_to_data_set]
+ out_data_set, in_data_sets = _data_sets[task_kicad_sch_to_data_set]
sch = _config["sch"]
@@ -106,7 +105,7 @@ def task_kicad_sch_to_data_set():
for f in c.fields:
if f.value and f.name not in ComponentField.names:
- o.set(f.name, str(f.value))
+ o.set("field-{}".format(f.name), str(f.value))
return {
"file_dep": [Path(sch)] + [_dsm.cookie_for_ds(ds) for ds in in_data_sets],
@@ -115,14 +114,13 @@ def task_kicad_sch_to_data_set():
}
-_data_sets[task_kicad_sch_to_data_set] = []
+_data_sets[task_kicad_sch_to_data_set] = ["kicad-sch", []]
def task_kicad_pcb_to_data_set():
kicad_pcb = _config["kicad_pcb"]
- out_data_set = "kicad-pcb"
- in_data_sets = _data_sets[task_kicad_pcb_to_data_set]
+ out_data_set, in_data_sets = _data_sets[task_kicad_pcb_to_data_set]
def action():
from ee.kicad.pcb import KicadPcb, Module
@@ -154,71 +152,76 @@ def task_kicad_pcb_to_data_set():
}
-_data_sets[task_kicad_pcb_to_data_set] = []
+_data_sets[task_kicad_pcb_to_data_set] = ["kicad-pcb", []]
def task_kicad_create_component_data_set():
- out_data_set = "components"
- in_data_sets = _data_sets[task_kicad_create_component_data_set]
+ out_data_set, in_data_sets = _data_sets[task_kicad_create_component_data_set]
def action():
logger.info("in_data_sets={}, out_data_set={}".format(in_data_sets, out_data_set))
- with _dsm.create_ro(in_data_sets) as in_ds:
- # for o in in_ds.items():
- # logger.info("item: {}/{}".format(o.object_type.name, o.key))
+ in_ds = _dsm.load_data_sets(in_data_sets)
+ # for o in in_ds.items():
+ # logger.info("item: {}/{}".format(o.object_type.name, o.key))
+
+ def map_footprint(footprint):
+ for o in in_ds.items():
+ if not o.object_type.name == "kicad-footprint-mapping":
+ continue
- def map_footprint(footprint):
- for o in in_ds.items():
- if not o.object_type.name == "kicad-footprint-mapping":
- continue
+ common = o.get("common")
+ if common:
+ return common
- common = o.get("common")
- if common:
- return common
+ return footprint
- return footprint
+ with _dsm.create_rw(out_data_set, clean=True) as output:
+ kicad_sch = [o for o in in_ds.items() if o.object_type.name == "kicad-schematic-component"]
- with _dsm.create_rw(out_data_set, clean=True) as output:
- kicad_sch = [o for o in in_ds.items() if o.object_type.name == "kicad-schematic-component"]
+ logger.info("processing {} kicad-sch".format(len(kicad_sch)))
- logger.info("processing {} kicad-sch".format(len(kicad_sch)))
+ ignored_ref_types = {"#PWR", }
- ignored_ref_types = {"#PWR", }
+ for sch in kicad_sch:
+ ref = sch.get("ref")
+ ref_num = sch.get("ref-num")
+ if not ref or not ref_num:
+ logger.debug("Missing ref or ref-num")
+ continue
- for sch in kicad_sch:
- ref = sch.get("ref")
- ref_num = sch.get("ref-num")
- if not ref or not ref_num:
- logger.debug("Missing ref or ref-num")
- continue
+ ref_type = sch.get("ref-type")
+ if not ref_type:
+ logger.debug("Missing ref-type")
+ continue
+
+ if ref_type in ignored_ref_types:
+ continue
- ref_type = sch.get("ref-type")
- if not ref_type or ref_type in ignored_ref_types:
- logger.debug("Missing ref-type or bad ref-type: ref={}, ref-type={}".format(ref, ref_type))
- continue
+ c = output.create_object("component", ref)
+ c.set("ref", ref)
+ c.set("ref-num", ref_num)
+ c.set("ref-type", ref_type)
- c = output.create_object("component", ref)
- c.set("ref", ref)
- c.set("ref-num", ref_num)
- c.set("ref-type", ref_type)
+ fp = sch.get("footprint")
+ if fp:
+ fp = map_footprint(fp)
+ c.set("footprint", fp)
- fp = sch.get("footprint")
- if fp:
- fp = map_footprint(fp)
- c.set("footprint", fp)
+ c.set("mpn", sch.get("field-mpn"))
+ c.set("distributor", sch.get("field-distributor"))
- def pcb_match(o):
- return o.object_type.name == "kicad-pcb-component" and \
- o.get("ref") == ref
+ def pcb_match(o):
+ return o.object_type.name == "kicad-pcb-component" and \
+ o.get("ref") == ref
- pcb = [o for o in in_ds.items() if pcb_match(o)]
+ pcb = [o for o in in_ds.items() if pcb_match(o)]
- if not pcb:
- logger.info("Could not find PCB component for {}".format(ref))
+ if not pcb:
+ logger.info("Could not find PCB component for {}".format(ref))
- # TODO: check that the SCH and PCB footprint are the same
- # c.set("footprint", pcb.)
+ # TODO: check that the SCH and PCB footprint are the same
+ # c.set("footprint", pcb.)
return {
"file_dep": [_dsm.cookie_for_ds(ds) for ds in in_data_sets],
@@ -227,7 +230,7 @@ def task_kicad_create_component_data_set():
}
-_data_sets[task_kicad_create_component_data_set] = ["kicad-sch", "kicad-pcb"]
+_data_sets[task_kicad_create_component_data_set] = ["components", ["kicad-sch", "kicad-pcb"]]
__all__ = [
init.__name__,
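
With the registry now holding [output, [inputs]], change_data_sets_for_task rewrites only the input list; the output name stays fixed. A sketch of appending an extra input (the data-set name here is hypothetical) so that map_footprint can see kicad-footprint-mapping objects:

    from ee.kicad.doit import (
        change_data_sets_for_task,
        task_kicad_create_component_data_set,
    )

    # Append a hypothetical extra input; the output remains "components".
    change_data_sets_for_task(
        task_kicad_create_component_data_set,
        lambda inputs: inputs + ["footprint-mapping"],
    )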