author     Trygve Laugstøl <trygvis@inamo.no>    2019-04-01 21:52:31 +0200
committer  Trygve Laugstøl <trygvis@inamo.no>    2019-04-01 21:53:24 +0200
commit     c8c3c9e922915b7eddc05973f6f938c6a0cbedaf (patch)
tree       7b0303bfbfb41502d261f46ba351b1c324d3209c /src
parent     0ebd2390a959a9562cf2096150ad6e8a24ed5ec9 (diff)
download   ee-python-c8c3c9e922915b7eddc05973f6f938c6a0cbedaf.tar.gz
           ee-python-c8c3c9e922915b7eddc05973f6f938c6a0cbedaf.tar.bz2
           ee-python-c8c3c9e922915b7eddc05973f6f938c6a0cbedaf.tar.xz
           ee-python-c8c3c9e922915b7eddc05973f6f938c6a0cbedaf.zip
o Removing all doit code. It was a failed experiment.
Diffstat (limited to 'src')
-rw-r--r--  src/ee/bom/__init__.py                     0
-rw-r--r--  src/ee/bom/doit.py                       179
-rw-r--r--  src/ee/digikey/doit.py                   165
-rw-r--r--  src/ee/doit.py                            94
-rw-r--r--  src/ee/ds/__init__.py                    491
-rw-r--r--  src/ee/kicad/doit.py                     224
-rw-r--r--  src/ee/report/__init__.py                  0
-rw-r--r--  src/ee/report/doit.py                     61
-rw-r--r--  src/ee/report/templates/index.rst.j2       4
-rw-r--r--  src/ee/report/templates/messages.rst.j2    8
10 files changed, 0 insertions, 1226 deletions
diff --git a/src/ee/bom/__init__.py b/src/ee/bom/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/ee/bom/__init__.py
+++ /dev/null
diff --git a/src/ee/bom/doit.py b/src/ee/bom/doit.py
deleted file mode 100644
index bf6975b..0000000
--- a/src/ee/bom/doit.py
+++ /dev/null
@@ -1,179 +0,0 @@
-import logging
-from pathlib import Path
-from typing import Optional, Union
-
-from namedlist import namedlist
-
-from ee.doit import DoItConfig
-from ee.ds import DataSet, create_message
-
-logger = logging.getLogger(__name__)
-
-doit_config = DoItConfig()
-
-
-class BomComponent(object):
- def __init__(self, ref, mpn):
- self.ref = ref
- self.mpn = mpn
-
- def to_object(self, ds):
- return ds.create_object("bom-component", self.ref). \
- set("ref", self.ref). \
- set("mpn", self.mpn)
-
-
-def task_bom():
- """
- Creates 'bom-component' from 'component'.
-
- Takes all schematic components, filters out all virtual/non-
- physical components (like power flags and ground components) and
- creates 'bom-component' objects.
- """
-
- out_data_set, in_data_sets = doit_config.data_sets_for(task_bom)
-
- def action():
- in_ds = doit_config.dsm.load_data_sets(in_data_sets)
-
- with doit_config.dsm.create_rw(out_data_set, clean=True) as output:
- components = [o for o in in_ds.items() if o.object_type.name == "component"]
-
- bom_components = {}
-
- for c in components:
- ref = c.get("ref")
- mpn = c.get("mpn")
-
- if not ref:
- raise Exception("Missing ref")
-
- if not mpn:
- create_message(output, "Missing required field 'mpn' on component ref={}".format(ref), "error")
- continue
-
- if ref in bom_components:
-            raise Exception("Duplicate ref '{}'".format(ref))
-
- bom_components[ref] = BomComponent(ref, mpn)
-
- [c.to_object(output) for c in bom_components.values()]
-
- return {
- "file_dep": [doit_config.dsm.cookie_for_ds(ds) for ds in in_data_sets],
- "actions": [action],
- "targets": [doit_config.dsm.cookie_for_ds(out_data_set)],
- }
-
-
-doit_config.set_data_sets_for(task_bom, "bom", "components")
-
-
-def order_csv(count: int, style: Optional[str], output_file: Path, out_ds: DataSet, data_sets):
- ds = doit_config.dsm.load_data_sets(data_sets)
-
- csv_ds = DataSet()
-
- parts = {}
-
- # noinspection PyPep8Naming
- Part = namedlist("Part", "mpn, cnt, refs, digikey_pn")
-
- digikey_parts = [o for o in ds.items() if
- o.object_type.name == "component-to-part-mapping" and
- o.get("seller") == "digikey"]
-
- for c in [o for o in ds.items() if o.object_type.name == "bom-component"]:
- ref = c.get("ref")
- mpn = c.get("mpn")
-
- digikey_pn = None
-
- if style == "digikey":
-
- digikey_pns = []
- for o in (o for o in digikey_parts if o.get("ref") == ref):
-
-                part_number, = o.values("part-number", strip=True, required=True)
-
-                if not part_number:
-                    create_message(out_ds, "Missing required field value for field part-number, object key={}".
-                                   format(o.key), level="error")
-                    continue
-
-                # values() has already stripped the value, so it can be collected as-is.
-                digikey_pns.append(part_number)
-
-            # Which DK part to pick should really depend on the price breaks for the different packagings, but we
-            # don't have that info here. Luckily the DK store suggests cheaper packagings when ordering, so that helps.
-
- digikey_pns = set(digikey_pns)
-
- if len(digikey_pns) == 0:
- create_message(out_ds, "No part for component: ref={}, mpn={}".format(ref, mpn), "error")
- continue
- elif len(digikey_pns) > 1:
- create_message(out_ds, "Multiple parts for component: ref={}, mpn={}. Don't know which one to select.".
- format(ref, mpn), "error")
- continue
- else:
- digikey_pn = next(iter(digikey_pns))
-
- digikey_pn = digikey_pn.strip()
-
- if len(digikey_pn) == 0:
- raise Exception("Missing digikey part number for ref={}".format(ref))
-
- if mpn in parts:
- part = parts[mpn]
-
- if digikey_pn:
- if part.digikey_pn != digikey_pn:
- raise Exception("Bad data, inconsistent digikey-pn for mpn '{}'. First digikey part number='{}', "
- "new digikey part number='{}'".format(mpn, part.digikey_pn, digikey_pn))
-
- part.cnt += 1
- part.refs.append(ref)
- else:
- parts[mpn] = Part(mpn=mpn, cnt=1, refs=[ref], digikey_pn=digikey_pn)
-
- mpn_field = "MPN"
- count_field = "Count"
- refs_field = "References"
-
- if style == "digikey":
- count_field = "Quantity"
- refs_field = "Customer Reference"
-
- for part in sorted(parts.values(), key=lambda p: p.mpn):
- o = csv_ds.create_object("row", part.mpn). \
- set(mpn_field, part.mpn). \
- set(count_field, part.cnt * count). \
- set(refs_field, ",".join(part.refs))
-
- if style == "digikey":
- o.set("Digi-Key Part Number", part.digikey_pn)
-
- fields = None
- include_extra_fields = True
-
- if style == "digikey":
- fields = ["Digi-Key Part Number", refs_field, count_field, mpn_field]
- include_extra_fields = False
-
- doit_config.dsm.store_csv(output_file, csv_ds, "row", order_by=mpn_field, fields=fields,
- include_extra_fields=include_extra_fields)
-
-
-def create_task_order_csv(*, style: Optional[str] = None, output_file: Union[str, Path], out_data_set, data_sets, count: int = 1):
- def action():
- with doit_config.dsm.create_rw(out_data_set, clean=True) as out:
- order_csv(count, style, Path(output_file), out, data_sets)
-
- return {
- "name": "order-{}".format(count) if not style else "order-{}-{}".format(style, count),
- "actions": [action],
- "file_dep": [doit_config.dsm.cookie_for_ds(ds) for ds in data_sets],
- "targets": [doit_config.dsm.cookie_for_ds(out_data_set), output_file],
- }
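For reference, the removed bom tasks were meant to be pulled into a project's dodo.py roughly as sketched below. This is illustrative, not code from this repository; the data set manager base directory and the output file name are assumptions, while "bom" is the default data set registered above.

    # dodo.py -- illustrative wiring of the removed ee.bom.doit tasks
    from ee.doit import configure_logging
    from ee.ds import DataSetManager

    import ee.bom.doit
    from ee.bom.doit import task_bom, create_task_order_csv  # doit discovers task_bom by name

    configure_logging()

    ee.bom.doit.doit_config.configure(data_set_manager=DataSetManager("data-sets"))

    def task_order():
        # One sub-task per order style/quantity; "order" is an illustrative data set name.
        yield create_task_order_csv(style="digikey", output_file="order-digikey.csv",
                                    out_data_set="order", data_sets=["bom"], count=1)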
diff --git a/src/ee/digikey/doit.py b/src/ee/digikey/doit.py
deleted file mode 100644
index 93963c9..0000000
--- a/src/ee/digikey/doit.py
+++ /dev/null
@@ -1,165 +0,0 @@
-import logging
-from itertools import groupby
-from operator import itemgetter
-from typing import List
-
-import ee.digikey as dk
-from ee.doit import DoItConfig
-from ee.ds import DataSet
-
-logger = logging.getLogger(__name__)
-
-doit_config = DoItConfig()
-
-
-def resolve_schematic_components(output: DataSet, in_ds: DataSet):
- def find(field, value):
- return [o for o in output.items() if
- o.object_type.name == "component-to-part-mapping" and o.get(field) == value]
-
- def save(refs: List[str], p: dk.DigikeyProduct):
- logger.info("Found part, dpn={}, mpn={}, refs={}".format(p.part_number, p.mpn, ", ".join(refs)))
-
- for ref in refs:
-            # Make the key a bit long so we can have several parts that match the MPN from a single seller
- key = "digikey-{}-{}".format(ref, p.part_number)
- output.create_object("component-to-part-mapping", key, replace=True). \
- set("seller", "digikey"). \
- set("ref", ref). \
- set("part-number", p.part_number). \
- set("mpn", p.mpn). \
- set("description", p.description). \
- set("quantity-available", p.quantity_available). \
- set("url", p.url)
-
- digikey = dk.Digikey()
- client = dk.DigikeyClient(digikey, on_download=logger.info)
-
- components = []
-
-    # TODO: support searching by value and digikey part number directly. Priority should be "digikey", "mpn" and
-    # "value"; the first field present should be used.
-
- for o in in_ds.items():
- if o.object_type.name != "component":
- continue
-
- ref, = o.values("ref", required=True)
-
- if not ref:
- raise Exception("Bad component: object key={}, missing required field 'ref'".format(o.key))
-
- mpn, = o.values("mpn", strip=True)
-
- # We ignore components without mpn.
- if not mpn:
- logger.debug("Skipping component without MPN: {}".format(ref))
- continue
-
- components.append([mpn, ref])
-
- components = sorted(components, key=itemgetter(0))
-
- for mpn, components in groupby(components, key=itemgetter(0)):
- references = [c[1] for c in components]
-
- dk_components = find("mpn", mpn)
-
- if len(dk_components):
- logger.info("Already resolved {} to {}".format(mpn, ", ".join(
- sorted(set([c.get("part-number") for c in dk_components])))))
- continue
-
- logger.info("Looking up MPN: {}, used by {}".format(mpn, ", ".join(sorted(references))))
- response = client.search(mpn)
-
- if response.response_type == dk.SearchResponseTypes.SINGLE:
- save(references, response.products[0])
- elif response.response_type == dk.SearchResponseTypes.MANY:
- # A search for "FOO" might return products "FOO" and "FOOZ" so we pick out the ones with the matching mpn
- # This will often be more than one product, but digikey shows the same part in different packagings.
- viable_products = [p for p in response.products if p.mpn == mpn]
-
- if len(viable_products) == 0:
-                logger.warning("BUG: Got multiple hits ({}) but didn't find any that matched the MPN. Strange!".
- format(len(response.products)))
- else:
- if len(viable_products) == 1:
- part = viable_products[0]
- else:
- # Pick the first one, should be as good as any
- part = sorted(viable_products, key=lambda x: x.part_number)[0]
-
- logger.info("Got multiple hits ({})".format(len(viable_products)))
-
- save(references, part)
- elif response.response_type == dk.SearchResponseTypes.TOO_MANY:
-            logger.warning("too many matches")
- elif response.response_type == dk.SearchResponseTypes.NO_MATCHES:
- logger.warning("no matches")
-
-
-def task_digikey_resolve_schematic_components():
- out_data_set, in_data_sets = doit_config.data_sets_for(task_digikey_resolve_schematic_components)
-
- def action():
- in_ds = doit_config.dsm.load_data_sets(in_data_sets)
-
- with doit_config.dsm.create_rw(out_data_set, clean=False) as output:
- resolve_schematic_components(output, in_ds)
-
- return dict(
- file_dep=[doit_config.dsm.cookie_for_ds(ds) for ds in in_data_sets],
- actions=[action],
- targets=[doit_config.dsm.cookie_for_ds(out_data_set)],
- )
-
-
-doit_config.set_data_sets_for(task_digikey_resolve_schematic_components,
- "digikey-resolved-parts", "components")
-
-
-def download_part_facts(output: DataSet, in_ds: DataSet):
- digikey = dk.Digikey()
- client = dk.DigikeyClient(digikey, on_download=logger.debug)
-
- parts = [o for o in in_ds.items() if
- o.object_type.name == "component-to-part-mapping" and
- o.get("seller") == "digikey"]
-
- for pn in sorted({part.get("part-number") for part in parts}):
- logger.info("Downloading facts for {}".format(pn))
-
- response = client.search(pn)
-
- if response.response_type == dk.SearchResponseTypes.SINGLE:
- product = response.products[0]
-
- o = output.create_object("digikey-part", pn, replace=True). \
- set("part-number", product.part_number). \
- set("url", product.url). \
- set("mpn", product.mpn)
-
- for a in product.attributes:
- key = "{}/{}".format(a.attribute_type.id, a.attribute_type.label)
- o.set(key, a.value)
-
-
-def task_digikey_fetch_full_part_facts():
- out_data_set, in_data_sets = doit_config.data_sets_for(task_digikey_fetch_full_part_facts)
-
- def action():
- in_ds = doit_config.dsm.load_data_sets(in_data_sets)
-
- with doit_config.dsm.create_rw(out_data_set, clean=False) as output:
- download_part_facts(output, in_ds)
-
- return dict(
- file_dep=[doit_config.dsm.cookie_for_ds(ds) for ds in in_data_sets],
- actions=[action],
- targets=[doit_config.dsm.cookie_for_ds(out_data_set)],
- )
-
-
-doit_config.set_data_sets_for(task_digikey_fetch_full_part_facts,
- "digikey-parts", "digikey-resolved-parts")
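The two tasks above chain through the 'digikey-resolved-parts' data set. Outside of doit, the client they wrap was driven roughly as in this sketch (pre-removal ee.digikey API as used above; the MPN is an example value):

    import logging

    import ee.digikey as dk

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    client = dk.DigikeyClient(dk.Digikey(), on_download=logger.info)

    mpn = "NE555DR"  # example MPN
    response = client.search(mpn)

    if response.response_type == dk.SearchResponseTypes.SINGLE:
        product = response.products[0]
    elif response.response_type == dk.SearchResponseTypes.MANY:
        # The same MPN shows up once per packaging; pick deterministically, like the task does.
        viable = sorted((p for p in response.products if p.mpn == mpn),
                        key=lambda p: p.part_number)
        product = viable[0] if viable else None
    else:
        product = None  # TOO_MANY or NO_MATCHES

    if product:
        logger.info("%s -> %s (%s)", mpn, product.part_number, product.url)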
diff --git a/src/ee/doit.py b/src/ee/doit.py
deleted file mode 100644
index 665f039..0000000
--- a/src/ee/doit.py
+++ /dev/null
@@ -1,94 +0,0 @@
-import logging
-from typing import Tuple, List, Mapping, Any
-
-from doit import get_var
-
-from ee.ds import DataSetManager
-
-logger = logging.getLogger(__name__)
-
-
-def configure_logging():
- log_level = get_var("log-level", None)
-
- if log_level:
- ee_logger = logging.getLogger("ee")
- formatter = logging.Formatter("%(levelname)s: %(message)s")
- ch = logging.StreamHandler()
- ch.setFormatter(formatter)
- ee_logger.addHandler(ch)
-
- ee_logger.setLevel(log_level)
-
-
-class Report(object):
- def __init__(self, task):
- self.task = task
-
-
-class ReportCollection(object):
- def __init__(self):
- self._reports = [] # type: List[Report]
-
- def add_report(self, report: Report):
- self._reports.append(report)
-
- @property
- def reports(self) -> Tuple[Report]:
- return tuple(self._reports)
-
-
-# This should probably be called "DoItModuleConfig" since it is used once per module. The module is responsible for
-# instantiating it.
-class DoItConfig(object):
- def __init__(self):
- self._dsm = None # type: DataSetManager
- self._report_collection = None # type: ReportCollection
- self._extra_config = None # type: Mapping[str, Any]
- self._data_sets = {}
- self._reports = []
-
- def configure(self, *, data_set_manager: DataSetManager, report_collection: ReportCollection = None,
- extra_config: Mapping[str, Any] = None):
- self._dsm = data_set_manager
-        self._report_collection = report_collection if report_collection is not None else ReportCollection()
- self._extra_config = extra_config
-
- @property
- def dsm(self) -> DataSetManager:
- if self._dsm is None:
- raise Exception("The data set manager has not been set")
- return self._dsm
-
- @property
- def report_collection(self):
- if self._report_collection is None:
- raise Exception("The report collection has not been set")
- return self._report_collection
-
- @property
- def extra_config(self):
- return self._extra_config
-
- def data_sets_for(self, task):
- try:
- return self._data_sets[task]
- except KeyError:
- raise KeyError("No such task registered in this module: {}".format(task))
-
- def out_data_set_for(self, task):
- return self.data_sets_for(task)[0]
-
- def input_data_sets_for(self, task):
- return self.data_sets_for(task)[1]
-
- def set_data_sets_for(self, task, out_dataset: str, *in_datasets: str):
- self._data_sets[task] = [out_dataset, list(in_datasets)]
-
- def change_data_sets_for_task(self, task, _callable):
- ds = self._data_sets[task]
- ds[1] = _callable(ds[1])
-
- def append_in_data_set_for_task(self, task, *data_sets: str):
- ds = self._data_sets[task]
- ds[1] = ds[1] + list(data_sets)
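Each task module above instantiates its own DoItConfig; a dodo.py configures every instance before doit builds the task list. A minimal sketch against the pre-removal API (directory names are illustrative):

    # dodo.py -- illustrative configuration of the per-module DoItConfig instances
    from ee.doit import ReportCollection, configure_logging
    from ee.ds import DataSetManager

    import ee.bom.doit
    import ee.digikey.doit

    configure_logging()  # honours `doit log-level=DEBUG` via doit.get_var

    dsm = DataSetManager("data-sets")
    reports = ReportCollection()

    for module in (ee.bom.doit, ee.digikey.doit):
        module.doit_config.configure(data_set_manager=dsm,
                                     report_collection=reports,
                                     extra_config={"report_dir": "reports"})

    # The registered input data sets can still be adjusted afterwards,
    # e.g. to feed an extra (illustrative) "overrides" data set into task_bom:
    ee.bom.doit.doit_config.append_in_data_set_for_task(ee.bom.doit.task_bom, "overrides")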
diff --git a/src/ee/ds/__init__.py b/src/ee/ds/__init__.py
deleted file mode 100644
index 915dd6f..0000000
--- a/src/ee/ds/__init__.py
+++ /dev/null
@@ -1,491 +0,0 @@
-import configparser
-import csv
-import logging
-import os
-import shutil
-from functools import total_ordering
-from pathlib import Path
-from typing import MutableMapping, Optional, List, Tuple, Union, Iterator, Iterable
-
-logger = logging.getLogger(__name__)
-
-
-@total_ordering
-class ObjectType(object):
- def __init__(self, name: str):
- self._name = name
- self._fields = [] # type: List[str]
-
- def __eq__(self, o) -> bool:
- other = o # type: ObjectType
- return isinstance(o, ObjectType) and self._name == other._name
-
- def __lt__(self, o: object) -> bool:
- if not isinstance(o, ObjectType):
- return True
-
- other = o # type: ObjectType
- return self._name < other._name
-
- def __hash__(self) -> int:
- return self._name.__hash__()
-
- @property
- def name(self):
- return self._name
-
- @property
- def fields(self):
- return self._fields
-
- def index_of(self, field: str, create: bool = False) -> Optional[int]:
- try:
- return self._fields.index(field)
- except ValueError:
- if not create:
- return None
-
- self._fields.append(field)
- return len(self._fields) - 1
-
- def has(self, *keys: str):
- return all([key in self._fields for key in keys])
-
-
-class Object(object):
- class ValueList(list):
- """An auto-expanding version of list."""
-
- def __setitem__(self, index, value):
- if index >= len(self):
- self.extend([None] * (index + 1 - len(self)))
- list.__setitem__(self, index, value)
-
- def __getitem__(self, index):
- if index >= len(self):
- self.extend([None] * (index + 1 - len(self)))
- return list.__getitem__(self, index)
-
- def __init__(self, ds: "DataSet", ot: ObjectType, key: str):
- self._ds = ds
- self._ot = ot
- self._key = key
- self._data = Object.ValueList()
-
- @property
- def object_type(self):
- return self._ot
-
- @property
- def key(self):
- return self._key
-
- def set(self, key: str, value: str) -> "Object":
- if self._ds._frozen:
- raise Exception("This data set is frozen")
- idx = self._ot.index_of(key, create=True)
- self._data[idx] = value
-
- return self
-
- def _set_from_object(self, other: "Object"):
- for k in other._ot.fields:
- self.set(k, other.get(k))
-
-    def has_values(self, *keys: str) -> bool:
-        return all(value is not None and len(value) > 0 for value in (self.get(key) for key in keys))
-
-    def values(self, *keys: str, strip: bool = False, required: bool = True) -> List[Optional[str]]:
-        """Looks up the values for all keys.
-
-        If required=True, strip is implied to be True.
-
-        If strip is True, all values are stripped with str.strip(); None values are preserved.
-
-        If required=True, every value has to have len() > 0. If any value fails that requirement,
-        a list containing only None values is returned.
-        """
-
- values = []
-
- strip = True if required else strip
-
- for key in keys:
- v = self.get(key)
-
- if strip:
- v = v.strip() if v else v
-
- if required:
- if v is None or len(v) == 0:
- return [None] * len(keys)
-
- values.append(v)
-
- return values
-
- def get(self, key: str) -> Optional[str]:
- idx = self._ot.index_of(key)
- return self._data[idx] if idx is not None else None
-
- def get_req(self, key: str) -> str:
- idx = self._ot.index_of(key)
- if idx is not None and idx < len(self._data):
- return self._data[idx]
- else:
- raise Exception("No such field: {}".format(key))
-
- def get_all(self, *keys: str) -> Optional[List[str]]:
- values = []
- for key in keys:
- idx = self._ot.index_of(key)
-            if idx is None or idx >= len(self._data):
- return None
- values.append(self._data[idx])
- return values
-
-
-class DataSet(object):
- def __init__(self):
- self._object_types = {} # type: MutableMapping[str, ObjectType]
- self._objects_by_type = {} # type: MutableMapping[ObjectType, MutableMapping[str, Object]]
- self._frozen = False
-
- def __len__(self):
- return sum((len(objects) for objects in self._objects_by_type.values()))
-
- def freeze(self):
- self._frozen = True
-
- def _assert_not_frozen(self):
- if self._frozen:
- raise Exception("This data set is frozen")
-
- def _check_object_type(self, object_type: str, create: bool) -> \
- Optional[Tuple[ObjectType, MutableMapping[str, Object]]]:
- try:
- ot = self._object_types[object_type]
- objects = self._objects_by_type[ot]
- return ot, objects,
- except KeyError:
- if not create:
- return None
-
- self._assert_not_frozen()
-
- ot = ObjectType(object_type)
- self._object_types[object_type] = ot
- self._objects_by_type[ot] = objects = {}
- return ot, objects,
-
- def _check_object(self, object_type: str, key: str, create: bool) -> Optional[Object]:
- t = self._check_object_type(object_type, create)
-
- if not t:
- return None
-
- ot, objects = t
- try:
- return objects[key]
- except KeyError:
- self._assert_not_frozen()
-
- if not create:
- raise Exception("No such object: {}:{}".format(object_type, key))
-
- o = Object(self, ot, key)
- objects[key] = o
- return o
-
- def get_object_type(self, object_type: str) -> ObjectType:
- t = self._check_object_type(object_type, False)
-
- if not t:
- raise Exception("No such object type: {}".format(object_type))
-
- ot, objects = t
- return ot
-
- def get_object(self, object_type: str, key: str) -> Object:
- o = self._check_object(object_type, key, False)
-
- if not o:
- raise Exception("No such object: {}:{}".format(object_type, key))
-
- return o
-
- def has_object(self, object_type: str, key: str) -> bool:
- t = self._check_object_type(object_type, False)
-
- if t:
- ot, objects = t
- return key in objects
-
- return False
-
- def get_or_create_object(self, object_type: str, key: str) -> Object:
- return self._check_object(object_type, key, True)
-
- def create_object(self, object_type: str, key: str, replace=False) -> Object:
- self._assert_not_frozen()
-
- if self.has_object(object_type, key):
- if not replace:
- raise Exception("Object already exist: {}:{}".format(object_type, key))
-
- ot, objects = self._check_object_type(object_type, False)
- del self._objects_by_type[ot][key]
-
- return self._check_object(object_type, key, True)
-
- def items(self) -> Iterator[Object]:
- for objects in self._objects_by_type.values():
- for o in objects.values():
- yield o
-
- def merge(self, other: "DataSet") -> "DataSet":
- ds = DataSet()
- for objects in self._objects_by_type.values():
- for o in objects.values():
- ds.create_object(o.object_type.name, o.key)._set_from_object(o)
-
- for objects in other._objects_by_type.values():
- for o in objects.values():
- ds.get_or_create_object(o.object_type.name, o.key)._set_from_object(o)
-
- return ds
-
- def import_object(self, other: Object) -> Object:
- o = self._check_object(other.object_type.name, other.key, create=True)
-
- for k in other.object_type.fields:
- o.set(k, other.get(k))
-
- return o
-
-
-class DataSetManager(object):
- def __init__(self, basedir: Union[Path, str]):
- self._basedir = Path(basedir)
- self._csv = {} # type: MutableMapping[str, Tuple[str, Path]]
-
- @property
- def all_data_sets(self):
- datasets = [ds.name for ds in self._basedir.iterdir() if (ds / "data-set.ini").is_file()]
- return list(self._csv.keys()) + datasets
-
- def cookie_for_ds(self, ds_name) -> Path:
- try:
- return self._csv[ds_name][1]
- except KeyError:
- return self._basedir / ds_name / "data-set.ini"
-
- def create_rw(self, name, clean: bool) -> "LazyRwDataSet":
- return LazyRwDataSet(self, name, clean)
-
- def load_data_sets(self, inputs: List[str], freeze: bool = True) -> DataSet:
- ds = DataSet()
- for name in inputs:
- ds = ds.merge(self.load(name, freeze=True))
-
- if freeze:
- ds.freeze()
-
- return ds
-
- def register_ds(self, ds_type: str, name: str, object_type: str, path: str = None):
- if ds_type == "csv":
- if name in self._csv:
- raise Exception("Data source already exists: {}".format(name))
-
- self._csv[name] = object_type, Path(path),
- else:
- raise Exception("Unknown data source type: {}".format(ds_type))
-
- def ds_type(self, name: str):
- return "csv" if name in self._csv else "ini-dir"
-
- def load(self, path, freeze=False) -> DataSet:
- try:
- object_type, path = self._csv[path]
-
- if not freeze:
- raise Exception("CSV data sources must be frozen")
-
- return DataSetManager._load_csv(object_type, path, freeze)
- except KeyError:
- return self._load_ini_dir(path, freeze)
-
- @staticmethod
- def _load_csv(object_type: str, path: Path, freeze: bool) -> DataSet:
- # logger.debug("Loading CSV file {}".format(path))
- ds = DataSet()
-
- with open(str(path), newline='') as f:
- r = csv.reader(f)
-
- header = next(r, None)
- for row in r:
- if len(row) == 0:
- continue
-
- key = row[0]
-
- o = ds.create_object(object_type, key)
-                for idx, value in enumerate(row[:len(header)]):
- o.set(header[idx], value)
-
- if freeze:
- ds.freeze()
-
- # logger.debug("Loaded {} objects".format(len(ds)))
- return ds
-
- def _load_ini_dir(self, _path: str, freeze: bool) -> DataSet:
- ds_dir = Path(_path) if Path(_path).is_absolute() else self._basedir / _path
- ds_dir = ds_dir if ds_dir.is_dir() else ds_dir.parent
-
- # logger.debug("Loading DS from '{}'".format(ds_dir))
-
- self._load_ini(ds_dir / "data-set.ini")
-
- ds = DataSet()
- count = 0
- for ot_path in ds_dir.glob("*"):
- if not ot_path.is_dir():
- continue
-
- ot = ot_path.name
- # logger.debug(" Loading type '{}'".format(ot))
- for o_path in ot_path.glob("*.ini"):
- count += 1
-
- key = o_path.name[:-4]
- # logger.debug(" Loading key '{}'".format(key))
- ini = self._load_ini(o_path)
- o = ds.create_object(ot, key)
- for k, v in ini.items("values"):
- o.set(k, v)
-
- if freeze:
- ds.freeze()
-
- # logger.debug("Loaded {} items".format(count))
- return ds
-
- def store(self, ds: DataSet, ds_name: str):
- ds_dir = self._basedir / ds_name
- items = list(ds.items())
- # logger.info("Storing DS '{}' with {} objects to {}".format(ds_name, len(items), ds_dir))
-
- os.makedirs(ds_dir, exist_ok=True)
- ini = self._blank_ini()
- ini.add_section("data-set")
- ini.set("data-set", "name", ds_name)
- self._store_ini(ini, ds_dir / "data-set.ini")
-
- for o in items:
- ot = o.object_type
- key = o.key
-
- ot_dir = ds_dir / ot.name
- os.makedirs(ot_dir, exist_ok=True)
- ini = self._blank_ini()
- ini.add_section("meta")
- ini.set("meta", "type", ot.name)
-
- ini.add_section("values")
- for k in ot.fields:
- v = o.get(k)
- if v:
- ini.set("values", k, str(v))
- self._store_ini(ini, ot_dir / "{}.ini".format(key))
-
- # noinspection PyMethodMayBeStatic
- def store_csv(self, path: Union[str, Path], ds: DataSet, object_type: str,
- order_by: Union[str, Iterable[str]] = None, fields: List[str] = None,
- include_extra_fields: bool = True):
- items = [o for o in ds.items() if o.object_type.name == object_type]
-
- if order_by:
- if isinstance(order_by, str):
- items = sorted(items, key=lambda o: o.get_req(order_by))
- elif isinstance(order_by, Iterable):
- items = sorted(items, key=lambda o: [o.get_req(ob) for ob in order_by])
- else:
- raise Exception("Unsupported order_by")
-
- with open(path, "w") as f:
- w = csv.writer(f, lineterminator=os.linesep)
-
- if len(items):
-
- if fields is not None:
- header = list(fields)
-
- if include_extra_fields:
-                        header.extend(f for f in ds.get_object_type(object_type).fields if f not in header)
- else:
- header = ds.get_object_type(object_type).fields
- w.writerow(header)
-
- for o in items:
- row = [o.get(k) for k in header]
- w.writerow(row)
-
- @staticmethod
- def _blank_ini():
- parser = configparser.ConfigParser(interpolation=None)
- parser.optionxform = str
- return parser
-
- def _load_ini(self, path: Path):
- ini = self._blank_ini()
- if len(ini.read(str(path))) != 1:
- raise IOError("Could not load ini file: {}".format(path))
- return ini
-
- @staticmethod
- def _store_ini(ini, path):
- with open(path, "w") as f:
- ini.write(f)
-
- def remove(self, name: str):
- try:
- object_type, path = self._csv[name]
- os.remove(str(path))
- except KeyError:
- shutil.rmtree(self._basedir / name)
-
-
-class LazyRwDataSet(object):
- def __init__(self, dsm: DataSetManager, name, clean):
- self._dsm = dsm
- self._name = name
- self._clean = clean
-
- def __enter__(self) -> DataSet:
- cookie = self._dsm.cookie_for_ds(self._name)
-
- if cookie.exists():
- if self._clean:
- self._dsm.remove(self._name)
- ds = DataSet()
- else:
- ds = self._dsm.load(self._name)
- else:
- ds = DataSet()
-
- self._ds = ds
- return ds
-
- def __exit__(self, *args):
- self._dsm.store(self._ds, self._name)
- return False
-
-
-def create_message(data_set: DataSet, message: str, level: str):
- return data_set.create_object("message", "message-{}".format(str(abs(hash(message))))). \
- set("message", message). \
- set("level", level)
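Taken together, the classes above formed a small string-only object store: a DataSet holds (object-type, key) objects, and DataSetManager persists each data set as a directory of ini files, or reads/writes CSV views of one object type. A round-trip sketch against that API; the paths and names are illustrative:

    from ee.ds import DataSetManager, create_message

    dsm = DataSetManager("data-sets")

    # Build and persist a data set; LazyRwDataSet stores it on context exit.
    with dsm.create_rw("components", clean=True) as ds:
        ds.create_object("component", "R1"). \
            set("ref", "R1"). \
            set("mpn", "RC0402FR-0710KL")
        create_message(ds, "example message", "warning")

    # Load it back, frozen (read-only); several data sets can be merged in one call.
    merged = dsm.load_data_sets(["components"])
    for o in merged.items():
        print(o.object_type.name, o.key, o.get("mpn"))

    # Export one object type as CSV, ordered by the "ref" field.
    dsm.store_csv("components.csv", merged, "component", order_by="ref")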
diff --git a/src/ee/kicad/doit.py b/src/ee/kicad/doit.py
deleted file mode 100644
index c881c70..0000000
--- a/src/ee/kicad/doit.py
+++ /dev/null
@@ -1,224 +0,0 @@
-import logging
-import os.path
-from pathlib import Path
-from typing import Mapping
-
-from configclass import Config
-
-import ee.kicad
-import ee.kicad.pcb
-from ee.doit import DoItConfig
-
-logger = logging.getLogger(__name__)
-
-_config_template = Config({
- "sch": None,
- "kicad_pcb": None,
- "gerber_dir": None,
- "gerber_zip": None,
-})
-
-doit_config = DoItConfig()
-
-_config = None # type: Mapping[str, str]
-
-
-def init(**kwargs):
- global _config
- _config = _config_template.make(kwargs)
-
-
-def task_kicad_gerber():
- kicad_pcb = _config["kicad_pcb"]
- gerber_dir = _config["gerber_dir"]
- gerber_zip = _config["gerber_zip"]
-
- gerber_zip = gerber_zip or "{}.zip".format(gerber_dir)
-
- # logger.info("gerber_zip={}".format(gerber_zip))
-
- eg = next((p for p in (os.path.join(p, "export_gerber.py") for p in ee.kicad.__path__) if os.path.isfile(p)), None)
- if not eg:
- raise Exception("Could not find export_gerber.py")
-
- # TODO: replace with python
- mkdir = "mkdir -p {}".format(gerber_dir)
- export_gerber = " ".join([
- eg,
- "--pcb", kicad_pcb,
- "--output-directory", gerber_dir,
- "--protel-extensions",
- ])
-
- def make_zip():
- import zipfile
- from pathlib import Path
- with zipfile.ZipFile(gerber_zip, "w") as z:
- for p in Path(gerber_dir).iterdir():
- if not p.is_file():
- continue
- z.write(p, arcname=p.relative_to(gerber_dir))
-
- return {
- "targets": [gerber_zip],
- "actions": [mkdir, export_gerber, make_zip],
- "file_dep": [kicad_pcb],
- }
-
-
-def task_kicad_sch_to_data_set():
- out_data_set, in_data_sets = doit_config.data_sets_for(task_kicad_sch_to_data_set)
-
- sch = _config["sch"]
-
- def action():
- from ee.kicad.model import ComponentField
-
- with doit_config.dsm.create_rw(out_data_set, clean=True) as ds:
- schematics = ee.kicad.read_schematics(sch)
- for c in [c for c in schematics.components]:
- o = ds.create_object("kicad-schematic-component", c.timestamp)
- o.set("ref", c.ref)
- o.set("ref-type", c.ref_type)
- if c.has_ref_num:
- o.set("ref-num", str(c.ref_num))
- o.set("value", c.value)
- if c.footprint:
- o.set("footprint", c.footprint)
-
- for f in c.fields:
- if f.value and f.name not in ComponentField.names:
- o.set("field-{}".format(f.name), str(f.value))
-
- return {
- "file_dep": [Path(sch)] + [doit_config.dsm.cookie_for_ds(ds) for ds in in_data_sets],
- "actions": [action],
- "targets": [doit_config.dsm.cookie_for_ds(out_data_set)],
- }
-
-
-doit_config.set_data_sets_for(task_kicad_sch_to_data_set, "kicad-sch")
-
-
-def task_kicad_pcb_to_data_set():
- kicad_pcb = _config["kicad_pcb"]
-
- out_data_set, in_data_sets = doit_config.data_sets_for(task_kicad_pcb_to_data_set)
-
- def action():
- from ee.kicad.pcb import KicadPcb, Module
-
- logger.debug("Parsing PCB {}".format(kicad_pcb))
-
- with doit_config.dsm.create_rw(out_data_set, clean=True) as ds:
- # [ds.delete(o) for o in ds.items(object_type="kicad-pcb-component")]
-
- pcb = ee.kicad.pcb.parse(kicad_pcb) # type: KicadPcb
- for _m in pcb.modules:
- m = _m # type: Module
-
- o = ds.create_object("kicad-pcb-component", m.tstamp)
-
- ref_text = next((t for t in m.fp_texts if t.kind == "reference"), None)
- o.set("ref", ref_text.value)
-
- x, y, rot = m.at
- o.set("placement-x", x)
- o.set("placement-y", y)
- o.set("placement-rotation", rot)
- o.set("layer", m.layer)
-
- return {
- "file_dep": [Path(kicad_pcb)] + [doit_config.dsm.cookie_for_ds(ds) for ds in in_data_sets],
- "actions": [action],
- "targets": [doit_config.dsm.cookie_for_ds(out_data_set)],
- }
-
-
-doit_config.set_data_sets_for(task_kicad_pcb_to_data_set, "kicad-pcb")
-
-
-def task_kicad_create_component_data_set():
- out_data_set, in_data_sets = doit_config.data_sets_for(task_kicad_create_component_data_set)
-
- def action():
- in_ds = doit_config.dsm.load_data_sets(in_data_sets)
-
- # for o in in_ds.items():
- # logger.info("item: {}/{}".format(o.object_type.name, o.key))
-
- def map_footprint(footprint):
- for o in in_ds.items():
- if not o.object_type.name == "kicad-footprint-mapping":
- continue
-
- common = o.get("common")
- if common:
- return common
-
- return footprint
-
- with doit_config.dsm.create_rw(out_data_set, clean=True) as output:
- kicad_sch = [o for o in in_ds.items() if o.object_type.name == "kicad-schematic-component"]
-
- logger.info("processing {} kicad-sch".format(len(kicad_sch)))
-
- ignored_ref_types = {"#PWR", "#FLG"}
-
- for sch in kicad_sch:
- ref = sch.get("ref")
- ref_num = sch.get("ref-num")
- if not ref or not ref_num:
- logger.debug("Missing ref or ref-num")
- continue
-
- ref_type = sch.get("ref-type")
- if not ref_type:
- logger.debug("Missing ref-type")
- continue
-
- if ref_type in ignored_ref_types:
- continue
-
- c = output.create_object("component", ref)
- c.set("ref", ref)
- c.set("ref-num", ref_num)
- c.set("ref-type", ref_type)
-
- fp = sch.get("footprint")
- if fp:
- fp = map_footprint(fp)
- c.set("footprint", fp)
-
- c.set("mpn", sch.get("field-mpn"))
- c.set("distributor", sch.get("field-distributor"))
-
- def pcb_match(o):
- return o.object_type.name == "kicad-pcb-component" and \
- o.get("ref") == ref
-
- pcb = [o for o in in_ds.items() if pcb_match(o)]
-
- if not pcb:
- logger.info("Could not find PCB component for {}".format(ref))
-
- # TODO: check that the SCH and PCB footprint are the same
- # c.set("footprint", pcb.)
-
- return {
- "file_dep": [doit_config.dsm.cookie_for_ds(ds) for ds in in_data_sets],
- "actions": [action],
- "targets": [doit_config.dsm.cookie_for_ds(out_data_set)],
- }
-
-
-doit_config.set_data_sets_for(task_kicad_create_component_data_set, "components", "kicad-sch", "kicad-pcb")
-
-__all__ = [
- init.__name__,
-
- task_kicad_create_component_data_set.__name__,
- task_kicad_gerber.__name__,
- task_kicad_pcb_to_data_set.__name__,
- task_kicad_sch_to_data_set.__name__,
-]
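The module keeps its file locations in a module-level Config, so a dodo.py had to call init() before the tasks could run. A sketch against the pre-removal API (the file names are illustrative):

    # dodo.py -- illustrative wiring of the removed ee.kicad.doit tasks
    from ee.ds import DataSetManager

    import ee.kicad.doit
    from ee.kicad.doit import (task_kicad_gerber, task_kicad_sch_to_data_set,
                               task_kicad_pcb_to_data_set,
                               task_kicad_create_component_data_set)

    ee.kicad.doit.init(sch="board.sch",
                       kicad_pcb="board.kicad_pcb",
                       gerber_dir="gerber",
                       gerber_zip=None)  # None -> defaults to "<gerber_dir>.zip"
    ee.kicad.doit.doit_config.configure(data_set_manager=DataSetManager("data-sets"))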
diff --git a/src/ee/report/__init__.py b/src/ee/report/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/src/ee/report/__init__.py
+++ /dev/null
diff --git a/src/ee/report/doit.py b/src/ee/report/doit.py
deleted file mode 100644
index 5d28ed4..0000000
--- a/src/ee/report/doit.py
+++ /dev/null
@@ -1,61 +0,0 @@
-import logging
-from pathlib import Path
-
-from jinja2 import Environment, PackageLoader, select_autoescape
-
-from ee.doit import DoItConfig, Report
-
-logger = logging.getLogger(__name__)
-
-doit_config = DoItConfig()
-
-
-def _create_env():
- return Environment(
- loader=PackageLoader("ee.report.doit", "templates"),
- autoescape=select_autoescape(["html", "xml"])
- )
-
-
-def task_report_messages():
- doit_config.report_collection.add_report(Report(task_report_messages))
-
- def action():
- logger.debug("Generating messages report")
-
- data_sets = doit_config.dsm.all_data_sets
-
- logger.debug("Loading {} data sets".format(len(data_sets)))
-
- ds = doit_config.dsm.load_data_sets(data_sets)
-
- messages = [o for o in ds.items() if o.object_type.name == "message"]
- logger.debug("Found {} messages".format(len(messages)))
-
- report_dir = Path(doit_config.extra_config["report_dir"])
- report_dir.mkdir(exist_ok=True)
-
- with open(report_dir / "messages.rst", "w") as f:
- env = _create_env()
- template = env.get_template("messages.rst.j2")
- f.write(template.render(messages=messages))
-
- return {
- "actions": [action]
- }
-
-
-def task_make_reports():
- def action():
- report_dir = Path(doit_config.extra_config["report_dir"])
- report_dir.mkdir(exist_ok=True)
-
- with open(report_dir / "index.rst", "w") as f:
- env = _create_env()
- template = env.get_template("index.rst.j2")
- f.write(template.render())
-
- return {
- "actions": [action],
- "task_dep": [r.task.__name__[5:] for r in doit_config.report_collection.reports]
- }
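Report tasks register themselves in the shared ReportCollection, which task_make_reports turns into its task_dep list. A sketch of the wiring, assuming the pre-removal API (the report directory is illustrative):

    # dodo.py -- illustrative wiring of the removed ee.report.doit tasks
    from ee.doit import ReportCollection
    from ee.ds import DataSetManager

    import ee.report.doit
    from ee.report.doit import task_report_messages, task_make_reports

    ee.report.doit.doit_config.configure(
        data_set_manager=DataSetManager("data-sets"),
        report_collection=ReportCollection(),
        extra_config={"report_dir": "reports"})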
diff --git a/src/ee/report/templates/index.rst.j2 b/src/ee/report/templates/index.rst.j2
deleted file mode 100644
index 5eaf939..0000000
--- a/src/ee/report/templates/index.rst.j2
+++ /dev/null
@@ -1,4 +0,0 @@
-Reports
-=======
-
-* :doc:`messages`
diff --git a/src/ee/report/templates/messages.rst.j2 b/src/ee/report/templates/messages.rst.j2
deleted file mode 100644
index ca08c04..0000000
--- a/src/ee/report/templates/messages.rst.j2
+++ /dev/null
@@ -1,8 +0,0 @@
-Messages
-========
-
-There are {{ messages|count }} messages.
-
-{% for m in messages %}
-  Message: {{ m.get("level") }}
-{% endfor %}
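For two collected messages, this template renders roughly as follows (the levels are example values):

    Messages
    ========

    There are 2 messages.

      Message: error

      Message: warning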