author     Trygve Laugstøl <trygvis@inamo.no>    2019-03-15 07:58:06 +0100
committer  Trygve Laugstøl <trygvis@inamo.no>    2019-03-15 08:30:07 +0100
commit     b67aa2b41247991e361dec0963670b4e5108410a (patch)
tree       67591b0f4cc6e767d0097c1afc3f08ad40ee19ea
parent     8d17fb5bc4b0dae0758e01a44d77d87acf2e686a (diff)
o Merging XSD files into one.
-rw-r--r--  Makefile                                                   |    9
-rw-r--r--  demo/thirdparty/olinuxino/py/olinuxino.py                  |   12
-rw-r--r--  src/ee/digikey/normalize_facts.py                          |   14
-rw-r--r--  src/ee/digikey/search_parts.py                             |   32
-rw-r--r--  src/ee/element14/search_parts.py                           |    8
-rw-r--r--  src/ee/kicad/make_bom.py                                   |   28
-rw-r--r--  src/ee/order/__init__.py                                   |   16
-rw-r--r--  src/ee/part/__init__.py                                    |   20
-rw-r--r--  src/ee/part/create_distributor_search_list.py              |    4
-rw-r--r--  src/ee/tools/import_parts_yaml.py                          |    8
-rw-r--r--  src/ee/tools/init.py                                       |   10
-rw-r--r--  src/ee/xml/bom_file_utils.py                               |   23
-rw-r--r--  src/ee/xml/indexFile.py                                    | 1101
-rw-r--r--  src/ee/xml/types.py (renamed from src/ee/xml/bomFile.py)   |  298
-rw-r--r--  xsd/ee-index.xsd                                           |   24
-rw-r--r--  xsd/ee.xsd                                                 |  106
16 files changed, 473 insertions(+), 1240 deletions(-)
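The change below replaces the two generated modules, ee.xml.bomFile and ee.xml.indexFile, with a single generated module ee.xml.types (and merges xsd/ee-bom.xsd and xsd/ee-index.xsd into xsd/ee.xsd). A minimal usage sketch of the merged module, using only the class names and helper methods that appear in this diff; the id, part number and file name values are made-up placeholders:

    from ee.xml import types

    # BOM-side and index-side classes now come from the same generated module.
    part = types.Part(id="C1",
                      part_numbers=types.PartNumberList(),
                      facts=types.FactList())
    part.part_numbersProp.add_part_number(types.PartNumber(value="EXAMPLE-MPN"))  # placeholder value
    part.factsProp.add_fact(types.Fact(key="capacitance", value="100n"))

    idx = types.IndexFile(files=types.FileList())
    idx.filesProp.fileProp.append(types.File(path="C1.xml"))  # hypothetical file name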
diff --git a/Makefile b/Makefile
index c648bca..4321b5f 100644
--- a/Makefile
+++ b/Makefile
@@ -1,5 +1,5 @@
-XSDS = xsd/ee-bom.xsd xsd/ee-index.xsd
-XSD_PYS = src/ee/xml/bomFile.py src/ee/xml/indexFile.py
+XSD_PYS =
+XSD_PYS += src/ee/xml/types.py
all: env/pip.cookie $(XSD_PYS)
@@ -14,7 +14,7 @@ env/pip.cookie: requirements.txt env/bin/pip
src/ee/xml/__init__.py:
touch $@
-src/ee/xml/%File.py: xsd/ee-%.xsd
+define xsd-to-py
env/bin/generateDS.py \
-f \
--no-dates \
@@ -22,5 +22,8 @@ src/ee/xml/%File.py: xsd/ee-%.xsd
--output-directory=src/ee/xml \
-o $@ \
-m $<
+endef
$(XSDS): env/pip.cookie src/ee/xml/__init__.py
+src/ee/xml/types.py: xsd/ee.xsd
+ $(xsd-to-py)
diff --git a/demo/thirdparty/olinuxino/py/olinuxino.py b/demo/thirdparty/olinuxino/py/olinuxino.py
index cc33a81..2153613 100644
--- a/demo/thirdparty/olinuxino/py/olinuxino.py
+++ b/demo/thirdparty/olinuxino/py/olinuxino.py
@@ -2,12 +2,12 @@ import re
from ee.kicad import Component
from ee.kicad.make_bom import MakeBomStrategy
-from ee.xml import bomFile
+from ee.xml import types
from ee.xml.bom_file_utils import facts
class OlinuxinoMakeBomStrategy(MakeBomStrategy):
- def process_part(self, component: Component, part: bomFile.Part):
+ def process_part(self, component: Component, part: types.Part):
print(component.ref)
print(" value={}".format(component.value))
print(" footprint={}".format(component.footprint))
@@ -41,11 +41,11 @@ class OlinuxinoMakeBomStrategy(MakeBomStrategy):
print(" NA".format())
fs = facts(part, create=True)
- fs.add_fact(bomFile.Fact(key="capacitance", value=capacitance))
- fs.add_fact(bomFile.Fact(key="max_voltage", value=voltage))
- fs.add_fact(bomFile.Fact(key="tolerance", value=tolerance))
+ fs.add_fact(types.Fact(key="capacitance", value=capacitance))
+ fs.add_fact(types.Fact(key="max_voltage", value=voltage))
+ fs.add_fact(types.Fact(key="tolerance", value=tolerance))
if derating:
- fs.add_fact(bomFile.Fact(key="derating", value=derating))
+ fs.add_fact(types.Fact(key="derating", value=derating))
else:
print("FAIL")
diff --git a/src/ee/digikey/normalize_facts.py b/src/ee/digikey/normalize_facts.py
index 62aef0d..91e8c07 100644
--- a/src/ee/digikey/normalize_facts.py
+++ b/src/ee/digikey/normalize_facts.py
@@ -4,13 +4,13 @@ from typing import List, Tuple, Union, Mapping, Callable, Any
from ee import EeVal, EeException
from ee.part import PartDb, load_db, save_db
-from ee.xml import bomFile, uris
+from ee.xml import types, uris
__all__ = ["normalize_facts"]
# TODO: this should be moved to a generic normalizer
-def handle_tolerance(tolerance: bomFile.Fact, capacitance: bomFile.Fact) -> List[bomFile.Fact]:
+def handle_tolerance(tolerance: types.Fact, capacitance: types.Fact) -> List[types.Fact]:
cap_value = float(EeVal.parse(capacitance.valueProp))
s = tolerance.valueProp
@@ -62,7 +62,7 @@ def handle_tolerance(tolerance: bomFile.Fact, capacitance: bomFile.Fact) -> List
if low_pct and low_pct == high_pct:
facts.append(("tolerance-percent", "Tolerance (±%)", low_pct))
- return [bomFile.Fact(key=uris.make_fact_key(key), label=label, value=str(value)) for key, label, value in facts]
+ return [types.Fact(key=uris.make_fact_key(key), label=label, value=str(value)) for key, label, value in facts]
def re_parser(pattern, kis: List[Tuple[str, int]]):
@@ -119,12 +119,12 @@ def normalize_facts(in_dir: Path, out_dir: Path):
in_db = load_db(in_dir)
out_parts = PartDb()
- for part in in_db.iterparts(): # type: bomFile.Part
- fact_list: bomFile.FactList = part.factsProp
+ for part in in_db.iterparts(): # type: types.Part
+ fact_list: types.FactList = part.factsProp
if fact_list is None:
continue
- in_facts: List[bomFile.Fact] = fact_list.factProp
+ in_facts: List[types.Fact] = fact_list.factProp
out_facts = []
for f in in_facts:
@@ -145,7 +145,7 @@ def normalize_facts(in_dir: Path, out_dir: Path):
results = res
else:
results = [res]
- facts = [bomFile.Fact(key=uris.make_fact_key(key), value=value) for key, value in results]
+ facts = [types.Fact(key=uris.make_fact_key(key), value=value) for key, value in results]
out_facts.extend(facts)
if len(out_facts) == 0:
diff --git a/src/ee/digikey/search_parts.py b/src/ee/digikey/search_parts.py
index 61c5c1b..4203624 100644
--- a/src/ee/digikey/search_parts.py
+++ b/src/ee/digikey/search_parts.py
@@ -3,37 +3,37 @@ from typing import List
from ee.digikey import Digikey, DigikeyParser, DigikeyClient, SearchResponseTypes, DigikeyProduct
from ee.part import PartDb, load_db, save_db
-from ee.xml import bomFile, bom_file_utils
+from ee.xml import types, bom_file_utils
from ee.xml.uris import DIGIKEY_URI, make_digikey_fact_key
__all__ = ["search_parts"]
-def resolved(p: DigikeyProduct) -> bomFile.Part:
- part = bomFile.Part(id=p.part_number,
- distributor_info=bomFile.DistributorInfo(),
- facts=bomFile.FactList(),
- part_numbers=bomFile.PartNumberList())
+def resolved(p: DigikeyProduct) -> types.Part:
+ part = types.Part(id=p.part_number,
+ distributor_info=types.DistributorInfo(),
+ facts=types.FactList(),
+ part_numbers=types.PartNumberList())
part.distributor_infoProp.stateProp = "resolved"
part_numbers = part.part_numbersProp.part_numberProp
- part_numbers.append(bomFile.PartNumber(value=p.part_number, distributor=DIGIKEY_URI))
+ part_numbers.append(types.PartNumber(value=p.part_number, distributor=DIGIKEY_URI))
if p.mpn:
- part_numbers.append(bomFile.PartNumber(value=p.mpn))
- facts: List[bomFile.Fact] = part.factsProp.factProp
+ part_numbers.append(types.PartNumber(value=p.mpn))
+ facts: List[types.Fact] = part.factsProp.factProp
for a in p.attributes:
key = make_digikey_fact_key(a.attribute_type.id)
- facts.append(bomFile.Fact(key=key, label=a.attribute_type.label, value=a.value))
+ facts.append(types.Fact(key=key, label=a.attribute_type.label, value=a.value))
if len(p.price_breaks):
- part.price_breaksProp = bomFile.PriceBreakList()
+ part.price_breaksProp = types.PriceBreakList()
- price_breaks: List[bomFile.PriceBreak] = part.price_breaksProp.price_break
+ price_breaks: List[types.PriceBreak] = part.price_breaksProp.price_break
for pb in p.price_breaks:
- amount = bomFile.Amount(value=str(pb.per_piece_price.amount), currency=pb.per_piece_price.currency)
- price_breaks.append(bomFile.PriceBreak(pb.quantity, amount=amount))
+ amount = types.Amount(value=str(pb.per_piece_price.amount), currency=pb.per_piece_price.currency)
+ price_breaks.append(types.PriceBreak(pb.quantity, amount=amount))
return part
@@ -65,8 +65,8 @@ def search_parts(in_dir: Path, out_dir: Path, cache_dir: Path, force_refresh: bo
continue
out_id = query
- out_part = bomFile.Part(id=out_id,
- distributor_info=bomFile.DistributorInfo(),
+ out_part = types.Part(id=out_id,
+ distributor_info=types.DistributorInfo(),
part_numbers=part.part_numbersProp)
di = out_part.distributor_infoProp
diff --git a/src/ee/element14/search_parts.py b/src/ee/element14/search_parts.py
index 724485c..32abec1 100644
--- a/src/ee/element14/search_parts.py
+++ b/src/ee/element14/search_parts.py
@@ -2,7 +2,7 @@ from pathlib import Path
from ee.element14 import *
from ee.part import PartDb, load_db, save_db
-from ee.xml import bom_file_utils, bomFile
+from ee.xml import bom_file_utils, types
__all__ = ["search_parts"]
@@ -22,9 +22,9 @@ def search_parts(in_dir: Path, out_dir: Path, cache_dir: Path, config: Element14
client.search(term="manuPartNum:" + query)
- out_part = bomFile.Part(id=out_id,
- distributor_info=bomFile.DistributorInfo(),
- part_numbers=part.part_numbersProp)
+ out_part = types.Part(id=out_id,
+ distributor_info=types.DistributorInfo(),
+ part_numbers=part.part_numbersProp)
di = out_part.distributor_infoProp
print("Saving {} work parts".format(out_parts.size()))
diff --git a/src/ee/kicad/make_bom.py b/src/ee/kicad/make_bom.py
index 00e9ae3..7dd5532 100644
--- a/src/ee/kicad/make_bom.py
+++ b/src/ee/kicad/make_bom.py
@@ -9,7 +9,7 @@ from ee.kicad.model import Component
from ee.kicad.read_schematic import read_schematics
from ee.kicad.to_bom import to_bom, to_bom_xml
from ee.part import PartDb, save_db
-from ee.xml import bomFile, uris
+from ee.xml import types, uris
__all__ = [
"StrategyCallable",
@@ -21,23 +21,23 @@ __all__ = [
"make_bom",
]
-StrategyCallable = Callable[[Component, bomFile.Part], Optional[bomFile.Part]]
+StrategyCallable = Callable[[Component, types.Part], Optional[types.Part]]
-def apply_strategies(c: Component, part: bomFile.Part, strategies: List[StrategyCallable]):
+def apply_strategies(c: Component, part: types.Part, strategies: List[StrategyCallable]):
for strategy in strategies:
part = strategy(c, part)
if part is None:
return
- if not isinstance(part, bomFile.Part):
- raise EeException("Values returned from strategy must be a bomFile.Part, got {}".format(type(part)))
+ if not isinstance(part, types.Part):
+ raise EeException("Values returned from strategy must be a types.Part, got {}".format(type(part)))
return part
-def part_type_strategy(component: Component, part: bomFile.Part) -> bomFile.Part:
+def part_type_strategy(component: Component, part: types.Part) -> types.Part:
fp = component.footprint
if fp is None:
return part
@@ -58,23 +58,23 @@ def part_type_strategy(component: Component, part: bomFile.Part) -> bomFile.Part
return part
-def mpn_strategy(component: Component, part: bomFile.Part) -> bomFile.Part:
+def mpn_strategy(component: Component, part: types.Part) -> types.Part:
mpn = component.get_field("mpn")
if mpn is not None:
- pn = bomFile.PartNumber(value=mpn.value)
+ pn = types.PartNumber(value=mpn.value)
part.part_numbersProp.add_part_number(pn)
return part
def dpn_strategy_factory(dpn_mappings: Mapping[str, str]) -> StrategyCallable:
- def dpn_strategy(component: Component, part: bomFile.Part) -> bomFile.Part:
+ def dpn_strategy(component: Component, part: types.Part) -> types.Part:
for field_name, distributor in dpn_mappings:
s = component.get_field(field_name)
if s is None:
continue
- pn = bomFile.PartNumber(value=s.value, distributor=distributor)
+ pn = types.PartNumber(value=s.value, distributor=distributor)
part.part_numbersProp.add_part_number(pn)
return part
@@ -91,10 +91,10 @@ class MakeBomStrategy():
dpn_strategy_factory(self.dpn_mappings),
]
- def process_part(self, component: Component, part: bomFile.Part):
+ def process_part(self, component: Component, part: types.Part):
return self.default_process_part(component, part)
- def default_process_part(self, component: Component, part: bomFile.Part):
+ def default_process_part(self, component: Component, part: types.Part):
return apply_strategies(component, part, self.default_strategies)
@@ -111,9 +111,9 @@ def work(sch, out: Path, strategy: MakeBomStrategy, new_mode, pretty):
parts = PartDb()
components = to_bom(sch)
for c in components:
- part = bomFile.Part(id=c.ref)
+ part = types.Part(id=c.ref)
part.schema_reference = c.ref
- part.part_numbersProp = bomFile.PartNumberList()
+ part.part_numbersProp = types.PartNumberList()
part = strategy.process_part(c, part)
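For reference, the strategy contract in make_bom.py is unchanged by the rename: each strategy takes a (Component, types.Part) pair and returns a Part or None, and apply_strategies stops the chain as soon as a strategy returns None. A small sketch with two hypothetical strategies (the names and the test-point rule are illustrative, not part of this commit):

    from ee.kicad.make_bom import apply_strategies
    from ee.xml import types

    def drop_test_points(component, part):
        # Hypothetical rule: returning None drops the component from the BOM.
        return None if component.ref.startswith("TP") else part

    def mpn_from_field(component, part):
        mpn = component.get_field("mpn")
        if mpn is not None:
            part.part_numbersProp.add_part_number(types.PartNumber(value=mpn.value))
        return part

    # apply_strategies(component, part, [drop_test_points, mpn_from_field])
    # returns the (possibly modified) part, or None if any strategy rejected it.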
diff --git a/src/ee/order/__init__.py b/src/ee/order/__init__.py
index 7c815b4..368ea7a 100644
--- a/src/ee/order/__init__.py
+++ b/src/ee/order/__init__.py
@@ -3,18 +3,18 @@ from pathlib import Path
from typing import List, Tuple
from ee.part import PartDb, load_db, save_db
-from ee.xml import bomFile, bom_file_utils
+from ee.xml import types, bom_file_utils
__all__ = ["create_order"]
@total_ordering
class PartInfo(object):
- def __init__(self, part: bomFile.Part):
+ def __init__(self, part: types.Part):
self.part = part
self.id = part.id
self.pn = bom_file_utils.find_pn(part)
- self.available_from: List[Tuple[str, bomFile.Part]] = []
+ self.available_from: List[Tuple[str, types.Part]] = []
def __lt__(self, other: "PartInfo"):
return self.part.idProp == other.part.idProp
@@ -41,10 +41,10 @@ def create_order(schematic_dir: Path, out_dir: Path, part_db_dirs: List[Path], f
info.available_from.append((distributor, p))
for sch_pn_ in bom_file_utils.part_numbers(info.part):
- sch_pn: bomFile.PartNumber = sch_pn_
+ sch_pn: types.PartNumber = sch_pn_
for p_pn_ in bom_file_utils.part_numbers(p):
- p_pn: bomFile.PartNumber = p_pn_
+ p_pn: types.PartNumber = p_pn_
if sch_pn.distributorProp == p_pn.distributorProp and sch_pn.value == p_pn.value:
if p.idProp not in info.available_from:
@@ -69,9 +69,9 @@ def create_order(schematic_dir: Path, out_dir: Path, part_db_dirs: List[Path], f
return False
for info in infos:
- part = bomFile.Part(id=info.part.id,
+ part = types.Part(id=info.part.id,
schema_reference=info.part.schema_reference,
- part_numbers=bomFile.PartNumberList())
+ part_numbers=types.PartNumberList())
part_numbers = part.part_numbersProp.part_number
@@ -80,7 +80,7 @@ def create_order(schematic_dir: Path, out_dir: Path, part_db_dirs: List[Path], f
distributor, distributor_part = info.available_from[0]
- part_numbers.append(bomFile.PartNumber(value=distributor_part.id, distributor=distributor))
+ part_numbers.append(types.PartNumber(value=distributor_part.id, distributor=distributor))
out_parts.add_entry(part, True)
diff --git a/src/ee/part/__init__.py b/src/ee/part/__init__.py
index 26bc26c..27b6619 100644
--- a/src/ee/part/__init__.py
+++ b/src/ee/part/__init__.py
@@ -2,7 +2,7 @@ from pathlib import Path
from typing import List, MutableMapping, Optional, Iterator
from ee import EeException
-from ee.xml import bomFile, indexFile
+from ee.xml import types
from ee.xml.bom_file_utils import find_pn, find_dpn, find_root_tag
__all__ = [
@@ -13,7 +13,7 @@ __all__ = [
class Entry(object):
- def __init__(self, new: bool, part: bomFile.Part):
+ def __init__(self, new: bool, part: types.Part):
self.new = new
self.part = part
@@ -30,7 +30,7 @@ class PartDb(object):
self.dpn_indexes = {} # type: MutableMapping[str, MutableMapping[str, Entry]]
self.new_entries = 0
- def add_entry(self, part: bomFile.Part, new: bool):
+ def add_entry(self, part: types.Part, new: bool):
e = Entry(new, part)
self.parts.append(e)
@@ -40,18 +40,18 @@ class PartDb(object):
if e.new:
self.new_entries = self.new_entries + 1
- def iterparts(self, sort=False) -> Iterator[bomFile.Part]:
+ def iterparts(self, sort=False) -> Iterator[types.Part]:
it = (e.part for e in self.parts)
return sorted(it, key=lambda p: p.idProp) if sort else it
def size(self) -> int:
return len(self.parts)
- def find_by_pn(self, pn: str) -> Optional[bomFile.Part]:
+ def find_by_pn(self, pn: str) -> Optional[types.Part]:
entry = self.pn_index.get(pn, None)
return entry.part if entry else None
- def find_by_dpn(self, distributor: str, pn: str) -> bomFile.Part:
+ def find_by_dpn(self, distributor: str, pn: str) -> types.Part:
idx = self.dpn_indexes.get(distributor)
if idx is None:
@@ -69,7 +69,7 @@ def load_db(dir_path: Path) -> PartDb:
if not file.is_file() or not file.name.endswith(".xml") or file.name == "index.xml":
continue
- part = bomFile.parse(str(file), silence=True) # type: bomFile.Part
+ part = types.parse(str(file), silence=True) # type: types.Part
db.add_entry(part, False)
return db
@@ -94,8 +94,8 @@ def save_db(dir_path: Path, db: PartDb):
dir_path.mkdir(parents=True, exist_ok=True)
- idx = indexFile.IndexFile()
- idx.filesProp = indexFile.FileList()
+ idx = types.IndexFile()
+ idx.filesProp = types.FileList()
files = idx.filesProp.fileProp
parts = db.iterparts()
@@ -107,7 +107,7 @@ def save_db(dir_path: Path, db: PartDb):
with path.open("w") as f:
part.export(outfile=f, level=0, name_=find_root_tag(part))
- files.append(indexFile.File(path=str(path)))
+ files.append(types.File(path=str(path)))
with (dir_path / "index.xml").open("w") as f:
idx.export(f, level=0, name_=find_root_tag(idx))
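A round-trip sketch for the part database helpers above, assuming only the signatures visible in this diff (PartDb, save_db, load_db, iterparts); the directory name is arbitrary:

    from pathlib import Path

    from ee.part import PartDb, load_db, save_db
    from ee.xml import types

    db = PartDb()
    db.add_entry(types.Part(id="C1", part_numbers=types.PartNumberList()), True)  # new entry

    out_dir = Path("parts-db")   # arbitrary directory name
    save_db(out_dir, db)         # writes one XML file per part plus index.xml (types.IndexFile)

    for p in load_db(out_dir).iterparts(sort=True):
        print(p.idProp)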
diff --git a/src/ee/part/create_distributor_search_list.py b/src/ee/part/create_distributor_search_list.py
index cfbe0d5..bd5f69e 100644
--- a/src/ee/part/create_distributor_search_list.py
+++ b/src/ee/part/create_distributor_search_list.py
@@ -1,7 +1,7 @@
from pathlib import Path
from ee.part import PartDb, load_db, save_db
-from ee.xml import bomFile
+from ee.xml import types
from ee.xml.bom_file_utils import *
__all__ = ["create_distributor_search_list"]
@@ -27,7 +27,7 @@ def create_distributor_search_list(in_dir: Path, out_dir: Path):
if entry is not None:
continue
- new_part = bomFile.Part(id=pn_value)
+ new_part = types.Part(id=pn_value)
new_part.part_numbersProp = part.part_numbersProp
out_parts.add_entry(new_part, True)
diff --git a/src/ee/tools/import_parts_yaml.py b/src/ee/tools/import_parts_yaml.py
index 81a23f8..d13907e 100644
--- a/src/ee/tools/import_parts_yaml.py
+++ b/src/ee/tools/import_parts_yaml.py
@@ -4,7 +4,7 @@ import yaml
from pathlib import Path
from ee.part import PartDb, save_db
-from ee.xml import bomFile
+from ee.xml import types
def import_parts_yaml(in_file: Path, out_dir: Path):
@@ -20,15 +20,15 @@ def import_parts_yaml(in_file: Path, out_dir: Path):
print("Bad YAML document, each part must be a dict", file=sys.stderr)
return
- part_number_list = bomFile.PartNumberList()
+ part_number_list = types.PartNumberList()
mpn = item.get("mpn")
assert isinstance(mpn, str)
- part_number_list.part_number.append(bomFile.PartNumber(value=mpn))
+ part_number_list.part_number.append(types.PartNumber(value=mpn))
id_ = mpn
- part = bomFile.Part(id=id_)
+ part = types.Part(id=id_)
part.part_numbersProp = part_number_list
parts.add_entry(part, True)
diff --git a/src/ee/tools/init.py b/src/ee/tools/init.py
new file mode 100644
index 0000000..a3e0472
--- /dev/null
+++ b/src/ee/tools/init.py
@@ -0,0 +1,10 @@
+import argparse
+from pathlib import Path
+
+from ee.project import init
+
+parser = argparse.ArgumentParser()
+
+args = parser.parse_args()
+
+init(Path("."))
diff --git a/src/ee/xml/bom_file_utils.py b/src/ee/xml/bom_file_utils.py
index 063a7bd..59a9b0e 100644
--- a/src/ee/xml/bom_file_utils.py
+++ b/src/ee/xml/bom_file_utils.py
@@ -1,6 +1,6 @@
from typing import List, Optional
-from ee.xml import bomFile, indexFile
+from ee.xml import types
__all__ = [
"facts",
@@ -11,16 +11,11 @@ __all__ = [
def find_root_tag(root):
- tag = next((tag for tag, klass in bomFile.GDSClassesMapping.items() if klass == type(root)), None)
+ return next((tag for tag, klass in types.GDSClassesMapping.items() if klass == type(root)), None)
- if tag is not None:
- return tag
- return next((tag for tag, klass in indexFile.GDSClassesMapping.items() if klass == type(root)))
-
-
-def part_numbers(part: bomFile.Part) -> List[bomFile.PartNumber]:
- pns = part.part_numbersProp # type: bomFile.PartNumberList
+def part_numbers(part: types.Part) -> List[types.PartNumber]:
+ pns = part.part_numbersProp # type: types.PartNumberList
if pns is None:
return []
@@ -28,20 +23,20 @@ def part_numbers(part: bomFile.Part) -> List[bomFile.PartNumber]:
return pns.part_numberProp
-def find_pn(part: bomFile.Part) -> Optional[str]:
+def find_pn(part: types.Part) -> Optional[str]:
for pn in part_numbers(part):
if pn.distributor is None:
return pn.value
-def find_dpn(part: bomFile.Part, distributor: str) -> Optional[str]:
+def find_dpn(part: types.Part, distributor: str) -> Optional[str]:
for pn in part_numbers(part):
if pn.distributor == distributor:
return pn.value
-def facts(part: bomFile.Part, create=False) -> Optional[bomFile.FactList]:
- fs: bomFile.FactList = part.factsProp
+def facts(part: types.Part, create=False) -> Optional[types.FactList]:
+ fs: types.FactList = part.factsProp
if fs is not None:
return fs
@@ -49,6 +44,6 @@ def facts(part: bomFile.Part, create=False) -> Optional[bomFile.FactList]:
if not create:
return
- fs = bomFile.FactList()
+ fs = types.FactList()
part.factsProp = fs
return fs
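Usage sketch for the helpers in bom_file_utils with the merged module; the part number values are placeholders, and DIGIKEY_URI is the constant imported elsewhere in this diff:

    from ee.xml import types
    from ee.xml.bom_file_utils import facts, find_pn, find_dpn, part_numbers
    from ee.xml.uris import DIGIKEY_URI

    part = types.Part(id="C1", part_numbers=types.PartNumberList())
    part.part_numbersProp.add_part_number(types.PartNumber(value="EXAMPLE-MPN"))
    part.part_numbersProp.add_part_number(
        types.PartNumber(value="EXAMPLE-DPN", distributor=DIGIKEY_URI))

    print(find_pn(part))                 # part number with no distributor set
    print(find_dpn(part, DIGIKEY_URI))   # distributor-specific part number
    print(len(part_numbers(part)))       # -> 2

    fs = facts(part, create=True)        # creates and attaches a FactList on demand
    fs.add_fact(types.Fact(key="tolerance", value="10%"))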
diff --git a/src/ee/xml/indexFile.py b/src/ee/xml/indexFile.py
deleted file mode 100644
index d460db3..0000000
--- a/src/ee/xml/indexFile.py
+++ /dev/null
@@ -1,1101 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-#
-# Generated by generateDS.py.
-# Python 3.7.2+ (default, Feb 2 2019, 14:31:48) [GCC 8.2.0]
-#
-# Command line options:
-# ('-f', '')
-# ('--no-dates', '')
-# ('--no-versions', '')
-# ('--output-directory', 'src/ee/xml')
-# ('-o', 'src/ee/xml/indexFile.py')
-# ('-m', '')
-#
-# Command line arguments:
-# xsd/ee-index.xsd
-#
-# Command line:
-# env/bin/generateDS.py -f --no-dates --no-versions --output-directory="src/ee/xml" -o "src/ee/xml/indexFile.py" -m xsd/ee-index.xsd
-#
-# Current working directory (os.getcwd()):
-# ee-python
-#
-
-import sys
-import re as re_
-import base64
-import datetime as datetime_
-import warnings as warnings_
-try:
- from lxml import etree as etree_
-except ImportError:
- from xml.etree import ElementTree as etree_
-
-
-Validate_simpletypes_ = True
-if sys.version_info.major == 2:
- BaseStrType_ = basestring
-else:
- BaseStrType_ = str
-
-
-def parsexml_(infile, parser=None, **kwargs):
- if parser is None:
- # Use the lxml ElementTree compatible parser so that, e.g.,
- # we ignore comments.
- try:
- parser = etree_.ETCompatXMLParser()
- except AttributeError:
- # fallback to xml.etree
- parser = etree_.XMLParser()
- doc = etree_.parse(infile, parser=parser, **kwargs)
- return doc
-
-def parsexmlstring_(instring, parser=None, **kwargs):
- if parser is None:
- # Use the lxml ElementTree compatible parser so that, e.g.,
- # we ignore comments.
- try:
- parser = etree_.ETCompatXMLParser()
- except AttributeError:
- # fallback to xml.etree
- parser = etree_.XMLParser()
- element = etree_.fromstring(instring, parser=parser, **kwargs)
- return element
-
-#
-# Namespace prefix definition table (and other attributes, too)
-#
-# The module generatedsnamespaces, if it is importable, must contain
-# a dictionary named GeneratedsNamespaceDefs. This Python dictionary
-# should map element type names (strings) to XML schema namespace prefix
-# definitions. The export method for any class for which there is
-# a namespace prefix definition, will export that definition in the
-# XML representation of that element. See the export method of
-# any generated element type class for a example of the use of this
-# table.
-# A sample table is:
-#
-# # File: generatedsnamespaces.py
-#
-# GenerateDSNamespaceDefs = {
-# "ElementtypeA": "http://www.xxx.com/namespaceA",
-# "ElementtypeB": "http://www.xxx.com/namespaceB",
-# }
-#
-
-try:
- from generatedsnamespaces import GenerateDSNamespaceDefs as GenerateDSNamespaceDefs_
-except ImportError:
- GenerateDSNamespaceDefs_ = {}
-
-#
-# The root super-class for element type classes
-#
-# Calls to the methods in these classes are generated by generateDS.py.
-# You can replace these methods by re-implementing the following class
-# in a module named generatedssuper.py.
-
-try:
- from generatedssuper import GeneratedsSuper
-except ImportError as exp:
-
- class GeneratedsSuper(object):
- tzoff_pattern = re_.compile(r'(\+|-)((0\d|1[0-3]):[0-5]\d|14:00)$')
- class _FixedOffsetTZ(datetime_.tzinfo):
- def __init__(self, offset, name):
- self.__offset = datetime_.timedelta(minutes=offset)
- self.__name = name
- def utcoffset(self, dt):
- return self.__offset
- def tzname(self, dt):
- return self.__name
- def dst(self, dt):
- return None
- def gds_format_string(self, input_data, input_name=''):
- return input_data
- def gds_validate_string(self, input_data, node=None, input_name=''):
- if not input_data:
- return ''
- else:
- return input_data
- def gds_format_base64(self, input_data, input_name=''):
- return base64.b64encode(input_data)
- def gds_validate_base64(self, input_data, node=None, input_name=''):
- return input_data
- def gds_format_integer(self, input_data, input_name=''):
- return '%d' % input_data
- def gds_validate_integer(self, input_data, node=None, input_name=''):
- return input_data
- def gds_format_integer_list(self, input_data, input_name=''):
- return '%s' % ' '.join(input_data)
- def gds_validate_integer_list(
- self, input_data, node=None, input_name=''):
- values = input_data.split()
- for value in values:
- try:
- int(value)
- except (TypeError, ValueError):
- raise_parse_error(node, 'Requires sequence of integers')
- return values
- def gds_format_float(self, input_data, input_name=''):
- return ('%.15f' % input_data).rstrip('0')
- def gds_validate_float(self, input_data, node=None, input_name=''):
- return input_data
- def gds_format_float_list(self, input_data, input_name=''):
- return '%s' % ' '.join(input_data)
- def gds_validate_float_list(
- self, input_data, node=None, input_name=''):
- values = input_data.split()
- for value in values:
- try:
- float(value)
- except (TypeError, ValueError):
- raise_parse_error(node, 'Requires sequence of floats')
- return values
- def gds_format_double(self, input_data, input_name=''):
- return '%e' % input_data
- def gds_validate_double(self, input_data, node=None, input_name=''):
- return input_data
- def gds_format_double_list(self, input_data, input_name=''):
- return '%s' % ' '.join(input_data)
- def gds_validate_double_list(
- self, input_data, node=None, input_name=''):
- values = input_data.split()
- for value in values:
- try:
- float(value)
- except (TypeError, ValueError):
- raise_parse_error(node, 'Requires sequence of doubles')
- return values
- def gds_format_boolean(self, input_data, input_name=''):
- return ('%s' % input_data).lower()
- def gds_validate_boolean(self, input_data, node=None, input_name=''):
- return input_data
- def gds_format_boolean_list(self, input_data, input_name=''):
- return '%s' % ' '.join(input_data)
- def gds_validate_boolean_list(
- self, input_data, node=None, input_name=''):
- values = input_data.split()
- for value in values:
- if value not in ('true', '1', 'false', '0', ):
- raise_parse_error(
- node,
- 'Requires sequence of booleans '
- '("true", "1", "false", "0")')
- return values
- def gds_validate_datetime(self, input_data, node=None, input_name=''):
- return input_data
- def gds_format_datetime(self, input_data, input_name=''):
- if input_data.microsecond == 0:
- _svalue = '%04d-%02d-%02dT%02d:%02d:%02d' % (
- input_data.year,
- input_data.month,
- input_data.day,
- input_data.hour,
- input_data.minute,
- input_data.second,
- )
- else:
- _svalue = '%04d-%02d-%02dT%02d:%02d:%02d.%s' % (
- input_data.year,
- input_data.month,
- input_data.day,
- input_data.hour,
- input_data.minute,
- input_data.second,
- ('%f' % (float(input_data.microsecond) / 1000000))[2:],
- )
- if input_data.tzinfo is not None:
- tzoff = input_data.tzinfo.utcoffset(input_data)
- if tzoff is not None:
- total_seconds = tzoff.seconds + (86400 * tzoff.days)
- if total_seconds == 0:
- _svalue += 'Z'
- else:
- if total_seconds < 0:
- _svalue += '-'
- total_seconds *= -1
- else:
- _svalue += '+'
- hours = total_seconds // 3600
- minutes = (total_seconds - (hours * 3600)) // 60
- _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
- return _svalue
- @classmethod
- def gds_parse_datetime(cls, input_data):
- tz = None
- if input_data[-1] == 'Z':
- tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
- input_data = input_data[:-1]
- else:
- results = GeneratedsSuper.tzoff_pattern.search(input_data)
- if results is not None:
- tzoff_parts = results.group(2).split(':')
- tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
- if results.group(1) == '-':
- tzoff *= -1
- tz = GeneratedsSuper._FixedOffsetTZ(
- tzoff, results.group(0))
- input_data = input_data[:-6]
- time_parts = input_data.split('.')
- if len(time_parts) > 1:
- micro_seconds = int(float('0.' + time_parts[1]) * 1000000)
- input_data = '%s.%s' % (
- time_parts[0], "{}".format(micro_seconds).rjust(6, "0"), )
- dt = datetime_.datetime.strptime(
- input_data, '%Y-%m-%dT%H:%M:%S.%f')
- else:
- dt = datetime_.datetime.strptime(
- input_data, '%Y-%m-%dT%H:%M:%S')
- dt = dt.replace(tzinfo=tz)
- return dt
- def gds_validate_date(self, input_data, node=None, input_name=''):
- return input_data
- def gds_format_date(self, input_data, input_name=''):
- _svalue = '%04d-%02d-%02d' % (
- input_data.year,
- input_data.month,
- input_data.day,
- )
- try:
- if input_data.tzinfo is not None:
- tzoff = input_data.tzinfo.utcoffset(input_data)
- if tzoff is not None:
- total_seconds = tzoff.seconds + (86400 * tzoff.days)
- if total_seconds == 0:
- _svalue += 'Z'
- else:
- if total_seconds < 0:
- _svalue += '-'
- total_seconds *= -1
- else:
- _svalue += '+'
- hours = total_seconds // 3600
- minutes = (total_seconds - (hours * 3600)) // 60
- _svalue += '{0:02d}:{1:02d}'.format(
- hours, minutes)
- except AttributeError:
- pass
- return _svalue
- @classmethod
- def gds_parse_date(cls, input_data):
- tz = None
- if input_data[-1] == 'Z':
- tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
- input_data = input_data[:-1]
- else:
- results = GeneratedsSuper.tzoff_pattern.search(input_data)
- if results is not None:
- tzoff_parts = results.group(2).split(':')
- tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
- if results.group(1) == '-':
- tzoff *= -1
- tz = GeneratedsSuper._FixedOffsetTZ(
- tzoff, results.group(0))
- input_data = input_data[:-6]
- dt = datetime_.datetime.strptime(input_data, '%Y-%m-%d')
- dt = dt.replace(tzinfo=tz)
- return dt.date()
- def gds_validate_time(self, input_data, node=None, input_name=''):
- return input_data
- def gds_format_time(self, input_data, input_name=''):
- if input_data.microsecond == 0:
- _svalue = '%02d:%02d:%02d' % (
- input_data.hour,
- input_data.minute,
- input_data.second,
- )
- else:
- _svalue = '%02d:%02d:%02d.%s' % (
- input_data.hour,
- input_data.minute,
- input_data.second,
- ('%f' % (float(input_data.microsecond) / 1000000))[2:],
- )
- if input_data.tzinfo is not None:
- tzoff = input_data.tzinfo.utcoffset(input_data)
- if tzoff is not None:
- total_seconds = tzoff.seconds + (86400 * tzoff.days)
- if total_seconds == 0:
- _svalue += 'Z'
- else:
- if total_seconds < 0:
- _svalue += '-'
- total_seconds *= -1
- else:
- _svalue += '+'
- hours = total_seconds // 3600
- minutes = (total_seconds - (hours * 3600)) // 60
- _svalue += '{0:02d}:{1:02d}'.format(hours, minutes)
- return _svalue
- def gds_validate_simple_patterns(self, patterns, target):
- # pat is a list of lists of strings/patterns.
- # The target value must match at least one of the patterns
- # in order for the test to succeed.
- found1 = True
- for patterns1 in patterns:
- found2 = False
- for patterns2 in patterns1:
- mo = re_.search(patterns2, target)
- if mo is not None and len(mo.group(0)) == len(target):
- found2 = True
- break
- if not found2:
- found1 = False
- break
- return found1
- @classmethod
- def gds_parse_time(cls, input_data):
- tz = None
- if input_data[-1] == 'Z':
- tz = GeneratedsSuper._FixedOffsetTZ(0, 'UTC')
- input_data = input_data[:-1]
- else:
- results = GeneratedsSuper.tzoff_pattern.search(input_data)
- if results is not None:
- tzoff_parts = results.group(2).split(':')
- tzoff = int(tzoff_parts[0]) * 60 + int(tzoff_parts[1])
- if results.group(1) == '-':
- tzoff *= -1
- tz = GeneratedsSuper._FixedOffsetTZ(
- tzoff, results.group(0))
- input_data = input_data[:-6]
- if len(input_data.split('.')) > 1:
- dt = datetime_.datetime.strptime(input_data, '%H:%M:%S.%f')
- else:
- dt = datetime_.datetime.strptime(input_data, '%H:%M:%S')
- dt = dt.replace(tzinfo=tz)
- return dt.time()
- def gds_str_lower(self, instring):
- return instring.lower()
- def get_path_(self, node):
- path_list = []
- self.get_path_list_(node, path_list)
- path_list.reverse()
- path = '/'.join(path_list)
- return path
- Tag_strip_pattern_ = re_.compile(r'\{.*\}')
- def get_path_list_(self, node, path_list):
- if node is None:
- return
- tag = GeneratedsSuper.Tag_strip_pattern_.sub('', node.tag)
- if tag:
- path_list.append(tag)
- self.get_path_list_(node.getparent(), path_list)
- def get_class_obj_(self, node, default_class=None):
- class_obj1 = default_class
- if 'xsi' in node.nsmap:
- classname = node.get('{%s}type' % node.nsmap['xsi'])
- if classname is not None:
- names = classname.split(':')
- if len(names) == 2:
- classname = names[1]
- class_obj2 = globals().get(classname)
- if class_obj2 is not None:
- class_obj1 = class_obj2
- return class_obj1
- def gds_build_any(self, node, type_name=None):
- return None
- @classmethod
- def gds_reverse_node_mapping(cls, mapping):
- return dict(((v, k) for k, v in mapping.items()))
- @staticmethod
- def gds_encode(instring):
- if sys.version_info.major == 2:
- if ExternalEncoding:
- encoding = ExternalEncoding
- else:
- encoding = 'utf-8'
- return instring.encode(encoding)
- else:
- return instring
- @staticmethod
- def convert_unicode(instring):
- if isinstance(instring, str):
- result = quote_xml(instring)
- elif sys.version_info.major == 2 and isinstance(instring, unicode):
- result = quote_xml(instring).encode('utf8')
- else:
- result = GeneratedsSuper.gds_encode(str(instring))
- return result
- def __eq__(self, other):
- if type(self) != type(other):
- return False
- return self.__dict__ == other.__dict__
- def __ne__(self, other):
- return not self.__eq__(other)
-
- def getSubclassFromModule_(module, class_):
- '''Get the subclass of a class from a specific module.'''
- name = class_.__name__ + 'Sub'
- if hasattr(module, name):
- return getattr(module, name)
- else:
- return None
-
-
-#
-# If you have installed IPython you can uncomment and use the following.
-# IPython is available from http://ipython.scipy.org/.
-#
-
-## from IPython.Shell import IPShellEmbed
-## args = ''
-## ipshell = IPShellEmbed(args,
-## banner = 'Dropping into IPython',
-## exit_msg = 'Leaving Interpreter, back to program.')
-
-# Then use the following line where and when you want to drop into the
-# IPython shell:
-# ipshell('<some message> -- Entering ipshell.\nHit Ctrl-D to exit')
-
-#
-# Globals
-#
-
-ExternalEncoding = ''
-Tag_pattern_ = re_.compile(r'({.*})?(.*)')
-String_cleanup_pat_ = re_.compile(r"[\n\r\s]+")
-Namespace_extract_pat_ = re_.compile(r'{(.*)}(.*)')
-CDATA_pattern_ = re_.compile(r"<!\[CDATA\[.*?\]\]>", re_.DOTALL)
-
-# Change this to redirect the generated superclass module to use a
-# specific subclass module.
-CurrentSubclassModule_ = None
-
-#
-# Support/utility functions.
-#
-
-
-def showIndent(outfile, level, pretty_print=True):
- if pretty_print:
- for idx in range(level):
- outfile.write(' ')
-
-
-def quote_xml(inStr):
- "Escape markup chars, but do not modify CDATA sections."
- if not inStr:
- return ''
- s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
- s2 = ''
- pos = 0
- matchobjects = CDATA_pattern_.finditer(s1)
- for mo in matchobjects:
- s3 = s1[pos:mo.start()]
- s2 += quote_xml_aux(s3)
- s2 += s1[mo.start():mo.end()]
- pos = mo.end()
- s3 = s1[pos:]
- s2 += quote_xml_aux(s3)
- return s2
-
-
-def quote_xml_aux(inStr):
- s1 = inStr.replace('&', '&amp;')
- s1 = s1.replace('<', '&lt;')
- s1 = s1.replace('>', '&gt;')
- return s1
-
-
-def quote_attrib(inStr):
- s1 = (isinstance(inStr, BaseStrType_) and inStr or '%s' % inStr)
- s1 = s1.replace('&', '&amp;')
- s1 = s1.replace('<', '&lt;')
- s1 = s1.replace('>', '&gt;')
- if '"' in s1:
- if "'" in s1:
- s1 = '"%s"' % s1.replace('"', "&quot;")
- else:
- s1 = "'%s'" % s1
- else:
- s1 = '"%s"' % s1
- return s1
-
-
-def quote_python(inStr):
- s1 = inStr
- if s1.find("'") == -1:
- if s1.find('\n') == -1:
- return "'%s'" % s1
- else:
- return "'''%s'''" % s1
- else:
- if s1.find('"') != -1:
- s1 = s1.replace('"', '\\"')
- if s1.find('\n') == -1:
- return '"%s"' % s1
- else:
- return '"""%s"""' % s1
-
-
-def get_all_text_(node):
- if node.text is not None:
- text = node.text
- else:
- text = ''
- for child in node:
- if child.tail is not None:
- text += child.tail
- return text
-
-
-def find_attr_value_(attr_name, node):
- attrs = node.attrib
- attr_parts = attr_name.split(':')
- value = None
- if len(attr_parts) == 1:
- value = attrs.get(attr_name)
- elif len(attr_parts) == 2:
- prefix, name = attr_parts
- namespace = node.nsmap.get(prefix)
- if namespace is not None:
- value = attrs.get('{%s}%s' % (namespace, name, ))
- return value
-
-
-class GDSParseError(Exception):
- pass
-
-
-def raise_parse_error(node, msg):
- msg = '%s (element %s/line %d)' % (msg, node.tag, node.sourceline, )
- raise GDSParseError(msg)
-
-
-class MixedContainer:
- # Constants for category:
- CategoryNone = 0
- CategoryText = 1
- CategorySimple = 2
- CategoryComplex = 3
- # Constants for content_type:
- TypeNone = 0
- TypeText = 1
- TypeString = 2
- TypeInteger = 3
- TypeFloat = 4
- TypeDecimal = 5
- TypeDouble = 6
- TypeBoolean = 7
- TypeBase64 = 8
- def __init__(self, category, content_type, name, value):
- self.category = category
- self.content_type = content_type
- self.name = name
- self.value = value
- def getCategory(self):
- return self.category
- def getContenttype(self, content_type):
- return self.content_type
- def getValue(self):
- return self.value
- def getName(self):
- return self.name
- def export(self, outfile, level, name, namespace,
- pretty_print=True):
- if self.category == MixedContainer.CategoryText:
- # Prevent exporting empty content as empty lines.
- if self.value.strip():
- outfile.write(self.value)
- elif self.category == MixedContainer.CategorySimple:
- self.exportSimple(outfile, level, name)
- else: # category == MixedContainer.CategoryComplex
- self.value.export(
- outfile, level, namespace, name_=name,
- pretty_print=pretty_print)
- def exportSimple(self, outfile, level, name):
- if self.content_type == MixedContainer.TypeString:
- outfile.write('<%s>%s</%s>' % (
- self.name, self.value, self.name))
- elif self.content_type == MixedContainer.TypeInteger or \
- self.content_type == MixedContainer.TypeBoolean:
- outfile.write('<%s>%d</%s>' % (
- self.name, self.value, self.name))
- elif self.content_type == MixedContainer.TypeFloat or \
- self.content_type == MixedContainer.TypeDecimal:
- outfile.write('<%s>%f</%s>' % (
- self.name, self.value, self.name))
- elif self.content_type == MixedContainer.TypeDouble:
- outfile.write('<%s>%g</%s>' % (
- self.name, self.value, self.name))
- elif self.content_type == MixedContainer.TypeBase64:
- outfile.write('<%s>%s</%s>' % (
- self.name,
- base64.b64encode(self.value),
- self.name))
- def to_etree(self, element):
- if self.category == MixedContainer.CategoryText:
- # Prevent exporting empty content as empty lines.
- if self.value.strip():
- if len(element) > 0:
- if element[-1].tail is None:
- element[-1].tail = self.value
- else:
- element[-1].tail += self.value
- else:
- if element.text is None:
- element.text = self.value
- else:
- element.text += self.value
- elif self.category == MixedContainer.CategorySimple:
- subelement = etree_.SubElement(
- element, '%s' % self.name)
- subelement.text = self.to_etree_simple()
- else: # category == MixedContainer.CategoryComplex
- self.value.to_etree(element)
- def to_etree_simple(self):
- if self.content_type == MixedContainer.TypeString:
- text = self.value
- elif (self.content_type == MixedContainer.TypeInteger or
- self.content_type == MixedContainer.TypeBoolean):
- text = '%d' % self.value
- elif (self.content_type == MixedContainer.TypeFloat or
- self.content_type == MixedContainer.TypeDecimal):
- text = '%f' % self.value
- elif self.content_type == MixedContainer.TypeDouble:
- text = '%g' % self.value
- elif self.content_type == MixedContainer.TypeBase64:
- text = '%s' % base64.b64encode(self.value)
- return text
- def exportLiteral(self, outfile, level, name):
- if self.category == MixedContainer.CategoryText:
- showIndent(outfile, level)
- outfile.write(
- 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
- self.category, self.content_type,
- self.name, self.value))
- elif self.category == MixedContainer.CategorySimple:
- showIndent(outfile, level)
- outfile.write(
- 'model_.MixedContainer(%d, %d, "%s", "%s"),\n' % (
- self.category, self.content_type,
- self.name, self.value))
- else: # category == MixedContainer.CategoryComplex
- showIndent(outfile, level)
- outfile.write(
- 'model_.MixedContainer(%d, %d, "%s",\n' % (
- self.category, self.content_type, self.name,))
- self.value.exportLiteral(outfile, level + 1)
- showIndent(outfile, level)
- outfile.write(')\n')
-
-
-class MemberSpec_(object):
- def __init__(self, name='', data_type='', container=0,
- optional=0, child_attrs=None, choice=None):
- self.name = name
- self.data_type = data_type
- self.container = container
- self.child_attrs = child_attrs
- self.choice = choice
- self.optional = optional
- def set_name(self, name): self.name = name
- def get_name(self): return self.name
- def set_data_type(self, data_type): self.data_type = data_type
- def get_data_type_chain(self): return self.data_type
- def get_data_type(self):
- if isinstance(self.data_type, list):
- if len(self.data_type) > 0:
- return self.data_type[-1]
- else:
- return 'xs:string'
- else:
- return self.data_type
- def set_container(self, container): self.container = container
- def get_container(self): return self.container
- def set_child_attrs(self, child_attrs): self.child_attrs = child_attrs
- def get_child_attrs(self): return self.child_attrs
- def set_choice(self, choice): self.choice = choice
- def get_choice(self): return self.choice
- def set_optional(self, optional): self.optional = optional
- def get_optional(self): return self.optional
-
-
-def _cast(typ, value):
- if typ is None or value is None:
- return value
- return typ(value)
-
-#
-# Data representation classes.
-#
-
-
-class IndexFile(GeneratedsSuper):
- subclass = None
- superclass = None
- def __init__(self, files=None, **kwargs_):
- self.original_tagname_ = None
- self.parent_object_ = kwargs_.get('parent_object_')
- self.files = files
- def factory(*args_, **kwargs_):
- if CurrentSubclassModule_ is not None:
- subclass = getSubclassFromModule_(
- CurrentSubclassModule_, IndexFile)
- if subclass is not None:
- return subclass(*args_, **kwargs_)
- if IndexFile.subclass:
- return IndexFile.subclass(*args_, **kwargs_)
- else:
- return IndexFile(*args_, **kwargs_)
- factory = staticmethod(factory)
- def get_files(self):
- return self.files
- def set_files(self, files):
- self.files = files
- filesProp = property(get_files, set_files)
- def hasContent_(self):
- if (
- self.files is not None
- ):
- return True
- else:
- return False
- def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IndexFile', pretty_print=True):
- imported_ns_def_ = GenerateDSNamespaceDefs_.get('IndexFile')
- if imported_ns_def_ is not None:
- namespacedef_ = imported_ns_def_
- if pretty_print:
- eol_ = '\n'
- else:
- eol_ = ''
- if self.original_tagname_ is not None:
- name_ = self.original_tagname_
- showIndent(outfile, level, pretty_print)
- outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
- already_processed = set()
- self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IndexFile')
- if self.hasContent_():
- outfile.write('>%s' % (eol_, ))
- self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IndexFile', pretty_print=pretty_print)
- showIndent(outfile, level, pretty_print)
- outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
- else:
- outfile.write('/>%s' % (eol_, ))
- def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IndexFile'):
- pass
- def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IndexFile', fromsubclass_=False, pretty_print=True):
- if pretty_print:
- eol_ = '\n'
- else:
- eol_ = ''
- if self.files is not None:
- self.files.export(outfile, level, namespaceprefix_, namespacedef_='', name_='files', pretty_print=pretty_print)
- def build(self, node):
- already_processed = set()
- self.buildAttributes(node, node.attrib, already_processed)
- for child in node:
- nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
- self.buildChildren(child, node, nodeName_)
- return self
- def buildAttributes(self, node, attrs, already_processed):
- pass
- def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
- if nodeName_ == 'files':
- obj_ = FileList.factory(parent_object_=self)
- obj_.build(child_)
- self.files = obj_
- obj_.original_tagname_ = 'files'
-# end class IndexFile
-
-
-class File(GeneratedsSuper):
- subclass = None
- superclass = None
- def __init__(self, path=None, **kwargs_):
- self.original_tagname_ = None
- self.parent_object_ = kwargs_.get('parent_object_')
- self.path = _cast(None, path)
- def factory(*args_, **kwargs_):
- if CurrentSubclassModule_ is not None:
- subclass = getSubclassFromModule_(
- CurrentSubclassModule_, File)
- if subclass is not None:
- return subclass(*args_, **kwargs_)
- if File.subclass:
- return File.subclass(*args_, **kwargs_)
- else:
- return File(*args_, **kwargs_)
- factory = staticmethod(factory)
- def get_path(self):
- return self.path
- def set_path(self, path):
- self.path = path
- pathProp = property(get_path, set_path)
- def hasContent_(self):
- if (
-
- ):
- return True
- else:
- return False
- def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='File', pretty_print=True):
- imported_ns_def_ = GenerateDSNamespaceDefs_.get('File')
- if imported_ns_def_ is not None:
- namespacedef_ = imported_ns_def_
- if pretty_print:
- eol_ = '\n'
- else:
- eol_ = ''
- if self.original_tagname_ is not None:
- name_ = self.original_tagname_
- showIndent(outfile, level, pretty_print)
- outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
- already_processed = set()
- self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='File')
- if self.hasContent_():
- outfile.write('>%s' % (eol_, ))
- self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='File', pretty_print=pretty_print)
- outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
- else:
- outfile.write('/>%s' % (eol_, ))
- def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='File'):
- if self.path is not None and 'path' not in already_processed:
- already_processed.add('path')
- outfile.write(' path=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.path), input_name='path')), ))
- def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='File', fromsubclass_=False, pretty_print=True):
- pass
- def build(self, node):
- already_processed = set()
- self.buildAttributes(node, node.attrib, already_processed)
- for child in node:
- nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
- self.buildChildren(child, node, nodeName_)
- return self
- def buildAttributes(self, node, attrs, already_processed):
- value = find_attr_value_('path', node)
- if value is not None and 'path' not in already_processed:
- already_processed.add('path')
- self.path = value
- def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
- pass
-# end class File
-
-
-class FileList(GeneratedsSuper):
- subclass = None
- superclass = None
- def __init__(self, file=None, **kwargs_):
- self.original_tagname_ = None
- self.parent_object_ = kwargs_.get('parent_object_')
- if file is None:
- self.file = []
- else:
- self.file = file
- def factory(*args_, **kwargs_):
- if CurrentSubclassModule_ is not None:
- subclass = getSubclassFromModule_(
- CurrentSubclassModule_, FileList)
- if subclass is not None:
- return subclass(*args_, **kwargs_)
- if FileList.subclass:
- return FileList.subclass(*args_, **kwargs_)
- else:
- return FileList(*args_, **kwargs_)
- factory = staticmethod(factory)
- def get_file(self):
- return self.file
- def set_file(self, file):
- self.file = file
- def add_file(self, value):
- self.file.append(value)
- def add_file(self, value):
- self.file.append(value)
- def insert_file_at(self, index, value):
- self.file.insert(index, value)
- def replace_file_at(self, index, value):
- self.file[index] = value
- fileProp = property(get_file, set_file)
- def hasContent_(self):
- if (
- self.file
- ):
- return True
- else:
- return False
- def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FileList', pretty_print=True):
- imported_ns_def_ = GenerateDSNamespaceDefs_.get('FileList')
- if imported_ns_def_ is not None:
- namespacedef_ = imported_ns_def_
- if pretty_print:
- eol_ = '\n'
- else:
- eol_ = ''
- if self.original_tagname_ is not None:
- name_ = self.original_tagname_
- showIndent(outfile, level, pretty_print)
- outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
- already_processed = set()
- self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FileList')
- if self.hasContent_():
- outfile.write('>%s' % (eol_, ))
- self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FileList', pretty_print=pretty_print)
- showIndent(outfile, level, pretty_print)
- outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
- else:
- outfile.write('/>%s' % (eol_, ))
- def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FileList'):
- pass
- def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FileList', fromsubclass_=False, pretty_print=True):
- if pretty_print:
- eol_ = '\n'
- else:
- eol_ = ''
- for file_ in self.file:
- file_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='file', pretty_print=pretty_print)
- def build(self, node):
- already_processed = set()
- self.buildAttributes(node, node.attrib, already_processed)
- for child in node:
- nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
- self.buildChildren(child, node, nodeName_)
- return self
- def buildAttributes(self, node, attrs, already_processed):
- pass
- def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
- if nodeName_ == 'file':
- obj_ = File.factory(parent_object_=self)
- obj_.build(child_)
- self.file.append(obj_)
- obj_.original_tagname_ = 'file'
-# end class FileList
-
-
-GDSClassesMapping = {
- 'file-index': IndexFile,
-}
-
-
-USAGE_TEXT = """
-Usage: python <Parser>.py [ -s ] <in_xml_file>
-"""
-
-
-def usage():
- print(USAGE_TEXT)
- sys.exit(1)
-
-
-def get_root_tag(node):
- tag = Tag_pattern_.match(node.tag).groups()[-1]
- rootClass = GDSClassesMapping.get(tag)
- if rootClass is None:
- rootClass = globals().get(tag)
- return tag, rootClass
-
-
-def parse(inFileName, silence=False):
- parser = None
- doc = parsexml_(inFileName, parser)
- rootNode = doc.getroot()
- rootTag, rootClass = get_root_tag(rootNode)
- if rootClass is None:
- rootTag = 'IndexFile'
- rootClass = IndexFile
- rootObj = rootClass.factory()
- rootObj.build(rootNode)
- # Enable Python to collect the space used by the DOM.
- doc = None
- if not silence:
- sys.stdout.write('<?xml version="1.0" ?>\n')
- rootObj.export(
- sys.stdout, 0, name_=rootTag,
- namespacedef_='',
- pretty_print=True)
- return rootObj
-
-
-def parseEtree(inFileName, silence=False):
- parser = None
- doc = parsexml_(inFileName, parser)
- rootNode = doc.getroot()
- rootTag, rootClass = get_root_tag(rootNode)
- if rootClass is None:
- rootTag = 'IndexFile'
- rootClass = IndexFile
- rootObj = rootClass.factory()
- rootObj.build(rootNode)
- # Enable Python to collect the space used by the DOM.
- doc = None
- mapping = {}
- rootElement = rootObj.to_etree(None, name_=rootTag, mapping_=mapping)
- reverse_mapping = rootObj.gds_reverse_node_mapping(mapping)
- if not silence:
- content = etree_.tostring(
- rootElement, pretty_print=True,
- xml_declaration=True, encoding="utf-8")
- sys.stdout.write(content)
- sys.stdout.write('\n')
- return rootObj, rootElement, mapping, reverse_mapping
-
-
-def parseString(inString, silence=False):
- '''Parse a string, create the object tree, and export it.
-
- Arguments:
- - inString -- A string. This XML fragment should not start
- with an XML declaration containing an encoding.
- - silence -- A boolean. If False, export the object.
- Returns -- The root object in the tree.
- '''
- parser = None
- rootNode= parsexmlstring_(inString, parser)
- rootTag, rootClass = get_root_tag(rootNode)
- if rootClass is None:
- rootTag = 'IndexFile'
- rootClass = IndexFile
- rootObj = rootClass.factory()
- rootObj.build(rootNode)
- # Enable Python to collect the space used by the DOM.
- if not silence:
- sys.stdout.write('<?xml version="1.0" ?>\n')
- rootObj.export(
- sys.stdout, 0, name_=rootTag,
- namespacedef_='')
- return rootObj
-
-
-def parseLiteral(inFileName, silence=False):
- parser = None
- doc = parsexml_(inFileName, parser)
- rootNode = doc.getroot()
- rootTag, rootClass = get_root_tag(rootNode)
- if rootClass is None:
- rootTag = 'IndexFile'
- rootClass = IndexFile
- rootObj = rootClass.factory()
- rootObj.build(rootNode)
- # Enable Python to collect the space used by the DOM.
- doc = None
- if not silence:
- sys.stdout.write('#from indexFile import *\n\n')
- sys.stdout.write('import indexFile as model_\n\n')
- sys.stdout.write('rootObj = model_.rootClass(\n')
- rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
- sys.stdout.write(')\n')
- return rootObj
-
-
-def main():
- args = sys.argv[1:]
- if len(args) == 1:
- parse(args[0])
- else:
- usage()
-
-
-if __name__ == '__main__':
- #import pdb; pdb.set_trace()
- main()
-
-
-__all__ = [
- "File",
- "FileList",
- "IndexFile"
-]
diff --git a/src/ee/xml/bomFile.py b/src/ee/xml/types.py
index 0557a6c..356b520 100644
--- a/src/ee/xml/bomFile.py
+++ b/src/ee/xml/types.py
@@ -10,14 +10,14 @@
# ('--no-dates', '')
# ('--no-versions', '')
# ('--output-directory', 'src/ee/xml')
-# ('-o', 'src/ee/xml/bomFile.py')
+# ('-o', 'src/ee/xml/types.py')
# ('-m', '')
#
# Command line arguments:
-# xsd/ee-bom.xsd
+# xsd/ee.xsd
#
# Command line:
-# env/bin/generateDS.py -f --no-dates --no-versions --output-directory="src/ee/xml" -o "src/ee/xml/bomFile.py" -m xsd/ee-bom.xsd
+# env/bin/generateDS.py -f --no-dates --no-versions --output-directory="src/ee/xml" -o "src/ee/xml/types.py" -m xsd/ee.xsd
#
# Current working directory (os.getcwd()):
# ee-python
@@ -726,7 +726,7 @@ def _cast(typ, value):
#
-class BomFile(GeneratedsSuper):
+class PartCollection(GeneratedsSuper):
subclass = None
superclass = None
def __init__(self, parts=None, **kwargs_):
@@ -736,13 +736,13 @@ class BomFile(GeneratedsSuper):
def factory(*args_, **kwargs_):
if CurrentSubclassModule_ is not None:
subclass = getSubclassFromModule_(
- CurrentSubclassModule_, BomFile)
+ CurrentSubclassModule_, PartCollection)
if subclass is not None:
return subclass(*args_, **kwargs_)
- if BomFile.subclass:
- return BomFile.subclass(*args_, **kwargs_)
+ if PartCollection.subclass:
+ return PartCollection.subclass(*args_, **kwargs_)
else:
- return BomFile(*args_, **kwargs_)
+ return PartCollection(*args_, **kwargs_)
factory = staticmethod(factory)
def get_parts(self):
return self.parts
@@ -756,8 +756,8 @@ class BomFile(GeneratedsSuper):
return True
else:
return False
- def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BomFile', pretty_print=True):
- imported_ns_def_ = GenerateDSNamespaceDefs_.get('BomFile')
+ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PartCollection', pretty_print=True):
+ imported_ns_def_ = GenerateDSNamespaceDefs_.get('PartCollection')
if imported_ns_def_ is not None:
namespacedef_ = imported_ns_def_
if pretty_print:
@@ -769,17 +769,17 @@ class BomFile(GeneratedsSuper):
showIndent(outfile, level, pretty_print)
outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
already_processed = set()
- self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='BomFile')
+ self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='PartCollection')
if self.hasContent_():
outfile.write('>%s' % (eol_, ))
- self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='BomFile', pretty_print=pretty_print)
+ self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='PartCollection', pretty_print=pretty_print)
showIndent(outfile, level, pretty_print)
outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
else:
outfile.write('/>%s' % (eol_, ))
- def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='BomFile'):
+ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='PartCollection'):
pass
- def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='BomFile', fromsubclass_=False, pretty_print=True):
+ def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='PartCollection', fromsubclass_=False, pretty_print=True):
if pretty_print:
eol_ = '\n'
else:
@@ -801,7 +801,7 @@ class BomFile(GeneratedsSuper):
obj_.build(child_)
self.parts = obj_
obj_.original_tagname_ = 'parts'
-# end class BomFile
+# end class PartCollection
class Part(GeneratedsSuper):
@@ -1773,9 +1773,250 @@ class PriceBreakList(GeneratedsSuper):
# end class PriceBreakList
+class IndexFile(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, files=None, **kwargs_):
+ self.original_tagname_ = None
+ self.parent_object_ = kwargs_.get('parent_object_')
+ self.files = files
+ def factory(*args_, **kwargs_):
+ if CurrentSubclassModule_ is not None:
+ subclass = getSubclassFromModule_(
+ CurrentSubclassModule_, IndexFile)
+ if subclass is not None:
+ return subclass(*args_, **kwargs_)
+ if IndexFile.subclass:
+ return IndexFile.subclass(*args_, **kwargs_)
+ else:
+ return IndexFile(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_files(self):
+ return self.files
+ def set_files(self, files):
+ self.files = files
+ filesProp = property(get_files, set_files)
+ def hasContent_(self):
+ if (
+ self.files is not None
+ ):
+ return True
+ else:
+ return False
+ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IndexFile', pretty_print=True):
+ imported_ns_def_ = GenerateDSNamespaceDefs_.get('IndexFile')
+ if imported_ns_def_ is not None:
+ namespacedef_ = imported_ns_def_
+ if pretty_print:
+ eol_ = '\n'
+ else:
+ eol_ = ''
+ if self.original_tagname_ is not None:
+ name_ = self.original_tagname_
+ showIndent(outfile, level, pretty_print)
+ outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+ already_processed = set()
+ self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='IndexFile')
+ if self.hasContent_():
+ outfile.write('>%s' % (eol_, ))
+ self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='IndexFile', pretty_print=pretty_print)
+ showIndent(outfile, level, pretty_print)
+ outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+ else:
+ outfile.write('/>%s' % (eol_, ))
+ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='IndexFile'):
+ pass
+ def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='IndexFile', fromsubclass_=False, pretty_print=True):
+ if pretty_print:
+ eol_ = '\n'
+ else:
+ eol_ = ''
+ if self.files is not None:
+ self.files.export(outfile, level, namespaceprefix_, namespacedef_='', name_='files', pretty_print=pretty_print)
+ def build(self, node):
+ already_processed = set()
+ self.buildAttributes(node, node.attrib, already_processed)
+ for child in node:
+ nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+ self.buildChildren(child, node, nodeName_)
+ return self
+ def buildAttributes(self, node, attrs, already_processed):
+ pass
+ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
+ if nodeName_ == 'files':
+ obj_ = FileList.factory(parent_object_=self)
+ obj_.build(child_)
+ self.files = obj_
+ obj_.original_tagname_ = 'files'
+# end class IndexFile
+
+
+class File(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, path=None, **kwargs_):
+ self.original_tagname_ = None
+ self.parent_object_ = kwargs_.get('parent_object_')
+ self.path = _cast(None, path)
+ def factory(*args_, **kwargs_):
+ if CurrentSubclassModule_ is not None:
+ subclass = getSubclassFromModule_(
+ CurrentSubclassModule_, File)
+ if subclass is not None:
+ return subclass(*args_, **kwargs_)
+ if File.subclass:
+ return File.subclass(*args_, **kwargs_)
+ else:
+ return File(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_path(self):
+ return self.path
+ def set_path(self, path):
+ self.path = path
+ pathProp = property(get_path, set_path)
+ def hasContent_(self):
+        if (
+            False  # 'File' carries only the path attribute, no element content
+        ):
+ return True
+ else:
+ return False
+ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='File', pretty_print=True):
+ imported_ns_def_ = GenerateDSNamespaceDefs_.get('File')
+ if imported_ns_def_ is not None:
+ namespacedef_ = imported_ns_def_
+ if pretty_print:
+ eol_ = '\n'
+ else:
+ eol_ = ''
+ if self.original_tagname_ is not None:
+ name_ = self.original_tagname_
+ showIndent(outfile, level, pretty_print)
+ outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+ already_processed = set()
+ self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='File')
+ if self.hasContent_():
+ outfile.write('>%s' % (eol_, ))
+ self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='File', pretty_print=pretty_print)
+ outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+ else:
+ outfile.write('/>%s' % (eol_, ))
+ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='File'):
+ if self.path is not None and 'path' not in already_processed:
+ already_processed.add('path')
+ outfile.write(' path=%s' % (self.gds_encode(self.gds_format_string(quote_attrib(self.path), input_name='path')), ))
+ def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='File', fromsubclass_=False, pretty_print=True):
+ pass
+ def build(self, node):
+ already_processed = set()
+ self.buildAttributes(node, node.attrib, already_processed)
+ for child in node:
+ nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+ self.buildChildren(child, node, nodeName_)
+ return self
+ def buildAttributes(self, node, attrs, already_processed):
+ value = find_attr_value_('path', node)
+ if value is not None and 'path' not in already_processed:
+ already_processed.add('path')
+ self.path = value
+ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
+ pass
+# end class File
+
+
+class FileList(GeneratedsSuper):
+ subclass = None
+ superclass = None
+ def __init__(self, file=None, **kwargs_):
+ self.original_tagname_ = None
+ self.parent_object_ = kwargs_.get('parent_object_')
+ if file is None:
+ self.file = []
+ else:
+ self.file = file
+ def factory(*args_, **kwargs_):
+ if CurrentSubclassModule_ is not None:
+ subclass = getSubclassFromModule_(
+ CurrentSubclassModule_, FileList)
+ if subclass is not None:
+ return subclass(*args_, **kwargs_)
+ if FileList.subclass:
+ return FileList.subclass(*args_, **kwargs_)
+ else:
+ return FileList(*args_, **kwargs_)
+ factory = staticmethod(factory)
+ def get_file(self):
+ return self.file
+ def set_file(self, file):
+ self.file = file
+ def add_file(self, value):
+ self.file.append(value)
+ def insert_file_at(self, index, value):
+ self.file.insert(index, value)
+ def replace_file_at(self, index, value):
+ self.file[index] = value
+ fileProp = property(get_file, set_file)
+ def hasContent_(self):
+ if (
+ self.file
+ ):
+ return True
+ else:
+ return False
+ def export(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FileList', pretty_print=True):
+ imported_ns_def_ = GenerateDSNamespaceDefs_.get('FileList')
+ if imported_ns_def_ is not None:
+ namespacedef_ = imported_ns_def_
+ if pretty_print:
+ eol_ = '\n'
+ else:
+ eol_ = ''
+ if self.original_tagname_ is not None:
+ name_ = self.original_tagname_
+ showIndent(outfile, level, pretty_print)
+ outfile.write('<%s%s%s' % (namespaceprefix_, name_, namespacedef_ and ' ' + namespacedef_ or '', ))
+ already_processed = set()
+ self.exportAttributes(outfile, level, already_processed, namespaceprefix_, name_='FileList')
+ if self.hasContent_():
+ outfile.write('>%s' % (eol_, ))
+ self.exportChildren(outfile, level + 1, namespaceprefix_, namespacedef_, name_='FileList', pretty_print=pretty_print)
+ showIndent(outfile, level, pretty_print)
+ outfile.write('</%s%s>%s' % (namespaceprefix_, name_, eol_))
+ else:
+ outfile.write('/>%s' % (eol_, ))
+ def exportAttributes(self, outfile, level, already_processed, namespaceprefix_='', name_='FileList'):
+ pass
+ def exportChildren(self, outfile, level, namespaceprefix_='', namespacedef_='', name_='FileList', fromsubclass_=False, pretty_print=True):
+ if pretty_print:
+ eol_ = '\n'
+ else:
+ eol_ = ''
+ for file_ in self.file:
+ file_.export(outfile, level, namespaceprefix_, namespacedef_='', name_='file', pretty_print=pretty_print)
+ def build(self, node):
+ already_processed = set()
+ self.buildAttributes(node, node.attrib, already_processed)
+ for child in node:
+ nodeName_ = Tag_pattern_.match(child.tag).groups()[-1]
+ self.buildChildren(child, node, nodeName_)
+ return self
+ def buildAttributes(self, node, attrs, already_processed):
+ pass
+ def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
+ if nodeName_ == 'file':
+ obj_ = File.factory(parent_object_=self)
+ obj_.build(child_)
+ self.file.append(obj_)
+ obj_.original_tagname_ = 'file'
+# end class FileList
+
+
GDSClassesMapping = {
- 'bom-file': BomFile,
+ 'file-index': IndexFile,
'part': Part,
+ 'part-collection': PartCollection,
}
@@ -1803,8 +2044,8 @@ def parse(inFileName, silence=False):
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
- rootTag = 'BomFile'
- rootClass = BomFile
+ rootTag = 'PartCollection'
+ rootClass = PartCollection
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
@@ -1824,8 +2065,8 @@ def parseEtree(inFileName, silence=False):
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
- rootTag = 'BomFile'
- rootClass = BomFile
+ rootTag = 'PartCollection'
+ rootClass = PartCollection
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
@@ -1855,8 +2096,8 @@ def parseString(inString, silence=False):
rootNode= parsexmlstring_(inString, parser)
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
- rootTag = 'BomFile'
- rootClass = BomFile
+ rootTag = 'PartCollection'
+ rootClass = PartCollection
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
@@ -1874,15 +2115,15 @@ def parseLiteral(inFileName, silence=False):
rootNode = doc.getroot()
rootTag, rootClass = get_root_tag(rootNode)
if rootClass is None:
- rootTag = 'BomFile'
- rootClass = BomFile
+ rootTag = 'PartCollection'
+ rootClass = PartCollection
rootObj = rootClass.factory()
rootObj.build(rootNode)
# Enable Python to collect the space used by the DOM.
doc = None
if not silence:
- sys.stdout.write('#from bomFile import *\n\n')
- sys.stdout.write('import bomFile as model_\n\n')
+ sys.stdout.write('#from types import *\n\n')
+ sys.stdout.write('import types as model_\n\n')
sys.stdout.write('rootObj = model_.rootClass(\n')
rootObj.exportLiteral(sys.stdout, 0, name_=rootTag)
sys.stdout.write(')\n')
@@ -1904,11 +2145,14 @@ if __name__ == '__main__':
__all__ = [
"Amount",
- "BomFile",
"DistributorInfo",
"Fact",
"FactList",
+ "File",
+ "FileList",
+ "IndexFile",
"Part",
+ "PartCollection",
"PartList",
"PartNumber",
"PartNumberList",
diff --git a/xsd/ee-index.xsd b/xsd/ee-index.xsd
deleted file mode 100644
index 21dbeb2..0000000
--- a/xsd/ee-index.xsd
+++ /dev/null
@@ -1,24 +0,0 @@
-<xs:schema
- xmlns:xs="http://www.w3.org/2001/XMLSchema"
- targetNamespace="http://purl.org/ee/bom-file"
- xmlns="http://purl.org/ee/bom-file">
-
- <xs:element name="file-index" type="IndexFile"/>
-
- <xs:complexType name="IndexFile">
- <xs:sequence>
- <xs:element name="files" type="FileList" minOccurs="0"/>
- </xs:sequence>
- </xs:complexType>
-
- <xs:complexType name="File">
- <xs:attribute name="path" use="required" type="xs:string"/>
- </xs:complexType>
-
- <xs:complexType name="FileList">
- <xs:sequence>
- <xs:element name="file" type="File" maxOccurs="unbounded"/>
- </xs:sequence>
- </xs:complexType>
-
-</xs:schema>
diff --git a/xsd/ee.xsd b/xsd/ee.xsd
new file mode 100644
index 0000000..7c5c044
--- /dev/null
+++ b/xsd/ee.xsd
@@ -0,0 +1,106 @@
+<xs:schema
+ xmlns:xs="http://www.w3.org/2001/XMLSchema"
+ targetNamespace="http://purl.org/ee/bom-file"
+ xmlns="http://purl.org/ee/bom-file">
+
+ <xs:attribute name="id" type="xs:string"/>
+
+ <xs:element name="part-collection" type="PartCollection"/>
+ <xs:element name="part" type="Part"/>
+
+ <xs:complexType name="PartCollection">
+ <xs:sequence>
+ <xs:element name="parts" type="PartList" minOccurs="0"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <xs:complexType name="Part">
+ <xs:sequence>
+ <xs:element name="schema-reference" type="xs:string"/>
+ <xs:element name="part-type" type="xs:anyURI"/>
+ <xs:element name="part-numbers" type="PartNumberList"/>
+ <xs:element name="distributor-info" type="DistributorInfo"/>
+ <xs:element name="facts" type="FactList"/>
+ <xs:element name="price-breaks" type="PriceBreakList"/>
+ </xs:sequence>
+ <xs:attribute ref="id" use="required"/>
+ </xs:complexType>
+
+ <xs:complexType name="PartList">
+ <xs:sequence>
+ <xs:element name="part" type="Part" maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <xs:complexType name="PartNumber">
+ <xs:sequence>
+ <xs:element name="value" type="xs:string"/>
+ <xs:element name="distributor" type="xs:anyURI"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <xs:complexType name="PartNumberList">
+ <xs:sequence>
+ <xs:element name="part-number" type="PartNumber" maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <xs:complexType name="Fact">
+ <xs:sequence>
+ <xs:element name="key" type="xs:anyURI"/>
+ <xs:element name="label" type="xs:string"/>
+ <xs:element name="value" type="xs:string"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <xs:complexType name="FactList">
+ <xs:sequence>
+ <xs:element name="fact" type="Fact" maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <xs:complexType name="DistributorInfo">
+ <xs:sequence>
+ <xs:element name="state" type="xs:string"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <xs:complexType name="Amount">
+ <xs:attribute name="value" use="required"/>
+ <xs:attribute name="currency"/>
+ </xs:complexType>
+
+ <xs:complexType name="PriceBreak">
+ <xs:sequence>
+ <xs:element name="quantity" type="xs:anyURI"/>
+ <xs:element name="amount" type="Amount"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <xs:complexType name="PriceBreakList">
+ <xs:sequence>
+ <xs:element name="price-break" type="PriceBreak" maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+
+  <!-- Index file types, merged from ee-index.xsd -->
+
+ <xs:element name="file-index" type="IndexFile"/>
+
+ <xs:complexType name="IndexFile">
+ <xs:sequence>
+ <xs:element name="files" type="FileList" minOccurs="0"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <xs:complexType name="File">
+ <xs:attribute name="path" use="required" type="xs:string"/>
+ </xs:complexType>
+
+ <xs:complexType name="FileList">
+ <xs:sequence>
+ <xs:element name="file" type="File" maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+
+</xs:schema>
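
With the BOM and index types merged into a single xsd/ee.xsd, documents with either root element (part-collection or file-index) can be validated against one schema. A small validation sketch, assuming lxml is available (the generated module already prefers it for parsing) and using a placeholder instance file name:

from lxml import etree

schema = etree.XMLSchema(etree.parse("xsd/ee.xsd"))
doc = etree.parse("index.xml")  # placeholder instance document

if not schema.validate(doc):
    for error in schema.error_log:
        print(error.message)
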