"""Shared helpers for the eda-rdf command-line interface: database access,
graph construction, SPARQL querying and configuration handling."""

import sys
import logging
import configparser
from os import mkdir
from os.path import dirname, isdir, isfile
from typing import Callable

from rdflib import store, ConjunctiveGraph, Graph, RDF, RDFS, OWL
from rdflib.plugins.sparql import prepareQuery
import rdflib.plugins.stores.sparqlstore as sparqlstore

# noinspection PyUnresolvedReferences
import argparse

from .. import EDA
from ..digikey import rdf as digikey_rdf
from ..kicad import rdf as kicad_rdf


class CliCommand(object):
    def __init__(self, key, description):
        self.key = key
        self.description = description


class CliException(Exception):
    pass


def initialize():
    logging.basicConfig(level=logging.DEBUG)


def info(msg=None):
    if msg is not None:
        sys.stderr.write(msg)
    sys.stderr.write("\n")


def debug(msg=None):
    if msg is not None:
        sys.stderr.write('D: %s' % msg)
    sys.stderr.write("\n")


def do_exit(msg=None):
    sys.exit(msg)


def with_database(tx):
    """Open the configured store, run tx(graph) against it, commit if the
    store is transactional, and always close the store afterwards."""
    config = read_config()
    # Use .get() so a missing db.type falls back to the local store.
    db_type = config['db'].get('type')

    if db_type is None or db_type == 'local':
        path = ".eda-rdf/db"
        g = ConjunctiveGraph('Sleepycat')
        rt = g.open(path, create=False)
        if rt == store.NO_STORE:
            info("Creating store in %s" % path)
            g.open(path, create=True)
        elif rt != store.VALID_STORE:
            raise CliException("The database is corrupt: %s" % path)
    elif db_type == 'sparql':
        query_endpoint = config["db"]["url"]
        update_endpoint = config["db"].get("update-url")

        if update_endpoint is None:
            # Query-only endpoint.
            g = sparqlstore.SPARQLStore()
            g.open(query_endpoint)
        else:
            # Separate query/update endpoints; committed explicitly below.
            g = sparqlstore.SPARQLUpdateStore(autocommit=False)
            g.open((query_endpoint, update_endpoint))
    else:
        raise CliException("Unknown db.type: %s" % db_type)

    try:
        tx(g)
        if isinstance(g, sparqlstore.SPARQLUpdateStore):
            g.commit()
    finally:
        g.close()


def create_graph(digikey=False, kicad=False) -> Graph:
    """Create an in-memory graph with the common namespace prefixes bound."""
    g = Graph()
    g.bind("owl", OWL)
    g.bind("eda", EDA)
    if digikey:
        g.bind("dk", digikey_rdf.DIGIKEY)
        g.bind("dk-part", digikey_rdf.DIGIKEY_PART)
        g.bind("dk-attr-type", digikey_rdf.DIGIKEY_ATTRIBUTE_TYPE)
        g.bind("dk-attr-value", digikey_rdf.DIGIKEY_ATTRIBUTE_VALUE)
        g.bind("dk-product-category", digikey_rdf.DIGIKEY_PRODUCT_CATEGORY)
    if kicad:
        g.bind("kicad", kicad_rdf.KICAD)
        g.bind("kicad-random", kicad_rdf.KICAD_RANDOM)
    return g


def write_graph(gen_g: Callable[[], Graph], filename: str = None, force_write: bool = False):
    """Serialize the graph produced by gen_g() as Turtle, either to filename
    (skipping existing files unless force_write) or to stdout."""
    if filename is not None:
        if force_write or not isfile(filename):
            parent = dirname(filename)
            # Only create the parent directory if the filename actually has one.
            if parent and not isdir(parent):
                mkdir(parent)
            g = gen_g()
            if g is None:
                raise CliException("internal error: graph generator returned None")
            bs = g.serialize(encoding='utf-8', format='turtle')
            with open(filename, "wb") as f:
                f.write(bs)
            info("Wrote %s" % filename)
        else:
            info("Skipped writing %s, already exists" % filename)
    else:
        g = gen_g()
        bs = g.serialize(encoding='utf-8', format='turtle')
        sys.stdout.buffer.write(bs)


_initNs = {
    "rdf": RDF,
    "rdfs": RDFS,
    "owl": OWL,
    "eda": EDA,
    "dk": digikey_rdf.DIGIKEY,
    "dk-attr-type": digikey_rdf.DIGIKEY_ATTRIBUTE_TYPE,
    "dk-attr-value": digikey_rdf.DIGIKEY_ATTRIBUTE_VALUE,
    "dk-part": digikey_rdf.DIGIKEY_PART,
    "dk-p-c": digikey_rdf.DIGIKEY_PRODUCT_CATEGORY,
    "kicad": kicad_rdf.KICAD,
    "kicad-random": kicad_rdf.KICAD_RANDOM,
}


def sparql(g: Graph, query: str, init_bindings=None):
    """Run a SPARQL query with the standard prefixes bound. SPARQL stores take
    the query string directly; local graphs get a prepared query."""
    # for k, v in _initNs.items():
    #     print("PREFIX %s: <%s>" % (k, v))

    if isinstance(g, sparqlstore.SPARQLStore):
        return g.query(query, initNs=_initNs, initBindings=init_bindings)
    else:
        q = prepareQuery(query, initNs=_initNs)
        return g.query(q, initBindings=init_bindings)


def write_config(config: configparser.ConfigParser):
    with open('.eda-rdf/config.ini', 'w') as configfile:
        config.write(configfile)


def read_config() -> configparser.ConfigParser:
    try:
        with open('.eda-rdf/config.ini', 'r') as f:
            config = configparser.ConfigParser()
            config.read_file(f)
            return config
    except FileNotFoundError:
        raise CliException("Not an EDA-RDF project. Run eda-rdf init first.")
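

# Illustrative usage (a minimal sketch, not part of the module itself): shows how
# with_database() and sparql() compose. It assumes `eda-rdf init` has already
# created .eda-rdf/config.ini; count_triples is a hypothetical callback and the
# query is just a generic triple count.
#
#     def count_triples(g):
#         for row in sparql(g, "SELECT (COUNT(*) AS ?n) WHERE { ?s ?p ?o }"):
#             info("store holds %s triples" % row.n)
#
#     with_database(count_triples)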