|
|
- import configparser
- import os
- import argparse
- import pymongo
- import ssl
- import mysql.connector
- import networkx as nx
- from . import queries
- from . import graph
- import minorminer
- from tqdm import tqdm
- import numpy as np
-
def readConfig(configFilePath):
    """Read an INI-style configuration file.

    Returns a ConfigParser populated from ``configFilePath``; when the path
    does not point to a file, an empty parser is returned instead of raising.
    """
    parser = configparser.ConfigParser()

    if not os.path.isfile(configFilePath):
        return parser

    parser.read(configFilePath)
    return parser
-
class ArgParser:
    """Collects command-line flags and resolves their values from, in order
    of priority: the command line, the dataset config file (optional), and
    finally an interactive prompt."""

    def __init__(self):
        self.__flags = {}
        self.__parser = argparse.ArgumentParser()
        self.__instanceDirArgSet = False
        self.__config = None
        self.__parsedArgs = {}

    def addArg(self, alias,
               shortFlag,
               longFlag,
               help,
               type,
               default=None,
               ignoreDatabaseConfig=False):
        """Register one flag.

        alias                -- key under which the parsed value is returned
        shortFlag / longFlag -- argparse option names (without dashes)
        help                 -- argparse help text
        type                 -- callable used to convert the raw value
        default              -- optional argparse default
        ignoreDatabaseConfig -- when True, values from the dataset config are
                                never used for this flag
        """
        self.__flags[alias] = {"longFlag": longFlag,
                               "hasDefault": default is not None,
                               "ignoreDatabaseConfig": ignoreDatabaseConfig,
                               "type": type}

        self.__parser.add_argument("-%s" % shortFlag,
                                   "--%s" % longFlag,
                                   help=help,
                                   type=type,
                                   default=default)

    def addInstanceDirArg(self):
        """Register the dataset-directory flag; when it is supplied on the
        command line, the dataset's config file can stand in for the other
        flags."""
        self.__instanceDirArgSet = True

        self.addArg(alias="datasetDir", shortFlag="d", longFlag="dataset_dir",
                    help="the base directory of the dataset; if this flag is given the others can be omitted",
                    type=str, ignoreDatabaseConfig=True)

    def parse(self):
        """Parse sys.argv and return {alias: value} for every registered flag."""
        self.__parsedArgs = {}
        args = vars(self.__parser.parse_args())

        if self.__instanceDirArgSet:
            self.__config = readConfig(os.path.join(args["dataset_dir"],
                                                    "dataset.config"))
            self.__parseDatasetConfig()

        for alias, flag in self.__flags.items():
            self.__parsedArgs[alias] = self.__processFlag(args, flag)

        self.__config = None

        return self.__parsedArgs

    def __parseDatasetConfig(self):
        # Seed the parsed values with the [STRUCTURE] section of the dataset
        # config; CLI values may still override them in __processFlag.
        for flag, value in self.__config["STRUCTURE"].items():
            self.__parsedArgs[flag] = value

    def __processFlag(self, args, flag):
        """Resolve one flag: the CLI value wins, then the dataset config,
        then an interactive prompt converted with the flag's type."""
        longFlag = flag["longFlag"]

        tmpValue = self.__parsedArgs.get(longFlag)

        if flag["ignoreDatabaseConfig"]:
            tmpValue = None

        # BUG FIX: compare against None instead of truthiness so legitimate
        # falsy CLI values (0, "") are not silently discarded.
        if args[longFlag] is not None:
            tmpValue = args[longFlag]

        if tmpValue is None:
            tmpValue = flag["type"](input("pass argument %s: " % longFlag))

        return tmpValue
-
def getDBContext(dbConfigPath):
    """Bundle the instance-pool client and its frequently used collections
    into a single context dict keyed by role."""
    client = connect_to_instance_pool(dbConfigPath)
    db = client["experiments"]

    return {"client": client,
            "db": db,
            "instances": db["instances"],
            "experimentScopes": db["experiment_scopes"]}
-
def connect_to_instance_pool(dbConfigPath="database.config"):
    """Connect to the MongoDB instance pool described by the
    [INSTANCE_POOL] section of the given config file and return a handle to
    the configured database."""
    pool = readConfig(dbConfigPath)["INSTANCE_POOL"]

    uri = "mongodb://%s:%s@%s:%s/%s" % (pool["user"],
                                        pool["pw"],
                                        pool["url"],
                                        pool["port"],
                                        pool["database"])

    # Certificate validation is deliberately disabled here (CERT_NONE);
    # NOTE(review): confirm this is acceptable for the deployment.
    client = pymongo.MongoClient(uri,
                                 ssl=True,
                                 ssl_cert_reqs=ssl.CERT_NONE)

    return client[pool["database"]]
-
def connect_to_experimetns_db(dbConfigPath="database.config"):
    """Open a MySQL connection to the experiments database configured in the
    [EXPERIMENT_DB] section of the given config file.

    NOTE(review): the function name contains a typo ("experimetns") but is
    kept unchanged for backward compatibility with existing callers.
    """
    section = readConfig(dbConfigPath)["EXPERIMENT_DB"]

    return mysql.connector.connect(host=section["url"],
                                   port=section["port"],
                                   user=section["user"],
                                   password=section["pw"],
                                   database=section["database"])
-
def frange(start, stop, steps):
    """Yield start, start + steps, ... for as long as the value is below stop.

    Works with ints and floats alike; with float steps the usual rounding
    caveats of repeated addition apply.
    """
    current = start
    while current < stop:
        yield current
        current += steps
-
def create_experiment_scope(db, description, name):
    """Insert a fresh experiment-scope document.

    The stripped *name* becomes the document _id; the instance list starts
    empty and is appended to later.
    """
    experiment_scope = {"instances": [],
                        "description": description,
                        "_id": name.strip()}

    db["experiment_scopes"].insert_one(experiment_scope)
-
def write_instance_to_pool_db(db, instance):
    """Serialize *instance* (via its writeJSONLike method) into the
    "instances" collection and return the inserted document's id."""
    return db["instances"].insert_one(instance.writeJSONLike()).inserted_id
-
def add_instance_to_experiment_scope(db, scope_name, instance_id):
    """Append *instance_id* to the instance list of the scope named
    *scope_name*."""
    scope_filter = {"_id": scope_name}
    push_instance = {"$push": {"instances": instance_id}}

    db["experiment_scopes"].update_one(scope_filter, push_instance)
-
def write_qubo_to_pool_db(collection, qubo, sat_instance_id):
    """Store a QUBO document linked to its originating SAT instance.

    The description field records the grammar of the serialized QUBO format.
    """
    description = {"<qubo>": "<entrys>",
                   "<entrys>": "<entry><entrys> | <entry> | \"\"",
                   "<entry>": "<coupler><energy>",
                   "<energy>": "<real_number>",
                   "<coupler>": "<node><node>",
                   "<node>": "<clause><literal>",
                   "<clause>": "<natural_number>",
                   "<literal>": "<integer>"}

    collection.insert_one({"instance": sat_instance_id,
                           "description": description,
                           "qubo": __qubo_to_JSON(qubo)})
-
def __qubo_to_JSON(qubo):
    """Convert a {coupler: energy} QUBO dict into a JSON-friendly list of
    [coupler, float(energy)] pairs."""
    return [[coupler, float(energy)] for coupler, energy in qubo.items()]
-
def write_wmis_embedding_to_pool_db(collection, qubo_id, solver_graph_id, embedding):
    """Append an embedding for the (qubo, solver graph) pair, creating the
    enclosing document on first use."""
    if not __embedding_entry_exists(collection, qubo_id, solver_graph_id):
        __prepare_new_wmis_embedding_entry(collection, qubo_id, solver_graph_id)

    entry_key = {"qubo": qubo_id, "solver_graph": solver_graph_id}
    push_embedding = {"$push": {"embeddings": __embedding_to_array(embedding)}}

    collection.update_one(entry_key, push_embedding)
-
def __embedding_entry_exists(collection, qubo_id, solver_graph_id):
    """Return True iff an embedding document for this (qubo, solver graph)
    pair already exists in *collection*."""
    # Local renamed from "filter" to avoid shadowing the builtin; the
    # redundant if/return True/return False collapsed to one expression.
    query = {"qubo": qubo_id, "solver_graph": solver_graph_id}

    return collection.count_documents(query) > 0
-
def __prepare_new_wmis_embedding_entry(collection, qubo_id, solver_graph_id):
    """Insert the skeleton document that embeddings for this (qubo, solver
    graph) pair will be pushed onto; the description records the chain
    grammar of the stored embeddings."""
    description = {"<embedding>": "<chains>",
                   "<chains>": "<chain><chains> | \"\"",
                   "<chain>": "<original_node><chimera_nodes>",
                   "<chimera_nodes>": "<chimera_node><chimera_nodes> | \"\""}

    collection.insert_one({"qubo": qubo_id,
                           "solver_graph": solver_graph_id,
                           "description": description,
                           "embeddings": []})
-
def __embedding_to_array(embedding):
    """Flatten a {node: chain} embedding dict into a list of [node, chain]
    pairs for storage."""
    return [[node, chain] for node, chain in embedding.items()]
-
def write_solver_graph_to_pool_db(collection, graph):
    """Store *graph* (serialized to node-link format) in *collection*,
    deduplicating on content; returns the id of the existing or newly
    inserted document.

    NOTE(review): the parameter name "graph" shadows the module imported at
    the top of this file; kept unchanged for keyword-calling compatibility.
    """
    data = nx.node_link_data(graph)

    # Local renamed from "id" to avoid shadowing the builtin.
    existing_id = queries.get_id_of_solver_graph(collection, data)

    if existing_id is not None:
        return existing_id

    return collection.insert_one({"data": data}).inserted_id
-
def find_wmis_embeddings_for_scope(db, scope, solver_graph):
    """For every QUBO in *scope* without an embedding entry yet, attempt a
    minor embedding into *solver_graph* and persist any valid result."""
    solver_graph_id = write_solver_graph_to_pool_db(db["solver_graphs"],
                                                    solver_graph)

    scope_qubos = queries.WMIS_scope_query(db)
    scope_qubos.query(scope)

    for qubo, qubo_id in tqdm(scope_qubos):
        if __embedding_entry_exists(db["embeddings"], qubo_id, solver_graph_id):
            continue

        nx_qubo = graph.qubo_to_nx_graph(qubo)

        result = minorminer.find_embedding(nx_qubo.edges(),
                                           solver_graph.edges(),
                                           return_overlap=True)

        # result[1] flags whether the embedding is valid (no overlaps).
        if result[1] == 1:
            write_wmis_embedding_to_pool_db(db["embeddings"],
                                            qubo_id,
                                            solver_graph_id,
                                            result[0])
-
def save_simulated_annealing_result(collection, result, solver_input, emb_list_index):
    """Persist one simulated-annealing run, recording which instance and
    which embedding (document id + index into its embedding list) it used."""
    embedding_ref = {
        "embedding_id": solver_input["embeddings_id"],
        "list_index": emb_list_index
    }

    collection.insert_one({"data": result.to_serializable(),
                           "instance": solver_input["instance_id"],
                           "embedding": embedding_ref})
-
def analyze_wmis_sample(sample):
    """Summarize a WMIS sample: count how many variables received a
    non-zero assignment."""
    assignments = np.count_nonzero(list(sample.sample.values()))

    return {"number_of_assignments": assignments}
|