You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

354 lines
11 KiB

6 years ago
5 years ago
5 years ago
6 years ago
6 years ago
6 years ago
6 years ago
6 years ago
5 years ago
5 years ago
5 years ago
6 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
5 years ago
  1. import configparser
  2. import os
  3. import argparse
  4. import pymongo
  5. import ssl
  6. import mysql.connector
  7. import networkx as nx
  8. from . import queries
  9. from . import graph
  10. import minorminer
  11. from tqdm import tqdm
  12. import numpy as np
  13. import random
  14. import sys
  15. def readConfig(configFilePath):
  16. config = configparser.ConfigParser()
  17. if os.path.isfile(configFilePath):
  18. config.read(configFilePath)
  19. return config
  20. class ArgParser:
  21. def __init__(self):
  22. self.__flags = {}
  23. self.__parser = argparse.ArgumentParser()
  24. self.__instanceDirArgSet = False
  25. self.__config = None
  26. self.__parsedArgs = {}
  27. def addArg(self, alias,
  28. shortFlag,
  29. longFlag,
  30. help,
  31. type,
  32. default=None,
  33. ignoreDatabaseConfig=False):
  34. self.__flags[alias] = {"longFlag": longFlag,
  35. "hasDefault": False,
  36. "ignoreDatabaseConfig": ignoreDatabaseConfig,
  37. "type": type}
  38. if default != None:
  39. self.__flags[alias]["hasDefault"] = True
  40. self.__parser.add_argument("-%s" % shortFlag,
  41. "--%s" % longFlag,
  42. help=help,
  43. type=type,
  44. default=default)
  45. def addInstanceDirArg(self):
  46. self.__instanceDirArgSet = True
  47. self.addArg(alias="datasetDir", shortFlag="d", longFlag="dataset_dir",
  48. help="the base direcotry of the dataset; if this flag is given the others can be omitted",
  49. type=str, ignoreDatabaseConfig=True)
  50. def parse(self):
  51. self.__parsedArgs = {}
  52. args = vars(self.__parser.parse_args())
  53. if self.__instanceDirArgSet:
  54. self.__config = readConfig(os.path.join(args["dataset_dir"],
  55. "dataset.config"))
  56. self.__parseDatasetConfig()
  57. for alias, flag in self.__flags.items():
  58. self.__parsedArgs[alias] = self.__processFlag(args, flag)
  59. self.__config = None
  60. return self.__parsedArgs
  61. def __parseDatasetConfig(self):
  62. for flag, value in self.__config["STRUCTURE"].items():
  63. self.__parsedArgs[flag] = value
  64. def __processFlag(self, args, flag):
  65. longFlag = flag["longFlag"]
  66. tmpValue = self.__parsedArgs[longFlag] if longFlag in self.__parsedArgs else None
  67. if flag["ignoreDatabaseConfig"] == True:
  68. tmpValue = None
  69. if args[longFlag]:
  70. tmpValue = args[longFlag]
  71. if tmpValue == None:
  72. tmpValue = flag["type"](input("pass arguement %s: " % longFlag))
  73. return tmpValue
  74. def getDBContext(dbConfigPath):
  75. dbContext = {}
  76. dbContext["client"] = connect_to_instance_pool(dbConfigPath)
  77. dbContext["db"] = dbContext["client"]["experiments"]
  78. dbContext["instances"] = dbContext["db"]["instances"]
  79. dbContext["experimentScopes"] = dbContext["db"]["experiment_scopes"]
  80. return dbContext
  81. def connect_to_instance_pool(dbConfigPath = "database.config"):
  82. dbConf = readConfig(dbConfigPath)
  83. client = pymongo.MongoClient(
  84. "mongodb://%s:%s@%s:%s/%s"
  85. % ( dbConf["INSTANCE_POOL"]["user"],
  86. dbConf["INSTANCE_POOL"]["pw"],
  87. dbConf["INSTANCE_POOL"]["url"],
  88. dbConf["INSTANCE_POOL"]["port"],
  89. dbConf["INSTANCE_POOL"]["database"]),
  90. ssl=True,
  91. ssl_cert_reqs=ssl.CERT_NONE)
  92. return client[dbConf["INSTANCE_POOL"]["database"]]
  93. def connect_to_experimetns_db(dbConfigPath = "database.config"):
  94. dbConfig = readConfig(dbConfigPath)
  95. return mysql.connector.connect(
  96. host=dbConfig["EXPERIMENT_DB"]["url"],
  97. port=dbConfig["EXPERIMENT_DB"]["port"],
  98. user=dbConfig["EXPERIMENT_DB"]["user"],
  99. password=dbConfig["EXPERIMENT_DB"]["pw"],
  100. database=dbConfig["EXPERIMENT_DB"]["database"]
  101. )
  102. def frange(start, stop, steps):
  103. while start < stop:
  104. yield start
  105. start += steps
  106. def create_experiment_scope(db, description, name):
  107. experimentScope = {}
  108. experimentScope["instances"] = []
  109. experimentScope["description"] = description
  110. experimentScope["_id"] = name.strip()
  111. db["experiment_scopes"].insert_one(experimentScope)
  112. def write_instance_to_pool_db(db, instance):
  113. instance_document = instance.writeJSONLike()
  114. result = db["instances"].insert_one(instance_document)
  115. return result.inserted_id
  116. def add_instance_to_experiment_scope(db, scope_name, instance_id):
  117. db["experiment_scopes"].update_one(
  118. {"_id": scope_name},
  119. {"$push": {"instances": instance_id}}
  120. )
  121. def write_qubo_to_pool_db(collection, qubo, sat_instance_id):
  122. doc = {}
  123. doc["instance"] = sat_instance_id
  124. doc["description"] = {"<qubo>": "<entrys>",
  125. "<entrys>": "<entry><entrys> | <entry> | \"\"",
  126. "<entry>": "<coupler><energy>",
  127. "<energy>": "<real_number>",
  128. "<coupler>": "<node><node>",
  129. "<node>": "<clause><literal>",
  130. "<clause>": "<natural_number>",
  131. "<literal>": "<integer>"}
  132. doc["qubo"] = __qubo_to_JSON(qubo)
  133. collection.insert_one(doc)
  134. def __qubo_to_JSON(qubo):
  135. quboJSON = []
  136. for coupler, value in qubo.items():
  137. quboJSON.append([coupler, float(value)])
  138. return quboJSON
  139. def write_wmis_embedding_to_pool_db(collection, qubo_id, solver_graph_id, seed, embedding):
  140. if not __embedding_entry_exists(collection, qubo_id, solver_graph_id):
  141. __prepare_new_wmis_embedding_entry(collection, qubo_id, solver_graph_id)
  142. collection.update_one(
  143. {"qubo": qubo_id, "solver_graph": solver_graph_id},
  144. {
  145. "$push":
  146. {
  147. "embeddings":
  148. {
  149. "embedding": __embedding_to_array(embedding),
  150. "seed": seed
  151. }
  152. }
  153. }
  154. )
  155. def __embedding_entry_exists(collection, qubo_id, solver_graph_id):
  156. filter = {"qubo": qubo_id, "solver_graph": solver_graph_id}
  157. if collection.count_documents(filter) > 0:
  158. return True
  159. return False
  160. def __prepare_new_wmis_embedding_entry(collection, qubo_id, solver_graph_id):
  161. doc = {}
  162. doc["qubo"] = qubo_id
  163. doc["solver_graph"] = solver_graph_id
  164. doc["description"] = {"<embedding>": "<chains>",
  165. "<chains>": "<chain><chains> | \"\"",
  166. "<chain>" : "<original_node><chimera_nodes>",
  167. "<chimera_nodes>": "<chimera_node><chimera_nodes> | \"\""}
  168. doc["embeddings"] = []
  169. collection.insert_one(doc)
  170. def __embedding_to_array(embedding):
  171. emb_arr = []
  172. for node, chain in embedding.items():
  173. emb_arr.append([node, chain])
  174. return emb_arr
  175. def write_solver_graph_to_pool_db(collection, graph):
  176. data = nx.node_link_data(graph)
  177. id = queries.get_id_of_solver_graph(collection, data)
  178. if id != None:
  179. return id
  180. doc = {}
  181. doc["data"] = data
  182. return collection.insert_one(doc).inserted_id
  183. def find_wmis_embeddings_for_scope(db, scope, solver_graph):
  184. solver_graph_id = write_solver_graph_to_pool_db(db["solver_graphs"],
  185. solver_graph)
  186. qubos = queries.WMIS_scope_query(db)
  187. qubos.query(scope)
  188. new_embeddings_found = 0
  189. already_found = 0
  190. total_count = 0
  191. for qubo, qubo_id in tqdm(qubos):
  192. total_count += 1
  193. max_no_improvement = 10
  194. for i in range(5):
  195. if __embedding_entry_exists(db["embeddings"], qubo_id, solver_graph_id):
  196. if i == 0:
  197. already_found += 1
  198. break;
  199. else:
  200. nx_qubo = graph.qubo_to_nx_graph(qubo)
  201. seed = random.randint(0, sys.maxsize)
  202. emb = minorminer.find_embedding(nx_qubo.edges(),
  203. solver_graph.edges(),
  204. return_overlap=True,
  205. max_no_improvement=max_no_improvement,
  206. random_seed=seed)
  207. if emb[1] == 1:
  208. write_wmis_embedding_to_pool_db(db["embeddings"],
  209. qubo_id,
  210. solver_graph_id,
  211. seed,
  212. emb[0])
  213. new_embeddings_found += 1
  214. max_no_improvement *= 1.5
  215. percentage = 0
  216. if total_count > 0:
  217. percentage = round(((new_embeddings_found + already_found) / total_count) * 100)
  218. print("found {} of {} embeddigns ({}%)".format(new_embeddings_found + already_found,
  219. total_count,
  220. percentage))
  221. print("{} new embeddigns found".format(new_embeddings_found))
  222. def save_simulated_annealing_result(collection, result, solver_input, emb_list_index):
  223. doc = {}
  224. doc["data"] = result.to_serializable()
  225. doc["instance"] = solver_input["instance_id"]
  226. doc["embedding"] = {
  227. "embedding_id": solver_input["embeddings_id"],
  228. "list_index": emb_list_index
  229. }
  230. collection.insert_one(doc)
  231. def analyze_wmis_sample(sample):
  232. data = {}
  233. data["number_of_assignments"] = np.count_nonzero(list(sample.sample.values()))
  234. data["chain_break_fraction"] = sample.chain_break_fraction
  235. data["num_occurrences"] = sample.num_occurrences
  236. data["energy"] = sample.energy
  237. return data
  238. def analyde_minisat_run(run_document):
  239. data = {}
  240. data["satisfiable"] = run_document["satisfiable"]
  241. return data
  242. def majority_vote_sample(sample):
  243. assignments = {}
  244. for coupler, energy in sample.items():
  245. var = abs(coupler[1])
  246. if var not in assignments:
  247. assignments[var] = {"all": []}
  248. if energy == 1:
  249. assignments[var]["all"].append(1 if coupler[1] > 0 else 0)
  250. for var, a in assignments.items():
  251. assignments[var]["majority"] = 1 if __true_percentage(a["all"]) >= 0.5 else 0
  252. assignment = [0 for i in range(len(assignments))]
  253. for var, a in assignments.items():
  254. assignment[var - 1] = a["majority"]
  255. return assignment
  256. def __true_percentage(a):
  257. if len(a) == 0:
  258. return 0
  259. return np.count_nonzero(a) / len(a)