import argparse
import configparser
import os
import random
import ssl
import sys

import minorminer
import mysql.connector
import networkx as nx
import numpy as np
import pymongo
from tqdm import tqdm

from . import graph
from . import queries
from . import SAT2QUBO


def readConfig(configFilePath):
    """Read an INI style config file; returns an empty config if the file does not exist."""
    config = configparser.ConfigParser()

    if os.path.isfile(configFilePath):
        config.read(configFilePath)

    return config


class ArgParser:

    def __init__(self):
        self.__flags = {}
        self.__parser = argparse.ArgumentParser()
        self.__instanceDirArgSet = False
        self.__config = None
        self.__parsedArgs = {}

    def addArg(self, alias,
               shortFlag,
               longFlag,
               help,
               type,
               default=None,
               ignoreDatabaseConfig=False):

        self.__flags[alias] = {"longFlag": longFlag,
                               "hasDefault": False,
                               "ignoreDatabaseConfig": ignoreDatabaseConfig,
                               "type": type}

        if default is not None:
            self.__flags[alias]["hasDefault"] = True

        self.__parser.add_argument("-%s" % shortFlag,
                                   "--%s" % longFlag,
                                   help=help,
                                   type=type,
                                   default=default)

    def addInstanceDirArg(self):
        self.__instanceDirArgSet = True

        self.addArg(alias="datasetDir", shortFlag="d", longFlag="dataset_dir",
                    help="the base directory of the dataset; if this flag is given the others can be omitted",
                    type=str, ignoreDatabaseConfig=True)

    def parse(self):
        self.__parsedArgs = {}
        args = vars(self.__parser.parse_args())

        if self.__instanceDirArgSet:
            self.__config = readConfig(os.path.join(args["dataset_dir"],
                                                    "dataset.config"))
            self.__parseDatasetConfig()

        for alias, flag in self.__flags.items():
            self.__parsedArgs[alias] = self.__processFlag(args, flag)

        self.__config = None

        return self.__parsedArgs

    def __parseDatasetConfig(self):
        for flag, value in self.__config["STRUCTURE"].items():
            self.__parsedArgs[flag] = value

    def __processFlag(self, args, flag):
        longFlag = flag["longFlag"]

        # value taken from the dataset config (if any) acts as the fallback
        tmpValue = self.__parsedArgs[longFlag] if longFlag in self.__parsedArgs else None

        if flag["ignoreDatabaseConfig"]:
            tmpValue = None

        # an explicit command line argument always wins
        if args[longFlag]:
            tmpValue = args[longFlag]

        # otherwise ask for the value interactively
        if tmpValue is None:
            tmpValue = flag["type"](input("pass argument %s: " % longFlag))

        return tmpValue

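# Hypothetical usage sketch for ArgParser (the flag names below are made up
# for illustration and are not defined anywhere else in this module):
#
#   parser = ArgParser()
#   parser.addInstanceDirArg()
#   parser.addArg(alias="numVariables", shortFlag="v", longFlag="variables",
#                 help="number of variables per instance", type=int)
#   args = parser.parse()  # args["numVariables"] comes from the CLI, from the
#                          # dataset.config [STRUCTURE] section, or from a prompt
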
def getDBContext(dbConfigPath):
    dbContext = {}

    dbContext["client"] = connect_to_instance_pool(dbConfigPath)
    dbContext["db"] = dbContext["client"]["experiments"]
    dbContext["instances"] = dbContext["db"]["instances"]
    dbContext["experimentScopes"] = dbContext["db"]["experiment_scopes"]

    return dbContext


def connect_to_instance_pool(dbConfigPath="database.config"):
    dbConf = readConfig(dbConfigPath)

    client = pymongo.MongoClient(
        "mongodb://%s:%s@%s:%s/%s"
        % (dbConf["INSTANCE_POOL"]["user"],
           dbConf["INSTANCE_POOL"]["pw"],
           dbConf["INSTANCE_POOL"]["url"],
           dbConf["INSTANCE_POOL"]["port"],
           dbConf["INSTANCE_POOL"]["database"]),
        ssl=True,
        ssl_cert_reqs=ssl.CERT_NONE)  # note: certificate validation is disabled

    return client[dbConf["INSTANCE_POOL"]["database"]]


def connect_to_experimetns_db(dbConfigPath="database.config"):
    dbConfig = readConfig(dbConfigPath)

    return mysql.connector.connect(
        host=dbConfig["EXPERIMENT_DB"]["url"],
        port=dbConfig["EXPERIMENT_DB"]["port"],
        user=dbConfig["EXPERIMENT_DB"]["user"],
        password=dbConfig["EXPERIMENT_DB"]["pw"],
        database=dbConfig["EXPERIMENT_DB"]["database"]
    )

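# Example database.config layout assumed by the two connection helpers above
# (the section and key names are exactly the ones looked up in this module;
# the values shown are placeholders):
#
#   [INSTANCE_POOL]
#   user = <mongo user>
#   pw = <mongo password>
#   url = <mongo host>
#   port = <mongo port>
#   database = <mongo database>
#
#   [EXPERIMENT_DB]
#   url = <mysql host>
#   port = <mysql port>
#   user = <mysql user>
#   pw = <mysql password>
#   database = <mysql database>
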
def frange(start, stop, steps):
    while start < stop:
        yield start
        start += steps

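# Quick usage example for frange (like range, the stop value is exclusive;
# float steps accumulate rounding error for non-dyadic step sizes):
#   list(frange(0.0, 1.0, 0.25)) -> [0.0, 0.25, 0.5, 0.75]
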
def create_experiment_scope(db, description, name):
    experimentScope = {}
    experimentScope["instances"] = []

    experimentScope["description"] = description

    experimentScope["_id"] = name.strip()

    db["experiment_scopes"].insert_one(experimentScope)


def write_instance_to_pool_db(db, instance):
    instance_document = instance.writeJSONLike()

    result = db["instances"].insert_one(instance_document)

    return result.inserted_id


def add_instance_to_experiment_scope(db, scope_name, instance_id):
    db["experiment_scopes"].update_one(
        {"_id": scope_name},
        {"$push": {"instances": instance_id}}
    )

def write_qubo_to_pool_db(collection, qubo, sat_instance_id):
    doc = {}

    doc["instance"] = sat_instance_id
    # grammar-like description of the stored QUBO format
    doc["description"] = {"<qubo>": "<entrys>",
                          "<entrys>": "<entry><entrys> | <entry> | \"\"",
                          "<entry>": "<coupler><energy>",
                          "<energy>": "<real_number>",
                          "<coupler>": "<node><node>",
                          "<node>": "<clause><literal>",
                          "<clause>": "<natural_number>",
                          "<literal>": "<integer>"}

    doc["qubo"] = __qubo_to_JSON(qubo)

    collection.insert_one(doc)

def create_wmis_qubos_for_scope(db, scope):
    instances = queries.Instance_scope_query(db)
    instances.query(scope)

    for instance, instance_id in instances:
        qubo = SAT2QUBO.WMISdictQUBO(instance)

        write_qubo_to_pool_db(db["wmis_qubos"], qubo, instance_id)


def create_wmis_2_qubos_for_scope(db, scope):
    instances = queries.Instance_scope_query(db)
    instances.query(scope)

    for instance, instance_id in instances:
        qubo = SAT2QUBO.WMISdictQUBO_2(instance)

        write_qubo_to_pool_db(db["wmis_2_qubos"], qubo, instance_id)


def create_wmis_3_qubos_for_scope(db, scope):
    instances = queries.Instance_scope_query(db)
    instances.query(scope)

    for instance, instance_id in tqdm(instances):
        qubo = SAT2QUBO.WMISdictQUBO_3(instance)

        write_qubo_to_pool_db(db["wmis_3_qubos"], qubo, instance_id)


def create_wmis_4_qubos_for_scope(db, scope):
    instances = queries.Instance_scope_query(db)
    instances.query(scope)

    for instance, instance_id in tqdm(instances):
        qubo = SAT2QUBO.WMISdictQUBO_4(instance)

        write_qubo_to_pool_db(db["wmis_4_qubos"], qubo, instance_id)


def create_primitive_isings_for_scope_2(db, scope):
    instances = queries.Instance_scope_query(db)
    instances.query(scope)

    for instance, instance_id in instances:
        ising = SAT2QUBO.primitiveQUBO_2(instance)

        write_qubo_to_pool_db(db["primitive_isings_2"], ising, instance_id)


def create_primitive_qubo_for_scope_5(db, scope):
    instances = queries.Instance_scope_query(db)
    instances.query(scope)

    for instance, instance_id in tqdm(instances):
        ising = SAT2QUBO.primitiveQUBO_5(instance)

        write_qubo_to_pool_db(db["primitive_isings_5"], ising, instance_id)


def create_primitive_qubo_for_scope_8(db, scope):
    instances = queries.Instance_scope_query(db)
    instances.query(scope)

    for instance, instance_id in tqdm(instances):
        ising = SAT2QUBO.primitiveQUBO_8(instance)

        write_qubo_to_pool_db(db["primitive_isings_8"], ising, instance_id)

def __qubo_to_JSON(qubo):
    quboJSON = []

    for coupler, value in qubo.items():
        quboJSON.append([coupler, float(value)])

    return quboJSON

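# For illustration (assuming couplers are pairs of (clause, literal) nodes, as
# in the description written by write_qubo_to_pool_db):
#   __qubo_to_JSON({((0, 1), (0, 1)): -2, ((0, 1), (1, -2)): 1.0})
#   -> [[((0, 1), (0, 1)), -2.0], [((0, 1), (1, -2)), 1.0]]
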
def write_wmis_embedding_to_pool_db(collection, qubo_id, solver_graph_id, seed, embedding):
    if not __embedding_entry_exists(collection, qubo_id, solver_graph_id):
        __prepare_new_wmis_embedding_entry(collection, qubo_id, solver_graph_id)

    collection.update_one(
        {"qubo": qubo_id, "solver_graph": solver_graph_id},
        {
            "$push":
            {
                "embeddings":
                {
                    "embedding": __embedding_to_array(embedding),
                    "seed": seed
                }
            }
        }
    )


def __embedding_entry_exists(collection, qubo_id, solver_graph_id):
    query = {"qubo": qubo_id, "solver_graph": solver_graph_id}

    return collection.count_documents(query) > 0

def __prepare_new_wmis_embedding_entry(collection, qubo_id, solver_graph_id):
    doc = {}

    doc["qubo"] = qubo_id
    doc["solver_graph"] = solver_graph_id
    doc["description"] = {"<embedding>": "<chains>",
                          "<chains>": "<chain><chains> | \"\"",
                          "<chain>": "<original_node><chimera_nodes>",
                          "<chimera_nodes>": "<chimera_node><chimera_nodes> | \"\""}
    doc["embeddings"] = []

    collection.insert_one(doc)

def __embedding_to_array(embedding):
    emb_arr = []

    for node, chain in embedding.items():
        emb_arr.append([node, chain])

    return emb_arr

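# For illustration (minorminer returns a dict mapping each source node to its
# chain of target nodes):
#   __embedding_to_array({0: [5, 12], 1: [7]}) -> [[0, [5, 12]], [1, [7]]]
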
def write_solver_graph_to_pool_db(collection, graph):
    data = nx.node_link_data(graph)

    graph_id = queries.get_id_of_solver_graph(collection, data)

    if graph_id is not None:
        return graph_id

    doc = {}
    doc["data"] = data

    return collection.insert_one(doc).inserted_id

def find_wmis_embeddings_for_scope(db, scope, solver_graph):
    solver_graph_id = write_solver_graph_to_pool_db(db["solver_graphs"],
                                                    solver_graph)

    qubos = queries.WMIS_scope_query(db)
    qubos.query(scope)

    new_embeddings_found = 0
    already_found = 0
    total_count = 0
    for qubo, qubo_id in tqdm(qubos):
        total_count += 1

        # retry with increasing effort until an embedding is stored
        max_no_improvement = 10
        for i in range(5):
            if __embedding_entry_exists(db["embeddings"], qubo_id, solver_graph_id):
                if i == 0:
                    already_found += 1
                break
            else:
                nx_qubo = graph.qubo_to_nx_graph(qubo)

                seed = random.randint(0, sys.maxsize)

                emb = minorminer.find_embedding(nx_qubo.edges(),
                                                solver_graph.edges(),
                                                return_overlap=True,
                                                max_no_improvement=max_no_improvement,
                                                random_seed=seed)

                # with return_overlap=True, emb is (embedding, valid_flag)
                if emb[1] == 1:
                    write_wmis_embedding_to_pool_db(db["embeddings"],
                                                    qubo_id,
                                                    solver_graph_id,
                                                    seed,
                                                    emb[0])
                    new_embeddings_found += 1

                # grow the effort for the next attempt (kept as an integer)
                max_no_improvement = int(max_no_improvement * 1.5)

    percentage = 0

    if total_count > 0:
        percentage = round(((new_embeddings_found + already_found) / total_count) * 100)

    print("found {} of {} embeddings ({}%)".format(new_embeddings_found + already_found,
                                                   total_count,
                                                   percentage))
    print("{} new embeddings found".format(new_embeddings_found))

def find_embeddings_for_scope(db, solver_graph, qubo_ising_query):
    solver_graph_id = write_solver_graph_to_pool_db(db["solver_graphs"],
                                                    solver_graph)

    new_embeddings_found = 0
    already_found = 0
    total_count = 0
    for qubo, qubo_id in tqdm(qubo_ising_query):
        total_count += 1

        # retry with increasing effort until an embedding is stored
        max_no_improvement = 10
        for i in range(5):
            if __embedding_entry_exists(db["embeddings"], qubo_id, solver_graph_id):
                if i == 0:
                    already_found += 1
                break
            else:
                nx_qubo = graph.qubo_to_nx_graph(qubo)

                seed = random.randint(0, sys.maxsize)

                emb = minorminer.find_embedding(nx_qubo.edges(),
                                                solver_graph.edges(),
                                                return_overlap=True,
                                                max_no_improvement=max_no_improvement,
                                                random_seed=seed)

                # with return_overlap=True, emb is (embedding, valid_flag)
                if emb[1] == 1:
                    write_wmis_embedding_to_pool_db(db["embeddings"],
                                                    qubo_id,
                                                    solver_graph_id,
                                                    seed,
                                                    emb[0])
                    new_embeddings_found += 1

                # grow the effort for the next attempt (kept as an integer)
                max_no_improvement = int(max_no_improvement * 1.5)

    percentage = 0

    if total_count > 0:
        percentage = round(((new_embeddings_found + already_found) / total_count) * 100)

    print("found {} of {} embeddings ({}%)".format(new_embeddings_found + already_found,
                                                   total_count,
                                                   percentage))
    print("{} new embeddings found".format(new_embeddings_found))

def save_sample_set(collection, result, solver_input, emb_list_index, run):
    doc = {}

    doc["data"] = result.to_serializable()
    doc["instance"] = solver_input["instance_id"]
    doc["embedding"] = {
        "embedding_id": solver_input["embeddings_id"],
        "list_index": emb_list_index
    }
    doc["run"] = run

    collection.insert_one(doc)


def save_qpu_result():
    # stub: only creates an empty document and does not store anything yet
    doc = {}

def analyze_wmis_sample(sample):
    data = {}

    data["number_of_assignments"] = np.count_nonzero(list(sample.sample.values()))
    data["chain_break_fraction"] = sample.chain_break_fraction
    data["num_occurrences"] = sample.num_occurrences
    data["energy"] = sample.energy

    return data

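# Assumed calling convention (not exercised in this module): `sample` is one
# record of a dimod SampleSet, e.g.
#   for datum in sample_set.data():
#       stats = analyze_wmis_sample(datum)
# which provides the .sample, .energy, .num_occurrences and (for embedded
# results) .chain_break_fraction fields accessed above.
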
def analyde_minisat_run(run_document):
    data = {}

    data["satisfiable"] = run_document["satisfiable"]

    return data

def majority_vote_sample(sample):
    assignments = {}

    for coupler, energy in sample.items():
        # coupler[1] is the signed literal of the (clause, literal) node;
        # its sign encodes the proposed truth value of the variable
        var = abs(coupler[1])

        if var not in assignments:
            assignments[var] = {"all": []}

        if energy == 1:
            assignments[var]["all"].append(1 if coupler[1] > 0 else 0)

    for var, a in assignments.items():
        assignments[var]["majority"] = 1 if __true_percentage(a["all"]) >= 0.5 else 0

    assignment = [0 for i in range(len(assignments))]

    for var, a in assignments.items():
        assignment[var - 1] = a["majority"]

    return assignment


def __true_percentage(a):
    if len(a) == 0:
        return 0

    return np.count_nonzero(a) / len(a)
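
# Worked toy example (hypothetical sample over two variables and two clauses):
#   majority_vote_sample({(0, 1): 1, (1, -1): 0,
#                         (0, 2): 0, (1, -2): 1})
#   -> [1, 0]
# Variable 1 is only asserted positively and variable 2 only negatively, so
# the majority vote assigns x1 = True and x2 = False.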