Commit 01f0b4a

Update exps.

1 parent 92705d8 commit 01f0b4a

7 files changed: +133 additions, -18 deletions

Lines changed: 64 additions & 0 deletions
@@ -0,0 +1,64 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Created on Mon Sep 21 10:34:26 2020
+
+@author: ljia
+"""
+from utils import Graph_Kernel_List, Dataset_List, compute_graph_kernel
+from gklearn.utils.graphdataset import load_predefined_dataset
+import logging
+
+
+# def get_graphs(ds_name):
+# 	from gklearn.utils.graph_synthesizer import GraphSynthesizer
+# 	gsyzer = GraphSynthesizer()
+# 	graphs = gsyzer.unified_graphs(num_graphs=100, num_nodes=num_nodes, num_edges=int(num_nodes*2), num_node_labels=0, num_edge_labels=0, seed=None, directed=False)
+# 	return graphs
+
+
+def xp_runtimes_of_all_7cores():
+
+	# Run and save.
+	import pickle
+	import os
+	save_dir = 'outputs/runtimes_of_all_7cores/'
+	if not os.path.exists(save_dir):
+		os.makedirs(save_dir)
+
+	run_times = {}
+
+	for kernel_name in Graph_Kernel_List:
+		print()
+		print('Kernel:', kernel_name)
+
+		run_times[kernel_name] = []
+		for ds_name in Dataset_List:
+			print()
+			print('Dataset:', ds_name)
+
+			# get graphs.
+			graphs, _ = load_predefined_dataset(ds_name)
+
+			# Compute Gram matrix.
+			try:
+				gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name, n_jobs=28)
+				run_times[kernel_name].append(run_time)
+			except Exception as exp:
+				run_times[kernel_name].append('error')
+				print('An exception occured when running this experiment:')
+				LOG_FILENAME = save_dir + 'error.txt'
+				logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
+				logging.exception('')
+				print(repr(exp))
+
+			pickle.dump(run_time, open(save_dir + 'run_time.' + kernel_name + '.' + ds_name + '.pkl', 'wb'))
+
+	# Save all.
+	pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))
+
+	return
+
+
+if __name__ == '__main__':
+	xp_runtimes_of_all_7cores()
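
For reference, a minimal sketch (not part of this commit) of how the timings pickled by this script could be read back; the directory and file names follow the code above, and the string 'error' marks runs that raised an exception.

import pickle

# Load the aggregated results written by xp_runtimes_of_all_7cores().
with open('outputs/runtimes_of_all_7cores/run_times.pkl', 'rb') as f:
	run_times = pickle.load(f)

# run_times maps each kernel name to a list with one entry per dataset:
# a run time, or 'error' if that experiment failed.
for kernel_name, times in run_times.items():
	print(kernel_name, times)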

gklearn/experiments/papers/PRL_2020/synthesized_graphs_N.py

Lines changed: 13 additions & 3 deletions
@@ -6,6 +6,7 @@
 @author: ljia
 """
 from utils import Graph_Kernel_List, compute_graph_kernel
+import logging
 
 
 def generate_graphs():
@@ -39,10 +40,19 @@ def xp_synthesied_graphs_dataset_size():
 		print('Number of graphs:', num_graphs)
 
 		sub_graphs = [g.copy() for g in graphs[0:num_graphs]]
-		gram_matrix, run_time = compute_graph_kernel(sub_graphs, kernel_name)
-		run_times[kernel_name].append(run_time)
 
-		pickle.dump(run_times, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_graphs) + '.pkl', 'wb'))
+		try:
+			gram_matrix, run_time = compute_graph_kernel(sub_graphs, kernel_name, n_jobs=1)
+			run_times[kernel_name].append(run_time)
+		except Exception as exp:
+			run_times[kernel_name].append('error')
+			print('An exception occured when running this experiment:')
+			LOG_FILENAME = save_dir + 'error.txt'
+			logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
+			logging.exception('')
+			print(repr(exp))
+
+		pickle.dump(run_time, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_graphs) + '.pkl', 'wb'))
 
 	# Save all.
 	pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))

gklearn/experiments/papers/PRL_2020/synthesized_graphs_degrees.py

Lines changed: 12 additions & 3 deletions
@@ -6,6 +6,7 @@
 @author: ljia
 """
 from utils import Graph_Kernel_List, compute_graph_kernel
+import logging
 
 
 def generate_graphs(degree):
@@ -39,10 +40,18 @@ def xp_synthesied_graphs_degrees():
 		graphs = generate_graphs(degree)
 
 		# Compute Gram matrix.
-		gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name)
-		run_times[kernel_name].append(run_time)
+		try:
+			gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name, n_jobs=1)
+			run_times[kernel_name].append(run_time)
+		except Exception as exp:
+			run_times[kernel_name].append('error')
+			print('An exception occured when running this experiment:')
+			LOG_FILENAME = save_dir + 'error.txt'
+			logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
+			logging.exception('')
+			print(repr(exp))
 
-		pickle.dump(run_times, open(save_dir + 'run_time.' + kernel_name + '.' + str(degree) + '.pkl', 'wb'))
+		pickle.dump(run_time, open(save_dir + 'run_time.' + kernel_name + '.' + str(degree) + '.pkl', 'wb'))
 
 	# Save all.
 	pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))

gklearn/experiments/papers/PRL_2020/synthesized_graphs_num_el.py

Lines changed: 12 additions & 3 deletions
@@ -6,6 +6,7 @@
 @author: ljia
 """
 from utils import Graph_Kernel_List_ESym, compute_graph_kernel
+import logging
 
 
 def generate_graphs(num_el_alp):
@@ -39,10 +40,18 @@ def xp_synthesied_graphs_num_edge_label_alphabet():
 		graphs = generate_graphs(num_el_alp)
 
 		# Compute Gram matrix.
-		gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name)
-		run_times[kernel_name].append(run_time)
+		try:
+			gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name, n_jobs=1)
+			run_times[kernel_name].append(run_time)
+		except Exception as exp:
+			run_times[kernel_name].append('error')
+			print('An exception occured when running this experiment:')
+			LOG_FILENAME = save_dir + 'error.txt'
+			logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
+			logging.exception('')
+			print(repr(exp))
 
-		pickle.dump(run_times, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_el_alp) + '.pkl', 'wb'))
+		pickle.dump(run_time, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_el_alp) + '.pkl', 'wb'))
 
 	# Save all.
 	pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))

gklearn/experiments/papers/PRL_2020/synthesized_graphs_num_nl.py

Lines changed: 12 additions & 3 deletions
@@ -6,6 +6,7 @@
 @author: ljia
 """
 from utils import Graph_Kernel_List_VSym, compute_graph_kernel
+import logging
 
 
 def generate_graphs(num_nl_alp):
@@ -39,10 +40,18 @@ def xp_synthesied_graphs_num_node_label_alphabet():
 		graphs = generate_graphs(num_nl_alp)
 
 		# Compute Gram matrix.
-		gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name)
-		run_times[kernel_name].append(run_time)
+		try:
+			gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name, n_jobs=1)
+			run_times[kernel_name].append(run_time)
+		except Exception as exp:
+			run_times[kernel_name].append('error')
+			print('An exception occured when running this experiment:')
+			LOG_FILENAME = save_dir + 'error.txt'
+			logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
+			logging.exception('')
+			print(repr(exp))
 
-		pickle.dump(run_times, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_nl_alp) + '.pkl', 'wb'))
+		pickle.dump(run_time, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_nl_alp) + '.pkl', 'wb'))
 
 	# Save all.
 	pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))

gklearn/experiments/papers/PRL_2020/synthesized_graphs_num_nodes.py

Lines changed: 12 additions & 3 deletions
@@ -6,6 +6,7 @@
 @author: ljia
 """
 from utils import Graph_Kernel_List, compute_graph_kernel
+import logging
 
 
 def generate_graphs(num_nodes):
@@ -39,10 +40,18 @@ def xp_synthesied_graphs_num_nodes():
 		graphs = generate_graphs(num_nodes)
 
 		# Compute Gram matrix.
-		gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name)
-		run_times[kernel_name].append(run_time)
+		try:
+			gram_matrix, run_time = compute_graph_kernel(graphs, kernel_name, n_jobs=1)
+			run_times[kernel_name].append(run_time)
+		except Exception as exp:
+			run_times[kernel_name].append('error')
+			print('An exception occured when running this experiment:')
+			LOG_FILENAME = save_dir + 'error.txt'
+			logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
+			logging.exception('')
+			print(repr(exp))
 
-		pickle.dump(run_times, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_nodes) + '.pkl', 'wb'))
+		pickle.dump(run_time, open(save_dir + 'run_time.' + kernel_name + '.' + str(num_nodes) + '.pkl', 'wb'))
 
 	# Save all.
 	pickle.dump(run_times, open(save_dir + 'run_times.pkl', 'wb'))

gklearn/experiments/papers/PRL_2020/utils.py

Lines changed: 8 additions & 3 deletions
@@ -5,6 +5,9 @@
 
 @author: ljia
 """
+import multiprocessing
+
+
 Graph_Kernel_List = ['PathUpToH', 'WLSubtree', 'SylvesterEquation', 'Marginalized', 'ShortestPath', 'Treelet', 'ConjugateGradient', 'FixedPoint', 'SpectralDecomposition', 'StructuralSP', 'CommonWalk']
 # Graph_Kernel_List = ['CommonWalk', 'Marginalized', 'SylvesterEquation', 'ConjugateGradient', 'FixedPoint', 'SpectralDecomposition', 'ShortestPath', 'StructuralSP', 'PathUpToH', 'Treelet', 'WLSubtree']
 
@@ -21,8 +24,10 @@
 Graph_Kernel_List_ECon = ['ConjugateGradient', 'FixedPoint', 'StructuralSP']
 
 
-def compute_graph_kernel(graphs, kernel_name):
-	import multiprocessing
+Dataset_List = ['Alkane', 'Acyclic', 'MAO', 'PAH', 'MUTAG', 'Letter-med', 'ENZYMES', 'AIDS', 'NCI1', 'NCI109', 'DD']
+
+
+def compute_graph_kernel(graphs, kernel_name, n_jobs=multiprocessing.cpu_count()):
 
 	if kernel_name == 'CommonWalk':
 		from gklearn.kernels.commonWalkKernel import commonwalkkernel
@@ -99,7 +104,7 @@ def compute_graph_kernel(graphs, kernel_name):
 		params = {'base_kernel': 'subtree', 'height': 5}
 
 	# params['parallel'] = None
-	params['n_jobs'] = multiprocessing.cpu_count()
+	params['n_jobs'] = n_jobs
 	params['verbose'] = True
 	results = estimator(graphs, **params)
 
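
For illustration, a minimal sketch (not part of the commit) of calling the updated compute_graph_kernel with an explicit n_jobs, assuming it is run from the PRL_2020 experiment directory; the dataset name 'MUTAG' and kernel name 'ShortestPath' are taken from Dataset_List and Graph_Kernel_List above.

from utils import compute_graph_kernel
from gklearn.utils.graphdataset import load_predefined_dataset

# Load one of the predefined datasets listed in Dataset_List.
graphs, _ = load_predefined_dataset('MUTAG')

# n_jobs now defaults to multiprocessing.cpu_count(); pass 1 to force a serial run.
gram_matrix, run_time = compute_graph_kernel(graphs, 'ShortestPath', n_jobs=1)
print('run time:', run_time)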