diff --git a/examples/stabilizer/run_all_narma.sh b/examples/stabilizer/run_all_narma.sh
new file mode 100644
index 0000000..b5c68ad
--- /dev/null
+++ b/examples/stabilizer/run_all_narma.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+
+for i in {0..4}
+do
+    sbatch run_reservoir_narma.sh 3 2 20 classical None $i 1 narma 3
+    sbatch run_reservoir_narma.sh 3 2 20 quantum_part None $i 1 narma 3
+    sbatch run_reservoir_narma.sh 3 2 20 quantum_stab None $i 1 narma 3
+    sbatch run_reservoir_narma.sh 3 2 20 quantum_part True $i 1 narma 3
+    sbatch run_reservoir_narma.sh 3 2 20 quantum_stab True $i 1 narma 3
+
+    for j in {2..3}
+    do
+        sbatch run_reservoir_narma.sh 4 $j 20 classical None $i 1 narma 3
+        sbatch run_reservoir_narma.sh 4 $j 20 quantum_part None $i 1 narma 3
+        sbatch run_reservoir_narma.sh 4 $j 20 quantum_stab None $i 1 narma 3
+        sbatch run_reservoir_narma.sh 4 $j 20 quantum_part True $i 1 narma 3
+        sbatch run_reservoir_narma.sh 4 $j 20 quantum_stab True $i 1 narma 3
+    done
+
+
+    for j in {2..4}
+    do
+        sbatch run_reservoir_narma.sh 5 $j 20 classical None $i 1 narma 5
+        sbatch run_reservoir_narma.sh 5 $j 20 quantum_part None $i 1 narma 5
+        sbatch run_reservoir_narma.sh 5 $j 20 quantum_stab None $i 1 narma 5
+        sbatch run_reservoir_narma.sh 5 $j 20 quantum_part True $i 1 narma 5
+        sbatch run_reservoir_narma.sh 5 $j 20 quantum_stab True $i 1 narma 5
+    done
+
+done
+
+# NOTE(review): these four runs previously reused the stale loop variable $i
+# (left at 4 after the loop); the seed is now explicit -- confirm intended value.
+sbatch run_reservoir_narma.sh 3 2 20 quantum_part None 4 0 narma 5
+sbatch run_reservoir_narma.sh 3 2 20 quantum_stab None 4 0 narma 5
+sbatch run_reservoir_narma.sh 4 3 20 quantum_part None 4 0 narma 5
+sbatch run_reservoir_narma.sh 4 3 20 quantum_stab None 4 0 narma 5
diff --git a/examples/stabilizer/run_reservoir_narma.py b/examples/stabilizer/run_reservoir_narma.py
new file mode 100644
index 0000000..888454b
--- /dev/null
+++ b/examples/stabilizer/run_reservoir_narma.py
@@ -0,0 +1,189 @@
+import sys
+from reservoirpy.datasets import logistic_map, narma
+import numpy as np
+import reservoirpy as rpy
+import pickle
+from sklearn.model_selection import TimeSeriesSplit
+from sklearn.linear_model import LinearRegression, Ridge
+from quantumreservoirpy.util import create_shifted_array
+from qiskit_aer.noise import (NoiseModel, QuantumError, ReadoutError,
+                              pauli_error, depolarizing_error,
+                              thermal_relaxation_error)
+from itertools import combinations
+from quantumreservoirpy.util import randomIsing
+from reservoirpy.nodes import Reservoir
+from quantumreservoirpy.partialmeasurement import PartialMeasurement
+from quantumreservoirpy.stabilizer import Stabilizer
+from qiskit_aer import AerSimulator
+
+
+def fit_model(model, res_states, series, WARMUP, timeplex=1):
+    """Fit *model* to one-step-ahead prediction of *series*.
+
+    The first ``WARMUP`` fraction of the samples is discarded as washout;
+    when ``timeplex > 1`` the reservoir states are time-multiplexed first.
+    Returns the fitted model, the design matrix X and the targets y.
+    """
+    warmup = int(len(series) * WARMUP)
+
+    X = res_states[warmup:-1]
+    y = series[warmup + 1:]
+
+    if timeplex > 1:
+        X = create_shifted_array(X, timeplex)
+    model.fit(X, y)
+
+    return model, X, y
+
+
+def run_prediction(model, res_states, timeplex=1):
+    """Predict a single next value from the most recent reservoir state."""
+    X = np.copy(res_states)
+    if timeplex > 1:
+        X = create_shifted_array(X, timeplex)
+    X = X[-1, :]
+    X = X.reshape((1, -1))
+    return model.predict(X)
+
+
+def henon1d(n, a=1.4, b=0.3):
+    """Return *n* samples of the x-coordinate of the Henon map."""
+    ts = [0, 0]
+    for i in range(2, n + 2):
+        ts.append(1 - a * ts[i - 1] ** 2 + b * ts[i - 2])
+    return np.array(ts[2:])
+
+
+def main(num_qubits, num_meas, num_reservoirs, method, noise, lentrain,
+         decode, casename, order, timeplex=10, degree=None):
+    """Train one reservoir configuration and pickle states/scores/predictions.
+
+    method is one of 'classical', 'quantum_part' or 'quantum_stab';
+    noise is the string "None" for a noiseless backend, anything else
+    enables a depolarizing noise model; lentrain selects which
+    TimeSeriesSplit fold is evaluated.
+    """
+    if not degree:
+        degree = num_meas
+
+    num_neurons = num_reservoirs * (2 ** num_meas - 1)
+
+    if casename == "henon":
+        ts = henon1d(200)
+    elif casename == "logistic":
+        ts = logistic_map(200, r=3.9, x0=0.5).flatten()
+    elif casename == "narma":
+        u = np.loadtxt('u_driver.txt', dtype=float)
+        u = u[:(200 + order)].reshape(-1, 1)
+        ts = narma(n_timesteps=200, order=order, u=u).flatten()
+    else:
+        raise ValueError("unknown casename: " + str(casename))
+
+    string_identifier = "casename" + str(casename) + "_num_qubits" + str(num_qubits) + "_num_meas" + str(num_meas)
+    string_identifier += "_degree" + str(degree) + "_num_reservoirs" + str(num_reservoirs) + "_timeplex" + str(timeplex)
+    string_identifier += "_method" + str(method) + "_noise" + str(noise)
+    if not decode:
+        string_identifier += "_decodeFalse"
+
+    print(string_identifier, " number of neurons/observables=", num_neurons)
+
+    linreg = Ridge(alpha=1e-7)
+
+    if noise == "None":
+        noise_model = None
+    else:
+        # Depolarizing noise: 1% on single-qubit u1/u2/u3, 10% on CX.
+        noise_model = NoiseModel()
+        error = depolarizing_error(0.01, 1)
+        noise_model.add_all_qubit_quantum_error(error, ['u1', 'u2', 'u3'])
+        error = depolarizing_error(0.1, 2)
+        noise_model.add_all_qubit_quantum_error(error, ['cx'])
+
+    WARMUP = 0.3
+
+    with open("isingparams_" + "num_qubits" + str(num_qubits) + "_num_reservoirs" + str(num_reservoirs) + ".pickle", "rb") as f:
+        isingparams = pickle.load(f)
+
+    if method == "classical":
+        res = Reservoir(num_neurons, lr=0.5, sr=0.9)
+    elif method == "quantum_part":
+        res = PartialMeasurement(num_qubits, num_meas,
+                                 backend=AerSimulator(noise_model=noise_model),
+                                 degree=degree, num_reservoirs=num_reservoirs,
+                                 isingparams=isingparams, decode=decode)
+    elif method == "quantum_stab":
+        res = Stabilizer(num_qubits, num_meas,
+                         backend=AerSimulator(noise_model=noise_model),
+                         degree=degree, num_reservoirs=num_reservoirs,
+                         isingparams=isingparams, decode=decode)
+    else:
+        raise ValueError("unknown method: " + str(method))
+
+    tscv = TimeSeriesSplit()
+
+    for i, (train_index, test_index) in enumerate(tscv.split(ts)):
+        # Only the fold selected by lentrain is evaluated.
+        if not i == lentrain:
+            continue
+
+        X_train = ts[train_index]
+        X_test = ts[test_index]
+        num_pred = len(test_index)
+
+        with open("X_train" + str(i) + "_" + string_identifier + ".pickle", "wb") as f:
+            pickle.dump(X_train, f)
+        with open("X_test" + str(i) + "_" + string_identifier + ".pickle", "wb") as f:
+            pickle.dump(X_test, f)
+
+        if method == 'classical':
+            states = res.run(X_train.reshape(-1, 1), reset=True)
+            linreg, X, y = fit_model(linreg, states, X_train, WARMUP, 1)
+        else:
+            states = res.run(timeseries=X_train, shots=1e3, precision=1e-2)
+            linreg, X, y = fit_model(linreg, states, X_train, WARMUP, timeplex)
+
+        score = linreg.score(X, y)
+        print("score[", method, "]=", score)
+
+        with open("score" + str(i) + "_" + string_identifier + ".pickle", "wb") as f:
+            pickle.dump(score, f)
+
+        with open("state" + str(i) + "_" + string_identifier + ".pickle", "wb") as f:
+            pickle.dump(states, f)
+
+        firsttime = True
+        prediction = X_train
+
+        for j in range(num_pred):
+            print(j, "/", num_pred)
+            if method == 'classical':
+                if firsttime:
+                    states = res.state()
+                else:
+                    states = res.run(prediction[-1])
+                tmp = run_prediction(linreg, states, 1)
+            else:
+                # Re-drive the reservoir on the tail of the running prediction.
+                states = res.run(prediction[-2 * int(timeplex):], shots=1e3, precision=1e-2)
+                tmp = run_prediction(linreg, states, timeplex)
+
+            prediction = np.append(prediction, tmp)
+            firsttime = False
+
+        with open("prediction" + str(i) + "_" + string_identifier + ".pickle", "wb") as f:
+            pickle.dump(prediction, f)
+
+
+if __name__ == "__main__":
+    num_qubits = int(sys.argv[1])
+    num_meas = int(sys.argv[2])
+    num_reservoirs = int(sys.argv[3])
+    method = str(sys.argv[4])
+    noise = str(sys.argv[5])
+    lentrain = int(sys.argv[6])
+    decode = bool(int(sys.argv[7]))
+    casename = str(sys.argv[8])
+    order = int(sys.argv[9])
+
+    print("Running:", num_qubits, num_meas, num_reservoirs, method, noise,
+          lentrain, decode, casename, order)
+    main(num_qubits, num_meas, num_reservoirs, method, noise, lentrain,
+         decode, casename, order)
diff --git a/examples/stabilizer/run_reservoir_narma.sh b/examples/stabilizer/run_reservoir_narma.sh
new file mode 100644
index 0000000..e6b1bd1
--- /dev/null
+++ b/examples/stabilizer/run_reservoir_narma.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+#SBATCH --job-name=random_sample
+# d-hh:mm:ss
+#SBATCH --time=30-00:00:00
+#SBATCH --output=/home/rubenp/quantumreservoirpy_vivaldi/%j.out
+#SBATCH --nodes=1
+#SBATCH --tasks-per-node=1
+#SBATCH --cpus-per-task=1
+
+source qiskit/bin/activate
+
+python 
run_reservoir_narma.py "$1" "$2" "$3" "$4" "$5" "$6" "$7" "$8" "$9" + diff --git a/examples/stabilizer/u_driver.txt b/examples/stabilizer/u_driver.txt new file mode 100644 index 0000000..e0089cb --- /dev/null +++ b/examples/stabilizer/u_driver.txt @@ -0,0 +1,1000 @@ +0.30776008 +0.00985845 +0.38491726 +0.05887939 +0.05357330 +0.16626046 +0.14024180 +0.31351169 +0.03764891 +0.10156925 +0.10676478 +0.07649344 +0.33671164 +0.14767647 +0.48934000 +0.37798523 +0.22224932 +0.30376501 +0.11532049 +0.13284256 +0.22219707 +0.37663865 +0.34093113 +0.16468522 +0.04679049 +0.21739945 +0.12094216 +0.46188233 +0.21518483 +0.36634667 +0.03332365 +0.30302919 +0.03744321 +0.38200627 +0.46234711 +0.14180078 +0.30830062 +0.01411451 +0.32299148 +0.29157127 +0.09128043 +0.05951035 +0.46383450 +0.21363312 +0.20765161 +0.24348973 +0.44591789 +0.15029175 +0.34578370 +0.35938563 +0.20686413 +0.00169618 +0.17304233 +0.03631999 +0.25210235 +0.30490461 +0.33711996 +0.29266819 +0.04109870 +0.38633165 +0.22817023 +0.12577465 +0.12418578 +0.49545345 +0.26478475 +0.31663036 +0.47008821 +0.26954887 +0.43057762 +0.43828273 +0.11539425 +0.42190755 +0.00543014 +0.23790885 +0.01469485 +0.07804051 +0.07093702 +0.18823458 +0.06370221 +0.17235602 +0.42504978 +0.12859389 +0.42544019 +0.35433443 +0.15713703 +0.17225675 +0.20992529 +0.14617466 +0.49989717 +0.00548196 +0.00200483 +0.07021880 +0.34825004 +0.33260270 +0.27209201 +0.03055233 +0.31222044 +0.45431195 +0.33196289 +0.03776808 +0.25308084 +0.09333940 +0.18773406 +0.43650468 +0.09990968 +0.43335968 +0.42316624 +0.45424487 +0.20698545 +0.34924810 +0.19442298 +0.16085123 +0.44955175 +0.12888998 +0.47091024 +0.07017203 +0.03951273 +0.19306562 +0.04288797 +0.29795641 +0.24768669 +0.45332328 +0.14347820 +0.39218980 +0.47081184 +0.38315864 +0.41768405 +0.12861717 +0.20209905 +0.16080220 +0.15593769 +0.36041761 +0.24436759 +0.14705717 +0.03622757 +0.24749510 +0.17883314 +0.37755961 +0.08816270 +0.25067775 +0.24618084 +0.01969033 +0.32798969 +0.48530575 
+0.03400828 +0.40730249 +0.27659939 +0.12593585 +0.47966313 +0.18852878 +0.16775553 +0.12755468 +0.28637511 +0.36772272 +0.01974447 +0.22199964 +0.16515504 +0.46576284 +0.03770182 +0.14346218 +0.05050631 +0.03643144 +0.41232996 +0.06858878 +0.08016351 +0.39062866 +0.41132367 +0.27446089 +0.04699625 +0.21033101 +0.05204047 +0.18958699 +0.34906496 +0.38925863 +0.49945935 +0.13892517 +0.23482881 +0.09732943 +0.06221909 +0.35261692 +0.07355611 +0.45951469 +0.29067971 +0.28648538 +0.21710054 +0.15940537 +0.37390370 +0.24571264 +0.17227768 +0.11240585 +0.05033839 +0.35118093 +0.38436904 +0.12444510 +0.07264573 +0.38250163 +0.03429723 +0.20186678 +0.43770494 +0.40262599 +0.15976969 +0.01944129 +0.26097150 +0.00005413 +0.28637889 +0.49161448 +0.37810116 +0.31401772 +0.10018385 +0.20297978 +0.43122841 +0.19147145 +0.02445391 +0.12432007 +0.03431888 +0.25490783 +0.25738820 +0.07541176 +0.17938110 +0.38911625 +0.48707983 +0.00471800 +0.42075170 +0.23536092 +0.35700640 +0.14387794 +0.25913571 +0.16698087 +0.09992401 +0.07891893 +0.11509354 +0.13978445 +0.42818876 +0.07555540 +0.20741497 +0.14880650 +0.28385486 +0.32132784 +0.21496798 +0.05166681 +0.25937169 +0.13258874 +0.29295805 +0.18539331 +0.39296013 +0.03050388 +0.47057438 +0.21174588 +0.46983974 +0.27466226 +0.37529766 +0.29034359 +0.08672029 +0.03194656 +0.22298747 +0.13950842 +0.47637304 +0.41552999 +0.00943986 +0.48234609 +0.34957594 +0.37787961 +0.40450125 +0.49137926 +0.27549808 +0.15323529 +0.06758085 +0.14853536 +0.21786646 +0.14781767 +0.24215025 +0.16397909 +0.07646276 +0.36679678 +0.08174699 +0.49865159 +0.43890716 +0.38166014 +0.07867318 +0.29289917 +0.35677984 +0.32562049 +0.11896277 +0.42629722 +0.48152084 +0.17052326 +0.43482646 +0.11406721 +0.15999329 +0.35986147 +0.31556546 +0.17364080 +0.30807621 +0.38846102 +0.11809170 +0.40076964 +0.48529935 +0.34216555 +0.41164145 +0.36206588 +0.06100393 +0.35647199 +0.16970695 +0.35265974 +0.15979224 +0.01513809 +0.01923194 +0.06072120 +0.35505938 +0.48389817 
+0.22924398 +0.44231485 +0.07232597 +0.42758010 +0.17642944 +0.44336048 +0.39393361 +0.43669705 +0.35691005 +0.21197020 +0.09670068 +0.11959787 +0.10962973 +0.02815967 +0.35604932 +0.12832600 +0.41269190 +0.35801141 +0.46764001 +0.22935615 +0.08426305 +0.24935015 +0.28945820 +0.08699698 +0.38716901 +0.10976024 +0.08804713 +0.30690327 +0.07569378 +0.42055526 +0.45406617 +0.49200050 +0.06882805 +0.37330353 +0.28975243 +0.29723573 +0.13982301 +0.01284136 +0.27430197 +0.30509341 +0.09159682 +0.21970828 +0.12640034 +0.14196346 +0.37965133 +0.18413136 +0.27467478 +0.15824893 +0.37701861 +0.04892747 +0.28132490 +0.30493363 +0.30148807 +0.12641389 +0.49228959 +0.17117397 +0.18125482 +0.39023585 +0.03310558 +0.03418914 +0.23322039 +0.43651968 +0.00779518 +0.09737043 +0.45756198 +0.43793675 +0.27265943 +0.25358271 +0.19068378 +0.35210410 +0.26369840 +0.32568094 +0.04237557 +0.03624269 +0.24854597 +0.37845372 +0.15943633 +0.42530689 +0.23795180 +0.01974089 +0.47879599 +0.01127304 +0.13374082 +0.42367643 +0.02509894 +0.10258646 +0.32053330 +0.47702497 +0.02556444 +0.00580722 +0.08930076 +0.23637096 +0.42846511 +0.33933137 +0.45848051 +0.25462669 +0.46159970 +0.25800476 +0.08740381 +0.35201238 +0.23952520 +0.27229621 +0.16869452 +0.18198353 +0.05061718 +0.46448310 +0.27318234 +0.37533429 +0.33663572 +0.38106384 +0.14641711 +0.36211346 +0.02473208 +0.43545942 +0.13629535 +0.06876566 +0.17888567 +0.33365054 +0.38203960 +0.07641749 +0.26318134 +0.43504562 +0.29119492 +0.41605404 +0.18945291 +0.41639397 +0.38596642 +0.35426568 +0.23306131 +0.21513565 +0.42150088 +0.01695142 +0.04976426 +0.39531239 +0.18730877 +0.26786221 +0.35074271 +0.08242757 +0.23411732 +0.34123630 +0.06611825 +0.29116260 +0.46784370 +0.05463697 +0.22579981 +0.17372077 +0.26499893 +0.23681973 +0.01673992 +0.25172586 +0.12116418 +0.18878589 +0.12543737 +0.33560625 +0.16971571 +0.17106267 +0.37637283 +0.44200232 +0.31947078 +0.39646103 +0.15610054 +0.30207198 +0.11969178 +0.47131062 +0.00630709 +0.03793979 
+0.34684296 +0.02196771 +0.41703736 +0.26262037 +0.35296661 +0.25683029 +0.12126058 +0.38615527 +0.01341612 +0.48018085 +0.02899115 +0.07929789 +0.19675917 +0.44349587 +0.14872663 +0.29629687 +0.43847111 +0.42211060 +0.05492734 +0.27016239 +0.22514901 +0.40106680 +0.46889504 +0.22317551 +0.15978577 +0.33617034 +0.35313237 +0.25167205 +0.39812307 +0.45605210 +0.45789162 +0.30707055 +0.37622870 +0.20130661 +0.16381115 +0.34012167 +0.39281398 +0.05671518 +0.40747712 +0.10919011 +0.05972275 +0.43729094 +0.36046423 +0.27867836 +0.08483691 +0.48461354 +0.16910332 +0.48390438 +0.45260003 +0.27411783 +0.28371298 +0.23338068 +0.40417920 +0.17995030 +0.44536954 +0.04659778 +0.09897630 +0.37145889 +0.39408945 +0.43615737 +0.13790910 +0.16908393 +0.49528107 +0.27312596 +0.28260737 +0.23237552 +0.29559457 +0.01031701 +0.15869951 +0.03629414 +0.03732718 +0.26499191 +0.03407844 +0.06148457 +0.26879047 +0.36779741 +0.15378829 +0.38043263 +0.39781909 +0.22876924 +0.42608522 +0.29563698 +0.33050983 +0.39880684 +0.35048982 +0.15673644 +0.07360201 +0.34981659 +0.33962683 +0.10634690 +0.14283299 +0.19843179 +0.40565939 +0.11471619 +0.00478936 +0.30067279 +0.47363881 +0.49128832 +0.12636582 +0.36301277 +0.36285062 +0.29960412 +0.10261886 +0.23598645 +0.35315596 +0.48391992 +0.07020472 +0.19810575 +0.31734183 +0.22868163 +0.24540610 +0.20340957 +0.06671856 +0.38589241 +0.38807942 +0.36268810 +0.44040624 +0.29844459 +0.49104123 +0.40607208 +0.04632076 +0.39095011 +0.40761362 +0.18316354 +0.02419607 +0.49026847 +0.19241653 +0.08310986 +0.11256202 +0.32120628 +0.48471852 +0.36409771 +0.06560341 +0.01745934 +0.02572512 +0.49817307 +0.24394536 +0.22268438 +0.21403720 +0.47171527 +0.00798439 +0.39785474 +0.49680641 +0.38331550 +0.48239178 +0.45254501 +0.37484550 +0.46791571 +0.00278498 +0.43145267 +0.08711820 +0.29730316 +0.39314423 +0.12964270 +0.10470567 +0.47163621 +0.23261415 +0.28989969 +0.36921904 +0.36545651 +0.21424786 +0.01048905 +0.03663477 +0.33177759 +0.32511069 +0.37902722 
+0.19119497 +0.36398523 +0.06665295 +0.17450723 +0.12701248 +0.48916299 +0.44568669 +0.08364641 +0.18271924 +0.06394777 +0.17411419 +0.23982764 +0.20180563 +0.07705053 +0.38604404 +0.28896647 +0.03340151 +0.17396549 +0.18092338 +0.47776458 +0.40173416 +0.49728413 +0.12091539 +0.00796494 +0.15064484 +0.49502403 +0.21251251 +0.10775766 +0.36478348 +0.43928199 +0.16786729 +0.16598094 +0.47942273 +0.23926034 +0.14637592 +0.26106801 +0.45591825 +0.35710669 +0.37572619 +0.48875518 +0.24445865 +0.00796881 +0.22831459 +0.24997372 +0.29847584 +0.08143585 +0.23578321 +0.13434641 +0.03084054 +0.35645467 +0.35026299 +0.11719859 +0.25326538 +0.08387674 +0.48952796 +0.32844210 +0.46587145 +0.43303445 +0.11996206 +0.13708205 +0.45616093 +0.28677196 +0.10280660 +0.40198390 +0.34345540 +0.39335317 +0.12950199 +0.27285468 +0.20288468 +0.31588543 +0.39897955 +0.00718390 +0.06666208 +0.14612192 +0.41842627 +0.04492157 +0.10552766 +0.47056658 +0.32070139 +0.09326888 +0.35398985 +0.37114635 +0.44610744 +0.22790809 +0.24393290 +0.02392990 +0.00415700 +0.12171071 +0.49292206 +0.38288817 +0.33303578 +0.38208877 +0.37763642 +0.00900666 +0.04386052 +0.45697546 +0.40027245 +0.19994403 +0.32115088 +0.31249382 +0.00042564 +0.32539987 +0.26528030 +0.21400353 +0.47647223 +0.05572152 +0.33711563 +0.42446114 +0.47019201 +0.36016039 +0.05316550 +0.49045824 +0.24302122 +0.32964397 +0.23791484 +0.17634547 +0.25266513 +0.32881644 +0.33905850 +0.21010471 +0.08489272 +0.37022649 +0.40389297 +0.25417020 +0.07284881 +0.25873725 +0.37847922 +0.36790948 +0.02729039 +0.34012500 +0.32990315 +0.36272218 +0.00621618 +0.04791577 +0.44257910 +0.20269475 +0.30331129 +0.17954724 +0.06333820 +0.18709936 +0.28451057 +0.30552834 +0.08081543 +0.03092044 +0.28431953 +0.42643939 +0.18632354 +0.08911405 +0.45777789 +0.12305203 +0.37799124 +0.42335987 +0.10398618 +0.36657053 +0.27718650 +0.40841527 +0.36103090 +0.22249732 +0.47353633 +0.17364122 +0.17043439 +0.29047269 +0.43808347 +0.13626695 +0.46640805 +0.47637871 
+0.17452390 +0.42705983 +0.17278690 +0.43159965 +0.37162540 +0.10746054 +0.35281462 +0.38714204 +0.10708478 +0.46639152 +0.12560668 +0.14336088 +0.20873927 +0.28066568 +0.24776639 +0.28097617 +0.22277892 +0.04449795 +0.05131459 +0.36614181 +0.06637187 +0.47956886 +0.43484094 +0.48126432 +0.46378538 +0.40333559 +0.11064964 +0.27908985 +0.31623843 +0.40069121 +0.36676677 +0.36892953 +0.43156915 +0.10564273 +0.18888175 +0.30305753 +0.10178118 +0.46617040 +0.23497726 +0.12844425 +0.01098654 +0.23781530 +0.24798072 +0.34658981 +0.04488276 +0.23778765 +0.38049184 +0.37222129 +0.32724342 +0.43375996 +0.42067671 +0.02883771 +0.13748321 +0.47800530 +0.27717265 +0.17129945 +0.00914364 +0.11369986 +0.46944224 +0.25565657 +0.22532659 +0.12758457 +0.11160378 +0.22924203 +0.27927775 +0.25774988 +0.34672872 +0.34982010 +0.12282666 +0.36905943 +0.20839089 +0.14895292 +0.23580423 +0.28433600 +0.27154081 +0.35826998 +0.01496447 +0.47884614 +0.44288386 +0.27694126 +0.08802023 +0.28254622 +0.28409274 +0.06783923 +0.04010258 +0.07896154 +0.25699946 +0.04039023 +0.41897804 +0.13346691 +0.04745270 +0.15355460 +0.28025767 +0.11941945 +0.10440706 +0.24030425 +0.01242022 +0.14310365 +0.29156774 +0.23752678 +0.45959573 +0.36223417 +0.14159456 +0.00446736 +0.48352585 +0.14235201 +0.48242501 +0.15286679 +0.10058400 +0.21657874 +0.46559919 +0.13855381 +0.16786750 +0.14205449 +0.49658842 +0.38850128 +0.19403850 +0.46023511 +0.27209509 +0.19647318 +0.07590063 +0.04961400 +0.35801797 +0.01610514 +0.08853864 +0.35392819 +0.06859738 +0.40925864 +0.02966215 +0.40555185 +0.12907830 +0.41108692 +0.25370262 +0.47674249 +0.46044510 +0.26864761 +0.30584497 +0.16500581 +0.41981744 +0.12458445 +0.39895512 +0.37363047 +0.00056674 +0.24288399 +0.44015569 +0.18402665 +0.11165969 +0.11671150 +0.48979809 +0.38931251 +0.43069558 +0.25166625 +0.10371086 +0.02970712 +0.36268537 +0.35422186 +0.42639553 +0.43684927 +0.23469052 +0.35085567 +0.17318215 +0.38207119 +0.16745770 +0.01519410 +0.01088759 +0.45701977 
+0.28070312 +0.43920364 +0.03588258 +0.49869756 +0.16139912 +0.00655454 +0.29872481 +0.00987269 +0.48782874 +0.29984237 +0.34347716 +0.30981864 +0.21107779 +0.01801424 +0.11531134 +0.36839272 +0.32013196 +0.08705767 +0.37936872 +0.06031212 +0.29234485 +0.00200415 +0.18969831 +0.33731861 +0.36934748 +0.48155726