
Commit

Merge pull request #8 from jpmorganchase/fef96a4_ZichangHe
Adding missing commits from private mirror
rsln-s authored Sep 1, 2023
2 parents 7e2abeb + 6242421 commit ca50e3f
Showing 8 changed files with 126 additions and 210 deletions.
32 changes: 9 additions & 23 deletions examples/QAOA_LABS_optimization.ipynb
@@ -117,8 +117,8 @@
"</div>"
],
"text/plain": [
" N p overlap gamma \n",
"35 10 1 0.127219 [0.0788139605] \\\n",
" N p overlap gamma \\\n",
"35 10 1 0.127219 [0.0788139605] \n",
"36 10 2 0.196232 [0.06896497310000001, 0.1511922366] \n",
"37 10 3 0.258441 [0.063816785, 0.1396294913, 0.1538820941] \n",
"38 10 4 0.318326 [0.0655247286, 0.1319443875, 0.1430007569, 0.1... \n",
@@ -139,7 +139,7 @@
],
"source": [
"N = 10\n",
"parameters = parameter_utils.get_LABS_wrt_best_overlap(N)\n",
"parameters = parameter_utils.get_best_known_parameters_for_LABS_wrt_overlap(N)\n",
"known_p = parameters.p.max()\n",
"print(f\"Maximum p available for N={N} is {known_p}\")\n",
"parameters.head()"
@@ -159,7 +159,7 @@
"outputs": [],
"source": [
"p = known_p + 1\n",
"gamma, beta = parameter_utils.get_LABS_wrt_best_overlap_for_p(N, known_p)"
"gamma, beta = parameter_utils.get_best_known_parameters_for_LABS_wrt_overlap_for_p(N, known_p)"
]
},
{
@@ -188,20 +188,6 @@
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/usr/lib/python3.11/site-packages/numba/cuda/dispatcher.py:539: NumbaPerformanceWarning: \u001b[1mGrid size 1 will likely result in GPU under-utilization due to low occupancy.\u001b[0m\n",
" warn(NumbaPerformanceWarning(msg))\n",
"/usr/lib/python3.11/site-packages/numba/cuda/dispatcher.py:539: NumbaPerformanceWarning: \u001b[1mGrid size 1 will likely result in GPU under-utilization due to low occupancy.\u001b[0m\n",
" warn(NumbaPerformanceWarning(msg))\n",
"/usr/lib/python3.11/site-packages/numba/cuda/dispatcher.py:539: NumbaPerformanceWarning: \u001b[1mGrid size 1 will likely result in GPU under-utilization due to low occupancy.\u001b[0m\n",
" warn(NumbaPerformanceWarning(msg))\n",
"/usr/lib/python3.11/site-packages/numba/cuda/dispatcher.py:539: NumbaPerformanceWarning: \u001b[1mGrid size 1 will likely result in GPU under-utilization due to low occupancy.\u001b[0m\n",
" warn(NumbaPerformanceWarning(msg))\n"
]
},
{
"name": "stdout",
"output_type": "stream",
@@ -241,7 +227,7 @@
"name": "stdout",
"output_type": "stream",
"text": [
"Success probability at p=9 after optimization is 0.5242988366332573\n"
"Success probability at p=9 after optimization is 0.5242896565265064\n"
]
}
],
@@ -264,9 +250,9 @@
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"display_name": "py39",
"language": "python",
"name": "python3"
"name": "py39"
},
"language_info": {
"codemirror_mode": {
@@ -278,9 +264,9 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.3"
"version": "3.9.4"
}
},
"nbformat": 4,
"nbformat_minor": 2
"nbformat_minor": 4
}
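The diff above renames the LABS parameter-lookup helpers in parameter_utils. A minimal sketch of the new call pattern, assuming parameter_utils is importable from the qokit package and that the lookup returns a pandas DataFrame with a p column, as the notebook's parameters.p.max() call suggests:

from qokit import parameter_utils

N = 10  # problem size used in the notebook

# DataFrame of best known LABS parameters (by overlap) for all tabulated p
parameters = parameter_utils.get_best_known_parameters_for_LABS_wrt_overlap(N)
known_p = parameters.p.max()
print(f"Maximum p available for N={N} is {known_p}")

# gamma, beta schedules for a specific depth p
gamma, beta = parameter_utils.get_best_known_parameters_for_LABS_wrt_overlap_for_p(N, known_p)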
78 changes: 28 additions & 50 deletions examples/QAOA_portfolio_optimization.ipynb
@@ -36,7 +36,7 @@
},
{
"cell_type": "code",
"execution_count": 1,
"execution_count": null,
"id": "c3de09e7-5315-4170-8263-6041a7819fac",
"metadata": {
"tags": []
@@ -58,7 +58,7 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": null,
"id": "57544f68-6538-4dda-a47b-254252caa54a",
"metadata": {
"tags": []
@@ -72,7 +72,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": null,
"id": "e49f9152-e49c-4cbd-a2b3-20894a927a9a",
"metadata": {
"tags": []
@@ -84,7 +84,7 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": null,
"id": "a800d2c6-d7a9-449a-befc-e1620676847e",
"metadata": {
"tags": []
@@ -96,7 +96,7 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": null,
"id": "60942736-20e4-4519-a584-65f613b7bddf",
"metadata": {
"tags": []
@@ -118,7 +118,7 @@
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": null,
"id": "c4cb1e0a-f0eb-48e4-a1dc-8d342462c156",
"metadata": {
"tags": []
@@ -131,7 +131,7 @@
},
{
"cell_type": "code",
"execution_count": 7,
"execution_count": null,
"id": "1fa46296-89cb-40e7-aaf8-09b708eda12d",
"metadata": {
"tags": []
@@ -145,7 +145,7 @@
},
{
"cell_type": "code",
"execution_count": 8,
"execution_count": null,
"id": "4bb70cb8-db79-45b3-b808-96b7d6647b68",
"metadata": {
"tags": []
@@ -158,20 +158,12 @@
},
{
"cell_type": "code",
"execution_count": 9,
"execution_count": null,
"id": "238479eb-db75-4e19-a159-36a2bed9c2ae",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"energy = 0.004962484517944697, Approximation ratio = 0.7564585339722991\n"
]
}
],
"outputs": [],
"source": [
"po_energy = qaoa_obj(x0).real\n",
"po_ar = (po_energy-best_portfolio[1])/(best_portfolio[0]-best_portfolio[1])\n",
@@ -185,12 +177,14 @@
"source": [
"# Optimize QAOA parameters\n",
"\n",
"Note that we are using NLopt for optimization as it supports better-performing BOBYQA optimizer. Run `pip install nlopt` to install this dependency."
"Note that we are using NLopt for optimization as it supports better-performing BOBYQA optimizer. \n",
"\n",
"Run `pip install nlopt` to install this dependency."
]
},
{
"cell_type": "code",
"execution_count": 10,
"execution_count": null,
"id": "9d730369-3d9a-4e0e-bd2b-a81d92baa7b7",
"metadata": {
"tags": []
@@ -203,7 +197,7 @@
},
{
"cell_type": "code",
"execution_count": 11,
"execution_count": null,
"id": "55a761e4-2122-4b00-bc1f-39730d6a3dd7",
"metadata": {
"tags": []
@@ -230,20 +224,12 @@
},
{
"cell_type": "code",
"execution_count": 12,
"execution_count": null,
"id": "86b7ff7f-43f4-4242-95b9-82784f0632a5",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"energy = 0.004135567715736568, Approximation ratio = 0.8050399835268952\n"
]
}
],
"outputs": [],
"source": [
"_, opt_energy = minimize_nlopt(qaoa_obj, x0, p=1, rhobeg=0.01/1)\n",
"opt_ar = (opt_energy-best_portfolio[1])/(best_portfolio[0]-best_portfolio[1])\n",
@@ -252,20 +238,12 @@
},
{
"cell_type": "code",
"execution_count": 13,
"execution_count": null,
"id": "90c5c6af-003c-4724-a943-c747bd24f3c6",
"metadata": {
"tags": []
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"energy = 0.004135567784198798, Approximation ratio = 0.8050399795047319\n"
]
}
],
"outputs": [],
"source": [
"res = minimize(qaoa_obj, x0, method='COBYLA', options={'rhobeg':0.001})\n",
"print(f\"energy = {res.fun}, Approximation ratio = {(res.fun-best_portfolio[1])/(best_portfolio[0]-best_portfolio[1])}\")"
@@ -281,7 +259,7 @@
},
{
"cell_type": "code",
"execution_count": 14,
"execution_count": null,
"id": "0c6e7780-0392-4a9f-b0a7-4b23c0af84dc",
"metadata": {
"tags": []
@@ -293,7 +271,7 @@
},
{
"cell_type": "code",
"execution_count": 15,
"execution_count": null,
"id": "d5fec9f4-09e0-409f-a613-07f23194c94d",
"metadata": {
"tags": []
@@ -322,7 +300,7 @@
},
{
"cell_type": "code",
"execution_count": 16,
"execution_count": null,
"id": "bc26bf47-72ce-4944-9a9f-d217e4c77894",
"metadata": {
"tags": []
@@ -336,7 +314,7 @@
},
{
"cell_type": "code",
"execution_count": 17,
"execution_count": null,
"id": "0c4f35dc-9402-48ce-89ef-c5a80883aaf8",
"metadata": {
"tags": []
@@ -351,7 +329,7 @@
},
{
"cell_type": "code",
"execution_count": 18,
"execution_count": null,
"id": "8581ac1f-2675-4ccb-b311-ee7e658b2782",
"metadata": {
"tags": []
@@ -367,7 +345,7 @@
},
{
"cell_type": "code",
"execution_count": 19,
"execution_count": null,
"id": "2ceed19a-9391-4eed-95b9-dbfc3b8d6a9d",
"metadata": {
"tags": []
@@ -391,9 +369,9 @@
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"display_name": "qokit",
"language": "python",
"name": "python3"
"name": "qokit"
},
"language_info": {
"codemirror_mode": {
@@ -405,7 +383,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.11"
"version": "3.9.4"
}
},
"nbformat": 4,
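The updated markdown cell in this notebook notes that NLopt is used because it provides the better-performing BOBYQA optimizer. A minimal sketch of driving BOBYQA through the nlopt Python bindings; qaoa_obj and x0 are hypothetical stand-ins for the notebook's simulator-backed objective and initial parameters, and the initial step of 0.01 mirrors the rhobeg value passed to the minimize_nlopt wrapper in the diff:

import nlopt
import numpy as np

def qaoa_obj(x):
    # hypothetical stand-in for the notebook's QAOA energy objective
    return float(np.sum((np.asarray(x) - 0.1) ** 2))

x0 = np.zeros(2)  # one gamma and one beta for p=1

opt = nlopt.opt(nlopt.LN_BOBYQA, len(x0))
opt.set_min_objective(lambda x, grad: qaoa_obj(x))  # BOBYQA is derivative-free; grad is unused
opt.set_initial_step(0.01)  # plays the role of rhobeg in the notebook
opt.set_xtol_rel(1e-8)
x_opt = opt.optimize(x0)
opt_energy = opt.last_optimum_value()
print(f"energy = {opt_energy}")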
5 changes: 1 addition & 4 deletions qokit/assets/transferred_Dan_mean_0317.json
@@ -16677,7 +16677,6 @@
1.0,
1.0
]
<<<<<<< HEAD:assets/transferred_Dan_mean_0317.json
},
"278":{
"overlap transferred":0.0000138523,
@@ -17218,7 +17217,5 @@
1.0,
1.0
]
=======
>>>>>>> main:qokit/assets/transferred_Dan_mean_0317.json
}
}
}
17 changes: 0 additions & 17 deletions qokit/parameter_utils.py
@@ -142,23 +142,6 @@ def convert_to_gamma_beta(*args, parameterization: QAOAParameterization | str):
return gamma, beta


def set_parameterized_func(
parameterization: QAOAParameterization | str, probabilities_from_gamma_beta: Callable, compute_objective_from_probabilities: Callable
):
"""
parameterization: the approach to parametrize the QAOA parameters
probabilities_from_gamma_beta: callable function to estimate state vector from QAOA parameters
compute_objective_from_probabilities: callable function to computate objective from state vector
"""

def f(*args):
gamma, beta = convert_to_gamma_beta(*args, parameterization=parameterization)
probabilities = probabilities_from_gamma_beta(gamma, beta)
return compute_objective_from_probabilities(probabilities)

return f


def get_sk_gamma_beta(p, parameterization: QAOAParameterization | str = "gamma beta"):
"""
Load the look-up table for initial points from
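For reference, the deleted set_parameterized_func composed three callables — parameter conversion, state simulation, and objective evaluation — into a single objective over the raw QAOA parameters. A minimal sketch of the same composition pattern with hypothetical stand-in names, for callers that previously relied on the helper:

def make_objective(parameterization, probabilities_from_gamma_beta, compute_objective_from_probabilities):
    # hypothetical replacement sketch; mirrors the body of the removed helper
    def f(*args):
        gamma, beta = convert_to_gamma_beta(*args, parameterization=parameterization)
        probabilities = probabilities_from_gamma_beta(gamma, beta)
        return compute_objective_from_probabilities(probabilities)
    return f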
