feat(eta_calculation): Implementing "Joint Caching and Freshness Optimization" (in progress)

Signed-off-by: Tuan-Dat Tran <tuan-dat.tran@tudattr.dev>
Tuan-Dat Tran
2024-12-02 18:07:08 +01:00
parent b2cc80bb09
commit 4ea5505130
7 changed files with 47164 additions and 42 deletions


@@ -0,0 +1,473 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "ab5cd7d1-1a57-46fc-8282-dae0a6cc2944",
"metadata": {},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import random"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "3d1ad0b9-f6a8-4e98-84aa-6e02e4279954",
"metadata": {},
"outputs": [],
"source": [
"DATABASE_OBJECT_COUNT = 100\n",
"CACHE_SIZE = DATABASE_OBJECT_COUNT/2\n",
"ZIPF_CONSTANT = 2\n",
"\n",
"CACHE_MISS_COST = 2\n",
"CACHE_REFRESH_COST = 1\n",
"\n",
"SEED = 42\n",
"np.random.seed(SEED)\n",
"random.seed(SEED)\n",
"\n",
"LAMBDA_VALUES = np.array([np.random.zipf(ZIPF_CONSTANT) for i in np.arange(1, DATABASE_OBJECT_COUNT + 1,1)])"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "9cc83cf6-5c78-4f0d-b7cb-08cdb80c362e",
"metadata": {},
"outputs": [],
"source": [
"# LAMBDA_VALUES = np.array([0.03, 0.04,0.05,0.06,0.07,1,1.1,1.2,1.3,1.4,1.5])\n",
"# DATABASE_OBJECT_COUNT = len(LAMBDA_VALUES)\n",
"# CACHE_SIZE = 4.4\n",
"# CACHE_MISS_COST = 7\n",
"# CACHE_REFRESH_COST = 1"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "3dc07233-0b56-4fee-a93b-212836c18b42",
"metadata": {},
"outputs": [],
"source": [
"db_object_count = DATABASE_OBJECT_COUNT\n",
"cache_sz = CACHE_SIZE\n",
"\n",
"lambda_vals = LAMBDA_VALUES\n",
"c_f = CACHE_MISS_COST\n",
"c_delta = CACHE_REFRESH_COST"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "5a27d416-8f98-4814-af9e-6c6bef95f4ef",
"metadata": {},
"outputs": [],
"source": [
"def eta_star(db_object_count, c_f, cache_sz, c_delta, lambda_vals):\n",
" num = (db_object_count * c_f - cache_sz * c_delta)\n",
" denom = np.sum(1.0/lambda_vals)\n",
" return max(0, num/denom)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "6276a9ce-f839-4fe6-90f2-2195cf065fc8",
"metadata": {},
"outputs": [],
"source": [
"def h_i_star(c_f, eta, lambda_vals, c_delta):\n",
" optimized_hitrate = (c_f - (eta/lambda_vals)) / c_delta\n",
" return optimized_hitrate"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "dcd31a8c-6864-4b9a-8bb3-998f0c32baf6",
"metadata": {},
"outputs": [],
"source": [
"def get_index_of_furthest_hitrate_from_boundary(hitrates):\n",
" local_hitrates = hitrates[(hitrates < 0) | (hitrates > 1)]\n",
" smallest_delta = np.abs(np.min(local_hitrates))\n",
" biggest_delta = np.max(local_hitrates) - 1\n",
" if smallest_delta > biggest_delta:\n",
" index = np.where(hitrates == np.min(local_hitrates))[0][0]\n",
" return index\n",
" else:\n",
" index = np.where(hitrates == np.max(local_hitrates))[0][0]\n",
" return index"
]
},
{
"cell_type": "code",
"execution_count": 18,
"id": "55b251f8-97ca-49a8-9ec6-be77dc1e49b2",
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"\"\"\"\n",
"Perform theoretical optimization to compute optimal hit probabilities.\n",
"\n",
"Parameters:\n",
"- lambda_vals (numpy array): Request rates for each item.\n",
"- B (float): Total cache size.\n",
"- c_f (float): Fetching linear cost (cache miss cost).\n",
"- c_delta (float): Age linear cost.\n",
"\n",
"Returns:\n",
"- h_opt (numpy array): Optimal hit probabilities for each item.\n",
"\"\"\"\n",
"optimized_hitrates = np.zeros(DATABASE_OBJECT_COUNT)\n",
"differenc_set = np.arange(DATABASE_OBJECT_COUNT)\n",
"fix_i = []\n",
"current_db_objects = DATABASE_OBJECT_COUNT\n",
"current_cache_size = CACHE_SIZE\n",
"\n",
"while True:\n",
" if current_db_objects == 0:\n",
" # Handle special case: no items left to optimize\n",
" if current_cache_size > 0:\n",
" # Redistribute unused cache size among items with zero hit probability\n",
" differenc_set = np.where(optimized_hitrates == 0)[0]\n",
" fix_i = np.setdiff1d(np.arange(DATABASE_OBJECT_COUNT), differenc_set)\n",
" current_db_objects = len(differenc_set)\n",
" continue\n",
" else:\n",
" optimized_hitrates[differenc_set] = 0\n",
" break\n",
" # Compute Lagrangian multiplier and optimal hit probabilities\n",
" mu = max(0, (current_db_objects * c_f - current_cache_size * c_delta) / np.sum(1.0 / lambda_vals[differenc_set]))\n",
" eta = eta_star(current_db_objects, c_f, current_cache_size, c_delta, lambda_vals[differenc_set])\n",
" assert(mu == eta)\n",
" optimized_hitrates[differenc_set] = (c_f - mu / lambda_vals[differenc_set]) / c_delta\n",
" # print(optimized_hitrates)\n",
" # Handle the case where mu < 0\n",
" if mu < 0:\n",
" current_cache_size = current_db_objects * c_f / c_delta # Adjust cache size for next iteration\n",
" continue\n",
" # Check for constraint violations\n",
" larger_i = np.where(optimized_hitrates > 1)[0] # h > 1\n",
" smaller_i = np.where(optimized_hitrates < 0)[0] # h < 0\n",
" # If no violations, optimization is complete\n",
" break_con = len(smaller_i) == 0 and len(larger_i) == 0\n",
" break_con1 = len((optimized_hitrates[differenc_set])[((optimized_hitrates[differenc_set]) < 0) | ((optimized_hitrates[differenc_set])> 1)]) == 0\n",
" assert(break_con == break_con1)\n",
" if break_con:\n",
" break\n",
" # Find the furthest violating item\n",
" min_viol, min_viol_i = (0, -1)\n",
" if len(smaller_i) > 0:\n",
" min_viol_i = np.argmin(optimized_hitrates)\n",
" min_viol = optimized_hitrates[min_viol_i]\n",
" max_viol, max_viol_i = (0, -1)\n",
" if len(larger_i) > 0:\n",
" larger = optimized_hitrates - 1\n",
" max_viol_i = np.argmax(larger)\n",
" max_viol = larger[max_viol_i]\n",
" # Compare the furthest violations and adjust accordingly\n",
" viol_i = min_viol_i\n",
" min_viol_flag = True # True if furthest is from the left boundary\n",
" if max_viol > abs(min_viol):\n",
" viol_i = max_viol_i\n",
" min_viol_flag = False \n",
" index = get_index_of_furthest_hitrate_from_boundary(optimized_hitrates)\n",
" if viol_i != index:\n",
" print(optimized_hitrates[viol_i])\n",
" print(optimized_hitrates[index])\n",
" assert(viol_i == index)\n",
" if min_viol_flag:\n",
" optimized_hitrates[viol_i] = 0\n",
" else:\n",
" optimized_hitrates[viol_i] = min(1, current_cache_size)\n",
"\n",
" # Update parameters for next iteration\n",
" current_cache_size =- optimized_hitrates[viol_i]\n",
" fix_i.append(viol_i)\n",
" differenc_set = np.setdiff1d(np.arange(DATABASE_OBJECT_COUNT), fix_i)\n",
" current_db_objects = DATABASE_OBJECT_COUNT - len(fix_i)"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "efa16eaf-a10b-4927-99cd-190e2ffe1d1e",
"metadata": {},
"outputs": [],
"source": [
"a = optimized_hitrates\n",
"b = differenc_set"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "0e21c26f-058a-4e56-a5ad-1c47bf28656c",
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"All values optimized.\n"
]
}
],
"source": [
"\"\"\"\n",
"Perform theoretical optimization to compute optimal hit probabilities.\n",
"\n",
"Parameters:\n",
"- lambda_vals (numpy array): Request rates for each item.\n",
"- B (float): Total cache size.\n",
"- c_f (float): Fetching linear cost (cache miss cost).\n",
"- c_delta (float): Age linear cost.\n",
"\n",
"Returns:\n",
"- h_opt (numpy array): Optimal hit probabilities for each item.\n",
"\"\"\"\n",
"optimized_hitrates = np.zeros(DATABASE_OBJECT_COUNT)\n",
"differenc_set = np.arange(DATABASE_OBJECT_COUNT)\n",
"fix_i = []\n",
"current_db_objects = DATABASE_OBJECT_COUNT\n",
"current_cache_size = CACHE_SIZE\n",
"\n",
"while True:\n",
" if current_db_objects == 0:\n",
" # Handle special case: no items left to optimize\n",
" if current_cache_size > 0:\n",
" # Redistribute unused cache size among items with zero hit probability\n",
" differenc_set = np.where(optimized_hitrates == 0)[0]\n",
" fix_i = np.setdiff1d(np.arange(DATABASE_OBJECT_COUNT), differenc_set)\n",
" current_db_objects = len(differenc_set)\n",
" continue\n",
" else:\n",
" optimized_hitrates[differenc_set] = 0\n",
" break\n",
" # Compute Lagrangian multiplier and optimal hit probabilities\n",
" eta = eta_star(current_db_objects, c_f, current_cache_size, c_delta, lambda_vals[differenc_set])\n",
" optimized_hitrates[differenc_set] = (c_f - eta / lambda_vals[differenc_set]) / c_delta\n",
"\n",
" if mu < 0:\n",
" current_cache_size = current_db_objects * c_f / c_delta # Adjust cache size for next iteration\n",
" continue\n",
" \n",
" if len((optimized_hitrates[differenc_set])[((optimized_hitrates[differenc_set]) < 0) | ((optimized_hitrates[differenc_set])> 1)]) == 0:\n",
" print(\"All values optimized.\")\n",
" break\n",
" max_outbound_index = get_index_of_furthest_hitrate_from_boundary(optimized_hitrates)\n",
" optimized_hitrates[max_outbound_index] = (1 if optimized_hitrates[max_outbound_index] > 1 else 0)\n",
"\n",
" current_cache_size =- optimized_hitrates[max_outbound_index]\n",
" fix_i.append(max_outbound_index)\n",
" differenc_set = np.setdiff1d(np.arange(DATABASE_OBJECT_COUNT), fix_i)\n",
" current_db_objects = DATABASE_OBJECT_COUNT - len(fix_i)"
]
},
{
"cell_type": "code",
"execution_count": 11,
"id": "4f64253f-b389-4be9-b403-08027d480121",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
" 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
" 0., 0., 0., 0., 1., 0., 0., 0., 0., 0., 0., 0., 1., 0., 0., 0., 0.,\n",
" 1., 0., 0., 0., 0., 0., 1., 0., 0., 1., 0., 0., 0., 0., 1., 0., 1.,\n",
" 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 0., 0., 0., 0., 0., 0.,\n",
" 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 0., 0.])"
]
},
"execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"optimized_hitrates"
]
},
{
"cell_type": "code",
"execution_count": 12,
"id": "17d818db-ec88-4c26-92af-6d74862525d9",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([ 0. , 0. , 0. , 0. , 0. ,\n",
" 0. , 0.43902439, 0. , 0. , 0. ,\n",
" 0. , 0. , 0. , 0. , 0. ,\n",
" 0. , 0. , 0. , 0. , 0. ,\n",
" 0. , 0. , 0. , 0. , 0. ,\n",
" 0. , 0. , 0.43902439, 0. , 0. ,\n",
" 0. , 0.04878049, 0. , 0.04878049, 0. ,\n",
" 0. , 0. , 0. , -0. , 0. ,\n",
" 0.04878049, 0.04878049, 0. , 0. , 0. ,\n",
" 0. , 0. , 0. , 0. , 0. ,\n",
" 0.43902439, 0. , 0. , 0. , 0. ,\n",
" 0. , 0. , 0. , 0. , 0. ,\n",
" -0. , 0. , 0. , 0. , 0. ,\n",
" -0. , 0. , 1. , 0. , 0. ,\n",
" 0. , 0. , 0. , 0. , 0. ,\n",
" 0. , 0. , 0. , -0. , 0. ,\n",
" 0. , 0.43902439, 0. , 0. , 0. ,\n",
" 0. , 0. , 0. , 0. , 0. ,\n",
" 0. , 0. , 0. , 0. , 0. ,\n",
" 0. , 0. , 0. , 0.04878049, 0. ])"
]
},
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"a"
]
},
{
"cell_type": "code",
"execution_count": 13,
"id": "791b3f96-527a-489e-970e-c92ec950177f",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([ 6, 27, 31, 33, 40, 41, 50, 81, 98])"
]
},
"execution_count": 13,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"b"
]
},
{
"cell_type": "code",
"execution_count": 14,
"id": "c22fa973-432a-4c05-89bf-2a6ea82ae3d2",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([ 6, 27, 50, 81])"
]
},
"execution_count": 14,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"differenc_set"
]
},
{
"cell_type": "code",
"execution_count": 15,
"id": "898e1266-5aaa-46f4-ac0f-c7807ac2b6bb",
"metadata": {},
"outputs": [],
"source": [
"db_object_count = DATABASE_OBJECT_COUNT\n",
"cache_sz = CACHE_SIZE\n",
"loop_lambda = lambda_vals\n",
"\n",
"non_optimized_values = np.arange(db_object_count)"
]
},
{
"cell_type": "code",
"execution_count": 16,
"id": "8cc9b8a9-f7ae-48fc-adfb-ac4b7a4998f1",
"metadata": {},
"outputs": [],
"source": [
"db_object_count = DATABASE_OBJECT_COUNT\n",
"cache_sz = CACHE_SIZE\n",
"loop_lambda = lambda_vals\n",
"\n",
"optimized_hitrate = np.zeros(db_object_count)\n",
"non_optimized_values = np.arange(db_object_count)\n",
"optimized_value = {}\n",
"\n",
"eta = eta_star(db_object_count, c_f, cache_sz, c_delta, loop_lambda[non_optimized_values])\n",
"optimized_hitrate[non_optimized_values] = h_i_star(c_f, eta, loop_lambda[non_optimized_values], c_delta)\n",
"\n",
"max_outbound_index = get_index_of_furthest_hitrate_from_boundary(optimized_hitrate)\n",
"optimized_value[max_outbound_index] = (1 if optimized_hitrate[max_outbound_index] > 1 else 0)"
]
},
{
"cell_type": "code",
"execution_count": 17,
"id": "cbcf3592-fcf2-4f54-a3cd-761097c12972",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{67: 1}"
]
},
"execution_count": 17,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"optimized_value"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "db732331-1d09-45b7-915c-73daa270b5e2",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "graphs",
"language": "python",
"name": "graphs"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.7"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

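For reference, the closed-form step that eta_star and h_i_star implement in the notebook above can be written out explicitly (this is a restatement of the code, with B the cache size and N the number of objects currently being optimized):

\eta^{*} = \max\left(0,\ \frac{N\, c_f - B\, c_{\Delta}}{\sum_i 1/\lambda_i}\right),
\qquad h_i^{*} = \frac{c_f - \eta^{*}/\lambda_i}{c_{\Delta}}

Any h_i^{*} that falls outside [0, 1] is clipped and its object is removed from the set still being optimized; \eta^{*} is then recomputed on the remaining objects against the leftover cache budget.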

@@ -1,6 +1,251 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "ab5cd7d1-1a57-46fc-8282-dae0a6cc2944",
"metadata": {},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import random"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "3d1ad0b9-f6a8-4e98-84aa-6e02e4279954",
"metadata": {},
"outputs": [],
"source": [
"DATABASE_OBJECT_COUNT = 100\n",
"CACHE_SIZE = DATABASE_OBJECT_COUNT/2\n",
"ZIPF_CONSTANT = 2\n",
"\n",
"CACHE_MISS_COST = 1\n",
"CACHE_REFRESH_COST = 1\n",
"\n",
"SEED = 42\n",
"np.random.seed(SEED)\n",
"random.seed(SEED)\n",
"\n",
"LAMBDA_VALUES = np.array([np.random.zipf(ZIPF_CONSTANT) for i in np.arange(1, DATABASE_OBJECT_COUNT + 1,1)])"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "9cc83cf6-5c78-4f0d-b7cb-08cdb80c362e",
"metadata": {},
"outputs": [],
"source": [
"# LAMBDA_VALUES = np.array([0.03, 0.04,0.05,0.06,0.07,1,1.1,1.2,1.3,1.4,1.5])\n",
"# DATABASE_OBJECT_COUNT = len(LAMBDA_VALUES)\n",
"# CACHE_SIZE = 4.4\n",
"# CACHE_MISS_COST = 7\n",
"# CACHE_REFRESH_COST = 1"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "3dc07233-0b56-4fee-a93b-212836c18b42",
"metadata": {},
"outputs": [],
"source": [
"db_object_count = DATABASE_OBJECT_COUNT\n",
"cache_sz = CACHE_SIZE\n",
"\n",
"lambda_vals = LAMBDA_VALUES\n",
"c_f = CACHE_MISS_COST\n",
"c_delta = CACHE_REFRESH_COST"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "5a27d416-8f98-4814-af9e-6c6bef95f4ef",
"metadata": {},
"outputs": [],
"source": [
"def eta_star(db_object_count, c_f, cache_sz, c_delta, lambda_vals):\n",
" num = (db_object_count * c_f - cache_sz * c_delta)\n",
" denom = np.sum(1.0/lambda_vals)\n",
" return max(0, num/denom)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "6276a9ce-f839-4fe6-90f2-2195cf065fc8",
"metadata": {},
"outputs": [],
"source": [
"def h_i_star(c_f, eta, lambda_vals, c_delta):\n",
" optimized_hitrate = (c_f - (eta/lambda_vals)) / c_delta\n",
" return optimized_hitrate"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "dcd31a8c-6864-4b9a-8bb3-998f0c32baf6",
"metadata": {},
"outputs": [],
"source": [
"def get_index_of_furthest_hitrate_from_boundary(hitrates):\n",
" local_hitrates = hitrates[(hitrates < 0) | (hitrates > 1)]\n",
" smallest_delta = np.abs(np.min(local_hitrates))\n",
" biggest_delta = np.max(local_hitrates) - 1\n",
" if smallest_delta > biggest_delta:\n",
" index = np.where(hitrates == np.min(local_hitrates))[0][0]\n",
" return index\n",
" else:\n",
" index = np.where(hitrates == np.max(local_hitrates))[0][0]\n",
" return index"
]
},
{
"cell_type": "code",
"execution_count": 14,
"id": "ccd4b95d-1cdd-4c99-a22e-4b31338993cf",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([0.30256805, 0.76752268, 0.30256805, 0.30256805, 0.65128403,\n",
" 0.30256805, 0.86051361, 0.30256805, 0.30256805, 0.30256805,\n",
" 0.65128403, 0.30256805, 0.30256805, 0.30256805, 0.65128403,\n",
" 0.65128403, 0.30256805, 0.30256805, 0.76752268, 0.30256805,\n",
" 0.30256805, 0.30256805, 0.30256805, 0.65128403, 0.30256805,\n",
" 0.30256805, 0.30256805, 0.86051361, 0.30256805, 0.30256805,\n",
" 0.30256805, 0.82564201, 0.30256805, 0.82564201, 0.30256805,\n",
" 0.30256805, 0.30256805, 0.76752268, 0.91282101, 0.30256805,\n",
" 0.82564201, 0.82564201, 0.65128403, 0.30256805, 0.30256805,\n",
" 0.30256805, 0.93025681, 0.30256805, 0.30256805, 0.30256805,\n",
" 0.86051361, 0.92250756, 0.30256805, 0.30256805, 0.30256805,\n",
" 0.30256805, 0.30256805, 0.95897459, 0.65128403, 0.30256805,\n",
" 0.97317569, 0.30256805, 0.30256805, 0.65128403, 0.30256805,\n",
" 0.93025681, 0.30256805, 0.98989229, 0.30256805, 0.30256805,\n",
" 0.65128403, 0.30256805, 0.30256805, 0.30256805, 0.76752268,\n",
" 0.65128403, 0.65128403, 0.76752268, 0.95350454, 0.30256805,\n",
" 0.30256805, 0.86051361, 0.65128403, 0.30256805, 0.30256805,\n",
" 0.65128403, 0.30256805, 0.65128403, 0.30256805, 0.30256805,\n",
" 0.65128403, 0.65128403, 0.76752268, 0.30256805, 0.65128403,\n",
" 0.30256805, 0.30256805, 0.98115049, 0.82564201, 0.65128403])"
]
},
"execution_count": 14,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"eta = eta_star(db_object_count, c_f, cache_sz, c_delta, lambda_vals)\n",
"optimized_hitrates = (c_f - eta / lambda_vals) / c_delta\n",
"optimized_hitrates"
]
},
{
"cell_type": "code",
"execution_count": 8,
"id": "0e21c26f-058a-4e56-a5ad-1c47bf28656c",
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"All values optimized.\n"
]
}
],
"source": [
"\"\"\"\n",
"Perform theoretical optimization to compute optimal hit probabilities.\n",
"\n",
"Parameters:\n",
"- lambda_vals (numpy array): Request rates for each item.\n",
"- B (float): Total cache size.\n",
"- c_f (float): Fetching linear cost (cache miss cost).\n",
"- c_delta (float): Age linear cost.\n",
"\n",
"Returns:\n",
"- h_opt (numpy array): Optimal hit probabilities for each item.\n",
"\"\"\"\n",
"optimized_hitrates = np.zeros(DATABASE_OBJECT_COUNT)\n",
"current_db_object_count = DATABASE_OBJECT_COUNT\n",
"current_cache_size = CACHE_SIZE\n",
"\n",
"differenc_set = np.arange(DATABASE_OBJECT_COUNT)\n",
"fix_i = []\n",
"\n",
"while True:\n",
" if current_db_object_count == 0:\n",
" print(\"No objects left to optimize.\")\n",
" if current_cache_size > 0:\n",
" # Redistribute unused cache size among items with zero hit probability\n",
" differenc_set = np.where(optimized_hitrates == 0)[0]\n",
" fix_i = np.setdiff1d(np.arange(DATABASE_OBJECT_COUNT), differenc_set)\n",
" current_db_object_count = len(differenc_set)\n",
" continue\n",
" else:\n",
" optimized_hitrates[differenc_set] = 0\n",
" break\n",
" # Compute Lagrangian multiplier and optimal hit probabilities\n",
" eta = eta_star(current_db_object_count, c_f, current_cache_size, c_delta, lambda_vals[differenc_set])\n",
" optimized_hitrates[differenc_set] = (c_f - eta / lambda_vals[differenc_set]) / c_delta\n",
"\n",
" if eta < 0:\n",
" print(\"eta was negative.\")\n",
" current_cache_size = current_db_object_count * c_f / c_delta # Adjust cache size for next iteration\n",
" continue\n",
" \n",
" if len((optimized_hitrates[differenc_set])[((optimized_hitrates[differenc_set]) < 0) | ((optimized_hitrates[differenc_set])> 1)]) == 0:\n",
" print(\"All values optimized.\")\n",
" break\n",
" \n",
" max_outbound_index = get_index_of_furthest_hitrate_from_boundary(optimized_hitrates)\n",
" optimized_hitrates[max_outbound_index] = (1 if optimized_hitrates[max_outbound_index] > 1 else 0)\n",
"\n",
" current_cache_size =- optimized_hitrates[max_outbound_index]\n",
" fix_i.append(max_outbound_index)\n",
" differenc_set = np.setdiff1d(np.arange(DATABASE_OBJECT_COUNT), fix_i)\n",
" current_db_object_count -= 1"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "11682b36-e705-4bd9-9d75-79012791d1ee",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "graphs",
"language": "python",
"name": "graphs"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.7"
}
},
"nbformat": 4, "nbformat": 4,
"nbformat_minor": 5 "nbformat_minor": 5
} }

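The loop in the modified notebook above can be distilled into a single self-contained function. The sketch below is illustrative: the name optimal_hitrates and the omission of the leftover-budget redistribution branch are editorial choices, not part of the commit.

import numpy as np

def optimal_hitrates(lambda_vals, cache_size, c_f, c_delta):
    # Iteratively apply the closed-form KKT step and pin objects whose
    # hit probability leaves [0, 1], mirroring the notebook's loop.
    lam = np.asarray(lambda_vals, dtype=float)
    n_total = len(lam)
    h = np.zeros(n_total)
    free = np.arange(n_total)   # objects still being optimized
    fixed = []                  # objects pinned to 0 or 1
    b = float(cache_size)       # remaining cache budget
    while free.size > 0:
        eta = max(0.0, (free.size * c_f - b * c_delta) / np.sum(1.0 / lam[free]))
        h[free] = (c_f - eta / lam[free]) / c_delta
        viol = free[(h[free] < 0) | (h[free] > 1)]
        if viol.size == 0:
            break               # all remaining hit probabilities are feasible
        dist = np.maximum(-h[viol], h[viol] - 1)   # distance outside [0, 1]
        worst = viol[np.argmax(dist)]
        h[worst] = 1.0 if h[worst] > 1 else 0.0
        b -= h[worst]           # the pinned object consumes part of the budget
        fixed.append(worst)
        free = np.setdiff1d(np.arange(n_total), fixed)
    return h

Called with the notebook's constants (LAMBDA_VALUES, CACHE_SIZE, CACHE_MISS_COST, CACHE_REFRESH_COST) it returns one hit probability per object, all within [0, 1].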

@@ -0,0 +1,473 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "ab5cd7d1-1a57-46fc-8282-dae0a6cc2944",
"metadata": {},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import random"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "3d1ad0b9-f6a8-4e98-84aa-6e02e4279954",
"metadata": {},
"outputs": [],
"source": [
"DATABASE_OBJECT_COUNT = 100\n",
"CACHE_SIZE = DATABASE_OBJECT_COUNT/2\n",
"ZIPF_CONSTANT = 2\n",
"\n",
"CACHE_MISS_COST = 2\n",
"CACHE_REFRESH_COST = 1\n",
"\n",
"SEED = 42\n",
"np.random.seed(SEED)\n",
"random.seed(SEED)\n",
"\n",
"LAMBDA_VALUES = np.array([np.random.zipf(ZIPF_CONSTANT) for i in np.arange(1, DATABASE_OBJECT_COUNT + 1,1)])"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "9cc83cf6-5c78-4f0d-b7cb-08cdb80c362e",
"metadata": {},
"outputs": [],
"source": [
"# LAMBDA_VALUES = np.array([0.03, 0.04,0.05,0.06,0.07,1,1.1,1.2,1.3,1.4,1.5])\n",
"# DATABASE_OBJECT_COUNT = len(LAMBDA_VALUES)\n",
"# CACHE_SIZE = 4.4\n",
"# CACHE_MISS_COST = 7\n",
"# CACHE_REFRESH_COST = 1"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "3dc07233-0b56-4fee-a93b-212836c18b42",
"metadata": {},
"outputs": [],
"source": [
"db_object_count = DATABASE_OBJECT_COUNT\n",
"cache_sz = CACHE_SIZE\n",
"\n",
"lambda_vals = LAMBDA_VALUES\n",
"c_f = CACHE_MISS_COST\n",
"c_delta = CACHE_REFRESH_COST"
]
},
{
"cell_type": "code",
"execution_count": 5,
"id": "5a27d416-8f98-4814-af9e-6c6bef95f4ef",
"metadata": {},
"outputs": [],
"source": [
"def eta_star(db_object_count, c_f, cache_sz, c_delta, lambda_vals):\n",
" num = (db_object_count * c_f - cache_sz * c_delta)\n",
" denom = np.sum(1.0/lambda_vals)\n",
" return max(0, num/denom)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"id": "6276a9ce-f839-4fe6-90f2-2195cf065fc8",
"metadata": {},
"outputs": [],
"source": [
"def h_i_star(c_f, eta, lambda_vals, c_delta):\n",
" optimized_hitrate = (c_f - (eta/lambda_vals)) / c_delta\n",
" return optimized_hitrate"
]
},
{
"cell_type": "code",
"execution_count": 7,
"id": "dcd31a8c-6864-4b9a-8bb3-998f0c32baf6",
"metadata": {},
"outputs": [],
"source": [
"def get_index_of_furthest_hitrate_from_boundary(hitrates):\n",
" local_hitrates = hitrates[(hitrates < 0) | (hitrates > 1)]\n",
" smallest_delta = np.abs(np.min(local_hitrates))\n",
" biggest_delta = np.max(local_hitrates) - 1\n",
" if smallest_delta > biggest_delta:\n",
" index = np.where(hitrates == np.min(local_hitrates))[0][0]\n",
" return index\n",
" else:\n",
" index = np.where(hitrates == np.max(local_hitrates))[0][0]\n",
" return index"
]
},
{
"cell_type": "code",
"execution_count": 18,
"id": "55b251f8-97ca-49a8-9ec6-be77dc1e49b2",
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"\"\"\"\n",
"Perform theoretical optimization to compute optimal hit probabilities.\n",
"\n",
"Parameters:\n",
"- lambda_vals (numpy array): Request rates for each item.\n",
"- B (float): Total cache size.\n",
"- c_f (float): Fetching linear cost (cache miss cost).\n",
"- c_delta (float): Age linear cost.\n",
"\n",
"Returns:\n",
"- h_opt (numpy array): Optimal hit probabilities for each item.\n",
"\"\"\"\n",
"optimized_hitrates = np.zeros(DATABASE_OBJECT_COUNT)\n",
"differenc_set = np.arange(DATABASE_OBJECT_COUNT)\n",
"fix_i = []\n",
"current_db_objects = DATABASE_OBJECT_COUNT\n",
"current_cache_size = CACHE_SIZE\n",
"\n",
"while True:\n",
" if current_db_objects == 0:\n",
" # Handle special case: no items left to optimize\n",
" if current_cache_size > 0:\n",
" # Redistribute unused cache size among items with zero hit probability\n",
" differenc_set = np.where(optimized_hitrates == 0)[0]\n",
" fix_i = np.setdiff1d(np.arange(DATABASE_OBJECT_COUNT), differenc_set)\n",
" current_db_objects = len(differenc_set)\n",
" continue\n",
" else:\n",
" optimized_hitrates[differenc_set] = 0\n",
" break\n",
" # Compute Lagrangian multiplier and optimal hit probabilities\n",
" mu = max(0, (current_db_objects * c_f - current_cache_size * c_delta) / np.sum(1.0 / lambda_vals[differenc_set]))\n",
" eta = eta_star(current_db_objects, c_f, current_cache_size, c_delta, lambda_vals[differenc_set])\n",
" assert(mu == eta)\n",
" optimized_hitrates[differenc_set] = (c_f - mu / lambda_vals[differenc_set]) / c_delta\n",
" # print(optimized_hitrates)\n",
" # Handle the case where mu < 0\n",
" if mu < 0:\n",
" current_cache_size = current_db_objects * c_f / c_delta # Adjust cache size for next iteration\n",
" continue\n",
" # Check for constraint violations\n",
" larger_i = np.where(optimized_hitrates > 1)[0] # h > 1\n",
" smaller_i = np.where(optimized_hitrates < 0)[0] # h < 0\n",
" # If no violations, optimization is complete\n",
" break_con = len(smaller_i) == 0 and len(larger_i) == 0\n",
" break_con1 = len((optimized_hitrates[differenc_set])[((optimized_hitrates[differenc_set]) < 0) | ((optimized_hitrates[differenc_set])> 1)]) == 0\n",
" assert(break_con == break_con1)\n",
" if break_con:\n",
" break\n",
" # Find the furthest violating item\n",
" min_viol, min_viol_i = (0, -1)\n",
" if len(smaller_i) > 0:\n",
" min_viol_i = np.argmin(optimized_hitrates)\n",
" min_viol = optimized_hitrates[min_viol_i]\n",
" max_viol, max_viol_i = (0, -1)\n",
" if len(larger_i) > 0:\n",
" larger = optimized_hitrates - 1\n",
" max_viol_i = np.argmax(larger)\n",
" max_viol = larger[max_viol_i]\n",
" # Compare the furthest violations and adjust accordingly\n",
" viol_i = min_viol_i\n",
" min_viol_flag = True # True if furthest is from the left boundary\n",
" if max_viol > abs(min_viol):\n",
" viol_i = max_viol_i\n",
" min_viol_flag = False \n",
" index = get_index_of_furthest_hitrate_from_boundary(optimized_hitrates)\n",
" if viol_i != index:\n",
" print(optimized_hitrates[viol_i])\n",
" print(optimized_hitrates[index])\n",
" assert(viol_i == index)\n",
" if min_viol_flag:\n",
" optimized_hitrates[viol_i] = 0\n",
" else:\n",
" optimized_hitrates[viol_i] = min(1, current_cache_size)\n",
"\n",
" # Update parameters for next iteration\n",
" current_cache_size =- optimized_hitrates[viol_i]\n",
" fix_i.append(viol_i)\n",
" differenc_set = np.setdiff1d(np.arange(DATABASE_OBJECT_COUNT), fix_i)\n",
" current_db_objects = DATABASE_OBJECT_COUNT - len(fix_i)"
]
},
{
"cell_type": "code",
"execution_count": 9,
"id": "efa16eaf-a10b-4927-99cd-190e2ffe1d1e",
"metadata": {},
"outputs": [],
"source": [
"a = optimized_hitrates\n",
"b = differenc_set"
]
},
{
"cell_type": "code",
"execution_count": 10,
"id": "0e21c26f-058a-4e56-a5ad-1c47bf28656c",
"metadata": {
"scrolled": true
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"All values optimized.\n"
]
}
],
"source": [
"\"\"\"\n",
"Perform theoretical optimization to compute optimal hit probabilities.\n",
"\n",
"Parameters:\n",
"- lambda_vals (numpy array): Request rates for each item.\n",
"- B (float): Total cache size.\n",
"- c_f (float): Fetching linear cost (cache miss cost).\n",
"- c_delta (float): Age linear cost.\n",
"\n",
"Returns:\n",
"- h_opt (numpy array): Optimal hit probabilities for each item.\n",
"\"\"\"\n",
"optimized_hitrates = np.zeros(DATABASE_OBJECT_COUNT)\n",
"differenc_set = np.arange(DATABASE_OBJECT_COUNT)\n",
"fix_i = []\n",
"current_db_objects = DATABASE_OBJECT_COUNT\n",
"current_cache_size = CACHE_SIZE\n",
"\n",
"while True:\n",
" if current_db_objects == 0:\n",
" # Handle special case: no items left to optimize\n",
" if current_cache_size > 0:\n",
" # Redistribute unused cache size among items with zero hit probability\n",
" differenc_set = np.where(optimized_hitrates == 0)[0]\n",
" fix_i = np.setdiff1d(np.arange(DATABASE_OBJECT_COUNT), differenc_set)\n",
" current_db_objects = len(differenc_set)\n",
" continue\n",
" else:\n",
" optimized_hitrates[differenc_set] = 0\n",
" break\n",
" # Compute Lagrangian multiplier and optimal hit probabilities\n",
" eta = eta_star(current_db_objects, c_f, current_cache_size, c_delta, lambda_vals[differenc_set])\n",
" optimized_hitrates[differenc_set] = (c_f - eta / lambda_vals[differenc_set]) / c_delta\n",
"\n",
" if mu < 0:\n",
" current_cache_size = current_db_objects * c_f / c_delta # Adjust cache size for next iteration\n",
" continue\n",
" \n",
" if len((optimized_hitrates[differenc_set])[((optimized_hitrates[differenc_set]) < 0) | ((optimized_hitrates[differenc_set])> 1)]) == 0:\n",
" print(\"All values optimized.\")\n",
" break\n",
" max_outbound_index = get_index_of_furthest_hitrate_from_boundary(optimized_hitrates)\n",
" optimized_hitrates[max_outbound_index] = (1 if optimized_hitrates[max_outbound_index] > 1 else 0)\n",
"\n",
" current_cache_size =- optimized_hitrates[max_outbound_index]\n",
" fix_i.append(max_outbound_index)\n",
" differenc_set = np.setdiff1d(np.arange(DATABASE_OBJECT_COUNT), fix_i)\n",
" current_db_objects = DATABASE_OBJECT_COUNT - len(fix_i)"
]
},
{
"cell_type": "code",
"execution_count": 11,
"id": "4f64253f-b389-4be9-b403-08027d480121",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
" 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
" 0., 0., 0., 0., 1., 0., 0., 0., 0., 0., 0., 0., 1., 0., 0., 0., 0.,\n",
" 1., 0., 0., 0., 0., 0., 1., 0., 0., 1., 0., 0., 0., 0., 1., 0., 1.,\n",
" 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 0., 0., 0., 0., 0., 0.,\n",
" 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 1., 0., 0.])"
]
},
"execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"optimized_hitrates"
]
},
{
"cell_type": "code",
"execution_count": 12,
"id": "17d818db-ec88-4c26-92af-6d74862525d9",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([ 0. , 0. , 0. , 0. , 0. ,\n",
" 0. , 0.43902439, 0. , 0. , 0. ,\n",
" 0. , 0. , 0. , 0. , 0. ,\n",
" 0. , 0. , 0. , 0. , 0. ,\n",
" 0. , 0. , 0. , 0. , 0. ,\n",
" 0. , 0. , 0.43902439, 0. , 0. ,\n",
" 0. , 0.04878049, 0. , 0.04878049, 0. ,\n",
" 0. , 0. , 0. , -0. , 0. ,\n",
" 0.04878049, 0.04878049, 0. , 0. , 0. ,\n",
" 0. , 0. , 0. , 0. , 0. ,\n",
" 0.43902439, 0. , 0. , 0. , 0. ,\n",
" 0. , 0. , 0. , 0. , 0. ,\n",
" -0. , 0. , 0. , 0. , 0. ,\n",
" -0. , 0. , 1. , 0. , 0. ,\n",
" 0. , 0. , 0. , 0. , 0. ,\n",
" 0. , 0. , 0. , -0. , 0. ,\n",
" 0. , 0.43902439, 0. , 0. , 0. ,\n",
" 0. , 0. , 0. , 0. , 0. ,\n",
" 0. , 0. , 0. , 0. , 0. ,\n",
" 0. , 0. , 0. , 0.04878049, 0. ])"
]
},
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"a"
]
},
{
"cell_type": "code",
"execution_count": 13,
"id": "791b3f96-527a-489e-970e-c92ec950177f",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([ 6, 27, 31, 33, 40, 41, 50, 81, 98])"
]
},
"execution_count": 13,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"b"
]
},
{
"cell_type": "code",
"execution_count": 14,
"id": "c22fa973-432a-4c05-89bf-2a6ea82ae3d2",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"array([ 6, 27, 50, 81])"
]
},
"execution_count": 14,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"differenc_set"
]
},
{
"cell_type": "code",
"execution_count": 15,
"id": "898e1266-5aaa-46f4-ac0f-c7807ac2b6bb",
"metadata": {},
"outputs": [],
"source": [
"db_object_count = DATABASE_OBJECT_COUNT\n",
"cache_sz = CACHE_SIZE\n",
"loop_lambda = lambda_vals\n",
"\n",
"non_optimized_values = np.arange(db_object_count)"
]
},
{
"cell_type": "code",
"execution_count": 16,
"id": "8cc9b8a9-f7ae-48fc-adfb-ac4b7a4998f1",
"metadata": {},
"outputs": [],
"source": [
"db_object_count = DATABASE_OBJECT_COUNT\n",
"cache_sz = CACHE_SIZE\n",
"loop_lambda = lambda_vals\n",
"\n",
"optimized_hitrate = np.zeros(db_object_count)\n",
"non_optimized_values = np.arange(db_object_count)\n",
"optimized_value = {}\n",
"\n",
"eta = eta_star(db_object_count, c_f, cache_sz, c_delta, loop_lambda[non_optimized_values])\n",
"optimized_hitrate[non_optimized_values] = h_i_star(c_f, eta, loop_lambda[non_optimized_values], c_delta)\n",
"\n",
"max_outbound_index = get_index_of_furthest_hitrate_from_boundary(optimized_hitrate)\n",
"optimized_value[max_outbound_index] = (1 if optimized_hitrate[max_outbound_index] > 1 else 0)"
]
},
{
"cell_type": "code",
"execution_count": 17,
"id": "cbcf3592-fcf2-4f54-a3cd-761097c12972",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"{67: 1}"
]
},
"execution_count": 17,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"optimized_value"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "db732331-1d09-45b7-915c-73daa270b5e2",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "graphs",
"language": "python",
"name": "graphs"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.7"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

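The last few cells of this notebook perform one such iteration by hand: compute the unconstrained hit rates, then locate the entry furthest outside [0, 1]. Below is a self-contained sketch of that single step; the parameter values mirror the notebook's constants, but exact numbers will differ because the lambdas here are drawn in one call rather than one at a time.

import numpy as np

np.random.seed(42)
lam = np.random.zipf(2, 100).astype(float)       # request rates per object
N, B, c_f, c_delta = 100, 50.0, 2.0, 1.0

eta = max(0.0, (N * c_f - B * c_delta) / np.sum(1.0 / lam))
h = (c_f - eta / lam) / c_delta                  # unconstrained hit rates

out_of_bounds = np.where((h < 0) | (h > 1))[0]
if out_of_bounds.size > 0:
    dist = np.maximum(-h[out_of_bounds], h[out_of_bounds] - 1)
    worst = out_of_bounds[dist.argmax()]
    print(worst, h[worst])                       # this object would be pinned to 0 or 1 next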

@@ -1,25 +1,25 @@
function [h_optt] = Theoritical_opt(lambda,B,c_f,c_delta)
%% Theoritical optimization
%% Iterative identification of active constraints
N=length(lambda)
flag=1;
h_optt=zeros(N,1); %optimal hit prob
differenc_set=1:N; % the set of variables to optimize
fix_i=[]; % set of variables that reached optimality and are excluded from the optimization
n=N;
b=B;
%%
while flag
if(n==0)
if(b>0) % if there is left over cache size and mu is not zero (the loop would break), redistribute it among the zero hit probability
differenc_set=find(h_optt==0)';
fix_i=setdiff(1:N,differenc_set)';
n=length(differenc_set);
continue;
else
h_optt(differenc_set)=0;
break;
end
@@ -27,23 +27,23 @@ while flag
% Optimal Lagrangian mult. and hit prob. calculated theoritically for the set of variables in differenc_set
mu=max(0,(n*c_f-b*c_delta)/ sum(1./lambda(differenc_set))); %optimal lagrangian mult.
h_optt(differenc_set)=(c_f-mu./lambda(differenc_set))/c_delta %optimal hit prob
% mu has to be >=0
if(mu<0)
b=(n*c_f/c_delta); % this sets mu to zero in the next iteration
continue;
end
% check the violation of the hit_prob const
larger_i=find(h_optt>1); % h>1
smaller_i=find(h_optt<0); % h<0
if(length(smaller_i)==0 && length(larger_i)==0)
break;
end
% find the furthest object from the 0 boundary
min_viol=0;
min_viol_i=-1;
@@ -51,14 +51,14 @@ while flag
[min_viol, min_viol_i]=min(h_optt);
end
% find the furthest object from the 1 boundary
max_viol=0;
max_viol_i=-1;
if(length(larger_i)>0)
larger=h_optt-1;
[max_viol ,max_viol_i]=max(h_optt-1);
end
% compare both furthest objects from both boundaries
viol_i=min_viol_i;
min_viol_flag=1; % True if the furthest one is from the left
@@ -73,18 +73,15 @@ while flag
else
h_optt(viol_i)=min(1,b);
end
%calculate the new parameters after removing the furthest object from
%the decision variables
B_new=b-(h_optt(viol_i));
b=B_new;
fix_i=[fix_i' viol_i']';
differenc_set=setdiff(1:N,fix_i) ;
n=N-length(fix_i);
end
end

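To make the reference loop above concrete, here is the first Lagrangian step worked through in Python on the small parameter set that appears commented out in the notebooks. The MATLAB function computes the same mu and hit probabilities; the snippet itself is illustrative and not part of the commit.

import numpy as np

lam = np.array([0.03, 0.04, 0.05, 0.06, 0.07, 1, 1.1, 1.2, 1.3, 1.4, 1.5])
N, B, c_f, c_delta = len(lam), 4.4, 7.0, 1.0

mu = max(0.0, (N * c_f - B * c_delta) / np.sum(1.0 / lam))   # optimal Lagrangian multiplier
h = (c_f - mu / lam) / c_delta                               # unconstrained hit probabilities
print(mu)
print(h)   # the coldest objects come out far below 0 and are pinned to 0 first

From here the loop removes the worst violator, shrinks the budget by whatever hit probability it was pinned to, and repeats on the remaining objects.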
File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long