refactor(simulation): Copied changes from aoi_cache_simulation to multi_aoi_cache_simulation
Signed-off-by: Tuan-Dat Tran <tuan-dat.tran@tudattr.dev>
3 binary files changed (diff not shown).
00_aoi_caching_simulation/.gitignore vendored (new file, +1)
@@ -0,0 +1 @@
+.aoi_cache/
File diff suppressed because one or more lines are too long
@@ -121,7 +121,7 @@
 " self.db = db\n",
 " self.storage = {} # Dictionary to store cached objects\n",
 " self.ttl = {} # Dictionary to store TTLs\n",
-" self.age = {} # Dictionary to store age of each object\n",
+" self.initial_fetch = {} # Dictionary to store when an object was fetched from the database, to determine its age\n",
 " self.cache_size_over_time = [] # To record cache state at each interval\n",
 " self.cache_next_request_over_time = []\n",
 " self.request_log = {i: [] for i in range(1, DATABASE_OBJECTS + 1)}\n",
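The initial_fetch timestamp replaces the per-tick age counter: an object's age at time t is simply t minus the time it was last fetched from the database. A minimal sketch of that bookkeeping, with hypothetical values:

    # Age derived on demand from the fetch timestamp (no per-tick aging).
    initial_fetch = {42: 7.25}       # hypothetical: object 42 fetched at t=7.25
    now = 9.75                       # current simulation time (env.now)
    age = now - initial_fetch[42]    # 2.5 seconds old
    print(age)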
@@ -134,80 +134,91 @@
 " self.cumulative_cache_time = {i: 0 for i in range(1, DATABASE_OBJECTS + 1)} # Stores the cumulative time the object has spent in the cache, from when it was first pulled in until its eviction\n",
 " \n",
 " def get(self, obj_id):\n",
-" if obj_id in self.storage and \\\n",
-" (self.ttl[obj_id] > env.now or CACHE_TTL == 0):\n",
+" if obj_id in self.storage:\n",
+" # Cache hit: Refresh TTL if TTL-Cache\n",
+" if self.cache_type == CacheType.TTL:\n",
+" if self.ttl[obj_id] > env.now:\n",
+" self.ttl[obj_id] = env.now + CACHE_TTL\n",
+" \n",
 " # Cache hit: increment hit count and update cumulative age\n",
 " self.hits[obj_id] += 1\n",
-" self.cumulative_age[obj_id] += self.age[obj_id]\n",
 " self.access_count[obj_id] += 1\n",
+" \n",
+" self.cumulative_age[obj_id] += (env.now - self.initial_fetch[obj_id])\n",
+"\n",
+" # Cache hit: Refresh database object on hit\n",
+" # self.initial_fetch[obj_id] = env.now\n",
 " else:\n",
-" # Cache miss: increment miss count\n",
-" self.misses[obj_id] += 1\n",
-" self.cumulative_age[obj_id] += 0\n",
-" self.access_count[obj_id] += 1\n",
-" self.age[obj_id] = 0\n",
-" \n",
-" # Fetch the object from the database if it’s not in cache\n",
-" obj = self.db.get_object(obj_id)\n",
-" self.object_start_time[obj_id] = env.now\n",
-" \n",
-" # If the cache is full, evict the oldest object\n",
-" if len(self.storage) > CACHE_CAPACITY:\n",
-" if self.cache_type == CacheType.LRU:\n",
-" self.evict_oldest()\n",
-" elif self.cache_type == CacheType.RANDOM_EVICTION:\n",
-" self.evict_random()\n",
-" \n",
-" # Add the object to cache, set TTL, reset age, and schedule next refresh\n",
-" self.storage[obj_id] = obj\n",
-" if CACHE_TTL != 0:\n",
+" assert obj_id not in self.storage.keys(), \"Found object in cache on miss.\"\n",
+" assert obj_id not in self.initial_fetch.keys(), \"Found age timer on miss.\"\n",
+" assert obj_id not in self.object_start_time.keys(), \"Found cache time ratio timer on miss.\"\n",
+" # Cache miss: Add TTL if TTL-Cache\n",
+" # When full cache: If non-TTL-Cache: Evict. If TTL-Cache: Don't add to Cache.\n",
+" if self.cache_type == CacheType.TTL:\n",
+" assert obj_id not in self.ttl.keys(), \"Found TTL entry on miss.\"\n",
 " self.ttl[obj_id] = env.now + CACHE_TTL\n",
 " else:\n",
-" self.ttl[obj_id] = 0\n",
+" if len(self.storage) == DATABASE_OBJECTS:\n",
+" if self.cache_type == CacheType.LRU:\n",
+" self.evict_oldest()\n",
+" elif self.cache_type == CacheType.RANDOM_EVICTION:\n",
+" self.evict_random()\n",
+" elif self.cache_type == CacheType.TTL:\n",
+" return\n",
+" \n",
+" # Cache miss: increment miss count\n",
+" self.misses[obj_id] += 1\n",
+" self.access_count[obj_id] += 1\n",
+" \n",
+" # Cache miss: Fetch the object from the database\n",
+" self.storage[obj_id] = self.db.get_object(obj_id)\n",
+" self.object_start_time[obj_id] = env.now\n",
+" \n",
+" self.initial_fetch[obj_id] = env.now\n",
+" self.cumulative_age[obj_id] += (env.now - self.initial_fetch[obj_id])\n",
+" \n",
 " if MAX_REFRESH_RATE != 0:\n",
 " self.next_refresh[obj_id] = env.now + np.random.exponential(1/self.db.mu_values[obj_id]) # Schedule refresh\n",
-"\n",
 " \n",
 " def evict_oldest(self):\n",
 " \"\"\"Remove the oldest item from the cache to make space.\"\"\"\n",
-" oldest_id = max(self.age, key=self.age.get) # Find the oldest item by age\n",
-" print(f\"[{env.now:.2f}] Cache: Evicting oldest object {oldest_id} to make space at {self.ttl[oldest_id]:.2f}\")\n",
+" oldest_id = min(self.initial_fetch, key=self.initial_fetch.get) # Find the oldest item by fetch time\n",
+" print(f\"[{env.now:.2f}] Cache: Evicting oldest object {oldest_id} to make space\")\n",
+" self.cumulative_cache_time[oldest_id] += (env.now - self.object_start_time[oldest_id])\n",
 " del self.storage[oldest_id]\n",
-" del self.ttl[oldest_id]\n",
-" del self.age[oldest_id]\n",
+" del self.initial_fetch[oldest_id]\n",
+" del self.object_start_time[oldest_id]\n",
 "\n",
 " def evict_random(self):\n",
 " \"\"\"Remove a random item from the cache to make space.\"\"\"\n",
 " random_id = np.random.choice(list(self.storage.keys())) # Select a random key from the cache\n",
-" print(f\"[{env.now:.2f}] Cache: Evicting random object {random_id} to make space at {self.ttl[random_id]:.2f}\")\n",
+" print(f\"[{env.now:.2f}] Cache: Evicting random object {random_id} to make space\")\n",
+" self.cumulative_cache_time[random_id] += (env.now - self.object_start_time[random_id])\n",
 " del self.storage[random_id]\n",
-" del self.ttl[random_id]\n",
-" del self.age[random_id]\n",
+" del self.initial_fetch[random_id]\n",
+" del self.object_start_time[random_id]\n",
 " \n",
 " def refresh_object(self, obj_id):\n",
 " \"\"\"Refresh the object from the database to keep it up-to-date. TTL is increased on refresh.\"\"\"\n",
 " obj = self.db.get_object(obj_id)\n",
 " self.storage[obj_id] = obj\n",
-" if CACHE_TTL != 0:\n",
+" if self.cache_type == CacheType.TTL:\n",
 " self.ttl[obj_id] = env.now + CACHE_TTL\n",
-" else:\n",
-" self.ttl[obj_id] = 0\n",
-" self.age[obj_id] = 0\n",
+" self.cumulative_cache_time[obj_id] += (env.now - self.object_start_time[obj_id])\n",
 " # print(f\"[{env.now:.2f}] Cache: Refreshed object {obj_id}\")\n",
 " \n",
-" def age_objects(self):\n",
-" \"\"\"Increment age of each cached object.\"\"\"\n",
-" for obj_id in list(self.age.keys()):\n",
-" self.age[obj_id] += 1\n",
-" # print(f\"[{env.now:.2f}] Cache: Object {obj_id} aged to {self.age[obj_id]}\")\n",
-" if CACHE_TTL != 0 and self.ttl[obj_id] <= env.now:\n",
-" # Remove object if its TTL expired\n",
-" # print(f\"[{env.now:.2f}] Cache: Object {obj_id} expired\")\n",
-" self.cumulative_cache_time[obj_id] += (env.now - self.object_start_time[obj_id])\n",
-" del self.storage[obj_id]\n",
-" del self.ttl[obj_id]\n",
-" del self.age[obj_id]\n",
-" del self.object_start_time[obj_id]\n",
+" def check_expired(self):\n",
+" \"\"\"Remove cached objects whose TTL has expired.\"\"\"\n",
+" if self.cache_type == CacheType.TTL:\n",
+" for obj_id in list(self.ttl.keys()):\n",
+" if self.ttl[obj_id] <= env.now:\n",
+" # Remove object if its TTL expired\n",
+" # print(f\"[{env.now:.2f}] Cache: Object {obj_id} expired\")\n",
+" self.cumulative_cache_time[obj_id] += (env.now - self.object_start_time[obj_id])\n",
+" del self.storage[obj_id]\n",
+" del self.ttl[obj_id]\n",
+" del self.initial_fetch[obj_id]\n",
+" del self.object_start_time[obj_id]\n",
 "\n",
 " \n",
 " def record_cache_state(self):\n",
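Pulled out of the notebook JSON, the reworked get() control flow is easier to follow as plain Python. A minimal sketch, assuming a CacheType enum and a dict-like db, with the notebook's globals (env.now, CACHE_TTL, the capacity constant) replaced by stand-ins; it omits the hit/miss counters and the notebook's "full TTL cache: don't insert" case:

    import enum, random

    class CacheType(enum.Enum):
        LRU = 1
        RANDOM_EVICTION = 2
        TTL = 3

    CAPACITY, CACHE_TTL = 100, 5.0   # stand-ins for the notebook's constants

    class Cache:
        def __init__(self, db, cache_type, now):
            self.db, self.cache_type, self.now = db, cache_type, now
            self.storage, self.ttl = {}, {}
            self.initial_fetch, self.object_start_time = {}, {}

        def get(self, obj_id):
            t = self.now()
            if obj_id in self.storage:
                # Hit: only a TTL cache refreshes the expiry timestamp.
                if self.cache_type is CacheType.TTL and self.ttl[obj_id] > t:
                    self.ttl[obj_id] = t + CACHE_TTL
                return self.storage[obj_id]
            # Miss: TTL caches stamp an expiry; bounded caches evict when full.
            if self.cache_type is CacheType.TTL:
                self.ttl[obj_id] = t + CACHE_TTL
            elif len(self.storage) >= CAPACITY:
                victim = (min(self.initial_fetch, key=self.initial_fetch.get)
                          if self.cache_type is CacheType.LRU
                          else random.choice(list(self.storage)))
                for d in (self.storage, self.ttl,
                          self.initial_fetch, self.object_start_time):
                    d.pop(victim, None)
            self.storage[obj_id] = self.db[obj_id]
            self.initial_fetch[obj_id] = self.object_start_time[obj_id] = t
            return self.storage[obj_id]

    cache = Cache(db={i: f"obj{i}" for i in range(200)},
                  cache_type=CacheType.LRU, now=lambda: 0.0)
    print(cache.get(7))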
@@ -226,8 +237,8 @@
 "def age_cache_process(env, cache):\n",
-" \"\"\"Process that ages cache objects over time, removes expired items, and refreshes based on object-specific intervals.\"\"\"\n",
+" \"\"\"Process that removes expired cache objects and refreshes objects based on object-specific intervals.\"\"\"\n",
 " while True:\n",
-" cache.age_objects() # Age objects and remove expired ones\n",
-"\n",
+" if cache.cache_type == CacheType.TTL:\n",
+" cache.check_expired() # Remove expired objects\n",
 "\n",
 " if MAX_REFRESH_RATE != 0:\n",
 " # Refresh objects based on their individual refresh intervals\n",
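Only TTL caches do periodic work now: the per-tick aging pass is gone, and the loop just sweeps expired entries. The sweep amounts to the following, with hypothetical timestamps:

    # Expiry sweep for a TTL cache: drop every object whose deadline passed.
    ttl = {1: 9.0, 2: 12.0}        # hypothetical expiry times
    storage = {1: "a", 2: "b"}
    now = 10.0
    for obj_id in list(ttl):       # list() lets us delete while iterating
        if ttl[obj_id] <= now:
            del storage[obj_id], ttl[obj_id]
    print(storage)                 # {2: 'b'}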
@@ -239,7 +250,7 @@
 " cache.next_refresh[obj_id] = env.now + np.random.exponential(1/cache.db.mu_values[obj_id])\n",
 " \n",
 " cache.record_cache_state() # Record cache state at each time step\n",
-" yield env.timeout(1) # Run every second\n"
+" yield env.timeout(0.05) # Run every 0.05 seconds"
 ]
 },
 {
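The refresh interval is drawn from an exponential distribution with rate mu; NumPy's exponential() takes the mean (scale = 1/mu), which is why the code passes 1/cache.db.mu_values[obj_id]. A quick check with an assumed rate:

    import numpy as np

    mu = 2.0                                     # assumed refresh rate (events/s)
    gaps = np.random.exponential(1 / mu, 100_000)
    print(gaps.mean())                           # ~0.5, i.e. 1/mu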
@@ -256,10 +267,13 @@
 " while True:\n",
 " obj_id, next_request = min(cache.db.next_request.items(), key=lambda x: x[1])\n",
 " yield env.timeout(next_request - env.now)\n",
+"\n",
+" # For progress bar\n",
 " if (int(env.now) % 1) == 0 and int(env.now) != last_print:\n",
 " last_print = int(env.now)\n",
 " pbar.n = min(cache.access_count.values())\n",
 " pbar.refresh()\n",
+" \n",
 " if env.now >= next_request:\n",
 " # print(f\"[{env.now:.2f}] Client: Requesting object {obj_id}\")\n",
 " cache.get(obj_id)\n",
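Progress here is "fewest accesses of any object so far", which can jump by more than one per step, so the bar is driven by assigning pbar.n directly and calling refresh() rather than update(). The same pattern standalone:

    from tqdm import tqdm

    pbar = tqdm(total=100)
    pbar.n = 42        # jump straight to 42/100
    pbar.refresh()     # redraw at the new position
    pbar.close()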
@@ -268,7 +282,12 @@
 " next_request = env.now + np.random.exponential(1/cache.db.lambda_values[obj_id])\n",
 " cache.request_log[obj_id].append(next_request)\n",
 " cache.db.next_request[obj_id] = next_request\n",
+" \n",
+" # Simulation stop condition\n",
 " if all(access_count >= ACCESS_COUNT_LIMIT for access_count in cache.access_count.values()):\n",
+" print(f\"Simulation ended after {env.now} seconds.\")\n",
+" for obj_id in cache.storage.keys():\n",
+" cache.cumulative_cache_time[obj_id] += (env.now - cache.object_start_time[obj_id])\n",
 " event.succeed()"
 ]
 },
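The new stop-condition block closes out the in-cache timer for objects still resident when the run ends; otherwise their final interval would be missing from cumulative_cache_time. The bookkeeping, with hypothetical values:

    # Close open [start, end] intervals for objects still cached at the end.
    object_start_time = {1: 93.0, 2: 97.5}     # hypothetical entry times
    cumulative_cache_time = {1: 40.0, 2: 12.0}
    end = 100.0
    for obj_id, start in object_start_time.items():
        cumulative_cache_time[obj_id] += end - start
    print(cumulative_cache_time)               # {1: 47.0, 2: 14.5}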
@@ -281,18 +300,19 @@
 "source": [
 "configurations = {\n",
 " \"default\": (DATABASE_OBJECTS, 10, CacheType.LRU, 5),\n",
-" \"No Refresh\": (DATABASE_OBJECTS, 0, CacheType.LRU, 5),\n",
+" \"No Refresh\": (DATABASE_OBJECTS, 0, CacheType.TTL, 5),\n",
 " \"Infinite TTL\": (int(DATABASE_OBJECTS / 2), 0, CacheType.LRU, 0),\n",
 " \"Random Eviction\": (int(DATABASE_OBJECTS / 2), 10, CacheType.RANDOM_EVICTION, 5),\n",
 " \"RE without Refresh\": (int(DATABASE_OBJECTS / 2), 0, CacheType.RANDOM_EVICTION, 5),\n",
-" \"No Refresh (0.5s ttl)\": (DATABASE_OBJECTS, 0, CacheType.LRU, 0.5),\n",
-" \"No Refresh (1.0s ttl)\": (DATABASE_OBJECTS, 0, CacheType.LRU, 1),\n",
-" \"No Refresh (2.0s ttl)\": (DATABASE_OBJECTS, 0, CacheType.LRU, 2),\n",
-" \"No Refresh (3.0s ttl)\": (DATABASE_OBJECTS, 0, CacheType.LRU, 3),\n",
-" \"No Refresh (4.0s ttl)\": (DATABASE_OBJECTS, 0, CacheType.LRU, 4),\n",
-" \"No Refresh (5.0s ttl)\": (DATABASE_OBJECTS, 0, CacheType.LRU, 5),\n",
+" \"No Refresh (0.5s ttl)\": (DATABASE_OBJECTS, 0, CacheType.TTL, 0.5),\n",
+" \"No Refresh (1.0s ttl)\": (DATABASE_OBJECTS, 0, CacheType.TTL, 1),\n",
+" \"No Refresh (2.0s ttl)\": (DATABASE_OBJECTS, 0, CacheType.TTL, 2),\n",
+" \"No Refresh (3.0s ttl)\": (DATABASE_OBJECTS, 0, CacheType.TTL, 3),\n",
+" \"No Refresh (4.0s ttl)\": (DATABASE_OBJECTS, 0, CacheType.TTL, 4),\n",
+" \"No Refresh (5.0s ttl)\": (DATABASE_OBJECTS, 0, CacheType.TTL, 5),\n",
 "}\n",
-"experiments = configurations.keys()\n"
+"\n",
+"experiments = configurations.keys()"
 ]
 },
 {
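Each configuration tuple is unpacked by position further down: indices 2 and 3 are shown there as cache_type and CACHE_TTL, and by the same pattern indices 0 and 1 appear to be the cache capacity and the refresh-rate cap (an assumption; only indices 2 and 3 are visible in this diff):

    # Assumed tuple layout: (capacity, max_refresh_rate, cache_type, ttl).
    capacity, max_refresh_rate, cache_type, cache_ttl = \
        configurations["No Refresh (1.0s ttl)"]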
@@ -355,6 +375,10 @@
 " cache_type = config[2]\n",
 " CACHE_TTL = config[3]\n",
 "\n",
+" if cache_type == CacheType.TTL:\n",
+" assert CACHE_TTL > 0, \"CACHE_TTL must be greater than 0 when using a TTL cache.\"\n",
+" assert CACHE_CAPACITY >= DATABASE_OBJECTS, \"Cache size must be greater than or equal to the number of database objects.\"\n",
+" \n",
 " # Initialize simulation environment\n",
 " env = simpy.Environment()\n",
 " \n",
@@ -376,12 +400,13 @@
 " for obj_id in range(1, DATABASE_OBJECTS + 1):\n",
 " if cache.access_count[obj_id] != 0:\n",
 " hit_rate = cache.hits[obj_id] / max(1, cache.access_count[obj_id]) # Avoid division by zero\n",
-" avg_age = cache.cumulative_age[obj_id] / max(1, cache.access_count[obj_id])\n",
+" expected_hit_rate = 1-math.exp(-db.lambda_values[obj_id]*CACHE_TTL)\n",
 " avg_cache_time = cache.cumulative_cache_time[obj_id] / max(1, simulation_end_time) # Fraction of simulation time spent in cache\n",
-" expected_age = (0.5*pow(hit_rate,2))\n",
-" # print(f\"Object {obj_id}: Hit Rate = {hit_rate:.2f}, Average Time spent in Cache: {avg_cache_time:.2f}, Average Age = {avg_age:.2f}, Expected Age = {expected_age:.2f}\")\n",
-" statistics.append({\"obj_id\": obj_id,\"hit_rate\": hit_rate, \"avg_cache_time\":avg_cache_time, \"avg_age\": avg_age, \"expected_age\": expected_age})\n",
-"\n",
+" avg_age = cache.cumulative_age[obj_id] / max(1, cache.access_count[obj_id])\n",
+" expected_age = pow(hit_rate,2) / 2\n",
+" # print(f\"Object {obj_id}: Hit Rate = {hit_rate:.2f}, Expected Hit Rate = {expected_hit_rate:.2f}, Average Time spent in Cache: {avg_cache_time:.2f}, Average Age = {avg_age:.2f}, Expected Age = {expected_age:.2f}\")\n",
+" statistics.append({\"obj_id\": obj_id,\"hit_rate\": hit_rate, \"expected_hit_rate\": expected_hit_rate, \"avg_cache_time\":avg_cache_time, \"avg_age\": avg_age, \"expected_age\": expected_age})\n",
+" \n",
 " stats = pd.DataFrame(statistics)\n",
 " stats.to_csv(f\"{TEMP_BASE_DIR}/hit_age.csv\",index=False)\n",
 " stats.drop(\"obj_id\", axis=1).describe().to_csv(f\"{TEMP_BASE_DIR}/overall_hit_age.csv\")\n",
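The new expected_hit_rate column is the standard closed form for a TTL cache with refresh-on-hit under Poisson requests: a request hits iff the previous request for that object arrived less than CACHE_TTL ago, and exponential inter-arrival gaps give P(gap < T) = 1 - exp(-lambda*T). A numerical sanity check with assumed values:

    import math
    import numpy as np

    lam, CACHE_TTL = 0.8, 5.0                      # assumed values
    gaps = np.random.exponential(1 / lam, 1_000_000)
    print((gaps < CACHE_TTL).mean())               # empirical hit rate
    print(1 - math.exp(-lam * CACHE_TTL))          # closed form, ~0.9817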
@@ -391,12 +416,18 @@
 " misses = pd.DataFrame.from_dict(cache.misses, orient='index', columns=['misses'])\n",
 " mu = pd.DataFrame.from_dict(db.mu_values, orient='index', columns=['mu'])\n",
 " lmbda = pd.DataFrame.from_dict(db.lambda_values, orient='index', columns=['lambda'])\n",
+" \n",
 " hit_rate = pd.DataFrame(stats['hit_rate'])\n",
 " hit_rate.index = range(1,DATABASE_OBJECTS + 1)\n",
+" expected_hit_rate = pd.DataFrame(stats['expected_hit_rate'])\n",
+" expected_hit_rate.index = range(1,DATABASE_OBJECTS + 1)\n",
+" expected_hit_rate_delta = pd.DataFrame((hit_rate.to_numpy()-expected_hit_rate.to_numpy()), columns=['expected_hit_rate_delta'])\n",
+" expected_hit_rate_delta.index = range(1,DATABASE_OBJECTS + 1)\n",
 " avg_cache_time = pd.DataFrame(stats['avg_cache_time'])\n",
 " avg_cache_time.index = range(1,DATABASE_OBJECTS + 1)\n",
 " cache_time_delta = pd.DataFrame((hit_rate.to_numpy()-avg_cache_time.to_numpy()), columns=['cache_time_delta'])\n",
 " cache_time_delta.index = range(1,DATABASE_OBJECTS + 1)\n",
+" \n",
 " avg_age = pd.DataFrame(stats['avg_age'])\n",
 " avg_age.index = range(1,DATABASE_OBJECTS + 1)\n",
 " expected_age = (0.5*pow(hit_rate,2)).rename(columns={'hit_rate': \"expected_age\"})\n",
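The .to_numpy() detour when forming the delta columns is deliberate: subtracting two DataFrames aligns on column names, and hit_rate minus expected_hit_rate would otherwise produce only NaN columns. A small demonstration:

    import pandas as pd

    a = pd.DataFrame({"hit_rate": [0.90, 0.80]})
    b = pd.DataFrame({"expected_hit_rate": [0.85, 0.82]})
    print(a - b)   # columns don't align -> all-NaN frame
    delta = pd.DataFrame(a.to_numpy() - b.to_numpy(),
                         columns=["expected_hit_rate_delta"])
    print(delta)   # 0.05 and -0.02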
@@ -405,7 +436,8 @@
 " \n",
 " merged = access_count.merge(hits, left_index=True, right_index=True).merge(misses, left_index=True, right_index=True) \\\n",
 " .merge(mu, left_index=True, right_index=True).merge(lmbda, left_index=True, right_index=True) \\\n",
-" .merge(hit_rate, left_index=True, right_index=True).merge(avg_cache_time, left_index=True, right_index=True).merge(cache_time_delta, left_index=True, right_index=True) \\\n",
+" .merge(hit_rate, left_index=True, right_index=True).merge(expected_hit_rate, left_index=True, right_index=True).merge(expected_hit_rate_delta, left_index=True, right_index=True) \\\n",
+" .merge(avg_cache_time, left_index=True, right_index=True).merge(cache_time_delta, left_index=True, right_index=True) \\\n",
 " .merge(avg_age, left_index=True, right_index=True).merge(expected_age, left_index=True, right_index=True).merge(age_delta, left_index=True, right_index=True)\n",
 " merged.to_csv(f\"{TEMP_BASE_DIR}/details.csv\", index_label=\"obj_id\")\n",
 "\n",