{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "import pandas as pd\n", "import numpy as np\n", "import os\n", "import sumolib\n", "import copy\n", "from tqdm import tqdm\n", "from datetime import datetime" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [], "source": [ "m = 105\n", "midnight = int(datetime(2024, 1, 5, 0, 0, 0).timestamp())\n", "next_day = int(datetime(2024, 1, 6, 0, 0, 0).timestamp())\n", "fmins = range(midnight, next_day, 300)\n", "\n", "# current time\n", "present_time = fmins[m]\n", "sigtable_start = fmins[m] - 1200\n", "sim_start = fmins[m] - 900\n", "sim_end = fmins[m] - 600\n", "\n", "# network and dataframes (NOTE: path casing unified to 'Data' -- the rest of the notebook uses '../../Data/...', and mixed casing breaks on case-sensitive filesystems)\n", "net = sumolib.net.readNet('../../Data/networks/sn.net.xml')\n", "inter_node = pd.read_csv('../../Data/tables/inter_node.csv', index_col=0)\n", "plan = pd.read_csv('../../Data/tables/plan.csv', index_col=0)\n", "match6 = pd.read_csv('../../Data/tables/matching/match6.csv', index_col=0)\n", "match6 = match6[['node_id', 'phase_no', 'ring_type', 'inc_edge', 'out_edge']].reset_index(drop=True)\n", "histid = pd.read_csv(f'../../Data/tables/histids/histids_{present_time}.csv', index_col=0)\n", "histid = histid.reset_index(drop=True).drop(columns=['inter_no'])\n", "\n", "# helper dictionaries\n", "inter2node = dict(zip(inter_node['inter_no'], inter_node['node_id']))\n", "node2inter = dict(zip(inter_node['node_id'], inter_node['inter_no']))\n", "pa2ch = {'i0':['u00'], 'i1':[], 'i2':['u20'], 'i3':['c30', 'u30', 'u31', 'u32'], 'i6':['u60'], 'i7':[], 'i8':[], 'i9':[]}\n", "node_ids = sorted(inter_node.node_id.unique())\n", "parent_ids = sorted(inter_node[inter_node.inter_type=='parent'].node_id.unique())\n", "nodes = [net.getNode(node_id) for node_id in node_ids]" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "['c30',\n", " 'i0',\n", " 'i1',\n", " 'i2',\n", " 'i3',\n", " 'i6',\n", " 'i7',\n", " 'i8',\n", " 
'i9',\n", " 'u00',\n", " 'u20',\n", " 'u30',\n", " 'u31',\n", " 'u32',\n", " 'u60']" ] }, "execution_count": 3, "metadata": {}, "output_type": "execute_result" } ], "source": [ "node_ids" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "c30 140\n", "i0 150\n", "i1 150\n", "i2 150\n", "i3 150\n", "i6 150\n", "i7 150\n", "i8 150\n", "i9 150\n", "u00 160\n", "u20 150\n", "u60 150\n" ] } ], "source": [ "node2mincycle = {}\n", "for inter_no in sorted(plan.inter_no.unique()):\n", " mincycle = plan[plan.inter_no==inter_no].cycle.min()\n", " parent_id = inter2node[inter_no]\n", " node2mincycle[parent_id] = mincycle\n", " # BUGFIX: propagate the minimum cycle to this node's own children only. The previous\n", " # `for child_id in pa2ch:` iterated over ALL pa2ch keys (the parents), clobbering every\n", " # parent with the last inter_no's cycle and never assigning the children at all.\n", " # .get() is used because inter2node may also map to non-parent nodes absent from pa2ch.\n", " for child_id in pa2ch.get(parent_id, []):\n", " node2mincycle[child_id] = mincycle\n", "for node_id in sorted(node2mincycle):\n", " print(node_id, node2mincycle[node_id])" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [], "source": [ "def make_histids(histid, match6, parent_ids, pa2ch):\n", " # Duplicate each parent's signal-history rows for its child nodes, filling in the\n", " # child's own incoming/outgoing edges per ring (A/B) from match6.\n", " new_histids = []\n", " for parent_id in parent_ids:\n", " for child_id in pa2ch[parent_id]:\n", " # filter first, then copy -- copying the whole frame before filtering was wasteful\n", " new_histid = histid[histid.node_id==parent_id].copy()\n", " new_histid[['inc_edge_A', 'out_edge_A', 'inc_edge_B', 'out_edge_B']] = np.nan\n", " for i, row in new_histid.iterrows():\n", " phas_A = row.phas_A\n", " phas_B = row.phas_B\n", " new_match = match6[match6.node_id==child_id]\n", " Arow = new_match[(new_match.phase_no==phas_A) & (new_match.ring_type=='A')]\n", " # `not` instead of `~`: `~` only behaves as logical negation on numpy bools,\n", " # and silently misbehaves on a plain Python bool (~True == -2, which is truthy)\n", " if not Arow[['inc_edge', 'out_edge']].isna().all().all():\n", " inc_edge = Arow.iloc[0].inc_edge\n", " out_edge = Arow.iloc[0].out_edge\n", " new_histid.loc[i, ['inc_edge_A', 'out_edge_A']] = [inc_edge, out_edge]\n", " Brow = new_match[(new_match.phase_no==phas_B) & (new_match.ring_type=='B')]\n", " if not Brow[['inc_edge', 'out_edge']].isna().all().all():\n", " inc_edge = Brow.iloc[0].inc_edge\n", " out_edge = Brow.iloc[0].out_edge\n", " new_histid.loc[i, ['inc_edge_B', 'out_edge_B']] = [inc_edge, out_edge]\n", " 
new_histid.loc[i, 'node_id'] = child_id\n", " new_histids.append(new_histid)\n", " new_histids = pd.concat(new_histids)\n", " histids = pd.concat([histid.copy(), new_histids])\n", " histids = histids.sort_values(by=['start_unix', 'node_id', 'phas_A', 'phas_B']).reset_index(drop=True)\n", " return histids" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [], "source": [ "def initialize_states(net, nodes, histids):\n", " node2init = {}\n", " for node in nodes:\n", " node_id = node.getID()\n", " conns = [(c.getJunctionIndex(), c) for c in node.getConnections()]\n", " conns = [c for c in conns if c[0] >= 0]\n", " conns = sorted(conns, key=lambda x: x[0])\n", " state = []\n", " for i, ci in conns:\n", " if ci.getTLLinkIndex() < 0:\n", " continue\n", " are_foes = False\n", " for j, cj in conns:\n", " if ci.getTo() == cj.getTo():\n", " continue\n", " if node.areFoes(i, j):\n", " are_foes = True\n", " break\n", " state.append('r' if are_foes else 'g')\n", " node2init[node_id] = state\n", "\n", " # 어떤 연결과도 상충이 일어나지는 않지만, 신호가 부여되어 있는 경우에는 r을 부여\n", " for _, row in histids.iterrows():\n", " node_id = row['node_id']\n", " inc_edge_A = row.inc_edge_A\n", " inc_edge_B = row.inc_edge_B\n", " out_edge_A = row.out_edge_A\n", " out_edge_B = row.out_edge_B\n", "\n", " if pd.isna(inc_edge_A) or pd.isna(out_edge_A):\n", " pass\n", " else:\n", " inc_edge_A = net.getEdge(inc_edge_A)\n", " out_edge_A = net.getEdge(out_edge_A)\n", " for conn in inc_edge_A.getConnections(out_edge_A):\n", " index = conn.getTLLinkIndex()\n", " if index >= 0:\n", " node2init[node_id][index] = 'r'\n", "\n", " if pd.isna(inc_edge_B) or pd.isna(out_edge_B):\n", " pass\n", " else:\n", " inc_edge_B = net.getEdge(inc_edge_B)\n", " out_edge_B = net.getEdge(out_edge_B)\n", " for conn in inc_edge_B.getConnections(out_edge_B):\n", " index = conn.getTLLinkIndex()\n", " if index >= 0:\n", " node2init[node_id][index] = 'r'\n", " return node2init" ] }, { "cell_type": "code", "execution_count": 7, 
"metadata": {}, "outputs": [], "source": [ "def make_sigtable(histids, node2init, net):\n", " # Build the per-phase signal-state table: start from each node's base state and\n", " # switch the connections matched by the phase's inc/out edges (rings A and B) to 'G'.\n", " sigtable = histids.copy()\n", " sigtable['init_state'] = sigtable['node_id'].map(node2init)\n", " sigtable['state'] = sigtable['init_state'].map(lambda x:''.join(x))\n", " for i, row in sigtable.iterrows():\n", " node_id = row.node_id\n", " inc_edge_A = row.inc_edge_A\n", " inc_edge_B = row.inc_edge_B\n", " out_edge_A = row.out_edge_A\n", " out_edge_B = row.out_edge_B\n", " # copy only this node's state list -- deep-copying the whole node2init dict on\n", " # every row (as before) was O(rows x nodes) for no benefit\n", " state = list(node2init[node_id])\n", " if pd.isna(inc_edge_A) or pd.isna(out_edge_A):\n", " pass\n", " else:\n", " inc_edge_A = net.getEdge(inc_edge_A)\n", " out_edge_A = net.getEdge(out_edge_A)\n", " for conn in inc_edge_A.getConnections(out_edge_A):\n", " index = conn.getTLLinkIndex()\n", " if index >= 0:\n", " state[index] = 'G'\n", " sigtable.at[i, 'state'] = ''.join(state)\n", "\n", " if pd.isna(inc_edge_B) or pd.isna(out_edge_B):\n", " pass\n", " else:\n", " inc_edge_B = net.getEdge(inc_edge_B)\n", " out_edge_B = net.getEdge(out_edge_B)\n", " for conn in inc_edge_B.getConnections(out_edge_B):\n", " index = conn.getTLLinkIndex()\n", " if index >= 0:\n", " state[index] = 'G'\n", " sigtable.at[i, 'state'] = ''.join(state)\n", " # list-form subset works across pandas versions (a bare string was only accepted later)\n", " sigtable = sigtable.dropna(subset=['state'])\n", " sigtable = sigtable.reset_index(drop=True)\n", " sigtable['phase_sumo'] = sigtable.groupby(['node_id', 'start_unix']).cumcount()\n", " # sigtable = sigtable[sigtable.start_unix >= sigtable_start]\n", " sigtable = sigtable[['node_id', 'start_unix', 'phase_sumo', 'duration', 'state']]\n", " sigtable = sigtable.sort_values(by=['start_unix', 'node_id'])\n", " sigtable['start_dt'] = sigtable['start_unix'].apply(lambda x:datetime.fromtimestamp(x))\n", " return sigtable" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [], "source": [ "def make_Sigtable(sigtable):\n", " Sigtable = []\n", " for node_id, group in sigtable.groupby('node_id'):\n", " new_rows_list = []\n", " for i in range(1, 
len(group)):\n", " prev_row = group.iloc[i-1:i].copy()\n", " next_row = group.iloc[i:i+1].copy()\n", " new_rows = pd.concat([prev_row, prev_row, next_row]).reset_index(drop=True)\n", " new_rows.loc[0, 'phase_sumo'] = str(prev_row.phase_sumo.iloc[0]) + '_g'\n", " new_rows.loc[0, 'duration'] = new_rows.loc[0, 'duration'] - 5\n", " new_rows.loc[1, 'phase_sumo'] = str(prev_row.phase_sumo.iloc[0]) + '_y'\n", " new_rows.loc[1, 'duration'] = 4\n", " yellow_state = ''\n", " red_state = ''\n", " for a, b in zip(prev_row.state.iloc[0], next_row.state.iloc[0]):\n", " if a == 'G' and b == 'r':\n", " yellow_state += 'y'\n", " red_state += 'r'\n", " else:\n", " yellow_state += a\n", " red_state += a\n", " new_rows.loc[2, 'phase_sumo'] = str(next_row.phase_sumo.iloc[0]) + '__r'\n", " new_rows.loc[2, 'duration'] = 1\n", " new_rows.loc[1, 'state'] = yellow_state\n", " new_rows.loc[2, 'state'] = red_state\n", " new_rows_list.append(new_rows)\n", " next_row['phase_sumo'] = str(next_row.phase_sumo.iloc[0]) + '_g'\n", " next_row['duration'] -= 5\n", " # next_row.loc['duration'] -= 5\n", " new_rows_list.append(next_row)\n", " new_rows = pd.concat(new_rows_list)\n", " Sigtable.append(new_rows)\n", " Sigtable = pd.concat(Sigtable).sort_values(by=['node_id', 'start_unix', 'phase_sumo']).reset_index(drop=True)\n", " return Sigtable" ] }, { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [], "source": [ "# # sigtable 시작시각 : 현재시각 - 1200 = 08:25\n", "# # 시뮬레이션 시작시각 : 현재시각 - 900 = 08:30\n", "# # 시뮬레이션 종료시각 : 현재시각 - 600 = 08:35\n", "# # 현재시각 : 08:45\n", "# print(datetime.fromtimestamp(sigtable_start))\n", "# print(datetime.fromtimestamp(sim_start))\n", "# print(datetime.fromtimestamp(sim_end))\n", "# print(datetime.fromtimestamp(present_time))" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [], "source": [ "def make_SIGTABLE(Sigtable, sim_start, sim_end):\n", " offsets = {}\n", " SIGTABLE = []\n", " for node_id, group in 
Sigtable.groupby('node_id'):\n", " lsbs = group[group['start_unix'] < sim_start]['start_unix'].max() # the last start_unix before sim_start\n", " offsets[node_id] = lsbs - sim_start\n", " group = group[(group['start_unix'] < sim_end) & (group['start_unix'] >= lsbs)]\n", " SIGTABLE.append(group)\n", " SIGTABLE = pd.concat(SIGTABLE)\n", " return SIGTABLE, offsets\n", "\n", "def make_signals(SIGTABLE, offsets, present_time):\n", " # Write one static tlLogic program per node into a SUMO additional file (add.xml).\n", " # NOTE(review): the XML tag text below was reconstructed -- the original markup appears\n", " # to have been stripped from this cell (duration/state/offsets were computed but the\n", " # appended f-strings contained no placeholders, so an empty file was written).\n", " # Verify against SUMO's additional-file tlLogic/phase format before relying on it.\n", " strings = ['<additional>\\n']\n", " for node_id, group in SIGTABLE.groupby('node_id'):\n", " strings.append(f' <tlLogic id=\"{node_id}\" programID=\"{node_id}\" offset=\"{offsets[node_id]}\" type=\"static\">\\n')\n", " for i, row in group.iterrows():\n", " duration = row.duration\n", " state = row.state\n", " strings.append(f' <phase duration=\"{duration}\" state=\"{state}\"/>\\n')\n", " strings.append(' </tlLogic>\\n')\n", " strings.append('</additional>')\n", " strings = ''.join(strings)\n", " # save\n", " path_output = f'../../Data/networks/sn_{present_time}.add.xml'\n", " with open(path_output, 'w') as f:\n", " f.write(strings)" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [], "source": [ "def generate_signals(m):\n", " midnight = int(datetime(2024, 1, 5, 0, 0, 0).timestamp())\n", " next_day = int(datetime(2024, 1, 6, 0, 0, 0).timestamp())\n", " fmins = range(midnight, next_day, 300)\n", "\n", " # current time\n", " present_time = fmins[m]\n", " sigtable_start = fmins[m] - 1200\n", " sim_start = fmins[m] - 900\n", " sim_end = fmins[m] - 600\n", " \n", " # network and dataframes (path casing unified to 'Data' for case-sensitive filesystems)\n", " net = sumolib.net.readNet('../../Data/networks/sn.net.xml')\n", " inter_node = pd.read_csv('../../Data/tables/inter_node.csv', index_col=0)\n", " match6 = pd.read_csv('../../Data/tables/matching/match6.csv', index_col=0)\n", " match6 = match6[['node_id', 'phase_no', 'ring_type', 'inc_edge', 'out_edge']].reset_index(drop=True)\n", " histid = pd.read_csv(f'../../Data/tables/histids/histids_{present_time}.csv', index_col=0)\n", " histid = histid.reset_index(drop=True).drop(columns=['inter_no'])\n", " \n", " # helper dictionaries\n", " inter2node = dict(zip(inter_node['inter_no'], inter_node['node_id']))\n", " 
node2inter = dict(zip(inter_node['node_id'], inter_node['inter_no']))\n", " pa2ch = {'i0':['u00'], 'i1':[], 'i2':['u20'], 'i3':['c30', 'u30', 'u31', 'u32'], 'i6':['u60'], 'i7':[], 'i8':[], 'i9':[]}\n", " node_ids = sorted(inter_node.node_id.unique())\n", " parent_ids = sorted(inter_node[inter_node.inter_type=='parent'].node_id.unique())\n", " nodes = [net.getNode(node_id) for node_id in node_ids]\n", "\n", " # histids\n", " histids = make_histids(histid, match6, parent_ids, pa2ch)\n", "\n", " # node2init\n", " node2init = initialize_states(net, nodes, histids)\n", "\n", " # sigtable\n", " sigtable = make_sigtable(histids, node2init, net)\n", "\n", " # Sigtable\n", " Sigtable = make_Sigtable(sigtable)\n", "\n", " # SIGTABLE\n", " SIGTABLE, offsets = make_SIGTABLE(Sigtable, sim_start, sim_end)\n", "\n", " make_signals(SIGTABLE, offsets, present_time)\n", " print(f'A signal file (add.xml) has been created for the timeslot between {datetime.fromtimestamp(sim_start)} and {datetime.fromtimestamp(sim_end)}')" ] }, { "cell_type": "code", "execution_count": 12, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "A signal file (add.xml) has been created for the timeslot between 2024-01-05 08:30:00 and 2024-01-05 08:35:00\n" ] } ], "source": [ "generate_signals(105)" ] } ], "metadata": { "kernelspec": { "display_name": "rts", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.8.10" } }, "nbformat": 4, "nbformat_minor": 2 }