
tested thoroughly for the two scripts.

master
김선중 1 year ago
parent
commit
78f7c4b26a
5 changed files with 520 additions and 107 deletions
  1. BIN       Scripts/__pycache__/generate_signals.cpython-312.pyc
  2. +69 -9    Scripts/generate_signals.py
  3. +449 -64  analysis/0725_main_test/6_use_class_gs.ipynb
  4. +0 -0     test_0731/results/issues_generate_signals.txt
  5. +2 -34    test_0731/results/sn_1722384300.add.xml

BIN  Scripts/__pycache__/generate_signals.cpython-312.pyc


+69 -9  Scripts/generate_signals.py

@ -52,6 +52,7 @@ class SignalGenerator():
self.uturn_ids = ids['uturn_ids']
self.coord_ids = ids['coord_ids']
# 1. Prepare the data
def prepare_data(self):
print("1. 데이터를 준비합니다.")
@ -66,11 +67,13 @@ class SignalGenerator():
self.prepare_auxiliaries()
self.time15 = datetime.now()
# 1-1. Load the network
def load_networks(self):
self.net = sumolib.net.readNet(os.path.join(self.path_networks, 'sn.net.xml'))
print("1-1. 네트워크가 로드되었습니다.")
# 1-2. Load the tables
def load_tables(self):
if self.config_name == 'draft':
@ -197,6 +200,7 @@ class SignalGenerator():
self.plan.at[i, f'dura_A{j}'] = row[f'dura_B{j}']
print("1-2. 테이블들이 로드되었습니다.")
# 1-3. Network integrity checks
def check_networks(self):
# https://sumo.dlr.de/docs/Netedit/neteditUsageExamples.html#simplify_tls_program_state_after_changing_connections
@ -218,6 +222,7 @@ class SignalGenerator():
traci.close()
print("1-3. 네트워크의 모든 clean state requirement들을 체크했습니다.")
# 1-4. Table integrity checks
def check_tables(self):
self.check_history()
@ -226,6 +231,7 @@ class SignalGenerator():
# self.check_moves() # An integrity check on the movement numbers is needed but has not been written yet. (Feb 5, 2024, Tue)
print("1-4. 테이블들의 무결성 검사를 완료했습니다.")
# 1-4-1. Check the signal history (history)
def check_history(self):
# 1-4-1-1. Check inter_no
@ -252,6 +258,7 @@ class SignalGenerator():
if invalid_inter_nos:
msg = f"1-4-1-3. 음수이거나 200보다 큰 현시시간이 존재합니다. : {invalid_inter_nos}"
# 1-5. Build auxiliary dictionaries, dataframes, lists, etc.
def prepare_auxiliaries(self):
# inter2node : a dictionary that maps inter_no to the node_id
@ -346,7 +353,8 @@ class SignalGenerator():
self.time22 = datetime.now()
self.make_hrhists()
self.time23 = datetime.now()
# 2-1. rhistory
def make_rhistory(self):
# 1. Collect the signal history recorded before the query-time unix timestamp
@ -398,6 +406,7 @@ class SignalGenerator():
self.rhistory['start_unix'] = self.rhistory['start_unix'].astype(int)
self.rhistory = self.rhistory[['inter_no', 'start_unix'] + [f'dura_{alph}{j}' for alph in ['A', 'B'] for j in range(1,9)] + ['cycle']]
def load_prow(self, inter_no, time):
'''
load the planned row
@ -418,6 +427,7 @@ class SignalGenerator():
return program_start, prow
# 2-2. rhists
def make_rhists(self):
self.rhists = []
@ -516,6 +526,7 @@ class SignalGenerator():
self.rhists = pd.concat(self.rhists)#.sort_values(by=['start_unix','inter_no'])
self.rhists = self.rhists[self.rhists.start_unix >= self.present_time - self.subtractor // 2]
def calculate_DS(self, rhist, curr_unix):
# program_starts = np.array(self.timetable.start_seconds)
# idx = (program_starts <= self.present_time).sum() - 1
@ -543,6 +554,7 @@ class SignalGenerator():
S_n = S_n_durs.values.sum() // 2
return D_n, S_n
# 2-3. hrhists
def make_hrhists(self):
# Convert into a hierarchical form
@ -578,6 +590,7 @@ class SignalGenerator():
self.hrhists['start_unix'] = self.hrhists['start_unix'].astype(int)
# self.hrhists = self.hrhists.sort_values(by = ['start_unix', 'inter_no', 'phas_A', 'phas_B']).reset_index(drop=True)
# 3. Preprocess the movement information
def process_movement(self):
print("3. 이동류정보 테이블을 변환합니다.")
@ -586,6 +599,7 @@ class SignalGenerator():
self.update_movement()
self.time32 = datetime.now()
# 3-1. movement
def make_movement(self):
if self.config_name == 'draft' or self.config_name == 'test_0721':
@ -628,21 +642,56 @@ class SignalGenerator():
self.movement = movement
elif self.config_name == 'test_0731':
self.movement = pd.read_csv(os.path.join(self.path_tables, 'TL_IF_SIGL.csv'))
self.movement = self.movement.drop(columns=['FRST_REG_DT', 'RINGA_FLOW', 'RINGB_FLOW'])
self.movement = self.movement.rename(columns={
phases = pd.read_csv(os.path.join(self.path_tables, 'TL_IF_SIGL.csv'))
phases = phases.drop(columns=['FRST_REG_DT', 'RINGA_FLOW', 'RINGB_FLOW'])
phases = phases.rename(columns={
'PHASE_DT':'start_unix','CRSRD_ID':'inter_no',
'RINGA_PHASE':'phas_A', 'RINGB_PHASE':'phas_B',
'MAP_MODE':'STOS_NO'
})
isp2move_A = self.isp2move['A']
isp2move_B = self.isp2move['B']
self.movement['move_A'] = self.movement.apply(
phases['move_A'] = phases.apply(
lambda row: int(isp2move_A.get((row.inter_no, row.STOS_NO, row.phas_A), -1)), axis=1)
self.movement['move_B'] = self.movement.apply(
phases['move_B'] = phases.apply(
lambda row: int(isp2move_B.get((row.inter_no, row.STOS_NO, row.phas_B), -1)), axis=1)
self.movement['start_unix'] = pd.to_datetime(self.movement['start_unix'])
self.movement['start_unix'] = self.movement['start_unix'].apply(lambda x: int(x.timestamp()))
phases['start_unix'] = phases['start_unix'].apply(lambda x:int(datetime.strptime(x, '%Y-%m-%d %H:%M:%S').timestamp()))
for fsec in range(self.present_time - self.sim_timespan, self.present_time + 1, 5):
# 1. Query the status table and collect only the required fields (intersection no., ring-A phase no., ring-A movement no., ring-B phase no., ring-B movement no.) : A
moves = [group.iloc[-1:] for _, group in phases[phases.start_unix < fsec].groupby('inter_no')]
if not moves:
move = pd.DataFrame({
'start_unix':[], 'inter_no':[], 'phas_A':[], 'phas_B':[],
'STOS_NO':[], 'move_A':[], 'move_B':[]})
else:
move = pd.concat(moves)
move = move.drop(columns='start_unix')
# 2. Query the history table and collect, per intersection, only the row with the latest unix time (intersection no., end unix time) : B
recent_histories = [group.iloc[-1:] for _, group in self.history[self.history['end_unix'] < fsec].groupby('inter_no')] # rows with the latest unix time per intersection
if not recent_histories:
rhistory = pd.DataFrame({'inter_no':[], 'end_unix':[]}) # recent history
else:
rhistory = pd.concat(recent_histories)
recent_unix = rhistory[['inter_no', 'end_unix']]
# 3. Join the status-table data (A) with the history-table data (B) on intersection no. : C
move = pd.merge(move, recent_unix, how='left', on='inter_no')
move['end_unix'] = move['end_unix'].fillna(0).astype(int)
# 4. Add a new column (start unix time) to frame C, fill it with the end unix time, then drop the end unix time column
move = move.rename(columns = {'end_unix':'start_unix'})
# 5. READ the movement history
# - Read the movement data stored on the server as a CSV file (if no file exists, create an empty frame D)
try: # if movement already exists, use it as is
self.movement
except AttributeError: # otherwise, create movement
self.movement = pd.DataFrame()
# 6. Append the C data to the movement history table (D)
self.movement = pd.concat([self.movement, move])
# 7. Drop duplicate rows from frame D (rows with identical intersection no., start unix time, ring-A phase no., ring-B phase no.)
self.movement = self.movement.drop_duplicates(['inter_no','phas_A','phas_B','start_unix'])
# 8. Set the retention cutoff to the maximum start unix time - self.subtractor // 2 and drop every row whose start unix time is below it (keep only one hour of data)
self.movement = self.movement[self.movement.start_unix > fsec - self.subtractor // 2]
# 3-2. movement_updated
@ -670,6 +719,7 @@ class SignalGenerator():
else:
self.movement_updated = self.movement
# 4. Build the integrated table
def make_histids(self):
print("4. 통합 테이블을 생성합니다.")
@ -680,11 +730,13 @@ class SignalGenerator():
self.attach_children()
self.time43 = datetime.now()
# 4-1. movedur : movements and durations
def merge_dfs(self):
self.movedur = pd.merge(self.hrhists, self.movement_updated, how='inner', on=['inter_no', 'start_unix', 'phas_A', 'phas_B'])
self.movedur = self.movedur[['inter_no', 'start_unix', 'phas_A', 'phas_B', 'move_A', 'move_B', 'duration']]
# 4-2. histid
def assign_signals(self):
self.histid = self.movedur.copy()
@ -695,7 +747,6 @@ class SignalGenerator():
mapping_dict = self.matching.set_index(['node_id', 'move_no'])['state'].to_dict()
# matching : signals for every possible (node_id, movement no.) pair * related to time-offset operation
for i, row in self.histid.iterrows():
node_id = row.node_id
move_A = row.move_A
@ -715,6 +766,7 @@ class SignalGenerator():
state_B = ''.join(self.node2init[node_id])
self.histid.at[i, 'state_B'] = state_B
# 4-3. histids
def attach_children(self):
new_histids = []
@ -745,6 +797,7 @@ class SignalGenerator():
self.histids = self.histids.sort_values(by=['start_unix', 'node_id', 'phas_A', 'phas_B']).reset_index(drop=True)
self.histids = self.histids[['inter_no', 'node_id', 'start_unix', 'phas_A', 'phas_B', 'move_A', 'move_B', 'duration', 'state_A', 'state_B']]
# 5. Generate the signals
def get_signals(self):
print("5. 신호를 생성합니다.")
@ -755,6 +808,7 @@ class SignalGenerator():
self.make_tl_file()
self.time53 = datetime.now()
# 5-1. Set the start and end times of the signal file
def set_timepoints(self):
self.offsets = {}
@ -783,6 +837,7 @@ class SignalGenerator():
self.sigtable = pd.concat(self.sigtable).reset_index(drop=True)
self.sigtable['phase_sumo'] = self.sigtable.groupby(['node_id', 'start_unix']).cumcount()
# 5-2. Assign red and yellow signals
def assign_red_yellow(self):
'''
@ -847,6 +902,7 @@ class SignalGenerator():
self.SIGTABLE.append(SIG)
self.SIGTABLE = pd.concat(self.SIGTABLE)
# 5-2-1 helper function of 5-2
def get_red(self, pre_state:str, cur_state:str):
assert len(pre_state) == len(cur_state), "pre_state, cur_state의 길이가 서로 다릅니다."
@ -862,6 +918,7 @@ class SignalGenerator():
raise ValueError(f"예상치 못한 신호조합: previous={p}, current={c}")
return state_r
# 5-2-2 helper function of 5-2
def get_yellow(self, cur_state:str, nex_state:str):
assert len(cur_state) == len(nex_state), "cur_state, nex_state의 길이가 서로 다릅니다."
@ -879,6 +936,7 @@ class SignalGenerator():
raise ValueError(f"예상치 못한 신호조합: current={c}, next={n}")
return state_y
# 5-2-3 helper function of 5-2
def cumulate(self, sig, alph):
@ -950,6 +1008,7 @@ class SignalGenerator():
csig = pd.concat(csig).reset_index(drop=True)
return csig
# 5-3. Generate the signal file
def make_tl_file(self):
strings = ['<additional>\n']
@ -967,6 +1026,7 @@ class SignalGenerator():
with open(self.path_output, 'w') as f:
f.write(strings)
# 6. Save issues
def write_issues(self):
print('6. 이슈사항을 저장합니다.')
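
For reference, the accumulation that steps 1-8 of make_movement describe can be exercised on its own. The sketch below is an illustration only: the phases/history frames and the window constants standing in for self.present_time, self.sim_timespan and self.subtractor are made up, and the empty-group guards of the real method are omitted for brevity.

import pandas as pd

# Toy stand-ins for the class attributes (hypothetical values, illustration only).
phases = pd.DataFrame({
    'inter_no':   [437, 437, 455],
    'phas_A':     [1, 2, 1],
    'phas_B':     [1, 2, 1],
    'STOS_NO':    [0, 0, 0],
    'move_A':     [6, 5, 6],
    'move_B':     [2, 1, 2],
    'start_unix': [100, 160, 120],
})
history = pd.DataFrame({'inter_no': [437, 455], 'end_unix': [150, 110]})
present_time, sim_timespan, subtractor = 200, 60, 7200

movement = pd.DataFrame()
for fsec in range(present_time - sim_timespan, present_time + 1, 5):
    # 1. last known phase row per intersection before fsec (one-row DataFrames, not Series)
    moves = [g.iloc[-1:] for _, g in phases[phases.start_unix < fsec].groupby('inter_no')]
    move = pd.concat(moves).drop(columns='start_unix')
    # 2.-4. attach the latest history end_unix per intersection and treat it as start_unix
    recent = [g.iloc[-1:] for _, g in history[history.end_unix < fsec].groupby('inter_no')]
    recent_unix = pd.concat(recent)[['inter_no', 'end_unix']]
    move = pd.merge(move, recent_unix, how='left', on='inter_no')
    move['end_unix'] = move['end_unix'].fillna(0).astype(int)
    move = move.rename(columns={'end_unix': 'start_unix'})
    # 6.-8. append, de-duplicate, keep only the trailing window
    movement = pd.concat([movement, move])
    movement = movement.drop_duplicates(['inter_no', 'phas_A', 'phas_B', 'start_unix'])
    movement = movement[movement.start_unix > fsec - subtractor // 2]

print(movement)

Each pass keeps, per intersection, the latest phase row seen before fsec, restamps it with the latest history end_unix, and prunes everything older than subtractor // 2 before fsec, which is what bounds movement to roughly one hour of rows.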

+449 -64  analysis/0725_main_test/6_use_class_gs.ipynb

@ -53,6 +53,27 @@
"self.process_history()"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"3. 이동류정보 테이블을 변환합니다.\n",
"4. 통합 테이블을 생성합니다.\n",
"5. 신호를 생성합니다.\n"
]
}
],
"source": [
"self.process_movement()\n",
"self.make_histids()\n",
"self.get_signals()"
]
},
{
"cell_type": "code",
"execution_count": 4,
@ -244,15 +265,6 @@
"phases"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"self.history['end_unix'] = self.history['end_unix'].apply(lambda x:int(datetime.strptime(x, '%Y-%m-%d %H:%M:%S').timestamp()))\n"
]
},
{
"cell_type": "code",
"execution_count": 5,
@ -264,7 +276,7 @@
},
{
"cell_type": "code",
"execution_count": 26,
"execution_count": 6,
"metadata": {},
"outputs": [
{
@ -287,70 +299,443 @@
"print(fsecs[-1])"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"fsecs = list(range(self.present_time - self.sim_timespan, self.present_time + 1, 5))"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"ename": "ValueError",
"evalue": "Cannot merge a Series without a name",
"output_type": "error",
"traceback": [
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[1;31mValueError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[1;32mIn[7], line 20\u001b[0m\n\u001b[0;32m 18\u001b[0m recent_unix \u001b[38;5;241m=\u001b[39m rhistory[[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124minter_no\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mend_unix\u001b[39m\u001b[38;5;124m'\u001b[39m]]\n\u001b[0;32m 19\u001b[0m \u001b[38;5;66;03m# 3. 상태 테이블 조회정보(A)와 이력 테이블 조회정보(B) 조인(키값 : 교차로번호) : C\u001b[39;00m\n\u001b[1;32m---> 20\u001b[0m move \u001b[38;5;241m=\u001b[39m \u001b[43mpd\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmerge\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmove\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mrecent_unix\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mhow\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mleft\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mon\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43minter_no\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[0;32m 21\u001b[0m move[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mend_unix\u001b[39m\u001b[38;5;124m'\u001b[39m] \u001b[38;5;241m=\u001b[39m move[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mend_unix\u001b[39m\u001b[38;5;124m'\u001b[39m]\u001b[38;5;241m.\u001b[39mfillna(\u001b[38;5;241m0\u001b[39m)\u001b[38;5;241m.\u001b[39mastype(\u001b[38;5;28mint\u001b[39m)\n\u001b[0;32m 22\u001b[0m \u001b[38;5;66;03m# # 4. C데이터 프레임에 신규 컬럼(시작 유닉스타임) 생성 후 종료유닉스 타임 값 입력, 종료 유닉스 타임 컬럼 제거\u001b[39;00m\n",
"File \u001b[1;32mc:\\github\\siggen\\siggen_env\\Lib\\site-packages\\pandas\\core\\reshape\\merge.py:152\u001b[0m, in \u001b[0;36mmerge\u001b[1;34m(left, right, how, on, left_on, right_on, left_index, right_index, sort, suffixes, copy, indicator, validate)\u001b[0m\n\u001b[0;32m 135\u001b[0m \u001b[38;5;129m@Substitution\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124mleft : DataFrame or named Series\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 136\u001b[0m \u001b[38;5;129m@Appender\u001b[39m(_merge_doc, indents\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m0\u001b[39m)\n\u001b[0;32m 137\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mmerge\u001b[39m(\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 150\u001b[0m validate: \u001b[38;5;28mstr\u001b[39m \u001b[38;5;241m|\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[0;32m 151\u001b[0m ) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m DataFrame:\n\u001b[1;32m--> 152\u001b[0m left_df \u001b[38;5;241m=\u001b[39m \u001b[43m_validate_operand\u001b[49m\u001b[43m(\u001b[49m\u001b[43mleft\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 153\u001b[0m right_df \u001b[38;5;241m=\u001b[39m _validate_operand(right)\n\u001b[0;32m 154\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m how \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcross\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n",
"File \u001b[1;32mc:\\github\\siggen\\siggen_env\\Lib\\site-packages\\pandas\\core\\reshape\\merge.py:2689\u001b[0m, in \u001b[0;36m_validate_operand\u001b[1;34m(obj)\u001b[0m\n\u001b[0;32m 2687\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(obj, ABCSeries):\n\u001b[0;32m 2688\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m obj\u001b[38;5;241m.\u001b[39mname \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m-> 2689\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mCannot merge a Series without a name\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m 2690\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m obj\u001b[38;5;241m.\u001b[39mto_frame()\n\u001b[0;32m 2691\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n",
"\u001b[1;31mValueError\u001b[0m: Cannot merge a Series without a name"
]
"data": {
"text/html": [
"<div>\n",
"<style scoped>\n",
" .dataframe tbody tr th:only-of-type {\n",
" vertical-align: middle;\n",
" }\n",
"\n",
" .dataframe tbody tr th {\n",
" vertical-align: top;\n",
" }\n",
"\n",
" .dataframe thead th {\n",
" text-align: right;\n",
" }\n",
"</style>\n",
"<table border=\"1\" class=\"dataframe\">\n",
" <thead>\n",
" <tr style=\"text-align: right;\">\n",
" <th></th>\n",
" <th>inter_no</th>\n",
" <th>phas_A</th>\n",
" <th>phas_B</th>\n",
" <th>STOS_NO</th>\n",
" <th>move_A</th>\n",
" <th>move_B</th>\n",
" <th>start_unix</th>\n",
" </tr>\n",
" </thead>\n",
" <tbody>\n",
" <tr>\n",
" <th>6</th>\n",
" <td>455</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>6</td>\n",
" <td>2</td>\n",
" <td>1722383976</td>\n",
" </tr>\n",
" <tr>\n",
" <th>7</th>\n",
" <td>456</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>0</td>\n",
" <td>6</td>\n",
" <td>2</td>\n",
" <td>1722383967</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>437</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>6</td>\n",
" <td>2</td>\n",
" <td>1722384007</td>\n",
" </tr>\n",
" <tr>\n",
" <th>7</th>\n",
" <td>456</td>\n",
" <td>3</td>\n",
" <td>3</td>\n",
" <td>0</td>\n",
" <td>7</td>\n",
" <td>-1</td>\n",
" <td>1722383967</td>\n",
" </tr>\n",
" <tr>\n",
" <th>7</th>\n",
" <td>456</td>\n",
" <td>4</td>\n",
" <td>4</td>\n",
" <td>0</td>\n",
" <td>17</td>\n",
" <td>-1</td>\n",
" <td>1722383967</td>\n",
" </tr>\n",
" <tr>\n",
" <th>6</th>\n",
" <td>455</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>0</td>\n",
" <td>-1</td>\n",
" <td>-1</td>\n",
" <td>1722383976</td>\n",
" </tr>\n",
" <tr>\n",
" <th>8</th>\n",
" <td>457</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>6</td>\n",
" <td>2</td>\n",
" <td>1722384053</td>\n",
" </tr>\n",
" <tr>\n",
" <th>7</th>\n",
" <td>456</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>5</td>\n",
" <td>2</td>\n",
" <td>1722383967</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>437</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>0</td>\n",
" <td>5</td>\n",
" <td>1</td>\n",
" <td>1722384007</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>442</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>6</td>\n",
" <td>1</td>\n",
" <td>1722384098</td>\n",
" </tr>\n",
" <tr>\n",
" <th>8</th>\n",
" <td>457</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>0</td>\n",
" <td>5</td>\n",
" <td>1</td>\n",
" <td>1722384053</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>443</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>6</td>\n",
" <td>2</td>\n",
" <td>1722384102</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>437</td>\n",
" <td>3</td>\n",
" <td>3</td>\n",
" <td>0</td>\n",
" <td>8</td>\n",
" <td>3</td>\n",
" <td>1722384007</td>\n",
" </tr>\n",
" <tr>\n",
" <th>8</th>\n",
" <td>457</td>\n",
" <td>3</td>\n",
" <td>3</td>\n",
" <td>0</td>\n",
" <td>17</td>\n",
" <td>17</td>\n",
" <td>1722384053</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>442</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>0</td>\n",
" <td>6</td>\n",
" <td>2</td>\n",
" <td>1722384098</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>437</td>\n",
" <td>4</td>\n",
" <td>4</td>\n",
" <td>0</td>\n",
" <td>7</td>\n",
" <td>4</td>\n",
" <td>1722384007</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>442</td>\n",
" <td>3</td>\n",
" <td>3</td>\n",
" <td>0</td>\n",
" <td>5</td>\n",
" <td>2</td>\n",
" <td>1722384098</td>\n",
" </tr>\n",
" <tr>\n",
" <th>8</th>\n",
" <td>457</td>\n",
" <td>4</td>\n",
" <td>4</td>\n",
" <td>0</td>\n",
" <td>8</td>\n",
" <td>3</td>\n",
" <td>1722384053</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>442</td>\n",
" <td>4</td>\n",
" <td>4</td>\n",
" <td>0</td>\n",
" <td>7</td>\n",
" <td>4</td>\n",
" <td>1722384098</td>\n",
" </tr>\n",
" <tr>\n",
" <th>8</th>\n",
" <td>457</td>\n",
" <td>5</td>\n",
" <td>5</td>\n",
" <td>0</td>\n",
" <td>7</td>\n",
" <td>4</td>\n",
" <td>1722384053</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>437</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>6</td>\n",
" <td>2</td>\n",
" <td>1722384207</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>443</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>0</td>\n",
" <td>5</td>\n",
" <td>2</td>\n",
" <td>1722384102</td>\n",
" </tr>\n",
" <tr>\n",
" <th>8</th>\n",
" <td>457</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>6</td>\n",
" <td>2</td>\n",
" <td>1722384232</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>436</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>5</td>\n",
" <td>2</td>\n",
" <td>1722384236</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>442</td>\n",
" <td>5</td>\n",
" <td>5</td>\n",
" <td>0</td>\n",
" <td>8</td>\n",
" <td>3</td>\n",
" <td>1722384098</td>\n",
" </tr>\n",
" <tr>\n",
" <th>4</th>\n",
" <td>443</td>\n",
" <td>3</td>\n",
" <td>3</td>\n",
" <td>0</td>\n",
" <td>7</td>\n",
" <td>18</td>\n",
" <td>1722384102</td>\n",
" </tr>\n",
" <tr>\n",
" <th>8</th>\n",
" <td>457</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>0</td>\n",
" <td>5</td>\n",
" <td>1</td>\n",
" <td>1722384232</td>\n",
" </tr>\n",
" <tr>\n",
" <th>1</th>\n",
" <td>437</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>0</td>\n",
" <td>5</td>\n",
" <td>1</td>\n",
" <td>1722384207</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>442</td>\n",
" <td>6</td>\n",
" <td>6</td>\n",
" <td>0</td>\n",
" <td>8</td>\n",
" <td>3</td>\n",
" <td>1722384098</td>\n",
" </tr>\n",
" <tr>\n",
" <th>0</th>\n",
" <td>436</td>\n",
" <td>2</td>\n",
" <td>2</td>\n",
" <td>0</td>\n",
" <td>8</td>\n",
" <td>3</td>\n",
" <td>1722384236</td>\n",
" </tr>\n",
" <tr>\n",
" <th>3</th>\n",
" <td>442</td>\n",
" <td>1</td>\n",
" <td>1</td>\n",
" <td>0</td>\n",
" <td>6</td>\n",
" <td>1</td>\n",
" <td>1722384297</td>\n",
" </tr>\n",
" <tr>\n",
" <th>8</th>\n",
" <td>457</td>\n",
" <td>3</td>\n",
" <td>3</td>\n",
" <td>0</td>\n",
" <td>17</td>\n",
" <td>17</td>\n",
" <td>1722384232</td>\n",
" </tr>\n",
" </tbody>\n",
"</table>\n",
"</div>"
],
"text/plain": [
" inter_no phas_A phas_B STOS_NO move_A move_B start_unix\n",
"6 455 1 1 0 6 2 1722383976\n",
"7 456 2 2 0 6 2 1722383967\n",
"1 437 1 1 0 6 2 1722384007\n",
"7 456 3 3 0 7 -1 1722383967\n",
"7 456 4 4 0 17 -1 1722383967\n",
"6 455 2 2 0 -1 -1 1722383976\n",
"8 457 1 1 0 6 2 1722384053\n",
"7 456 1 1 0 5 2 1722383967\n",
"1 437 2 2 0 5 1 1722384007\n",
"3 442 1 1 0 6 1 1722384098\n",
"8 457 2 2 0 5 1 1722384053\n",
"4 443 1 1 0 6 2 1722384102\n",
"1 437 3 3 0 8 3 1722384007\n",
"8 457 3 3 0 17 17 1722384053\n",
"3 442 2 2 0 6 2 1722384098\n",
"1 437 4 4 0 7 4 1722384007\n",
"3 442 3 3 0 5 2 1722384098\n",
"8 457 4 4 0 8 3 1722384053\n",
"3 442 4 4 0 7 4 1722384098\n",
"8 457 5 5 0 7 4 1722384053\n",
"1 437 1 1 0 6 2 1722384207\n",
"4 443 2 2 0 5 2 1722384102\n",
"8 457 1 1 0 6 2 1722384232\n",
"0 436 1 1 0 5 2 1722384236\n",
"3 442 5 5 0 8 3 1722384098\n",
"4 443 3 3 0 7 18 1722384102\n",
"8 457 2 2 0 5 1 1722384232\n",
"1 437 2 2 0 5 1 1722384207\n",
"3 442 6 6 0 8 3 1722384098\n",
"0 436 2 2 0 8 3 1722384236\n",
"3 442 1 1 0 6 1 1722384297\n",
"8 457 3 3 0 17 17 1722384232"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"fsec = fsecs[5]\n",
"# 1. 상태 테이블 조회해서 전체 데이터중 필요데이터(교차로번호, A링 현시번호, A링 이동류번호, B링 현시번호, B링 이동류번호)만 수집 : A\n",
"moves = [group.iloc[-1] for _, group in phases[phases.start_unix < fsec].groupby('inter_no')]\n",
"if not moves:\n",
" move = pd.DataFrame({\n",
" 'start_unix':[], 'inter_no':[], 'phas_A':[], 'phas_B':[],\n",
" 'STOS_NO':[], 'move_A':[], 'move_B':[]})\n",
"else:\n",
" move = pd.concat(moves)\n",
"move = move.drop(columns='start_unix')\n",
"# 2. 이력 테이블 조회해서 교차로별로 유닉스시간 최대인 데이터(교차로번호, 종료유닉스타임)만 수집 : B\n",
"recent_histories = [group.iloc[-1:] for _, group in self.history[self.history['end_unix'] < fsec].groupby('inter_no')] # 교차로별로 유닉스시간이 최대인 행들\n",
"for fsec in range(self.present_time - self.sim_timespan, self.present_time + 1, 5):\n",
" # 1. 상태 테이블 조회해서 전체 데이터중 필요데이터(교차로번호, A링 현시번호, A링 이동류번호, B링 현시번호, B링 이동류번호)만 수집 : A\n",
" moves = [group.iloc[-1:] for _, group in phases[phases.start_unix < fsec].groupby('inter_no')]\n",
" if not moves:\n",
" move = pd.DataFrame({\n",
" 'start_unix':[], 'inter_no':[], 'phas_A':[], 'phas_B':[],\n",
" 'STOS_NO':[], 'move_A':[], 'move_B':[]})\n",
" else:\n",
" move = pd.concat(moves)\n",
" move = move.drop(columns='start_unix')\n",
" # 2. 이력 테이블 조회해서 교차로별로 유닉스시간 최대인 데이터(교차로번호, 종료유닉스타임)만 수집 : B\n",
" recent_histories = [group.iloc[-1:] for _, group in self.history[self.history['end_unix'] < fsec].groupby('inter_no')] # 교차로별로 유닉스시간이 최대인 행들\n",
"\n",
"if not recent_histories:\n",
" rhistory = pd.DataFrame({'inter_no':[], 'end_unix':[]}) # recent history\n",
"else:\n",
" rhistory = pd.concat(recent_histories)\n",
"recent_unix = rhistory[['inter_no', 'end_unix']]\n",
"# 3. 상태 테이블 조회정보(A)와 이력 테이블 조회정보(B) 조인(키값 : 교차로번호) : C\n",
"move = pd.merge(move, recent_unix, how='left', on='inter_no')\n",
"move['end_unix'] = move['end_unix'].fillna(0).astype(int)\n",
"# # 4. C데이터 프레임에 신규 컬럼(시작 유닉스타임) 생성 후 종료유닉스 타임 값 입력, 종료 유닉스 타임 컬럼 제거\n",
"move = move.rename(columns = {'end_unix':'start_unix'})\n",
"# 5. 이동류 이력정보 READ\n",
"# - CSV 파일로 서버에 저장된 이동류정보를 읽어옴(파일이 없는 경우에는 데이터가 없는 프레임 D 생성)\n",
"try:\n",
" movement # movement가 존재할 경우 그걸 그대로 씀.\n",
"except NameError: # movement가 존재하지 않는 경우 생성\n",
" movement = pd.DataFrame()\n",
"# 6. 이동류 이력정보 데이터테이블(D)에 C데이터 add\n",
"movement = pd.concat([movement, move])\n",
"# 7. D데이터 프레임에서 중복데이터 제거(교차로번호, 시작 유닉스타임, A링 현시번호, B링 현시번호 같은 행은 제거)\n",
"movement = movement.drop_duplicates(['inter_no','phas_A','phas_B','start_unix'])\n",
"# 8. D데이터 보관 시간 기준시간을 시작 유닉스 타임의 최대값 - self.subtractor // 2을 값으로 산출하고, 보관 시간 기준시간보다 작은 시작 유닉스 타임을 가진 행은 모두 제거(1시간 데이터만 보관)\n",
"movement = movement[movement.start_unix > fsec - self.subtractor // 2]\n",
" if not recent_histories:\n",
" rhistory = pd.DataFrame({'inter_no':[], 'end_unix':[]}) # recent history\n",
" else:\n",
" rhistory = pd.concat(recent_histories)\n",
" recent_unix = rhistory[['inter_no', 'end_unix']]\n",
" # 3. 상태 테이블 조회정보(A)와 이력 테이블 조회정보(B) 조인(키값 : 교차로번호) : C\n",
" move = pd.merge(move, recent_unix, how='left', on='inter_no')\n",
" move['end_unix'] = move['end_unix'].fillna(0).astype(int)\n",
" # # 4. C데이터 프레임에 신규 컬럼(시작 유닉스타임) 생성 후 종료유닉스 타임 값 입력, 종료 유닉스 타임 컬럼 제거\n",
" move = move.rename(columns = {'end_unix':'start_unix'})\n",
" # 5. 이동류 이력정보 READ\n",
" # - CSV 파일로 서버에 저장된 이동류정보를 읽어옴(파일이 없는 경우에는 데이터가 없는 프레임 D 생성)\n",
" try:\n",
" movement # movement가 존재할 경우 그걸 그대로 씀.\n",
" except NameError: # movement가 존재하지 않는 경우 생성\n",
" movement = pd.DataFrame()\n",
" # 6. 이동류 이력정보 데이터테이블(D)에 C데이터 add\n",
" movement = pd.concat([movement, move])\n",
" # 7. D데이터 프레임에서 중복데이터 제거(교차로번호, 시작 유닉스타임, A링 현시번호, B링 현시번호 같은 행은 제거)\n",
" movement = movement.drop_duplicates(['inter_no','phas_A','phas_B','start_unix'])\n",
" # 8. D데이터 보관 시간 기준시간을 시작 유닉스 타임의 최대값 - self.subtractor // 2을 값으로 산출하고, 보관 시간 기준시간보다 작은 시작 유닉스 타임을 가진 행은 모두 제거(1시간 데이터만 보관)\n",
" movement = movement[movement.start_unix > fsec - self.subtractor // 2]\n",
"movement\n"
]
},
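
The error the earlier run of this cell hit ("Cannot merge a Series without a name") comes down to iloc[-1] versus iloc[-1:]: taking the last row of each group with iloc[-1] yields unnamed Series, and their concat is a single Series that pd.merge rejects, whereas iloc[-1:] keeps one-row DataFrames. A minimal sketch with a made-up frame (illustration only, not part of the commit):

import pandas as pd

# Hypothetical stand-in for `phases` (illustration only).
phases = pd.DataFrame({'inter_no': [437, 437, 455],
                       'phas_A': [1, 2, 1],
                       'start_unix': [100, 160, 120]})

# iloc[-1] gives one Series per group; their concat is an unnamed Series,
# which pd.merge rejects with "Cannot merge a Series without a name".
as_series = pd.concat([g.iloc[-1] for _, g in phases.groupby('inter_no')])
print(type(as_series))  # <class 'pandas.core.series.Series'>

# iloc[-1:] keeps one-row DataFrames; their concat is a DataFrame and merges fine.
as_frame = pd.concat([g.iloc[-1:] for _, g in phases.groupby('inter_no')])
recent_unix = pd.DataFrame({'inter_no': [437, 455], 'end_unix': [150, 110]})
print(pd.merge(as_frame, recent_unix, how='left', on='inter_no'))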

+0 -0  test_0731/results/issues_generate_signals.txt


+2 -34  test_0731/results/sn_1722384300.add.xml

@ -62,10 +62,6 @@
<phase duration="41" state="gGGGrgrrrrgGGGrgrrr"/>
<phase duration="4" state="gyyyrgrrrrgyyyrgrrr"/>
<phase duration="16" state="grrrGgrrrrgrrrGgrrr"/>
<phase duration="4" state="grrrygrrrrgrrrygrrr"/>
<phase duration="36" state="grrrrgrrrrgrrrrgrrr"/>
<phase duration="4" state="grrrrgrrrrgrrrrgrrr"/>
<phase duration="36" state="grrrrgGGGGgrrrrgrrr"/>
</tlLogic>
<tlLogic id="106332" type="static" programID="106332_prog" offset="-102">
<phase duration="27" state="gGGGrGGrrrgrrgrrrrrrrrgrrrr"/>
@ -91,16 +87,6 @@
<phase duration="41" state="grrrrrrGGGgrrgrrrrrrrrgrrrr"/>
<phase duration="4" state="grrrrrrGGGgrrgrrrrrrrrgrrrr"/>
<phase duration="12" state="grrrrrrGGGgrrgrrrrrrrrgrrrr"/>
<phase duration="4" state="grrrrrryyygrrgrrrrrrrrgrrrr"/>
<phase duration="31" state="gGGGrGGrrrgrrgrrrrrrrrgrrrr"/>
<phase duration="4" state="gGGGryyrrrgrrgrrrrrrrrgrrrr"/>
<phase duration="23" state="gGGGrrrrrrgrrgrGGGGGrrgrrrr"/>
<phase duration="4" state="gyyyrrrrrrgrrgrGGGGGrrgrrrr"/>
<phase duration="26" state="grrrrrrrrrgrrgrGGGGGGGgrrrr"/>
<phase duration="4" state="grrrrrrrrrgrrgryyyyyyygrrrr"/>
<phase duration="43" state="grrrrrrrrrgrrgrrrrrrrrgGGGG"/>
<phase duration="4" state="grrrrrrrrrgrrgrrrrrrrrgyyyy"/>
<phase duration="41" state="grrrrrrGGGgrrgrrrrrrrrgrrrr"/>
</tlLogic>
<tlLogic id="107587" type="static" programID="107587_prog" offset="-193">
<phase duration="73" state="GGGGr"/>
@ -197,10 +183,6 @@
<phase duration="41" state="GGGrrGGG"/>
<phase duration="4" state="GGGrrGGG"/>
<phase duration="16" state="GGGGGGGG"/>
<phase duration="4" state="GGGyyGGG"/>
<phase duration="36" state="GGGrrGGG"/>
<phase duration="4" state="GGGrrGGG"/>
<phase duration="36" state="GGGrrGGG"/>
</tlLogic>
<tlLogic id="109297" type="static" programID="109297_prog" offset="-127">
<phase duration="41" state="GGGGGGrr"/>
@ -226,10 +208,6 @@
<phase duration="41" state="GGGGGGrr"/>
<phase duration="4" state="GGGGGGrr"/>
<phase duration="16" state="GGGGGGGG"/>
<phase duration="4" state="GGGGGGyy"/>
<phase duration="36" state="GGGGGGrr"/>
<phase duration="4" state="GGGGGGrr"/>
<phase duration="36" state="GGGGGGrr"/>
</tlLogic>
<tlLogic id="109313" type="static" programID="109313_prog" offset="-193">
<phase duration="73" state="GGGGGGGGr"/>
@ -276,10 +254,6 @@
<phase duration="4" state="grrrrgrrgyygrr"/>
<phase duration="81" state="grrrrgGGgrrgrr"/>
<phase duration="4" state="grrrrgyygrrgrr"/>
<phase duration="31" state="grrrrgrrgrrgGG"/>
<phase duration="4" state="grrrrgrrgrrgyy"/>
<phase duration="26" state="gGGGGgrrgrrgrr"/>
<phase duration="4" state="gyyyygrrgrrgrr"/>
<phase duration="46" state="grrrrgrrgGGgrr"/>
<phase duration="4" state="grrrrgrrgyygrr"/>
<phase duration="81" state="grrrrgGGgrrgrr"/>
@ -288,14 +262,8 @@
</tlLogic>
<tlLogic id="109901" type="static" programID="109901_prog" offset="-24">
<phase duration="59" state="GGGG"/>
<phase duration="4" state="GGGG"/>
<phase duration="59" state="GGGG"/>
<phase duration="4" state="GGGG"/>
<phase duration="59" state="GGGG"/>
<phase duration="4" state="GGGG"/>
<phase duration="59" state="GGGG"/>
<phase duration="4" state="GGGG"/>
<phase duration="59" state="GGGG"/>
<phase duration="4" state="yyyy"/>
<phase duration="23" state="rrrr"/>
</tlLogic>
<tlLogic id="109986" type="static" programID="109986_prog" offset="-193">
<phase duration="73" state="gGGGGrgrrgGGGGrgrr"/>

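Because the trimmed tlLogic programs above change the total phase time of several junctions, a quick sanity check is to sum each program's phase durations in the generated file. A rough sketch, not part of the commit, assuming the file sits at the path shown in the file list:

import xml.etree.ElementTree as ET

# Sum the phase durations of every tlLogic program in the generated additional file
# (path taken from this commit's file list; adjust if it differs).
root = ET.parse('test_0731/results/sn_1722384300.add.xml').getroot()
for tl in root.iter('tlLogic'):
    total = sum(float(phase.get('duration')) for phase in tl.iter('phase'))
    print(tl.get('id'), tl.get('programID'), total)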