
deleted fetch_tables.py and so on.

master
김선중 committed 1 year ago
commit 02523ae37d
3 changed files with 27 additions and 222 deletions
  1. Scripts/fetch_tables.py (+0, -132)
  2. Scripts/fetch_tables_sc.py (+0, -75)
  3. analysis/0709_fetch_tables/fetch_tables.ipynb (+27, -15)

Scripts/fetch_tables.py (+0, -132)

@@ -1,132 +0,0 @@
# (rts) PS C:\Github\siggen> python .\Scripts\fetch_tables.py
import pandas as pd
import pyodbc
import os, json, csv
from tqdm import tqdm
from datetime import datetime

starting_time = datetime.now()

# Set the project root folder
path_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
with open(os.path.join(path_root, 'Scripts', 'config.json'), 'r') as config_file:
    config = json.load(config_file)

# Set the main folder paths
paths = config['paths']
path_tables = os.path.join(path_root, *paths['tables'])
path_results = os.path.join(path_root, *paths['results'])

# List of issues found during processing
issues = []

# DB connection info
connection_info = config['connection_info']
DSNNAME = connection_info["DSNNAME"]
DBUSER = connection_info["DBUSER"]
DBPWD = connection_info["DBPWD"]

# Code to deliberately trigger an error
# DSNNAME += 'a'
# DBUSER += 'a'
# DBPWD += 'a'

# Connect to the database
try:
    cnxn = pyodbc.connect(f'DSN={DSNNAME};UID={DBUSER};PWD={DBPWD};charset=utf-8')
    cursor = cnxn.cursor()
    print("Database connection succeeded.")
except pyodbc.InterfaceError:
    print("Database connection failed: check the data source name or specify a default driver.")
    # Error-handling logic can be added here.
except pyodbc.OperationalError as e:
    if "Login failed" in str(e):
        print("Login failed: the user name or password is invalid.")
    else:
        print("Connection failed: an error occurred at the operating-system level.")
    # Error-handling logic can be added here.
except Exception as e:
    print(f"An unexpected error occurred: {e}")
    # Error-handling logic can be added here.

schema = 'SNITS_INT'
tables = ['S_INT_CONFIG',        # intersection controllers
          'S_INT_PHASE_CONFIG',  # intersection phase configuration
          'S_INT_TPLAN',         # intersection time plans
          'S_SA_CYCLE_PLAN',
          'S_SA_DPLAN',          # group daily plans
          'S_SA_WPLAN',          # group weekly plans
          'S_TOD_HIS']           # signal TOD history

# Create the folder Data\tables\yyyymmdd_hhmmss
timestamp = starting_time.strftime('%Y%m%d_%H%M%S')
# base_dir = os.path.join(path_tables, timestamp)
os.makedirs(os.path.join(path_tables, timestamp), exist_ok=True)

def fetch_table(table, condition=""):
    try:
        query = f"SELECT * FROM {schema}.{table} {condition}"
        cursor.execute(query)
        csv_file_path = os.path.join(path_tables, timestamp, f"{table}.csv")
        with open(csv_file_path, 'w', newline='', encoding='utf-8-sig') as csvfile:
            csv_writer = csv.writer(csvfile)
            columns = [column[0] for column in cursor.description]
            csv_writer.writerow(columns)
            for row in cursor.fetchall():
                csv_writer.writerow(row)
    except pyodbc.ProgrammingError as e:
        if '42S02' in str(e):
            print(f"Error: table '{table}' does not exist in the schema.")
        else:
            print(f"SQL execution error: {e}")
    except Exception as e:
        print(f"An unexpected error occurred: {e}")

fetch_table('S_INT_CONFIG')
fetch_table('S_INT_PHASE_CONFIG')
fetch_table('S_INT_TPLAN')
fetch_table('S_SA_CYCLE_PLAN')
fetch_table('S_SA_DPLAN')
fetch_table('S_SA_WPLAN')
fetch_table('S_TOD_HIS', condition="WHERE INT_CREATE_DATE >= TO_TIMESTAMP('2023-10-17 23:15:00.0') ORDER BY INT_NO ASC, INT_CREATE_DATE DESC;")
# Code to deliberately trigger an error
fetch_table('foo')
print("All tables have been fetched.")
cnxn.close()

inter_info = pd.read_csv(os.path.join(path_tables, timestamp, 'S_INT_CONFIG.csv'))
plan = pd.read_csv(os.path.join(path_tables, timestamp, 'S_INT_TPLAN.csv'))
history = pd.read_csv(os.path.join(path_tables, timestamp, 'S_TOD_HIS.csv'))
print(inter_info)
print(plan)
print(history)

# 1-4-2. Check the intersection info (inter_info)
def check_inter_info():
    # 1-4-2-1. Check that INT_LAT and INT_LNG fall within a plausible range
    inter_info.loc[0, 'INT_LAT'] = 38.0  # code to deliberately trigger an error
    max_lon, min_lon = 127.3, 127.0
    max_lat, min_lat = 37.5, 37.2
    for _, row in inter_info.iterrows():
        latbool = min_lat <= row['INT_LAT'] <= max_lat
        lonbool = min_lon <= row['INT_LNG'] <= max_lon
        if not (latbool and lonbool):
            msg = f"1-4-2-1. An intersection has a latitude or longitude out of range: INT_NO : {row['INT_NO']}"
            issues.append(msg)

def write_issues():
    print('3. Saving the issues.')
    path_issues = os.path.join(path_results, "issues_fetch_tables.txt")
    with open(path_issues, "w", encoding="utf-8") as file:
        for item in issues:
            file.write(item + "\n")
    if issues:
        print("The issues found during data processing are as follows:")
        for review in issues:
            print(review)

check_inter_info()
write_issues()
print("elapsed time :", datetime.now() - starting_time)

Scripts/fetch_tables_sc.py (+0, -75)

@@ -1,75 +0,0 @@
import sched
import time
import pyodbc
import os, json, csv
from datetime import datetime, timedelta

# Create the scheduler object
scheduler = sched.scheduler(time.time, time.sleep)

def fetch_table(scheduler, table, condition="", initial_run=False, is_move=False):
    now = datetime.now()
    timestamp = int(time.mktime(now.timetuple()))
    timestamp -= timestamp % 5  # snap to the Unix time whose seconds digit is 0 or 5
    folder_name = str(timestamp)
    folder_path = os.path.join(path_root, 'Data', 'fetched_tables', folder_name)
    if not os.path.exists(folder_path):
        os.makedirs(folder_path)
    if is_move:
        # The move table must be fetched at 5-second intervals,
        # so pull one snapshot per 5-second slot over the past 5 minutes.
        for offset in range(-300, 1, 5):
            current_timestamp = timestamp + offset
            file_name = f'move_{current_timestamp}.csv'
            file_path = os.path.join(folder_path, file_name)
            fetch_and_save(table, file_path, condition)
    else:
        file_path = os.path.join(folder_path, f'{table}.csv')
        fetch_and_save(table, file_path, condition)
    # Re-register this function with the scheduler for the next run.
    # (Note that only runs entered with initial_run=True re-schedule
    # themselves; the follow-up run is entered with initial_run=False.)
    if initial_run:
        next_run = now + timedelta(minutes=5 - now.minute % 5, seconds=-now.second, microseconds=-now.microsecond)
        scheduler.enterabs(time.mktime(next_run.timetuple()), 1, fetch_table,
                           (scheduler, table, condition, False, is_move))

def fetch_and_save(table, file_path, condition):
    query = f"SELECT * FROM {schema}.{table} {condition}"
    cursor.execute(query)
    with open(file_path, 'w', newline='', encoding='utf-8-sig') as csvfile:
        csv_writer = csv.writer(csvfile)
        columns = [column[0] for column in cursor.description]
        csv_writer.writerow(columns)
        for row in cursor.fetchall():
            csv_writer.writerow(row)

if __name__ == "__main__":
    path_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    credentials_path = os.path.join(path_root, 'Scripts', 'credentials.json')
    with open(credentials_path, 'r') as file:
        credentials = json.load(file)
    DSNNAME = credentials["DSNNAME"]
    DBUSER = credentials["DBUSER"]
    DBPWD = credentials["DBPWD"]
    cnxn = pyodbc.connect(f'DSN={DSNNAME};UID={DBUSER};PWD={DBPWD};charset=utf-8')
    cursor = cnxn.cursor()
    schema = 'snits_siggen'
    tables_and_conditions = {
        'inter_info': "",
        'angle': "WHERE collect_unix = (SELECT MAX(collect_unix) FROM angle)",
        'plan': "WHERE collect_unix = (SELECT MAX(collect_unix) FROM plan)",
        'history': "WHERE end_unix >= UNIX_TIMESTAMP() - 9000",  # replace UNIX_TIMESTAMP() with the appropriate Tibero function
        'move': ""  # the move table is handled separately
    }
    # Schedule the initial jobs and run the scheduler
    now = datetime.now()
    initial_delay = timedelta(minutes=5 - now.minute % 5, seconds=-now.second, microseconds=-now.microsecond).total_seconds()
    for table, condition in tables_and_conditions.items():
        is_move = (table == 'move')
        scheduler.enter(initial_delay, 1, fetch_table, (scheduler, table, condition, True, is_move))
    scheduler.run()
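Both the `initial_delay` computation and the `next_run` rescheduling in this deleted script use the same timedelta arithmetic to land on the next wall-clock 5-minute boundary. A self-contained sketch of that calculation (`seconds_until_next_5min` is a hypothetical helper, not part of the script):

```python
from datetime import datetime, timedelta

def seconds_until_next_5min(now=None):
    # Round up to the next instant whose minute is a multiple of 5,
    # e.g. 10:07:42 -> 10:10:00. On an exact boundary (minute % 5 == 0,
    # second == 0) it waits a full 5 minutes, matching the script.
    now = now or datetime.now()
    delta = timedelta(minutes=5 - now.minute % 5,
                      seconds=-now.second,
                      microseconds=-now.microsecond)
    return delta.total_seconds()

print(seconds_until_next_5min(datetime(2023, 10, 17, 10, 7, 42)))  # 138.0
```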

analysis/0709_fetch_tables/fetch_tables.ipynb (+27, -15)

@@ -2,7 +2,7 @@
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
@@ -13,19 +13,31 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 4,
"metadata": {},
"outputs": [
{
"ename": "FileNotFoundError",
"evalue": "[Errno 2] No such file or directory: 'c:\\\\Github\\\\snits_siggen\\\\configs\\\\config.json'",
"output_type": "error",
"traceback": [
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[1;31mFileNotFoundError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[1;32mIn[2], line 3\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[38;5;66;03m# 루트폴더 지정\u001b[39;00m\n\u001b[0;32m 2\u001b[0m path_root \u001b[38;5;241m=\u001b[39m os\u001b[38;5;241m.\u001b[39mpath\u001b[38;5;241m.\u001b[39mdirname(os\u001b[38;5;241m.\u001b[39mpath\u001b[38;5;241m.\u001b[39mdirname(os\u001b[38;5;241m.\u001b[39mpath\u001b[38;5;241m.\u001b[39mabspath(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m'\u001b[39m)))\n\u001b[1;32m----> 3\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28;43mopen\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mos\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpath\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mjoin\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpath_root\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mconfigs\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mconfig.json\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mr\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m \u001b[38;5;28;01mas\u001b[39;00m config_file:\n\u001b[0;32m 4\u001b[0m config \u001b[38;5;241m=\u001b[39m json\u001b[38;5;241m.\u001b[39mload(config_file)\n\u001b[0;32m 6\u001b[0m display(config)\n",
"File \u001b[1;32mc:\\Github\\snits_siggen\\siggen\\lib\\site-packages\\IPython\\core\\interactiveshell.py:284\u001b[0m, in \u001b[0;36m_modified_open\u001b[1;34m(file, *args, **kwargs)\u001b[0m\n\u001b[0;32m 277\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m file \u001b[38;5;129;01min\u001b[39;00m {\u001b[38;5;241m0\u001b[39m, \u001b[38;5;241m1\u001b[39m, \u001b[38;5;241m2\u001b[39m}:\n\u001b[0;32m 278\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\n\u001b[0;32m 279\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mIPython won\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mt let you open fd=\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfile\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m by default \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 280\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mas it is likely to crash IPython. If you know what you are doing, \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 281\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124myou can use builtins\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m open.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 282\u001b[0m )\n\u001b[1;32m--> 284\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mio_open\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfile\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
"\u001b[1;31mFileNotFoundError\u001b[0m: [Errno 2] No such file or directory: 'c:\\\\Github\\\\snits_siggen\\\\configs\\\\config.json'"
"data": {
"text/plain": [
"{'connection_info': {'DSNNAME': 'Tibero6',\n",
" 'DBUSER': 'snits',\n",
" 'DBPWD': 'snitsUIspxmworks#PW'},\n",
" 'paths': {'data': ['Data'],\n",
" 'intermediates': ['Intermediates'],\n",
" 'results': ['Results'],\n",
" 'tables': ['Data', 'tables'],\n",
" 'networks': ['Data', 'networks'],\n",
" 'scripts': ['Scripts']}}"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"c:\\Github\\snits_siggen\\Data\\tables\n"
]
}
],
@@ -50,7 +62,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
@@ -64,7 +76,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
@@ -88,7 +100,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
@@ -109,7 +121,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 8,
"metadata": {},
"outputs": [
{
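The notebook change above replaces a FileNotFoundError traceback with the loaded `config.json`, whose `paths` entries are lists of path components. The scripts splat these into `os.path.join`, which is where the printed `c:\Github\snits_siggen\Data\tables` comes from. A minimal sketch of that pattern, with `path_root` hard-coded here for illustration (the scripts derive it from `__file__`):

```python
import json
import os

# Sketch: resolve a configured folder from the component lists shown in
# the notebook output above. The config location is assumed to be
# Scripts/config.json, as in the deleted fetch_tables.py.
path_root = r'c:\Github\snits_siggen'  # hypothetical stand-in for the repo root
with open(os.path.join(path_root, 'Scripts', 'config.json'), 'r') as config_file:
    config = json.load(config_file)

path_tables = os.path.join(path_root, *config['paths']['tables'])
print(path_tables)  # -> c:\Github\snits_siggen\Data\tables
```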
