{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "import pyodbc, os, json, csv\n", "from datetime import datetime" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "{'connection_info': {'DSNNAME': 'Tibero6',\n", " 'DBUSER': 'snits',\n", " 'DBPWD': 'snitsUIspxmworks#PW'},\n", " 'paths': {'data': ['Data'],\n", " 'intermediates': ['Intermediates'],\n", " 'results': ['Results'],\n", " 'tables': ['Data', 'tables'],\n", " 'networks': ['Data', 'networks'],\n", " 'scripts': ['Scripts']}}" ] }, "metadata": {}, "output_type": "display_data" }, { "name": "stdout", "output_type": "stream", "text": [ "c:\\github\\siggen\\Data\\tables\n" ] } ], "source": [ "# 루트폴더 지정\n", "path_root = os.path.dirname(os.path.dirname(os.path.abspath('.')))\n", "with open(os.path.join(path_root, 'configs', 'config_revised.json'), 'r') as config_file:\n", " config = json.load(config_file)\n", "\n", "display(config)\n", "\n", "starting_time = datetime.now()\n", "\n", "# 주요 폴더 경로 지정\n", "paths = config['paths']\n", "path_tables = os.path.join(path_root, *paths['tables'])\n", "# path_results = os.path.join(path_root, *paths['results'])\n", "\n", "print(path_tables)\n", "# print(path_results)" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "config = {'connection_info': {'DSNNAME': 'Tibero6',\n", " 'DBUSER': 'snits',\n", " 'DBPWD': 'snitsUIspxmworks#PW'},\n", " 'paths': {'data': ['Data'],\n", " 'intermediates': ['Intermediates'],\n", " 'results': ['Results'],\n", " 'tables': ['Data', 'tables'],\n", " 'networks': ['Data', 'networks'],\n", " 'scripts': ['Scripts']}}" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [], "source": [ "connection_info = config['connection_info']\n", "DSNNAME = connection_info[\"DSNNAME\"]\n", "DBUSER = connection_info[\"DBUSER\"]\n", "DBPWD = connection_info[\"DBPWD\"]\n", "cnxn = pyodbc.connect(f'DSN={DSNNAME};UID={DBUSER};PWD={DBPWD};charset=utf-8')\n", "cursor = cnxn.cursor()" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [], "source": [ "# 스키마 및 테이블명\n", "schema = 'SNMS'\n", "tables = ['TC_IF_TOD_DAY_PLAN', # TOD 일계획 현황#\n", " 'TC_IF_TOD_HOLIDAY_PLAN', # TOD 휴일 계획#\n", " 'TC_IF_TOD_RED_YELLO', # TOD 적색 및 황색 시간#\n", " 'TC_IF_TOD_WEEK_PLAN', # TOD 주 계획#\n", " # 'TL_IF_SIGL', # 신호 운영 이력\n", " 'TL_IF_SIGL_CYCL', # 신호 운영 주기 이력#\n", " 'TM_FA_CRSRD', # 교차로 마스터#\n", " 'TN_IF_SIGL_FLOW', # 신호 현시 별 이동류 방향#\n", " ]\n", "\n", "# 폴더 Data\\tables\\yyyymmdd_hhmmss 생성\n", "timestamp = starting_time.strftime('%Y%m%d')\n", "# base_dir = os.path.join(path_tables, timestamp)\n", "os.makedirs(os.path.join(path_tables, timestamp), exist_ok=True)\n" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "'2024_0717_18_00_00'" ] }, "execution_count": 6, "metadata": {}, "output_type": "execute_result" } ], "source": [ "from datetime import datetime, timedelta\n", "end_dt = datetime(2024, 7, 17, 18, 0)\n", "end_dt.strftime(\"%Y_%m%d_%H_%M_%S\")" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "2024_0717_17_55_20\n", "2024_0717_18_00_20\n", "2024-07-17 17:55:20\n", "2024-07-17 18:00:20\n" ] } ], "source": [ "term_dt = '2024_0717_18_00_20'\n", "term_DT = datetime.strptime(term_dt, '%Y_%m%d_%H_%M_%S')\n", "init_DT = term_DT - timedelta(minutes=5)\n", "init_dt = 
init_DT.strftime(\"%Y_%m%d_%H_%M_%S\")\n", "print(init_dt)\n", "print(term_dt)\n", "print(init_DT)\n", "print(term_DT)" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [], "source": [ "def fetch_table(table, condition=\"\"):\n", " try:\n", " query = f\"SELECT * FROM {schema}.{table} {condition}\"\n", " cursor.execute(query)\n", " csv_file_path = os.path.join(path_tables, timestamp, f\"{table}.csv\")\n", " with open(csv_file_path, 'w', newline='', encoding='utf-8-sig') as csvfile:\n", " csv_writer = csv.writer(csvfile)\n", " columns = [column[0] for column in cursor.description]\n", " csv_writer.writerow(columns)\n", " for row in cursor.fetchall():\n", " csv_writer.writerow(row)\n", " except Exception as e:\n", " print(f\"오류 발생: {e}\")" ] }, { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "TC_IF_TOD_DAY_PLAN\n", "TC_IF_TOD_HOLIDAY_PLAN\n", "TC_IF_TOD_RED_YELLO\n", "TC_IF_TOD_WEEK_PLAN\n", "TL_IF_SIGL_CYCL\n", "TM_FA_CRSRD\n", "TN_IF_SIGL_FLOW\n" ] } ], "source": [ "for table in tables:\n", " print(table)\n", " fetch_table(table)" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [], "source": [ "fetch_table('TL_IF_SIGL', condition=\"WHERE TRUNC(PHASE_DT) = TO_DATE('2024-07-29', 'YYYY-MM-DD') AND CRSRD_ID IN (436, 437, 438, 442, 443, 444, 455, 456, 457, 458)\")" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "SELECT * FROM SNMS.TC_IF_TOD_DAY_PLAN\n" ] } ], "source": [ "print(\"SELECT * FROM SNMS.TC_IF_TOD_DAY_PLAN\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "ta", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.12.4" } }, "nbformat": 4, "nbformat_minor": 2 }