import sched
import time
import os
import json
import csv
from datetime import datetime, timedelta

import pyodbc

# Create the scheduler instance
scheduler = sched.scheduler(time.time, time.sleep)
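
# Note: sched runs every job on the calling thread, so a slow fetch for one
# table delays the other jobs queued for the same 5-minute boundary.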


def fetch_table(scheduler, table, condition="", is_move=False):
    now = datetime.now()
    timestamp = int(time.mktime(now.timetuple()))
    timestamp -= timestamp % 5  # align to a Unix time whose seconds digit is 0 or 5
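    # e.g. a timestamp of 1700000003 becomes 1700000003 - 3 = 1700000000,
    # so the folder name is always a 5-second-aligned epoch value.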
    folder_name = str(timestamp)
    # path_root is a module global set in the __main__ block below
    folder_path = os.path.join(path_root, 'Data', 'fetched_tables', folder_name)

    os.makedirs(folder_path, exist_ok=True)

    if is_move:
        # The move table must be captured at 5-second granularity, so write one
        # file per 5-second slot covering the last five minutes (-300..0).
        # NOTE: the condition does not vary with current_timestamp, so every
        # file in this loop currently receives the same result set.
        for offset in range(-300, 1, 5):
            current_timestamp = timestamp + offset
            file_name = f'move_{current_timestamp}.csv'
            file_path = os.path.join(folder_path, file_name)
            fetch_and_save(table, file_path, condition)
    else:
        file_path = os.path.join(folder_path, f'{table}.csv')
        fetch_and_save(table, file_path, condition)

    # Re-register this job so it runs again at the next 5-minute boundary.
    next_run = now + timedelta(minutes=5 - now.minute % 5, seconds=-now.second, microseconds=-now.microsecond)
    scheduler.enterabs(time.mktime(next_run.timetuple()), 1, fetch_table, (scheduler, table, condition, is_move))
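    # Worked example of the rounding above: at 10:07:23.500000 the delta is
    # +3 min, -23 s, -500000 us = +156.5 s, which lands exactly on 10:10:00.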


def fetch_and_save(table, file_path, condition):
    # schema and cursor are module globals set in the __main__ block; table
    # names and conditions come from the fixed mapping there, so the f-string
    # query is not exposed to external input.
    query = f"SELECT * FROM {schema}.{table} {condition}"
    cursor.execute(query)
    # utf-8-sig writes a BOM so the CSV opens correctly in Excel
    with open(file_path, 'w', newline='', encoding='utf-8-sig') as csvfile:
        csv_writer = csv.writer(csvfile)
        columns = [column[0] for column in cursor.description]
        csv_writer.writerow(columns)
        for row in cursor.fetchall():
            csv_writer.writerow(row)
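

# Sketch (hypothetical helper, not called anywhere in this script): the
# fetchall() above materialises the whole result set in memory; the DB-API
# fetchmany() call streams rows in bounded chunks for very large tables.
def fetch_and_save_chunked(table, file_path, condition, chunk_size=10000):
    query = f"SELECT * FROM {schema}.{table} {condition}"
    cursor.execute(query)
    with open(file_path, 'w', newline='', encoding='utf-8-sig') as csvfile:
        csv_writer = csv.writer(csvfile)
        csv_writer.writerow([column[0] for column in cursor.description])
        while True:
            rows = cursor.fetchmany(chunk_size)
            if not rows:
                break
            csv_writer.writerows(rows)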


if __name__ == "__main__":
    path_root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    credentials_path = os.path.join(path_root, 'Scripts', 'credentials.json')
    with open(credentials_path, 'r') as file:
        credentials = json.load(file)
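
    # credentials.json is expected to contain exactly these keys
    # (placeholder values shown):
    #   {"DSNNAME": "my_dsn", "DBUSER": "user", "DBPWD": "secret"}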

    DSNNAME = credentials["DSNNAME"]
    DBUSER = credentials["DBUSER"]
    DBPWD = credentials["DBPWD"]

    cnxn = pyodbc.connect(f'DSN={DSNNAME};UID={DBUSER};PWD={DBPWD};charset=utf-8')
    cursor = cnxn.cursor()
    schema = 'snits_siggen'

    tables_and_conditions = {
        'inter_info': "",
        'angle': "WHERE collect_unix = (SELECT MAX(collect_unix) FROM angle)",
        'plan': "WHERE collect_unix = (SELECT MAX(collect_unix) FROM plan)",
        'history': "WHERE end_unix >= UNIX_TIMESTAMP() - 9000",  # replace UNIX_TIMESTAMP() with the equivalent TiberoDB function
        'move': ""  # the move table is handled separately in fetch_table
    }
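
    # Assumption (untested): Tibero is largely Oracle-compatible, so the
    # UNIX_TIMESTAMP() placeholder above could likely be written with
    # Oracle-style date arithmetic such as (SYSDATE - DATE '1970-01-01') * 86400,
    # with care taken over time zones.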

    # Schedule the first run of every job, then start the scheduler.
    now = datetime.now()
    initial_delay = timedelta(minutes=5 - now.minute % 5, seconds=-now.second, microseconds=-now.microsecond).total_seconds()
    for table, condition in tables_and_conditions.items():
        is_move = (table == 'move')
        scheduler.enter(initial_delay, 1, fetch_table, (scheduler, table, condition, is_move))

    scheduler.run()
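
    # Possible refinement (sketch, not in the original flow): since every job
    # re-schedules itself, scheduler.run() blocks indefinitely; a try/finally
    # around it would close the ODBC resources on Ctrl+C or error:
    #
    #     try:
    #         scheduler.run()
    #     finally:
    #         cursor.close()
    #         cnxn.close()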