from bs4 import BeautifulSoup
import urllib.request as req
import os.path

# Statistics timestamp to query: yyyyMMddHHmmss
searchDate = '20201111000000'

# API key (placeholder — replace with a real key)
apikey = "api 키"

# Jeju ITS open-API endpoint template for daily traffic statistics
api = "http://openapi.jejuits.go.kr/rfcapi/rest/jejuits/getTrafficInfoDailyStat?authApiKey={key}&statDt={stat}"

# Build the request URL
url = api.format(key=apikey, stat=searchDate)
print("URL : ", url)

# Download the XML response once; reuse the cached file on later runs
fileName = "jejuTraffic.xml"
if not os.path.exists(fileName):
    req.urlretrieve(url, fileName)
    print("if not os.path.exists")

# Parse the downloaded file.
# FIX: context manager closes the file handle (original leaked it).
with open(fileName, "r", encoding="utf-8") as f:
    xml_data = f.read()
soup = BeautifulSoup(xml_data, 'html.parser')

# Group each <list> record under its road-link id
info = {}
for trafficList in soup.find_all("data"):
    for location in trafficList.find_all("list"):
        linkId = location.find("link_id").string
        speed = location.find("sped").string
        # <tfyl> (traffic volume) may be missing; default to 0 then.
        # FIX: single lookup + identity check instead of `find(...) != None`.
        tfyl = 0
        tfylTag = location.find("tfyl")
        if tfylTag is not None:
            tfyl = tfylTag.string
        trvlhh = location.find("trvl_hh").string
        ocpyrate = location.find("ocpy_rate").string
        # FIX: setdefault replaces the manual membership-test initialization
        info.setdefault(linkId, []).append(
            {'tfyl': tfyl, 'speed': speed,
             'ocpyrate': ocpyrate, 'trvlhh': trvlhh})

# Print the collected statistics per road link
for linkId in info:
    print("** 구간 ID : ", linkId)
    for i in info[linkId]:
        print(" - 교통량 : ", i['tfyl'])
        print(" - 통계일시 : ", searchDate[:4], '년 ', searchDate[4:6], '월 ',
              searchDate[6:8], '일 ', searchDate[8:10], '시 ',
              searchDate[10:12], '분 ', searchDate[12:], '초', sep='')
        print(" - 평균속도 : ", i['speed'])
        print(" - 점유율 : ", i['ocpyrate'])
        print(" - 통행시간 : ", i['trvlhh'])
        print("")