from urllib.parse import urlparse

from selenium import webdriver

from bs4 import BeautifulSoup

import json

import time

class X:
    """Crawls subway-station coordinates by driving Google search/maps with
    Selenium, and can dump the collected data to ``data.json``.

    ``self.ele`` maps station name -> {"lat", "lng", "check"}; ``lat``/``lng``
    start as None and are filled in by :meth:`urlReq`.  ``check`` is kept as
    the *string* "False"/"True" on purpose — it is written verbatim into the
    JSON output, so changing it to a bool would change the file format.
    """

    def __init__(self):
        # Stations of interest (Seoul metropolitan line 1 segment).
        stations = [
            "중동역", "부천역", "소사역", "역곡역", "온수역", "오류동역",
            "개봉역", "구일역", "구로역", "가산디지털단지역", "독산역",
            "금천구청역", "석수역", "관악역", "안양역", "명학역", "금정역",
            "범계역", "평촌역", "인덕원역",
        ]
        self.ele = {
            name: {"lat": None, "lng": None, "check": "False"}
            for name in stations
        }

        # Chromedriver binary path comes from ./driv_path.json.
        self.webdriv = webdriver.Chrome(X.jsonRead())

    @classmethod
    def jsonRead(cls):
        """Return the chromedriver path stored in ``./driv_path.json``.

        Returns "" when the file is missing or has no "path" key, so the
        caller always receives a string.
        """
        path = ""
        try:
            with open("./driv_path.json", "r", encoding="utf-8") as json_file:
                doc = json.load(json_file)
                # Default to "" (not None) so webdriver.Chrome() gets a str.
                path = doc.get("path", "")
        except FileNotFoundError as e:
            print(e)
        return path

    def urlReq(self):
        """For each station: search its name on Google, open the "지도"
        (Maps) tab, and parse lat/lng out of the resulting Maps URL into
        ``self.ele``.
        """
        self.webdriv.get(url="https://www.google.com/")
        # Implicitly wait up to 3 seconds for web resources to load.
        self.webdriv.implicitly_wait(3)

        for k in self.ele.keys():
            # Open each search in a fresh window so we can close it and
            # return to the original window afterwards.
            windows_before = self.webdriv.window_handles[0]
            self.webdriv.execute_script("window.open('');")

            windows_after = self.webdriv.window_handles[1]
            self.webdriv.switch_to.window(window_name=windows_after)
            self.webdriv.get(url="https://www.google.com/")
            time.sleep(2)
            # NOTE(review): find_element_by_xpath is the Selenium 3 API,
            # removed in Selenium 4 (use find_element(By.XPATH, ...)).
            self.webdriv.find_element_by_xpath('//*[@id="tsf"]/div[2]/div/div[1]/div/div[1]/input')\
                .send_keys(k)

            try:
                # Google serves one of two page layouts; try the first
                # search-button xpath and fall back to the second.
                self.webdriv.find_element_by_xpath('//*[@id="tsf"]/div[2]/div/div[2]/div[2]/div/center/input[1]')\
                    .click()
            except Exception:
                # Was a bare `except:`; narrowed so Ctrl-C still interrupts.
                self.webdriv.find_element_by_xpath('//*[@id="tsf"]/div[2]/div/div[3]/center/input[1]')\
                    .click()
            finally:
                time.sleep(2)
                bsobj = BeautifulSoup(self.webdriv.page_source, "html.parser")
                # Result-type tabs under the search box ("전체", "지도", ...).
                e = bsobj.select("#hdtb-msb-vis > div > a")
                print(e)

                for n, i in enumerate(e):
                    if i.string == "지도":
                        print(n + 1, i)
                        # The DOM tab list is offset by one relative to the
                        # BeautifulSoup result list, hence n + 2.
                        self.webdriv.find_element_by_xpath('//*[@id="hdtb-msb-vis"]/div[' + str(n + 2) + ']/a').click()
                        time.sleep(10)
                        url = urlparse(self.webdriv.current_url)
                        # Maps URL path looks like:
                        #   /maps/place/<name>/@<lat>,<lng>,17z/...
                        r = str(url.path).split("/")[4]\
                            .replace("@", "")\
                            .replace(",17z", "")\
                            .split(",")

                        self.ele[k]["lat"] = float(r[0])   # latitude
                        self.ele[k]["lng"] = float(r[1])   # longitude
                        self.ele[k]["check"] = "True"      # data recorded
                        print(k)
                        self.webdriv.close()
                        self.webdriv.switch_to.window(windows_before)

    def mkJson(self):
        """Dump the collected station data to ``data.json`` (UTF-8, pretty)."""
        with open("data.json", "w", encoding="utf-8") as outfile:
            json.dump(self.ele, outfile, ensure_ascii=False, indent=4)

    # Destructor
    def __del__(self):
        """Best-effort browser shutdown when the object is collected."""
        # quit() (not close()) ends the whole WebDriver session so the
        # chromedriver process does not leak; close() only closes the
        # current window.  Guard against a partially-constructed instance
        # and against errors during interpreter shutdown.
        try:
            if getattr(self, "webdriv", None) is not None:
                self.webdriv.quit()
        except Exception:
            pass



================== (second source file: map-plotting module) ==================

from gmplot import gmplot

from gps_proj.map.m2 import X

# 위도 : 37.4881796

# 경도 : 126.7682477


class M:
    """Drives the X crawler and plots the collected stations with gmplot."""

    # NOTE(review): instantiated at class-definition time, so merely
    # importing this module launches a Chrome session as a side effect —
    # confirm this is intended.
    node = X()

    @classmethod
    def first(cls):
        """Run the crawl that fills in the station coordinates."""
        cls.node.urlReq()

    @classmethod
    def doScatterPoint(cls):
        """Draw the collected stations as a connected line on a Google map
        and write the result to ``t.html``."""
        # Map centered near Bucheon (lat 37.493988, lng 126.767433), zoom 10.
        gmap = gmplot.GoogleMapPlotter(37.493988, 126.767433, 10)

        # Skip stations the crawler failed to resolve: their lat/lng are
        # still None and gmplot cannot plot None coordinates.
        resolved = [
            (v.get('lat'), v.get('lng'))
            for v in cls.node.ele.values()
            if v.get('lat') is not None and v.get('lng') is not None
        ]
        lat = [p[0] for p in resolved]
        lng = [p[1] for p in resolved]
        # gmap.scatter(lat, lng, "#3B0B39", size=40, marker=False)
        gmap.plot(lat, lng, "cornflowerblue", edge_width=6)
        gmap.draw("t.html")


def main():
    """Crawl the station coordinates, then render them onto a map."""
    for step in (M.first, M.doScatterPoint):
        step()



# Run the crawl-and-plot pipeline only when executed as a script.
if __name__ == "__main__":

    main()



'언어 > python' 카테고리의 다른 글

2019년 3월 9일 ( 주말 프로젝트 )  (0) 2019.03.09
data crawling  (0) 2019.03.03
pysimplegui  (0) 2019.02.10
python + crawling + elasticsearch  (0) 2019.02.04
프로젝트 코드 일부분  (0) 2019.01.20