##
# date
##
from elasticsearch import Elasticsearch
import requests as req
from selenium.webdriver import chrome     # imported but not used in this script
from urllib.parse import urlencode        # imported but not used in this script
import json
import yaml

from bs4 import BeautifulSoup

from time import localtime
import time
import os
import sys

# ========================================


class NaverNews:

    def __init__(self):

        self.elastiClient = NaverNews.elaInformation()

        urlSettingObj = NaverNews.urlRequestSetting()

        ## url information
        self.reqUrl = urlSettingObj.get("url")

        ## ex) format => 20190712
        self.currTimeObj = NaverNews.getCurrTime()

        self.urlInfo = {
            "etcParams": urlSettingObj.get("etcParam"),
            "page": None
        }

    """ Title 에 내가 원하는 단어가 있니??
    """
    def isTrue(self):

        ## ====================================================================
        # Move into the directory where the crawled HTML files were saved
        ## ====================================================================
        htmlPath = r"C:\Users\ezfarm\PycharmProjects\ElasticSearchProj\htmlObj"
        os.chdir(htmlPath)

        for htmlF in os.listdir():
            abstractPath = os.path.abspath(htmlF)
            print(abstractPath)

            ### =============================
            # Read the saved HTML file
            ### =============================
            try:

                htmlFileRead = open(abstractPath, encoding="utf-8")

            except FileNotFoundError as e:
                print(e)
            else:
                ### =============================
                # Parse the HTML and collect the headline <li> blocks
                ### =============================
                bsObject = BeautifulSoup(htmlFileRead, "html.parser")
                htmlFileRead.close()

                HEAD_LINE = bsObject.select("ul.type06_headline > li")

                for h in HEAD_LINE:
                    try:
                        # Items with a thumbnail carry two <dt> tags; the second holds the headline link
                        headline = h.select("dl > dt")[1]
                    except IndexError:
                        # No thumbnail: the single <dt> is the headline
                        try:

                            headline = h.select_one("dl > dt")
                        except Exception:
                            print("headline parse error")
                        else:
                            responseObj = self.textPreprocessing(headline.a.string)

                            if responseObj["isTrue"]:
                                self.elasticInsertDocuments(responseObj["title"],
                                                            h.select_one("dl > dd > span.lede").string)
                    else:
                        responseObj = self.textPreprocessing(headline.a.string)

                        if responseObj["isTrue"]:
                            self.elasticInsertDocuments(responseObj["title"],
                                                        h.select_one("dl > dd > span.lede").string)


    def textPreprocessing(self, txt):
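        # e.g. "  김정은, 미사일 발사 보도\n" -> {"title": "김정은, 미사일 발사 보도", "isTrue": True}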
        tmp = str(txt).strip().replace("\n", "")
        mark = {"title": tmp, "isTrue": False}

        for i in ["김정은", "이명박", "미사일"]:

            if i in tmp:
                mark["isTrue"] = True
                break

        return mark

    def elasticInsertDocuments(self, title, hObject):

        documents = {
            "title"   : title,
            "context" : hObject,
            "cllctdt" : self.currTimeObj   # collection date, e.g. 20190712
        }

        try:

            self.elastiClient.index(
                index    ="naver_headline_index",   # index to write into
                doc_type ="doc",                    # mapping type (pre-7.x Elasticsearch)
                body     =documents
            )
        except Exception:
            print("indexing failed !!!")
        else:
            time.sleep(1.2)
            print("elasticsearch insert success !!!")
            print(documents)

    def doRequests(self):

        for n, p in enumerate(range(1, 95)):
            self.urlInfo["page"] = str(p)
            """
            mode=LSD&mid=sec&sid1=100&date=20190712&page=7
            """
            paramsEtc = self.urlInfo["etcParams"]  + "&" + \
                        "date=" + self.currTimeObj + "&" + \
                        "page=" + self.urlInfo["page"]

            requestUrl = self.reqUrl + "?" + paramsEtc

            try:

                html = req.get(requestUrl)
            except req.exceptions.RequestException as e:
                print (e)
                sys.exit(1)
            else:
                # print("working on page {} ...".format(n+1))
                # bsObject = BeautifulSoup(html.text, "html.parser")
                htmlName = "html_file_{}.html".format(str(n+1))
                htmlFile = open(r"C:\Users\ezfarm\PycharmProjects\ElasticSearchProj\htmlObj\{}".format(htmlName),
                                "w", encoding="utf-8")

                try:

                    htmlFile.write(html.text)
                except Exception:
                    print("html file write error")
                else:
                    print("data file {} written successfully !!!".format(n+1))
                finally:
                    htmlFile.close()

    """ reuqest setting 
    """

    @classmethod
    def urlRequestSetting(cls):

        try:

            f = open(r"C:\Users\ezfarm\PycharmProjects\ElasticSearchProj\conf\url.yml", "r", encoding="utf-8")

        except FileNotFoundError as e:
            print(e)
            sys.exit(1)
        else:
            yDoc = yaml.load(f, Loader=yaml.Loader)
            f.close()  # close the file
            return yDoc


    """ 검색 날짜 설정 
    """
    @classmethod
    def getCurrTime(cls):

        currObjTime = time.strftime("%Y%m%d", localtime())
        return currObjTime


    """ elasticsearch server가 살아 있는지 확인 
    """
    @classmethod
    def isAliveElastic(cls, elaAddress):

        try:

            req.get("http://" + elaAddress + ":9200")

        except req.exceptions.RequestException as e:
            """ server is die !!
            """
            print(e)
            sys.exit(1)
        else:
            print("elasticsearch server is alive !!!")
            return Elasticsearch(host=elaAddress)


    """ elasticsearch server address 정보 return
    """
    @classmethod
    def elaInformation(cls):

        path = r"C:\Users\ezfarm\PycharmProjects\ElasticSearchProj\conf\elainfo.json"

        try:

            f = open(path, "r", encoding="utf-8")
        except FileNotFoundError as e:
            print(e)
            sys.exit(1)
        else:
            jsonDoc = json.load(f)
            f.close()
            elasticNode = NaverNews.isAliveElastic(jsonDoc.get("ela"))
            return elasticNode


def main():
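    # Typical run order: doRequests() first to download the headline list pages into
    # htmlObj/, then isTrue() to parse the saved files and index the matching headlines.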
    elanode = NaverNews()
    elanode.isTrue()
    #elanode.doRequests()

if __name__ == "__main__":
    main()
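
The script expects two small config files under conf/ in the project directory (the crawler reads them from C:\Users\ezfarm\PycharmProjects\ElasticSearchProj\conf). Below is a minimal sketch of what they might contain, written as a one-off Python helper; the keys (url, etcParam, ela) are the ones the code above actually reads, but the base URL and the host value are assumptions, not something shown in the post.

import json
import os

import yaml

# Run this from the project root so conf/ matches the paths hard-coded in the crawler.
os.makedirs("conf", exist_ok=True)

# conf/url.yml : base url + fixed query params read by urlRequestSetting()
urlConf = {
    "url": "https://news.naver.com/main/list.nhn",   # assumed Naver list endpoint
    "etcParam": "mode=LSD&mid=sec&sid1=100"          # matches the query string shown in doRequests()
}

# conf/elainfo.json : elasticsearch host read by elaInformation()
elaConf = {"ela": "127.0.0.1"}                       # assumed host, adjust to your server

with open(os.path.join("conf", "url.yml"), "w", encoding="utf-8") as f:
    yaml.dump(urlConf, f, allow_unicode=True)

with open(os.path.join("conf", "elainfo.json"), "w", encoding="utf-8") as f:
    json.dump(elaConf, f, ensure_ascii=False)

Once the crawler has run, the documents can be spot-checked with a simple match query, e.g. elastiClient.search(index="naver_headline_index", body={"query": {"match": {"title": "미사일"}}}).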
