crawling + json

Language/java · 2019. 2. 13. 06:17

package stuProj19021202;

import java.io.FileWriter;
import java.io.IOException;

import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.jsoup.Connection;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

public class Crlng {

    // #########################################
    static Connection.Response response = null;
    static Document naverDoc = null;
    static Element element = null;
    static Elements elements = null;
    static JSONArray jsonArry = new JSONArray();
    static FileWriter fw = null;
    // #########################################

    // Request the Naver main page and collect the keyword list into a JSONArray.
    public static void urlRequests() {
        try {
            response = Jsoup.connect("http://www.naver.com")
                    .method(Connection.Method.GET)
                    .execute();

            naverDoc = response.parse();
            element = naverDoc.selectFirst("ul.ah_l");   // keyword list container (Naver markup at the time of writing)
            elements = element.select("li.ah_item a");   // each keyword anchor

            for (int i = 0; i < elements.size(); i++) {
                JSONObject jsonObj = new JSONObject();
                // key: rank (span.ah_r), value: keyword text (span.ah_k)
                jsonObj.put(elements.get(i).select("span.ah_r").text(),
                            elements.get(i).select("span.ah_k").text());
                jsonArry.add(jsonObj);
            }

            System.out.println(jsonArry);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    // Write the collected JSONArray to mkjs.json next to this class file.
    public static void mkJsonFile() {
        String path = Crlng.class.getResource("").getPath();
        try {
            fw = new FileWriter(path + "mkjs.json");
            fw.write(jsonArry.toJSONString());
            fw.flush();
            fw.close();
        } catch (IOException e1) {
            e1.printStackTrace();
        }
        System.out.println("json file created successfully!");
    }
}
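Note: in mkJsonFile() the FileWriter is never closed if write() throws. A minimal sketch of the same step using try-with-resources (the class and method names here are hypothetical, not from the original post):

package stuProj19021202;

import java.io.FileWriter;
import java.io.IOException;

import org.json.simple.JSONArray;

// Hypothetical helper (not in the original post): writes a JSONArray to mkjs.json
// using try-with-resources so the FileWriter is always closed.
public class JsonFileWriterSketch {

    public static void write(JSONArray jsonArry) {
        String path = JsonFileWriterSketch.class.getResource("").getPath();
        try (FileWriter writer = new FileWriter(path + "mkjs.json")) {
            writer.write(jsonArry.toJSONString());
        } catch (IOException e) {
            e.printStackTrace();
        }
        System.out.println("json file created successfully!");
    }
}

Because try-with-resources closes the writer automatically, the explicit flush()/close() calls are no longer needed.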



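For reference, a minimal entry point to run the two steps in order might look like this (the original post does not show one; CrlngMain is a hypothetical name):

package stuProj19021202;

// Hypothetical entry point: runs the crawl, then writes the collected keywords to mkjs.json.
public class CrlngMain {

    public static void main(String[] args) {
        Crlng.urlRequests();  // fetch http://www.naver.com and build the JSONArray
        Crlng.mkJsonFile();   // dump the JSONArray to mkjs.json next to the class files
    }
}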