이혜인

Crawling again with JS: ticketing-rate crawling

const cheerio = require('cheerio');
const puppeteer = require('puppeteer');
// selenium-webdriver is listed in package.json but is not used in this script yet.
const { Builder, By, Key, until } = require('selenium-webdriver');
const chrome = require('selenium-webdriver/chrome'); // needed when driving Chrome

const booking_url = "https://megabox.co.kr/booking"; // booking crawl is handled in the Python script below
const rate_url = "https://www.megabox.co.kr/movie";

let movie_data = [];

(async () => {
    // Render the movie list with Puppeteer, then parse the HTML with cheerio.
    const browser = await puppeteer.launch({
        headless: true
    });
    const page = await browser.newPage();
    await page.goto(rate_url);
    const content = await page.content();
    const $ = cheerio.load(content);

    // Each <li> in ol.list holds one movie: title attribute + ticketing-rate text.
    const $rate_lists = $("ol.list>li");
    $rate_lists.each((index, list) => {
        const name = $(list).find('div.tit-area > p.tit').attr('title');
        const rate = $(list).find('div.rate-date > span.rate').text();
        movie_data.push({ rank: index + 1, name, rate }); // store the scraped values
    });

    for (const movie of movie_data) {
        console.log(movie);
    }

    await browser.close();
})();
from bs4 import BeautifulSoup
from selenium import webdriver
import chromedriver_autoinstaller

# Install a chromedriver that matches the local Chrome version.
chromedriver_autoinstaller.install()

booking_url = "https://megabox.co.kr/booking"
rate_url = "https://www.megabox.co.kr/movie"

options = webdriver.ChromeOptions()
options.add_argument("headless")  # run without opening a browser window
options.add_experimental_option("excludeSwitches", ["enable-logging"])

# One driver for the booking page, another for the movie (ticketing-rate) page.
driver = webdriver.Chrome(options=options)
driver.maximize_window()
# driver.implicitly_wait(2)
driver.get(booking_url)

driver2 = webdriver.Chrome(options=options)
driver2.maximize_window()
# driver2.implicitly_wait(2)
driver2.get(rate_url)

theater_location = dict()

# The booking UI is rendered inside an iframe.
# iframes = driver.find_elements_by_css_selector('iframe')
driver.switch_to.frame('frameBokdMBooking')
page1 = driver.page_source
soup1 = BeautifulSoup(page1, "html.parser")

# Branch buttons grouped by region.
seoul = soup1.select("#mCSB_4_container>ul>li>button")
Gyeonggi = soup1.select("#mCSB_5_container>ul>li>button")
Incheon = soup1.select("#mCSB_6_container>ul>li>button")
DCS = soup1.select("#mCSB_7_container>ul>li>button")  # Daejeon / Chungcheong / Sejong
BDG = soup1.select("#mCSB_8_container>ul>li>button")  # Busan / Daegu / Gyeongsang
GJ = soup1.select("#mCSB_9_container>ul>li>button")   # Gwangju / Jeolla
Gangwon = soup1.select("#mCSB_10_container>ul>li>button")
loc = [seoul, Gyeonggi, Incheon, DCS, BDG, GJ, Gangwon]

def get_location_code(location):
    # Map branch name -> branch number from the button attributes.
    for brch in location:
        theater_location[brch['brch-nm']] = brch['brch-no']

for parameter in loc:
    get_location_code(parameter)

page2 = driver2.page_source
soup2 = BeautifulSoup(page2, "html.parser")
ticketing_rate = soup2.select('.rate')
movie_name = soup2.select('.tit-area > p.tit')
get_movie_info = soup1.select("#mCSB_1_container>ul>li>button")

movie_dict = dict()
rank = 1

# movie name -> [movie number, form-at flag]
for movie in get_movie_info:
    movie_dict[movie['movie-nm']] = [movie['movie-no'], movie['form-at']]

# Attach the ticketing rate scraped from the movie page.
for r, m in zip(ticketing_rate, movie_name):
    movie_dict[m['title']].append(r.string)

for value in movie_dict.values():
    if len(value) == 2:
        value.append("예매율 0.0%")  # default rate for titles missing from the rate page
    if rank <= 10:
        value.append({'rank': rank})
    rank += 1

# form-at: on the first load, tells whether the movie screens on that date (regardless of location).
# After querying with brch-no, form-at needs to be checked again.
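
As a follow-up to the two comments above, here is a rough Node/puppeteer sketch (kept in JS to match the rest of the project) of that next step: select one branch button inside the frameBokdMBooking iframe, then re-read the form-at attribute on the movie buttons. The file name, the fixed one-second wait, the choice of the first Seoul branch, and the assumption that selecting a branch refreshes form-at are all illustrative, not confirmed behaviour of the site.

// check_form_at.js -- hypothetical sketch, not part of the original code.
const puppeteer = require('puppeteer');

const booking_url = "https://megabox.co.kr/booking";

(async () => {
    const browser = await puppeteer.launch({ headless: true });
    const page = await browser.newPage();
    await page.goto(booking_url, { waitUntil: 'networkidle2' });

    // The booking UI lives inside the same frameBokdMBooking iframe the Python crawl switches into.
    const frame = page.frames().find((f) => f.name() === 'frameBokdMBooking');
    if (!frame) throw new Error('booking iframe not found');

    // Click the first Seoul branch button (selector taken from the crawl above).
    await frame.waitForSelector('#mCSB_4_container>ul>li>button');
    await frame.click('#mCSB_4_container>ul>li>button');

    // Assumption: after a branch is selected, the movie buttons are re-rendered,
    // so form-at has to be read again to see what is bookable at that branch.
    await new Promise((resolve) => setTimeout(resolve, 1000));
    const movies = await frame.$$eval('#mCSB_1_container>ul>li>button', (btns) =>
        btns.map((b) => ({
            name: b.getAttribute('movie-nm'),
            formAt: b.getAttribute('form-at'),
        }))
    );
    console.log(movies);

    await browser.close();
})();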
{
  "name": "me",
  "version": "1.0.0",
  "description": "",
  "main": "app.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
  "dependencies": {
    "body-parser": "^1.20.0",
    "cheerio": "^1.0.0-rc.11",
    "express": "^4.18.1",
    "puppeteer": "^14.1.1",
    "selenium-webdriver": "^4.1.2"
  }
}
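
package.json sets "main" to app.js and pulls in express, but no app.js appears above. A minimal sketch of what it could look like, assuming the rate crawl is wrapped in a crawlRates() helper and exposed on a /movies route; the route path, the port, and the helper name are assumptions, not part of the original code.

// app.js -- minimal sketch; route, port and crawlRates() are illustrative assumptions.
const express = require('express');
const puppeteer = require('puppeteer');
const cheerio = require('cheerio');

const rate_url = "https://www.megabox.co.kr/movie";

// Re-runs the same puppeteer + cheerio crawl as the script above and returns the list.
async function crawlRates() {
    const browser = await puppeteer.launch({ headless: true });
    try {
        const page = await browser.newPage();
        await page.goto(rate_url);
        const $ = cheerio.load(await page.content());
        const movies = [];
        $("ol.list>li").each((index, list) => {
            movies.push({
                rank: index + 1,
                name: $(list).find('div.tit-area > p.tit').attr('title'),
                rate: $(list).find('div.rate-date > span.rate').text(),
            });
        });
        return movies;
    } finally {
        await browser.close();
    }
}

const app = express();

// GET /movies returns the freshly crawled ranking as JSON.
app.get('/movies', async (req, res) => {
    try {
        res.json(await crawlRates());
    } catch (err) {
        res.status(500).json({ error: err.message });
    }
});

app.listen(3000, () => console.log('listening on http://localhost:3000'));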