# forum_scraper.R
# Scrapes short forum comments from HardwareZone (Eat-Drink-Man-Woman) and
# SammyBoy (The Courtyard Cafe), then saves the combined, de-duplicated
# text to forum_text.rds.
library(tidyverse)  # attaches dplyr and friends, so a separate library(dplyr) is not needed
library(rvest)
### HardwareZone forums (HWZ)
# get the forum's page count from the pagination text (the number after "of")
url_hwz <- "https://forums.hardwarezone.com.sg/eat-drink-man-woman-16/"
html_hwz <- read_html(url_hwz)
page_count_hwz <-
  html_nodes(html_hwz, '.desc') %>%
  html_text() %>%
  gsub('.*of ', '', .) %>%
  as.integer()
paste('Page count:', page_count_hwz[1], sep=' ')
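# Hedged aside: if the pagination text ever includes thousands separators
# (e.g. "of 1,234" -- an assumption about the markup, not verified here),
# as.integer() above would return NA. A sketch of a more defensive parse;
# parse_page_count() is illustrative and not used by the rest of the script.
parse_page_count <- function(txt) {
  txt %>%
    gsub('.*of ', '', .) %>%
    gsub('[^0-9]', '', .) %>%  # keep digits only, dropping commas and stray text
    as.integer()
}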
part_links <- vector()
# collect thread links from the first 100 index pages, sorted so the threads
# with the most replies come first
for (i in 1:100) {
  url_hwz <- paste("https://forums.hardwarezone.com.sg/eat-drink-man-woman-16/index", i,
                   ".html?sort=replycount&order=desc",
                   sep='')
  html_hwz <- read_html(url_hwz)
  part_links_temp <-
    html_nodes(html_hwz, '#threadslist') %>%
    html_children() %>%
    html_nodes('a') %>%
    html_attr('href')
  # keep only thread links: drop user profiles, misc pages, post anchors, and
  # intra-thread pagination; !grepl() keeps every link when nothing matches,
  # where a negative grep() index would instead return an empty vector
  part_links_temp <-
    part_links_temp[
      !grepl('\\/eat-drink-man-woman-16\\/[[:punct:]]|users|misc|\\#post|\\-\\d\\.html', part_links_temp)] %>%
    unique()
  part_links <- c(part_links, part_links_temp)
}
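# Hedged aside: the loops in this script fire requests back to back. Below is
# a sketch of a politer fetch with a pause and a single retry;
# read_html_politely() is illustrative only, and the scraping code in this
# script does not depend on it.
read_html_politely <- function(url, pause = 1) {
  Sys.sleep(pause)  # rate-limit between requests
  tryCatch(
    read_html(url),
    error = function(e) {
      message('Retrying ', url, ' after error: ', conditionMessage(e))
      Sys.sleep(pause * 5)  # back off before the single retry
      read_html(url)
    }
  )
}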
forum_text_hwz <- vector()
for (i in seq_along(part_links)) {
  url_hwz_post <- paste("https://forums.hardwarezone.com.sg", part_links[i], sep='')
  html_hwz_post <- read_html(url_hwz_post)
  forum_text_temp <-
    html_nodes(html_hwz_post, '.post_message') %>%
    html_text()
  # keep posts of at most 280 characters (Twitter's limit as a benchmark);
  # the cap also drops posts bloated by quoted HTML remnants, and control
  # characters are replaced with spaces
  forum_text_temp <-
    forum_text_temp[nchar(forum_text_temp) <= 280] %>%
    gsub('[[:cntrl:]]', ' ', .)
  forum_text_hwz <- c(forum_text_hwz, forum_text_temp) %>% unique()
}
paste('Number of forum comments:', length(forum_text_hwz), sep=' ')
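# quick, purely illustrative spot check of the scraped text
head(forum_text_hwz, 3)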
### SammyBoy forums (SBF)
# get page count: strip control characters, the leading "... of ", and any
# letters from the pagination text, leaving just the page number
url_sbf <- "https://www.sammyboy.com/forums/the-courtyard-cafe.2/"
html_sbf <- read_html(url_sbf)
page_count_sbf <-
  html_nodes(html_sbf, '.block-outer') %>%
  html_children() %>%
  html_text() %>%
  gsub('[[:cntrl:]]', '', .) %>%
  gsub('.*of ', '', .) %>%
  gsub('[[:alpha:]]', '', .) %>%
  as.integer()
paste('Page count:', page_count_sbf[1], sep=' ')
part_links <- vector()
# select a random sample of 30 distinct index pages; sampling without
# replacement avoids fetching the same page twice
sample_pages_sbf <- sample(page_count_sbf[1], 30)
for (page_no in sample_pages_sbf) {
  url_sbf <- paste("https://www.sammyboy.com/forums/the-courtyard-cafe.2/page-", page_no,
                   sep='')
  html_sbf <- read_html(url_sbf)
  part_links_temp <-
    html_nodes(html_sbf, '.structItem-cell--main') %>%
    html_children() %>%
    html_nodes('a') %>%
    html_attr('href')
  # drop member-profile links, keeping only thread links
  part_links_temp <-
    part_links_temp[!grepl('\\/members', part_links_temp)] %>%
    unique()
  part_links <- c(part_links, part_links_temp)
}
forum_text_sbf <- vector()
for (i in seq_along(part_links)) {
  url_sbf_post <- paste("https://www.sammyboy.com", part_links[i], sep='')
  html_sbf_post <- read_html(url_sbf_post)
  forum_text_temp <-
    html_nodes(html_sbf_post, '.bbWrapper') %>%
    html_text()
  # same 280-character cap as for HWZ (Twitter's limit as a benchmark),
  # which also drops posts bloated by quoted HTML remnants
  forum_text_temp <-
    forum_text_temp[nchar(forum_text_temp) <= 280] %>%
    gsub('[[:cntrl:]]', ' ', .)
  forum_text_sbf <- c(forum_text_sbf, forum_text_temp) %>% unique()
}
paste('Number of forum comments:', length(forum_text_sbf), sep=' ')
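# distribution of comment lengths as a sanity check (purely illustrative)
summary(nchar(forum_text_sbf))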
### Combining
# pool both forums and remove comments repeated across them, if any
forum_text <- c(forum_text_hwz, forum_text_sbf) %>% unique()
forum_text %>% saveRDS("forum_text.rds")
print(paste('Total forum comments:', length(forum_text), sep=' '))
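# to reuse the saved comments in a later session:
# forum_text <- readRDS("forum_text.rds")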