-
Notifications
You must be signed in to change notification settings - Fork 0
/
index.ts
80 lines (73 loc) · 2.18 KB
/
index.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
import { Configuration, RequestQueue } from "@crawlee/core";
import { CheerioCrawler } from "@crawlee/cheerio";
// One organic search hit extracted from the DuckDuckGo HTML results page.
interface SearchResult {
title: string; // link text of the result heading (h2 > a)
snippet: string; // text content of the .result__snippet element
url: string; // destination URL — decoded from DDG's `uddg` redirect param when present, otherwise the raw href
}
/**
 * Scrapes DuckDuckGo's HTML search endpoint (https://duckduckgo.com/html/)
 * with a Crawlee CheerioCrawler and collects structured results.
 */
export class Duck {
  /** Results accumulated across paginated requests for the current search. */
  private data: SearchResult[] = [];
  private options: {
    maxPage?: number;
    proxyConfiguration?: any;
    crawleeOptions?: any;
  };

  /**
   * @param options.maxPage maximum number of result pages to crawl (default 1)
   * @param options.proxyConfiguration Crawlee proxy configuration forwarded to the crawler
   * @param options.crawleeOptions extra Crawlee Configuration options merged over the defaults
   */
  constructor(
    options: {
      maxPage?: number;
      proxyConfiguration?: any;
      crawleeOptions?: any;
    } = {}
  ) {
    this.options = options;
  }

  /** Builds a CheerioCrawler whose request handler scrapes result rows into `this.data`. */
  crawlerFactory() {
    const { crawleeOptions, proxyConfiguration } = this.options;
    const config = new Configuration({
      persistStorage: false,
      ...crawleeOptions,
    });
    const data = this.data;
    // `??` instead of `||` so an explicit option is only replaced when absent.
    const maxPage = this.options.maxPage ?? 1;
    let page = 0;
    return new CheerioCrawler(
      {
        proxyConfiguration,
        async requestHandler({ request, $ }) {
          page++;
          // Extract title/snippet/target URL from each organic result row.
          $(".results > .result > div").each((index, el) => {
            const title = $(el).find("h2 > a").text();
            const snippet = $(el).find(".result__snippet").text();
            const redirectUrl = $(el).find("h2 > a").attr("href");
            if (!redirectUrl) {
              // Malformed row with no link: skip it rather than pushing a
              // bogus entry (the original's `as string` cast hid this case).
              return;
            }
            let url: string;
            try {
              // DDG wraps targets in a protocol-relative redirect link; the
              // real destination sits in the `uddg` query parameter.
              const u = new URL("https:" + redirectUrl);
              url = u.searchParams.get("uddg") || redirectUrl;
            } catch {
              // Not a parseable redirect link — keep the raw href.
              url = redirectUrl;
            }
            data.push({ title, snippet, url });
          });
          if (page < maxPage) {
            // DDG paginates via the `s` offset parameter (number of results
            // already seen), so the running total doubles as the offset.
            const url = new URL(request.url);
            url.searchParams.set("s", data.length.toString());
            const queue = await RequestQueue.open();
            // BUG FIX: addRequest returns a promise. Unawaited, the crawler
            // may drain its queue and finish before the next page is
            // enqueued, silently truncating pagination.
            await queue.addRequest({ url: url.toString() });
          }
        },
      },
      config
    );
  }

  /**
   * Runs a DuckDuckGo search for `q` and returns the extracted results.
   * Accumulated state is cleared afterwards so successive searches are independent.
   */
  async search(q: string): Promise<SearchResult[]> {
    const crawler = this.crawlerFactory();
    const url = `https://duckduckgo.com/html/?q=${encodeURIComponent(q)}`;
    await crawler.run([url]);
    const res = [...this.data];
    this.data = [];
    return res;
  }
}