-
Notifications
You must be signed in to change notification settings - Fork 32
/
stock2csvINPUT.py
42 lines (35 loc) · 1.38 KB
/
stock2csvINPUT.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
# coding:utf-8
import requests
from bs4 import BeautifulSoup
import os
import time
import csv
# Browser-like request headers: 163.com rejects requests with the default
# python-requests User-Agent, so we impersonate a desktop browser.
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.104 Safari/537.36 Core/1.53.1708.400 QQBrowser/9.5.9635.400'
}
# Interactive parameters read by sharesCrawl():
# share code / year / season (quarter) — each entered as a number.
def sharesCrawl(share_code=None, year=None, season=None):
    """Scrape one quarter of historical daily trading data for a stock from
    quotes.money.163.com and write it to ``<share_code>.csv`` in the current
    directory.

    All parameters are optional; any that is omitted (``None``) is prompted
    for interactively, preserving the original behavior.

    Args:
        share_code: stock ticker code, e.g. ``"0600000"``.
        year: four-digit year, e.g. ``"2020"``.
        season: quarter number, 1-4.

    Returns:
        None. Side effect: creates/overwrites ``./<share_code>.csv``.
    """
    share_code = str(share_code) if share_code is not None else str(input("请输入股票代码:"))
    year = str(year) if year is not None else str(input("请输入年份:"))
    season = str(season) if season is not None else str(input("请输入季度:"))
    url = ('http://quotes.money.163.com/trade/lsjysj_' + share_code +
           '.html?year=' + year + '&season=' + season)
    try:
        # Timeout so a stalled connection cannot hang the script forever;
        # raise_for_status so an HTTP error page is not silently parsed.
        response = requests.get(url, headers=headers, timeout=30)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'lxml')
        # IndexError here means the expected data table is absent
        # (bad share code / empty quarter); caught below.
        table = soup.findAll('table', {'class': 'table_bg001'})[0]
        rows = table.findAll('tr')
        # Text mode with newline='' is what the csv module requires on
        # Python 3; utf-8-sig lets Excel open the Chinese headers correctly.
        with open('./' + share_code + '.csv', 'w', newline='', encoding='utf-8-sig') as csv_file:
            writer = csv.writer(csv_file)
            writer.writerow(('日期', '开盘价', '最高价', '最低价', '收盘价', '涨跌额', '涨跌幅', '成交量', '成交金额', '振幅', '换手率'))
            for row in rows:
                cells = [cell.get_text() for cell in row.findAll('td')]
                # The header <tr> contains only <th> cells -> empty list; skip it.
                if cells:
                    writer.writerow(cells)
    except (requests.RequestException, IndexError):
        # Narrow exception types (was a bare except that also swallowed
        # KeyboardInterrupt); the with-statement handles file cleanup.
        print('----- 爬虫出错了!-----')
sharesCrawl()