@@ -1,5 +1,6 @@
 import scrapy
 import re
+import logging
 from crawl.comm.football import FTResult
 
 class FootballSpider(scrapy.Spider):
@@ -28,9 +29,9 @@ class FootballSpider(scrapy.Spider):
             awayTeam = self.trimBrackets(tdNodeList[3].css('.ke::text').get())  # away team
             half = tdNodeList[4].css('span::text').get()
             whole = tdNodeList[5].css('span::text').get()
-            status = tdNodeList[9].css('span::text').get()
+            status = tdNodeList[9].css('::text').get()
 
-            FTResult(
+            ftResult = FTResult(
                 matchTime,
                 matchWeek,
                 leagueName,
@@ -40,7 +41,12 @@ class FootballSpider(scrapy.Spider):
                 half,
                 whole,
                 status
-            ).persist()
+            )
+
+            logging.info("Collected data --> %s" % ftResult.toString())
+
+            ftResult.persist()
+
 
         # check whether there is a next page
         pgNodes = response.css('.m-page .u-pg2')
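Outside of Scrapy, the log-then-persist flow this change introduces can be illustrated with Python's standard logging module (which Scrapy's own logging is built on). The MatchRecord class below is a hypothetical stand-in for FTResult, whose real definition lives in crawl.comm.football and is not shown in this diff; this is a minimal sketch, not the project's actual API.

import logging

logging.basicConfig(level=logging.INFO)

class MatchRecord:
    """Hypothetical stand-in for FTResult, only to show the log-then-persist flow."""

    def __init__(self, home, away, whole):
        self.home = home
        self.away = away
        self.whole = whole

    def toString(self):
        # mirrors the toString() call made in the spider
        return "%s vs %s -> %s" % (self.home, self.away, self.whole)

    def persist(self):
        # the real FTResult presumably writes the record to storage here
        pass

record = MatchRecord("TeamA", "TeamB", "2:1")
logging.info("Collected data --> %s" % record.toString())
record.persist()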