Documentation ¶
Index ¶
- type DefaultNode
- func (this *DefaultNode) AcceptRequest(request *http.Request)
- func (this *DefaultNode) AcceptResult(scrapyResult *crawler.ScrapeResult)
- func (this *DefaultNode) AddToCrawlingQuene(request *http.Request)
- func (this *DefaultNode) CanWeStopSpider(spiderName string) bool
- func (this *DefaultNode) CloseSpider(spiderName string)
- func (this *DefaultNode) DeleteDeadNode(nodeName string)
- func (this *DefaultNode) DistributeRequest(request *http.Request)
- func (this *DefaultNode) GetMasterName() string
- func (this *DefaultNode) GetMasterNode() *node.NodeInfo
- func (this *DefaultNode) GetNodeInfo() *node.NodeInfo
- func (this *DefaultNode) GetSpidersName() []string
- func (this *DefaultNode) Init(cluster cluster.Cluster)
- func (this *DefaultNode) IsMasterNode() bool
- func (this *DefaultNode) IsMe(nodeName string) bool
- func (this *DefaultNode) IsStop() bool
- func (this *DefaultNode) Join()
- func (this *DefaultNode) MakeMasterNode(nodeName string)
- func (this *DefaultNode) PauseCrawl()
- func (this *DefaultNode) Ready()
- func (this *DefaultNode) ReportToMaster(result *crawler.ScrapeResult)
- func (this *DefaultNode) StartCrawl()
- func (this *DefaultNode) StartSpider(spiderName string) (bool, string)
- func (this *DefaultNode) StopCrawl()
- func (this *DefaultNode) UnpauseCrawl()
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type DefaultNode ¶
type DefaultNode struct {
	NodeInfo *node.NodeInfo
	Settings *util.Settings
	Cluster  cluster.Cluster
	Crawler  *crawler.Crawler
}
func NewDefaultNode ¶
func NewDefaultNode(settings *util.Settings, resultQuene *crawler.ResultQuene) *DefaultNode
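A minimal bootstrap sketch. The settings, result queue, and cluster values are assumed to be constructed elsewhere in the project, and the Init, Join, Ready ordering is one plausible startup sequence, not confirmed by this page. All sketches below assume they live in this package and use its existing imports (net/http, fmt, time, plus the project's cluster, crawler, node, and util packages).

	func bootstrap(settings *util.Settings, results *crawler.ResultQuene, c cluster.Cluster) *DefaultNode {
		n := NewDefaultNode(settings, results) // wrap the settings and result queue in a node
		n.Init(c)                              // attach the node to the cluster
		n.Join()                               // join the cluster
		n.Ready()                              // signal readiness to crawl
		return n
	}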
func (*DefaultNode) AcceptRequest ¶
func (this *DefaultNode) AcceptRequest(request *http.Request)
AcceptRequest receives a request distributed to this node; if the node is not running, it is started first.
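An illustrative caller; requests are plain net/http requests, as in the signature above, and the URL is an example:

	func feedRequest(n *DefaultNode) error {
		req, err := http.NewRequest("GET", "http://example.com/page", nil) // example URL
		if err != nil {
			return err
		}
		n.AcceptRequest(req) // starts the node's crawler if needed, then takes the request
		return nil
	}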
func (*DefaultNode) AcceptResult ¶
func (this *DefaultNode) AcceptResult(scrapyResult *crawler.ScrapeResult)
AcceptResult handles the result of a crawled request: it adds the newly scraped requests to the cluster and tells the cluster the original request is done.
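A hypothetical wiring sketch: whatever layer finishes a crawl hands its *crawler.ScrapeResult to the node.

	func onCrawlDone(n *DefaultNode, result *crawler.ScrapeResult) {
		// Registers any newly scraped requests with the cluster and
		// marks the originating request as finished.
		n.AcceptResult(result)
	}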
func (*DefaultNode) AddToCrawlingQuene ¶
func (this *DefaultNode) AddToCrawlingQuene(request *http.Request)
func (*DefaultNode) CanWeStopSpider ¶
func (this *DefaultNode) CanWeStopSpider(spiderName string) bool
func (*DefaultNode) CloseSpider ¶
func (this *DefaultNode) CloseSpider(spiderName string)
func (*DefaultNode) DeleteDeadNode ¶
func (this *DefaultNode) DeleteDeadNode(nodeName string)
DeleteDeadNode runs on the master node: it pauses the crawler, removes the dead node from the cluster, then unpauses the crawler.
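A sketch of how a failure detector might call this; the detector itself is hypothetical, and DeleteDeadNode handles the pause/remove/unpause internally:

	func onNodeLost(n *DefaultNode, deadNodeName string) {
		if n.IsMasterNode() {
			n.DeleteDeadNode(deadNodeName) // pause, drop the node, unpause
		}
	}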
func (*DefaultNode) DistributeRequest ¶
func (this *DefaultNode) DistributeRequest(request *http.Request)
DistributeRequest distributes a request across the nodes: it decides which node should handle it and tells the cluster where the request went.
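For example, seeding the cluster with a start URL (illustrative):

	func seed(n *DefaultNode, url string) error {
		req, err := http.NewRequest("GET", url, nil)
		if err != nil {
			return err
		}
		n.DistributeRequest(req) // choose a target node and record it in the cluster
		return nil
	}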
func (*DefaultNode) GetMasterName ¶
func (this *DefaultNode) GetMasterName() string
GetMasterName returns the name of the cluster's master node.
func (*DefaultNode) GetMasterNode ¶
func (this *DefaultNode) GetMasterNode() *node.NodeInfo
GetMasterNode returns the cluster's master node.
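A small inspection helper, a sketch only; the %+v formatting assumes nothing about the fields of *node.NodeInfo, which this page does not document:

	func describeMaster(n *DefaultNode) {
		name := n.GetMasterName() // master's name
		info := n.GetMasterNode() // master's *node.NodeInfo
		fmt.Printf("master %s: %+v\n", name, info)
	}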
func (*DefaultNode) GetNodeInfo ¶
func (this *DefaultNode) GetNodeInfo() *node.NodeInfo
func (*DefaultNode) GetSpidersName ¶
func (this *DefaultNode) GetSpidersName() []string
func (*DefaultNode) Init ¶
func (this *DefaultNode) Init(cluster cluster.Cluster)
func (*DefaultNode) IsMasterNode ¶
func (this *DefaultNode) IsMasterNode() bool
IsMasterNode reports whether this node is the master node.
func (*DefaultNode) IsMe ¶
func (this *DefaultNode) IsMe(nodeName string) bool
IsMe reports whether the given node name refers to this node.
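A hypothetical routing check combining the two identity methods; the scenario is illustrative, not taken from the package:

	func dispatch(n *DefaultNode, targetNode string, req *http.Request) {
		if n.IsMe(targetNode) {
			n.AcceptRequest(req) // addressed to this node: crawl locally
		} else if n.IsMasterNode() {
			n.DistributeRequest(req) // master forwards it to the right node
		}
	}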
func (*DefaultNode) IsStop ¶
func (this *DefaultNode) IsStop() bool
IsStop returns true when no requests are left.
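One way a caller might wait for the crawl to drain before shutting a spider down; the polling interval is arbitrary:

	func waitAndClose(n *DefaultNode, spiderName string) {
		for !n.IsStop() { // IsStop is true once no requests are left
			time.Sleep(time.Second)
		}
		if n.CanWeStopSpider(spiderName) {
			n.CloseSpider(spiderName)
		}
	}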
func (*DefaultNode) Join ¶
func (this *DefaultNode) Join()
func (*DefaultNode) MakeMasterNode ¶
func (this *DefaultNode) MakeMasterNode(nodeName string)
MakeMasterNode makes the named node the master node.
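A self-promotion sketch, e.g. after the old master disappears; it assumes *node.NodeInfo exposes a Name field, which this page does not confirm:

	func promoteSelf(n *DefaultNode) {
		self := n.GetNodeInfo()
		n.MakeMasterNode(self.Name) // the Name field is an assumption
	}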
func (*DefaultNode) Ready ¶
func (this *DefaultNode) Ready()
func (*DefaultNode) ReportToMaster ¶
func (this *DefaultNode) ReportToMaster(result *crawler.ScrapeResult)
ReportToMaster reports the result of a request to the master node.
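A sketch of the result path: the master consumes results directly, while workers report upstream.

	func routeResult(n *DefaultNode, result *crawler.ScrapeResult) {
		if n.IsMasterNode() {
			n.AcceptResult(result)
		} else {
			n.ReportToMaster(result)
		}
	}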
func (*DefaultNode) StartSpider ¶
func (this *DefaultNode) StartSpider(spiderName string) (bool, string)
StartSpider starts the named spider. If the spider is already running it returns false; otherwise it tells the cluster to start the spider, collects the spider's start requests, pushes them to the cluster, and tries to start the crawler.
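Assuming the string return value carries a status message (the page does not say), a caller might check it like this:

	func launch(n *DefaultNode, spiderName string) error {
		ok, msg := n.StartSpider(spiderName)
		if !ok {
			return fmt.Errorf("spider %q not started: %s", spiderName, msg)
		}
		return nil
	}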