Documentation ¶
Index ¶
- Constants
- type Cluster
- func (this *Cluster) AddNode(nodeInfo *NodeInfo)
- func (this *Cluster) AddRequest(request *http.Request)
- func (this *Cluster) AddToCrawlingQuene(request *http.Request)
- func (this *Cluster) CrawlStatus() *crawler.CrawlerStatus
- func (this *Cluster) Crawled(scrapyResult *crawler.ScrapeResult)
- func (this *Cluster) DeleteDeadNode(nodeName string)
- func (this *Cluster) ElectMaster() *NodeInfo
- func (this *Cluster) GetMasterName() string
- func (this *Cluster) GetMasterNode() *NodeInfo
- func (this *Cluster) HasNode(nodeName string) bool
- func (this *Cluster) IsMasterNode() bool
- func (this *Cluster) IsReady() bool
- func (this *Cluster) IsSpiderRunning(spiderName string) bool
- func (this *Cluster) IsStop() bool
- func (this *Cluster) Join()
- func (this *Cluster) MakeMasterNode(nodeName string)
- func (this *Cluster) PopRequest() *http.Request
- func (this *Cluster) Ready()
- func (this *Cluster) StartSpider(spiderName string)
- type ClusterInfo
- type Node
- func (this *Node) AcceptRequest(request *http.Request)
- func (this *Node) AcceptResult(scrapyResult *crawler.ScrapeResult)
- func (this *Node) AddMasterNode(masterNodeInfo *NodeInfo)
- func (this *Node) AddNodeToCluster(nodeInfo *NodeInfo)
- func (this *Node) AddToCrawlingQuene(request *http.Request)
- func (this *Node) DeleteDeadNode(nodeName string)
- func (this *Node) DistributeRequest(request *http.Request)
- func (this *Node) GetAllNode() []*NodeInfo
- func (this *Node) GetMasterName() string
- func (this *Node) GetMasterNode() *NodeInfo
- func (this *Node) IsMasterNode() bool
- func (this *Node) IsMe(nodeName string) bool
- func (this *Node) IsStop() bool
- func (this *Node) Join()
- func (this *Node) MakeMasterNode(nodeName string)
- func (this *Node) PauseCrawl()
- func (this *Node) Ready()
- func (this *Node) ReportToMaster(result *crawler.ScrapeResult)
- func (this *Node) StartCrawl()
- func (this *Node) StartSpider(spiderName string) (bool, string)
- func (this *Node) StopCrawl()
- func (this *Node) UnpauseCrawl()
- type NodeInfo
- type RequestStatus
Constants ¶
const ( CLUSTER_STATUS_INIT = iota CLUSTER_STATUS_JOIN CLUSTER_STATUS_ELECTION CLUSTER_STATUS_READY )
Cluster status: * init: everything has been initialized * join: try to connect to another node; if none is found, make itself the master, otherwise adopt the other node's master * election (optional): once the ring is built, start electing a master * ready: ready to start crawling
Variables ¶
This section is empty.
Functions ¶
This section is empty.
Types ¶
type Cluster ¶
type Cluster struct { ClusterInfo *ClusterInfo RequestStatus *RequestStatus // contains filtered or unexported fields }
func (*Cluster) AddRequest ¶
Add a request to the queue.
func (*Cluster) AddToCrawlingQuene ¶
Record a distributed request job.
func (*Cluster) CrawlStatus ¶
func (this *Cluster) CrawlStatus() *crawler.CrawlerStatus
Get the crawl status.
func (*Cluster) Crawled ¶
func (this *Cluster) Crawled(scrapyResult *crawler.ScrapeResult)
A request job is done: delete it from the crawling queue and increment the crawled count.
func (*Cluster) DeleteDeadNode ¶
func (*Cluster) IsSpiderRunning ¶
Reports whether the spider is running.
func (*Cluster) MakeMasterNode ¶
Make a node the master node by its node name.
func (*Cluster) PopRequest ¶
Pop a request from the waiting queue and add it to the crawling queue.
func (*Cluster) StartSpider ¶
When a spider is started, the cluster records it.
type ClusterInfo ¶
type ClusterInfo struct { Status int Name string NodeList []*NodeInfo LocalNode *NodeInfo MasterNode *NodeInfo }
Basic cluster information.
type Node ¶
type Node struct { NodeInfo *NodeInfo Settings *util.Settings Cluster *Cluster Crawler *crawler.Crawler }
func (*Node) AcceptRequest ¶
Receive a distributed request; if the node is not running, start it.
func (*Node) AcceptResult ¶
func (this *Node) AcceptResult(scrapyResult *crawler.ScrapeResult)
Handle the result of a crawl request: add the scraped requests to the cluster and tell the cluster the request is done.
func (*Node) AddMasterNode ¶
A slave node receives the master node's info and then switches to the new master node.
func (*Node) AddNodeToCluster ¶
Add a node to the cluster; if this is the master node, elect a new master node and send it to the other nodes.
func (*Node) AddToCrawlingQuene ¶
func (*Node) DeleteDeadNode ¶
Master node only: pause the crawler, remove the dead node from the cluster, then unpause the crawler. (Original read "parse/unparse" — presumably "pause/unpause", matching PauseCrawl/UnpauseCrawl.)
func (*Node) DistributeRequest ¶
Distribute a request to a node: pick the target node and tell the cluster where the request went.
func (*Node) ReportToMaster ¶
func (this *Node) ReportToMaster(result *crawler.ScrapeResult)
Report the result of a request to the master.
func (*Node) StartSpider ¶
If the spider is already running, return false; otherwise tell the cluster to start the spider, get the start requests, push them to the cluster, and try to start the crawler.
type RequestStatus ¶
type RequestStatus struct { CrawledMap map[string]int // node + num CrawlingMap map[string]map[string]*http.Request WaitingQuene *crawler.RequestQuene }
Receives basic requests and records crawled requests.
func NewRequestStatus ¶
func NewRequestStatus() *RequestStatus
func (*RequestStatus) Crawled ¶
func (this *RequestStatus) Crawled(scrapyResult *crawler.ScrapeResult)
Delete the entry from CrawlingMap and increment the count in CrawledMap.
func (*RequestStatus) DeleteDeadNode ¶
func (this *RequestStatus) DeleteDeadNode(nodeName string)
Remove the dead node's requests from CrawlingMap and add those requests back to the waiting queue.