logs + bugfixes
This commit is contained in:
parent 518057f92f
commit d2d938158b
1 changed file with 10 additions and 4 deletions
@@ -75,7 +75,6 @@ func (s *MandarakeParser) HandleTasks(tasks []shared.Task, sender chan shared.Ta
     receiver := make(chan shared.Task, len(tasks))
     for _, task := range tasks {
-        task.RetryCount = 3
         receiver <- task
     }
     close(receiver)
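The hunk above drops the hard-coded task.RetryCount = 3, so whatever retry count the caller set on a task is now kept. As a rough, hypothetical sketch of the fan-out pattern the surrounding code appears to use (buffer every task into a channel, close it, drain it with a small worker pool), with placeholder Task and TaskResult types standing in for shared.Task and shared.TaskResult:

package main

import (
	"fmt"
	"sync"
)

// Task and TaskResult are placeholders for shared.Task / shared.TaskResult;
// only the fields needed for the sketch are included.
type Task struct {
	URL        string
	RetryCount int
}

type TaskResult struct {
	URL      string
	MinPrice int32
}

// handleTasks buffers every task into a channel, closes it, and lets a
// fixed pool of workers drain it. The caller-supplied RetryCount is left
// untouched, matching the change in the hunk above.
func handleTasks(tasks []Task, sender chan TaskResult, workers int) {
	receiver := make(chan Task, len(tasks))
	for _, task := range tasks {
		receiver <- task
	}
	close(receiver)

	var wg sync.WaitGroup
	for i := 0; i < workers; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for task := range receiver {
				// A real worker would fetch and parse the page here.
				sender <- TaskResult{URL: task.URL}
			}
		}()
	}
	wg.Wait()
	close(sender)
}

func main() {
	tasks := []Task{{URL: "a", RetryCount: 1}, {URL: "b", RetryCount: 2}}
	sender := make(chan TaskResult, len(tasks))
	handleTasks(tasks, sender, 2)
	for r := range sender {
		fmt.Println(r.URL, r.MinPrice)
	}
}

Closing the receiver channel once it is filled is what lets each worker's range loop terminate cleanly.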
|
|
@@ -102,6 +101,11 @@ func (s *MandarakeParser) worker(receiver chan shared.Task, sender chan shared.T
             continue
         }

+        if page == nil {
+            log.Debug("Mandarake worker | Page for task is nil")
+            continue
+        }
+
         p := int32(s.getMinPrice(page))

         sender <- shared.TaskResult{
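The added lines make the worker skip a task whose fetched page is nil instead of dereferencing it. A minimal, self-contained sketch of that guard, with a placeholder fetchPage standing in for the real page retrieval (neither the actual worker body nor getMinPrice is shown in this diff):

package main

import "log"

type Task struct{ URL string }

type Page struct{ Body string }

// fetchPage is a stand-in for the real page retrieval; returning nil
// models a failed or empty response.
func fetchPage(t Task) *Page {
	if t.URL == "" {
		return nil
	}
	return &Page{Body: "<html></html>"}
}

// worker drains the task channel and, like the patched worker above,
// skips any task whose page came back nil instead of dereferencing it.
func worker(receiver <-chan Task, results chan<- int) {
	for task := range receiver {
		page := fetchPage(task)
		if page == nil {
			log.Printf("worker | page for task %q is nil", task.URL)
			continue
		}
		results <- len(page.Body)
	}
	close(results)
}

func main() {
	receiver := make(chan Task, 2)
	receiver <- Task{URL: "https://example.com"}
	receiver <- Task{} // skipped by the nil-page guard
	close(receiver)

	results := make(chan int, 2)
	worker(receiver, results)
	for n := range results {
		log.Println("page length:", n)
	}
}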
|
|
@@ -133,15 +137,17 @@ func (s *MandarakeParser) initClient() error {
     //make client
     jar, err := cookiejar.New(nil)
     if err != nil {
-        log.WithError(err).Error("Mandarake | Init client")
+        log.WithError(err).Error("Mandarake | Cookie jar")
         return err
     }

     u, err := url.Parse(s.parseParams.cookieUrl)
     if err != nil {
+        log.WithError(err).Error("Mandarake | Parse cookie URL")
         return err
     }
-    s.client.Jar.SetCookies(u, c)
+    jar.SetCookies(u, c)

     taskClient := &http.Client{
         Timeout: time.Second * 30,
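The change in initClient seeds the freshly created jar with jar.SetCookies before the jar is handed to the HTTP client, rather than going through s.client.Jar, which may not be populated at this point. A minimal sketch of that order of operations using only the standard library; buildClient, cookieURL, and the cookie slice are illustrative names, not the parser's real API:

package main

import (
	"log"
	"net/http"
	"net/http/cookiejar"
	"net/url"
	"time"
)

// buildClient sketches the fixed flow: create the cookie jar, seed it
// with cookies via jar.SetCookies, then hand the jar to the http.Client.
func buildClient(cookieURL string, cookies []*http.Cookie) (*http.Client, error) {
	jar, err := cookiejar.New(nil)
	if err != nil {
		log.Printf("cookie jar: %v", err)
		return nil, err
	}

	u, err := url.Parse(cookieURL)
	if err != nil {
		log.Printf("parse cookie URL: %v", err)
		return nil, err
	}
	jar.SetCookies(u, cookies)

	return &http.Client{
		Jar:     jar,
		Timeout: 30 * time.Second,
	}, nil
}

func main() {
	c := []*http.Cookie{{Name: "session", Value: "example"}}
	client, err := buildClient("https://example.com", c)
	if err != nil {
		log.Fatal(err)
	}
	log.Println("client ready, timeout:", client.Timeout)
}

Passing the jar into the http.Client literal (alongside the 30-second timeout visible in the hunk) means every request made through that client sends and stores cookies for the seeded domain automatically.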
|
|
@@ -236,7 +242,7 @@ func (s *MandarakeParser) findData(doc *html.Node, params price) []string {
     )

     crawler = func(node *html.Node) {
-        if node.Type == html.ElementNode && node.Data == params.tag {
+        if node.Type == html.ElementNode && node.Data == params.subTag {
             if strings.Contains(node.FirstChild.Data, params.substring) {
                 values = append(values, node.FirstChild.Data)
             }
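The crawler now matches elements against params.subTag instead of params.tag. Below is a sketch of the same recursive walk over a golang.org/x/net/html tree with plain string parameters; it additionally checks FirstChild for nil before reading its text, which the closure in the diff assumes is non-nil. findText and the sample markup are illustrative only:

package main

import (
	"fmt"
	"strings"

	"golang.org/x/net/html"
)

// findText walks the node tree and collects the text of every element
// whose tag matches tag and whose first child text contains substring,
// mirroring the crawler closure in findData.
func findText(doc *html.Node, tag, substring string) []string {
	var values []string
	var crawler func(*html.Node)
	crawler = func(node *html.Node) {
		if node.Type == html.ElementNode && node.Data == tag &&
			node.FirstChild != nil &&
			strings.Contains(node.FirstChild.Data, substring) {
			values = append(values, node.FirstChild.Data)
		}
		for child := node.FirstChild; child != nil; child = child.NextSibling {
			crawler(child)
		}
	}
	crawler(doc)
	return values
}

func main() {
	doc, err := html.Parse(strings.NewReader(
		`<div><p class="price">1,500 yen</p><p>sold out</p></div>`))
	if err != nil {
		panic(err)
	}
	fmt.Println(findText(doc, "p", "yen")) // [1,500 yen]
}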
|
|
|