package scrapper

import (
	"context"
	"errors"

	"github.com/chromedp/chromedp"
	log "github.com/sirupsen/logrus"

	"scrapper-mandarake/internal/common"
)

// Start launches the scraping worker pool. It validates the configured
// goroutine count, sets up the remote browser, creates a chromedp remote
// allocator bound to the pool context, and starts goroutinesNumber workers
// that consume tasksChan and publish to resultsChan. Start returns as soon
// as the workers are launched; call Stop to cancel them and wait for exit.
//
// NOTE(review): each worker is assumed to call s.wg.Done when it returns —
// confirm against the worker implementation.
func (s *Scrapper) Start(ctx context.Context, tasksChan <-chan common.Task, resultsChan chan<- common.Result) error {
	if s.goroutinesNumber <= 0 {
		err := errors.New("goroutines num <= 0, abort")
		log.WithError(err).Error(pkgLogHeader)
		return err
	}

	poolCtx, cancel := context.WithCancel(ctx)
	s.poolCancel = cancel

	log.Infof("%v Start handling tasks", pkgLogHeader)

	log.Infof("%v Setting up browser", pkgLogHeader)
	cr, err := s.setupBrowser(poolCtx)
	if err != nil {
		// Release the pool context on the error path: if Start fails the
		// caller may never call Stop, and the derived context would leak.
		cancel()
		// Was "pkgLogHeader + logGetPrice + ..." — logGetPrice looked
		// copy-pasted from the price-fetch path; this is browser setup.
		log.WithError(err).Error(pkgLogHeader + " failed to setup browser")
		return err
	}
	log.WithField("Copyright message", cr).Infof("%v Finished setting up browser.", pkgLogHeader)

	allocCtx, allocCancel := chromedp.NewRemoteAllocator(poolCtx, s.externalBrowser)
	s.allocCancel = allocCancel

	log.Infof("%v processing tasks...", pkgLogHeader)

	s.wg.Add(s.goroutinesNumber)
	for i := 0; i < s.goroutinesNumber; i++ {
		go s.worker(allocCtx, tasksChan, resultsChan)
	}
	return nil
}

// Stop cancels the allocator and pool contexts, signalling every worker
// started by Start to exit, and blocks until all of them have returned.
// It is safe to call even if Start failed before the cancels were stored.
func (s *Scrapper) Stop() {
	if s.allocCancel != nil {
		s.allocCancel()
	}
	if s.poolCancel != nil {
		s.poolCancel()
	}
	s.wg.Wait()
}