
feat: Optimize concurrent scraping (#1828)
* Concurrent Scraper Upgrade

The scraper pool no longer waits for the entire wait group to finish before adding more workers. Instead, it adds workers one at a time whenever the running count drops below the concurrent scraper limit.

* Formatting

* Tweak to WG functions

Switch to using a function to retrieve the count instead of accessing the field directly. Something this low-level should probably be protected.

* go fmt

---------

Co-authored-by: crwxaj <52156245+crwxaj@users.noreply.github.com>
pops64 and crwxaj authored Aug 28, 2024
1 parent 9db4330 commit d648c4c
Showing 39 changed files with 108 additions and 119 deletions.
30 changes: 28 additions & 2 deletions pkg/models/model_scraper.go
@@ -2,12 +2,12 @@ package models

import (
"encoding/json"
"sync"
"sync/atomic"
)

var scrapers []Scraper

-type ScraperFunc func(*sync.WaitGroup, bool, []string, chan<- ScrapedScene, string, string, bool) error
+type ScraperFunc func(*ScrapeWG, bool, []string, chan<- ScrapedScene, string, string, bool) error

type Scraper struct {
ID string `json:"id"`
@@ -90,3 +90,29 @@ func RegisterScraper(id string, name string, avatarURL string, domain string, f
s.MasterSiteId = masterSiteId
scrapers = append(scrapers, s)
}

+// Custom wait group that exposes its current counter, so scrapers can always run at the maximum concurrent count.
+type ScrapeWG struct {
+	count int64
+}
+
+func (wg *ScrapeWG) Add(n int64) {
+	atomic.AddInt64(&wg.count, n)
+}
+
+func (wg *ScrapeWG) Done() {
+	wg.Add(-1)
+	if atomic.LoadInt64(&wg.count) < 0 {
+		panic("negative wait group counter")
+	}
+}
+
+func (wg *ScrapeWG) Wait(n int64) {
+	for atomic.LoadInt64(&wg.count) >= n && atomic.LoadInt64(&wg.count) != 0 {
+		continue
+	}
+}
+
+func (wg *ScrapeWG) Count() int64 {
+	return atomic.LoadInt64(&wg.count)
+}
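
For context, here is a minimal sketch of how a dispatcher can use ScrapeWG to keep the pool topped up. The runScrapers wrapper, its package, and its arguments are illustrative assumptions, not the actual xbvr task runner. The point is that Wait(n) returns as soon as the counter drops below n, so the loop can launch the next scraper the moment a slot frees, rather than waiting for a whole batch to drain as sync.WaitGroup.Wait would.

package tasks // hypothetical call site

import "github.com/xbapps/xbvr/pkg/models"

// runScrapers keeps up to maxConcurrent scrapers running at all times.
func runScrapers(funcs []models.ScraperFunc, maxConcurrent int64, out chan<- models.ScrapedScene) {
	wg := &models.ScrapeWG{}
	for _, f := range funcs {
		wg.Add(1)
		// Each ScraperFunc calls wg.Done() itself via defer; errors are ignored here for brevity.
		go f(wg, true, nil, out, "", "", false)
		// Blocks only while the pool is full, i.e. until count < maxConcurrent.
		wg.Wait(maxConcurrent)
	}
	wg.Wait(1) // drain: returns once the counter reaches zero
}

Note that Wait is a busy-wait: it spins on the atomic counter instead of parking the goroutine, trading some dispatcher CPU for a very simple implementation with no mutexes or condition variables.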
3 changes: 1 addition & 2 deletions pkg/scrape/baberoticavr.go
@@ -7,7 +7,6 @@ import (
"regexp"
"strconv"
"strings"
"sync"

"github.com/go-resty/resty/v2"
"github.com/gocolly/colly/v2"
Expand All @@ -16,7 +15,7 @@ import (
"github.com/xbapps/xbvr/pkg/models"
)

-func BaberoticaVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func BaberoticaVR(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
scraperID := "baberoticavr"
siteID := "BaberoticaVR"
13 changes: 6 additions & 7 deletions pkg/scrape/badoink.go
@@ -8,7 +8,6 @@ import (
"regexp"
"strconv"
"strings"
"sync"
"time"

"github.com/go-resty/resty/v2"
@@ -23,7 +22,7 @@
"github.com/xbapps/xbvr/pkg/models"
)

-func BadoinkSite(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, scraperID string, siteID string, URL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func BadoinkSite(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, scraperID string, siteID string, URL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
logScrapeStart(scraperID, siteID)

@@ -268,23 +267,23 @@ func BadoinkSite(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out
return nil
}

-func BadoinkVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func BadoinkVR(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return BadoinkSite(wg, updateSite, knownScenes, out, singleSceneURL, "badoinkvr", "BadoinkVR", "https://badoinkvr.com/vrpornvideos?order=newest", singeScrapeAdditionalInfo, limitScraping)
}

-func B18VR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func B18VR(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return BadoinkSite(wg, updateSite, knownScenes, out, singleSceneURL, "18vr", "18VR", "https://18vr.com/vrpornvideos?order=newest", singeScrapeAdditionalInfo, limitScraping)
}

-func VRCosplayX(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func VRCosplayX(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return BadoinkSite(wg, updateSite, knownScenes, out, singleSceneURL, "vrcosplayx", "VRCosplayX", "https://vrcosplayx.com/cosplaypornvideos?order=newest", singeScrapeAdditionalInfo, limitScraping)
}

-func BabeVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func BabeVR(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return BadoinkSite(wg, updateSite, knownScenes, out, singleSceneURL, "babevr", "BabeVR", "https://babevr.com/vrpornvideos?order=newest", singeScrapeAdditionalInfo, limitScraping)
}

-func KinkVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func KinkVR(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return BadoinkSite(wg, updateSite, knownScenes, out, singleSceneURL, "kinkvr", "KinkVR", "https://kinkvr.com/bdsm-vr-videos?order=newest", singeScrapeAdditionalInfo, limitScraping)
}

3 changes: 1 addition & 2 deletions pkg/scrape/caribbeancom.go
@@ -4,7 +4,6 @@ import (
"encoding/json"
"strconv"
"strings"
"sync"

"github.com/bregydoc/gtranslate"
"github.com/gocolly/colly/v2"
@@ -15,7 +14,7 @@
"golang.org/x/text/language"
)

-func CariVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func CariVR(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
scraperID := "caribbeancomvr"
siteID := "CaribbeanCom VR"
7 changes: 3 additions & 4 deletions pkg/scrape/czechvr.go
@@ -4,7 +4,6 @@ import (
"regexp"
"strconv"
"strings"
"sync"

"github.com/gocolly/colly/v2"
"github.com/mozillazg/go-slugify"
@@ -14,7 +13,7 @@
"github.com/xbapps/xbvr/pkg/models"
)

-func CzechVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, scraperID string, siteID string, nwID string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func CzechVR(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, scraperID string, siteID string, nwID string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
logScrapeStart(scraperID, siteID)
commonDb, _ := models.GetCommonDB()
@@ -199,14 +198,14 @@ func CzechVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan
}

func addCZVRScraper(id string, name string, nwid string, avatarURL string) {
-	registerScraper(id, name, avatarURL, "czechvrnetwork.com", func(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+	registerScraper(id, name, avatarURL, "czechvrnetwork.com", func(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return CzechVR(wg, updateSite, knownScenes, out, singleSceneURL, id, name, nwid, singeScrapeAdditionalInfo, limitScraping)
})
}

func init() {
// scraper for scraping single scenes where only the url is provided
registerScraper("czechvr-single_scene", "Czech VR - Other Studios", "", "czechvrnetwork.com", func(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
registerScraper("czechvr-single_scene", "Czech VR - Other Studios", "", "czechvrnetwork.com", func(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return CzechVR(wg, updateSite, knownScenes, out, singleSceneURL, "", "", "", "", limitScraping)
})
addCZVRScraper("czechvr", "Czech VR", "15", "https://www.czechvr.com/images/favicon/android-chrome-256x256.png")
3 changes: 1 addition & 2 deletions pkg/scrape/darkroomvr.go
@@ -5,15 +5,14 @@
"fmt"
"strconv"
"strings"
"sync"

"github.com/gocolly/colly/v2"
"github.com/nleeper/goment"
"github.com/thoas/go-funk"
"github.com/xbapps/xbvr/pkg/models"
)

-func DarkRoomVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func DarkRoomVR(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
scraperID := "darkroomvr"
siteID := "DarkRoomVR"
3 changes: 1 addition & 2 deletions pkg/scrape/fuckpassvr.go
@@ -5,7 +5,6 @@
"net/url"
"regexp"
"strings"
"sync"

"github.com/go-resty/resty/v2"
"github.com/gocolly/colly/v2"
@@ -14,7 +13,7 @@
"github.com/xbapps/xbvr/pkg/models"
)

-func FuckPassVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func FuckPassVR(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
scraperID := "fuckpassvr-native"
siteID := "FuckPassVR"
3 changes: 1 addition & 2 deletions pkg/scrape/groobyvr.go
@@ -5,7 +5,6 @@
"regexp"
"strconv"
"strings"
"sync"

"github.com/gocolly/colly/v2"
"github.com/mozillazg/go-slugify"
@@ -14,7 +13,7 @@
"github.com/xbapps/xbvr/pkg/models"
)

-func GroobyVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func GroobyVR(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
scraperID := "groobyvr"
siteID := "GroobyVR"
3 changes: 1 addition & 2 deletions pkg/scrape/hologirlsvr.go
@@ -3,15 +3,14 @@ package scrape
import (
"regexp"
"strings"
"sync"

"github.com/gocolly/colly/v2"
"github.com/mozillazg/go-slugify"
"github.com/thoas/go-funk"
"github.com/xbapps/xbvr/pkg/models"
)

-func HoloGirlsVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func HoloGirlsVR(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
scraperID := "hologirlsvr"
siteID := "HoloGirlsVR"
8 changes: 4 additions & 4 deletions pkg/scrape/lethalhardcorevr.go
@@ -1,9 +1,9 @@
package scrape

import (
"context"
"regexp"
"strings"
"sync"

"github.com/gocolly/colly/v2"
"github.com/mozillazg/go-slugify"
@@ -26,7 +26,7 @@ func isGoodTag(lookup string) bool {
return true
}

-func LethalHardcoreSite(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, scraperID string, siteID string, URL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func LethalHardcoreSite(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, scraperID string, siteID string, URL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
logScrapeStart(scraperID, siteID)

@@ -176,11 +176,11 @@ func LethalHardcoreSite(wg *sync.WaitGroup, updateSite bool, knownScenes []strin
return nil
}

-func LethalHardcoreVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func LethalHardcoreVR(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return LethalHardcoreSite(wg, updateSite, knownScenes, out, singleSceneURL, "lethalhardcorevr", "LethalHardcoreVR", "https://lethalhardcorevr.com/lethal-hardcore-vr-scenes.html?studio=95595&sort=released", singeScrapeAdditionalInfo, limitScraping)
}

-func WhorecraftVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func WhorecraftVR(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return LethalHardcoreSite(wg, updateSite, knownScenes, out, singleSceneURL, "whorecraftvr", "WhorecraftVR", "https://lethalhardcorevr.com/lethal-hardcore-vr-scenes.html?studio=95347&sort=released", singeScrapeAdditionalInfo, limitScraping)
}

3 changes: 1 addition & 2 deletions pkg/scrape/littlecaprice.go
@@ -3,7 +3,6 @@ package scrape
import (
"net/url"
"strings"
"sync"
"time"

"github.com/gocolly/colly/v2"
@@ -12,7 +11,7 @@
"github.com/xbapps/xbvr/pkg/models"
)

-func LittleCaprice(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func LittleCaprice(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
scraperID := "littlecaprice"
siteID := "Little Caprice Dreams"
3 changes: 1 addition & 2 deletions pkg/scrape/navr.go
@@ -4,7 +4,6 @@ import (
"html"
"strconv"
"strings"
"sync"

"github.com/gocolly/colly/v2"
"github.com/mozillazg/go-slugify"
@@ -14,7 +13,7 @@
"github.com/xbapps/xbvr/pkg/models"
)

-func NaughtyAmericaVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func NaughtyAmericaVR(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
scraperID := "naughtyamericavr"
siteID := "NaughtyAmerica VR"
9 changes: 4 additions & 5 deletions pkg/scrape/povr.go
@@ -6,7 +6,6 @@ import (
"regexp"
"strconv"
"strings"
"sync"

"github.com/gocolly/colly/v2"
"github.com/nleeper/goment"
@@ -15,7 +14,7 @@
"github.com/xbapps/xbvr/pkg/models"
)

-func POVR(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, scraperID string, siteID string, company string, siteURL string, singeScrapeAdditionalInfo string, limitScraping bool, masterSiteId string) error {
+func POVR(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, scraperID string, siteID string, company string, siteURL string, singeScrapeAdditionalInfo string, limitScraping bool, masterSiteId string) error {
defer wg.Done()
logScrapeStart(scraperID, siteID)

@@ -164,18 +163,18 @@ func addPOVRScraper(id string, name string, company string, avatarURL string, cu
}

if masterSiteId == "" {
-		registerScraper(id, suffixedName, avatarURL, "povr.com", func(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+		registerScraper(id, suffixedName, avatarURL, "povr.com", func(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return POVR(wg, updateSite, knownScenes, out, singleSceneURL, id, siteNameSuffix, company, siteURL, singeScrapeAdditionalInfo, limitScraping, "")
})
} else {
-		registerAlternateScraper(id, suffixedName, avatarURL, "povr.com", masterSiteId, func(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+		registerAlternateScraper(id, suffixedName, avatarURL, "povr.com", masterSiteId, func(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return POVR(wg, updateSite, knownScenes, out, singleSceneURL, id, siteNameSuffix, company, siteURL, singeScrapeAdditionalInfo, limitScraping, masterSiteId)
})
}
}

func init() {
registerScraper("povr-single_scene", "POVR - Other Studios", "", "povr.com", func(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
registerScraper("povr-single_scene", "POVR - Other Studios", "", "povr.com", func(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return POVR(wg, updateSite, knownScenes, out, singleSceneURL, "", "", "", "", singeScrapeAdditionalInfo, limitScraping, "")
})
var scrapers config.ScraperList
7 changes: 3 additions & 4 deletions pkg/scrape/realitylovers.go
@@ -4,7 +4,6 @@ import (
"fmt"
"regexp"
"strings"
"sync"
"time"

"github.com/go-resty/resty/v2"
@@ -15,7 +14,7 @@
"github.com/xbapps/xbvr/pkg/models"
)

-func RealityLoversSite(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, scraperID string, siteID string, domain string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func RealityLoversSite(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, scraperID string, siteID string, domain string, singeScrapeAdditionalInfo string, limitScraping bool) error {
defer wg.Done()
logScrapeStart(scraperID, siteID)

@@ -151,11 +150,11 @@ func RealityLoversSite(wg *sync.WaitGroup, updateSite bool, knownScenes []string
return nil
}

-func RealityLovers(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func RealityLovers(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return RealityLoversSite(wg, updateSite, knownScenes, out, singleSceneURL, "realitylovers", "RealityLovers", "realitylovers.com", singeScrapeAdditionalInfo, limitScraping)
}

-func TSVirtualLovers(wg *sync.WaitGroup, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
+func TSVirtualLovers(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return RealityLoversSite(wg, updateSite, knownScenes, out, singleSceneURL, "tsvirtuallovers", "TSVirtualLovers", "tsvirtuallovers.com", singeScrapeAdditionalInfo, limitScraping)
}

(Diffs for the remaining 25 changed files omitted.)
