diff --git a/graphql/documents/data/scrapers.graphql b/graphql/documents/data/scrapers.graphql index 94b6434b164..84593326ab8 100644 --- a/graphql/documents/data/scrapers.graphql +++ b/graphql/documents/data/scrapers.graphql @@ -203,6 +203,27 @@ fragment ScrapedGalleryData on ScrapedGallery { } } +fragment ScrapedImageData on ScrapedImage { + title + code + details + photographer + urls + date + + studio { + ...ScrapedSceneStudioData + } + + tags { + ...ScrapedSceneTagData + } + + performers { + ...ScrapedScenePerformerData + } +} + fragment ScrapedStashBoxSceneData on ScrapedScene { title code diff --git a/graphql/documents/queries/scrapers/scrapers.graphql b/graphql/documents/queries/scrapers/scrapers.graphql index 366938fd4d5..e021e775606 100644 --- a/graphql/documents/queries/scrapers/scrapers.graphql +++ b/graphql/documents/queries/scrapers/scrapers.graphql @@ -31,6 +31,17 @@ query ListGalleryScrapers { } } +query ListImageScrapers { + listScrapers(types: [IMAGE]) { + id + name + image { + urls + supported_scrapes + } + } +} + query ListMovieScrapers { listScrapers(types: [MOVIE]) { id @@ -108,12 +119,27 @@ query ScrapeSingleGallery( } } +query ScrapeSingleImage( + $source: ScraperSourceInput! + $input: ScrapeSingleImageInput! +) { + scrapeSingleImage(source: $source, input: $input) { + ...ScrapedImageData + } +} + query ScrapeGalleryURL($url: String!) { scrapeGalleryURL(url: $url) { ...ScrapedGalleryData } } +query ScrapeImageURL($url: String!) { + scrapeImageURL(url: $url) { + ...ScrapedImageData + } +} + query ScrapeMovieURL($url: String!) { scrapeMovieURL(url: $url) { ...ScrapedMovieData diff --git a/graphql/schema/schema.graphql b/graphql/schema/schema.graphql index 9c35d103f17..527c049109c 100644 --- a/graphql/schema/schema.graphql +++ b/graphql/schema/schema.graphql @@ -150,6 +150,12 @@ type Query { input: ScrapeSingleMovieInput! ): [ScrapedMovie!]! + "Scrape for a single image" + scrapeSingleImage( + source: ScraperSourceInput! 
+ input: ScrapeSingleImageInput! + ): [ScrapedImage!]! + "Scrapes content based on a URL" scrapeURL(url: String!, ty: ScrapeContentType!): ScrapedContent @@ -159,6 +165,8 @@ type Query { scrapeSceneURL(url: String!): ScrapedScene "Scrapes a complete gallery record based on a URL" scrapeGalleryURL(url: String!): ScrapedGallery + "Scrapes a complete image record based on a URL" + scrapeImageURL(url: String!): ScrapedImage "Scrapes a complete movie record based on a URL" scrapeMovieURL(url: String!): ScrapedMovie diff --git a/graphql/schema/types/scraper.graphql b/graphql/schema/types/scraper.graphql index 958aff5d228..cd054996509 100644 --- a/graphql/schema/types/scraper.graphql +++ b/graphql/schema/types/scraper.graphql @@ -10,6 +10,7 @@ enum ScrapeType { "Type of the content a scraper generates" enum ScrapeContentType { GALLERY + IMAGE MOVIE PERFORMER SCENE @@ -21,6 +22,7 @@ union ScrapedContent = | ScrapedTag | ScrapedScene | ScrapedGallery + | ScrapedImage | ScrapedMovie | ScrapedPerformer @@ -39,6 +41,8 @@ type Scraper { scene: ScraperSpec "Details for gallery scraper" gallery: ScraperSpec + "Details for image scraper" + image: ScraperSpec "Details for movie scraper" movie: ScraperSpec } @@ -123,6 +127,26 @@ input ScrapedGalleryInput { # no studio, tags or performers } +type ScrapedImage { + title: String + code: String + details: String + photographer: String + urls: [String!] + date: String + studio: ScrapedStudio + tags: [ScrapedTag!] + performers: [ScrapedPerformer!] +} + +input ScrapedImageInput { + title: String + code: String + details: String + urls: [String!] + date: String +} + input ScraperSourceInput { "Index of the configured stash-box instance to use. 
Should be unset if scraper_id is set" stash_box_index: Int @deprecated(reason: "use stash_box_endpoint") @@ -185,6 +209,15 @@ input ScrapeSingleGalleryInput { gallery_input: ScrapedGalleryInput } +input ScrapeSingleImageInput { + "Instructs to query by string" + query: String + "Instructs to query by image id" + image_id: ID + "Instructs to query by image fragment" + image_input: ScrapedImageInput +} + input ScrapeSingleMovieInput { "Instructs to query by string" query: String diff --git a/internal/api/resolver_mutation_image.go b/internal/api/resolver_mutation_image.go index 0cd5d34874b..56bf5c57317 100644 --- a/internal/api/resolver_mutation_image.go +++ b/internal/api/resolver_mutation_image.go @@ -27,7 +27,7 @@ func (r *mutationResolver) getImage(ctx context.Context, id int) (ret *models.Im return ret, nil } -func (r *mutationResolver) ImageUpdate(ctx context.Context, input ImageUpdateInput) (ret *models.Image, err error) { +func (r *mutationResolver) ImageUpdate(ctx context.Context, input models.ImageUpdateInput) (ret *models.Image, err error) { translator := changesetTranslator{ inputMap: getUpdateInputMap(ctx), } @@ -45,7 +45,7 @@ func (r *mutationResolver) ImageUpdate(ctx context.Context, input ImageUpdateInp return r.getImage(ctx, ret.ID) } -func (r *mutationResolver) ImagesUpdate(ctx context.Context, input []*ImageUpdateInput) (ret []*models.Image, err error) { +func (r *mutationResolver) ImagesUpdate(ctx context.Context, input []*models.ImageUpdateInput) (ret []*models.Image, err error) { inputMaps := getUpdateInputMaps(ctx) // Start the transaction and save the image @@ -88,7 +88,7 @@ func (r *mutationResolver) ImagesUpdate(ctx context.Context, input []*ImageUpdat return newRet, nil } -func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInput, translator changesetTranslator) (*models.Image, error) { +func (r *mutationResolver) imageUpdate(ctx context.Context, input models.ImageUpdateInput, translator changesetTranslator) 
(*models.Image, error) { imageID, err := strconv.Atoi(input.ID) if err != nil { return nil, fmt.Errorf("converting id: %w", err) diff --git a/internal/api/resolver_query_scraper.go b/internal/api/resolver_query_scraper.go index a2974d0d8ec..d1c2a5e9741 100644 --- a/internal/api/resolver_query_scraper.go +++ b/internal/api/resolver_query_scraper.go @@ -126,6 +126,15 @@ func (r *queryResolver) ScrapeGalleryURL(ctx context.Context, url string) (*scra return marshalScrapedGallery(content) } +func (r *queryResolver) ScrapeImageURL(ctx context.Context, url string) (*scraper.ScrapedImage, error) { + content, err := r.scraperCache().ScrapeURL(ctx, url, scraper.ScrapeContentTypeImage) + if err != nil { + return nil, err + } + + return marshalScrapedImage(content) +} + func (r *queryResolver) ScrapeMovieURL(ctx context.Context, url string) (*models.ScrapedMovie, error) { content, err := r.scraperCache().ScrapeURL(ctx, url, scraper.ScrapeContentTypeMovie) if err != nil { @@ -363,6 +372,39 @@ func (r *queryResolver) ScrapeSingleGallery(ctx context.Context, source scraper. 
} } +func (r *queryResolver) ScrapeSingleImage(ctx context.Context, source scraper.Source, input ScrapeSingleImageInput) ([]*scraper.ScrapedImage, error) { + if source.StashBoxIndex != nil { + return nil, ErrNotSupported + } + + if source.ScraperID == nil { + return nil, fmt.Errorf("%w: scraper_id must be set", ErrInput) + } + + var c scraper.ScrapedContent + + switch { + case input.ImageID != nil: + imageID, err := strconv.Atoi(*input.ImageID) + if err != nil { + return nil, fmt.Errorf("%w: image id is not an integer: '%s'", ErrInput, *input.ImageID) + } + c, err = r.scraperCache().ScrapeID(ctx, *source.ScraperID, imageID, scraper.ScrapeContentTypeImage) + if err != nil { + return nil, err + } + return marshalScrapedImages([]scraper.ScrapedContent{c}) + case input.ImageInput != nil: + c, err := r.scraperCache().ScrapeFragment(ctx, *source.ScraperID, scraper.Input{Image: input.ImageInput}) + if err != nil { + return nil, err + } + return marshalScrapedImages([]scraper.ScrapedContent{c}) + default: + return nil, ErrNotImplemented + } +} + func (r *queryResolver) ScrapeSingleMovie(ctx context.Context, source scraper.Source, input ScrapeSingleMovieInput) ([]*models.ScrapedMovie, error) { return nil, ErrNotSupported } diff --git a/internal/api/scraped_content.go b/internal/api/scraped_content.go index 6d900389220..22473440142 100644 --- a/internal/api/scraped_content.go +++ b/internal/api/scraped_content.go @@ -76,6 +76,27 @@ func marshalScrapedGalleries(content []scraper.ScrapedContent) ([]*scraper.Scrap return ret, nil } +func marshalScrapedImages(content []scraper.ScrapedContent) ([]*scraper.ScrapedImage, error) { + var ret []*scraper.ScrapedImage + for _, c := range content { + if c == nil { + // graphql schema requires images to be non-nil + continue + } + + switch g := c.(type) { + case *scraper.ScrapedImage: + ret = append(ret, g) + case scraper.ScrapedImage: + ret = append(ret, &g) + default: + return nil, fmt.Errorf("%w: cannot turn ScrapedContent into 
ScrapedImage", models.ErrConversion) + } + } + + return ret, nil +} + // marshalScrapedMovies converts ScrapedContent into ScrapedMovie. If conversion // fails, an error is returned. func marshalScrapedMovies(content []scraper.ScrapedContent) ([]*models.ScrapedMovie, error) { @@ -129,6 +150,16 @@ func marshalScrapedGallery(content scraper.ScrapedContent) (*scraper.ScrapedGall return g[0], nil } +// marshalScrapedImage will marshal a single scraped image +func marshalScrapedImage(content scraper.ScrapedContent) (*scraper.ScrapedImage, error) { + g, err := marshalScrapedImages([]scraper.ScrapedContent{content}) + if err != nil { + return nil, err + } + + return g[0], nil +} + // marshalScrapedMovie will marshal a single scraped movie func marshalScrapedMovie(content scraper.ScrapedContent) (*models.ScrapedMovie, error) { m, err := marshalScrapedMovies([]scraper.ScrapedContent{content}) diff --git a/pkg/models/image.go b/pkg/models/image.go index 8a8b5ba5047..0ffecbb71af 100644 --- a/pkg/models/image.go +++ b/pkg/models/image.go @@ -53,6 +53,28 @@ type ImageFilterType struct { UpdatedAt *TimestampCriterionInput `json:"updated_at"` } +type ImageUpdateInput struct { + ClientMutationID *string `json:"clientMutationId"` + ID string `json:"id"` + Title *string `json:"title"` + Code *string `json:"code"` + Urls []string `json:"urls"` + Date *string `json:"date"` + Details *string `json:"details"` + Photographer *string `json:"photographer"` + Rating100 *int `json:"rating100"` + Organized *bool `json:"organized"` + SceneIds []string `json:"scene_ids"` + StudioID *string `json:"studio_id"` + TagIds []string `json:"tag_ids"` + PerformerIds []string `json:"performer_ids"` + GalleryIds []string `json:"gallery_ids"` + PrimaryFileID *string `json:"primary_file_id"` + + // deprecated + URL *string `json:"url"` +} + type ImageDestroyInput struct { ID string `json:"id"` DeleteFile *bool `json:"delete_file"` diff --git a/pkg/scraper/action.go b/pkg/scraper/action.go index 
0011441fb69..2a5634da881 100644 --- a/pkg/scraper/action.go +++ b/pkg/scraper/action.go @@ -31,6 +31,7 @@ type scraperActionImpl interface { scrapeSceneByScene(ctx context.Context, scene *models.Scene) (*ScrapedScene, error) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*ScrapedGallery, error) + scrapeImageByImage(ctx context.Context, image *models.Image) (*ScrapedImage, error) } func (c config) getScraper(scraper scraperTypeConfig, client *http.Client, globalConfig GlobalConfig) scraperActionImpl { diff --git a/pkg/scraper/cache.go b/pkg/scraper/cache.go index 498a4ce3996..be1061969e9 100644 --- a/pkg/scraper/cache.go +++ b/pkg/scraper/cache.go @@ -76,11 +76,18 @@ type GalleryFinder interface { models.URLLoader } +type ImageFinder interface { + models.ImageGetter + models.FileLoader + models.URLLoader +} + type Repository struct { TxnManager models.TxnManager SceneFinder SceneFinder GalleryFinder GalleryFinder + ImageFinder ImageFinder TagFinder TagFinder PerformerFinder PerformerFinder MovieFinder match.MovieNamesFinder @@ -92,6 +99,7 @@ func NewRepository(repo models.Repository) Repository { TxnManager: repo.TxnManager, SceneFinder: repo.Scene, GalleryFinder: repo.Gallery, + ImageFinder: repo.Image, TagFinder: repo.Tag, PerformerFinder: repo.Performer, MovieFinder: repo.Movie, @@ -356,6 +364,28 @@ func (c Cache) ScrapeID(ctx context.Context, scraperID string, id int, ty Scrape return nil, fmt.Errorf("scraper %s: %w", scraperID, err) } + if scraped != nil { + ret = scraped + } + + case ScrapeContentTypeImage: + is, ok := s.(imageScraper) + if !ok { + return nil, fmt.Errorf("%w: cannot use scraper %s as a image scraper", ErrNotSupported, scraperID) + } + + scene, err := c.getImage(ctx, id) + if err != nil { + return nil, fmt.Errorf("scraper %s: unable to load image id %v: %w", scraperID, id, err) + } + + // don't assign nil concrete pointer to ret interface, otherwise nil + // detection is harder + scraped, err := is.viaImage(ctx, c.client, 
scene) + if err != nil { + return nil, fmt.Errorf("scraper %s: %w", scraperID, err) + } + if scraped != nil { ret = scraped } @@ -414,3 +444,31 @@ func (c Cache) getGallery(ctx context.Context, galleryID int) (*models.Gallery, } return ret, nil } + +func (c Cache) getImage(ctx context.Context, imageID int) (*models.Image, error) { + var ret *models.Image + r := c.repository + if err := r.WithReadTxn(ctx, func(ctx context.Context) error { + qb := r.ImageFinder + + var err error + ret, err = qb.Find(ctx, imageID) + if err != nil { + return err + } + + if ret == nil { + return fmt.Errorf("image with id %d not found", imageID) + } + + err = ret.LoadFiles(ctx, qb) + if err != nil { + return err + } + + return ret.LoadURLs(ctx, qb) + }); err != nil { + return nil, err + } + return ret, nil +} diff --git a/pkg/scraper/config.go b/pkg/scraper/config.go index 3a0aadf51e8..102a9648060 100644 --- a/pkg/scraper/config.go +++ b/pkg/scraper/config.go @@ -45,6 +45,12 @@ type config struct { // Configuration for querying a gallery by a URL GalleryByURL []*scrapeByURLConfig `yaml:"galleryByURL"` + // Configuration for querying an image by a URL + ImageByURL []*scrapeByURLConfig `yaml:"imageByURL"` + + // Configuration for querying an image by an Image fragment + ImageByFragment *scraperTypeConfig `yaml:"imageByFragment"` + // Configuration for querying a movie by a URL MovieByURL []*scrapeByURLConfig `yaml:"movieByURL"` @@ -289,6 +295,21 @@ func (c config) spec() Scraper { ret.Gallery = &gallery } + image := ScraperSpec{} + if c.ImageByFragment != nil { + image.SupportedScrapes = append(image.SupportedScrapes, ScrapeTypeFragment) + } + if len(c.ImageByURL) > 0 { + image.SupportedScrapes = append(image.SupportedScrapes, ScrapeTypeURL) + for _, v := range c.ImageByURL { + image.Urls = append(image.Urls, v.URL...) 
+ } + } + + if len(image.SupportedScrapes) > 0 { + ret.Image = &image + } + movie := ScraperSpec{} if len(c.MovieByURL) > 0 { movie.SupportedScrapes = append(movie.SupportedScrapes, ScrapeTypeURL) @@ -314,6 +335,8 @@ func (c config) supports(ty ScrapeContentType) bool { return c.GalleryByFragment != nil || len(c.GalleryByURL) > 0 case ScrapeContentTypeMovie: return len(c.MovieByURL) > 0 + case ScrapeContentTypeImage: + return c.ImageByFragment != nil || len(c.ImageByURL) > 0 } panic("Unhandled ScrapeContentType") @@ -339,6 +362,12 @@ func (c config) matchesURL(url string, ty ScrapeContentType) bool { return true } } + case ScrapeContentTypeImage: + for _, scraper := range c.ImageByURL { + if scraper.matchesURL(url) { + return true + } + } case ScrapeContentTypeMovie: for _, scraper := range c.MovieByURL { if scraper.matchesURL(url) { diff --git a/pkg/scraper/group.go b/pkg/scraper/group.go index bbf0a680adb..1fdb5ed35f1 100644 --- a/pkg/scraper/group.go +++ b/pkg/scraper/group.go @@ -33,6 +33,9 @@ func (g group) fragmentScraper(input Input) *scraperTypeConfig { case input.Gallery != nil: // TODO - this should be galleryByQueryFragment return g.config.GalleryByFragment + case input.Image != nil: + // TODO - this should be imageByImageFragment + return g.config.ImageByFragment case input.Scene != nil: return g.config.SceneByQueryFragment } @@ -75,6 +78,15 @@ func (g group) viaGallery(ctx context.Context, client *http.Client, gallery *mod return s.scrapeGalleryByGallery(ctx, gallery) } +func (g group) viaImage(ctx context.Context, client *http.Client, gallery *models.Image) (*ScrapedImage, error) { + if g.config.ImageByFragment == nil { + return nil, ErrNotSupported + } + + s := g.config.getScraper(*g.config.ImageByFragment, client, g.globalConf) + return s.scrapeImageByImage(ctx, gallery) +} + func loadUrlCandidates(c config, ty ScrapeContentType) []*scrapeByURLConfig { switch ty { case ScrapeContentTypePerformer: @@ -85,6 +97,8 @@ func loadUrlCandidates(c config, ty 
ScrapeContentType) []*scrapeByURLConfig { return c.MovieByURL case ScrapeContentTypeGallery: return c.GalleryByURL + case ScrapeContentTypeImage: + return c.ImageByURL } panic("loadUrlCandidates: unreachable") diff --git a/pkg/scraper/image.go b/pkg/scraper/image.go index 5757bc9b383..09865ce7045 100644 --- a/pkg/scraper/image.go +++ b/pkg/scraper/image.go @@ -11,6 +11,36 @@ import ( "github.com/stashapp/stash/pkg/utils" ) +type ScrapedImage struct { + Title *string `json:"title"` + Code *string `json:"code"` + Details *string `json:"details"` + Photographer *string `json:"photographer"` + Director *string `json:"director"` + URL *string `json:"url"` + URLs []string `json:"urls"` + Date *string `json:"date"` + // This should be a base64 encoded data URL + Scene *string `json:"scene"` + File *models.SceneFileType `json:"file"` + Studio *models.ScrapedStudio `json:"studio"` + Tags []*models.ScrapedTag `json:"tags"` + Performers []*models.ScrapedPerformer `json:"performers"` + Movies []*models.ScrapedMovie `json:"movies"` + RemoteSiteID *string `json:"remote_site_id"` + Fingerprints []*models.StashBoxFingerprint `json:"fingerprints"` +} + +func (ScrapedImage) IsScrapedContent() {} + +type ScrapedImageInput struct { + Title *string `json:"title"` + Code *string `json:"code"` + Details *string `json:"details"` + URLs []string `json:"urls"` + Date *string `json:"date"` +} + func setPerformerImage(ctx context.Context, client *http.Client, p *models.ScrapedPerformer, globalConfig GlobalConfig) error { if p.Image == nil || !strings.HasPrefix(*p.Image, "http") { // nothing to do diff --git a/pkg/scraper/json.go b/pkg/scraper/json.go index 1d6358a921f..5f5b6908b61 100644 --- a/pkg/scraper/json.go +++ b/pkg/scraper/json.go @@ -90,6 +90,8 @@ func (s *jsonScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeCont return scraper.scrapeGallery(ctx, q) case ScrapeContentTypeMovie: return scraper.scrapeMovie(ctx, q) + case ScrapeContentTypeImage: + return 
scraper.scrapeImage(ctx, q) } return nil, ErrNotSupported @@ -207,6 +209,30 @@ func (s *jsonScraper) scrapeByFragment(ctx context.Context, input Input) (Scrape return scraper.scrapeScene(ctx, q) } +func (s *jsonScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*ScrapedImage, error) { + // construct the URL + queryURL := queryURLParametersFromImage(image) + if s.scraper.QueryURLReplacements != nil { + queryURL.applyReplacements(s.scraper.QueryURLReplacements) + } + url := queryURL.constructURL(s.scraper.QueryURL) + + scraper := s.getJsonScraper() + + if scraper == nil { + return nil, errors.New("json scraper with name " + s.scraper.Scraper + " not found in config") + } + + doc, err := s.loadURL(ctx, url) + + if err != nil { + return nil, err + } + + q := s.getJsonQuery(doc) + return scraper.scrapeImage(ctx, q) +} + func (s *jsonScraper) scrapeGalleryByGallery(ctx context.Context, gallery *models.Gallery) (*ScrapedGallery, error) { // construct the URL queryURL := queryURLParametersFromGallery(gallery) diff --git a/pkg/scraper/mapped.go b/pkg/scraper/mapped.go index 3d4ee463853..81e677774ed 100644 --- a/pkg/scraper/mapped.go +++ b/pkg/scraper/mapped.go @@ -181,6 +181,15 @@ type mappedGalleryScraperConfig struct { Performers mappedConfig `yaml:"Performers"` Studio mappedConfig `yaml:"Studio"` } + +type mappedImageScraperConfig struct { + mappedConfig + + Tags mappedConfig `yaml:"Tags"` + Performers mappedConfig `yaml:"Performers"` + Studio mappedConfig `yaml:"Studio"` +} + type _mappedGalleryScraperConfig mappedGalleryScraperConfig func (s *mappedGalleryScraperConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { @@ -766,6 +775,7 @@ type mappedScraper struct { Common commonMappedConfig `yaml:"common"` Scene *mappedSceneScraperConfig `yaml:"scene"` Gallery *mappedGalleryScraperConfig `yaml:"gallery"` + Image *mappedImageScraperConfig `yaml:"image"` Performer *mappedPerformerScraperConfig `yaml:"performer"` Movie *mappedMovieScraperConfig 
`yaml:"movie"` } @@ -997,6 +1007,69 @@ func (s mappedScraper) scrapeScene(ctx context.Context, q mappedQuery) (*Scraped return nil, nil } +func (s mappedScraper) scrapeImage(ctx context.Context, q mappedQuery) (*ScrapedImage, error) { + var ret ScrapedImage + + imageScraperConfig := s.Image + if imageScraperConfig == nil { + return nil, nil + } + + imageMap := imageScraperConfig.mappedConfig + + imagePerformersMap := imageScraperConfig.Performers + imageTagsMap := imageScraperConfig.Tags + imageStudioMap := imageScraperConfig.Studio + + logger.Debug(`Processing image:`) + results := imageMap.process(ctx, q, s.Common) + + // now apply the performers and tags + if imagePerformersMap != nil { + logger.Debug(`Processing image performers:`) + performerResults := imagePerformersMap.process(ctx, q, s.Common) + + for _, p := range performerResults { + performer := &models.ScrapedPerformer{} + p.apply(performer) + ret.Performers = append(ret.Performers, performer) + } + } + + if imageTagsMap != nil { + logger.Debug(`Processing image tags:`) + tagResults := imageTagsMap.process(ctx, q, s.Common) + + for _, p := range tagResults { + tag := &models.ScrapedTag{} + p.apply(tag) + ret.Tags = append(ret.Tags, tag) + } + } + + if imageStudioMap != nil { + logger.Debug(`Processing gallery studio:`) + studioResults := imageStudioMap.process(ctx, q, s.Common) + + if len(studioResults) > 0 { + studio := &models.ScrapedStudio{} + studioResults[0].apply(studio) + ret.Studio = studio + } + } + + // if no basic fields are populated, and no relationships, then return nil + if len(results) == 0 && len(ret.Performers) == 0 && len(ret.Tags) == 0 && ret.Studio == nil { + return nil, nil + } + + if len(results) > 0 { + results[0].apply(&ret) + } + + return &ret, nil +} + func (s mappedScraper) scrapeGallery(ctx context.Context, q mappedQuery) (*ScrapedGallery, error) { var ret ScrapedGallery diff --git a/pkg/scraper/postprocessing.go b/pkg/scraper/postprocessing.go index 0cf9b5a17fb..a9b5b85dc7b 
100644 --- a/pkg/scraper/postprocessing.go +++ b/pkg/scraper/postprocessing.go @@ -32,6 +32,8 @@ func (c Cache) postScrape(ctx context.Context, content ScrapedContent) (ScrapedC } case ScrapedGallery: return c.postScrapeGallery(ctx, v) + case ScrapedImage: + return c.postScrapeImage(ctx, v) case *models.ScrapedMovie: if v != nil { return c.postScrapeMovie(ctx, *v) @@ -211,6 +213,49 @@ func (c Cache) postScrapeGallery(ctx context.Context, g ScrapedGallery) (Scraped return g, nil } +func (c Cache) postScrapeImage(ctx context.Context, g ScrapedImage) (ScrapedContent, error) { + // set the URL/URLs field + if g.URL == nil && len(g.URLs) > 0 { + g.URL = &g.URLs[0] + } + if g.URL != nil && len(g.URLs) == 0 { + g.URLs = []string{*g.URL} + } + + r := c.repository + if err := r.WithReadTxn(ctx, func(ctx context.Context) error { + pqb := r.PerformerFinder + tqb := r.TagFinder + sqb := r.StudioFinder + + for _, p := range g.Performers { + err := match.ScrapedPerformer(ctx, pqb, p, nil) + if err != nil { + return err + } + } + + tags, err := postProcessTags(ctx, tqb, g.Tags) + if err != nil { + return err + } + g.Tags = tags + + if g.Studio != nil { + err := match.ScrapedStudio(ctx, sqb, g.Studio, nil) + if err != nil { + return err + } + } + + return nil + }); err != nil { + return nil, err + } + + return g, nil +} + func postProcessTags(ctx context.Context, tqb models.TagQueryer, scrapedTags []*models.ScrapedTag) ([]*models.ScrapedTag, error) { var ret []*models.ScrapedTag diff --git a/pkg/scraper/query_url.go b/pkg/scraper/query_url.go index 4bae01c06a8..5cb454be437 100644 --- a/pkg/scraper/query_url.go +++ b/pkg/scraper/query_url.go @@ -73,6 +73,24 @@ func queryURLParametersFromGallery(gallery *models.Gallery) queryURLParameters { return ret } +func queryURLParametersFromImage(image *models.Image) queryURLParameters { + ret := make(queryURLParameters) + ret["checksum"] = image.Checksum + + if image.Path != "" { + ret["filename"] = filepath.Base(image.Path) + } + if 
image.Title != "" { + ret["title"] = image.Title + } + + if len(image.URLs.List()) > 0 { + ret["url"] = image.URLs.List()[0] + } + + return ret +} + func (p queryURLParameters) applyReplacements(r queryURLReplacements) { for k, v := range p { rpl, found := r[k] diff --git a/pkg/scraper/scraper.go b/pkg/scraper/scraper.go index 23ad411bdb0..e1cef2af281 100644 --- a/pkg/scraper/scraper.go +++ b/pkg/scraper/scraper.go @@ -33,6 +33,7 @@ const ( ScrapeContentTypeMovie ScrapeContentType = "MOVIE" ScrapeContentTypePerformer ScrapeContentType = "PERFORMER" ScrapeContentTypeScene ScrapeContentType = "SCENE" + ScrapeContentTypeImage ScrapeContentType = "IMAGE" ) var AllScrapeContentType = []ScrapeContentType{ @@ -40,11 +41,12 @@ var AllScrapeContentType = []ScrapeContentType{ ScrapeContentTypeMovie, ScrapeContentTypePerformer, ScrapeContentTypeScene, + ScrapeContentTypeImage, } func (e ScrapeContentType) IsValid() bool { switch e { - case ScrapeContentTypeGallery, ScrapeContentTypeMovie, ScrapeContentTypePerformer, ScrapeContentTypeScene: + case ScrapeContentTypeGallery, ScrapeContentTypeMovie, ScrapeContentTypePerformer, ScrapeContentTypeScene, ScrapeContentTypeImage: return true } return false @@ -80,6 +82,8 @@ type Scraper struct { Scene *ScraperSpec `json:"scene"` // Details for gallery scraper Gallery *ScraperSpec `json:"gallery"` + // Details for image scraper + Image *ScraperSpec `json:"image"` // Details for movie scraper Movie *ScraperSpec `json:"movie"` } @@ -155,6 +159,7 @@ type Input struct { Performer *ScrapedPerformerInput Scene *ScrapedSceneInput Gallery *ScrapedGalleryInput + Image *ScrapedImageInput } // populateURL populates the URL field of the input based on the @@ -213,6 +218,14 @@ type sceneScraper interface { viaScene(ctx context.Context, client *http.Client, scene *models.Scene) (*ScrapedScene, error) } +// imageScraper is a scraper which supports image scrapes with +// image data as the input. 
+type imageScraper interface { + scraper + + viaImage(ctx context.Context, client *http.Client, image *models.Image) (*ScrapedImage, error) +} + // galleryScraper is a scraper which supports gallery scrapes with // gallery data as the input. type galleryScraper interface { diff --git a/pkg/scraper/script.go b/pkg/scraper/script.go index bfb03ee3aad..f979ce96aba 100644 --- a/pkg/scraper/script.go +++ b/pkg/scraper/script.go @@ -199,6 +199,10 @@ func (s *scriptScraper) scrape(ctx context.Context, input string, ty ScrapeConte var movie *models.ScrapedMovie err := s.runScraperScript(ctx, input, &movie) return movie, err + case ScrapeContentTypeImage: + var image *ScrapedImage + err := s.runScraperScript(ctx, input, &image) + return image, err } return nil, ErrNotSupported @@ -232,6 +236,20 @@ func (s *scriptScraper) scrapeGalleryByGallery(ctx context.Context, gallery *mod return ret, err } +func (s *scriptScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*ScrapedImage, error) { + inString, err := json.Marshal(imageToUpdateInput(image)) + + if err != nil { + return nil, err + } + + var ret *ScrapedImage + + err = s.runScraperScript(ctx, string(inString), &ret) + + return ret, err +} + func handleScraperStderr(name string, scraperOutputReader io.ReadCloser) { const scraperPrefix = "[Scrape / %s] " diff --git a/pkg/scraper/stash.go b/pkg/scraper/stash.go index b7f483667a3..a28ce08b1ab 100644 --- a/pkg/scraper/stash.go +++ b/pkg/scraper/stash.go @@ -307,6 +307,10 @@ func (s *stashScraper) scrapeGalleryByGallery(ctx context.Context, gallery *mode return &ret, nil } +func (s *stashScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*ScrapedImage, error) { + return nil, ErrNotSupported +} + func (s *stashScraper) scrapeByURL(_ context.Context, _ string, _ ScrapeContentType) (ScrapedContent, error) { return nil, ErrNotSupported } @@ -369,3 +373,26 @@ func galleryToUpdateInput(gallery *models.Gallery) models.GalleryUpdateInput { Date: 
dateToStringPtr(gallery.Date), } } + +func imageToUpdateInput(gallery *models.Image) models.ImageUpdateInput { + dateToStringPtr := func(s *models.Date) *string { + if s != nil { + v := s.String() + return &v + } + + return nil + } + + // fallback to file basename if title is empty + title := gallery.GetTitle() + urls := gallery.URLs.List() + + return models.ImageUpdateInput{ + ID: strconv.Itoa(gallery.ID), + Title: &title, + Details: &gallery.Details, + Urls: urls, + Date: dateToStringPtr(gallery.Date), + } +} diff --git a/pkg/scraper/xpath.go b/pkg/scraper/xpath.go index 29a4b0a1926..eee23423f98 100644 --- a/pkg/scraper/xpath.go +++ b/pkg/scraper/xpath.go @@ -71,6 +71,8 @@ func (s *xpathScraper) scrapeByURL(ctx context.Context, url string, ty ScrapeCon return scraper.scrapeGallery(ctx, q) case ScrapeContentTypeMovie: return scraper.scrapeMovie(ctx, q) + case ScrapeContentTypeImage: + return scraper.scrapeImage(ctx, q) } return nil, ErrNotSupported @@ -210,6 +212,30 @@ func (s *xpathScraper) scrapeGalleryByGallery(ctx context.Context, gallery *mode return scraper.scrapeGallery(ctx, q) } +func (s *xpathScraper) scrapeImageByImage(ctx context.Context, image *models.Image) (*ScrapedImage, error) { + // construct the URL + queryURL := queryURLParametersFromImage(image) + if s.scraper.QueryURLReplacements != nil { + queryURL.applyReplacements(s.scraper.QueryURLReplacements) + } + url := queryURL.constructURL(s.scraper.QueryURL) + + scraper := s.getXpathScraper() + + if scraper == nil { + return nil, errors.New("xpath scraper with name " + s.scraper.Scraper + " not found in config") + } + + doc, err := s.loadURL(ctx, url) + + if err != nil { + return nil, err + } + + q := s.getXPathQuery(doc) + return scraper.scrapeImage(ctx, q) +} + func (s *xpathScraper) loadURL(ctx context.Context, url string) (*html.Node, error) { r, err := loadURL(ctx, url, s.client, s.config, s.globalConfig) if err != nil { diff --git a/ui/v2.5/.yarnrc.yml b/ui/v2.5/.yarnrc.yml new file mode 
100644 index 00000000000..3186f3f0795 --- /dev/null +++ b/ui/v2.5/.yarnrc.yml @@ -0,0 +1 @@ +nodeLinker: node-modules diff --git a/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx b/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx index 3a3daf70841..01b0f9d7c04 100644 --- a/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx +++ b/ui/v2.5/src/components/Images/ImageDetails/ImageEditPanel.tsx @@ -1,5 +1,12 @@ import React, { useEffect, useState } from "react"; -import { Button, Form, Col, Row } from "react-bootstrap"; +import { + Button, + Form, + Col, + Row, + Dropdown, + DropdownButton, +} from "react-bootstrap"; import { FormattedMessage, useIntl } from "react-intl"; import Mousetrap from "mousetrap"; import * as GQL from "src/core/generated-graphql"; @@ -22,6 +29,16 @@ import { PerformerSelect, } from "src/components/Performers/PerformerSelect"; import { formikUtils } from "src/utils/form"; +import { Icon } from "../../Shared/Icon"; +import { faSyncAlt } from "@fortawesome/free-solid-svg-icons"; +import { + mutateReloadScrapers, + queryScrapeImage, + queryScrapeImageURL, + useListImageScrapers, +} from "../../../core/StashService"; +import { ImageScrapeDialog } from "./ImageScrapeDialog"; +import { ScrapedImageDataFragment } from "src/core/generated-graphql"; interface IProps { image: GQL.ImageDataFragment; @@ -46,6 +63,10 @@ export const ImageEditPanel: React.FC = ({ const [performers, setPerformers] = useState([]); + const Scrapers = useListImageScrapers(); + const [queryableScrapers, setQueryableScrapers] = useState([]); + const [scrapedImage, setScrapedImage] = useState(); + const schema = yup.object({ title: yup.string().ensure(), code: yup.string().ensure(), @@ -121,6 +142,14 @@ export const ImageEditPanel: React.FC = ({ } }); + useEffect(() => { + const newQueryableScrapers = (Scrapers?.data?.listScrapers ?? 
[]).filter( + (s) => s.image?.supported_scrapes.includes(GQL.ScrapeType.Fragment) + ); + + setQueryableScrapers(newQueryableScrapers); + }, [Scrapers]); + async function onSave(input: InputValues) { setIsLoading(true); try { @@ -135,6 +164,127 @@ export const ImageEditPanel: React.FC = ({ setIsLoading(false); } + async function onScrapeClicked(scraper: GQL.Scraper) { + if (!image || !image.id) return; + + setIsLoading(true); + try { + const result = await queryScrapeImage(scraper.id, image.id); + if (!result.data || !result.data.scrapeSingleImage?.length) { + Toast.success("No images found"); + return; + } + setScrapedImage(result.data.scrapeSingleImage[0]); + } catch (e) { + Toast.error(e); + } finally { + setIsLoading(false); + } + } + + function urlScrapable(scrapedUrl: string): boolean { + return (Scrapers?.data?.listScrapers ?? []).some((s) => + (s?.image?.urls ?? []).some((u) => scrapedUrl.includes(u)) + ); + } + + function updateImageFromScrapedGallery( + imageData: GQL.ScrapedImageDataFragment + ) { + if (imageData.title) { + formik.setFieldValue("title", imageData.title); + } + + if (imageData.code) { + formik.setFieldValue("code", imageData.code); + } + + if (imageData.details) { + formik.setFieldValue("details", imageData.details); + } + + if (imageData.photographer) { + formik.setFieldValue("photographer", imageData.photographer); + } + + if (imageData.date) { + formik.setFieldValue("date", imageData.date); + } + + if (imageData.urls) { + formik.setFieldValue("urls", imageData.urls); + } + + if (imageData.studio?.stored_id) { + formik.setFieldValue("studio_id", imageData.studio.stored_id); + } + + if (imageData.performers?.length) { + const idPerfs = imageData.performers.filter((p) => { + return p.stored_id !== undefined && p.stored_id !== null; + }); + + if (idPerfs.length > 0) { + onSetPerformers( + idPerfs.map((p) => { + return { + id: p.stored_id!, + name: p.name ?? 
"", + alias_list: [], + }; + }) + ); + } + } + + if (imageData?.tags?.length) { + const idTags = imageData.tags.filter((t) => { + return t.stored_id !== undefined && t.stored_id !== null; + }); + + if (idTags.length > 0) { + const newIds = idTags.map((t) => t.stored_id); + formik.setFieldValue("tag_ids", newIds as string[]); + } + } + } + + async function onReloadScrapers() { + setIsLoading(true); + try { + await mutateReloadScrapers(); + } catch (e) { + Toast.error(e); + } finally { + setIsLoading(false); + } + } + + async function onScrapeDialogClosed(data?: ScrapedImageDataFragment) { + if (data) { + updateImageFromScrapedGallery(data); + } + setScrapedImage(undefined); + } + + async function onScrapeImageURL(url: string) { + if (!url) { + return; + } + setIsLoading(true); + try { + const result = await queryScrapeImageURL(url); + if (!result || !result.data || !result.data.scrapeImageURL) { + return; + } + setScrapedImage(result.data.scrapeImageURL); + } catch (e) { + Toast.error(e); + } finally { + setIsLoading(false); + } + } + if (isLoading) return ; const splitProps = { @@ -226,6 +376,58 @@ export const ImageEditPanel: React.FC = ({ return renderInputField("details", "textarea", "details", props); } + function maybeRenderScrapeDialog() { + if (!scrapedImage) { + return; + } + + const currentImage = { + id: image.id!, + ...formik.values, + }; + + return ( + { + onScrapeDialogClosed(data); + }} + /> + ); + } + + function renderScraperMenu() { + /* + if (isNew) { + return; + } + */ + + return ( + + {queryableScrapers.map((s) => ( + onScrapeClicked(s)}> + {s.name} + + ))} + onReloadScrapers()}> + + + + + + + + + ); + } + return (
= ({ message={intl.formatMessage({ id: "dialogs.unsaved_changes" })} /> + {maybeRenderScrapeDialog()}
@@ -252,13 +455,14 @@ export const ImageEditPanel: React.FC = ({
+
{renderScraperMenu()}
{renderInputField("title")} {renderInputField("code", "text", "scene_code")} - {renderURLListField("urls")} + {renderURLListField("urls", onScrapeImageURL, urlScrapable)} {renderDateField("date")} {renderInputField("photographer")} diff --git a/ui/v2.5/src/components/Images/ImageDetails/ImageScrapeDialog.tsx b/ui/v2.5/src/components/Images/ImageDetails/ImageScrapeDialog.tsx new file mode 100644 index 00000000000..d5274c3d3b0 --- /dev/null +++ b/ui/v2.5/src/components/Images/ImageDetails/ImageScrapeDialog.tsx @@ -0,0 +1,276 @@ +import React, { useState } from "react"; +import { useIntl } from "react-intl"; +import * as GQL from "src/core/generated-graphql"; +import { + ScrapeDialog, + ScrapedInputGroupRow, + ScrapedStringListRow, + ScrapedTextAreaRow, +} from "src/components/Shared/ScrapeDialog/ScrapeDialog"; +import clone from "lodash-es/clone"; +import { + ObjectListScrapeResult, + ScrapeResult, +} from "src/components/Shared/ScrapeDialog/scrapeResult"; +import { + ScrapedPerformersRow, + ScrapedStudioRow, + ScrapedTagsRow, +} from "src/components/Shared/ScrapeDialog/ScrapedObjectsRow"; +import { sortStoredIdObjects } from "src/utils/data"; +import { Performer } from "src/components/Performers/PerformerSelect"; +import { + useCreateScrapedPerformer, + useCreateScrapedStudio, + useCreateScrapedTag, +} from "src/components/Shared/ScrapeDialog/createObjects"; +import { uniq } from "lodash-es"; + +interface IImageScrapeDialogProps { + image: Partial; + imagePerformer: Performer[]; + scraped: GQL.ScrapedImage; + + onClose: (scrapedImage?: GQL.ScrapedImage) => void; +} + +interface IHasStoredID { + stored_id?: string | null; +} + +export const ImageScrapeDialog: React.FC = ({ + image, + imagePerformer, + scraped, + onClose, +}) => { + const intl = useIntl(); + const [title, setTitle] = useState>( + new ScrapeResult(image.title, scraped.title) + ); + const [code, setCode] = useState>( + new ScrapeResult(image.code, scraped.code) + ); + const [urls, setURLs] = useState>( 
+ new ScrapeResult( + image.urls, + scraped.urls + ? uniq((image.urls ?? []).concat(scraped.urls ?? [])) + : undefined + ) + ); + const [date, setDate] = useState>( + new ScrapeResult(image.date, scraped.date) + ); + + const [photographer, setPhotographer] = useState>( + new ScrapeResult(image.photographer, scraped.photographer) + ); + + const [studio, setStudio] = useState>( + new ScrapeResult(image.studio_id, scraped.studio?.stored_id) + ); + const [newStudio, setNewStudio] = useState( + scraped.studio && !scraped.studio.stored_id ? scraped.studio : undefined + ); + + function mapStoredIdObjects( + scrapedObjects?: IHasStoredID[] + ): string[] | undefined { + if (!scrapedObjects) { + return undefined; + } + const ret = scrapedObjects + .map((p) => p.stored_id) + .filter((p) => { + return p !== undefined && p !== null; + }) as string[]; + + if (ret.length === 0) { + return undefined; + } + + // sort by id numerically + ret.sort((a, b) => { + return parseInt(a, 10) - parseInt(b, 10); + }); + + return ret; + } + + function sortIdList(idList?: string[] | null) { + if (!idList) { + return; + } + + const ret = clone(idList); + // sort by id numerically + ret.sort((a, b) => { + return parseInt(a, 10) - parseInt(b, 10); + }); + + return ret; + } + + const [performers, setPerformers] = useState< + ObjectListScrapeResult + >( + new ObjectListScrapeResult( + sortStoredIdObjects( + imagePerformer.map((p) => ({ + stored_id: p.id, + name: p.name, + })) + ), + sortStoredIdObjects(scraped.performers ?? undefined) + ) + ); + const [newPerformers, setNewPerformers] = useState( + scraped.performers?.filter((t) => !t.stored_id) ?? [] + ); + + const [tags, setTags] = useState>( + new ScrapeResult( + sortIdList(image.tag_ids), + mapStoredIdObjects(scraped.tags ?? undefined) + ) + ); + const [newTags, setNewTags] = useState( + scraped.tags?.filter((t) => !t.stored_id) ?? 
[] + ); + + const [details, setDetails] = useState>( + new ScrapeResult(image.details, scraped.details) + ); + + const createNewStudio = useCreateScrapedStudio({ + scrapeResult: studio, + setScrapeResult: setStudio, + setNewObject: setNewStudio, + }); + + const createNewPerformer = useCreateScrapedPerformer({ + scrapeResult: performers, + setScrapeResult: setPerformers, + newObjects: newPerformers, + setNewObjects: setNewPerformers, + }); + + const createNewTag = useCreateScrapedTag({ + scrapeResult: tags, + setScrapeResult: setTags, + newObjects: newTags, + setNewObjects: setNewTags, + }); + + // don't show the dialog if nothing was scraped + if ( + [title, code, urls, date, photographer, studio, performers, tags, details].every( + (r) => !r.scraped + ) && + !newStudio && + newPerformers.length === 0 && + newTags.length === 0 + ) { + onClose(); + return <>; + } + + function makeNewScrapedItem(): GQL.ScrapedImageDataFragment { + const newStudioValue = studio.getNewValue(); + + return { + title: title.getNewValue(), + code: code.getNewValue(), + urls: urls.getNewValue(), + date: date.getNewValue(), + photographer: photographer.getNewValue(), + studio: newStudioValue + ? { + stored_id: newStudioValue, + name: "", + } + : undefined, + performers: performers.getNewValue(), + tags: tags.getNewValue()?.map((m) => { + return { + stored_id: m, + name: "", + }; + }), + details: details.getNewValue(), + }; + } + + function renderScrapeRows() { + return ( + <> + setTitle(value)} + /> + setCode(value)} + /> + setURLs(value)} + /> + setDate(value)} + /> + setPhotographer(value)} + /> + setStudio(value)} + newStudio={newStudio} + onCreateNew={createNewStudio} + /> + setPerformers(value)} + newObjects={newPerformers} + onCreateNew={createNewPerformer} + /> + setTags(value)} + newObjects={newTags} + onCreateNew={createNewTag} + /> + setDetails(value)} + /> + + ); + } + + return ( + { + onClose(apply ? 
makeNewScrapedItem() : undefined); + }} + /> + ); +}; diff --git a/ui/v2.5/src/components/Settings/SettingsScrapingPanel.tsx b/ui/v2.5/src/components/Settings/SettingsScrapingPanel.tsx index 9aef6942bb3..bfc914ca827 100644 --- a/ui/v2.5/src/components/Settings/SettingsScrapingPanel.tsx +++ b/ui/v2.5/src/components/Settings/SettingsScrapingPanel.tsx @@ -7,6 +7,7 @@ import { useListPerformerScrapers, useListSceneScrapers, useListGalleryScrapers, + useListImageScrapers, } from "src/core/StashService"; import { useToast } from "src/hooks/Toast"; import TextUtils from "src/utils/text"; @@ -87,6 +88,8 @@ export const SettingsScrapingPanel: React.FC = () => { useListSceneScrapers(); const { data: galleryScrapers, loading: loadingGalleries } = useListGalleryScrapers(); + const { data: imageScrapers, loading: loadingImages } = + useListImageScrapers(); const { data: movieScrapers, loading: loadingMovies } = useListMovieScrapers(); @@ -166,6 +169,28 @@ export const SettingsScrapingPanel: React.FC = () => { ); } + function renderImageScrapeTypes(types: ScrapeType[]) { + const typeStrings = types.map((t) => { + switch (t) { + case ScrapeType.Fragment: + return intl.formatMessage( + { id: "config.scraping.entity_metadata" }, + { entityType: intl.formatMessage({ id: "image" }) } + ); + default: + return t; + } + }); + + return ( +
    + {typeStrings.map((t) => ( +
  • {t}
  • + ))} +
+ ); + } + function renderMovieScrapeTypes(types: ScrapeType[]) { const typeStrings = types.map((t) => { switch (t) { @@ -232,6 +257,26 @@ export const SettingsScrapingPanel: React.FC = () => { ); } + function renderImageScrapers() { + const elements = (imageScrapers?.listScrapers ?? []).map((scraper) => ( + + {scraper.name} + + {renderImageScrapeTypes(scraper.image?.supported_scrapes ?? [])} + + {renderURLs(scraper.image?.urls ?? [])} + + )); + + return renderTable( + intl.formatMessage( + { id: "config.scraping.entity_scrapers" }, + { entityType: intl.formatMessage({ id: "image" }) } + ), + elements + ); + } + function renderPerformerScrapers() { const elements = (performerScrapers?.listScrapers ?? []).map((scraper) => ( @@ -304,6 +349,7 @@ export const SettingsScrapingPanel: React.FC = () => { loading || loadingScenes || loadingGalleries || + loadingImages || loadingPerformers || loadingMovies ) @@ -368,6 +414,7 @@ export const SettingsScrapingPanel: React.FC = () => {
{renderSceneScrapers()} {renderGalleryScrapers()} + {renderImageScrapers()} {renderPerformerScrapers()} {renderMovieScrapers()}
diff --git a/ui/v2.5/src/core/StashService.ts b/ui/v2.5/src/core/StashService.ts index 15ee2868273..8f49bef3f81 100644 --- a/ui/v2.5/src/core/StashService.ts +++ b/ui/v2.5/src/core/StashService.ts @@ -1929,6 +1929,8 @@ export const queryScrapeMovieURL = (url: string) => export const useListGalleryScrapers = () => GQL.useListGalleryScrapersQuery(); +export const useListImageScrapers = () => GQL.useListImageScrapersQuery(); + export const queryScrapeGallery = (scraperId: string, galleryId: string) => client.query({ query: GQL.ScrapeSingleGalleryDocument, @@ -1950,6 +1952,27 @@ export const queryScrapeGalleryURL = (url: string) => fetchPolicy: "network-only", }); +export const queryScrapeImage = (scraperId: string, imageId: string) => + client.query({ + query: GQL.ScrapeSingleImageDocument, + variables: { + source: { + scraper_id: scraperId, + }, + input: { + image_id: imageId, + }, + }, + fetchPolicy: "network-only", + }); + +export const queryScrapeImageURL = (url: string) => + client.query({ + query: GQL.ScrapeImageUrlDocument, + variables: { url }, + fetchPolicy: "network-only", + }); + export const mutateSubmitStashBoxSceneDraft = ( input: GQL.StashBoxDraftSubmissionInput ) =>