This repository has been archived by the owner on Oct 2, 2022. It is now read-only.

Commit

Better errors, fixed subtitle issue, and re-added .circleci folder
turtletowerz committed Nov 21, 2019
1 parent 3c9007d commit c43ce66
Showing 10 changed files with 88 additions and 98 deletions.
11 changes: 11 additions & 0 deletions .circleci/config.yml
@@ -0,0 +1,11 @@
version: 2
jobs:
build:
working_directory: /go/src/s32x.com/anirip
docker:
- image: circleci/golang:1.11.3
steps:
- checkout
- run:
name: Run unit tests
command: make test
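
The restored CI job checks out the repository and runs `make test`; assuming that target simply wraps `go test ./...`, a placeholder unit test of the kind it would pick up (not part of this commit) could look like:

package common

import "testing"

// TestNewHTTPClient is a hypothetical test exercising the constructor changed
// below; the repository's real tests may differ.
func TestNewHTTPClient(t *testing.T) {
	c := NewHTTPClient()
	if c == nil || c.Client == nil || c.Client.Jar == nil {
		t.Fatal("expected a client with a cookie jar attached")
	}
}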
22 changes: 0 additions & 22 deletions common/error.go

This file was deleted.
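
The deleted NewError helper paired a message with an inner error; the rest of this commit replaces those call sites with fmt.Errorf and the %w verb. A minimal, self-contained sketch of that wrapping pattern (assuming Go 1.13+ for %w and errors.Is):

package main

import (
	"errors"
	"fmt"
)

// errRegion stands in for any sentinel error a caller might want to detect.
var errRegion = errors.New("video not available in your region")

func fetchEpisode() error { return errRegion }

func main() {
	if err := fetchEpisode(); err != nil {
		// Wrap with context, as the new code does throughout this commit.
		wrapped := fmt.Errorf("getting episode page: %w", err)
		fmt.Println(wrapped)                       // getting episode page: video not available in your region
		fmt.Println(errors.Is(wrapped, errRegion)) // true, because %w preserves the error chain
	}
}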

20 changes: 10 additions & 10 deletions common/httpclient.go
@@ -1,7 +1,6 @@
package common /* import "s32x.com/anirip/common" */

import (
"errors"
"fmt"
"io"
"io/ioutil"
@@ -32,14 +31,15 @@ type HTTPClient struct {

// NewHTTPClient generates a new HTTPClient Requester that
// contains a random user-agent to emulate browser requests
func NewHTTPClient() (*HTTPClient, error) {
func NewHTTPClient() *HTTPClient {
// Create the client and attach a cookie jar
client := &http.Client{}
client.Jar, _ = cookiejar.New(nil)

return &HTTPClient{
Client: client,
UserAgent: randomUA(),
}, nil
}
}

// randomUA retrieves a list of user-agents and returns a
@@ -81,7 +81,7 @@ func (c *HTTPClient) Get(url string, header http.Header) (*http.Response, error)
// Assemble our request and attach all headers and cookies
req, err := http.NewRequest(http.MethodGet, url, nil)
if err != nil {
return nil, err
return nil, fmt.Errorf("creating GET request: %w", err)
}
if header != nil {
req.Header = header
@@ -97,7 +97,7 @@ func (c *HTTPClient) request(req *http.Request) (*http.Response, error) {
// Executes the request
res, err := c.Client.Do(req)
if err != nil {
return nil, err
return nil, fmt.Errorf("processing request: %w", err)
}

// If the server is in IUAM mode, solve the challenge and retry
@@ -107,11 +107,11 @@ func (c *HTTPClient) request(req *http.Request) (*http.Response, error) {
var rb []byte
rb, err = ioutil.ReadAll(res.Body)
if err != nil {
return nil, err
return nil, fmt.Errorf("reading IUAM response: %w", err)
}
return c.bypassCF(req, rb)
}
return res, err
return res, nil
}

// bypass attempts to re-execute a standard request after first bypassing
@@ -121,7 +121,7 @@ func (c *HTTPClient) bypassCF(req *http.Request, body []byte) (*http.Response, e
r1, _ := regexp.Compile(`setTimeout\(function\(\){\s+(var s,t,o,p,b,r,e,a,k,i,n,g,f.+?\r?\n[\s\S]+?a\.value =.+?)\r?\n`)
r1Match := r1.FindSubmatch(body)
if len(r1Match) != 2 {
return nil, errors.New("Failed to match on IUAM challenge")
return nil, fmt.Errorf("failed to match on IUAM challenge")
}
js := string(r1Match[1])

@@ -154,7 +154,7 @@ func (c *HTTPClient) bypassCF(req *http.Request, body []byte) (*http.Response, e
passMatch := pass.FindSubmatch(body)

if !(len(vcMatch) == 2 && len(passMatch) == 2) {
return nil, errors.New("Failed to extract Cloudflare IUAM challenge")
return nil, fmt.Errorf("failed to extract IUAM challenge")
}

// Assemble the CFClearence request
@@ -168,7 +168,7 @@ func (c *HTTPClient) bypassCF(req *http.Request, body []byte) (*http.Response, e
// Execute, populate cookies after 5 seconds and re-execute prior request
time.Sleep(4000 * time.Millisecond)
if _, err := c.Get(u.String(), nil); err != nil {
return nil, err
return nil, fmt.Errorf("getting IUAM request: %w", err)
}
return c.request(req)
}
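
With NewHTTPClient no longer returning an error and request failures now wrapped with context, a hypothetical call site (sketched against the signatures above, not code from this commit) shrinks to:

package main

import (
	"fmt"

	"s32x.com/anirip/common"
	"s32x.com/anirip/common/log"
)

func main() {
	// The constructor cannot fail any more, so there is no error to check here.
	client := common.NewHTTPClient()

	res, err := client.Get("https://www.crunchyroll.com/", nil)
	if err != nil {
		// The error already carries context such as "processing request: ...".
		log.Error(fmt.Errorf("fetching page: %w", err))
		return
	}
	defer res.Body.Close()
	log.Success("fetched page with status %s", res.Status)
}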
2 changes: 1 addition & 1 deletion common/log/log.go
@@ -17,4 +17,4 @@ func Warn(format string, a ...interface{}) { color.Yellow(prefix+format, a...) }
func Success(format string, a ...interface{}) { color.Green(prefix+format, a...) }

// Error logs errors
func Error(err error) { color.Red(prefix + err.Error()) }
func Error(err error) { color.Red(prefix + "Error: " + err.Error()) }
16 changes: 9 additions & 7 deletions common/video.go
@@ -1,6 +1,7 @@
package common /* import "s32x.com/anirip/common" */

import (
"fmt"
"os/exec"
"path/filepath"
)
@@ -25,21 +26,22 @@ func (p *VideoProcessor) DumpHLS(url string) error {
"-c", "copy", "incomplete.episode.mkv")
cmd.Dir = p.tempDir
if err := cmd.Run(); err != nil {
return err
return fmt.Errorf("running download command: %w", err)
}

// Rename the file since it's no longer incomplete
// and return
return Rename(p.tempDir+pathSep+"incomplete.episode.mkv",
p.tempDir+pathSep+"episode.mkv", 10)
if err := Rename(p.tempDir+pathSep+"incomplete.episode.mkv", p.tempDir+pathSep+"episode.mkv", 10); err != nil {
return fmt.Errorf("renaming incomplete episode: %w", err)
}
return nil
}

// MergeSubtitles merges the VIDEO.mkv and the VIDEO.ass
func (p *VideoProcessor) MergeSubtitles(audioLang, subtitleLang string) error {
Delete(p.tempDir, "unmerged.episode.mkv")
if err := Rename(p.tempDir+pathSep+"episode.mkv",
p.tempDir+pathSep+"unmerged.episode.mkv", 10); err != nil {
return err
if err := Rename(p.tempDir+pathSep+"episode.mkv", p.tempDir+pathSep+"unmerged.episode.mkv", 10); err != nil {
return fmt.Errorf("renaming unmerged episode: %w", err)
}
cmd := new(exec.Cmd)
if subtitleLang == "" {
@@ -65,7 +67,7 @@ func (p *VideoProcessor) MergeSubtitles(audioLang, subtitleLang string) error {
}
cmd.Dir = p.tempDir
if err := cmd.Run(); err != nil {
return err
return fmt.Errorf("running download command: %w", err)
}
Delete(p.tempDir, "subtitles.episode.ass")
Delete(p.tempDir, "unmerged.episode.mkv")
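
Both branches above lean on a Rename(src, dst, attempts) helper that is not part of this diff; a rough sketch of what a retrying rename of that shape might look like (an assumption, not the repository's implementation):

package common

import (
	"fmt"
	"os"
	"time"
)

// renameWithRetry retries os.Rename a fixed number of times, which helps when
// the file is still briefly held open by the process that just produced it.
// Hypothetical stand-in for the repository's Rename helper.
func renameWithRetry(src, dst string, attempts int) error {
	var err error
	for i := 0; i < attempts; i++ {
		if err = os.Rename(src, dst); err == nil {
			return nil
		}
		time.Sleep(500 * time.Millisecond) // back off before the next attempt
	}
	return fmt.Errorf("renaming %s after %d attempts: %w", src, attempts, err)
}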
18 changes: 11 additions & 7 deletions crunchyroll/episode.go
@@ -2,6 +2,7 @@ package crunchyroll /* import "s32x.com/anirip/crunchyroll" */

import (
"bytes"
"fmt"
"io/ioutil"
"net/http"
"net/url"
@@ -53,13 +54,13 @@ func (e *Episode) GetEpisodeInfo(client *common.HTTPClient, quality string) erro
// client.Header.Add("Referer", "http://www.crunchyroll.com/"+strings.Split(e.Path, "/")[1])
resp, err := client.Get(e.URL, nil)
if err != nil {
return common.NewError("There was an error requesting the episode doc", err)
return fmt.Errorf("getting episode page: %w", err)
}

// Creates the document that will be used to scrape for episode metadata
doc, err := goquery.NewDocumentFromResponse(resp)
if err != nil {
return common.NewError("There was an error reading the episode doc", err)
return fmt.Errorf("generating episode document: %w", err)
}

// Request querystring
@@ -87,21 +88,21 @@
header.Add("X-Requested-With", "ShockwaveFlash/22.0.0.192")
resp, err = client.Post("http://www.crunchyroll.com/xml/?"+queryString, header, reqBody)
if err != nil {
return common.NewError("There was an error retrieving the manifest", err)
return fmt.Errorf("getting manifest page: %w", err)
}

// Gets the xml string from the received xml response body
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return common.NewError("There was an error reading the xml response", err)
return fmt.Errorf("reading manifest page: %w", err)
}

// Checks for an unsupported region first
// TODO Use REGEX to extract xml
xmlString := string(body)
if strings.Contains(xmlString, "<code>") && strings.Contains(xmlString, "</code>") {
if strings.SplitN(strings.SplitN(xmlString, "<code>", 2)[1], "</code>", 2)[0] == "4" {
return common.NewError("This video is not available in your region", err)
return fmt.Errorf("video not avaliable in your region: %w", err)
}
}

@@ -111,7 +112,7 @@ func (e *Episode) GetEpisodeInfo(client *common.HTTPClient, quality string) erro
if strings.Contains(xmlString, "<file>") && strings.Contains(xmlString, "</file>") {
eFile = strings.SplitN(strings.SplitN(xmlString, "<file>", 2)[1], "</file>", 2)[0]
} else {
return common.NewError("No hosts were found for the episode", err)
return fmt.Errorf("no episode hosts found: %w", err)
}

e.Title = strings.Replace(strings.Replace(doc.Find("#showmedia_about_name").First().Text(), "“", "", -1), "”", "", -1)
@@ -122,7 +123,10 @@ func (e *Episode) GetEpisodeInfo(client *common.HTTPClient, quality string) erro

// Download downloads entire episode to our temp directory
func (e *Episode) Download(vp *common.VideoProcessor) error {
return vp.DumpHLS(e.StreamURL)
if err := vp.DumpHLS(e.StreamURL); err != nil {
return fmt.Errorf("dumping HLS stream: %w", err)
}
return nil
}

// GetFilename returns the Episodes filename
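
The TODO in GetEpisodeInfo above suggests switching from nested strings.SplitN calls to a regular expression for pulling <code> and <file> out of the manifest XML; one possible sketch of that (not part of this commit):

package main

import (
	"fmt"
	"regexp"
)

// extractTag returns the inner text of a single XML element such as <file>...</file>.
// Hypothetical helper illustrating the TODO; the commit itself keeps SplitN.
func extractTag(xml, tag string) (string, bool) {
	re := regexp.MustCompile(`(?s)<` + tag + `>(.*?)</` + tag + `>`)
	m := re.FindStringSubmatch(xml)
	if len(m) != 2 {
		return "", false
	}
	return m[1], true
}

func main() {
	body := `<error><code>4</code></error><stream><file>http://example.com/stream.m3u8</file></stream>`
	if code, ok := extractTag(body, "code"); ok && code == "4" {
		fmt.Println("video not available in this region")
	}
	if file, ok := extractTag(body, "file"); ok {
		fmt.Println("stream file:", file)
	}
}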
18 changes: 10 additions & 8 deletions crunchyroll/session.go
@@ -2,6 +2,7 @@ package crunchyroll /* import "s32x.com/anirip/crunchyroll" */

import (
"bytes"
"fmt"
"net/http"
"net/url"
"strings"
@@ -16,25 +17,26 @@ func Login(c *common.HTTPClient, user, pass string) error {
// Perform preflight request to retrieve the login page
res, err := c.Get("https://www.crunchyroll.com/login", nil)
if err != nil {
return err
return fmt.Errorf("getting login page: %w", err)
}

defer res.Body.Close()
doc, err := goquery.NewDocumentFromResponse(res)
if err != nil {
return err
return fmt.Errorf("generating login document: %w", err)
}

// Scrape the login token
token, _ := doc.Find("#login_form__token").First().Attr("value")

// Sets the credentials and attempts to generate new cookies
if err := createSession(c, user, pass, token); err != nil {
return err
return fmt.Errorf("creating session: %w", err)
}

// Validates the session created and returns
if err := validateSession(c); err != nil {
return err
return fmt.Errorf("validating session: %w", err)
}
log.Info("Successfully logged in!")
return nil
@@ -56,7 +58,7 @@ func createSession(c *common.HTTPClient, user, pass, token string) error {
head.Add("Referer", "https://www.crunchyroll.com/login")
head.Add("Content-Type", "application/x-www-form-urlencoded")
if _, err := c.Post("https://www.crunchyroll.com/login", head, body); err != nil {
return common.NewError("Failed to execute authentication request", err)
return fmt.Errorf("posting auth request: %w", err)
}
return nil
}
@@ -66,17 +68,17 @@ func createSession(c *common.HTTPClient, user, pass, token string) error {
func validateSession(c *common.HTTPClient) error {
resp, err := c.Get("http://www.crunchyroll.com/", nil)
if err != nil {
return common.NewError("Failed to execute session validation request", err)
return fmt.Errorf("getting validation page: %w", err)
}

doc, err := goquery.NewDocumentFromResponse(resp)
if err != nil {
return common.NewError("Failed to parse session validation page", err)
return fmt.Errorf("generating validation document: %w", err)
}

user := strings.TrimSpace(doc.Find("li.username").First().Text())
if resp.StatusCode == 200 && user != "" {
return nil
}
return common.NewError("Failed to verify session", nil)
return fmt.Errorf("could not verify session")
}
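
Login now wraps every step of the flow (preflight GET, token scrape, session creation, validation), so a caller only needs one error check; a hypothetical caller against the signatures above:

package main

import (
	"fmt"

	"s32x.com/anirip/common"
	"s32x.com/anirip/common/log"
	"s32x.com/anirip/crunchyroll"
)

func main() {
	client := common.NewHTTPClient()

	// Placeholder credentials; Login reports which step failed via the wrapped error.
	if err := crunchyroll.Login(client, "username", "password"); err != nil {
		log.Error(fmt.Errorf("logging in to Crunchyroll: %w", err))
		return
	}
	log.Success("session established")
}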
5 changes: 3 additions & 2 deletions crunchyroll/show.go
@@ -1,6 +1,7 @@
package crunchyroll /* import "s32x.com/anirip/crunchyroll" */

import (
"fmt"
"strconv"
"strings"

@@ -21,13 +22,13 @@ type Show struct {
func (s *Show) Scrape(client *common.HTTPClient, showURL string) error {
res, err := client.Get(showURL, nil)
if err != nil {
return common.NewError("There was an error retrieving show page", err)
return fmt.Errorf("getting show page: %w", err)
}

// Creates the goquery document for scraping
showDoc, err := goquery.NewDocumentFromResponse(res)
if err != nil {
return common.NewError("There was an error while accessing the show page", err)
return fmt.Errorf("generating show document: %w", err)
}

// Sets Title, Path and URL on our show object
