This repository has been archived by the owner on Dec 21, 2022. It is now read-only.

Commit

Merge 39bbc77 into 0fd67ae
lloydmeta committed Nov 16, 2015
2 parents 0fd67ae + 39bbc77 commit a49d855
Showing 4 changed files with 9 additions and 7 deletions.
6 changes: 3 additions & 3 deletions build.sbt
@@ -12,12 +12,12 @@ resolvers += "Typesafe Repository" at "http://repo.typesafe.com/typesafe/release

libraryDependencies ++= Seq(
"org.scalatest" %% "scalatest" % "2.2.4" % "test",
"com.typesafe.akka" %% "akka-testkit" % "2.3.9" % "test",
"com.typesafe.akka" %% "akka-actor" % "2.3.9",
"com.typesafe.akka" %% "akka-testkit" % "2.3.14" % "test",
"com.typesafe.akka" %% "akka-actor" % "2.3.14",
"net.databinder.dispatch" %% "dispatch-core" % "0.11.3",
"com.ning" % "async-http-client" % "1.9.31",
"commons-validator" % "commons-validator" % "1.4.1",
"org.jsoup" % "jsoup" % "1.8.1"
"org.jsoup" % "jsoup" % "1.8.3"
)

publishTo <<= version { v: String =>
6 changes: 4 additions & 2 deletions src/main/scala/com/beachape/metascraper/Scraper.scala
@@ -54,8 +54,10 @@ class Scraper(httpClient: Http, urlSchemas: Seq[String])(implicit ec: ExecutionC
/**
* Returns [[ScrapedData]] based on the Schemas requested
*
- * The list of [[Schema]] are processed from left to right, meaning the ones to the beginning of the
- * list get precedence over those towards the end
+ * The list of [[Schema]] generated from the list of Schema factories are processed from left to right,
+ * meaning that the ones closer to the beginning of the list get precedence over those towards the end.
+ *
+ * In fact, if an earlier schema successfully extracts data, then later schemas may not even be used.
*/
def extractData(resp: Response, url: String, schemaFactories: Seq[SchemaFactory], numberOfImages: Int): ScrapedData = {
if (resp.getStatusCode / 100 == 2) {
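To make the precedence rule in the updated doc comment concrete, here is a minimal, hypothetical sketch of "earlier schemas win" behaviour. The simplified `Schema` trait with a single `extractTitle` member is an assumption for illustration; the real `Schema` and `ScrapedData` types in this repository carry more fields and methods.

```scala
// Hypothetical, simplified sketch of left-to-right schema precedence.
trait Schema {
  def extractTitle: Option[String]
}

object SchemaPrecedenceSketch {
  // Schemas are tried from left to right; the first one that yields a value wins,
  // so later schemas may not even be consulted for that field.
  def firstTitle(schemas: Seq[Schema]): Option[String] =
    schemas.foldLeft(Option.empty[String])((acc, schema) => acc.orElse(schema.extractTitle))
}
```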
2 changes: 1 addition & 1 deletion src/main/scala/com/beachape/metascraper/ScraperActor.scala
@@ -15,7 +15,7 @@ import scala.util.{ Failure, Success }
object ScraperActor {

/**
- * Factory method for the params required to instantiate a MonitorActor
+ * Factory method for the params required to instantiate a ScraperActor
*
* @param threadMultiplier Int multiplier to calculate the number of threads to use for
* this actor's async HTTP executor service. The number of cores found by
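For reference, a minimal usage sketch of this factory method outside of tests. It assumes `ScraperActor()` returns Akka `Props` (suggested by the `TestActorRef(ScraperActor())` call in the spec below) and that factory parameters such as `threadMultiplier` have defaults; neither detail is confirmed by the lines shown in this diff.

```scala
import akka.actor.{ ActorRef, ActorSystem }
import com.beachape.metascraper.ScraperActor

object ScraperActorUsageSketch extends App {
  val system = ActorSystem("metascraper")

  // Assumption: ScraperActor() returns Props, so it can be passed straight to actorOf,
  // mirroring the TestActorRef(ScraperActor()) call in the updated spec.
  val scraperActor: ActorRef = system.actorOf(ScraperActor(), name = "scraper-actor")
}
```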
2 changes: 1 addition & 1 deletion src/test/scala/com/beachape/metascraper/ScraperActorSpec.scala
@@ -14,7 +14,7 @@ class ScraperActorSpec extends TestKit(ActorSystem("testSystem"))
with BeforeAndAfter
with ImplicitSender {

- val scraperActorRef = TestActorRef(new ScraperActor)
+ val scraperActorRef = TestActorRef(ScraperActor())

describe("integration testing by sending ScrapeUrl messages") {

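As a sketch of what "sending ScrapeUrl messages" looks like from a TestKit spec: the single-URL-argument `ScrapeUrl` constructor, the `Messages` import path, and the `Either[Throwable, ScrapedData]` reply shape are all assumptions not confirmed by the lines shown in this diff.

```scala
import akka.actor.ActorSystem
import akka.testkit.{ ImplicitSender, TestActorRef, TestKit }
import com.beachape.metascraper.ScraperActor
import com.beachape.metascraper.Messages.{ ScrapeUrl, ScrapedData } // assumed import path
import org.scalatest.{ BeforeAndAfterAll, FunSpecLike }

class ScrapeUrlSketchSpec extends TestKit(ActorSystem("sketchSystem"))
    with FunSpecLike
    with BeforeAndAfterAll
    with ImplicitSender {

  val scraperActorRef = TestActorRef(ScraperActor())

  describe("sending a ScrapeUrl message") {
    it("replies with scraped data or a failure") {
      scraperActorRef ! ScrapeUrl("https://beachape.com") // assumed single-argument constructor
      expectMsgClass(classOf[Either[Throwable, ScrapedData]]) // assumed reply type
    }
  }

  override def afterAll(): Unit = TestKit.shutdownActorSystem(system)
}
```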
