Skip to content
This repository has been archived by the owner on Oct 24, 2022. It is now read-only.

Commit

Permalink
Merge pull request #311 from MITLibraries/222_logging_config
Browse files Browse the repository at this point in the history
Use Loggers
  • Loading branch information
JPrevost committed Aug 11, 2015
2 parents 709cf14 + 679e958 commit 920bfe2
Show file tree
Hide file tree
Showing 11 changed files with 74 additions and 88 deletions.
6 changes: 3 additions & 3 deletions app/controllers/Application.scala
Original file line number Diff line number Diff line change
Expand Up @@ -282,7 +282,7 @@ object Application extends Controller with Security {
harvester ! h
h.complete
} else {
println("A harvest tried to start that had an invalid date.")
Logger.info("A harvest tried to start that had an invalid date.")
}
}
Ok("kicked off all harvests")
Expand Down Expand Up @@ -1386,10 +1386,10 @@ object Application extends Controller with Security {
val authorized_key = Play.configuration.getString("auth.harvest.key").get
if (key == authorized_key) {
indexer ! dtype
println("DEBUG: Reindex Job for " + dtype + " started")
Logger.info("Reindex Job for " + dtype + " started")
Ok("Reindexing " + dtype + "s: started")
} else {
println("DEBUG: A reindex tried to start without a valid key.")
Logger.warn("A reindex tried to start without a valid key.")
Unauthorized("Reindexing " + dtype + "s: not allowed")
}
}
Expand Down
4 changes: 2 additions & 2 deletions app/controllers/Search.scala
Original file line number Diff line number Diff line change
Expand Up @@ -22,13 +22,13 @@ object Search extends Controller {
val offset = (page) * perpage
val elastic_url = indexSvc + target + "/_search?q=" + encQuery + "&from=" + offset + "&size=" + perpage
val req = if (indexSvc.contains("bonsai.io")) {
println("DEBUG: use basic auth for WS elasticsearch call")
Logger.debug("use basic auth for WS elasticsearch call")
WS.url(elastic_url)
.withAuth(extractCredentials("username", indexSvc),
extractCredentials("password", indexSvc),
WSAuthScheme.BASIC)
} else {
println("DEBUG: no auth for WS elasticsearch call")
Logger.debug("no auth for WS elasticsearch call")
WS.url(elastic_url)
}

Expand Down
2 changes: 1 addition & 1 deletion app/models/Validator.scala
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ case class Validator(id: Int, scheme_id: Int, description: String, userId: Strin
// compose the service call
val svcCall = interpolate(serviceUrl.replaceAll("\\{topic\\}", topicId), true)
val req = WS.url(svcCall)
//println("svcCall: " + svcCall)
Logger.debug("svcCall: " + svcCall)
// need to define general abstraction - now just key off scheme tag
if ("issn".equals(scheme.tag)) {
req.get.map { resp => issnParse(resp.json) }
Expand Down
16 changes: 8 additions & 8 deletions app/services/jsonModel.scala
Original file line number Diff line number Diff line change
Expand Up @@ -145,16 +145,16 @@ object contentModelJson {
def buildContentModel(model: JsValue) = {
val formats = (model \ "cformats").get
procJsArray(formats, 0, cformatFromContentModel)
println("finished formats")
Logger.info("finished formats")
val schemes = (model \ "schemes").get
procJsArray(schemes, 0, schemeFromContentModel)
println("finished schemes")
Logger.info("finished schemes")
val ctypes = (model \ "ctypes").get
procJsArray(ctypes, 0, ctypeFromContentModel)
println("finished ctypes")
Logger.info("finished ctypes")
val resmaps = (model \ "resmaps").get
procJsArray(resmaps, 0, resmapFromContentModel)
println("finished resmaps")
Logger.info("finished resmaps")
}

def cformatFromContentModel(jss: JsValue) {
Expand All @@ -168,7 +168,7 @@ object contentModelJson {
}

def schemeFromContentModel(jss: JsValue) {
//println(Json.stringify(jss))
Logger.debug(Json.stringify(jss))
val tag = forName(jss, "tag")
// only create if not already defined
if (Scheme.findByTag(tag).isEmpty) {
Expand Down Expand Up @@ -302,7 +302,7 @@ object publisherModelJson {
def buildPublisherModel(model: JsValue) = {
val pubs = (model \ "publishers").get
procJsArray(pubs, 0, pubFromPublisherModel)
println("finished publishers")
Logger.info("finished publishers")
}

def pubFromPublisherModel(jss: JsValue) {
Expand All @@ -322,7 +322,7 @@ object publisherModelJson {
}

def collFromPublisherModel(pid: Int)(jss: JsValue) {
//println(Json.stringify(jss))
Logger.debug(Json.stringify(jss))
val tag = forName(jss, "tag")
// only create if not already defined && dependencies found
if (Collection.findByTag(tag).isEmpty) {
Expand Down Expand Up @@ -426,7 +426,7 @@ object subscriberModelJson {
def buildSubscriberModel(model: JsValue) = {
val subs = (model \ "subscribers").get
procJsArray(subs, 0, subFromSubscriberModel)
println("finished subscribers")
Logger.info("finished subscribers")
}

def subFromSubscriberModel(jss: JsValue) {
Expand Down
38 changes: 18 additions & 20 deletions app/workers/Cataloger.scala
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ import java.util.Date
import javax.xml.parsers.SAXParser

import org.xml.sax.InputSource

import play.api._
import scales.xml.jaxen._
import scales.xml._
import scales.utils._
Expand All @@ -31,7 +31,7 @@ import models.{Collection, ContentType, Finder, Item, ResourceMap, Scheme, Topic
class CatalogWorker extends Actor {
def receive = {
case item: Item => Cataloger.catalog(item)
case _ => println("bar")
case _ => Logger.error("Unhandled case in CatalogWorker#receive")
}
}

Expand All @@ -51,7 +51,7 @@ class Cataloger(resmap: ResourceMap, content: StoredContent) {
val (idHits, lblHits) = findValues(Finder.forSchemeAndFormat(scheme.id, format), source)
// add cardinality checking here
var idx = 0
// println("IDHits size: " + idHits.size)
Logger.debug("IDHits size: " + idHits.size)
for (id <- idHits) {
// check for and utilize existing topics
val topic = Topic.forSchemeAndTag(scheme.tag, id).getOrElse(createTopic(scheme, id, lblHits(idx)))
Expand All @@ -70,13 +70,13 @@ class Cataloger(resmap: ResourceMap, content: StoredContent) {
var lblHits: Seq[String] = null
if (doc != null) {
// do Id & label
// println("in process about to evaluate: " + finder.idKey)
Logger.debug("in process about to evaluate: " + finder.idKey)
var keyParts = finder.idKey.split(" ")
// new way
// println("keyParts0: " + keyParts(0))
Logger.debug("keyParts0: " + keyParts(0))
val xp = new ScalesXPath(keyParts(0)).withNameConversion(ScalesXPath.localOnly)
val hits = xp.evaluate(top(doc))
// println("Post eval num hits: " + hits.size)
Logger.debug("Post eval num hits: " + hits.size)
if (hits.size > 0) {
if (keyParts.length == 2) {
val regX = keyParts(1).r
Expand All @@ -103,17 +103,15 @@ class Cataloger(resmap: ResourceMap, content: StoredContent) {
idHits = List()
}
}
// also stow in infoCache
//idHits.foreach(println)
//infoCache += ("id" -> idHits)

if (idHits.size > 0) {
val idl = finder.idLabel
// if idl is an XPath, evaluate it
if (idl != null && idl.length > 0 && idl.indexOf("/") >= 0) {
// println("in process about to evaluate label: " + idl)
Logger.debug("in process about to evaluate label: " + idl)
lblHits = xpathFind(idl, doc)
} else if (idl != null && idl.length > 0) {
// println("process filtered value; " + filteredValue(idl, 0))
Logger.debug("process filtered value; " + filteredValue(idl, 0))
var lblList = List[String]()
var count = 0
for (a <- idHits) {
Expand Down Expand Up @@ -200,7 +198,7 @@ class Cataloger(resmap: ResourceMap, content: StoredContent) {
// is value cached?
var value = infoCache.get(token) match {
case Some(x) =>
// println("In filter token: " + token + " index: " + index + " size: " + x.size)
Logger.debug("In filter token: " + token + " index: " + index + " size: " + x.size)
x(index)
case _ => null
}
Expand All @@ -221,7 +219,7 @@ class Cataloger(resmap: ResourceMap, content: StoredContent) {
value = idHits(index)
var valList: List[String] = List()
for (idHit <- idHits) {
println("idHit: " + idHit)
Logger.info("idHit: " + idHit)
valList = idHit :: valList
}
infoCache += (token -> valList.reverse)
Expand All @@ -235,7 +233,7 @@ class Cataloger(resmap: ResourceMap, content: StoredContent) {

def docToParse(name: String) = {
val fname = filteredValue(name, 0)
// println("doc2p: fname: " + fname)
Logger.debug("doc2p: fname: " + fname)
// check doc cache first
docCache.get(fname) match {
case Some(x) => x
Expand Down Expand Up @@ -271,37 +269,37 @@ object Cataloger {
val cataloger = new Cataloger(resmap, Store.content(item))
val ctype = ContentType.findById(item.ctypeId).get
var errorDetected = false
println(s"Cataloging Item: ${item.objKey}")
Logger.info(s"Cataloging Item: ${item.objKey}")

try {
// start with metadata schemes
ctype.schemes("meta").foreach( sch => {
// println("Found scheme:" + sch.tag)
Logger.debug("Found scheme:" + sch.tag)
cataloger.metadata(sch, item) }
)
// next topic schemes
ctype.schemes("topic").foreach( cataloger.topics(_, item) )
} catch {
case e: Exception => println(e); errorDetected = true
case e: Exception => Logger.info(e.toString); errorDetected = true
}

// now assign to meta-topics as appropriate
if (errorDetected == true) {
println("An error occurred cataloging this item.")
Logger.error("An error occurred cataloging this item.")
} else if (cataloger.addedTopics == 0) {
// assign to 'null' meta-topic
item.addTopic(Topic.forSchemeAndTag("meta", "null") match {
case Some(x) => x
case _ => makeTopic("meta", "null", "Items lacking any topic");
})
println("No topics")
Logger.info("No topics")
} else {
// assign to the catch-all meta-topic as well
item.addTopic(Topic.forSchemeAndTag("meta", "any") match {
case Some(x) => x
case _ => makeTopic("meta", "any", "Items with some topics");
})
println("Found some topics")
Logger.info("Found some topics")
}
// next indexing schemes (which will have already been found as metadata)
// must follow topic extraction, since items' topics are indexed
Expand Down
30 changes: 15 additions & 15 deletions app/workers/Conveyor.scala
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ class ConveyorWorker extends Actor {
case (item: Item, subscr: Subscriber) => Conveyor.transferItem(item, subscr)
case (hold: Hold, accept: Boolean) => Conveyor.resolveHold(hold, accept)
case (pick: TopicPick, accept: Boolean) => Conveyor.resolvePick(pick, accept)
case _ => println("I'm lost")
case _ => Logger.error("Unhandled Case in ConveyorWorker#receive")
}
}

Expand Down Expand Up @@ -67,7 +67,7 @@ object Conveyor {
plan.interest match {
case "subscribe" => subscribe(plan.fulfill)
case "review" => review()
case _ => println("Don't go there")
case _ => Logger.error("Unhandled Case in Conveyor#processInterests")
}
}

Expand All @@ -85,26 +85,26 @@ object Conveyor {
plan.template match {
case "subscribe" => subscribe(plan.fulfill)
case "review" => review()
case _ => println("Don't go there")
case _ => Logger.error("Unhandled Case in Conveyor#processTemplates")
}
}

def subscribe(action: String) = {
mints.foreach( i =>
if(sub.subscribesTo(topic.id)) {
println("Conveyor: Subscriber already subscribes to Topic, not re-subscribing.")
Logger.info("Conveyor: Subscriber already subscribes to Topic, not re-subscribing.")
} else {
println("Conveyor: Added new Subscription.")
Logger.info("Conveyor: Added new Subscription.")
Subscription.create(i.subscriberId, topic.id, action, sub.created, new Date)
})
}

def review() = {
mints.foreach( t =>
if(TopicPick.picked(topic.id, t.subscriberId)) {
println("Conveyor: TopicPick detected duplicate so did nothing.")
Logger.info("Conveyor: TopicPick detected duplicate so did nothing.")
} else {
println("Conveyor: Added new TopicPick.")
Logger.info("Conveyor: Added new TopicPick.")
TopicPick.create(t.subscriberId, topic.id, conveyorAgent.id)
})
}
Expand All @@ -131,7 +131,7 @@ object Conveyor {
if (plan.isDefined) {
plan.get.interest match {
case "review" => reviewInterest(interest, plan.get)
case _ => println("Don't go there")
case _ => Logger.error("Unhandled Case in Conveyor#newInterest")
}
}
}
Expand Down Expand Up @@ -215,7 +215,7 @@ object Conveyor {
case "deliver" => transfer(item, sub, Transfer.make(sub.subscriberId, sub.id, item.id, sub.action))
case "review" => Hold.make(sub.subscriberId, sub.id, item.id)
case "notify" => notify(item, sub)
case _ => println("Unknown action: " + sub.action)
case _ => Logger.error("Unknown action: " + sub.action)
}
}
}
Expand All @@ -231,7 +231,7 @@ object Conveyor {
chan.protocol match {
case "sword" => swordTransfer(item, chan, trans)
case "drain" => chan.recordTransfer // essentially No-Op
case _ => println("Don't know how to transfer via: " + chan.protocol)
case _ => Logger.error("Don't know how to transfer via: " + chan.protocol)
}
}

Expand All @@ -240,7 +240,7 @@ object Conveyor {
.withHeaders(CONTENT_TYPE -> "application/zip",
"X-packaging" -> "http://purl.org/net/sword-types/METSDSpaceSIP")
.withAuth(channel.userId, channel.password, WSAuthScheme.BASIC)
println("About to deposit: " + req)
Logger.info("About to deposit: " + req)
val resp = req.post(Packager.packageItemAsFile(item))

resp onComplete {
Expand All @@ -249,24 +249,24 @@ object Conveyor {
}

def failedSwordTransferAttempt(t: String) = {
println("An error occurred attempting to submit a Sword package")
Logger.error("An error occurred attempting to submit a Sword package")
val sysadminEmails = User.allByRole("sysadmin").map(x => x.email).mkString(",")
val msg = views.txt.email.sword_transfer_failure(item, channel, trans, t).body
sendSwordFailureEmail(sysadminEmails, msg)
}

def sendSwordFailureEmail(addresses: String, msg: String) = {
println(msg)
Logger.info(msg)
Emailer.notify(addresses, "SCOAP3Hub: failure of sword delivery detected", msg)
Transfer.delete(trans.id)
}

def readSwordResponse(response: play.api.libs.ws.WSResponse) = {
if (response.status == 201) {
println("Successful Transfer of " + item.objKey)
Logger.info("Successful Transfer of " + item.objKey)
channel.recordTransfer
} else {
println("The SWORD server did not accept the transfer. Response was " + response.toString)
Logger.warn("The SWORD server did not accept the transfer. Response was " + response.toString)
// email admin details
val sysadminEmails = User.allByRole("sysadmin").map(x => x.email).mkString(",")
val msg = views.txt.email.sword_transfer_failure(item, channel, trans, response.toString).body
Expand Down
Loading

0 comments on commit 920bfe2

Please sign in to comment.