Fixes #15741: Update scala to 2.12.10 #2468

Merged
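Apart from the pom.xml updates, the diff is a mechanical rewrite: every use of the symbolic fold aliases `/:` (foldLeft) and `:\` (foldRight) is replaced by an explicit `foldLeft` call, presumably to ease migration to later Scala versions where those aliases are deprecated. A minimal sketch of the equivalence, written for this review rather than taken from the Rudder sources (the demo object and values are invented):

```scala
object FoldAliasDemo {
  def main(args: Array[String]): Unit = {
    val tags = List("a", "b", "c")

    // old style, as removed throughout this PR: `z /: xs` is the alias for xs.foldLeft(z)
    val viaAlias    = ("" /: tags) { (acc, t) => acc + t }
    // new style, as added throughout this PR
    val viaFoldLeft = tags.foldLeft("") { (acc, t) => acc + t }
    assert(viaAlias == viaFoldLeft) // both yield "abc"

    // the right-fold alias works the same way, but its function takes (element, accumulator)
    val viaRightAlias = (tags :\ "") { (t, acc) => t + acc }
    val viaFoldRight  = tags.foldRight("") { (t, acc) => t + acc }
    assert(viaRightAlias == viaFoldRight)
  }
}
```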
@@ -170,7 +170,7 @@ class PreUnmarshallCheckConsistency extends PreUnmarshall with Loggable {
* In case of none, return a failure.
*/
val failure: Box[String] = Failure(s"Missing tags ${tags.map(t => s"OPERATINGSYSTEM/${t}").mkString(", ")}. At least one of them is mandatory")
- (failure /: tags) { case (state, tag) =>
+ tags.foldLeft(failure) { case (state, tag) =>
state match {
case Full(x) => Full(x)
case _ => checkNodeSeq(report, "OPERATINGSYSTEM", false, Some(tag)).or(failure)
@@ -152,7 +152,7 @@ class FileHistoryLogRepository[ID,T](
for {
versions <- this.versions(id)
hlogs <- {
- ( (Full(Seq()):Box[Seq[HLog]]) /: versions ) { (current, v) =>
+ versions.foldLeft(Full(Seq()):Box[Seq[HLog]]) { (current, v) =>
for {
seq <- current
hlog <- this.get(id,v)
@@ -216,4 +216,4 @@ object FileHistoryLogRepository {
case e:IllegalArgumentException => None
}
}
- }
+ }
@@ -62,7 +62,7 @@ class InventoryDitServiceImpl(
sortWith{ (x,y) => x._1.compareTo(y._1) >= 0 }

override def getDit(dn:DN) : Box[InventoryDit] = {
- ( (Empty:Box[InventoryDit]) /: baseDns ) {
+ baseDns.foldLeft(Empty:Box[InventoryDit]) {
case (f:Full[_], _) => f
case (_, (baseDn,dit) ) if(baseDn.isAncestorOf(dn,true)) => Full(dit)
case _ => Empty
@@ -515,7 +515,7 @@ class InventoryMapper(
val m = MachineInventory(id,inventoryStatus,machineType,name,mbUuid,inventoryDate, receiveDate
, manufacturer, systemSerialNumber)
//map subentries and return result
- (m /: tree.children) { case (m,(rdn,t)) => mapAndAddMachineElement(t.root(),m) }
+ tree.children.foldLeft(m) { case (m,(rdn,t)) => mapAndAddMachineElement(t.root(),m) }
}
}

@@ -996,7 +996,7 @@ class InventoryMapper(
node <- nodeFromEntry(tree.root)
} yield {
//map subentries and return result
- (node /: tree.children) { case (m,(rdn,t)) => mapAndAddNodeElement(t.root(),m) }
+ tree.children.foldLeft(node) { case (m,(rdn,t)) => mapAndAddNodeElement(t.root(),m) }
}
}

@@ -98,7 +98,7 @@ object Control {
* } yield { processingOk } //match on Failure / Full(report)
*/
def pipeline[T,U](seq: Seq[T],init:U)(call:(T,U) => Box[U]) : Box[U] = {
- ((Full(init):Box[U]) /: seq){ (currentValue, nextProcessor) =>
+ seq.foldLeft(Full(init):Box[U]){ (currentValue, nextProcessor) =>
currentValue match {
case x:EmptyBox => return x //interrupt pipeline early
case Full(value) => call(nextProcessor,value)
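For context on this helper: `pipeline` threads an accumulator of type U through each element of `seq` and stops at the first failure. A standalone sketch of the same pattern together with an invented usage (Box, EmptyBox, Full and Failure are Lift's types; the step logic below is hypothetical, not Rudder code):

```scala
import net.liftweb.common.{Box, EmptyBox, Failure, Full}

object PipelineSketch {
  // Same shape as Control.pipeline: fold the sequence, short-circuit on the first failure.
  def pipeline[T, U](seq: Seq[T], init: U)(call: (T, U) => Box[U]): Box[U] = {
    seq.foldLeft(Full(init): Box[U]) { (currentValue, nextProcessor) =>
      currentValue match {
        case x: EmptyBox => return x // interrupt the pipeline early, as in the original
        case Full(value) => call(nextProcessor, value)
      }
    }
  }

  def main(args: Array[String]): Unit = {
    println(pipeline(Seq(1, 2, 3), 0)((step, acc) => Full(acc + step))) // Full(6)
    println(pipeline(Seq(1, -2, 3), 0) { (step, acc) =>
      if (step < 0) Failure(s"negative step ${step}") else Full(acc + step)
    }) // a Failure, produced by the second step
  }
}
```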
19 changes: 18 additions & 1 deletion webapp/sources/rudder-parent-pom/pom.xml
@@ -361,8 +361,10 @@ limitations under the License.
<rudder-version>5.0.14-SNAPSHOT</rudder-version>
<spring-run-dep-version>5.0.14-SNAPSHOT</spring-run-dep-version>

- <scala-version>2.12.7</scala-version>
+ <scala-version>2.12.10</scala-version>
<scala-binary-version>2.12</scala-binary-version>
+ <scala-parser-combinators-version>1.1.2</scala-parser-combinators-version>
+ <scala-xml-version>1.2.0</scala-xml-version>
<lift-version>3.3.0</lift-version>
<slf4j-version>1.7.25</slf4j-version>
<logback-version>1.2.3</logback-version>
@@ -433,6 +435,16 @@
<artifactId>scala-reflect</artifactId>
<version>${scala-version}</version>
</dependency>
+ <dependency>
+ <groupId>org.scala-lang.modules</groupId>
+ <artifactId>scala-parser-combinators_${scala-binary-version}</artifactId>
Member: why do we need it now?

Member Author: I'm really not sure, and even if I had a good explanation for that one, I don't understand why it needs to be declared in rudder-web, since it is a compile dependency in rudder-core. But it seems they changed the scope of that module in scala-compiler from compile to provided, so we don't get it through liftweb-json anymore (I think).

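One data point on the question above: rudder-core uses the module directly, for instance `InterpolatedValueCompilerImpl extends RegexParsers` further down in this diff, so once the artifact no longer arrives transitively it has to be declared explicitly. A self-contained sketch of that kind of parser; the grammar below is invented and only illustrates the scala-parser-combinators API being compiled against:

```scala
import scala.util.parsing.combinator.RegexParsers

object TinyVariableParser extends RegexParsers {
  // invented grammar: accepts strings like "${node.id}" and returns the path inside the braces
  def ident: Parser[String]    = """[a-zA-Z0-9_.]+""".r
  def variable: Parser[String] = "${" ~> ident <~ "}"

  def main(args: Array[String]): Unit = {
    println(parseAll(variable, "${node.id}")) // prints a Success wrapping "node.id"
  }
}
```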
+ <version>${scala-parser-combinators-version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang.modules</groupId>
+ <artifactId>scala-xml_${scala-binary-version}</artifactId>
+ <version>${scala-xml-version}</version>
+ </dependency>
<dependency>
<groupId>org.typelevel</groupId>
<artifactId>cats-core_${scala-binary-version}</artifactId>
@@ -687,6 +699,11 @@
<version>${specs2-version}</version>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.scala-lang.modules</groupId>
+ <artifactId>scala-parser-combinators_${scala-binary-version}</artifactId>
+ <scope>test</scope>
+ </dependency>

<!--
included to use slf4j native backend
5 changes: 5 additions & 0 deletions webapp/sources/rudder/rudder-core/pom.xml
@@ -103,6 +103,11 @@ along with Rudder. If not, see <http://www.gnu.org/licenses/>.
<version>${rudder-version}</version>
</dependency>

+ <dependency>
+ <groupId>org.scala-lang.modules</groupId>
+ <artifactId>scala-parser-combinators_${scala-binary-version}</artifactId>
+ </dependency>

<dependency>
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
@@ -140,7 +140,7 @@ object TechniqueCategoryId {
def buildId(path:String) : TechniqueCategoryId = {
val absPath = "/" + path
val parts = absPath.split("/").filterNot(x => empty.findFirstIn(x).isDefined)
- ( (RootTechniqueCategoryId:TechniqueCategoryId) /: parts) { (id,name) =>
+ parts.foldLeft((RootTechniqueCategoryId:TechniqueCategoryId)) { (id,name) =>
SubTechniqueCategoryId(TechniqueCategoryName(name), id)
}
}
@@ -161,7 +161,7 @@ class TechniqueRepositoryImpl(
// {name, techniques, subcategories} are the same and transform them into move.
val deleted = changed.collect { case d: Deleted => d }
// for each delete, look for a corresponding add, and in that case mark them as to be removed from changed
- val (moveToAdd, otherToRemove) = ((List.empty[Moved], List.empty[TechniqueCategoryModType]) /: deleted) { case ((move, toDelete), d@Deleted(currentDel)) =>
+ val (moveToAdd, otherToRemove) = deleted.foldLeft((List.empty[Moved], List.empty[TechniqueCategoryModType])) { case ((move, toDelete), d@Deleted(currentDel)) =>
changed.find(c => c match {
// hypothesis: it's a directory rename if the display name and content is the same
case Added(cat, parentId) => currentDel.name == cat.name && currentDel.subCategoryIds == cat.subCategoryIds && currentDel.techniqueIds == cat.techniqueIds
@@ -25,7 +25,7 @@ class SectionSpecWriterImpl extends SectionSpecWriter {
if (rootSection.name != SECTION_ROOT_NAME)
Failure(s"root section name should be equals to ${SECTION_ROOT_NAME} but is ${rootSection.name}")
else {
- val children = (rootSection.children.flatMap( serializeChild(_))/:NodeSeq.Empty)((a,b) => a ++ b)
+ val children = rootSection.children.flatMap( serializeChild(_)).foldLeft(NodeSeq.Empty)((a,b) => a ++ b)
val xml = createXmlNode(SECTIONS_ROOT,children)
Full(xml)
}
@@ -39,7 +39,7 @@
}

private[this] def serializeSection(section:SectionSpec):NodeSeq = {
- val children = (section.children.flatMap( serializeChild(_))/:NodeSeq.Empty)((a,b) => a ++ b)
+ val children = section.children.flatMap(serializeChild(_)).foldLeft(NodeSeq.Empty)((a,b) => a ++ b)
val xml = ( createXmlNode(SECTION,children)
% Attribute(SECTION_NAME, Text(section.name),Null)
% Attribute(SECTION_IS_MULTIVALUED, Text(section.isMultivalued.toString),Null)
@@ -83,7 +83,7 @@ class SectionSpecWriterImpl extends SectionSpecWriter {
val longDescription = createXmlTextNode(VAR_LONG_DESCRIPTION, variable.longDescription)
val isMultiValued = createXmlTextNode(VAR_IS_MULTIVALUED, variable.multivalued.toString)
val checked = createXmlTextNode(VAR_IS_CHECKED, variable.checked.toString)
- val items = (valueLabels.map(serializeItem(_))/:NodeSeq.Empty)((a,b) => a ++ b)
+ val items = valueLabels.map(serializeItem(_)).foldLeft(NodeSeq.Empty)((a,b) => a ++ b)
val constraint = serializeConstraint(variable.constraint)

val children = ( name
@@ -233,7 +233,7 @@ object RuleTarget extends Loggable {
def isANodeWithRole (nodeId : NodeId, isPolicyServer : Boolean ,serverRoles : Set[ServerRole]) : Boolean = {
serverRoles.size>0 || isPolicyServer || nodeId == Constants.ROOT_POLICY_SERVER_ID
}
- (Set[NodeId]() /: targets) { case (nodes , target) => target match {
+ targets.foldLeft(Set[NodeId]()) { case (nodes , target) => target match {
case AllTarget => return allNodes.keySet
case AllTargetExceptPolicyServers => nodes ++ allNodes.collect { case(k,n) if(!n._1) => k }
case PolicyServerTarget(nodeId) => nodes + nodeId
@@ -258,15 +258,15 @@
case TargetIntersection(targets) =>
val nodeSets = targets.map(t => getNodeIds(Set(t), allNodes, groups))
// Compute the intersection of the sets of Nodes
- val intersection = (allNodes.keySet/: nodeSets) {
+ val intersection = nodeSets.foldLeft(allNodes.keySet) {
case (currentIntersection, nodes) => currentIntersection.intersect(nodes)
}
nodes ++ intersection

case TargetUnion(targets) =>
val nodeSets = targets.map(t => getNodeIds(Set(t), allNodes, groups))
// Compute the union of the sets of Nodes
- val union = (Set[NodeId]()/: nodeSets) {
+ val union = nodeSets.foldLeft(Set[NodeId]()) {
case (currentUnion, nodes) => currentUnion.union(nodes)
}
nodes ++ union
@@ -373,7 +373,7 @@ object RuleTarget extends Loggable {
case Seq(t:TargetExclusion) => t
case _ =>
val start = TargetExclusion(TargetUnion(Set()),TargetUnion(Set()))
- val res = (start /: targets) {
+ val res = targets.foldLeft(start) {
case (res,e:TargetExclusion) =>
res.updateInclude(e.includedTarget).updateExclude(e.excludedTarget)
case (res,t) => res.updateInclude(t)
@@ -99,7 +99,7 @@ object ReportType {
if (reportTypes.isEmpty) {
NoAnswer
} else {
- ( reportTypes :\ (EnforceNotApplicable : ReportType) ) {
+ reportTypes.foldLeft(EnforceNotApplicable : ReportType) {
case (_, BadPolicyMode) | (BadPolicyMode, _) => BadPolicyMode
case (_, Unexpected) | (Unexpected, _) => Unexpected
case (_, EnforceError) | (EnforceError, _) => EnforceError
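Worth noting for review: this site used the right-fold alias `:\` and is rewritten to `foldLeft`. That preserves behaviour as long as the combining function is a symmetric "worst severity wins" merge, which the visible cases are (each one matches both argument orders) and the hidden ones presumably are too. A small sketch of that property with a stand-in severity type; the names below are illustrative, not the real ReportType hierarchy:

```scala
sealed trait Severity
case object Ok         extends Severity
case object Error      extends Severity
case object Unexpected extends Severity

object WorstSeverityDemo {
  // symmetric "worst wins" merge, mirroring the shape of the cases in the fold above
  def worst(a: Severity, b: Severity): Severity = (a, b) match {
    case (_, Unexpected) | (Unexpected, _) => Unexpected
    case (_, Error) | (Error, _)           => Error
    case _                                 => Ok
  }

  def main(args: Array[String]): Unit = {
    val reports = List[Severity](Ok, Error, Unexpected, Ok)
    // worst is commutative and associative, so folding from either side gives the same result
    assert(reports.foldLeft(Ok: Severity)(worst) == reports.foldRight(Ok: Severity)(worst))
    println(reports.foldLeft(Ok: Severity)(worst)) // Unexpected
  }
}
```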
@@ -182,7 +182,7 @@ object RunHooks {
* But we still want the whole operation to be non-bloking.
*/
import HookReturnCode._
- ( Future(Ok("",""):HookReturnCode) /: hooks.hooksFile) { case (previousFuture, nextHookName) =>
+ hooks.hooksFile.foldLeft(Future(Ok("",""):HookReturnCode)) { case (previousFuture, nextHookName) =>
val path = hooks.basePath + File.separator + nextHookName
previousFuture.flatMap {
case x: Success =>
@@ -78,7 +78,7 @@ object ResultHelper {
}

def sequence[T,U](seq : Seq[T])( f : T => Result[U]) : Result[Seq[U]] = {
- ((Right(Seq()) : Result[Seq[U]]) /: seq) {
+ seq.foldLeft((Right(Seq()) : Result[Seq[U]])) {
case (e @ Left(_), _) => e
case (Right(res), value) =>f(value).map( res :+ _)
}
@@ -141,7 +141,7 @@ class EventLogJdbcRepository(
// here, we have to build the parameters by hand
// the first is the array needed by xpath, the following are eventType - if any
val eventTypeParam = eventTypeFilter.zipWithIndex
- val param = ( HPS.set(1, List(changeRequest.value.toString)) /: eventTypeParam ) { case (current, (event,index)) =>
+ val param = eventTypeParam.foldLeft(HPS.set(1, List(changeRequest.value.toString))) { case (current, (event,index)) =>
// zipwithIndex starts at 0, and we have already 1 used for the array, so we +2 the index
current *> HPS.set(index+2, event.eventType.serialize)
}
@@ -273,7 +273,7 @@ class UpdateExpectedReportsJdbcRepository(

// we want to close all error nodeexpectedreports, and all non current
// we keep valid current identified by nodeId, we won't have to save them afterward.
- val (toClose, okConfigs) = ( (List[(DateTime, NodeId, NodeConfigId, DateTime)](), List[NodeId]() ) /: oldConfigs) { case ((nok, ok), next) =>
+ val (toClose, okConfigs) = oldConfigs.foldLeft((List[(DateTime, NodeId, NodeConfigId, DateTime)](), List[NodeId]() )) { case ((nok, ok), next) =>
next match {
case Left(n) => ((currentConfigs(n._1)._2, n._1, n._2, n._3)::nok, ok)
case Right(r) =>
@@ -288,7 +288,7 @@
//same reasoning for config info: only update the ones for witch the last id is not the correct one
//we use configs because we must know if a nodeInfo is completly missing and add it
type T = (Vector[NodeConfigIdInfo], NodeId)
- val (toAdd, toUpdate, okInfos) = ( (List[T](), List[T](), List[NodeId]() ) /: configs ) { case ((add, update, ok), next) =>
+ val (toAdd, toUpdate, okInfos) = configs.foldLeft((List[T](), List[T](), List[NodeId]() )) { case ((add, update, ok), next) =>
configInfos.get(next.nodeId) match {
case None => // add it
( (Vector(NodeConfigIdInfo(next.nodeConfigId, next.beginDate, None)), next.nodeId) :: add, update, ok)
@@ -469,7 +469,7 @@ class RoLDAPDirectiveRepository(
con <- ldap
entries <- userLibMutex.readLock( con.getTree(rudderDit.ACTIVE_TECHNIQUES_LIB.dn) ) ?~! "The root category of the user library of techniques seems to be missing in LDAP directory. Please check its content"
} yield {
- val allMaps = (emptyAll /: entries.toSeq) { case (current, e) =>
+ val allMaps = entries.toSeq.foldLeft(emptyAll) { case (current, e) =>
if(isACategory(e)) {
mapper.entry2ActiveTechniqueCategory(e) match {
case Full(category) =>
@@ -488,7 +488,7 @@ class LDAPEntityMapper(
}

def serializeAcceptations(dates:Map[TechniqueVersion,DateTime]) : JObject = {
- ( JObject(List()) /: dates) { case (js, (version, date)) =>
+ dates.foldLeft(JObject(List())) { case (js, (version, date)) =>
js ~ (version.toString -> GeneralizedTime(date).toString)
}
}
@@ -460,7 +460,7 @@ class RoLDAPNodeGroupRepository(
con <- ldap
entries <- groupLibMutex.readLock( con.getTree(rudderDit.GROUP.dn) ) ?~! "The root category of the node group library seems to be missing in LDAP directory. Please check its content"
} yield {
- val allMaps = (emptyAll /: entries.toSeq) { case (current, e) =>
+ val allMaps = entries.toSeq.foldLeft(emptyAll) { case (current, e) =>
if(isACategory(e)) {
mapper.entry2NodeGroupCategory(e) match {
case Full(category) =>
@@ -210,13 +210,13 @@ class ItemArchiveManagerImpl(
case f:Failure => (Seq(ActiveTechniqueNotArchived(activeTechnique.id, f)), Seq.empty[DirectiveNotArchived])
}
}
- val (atNotArchived, dirNotArchived) = ( (Seq.empty[ActiveTechniqueNotArchived], Seq.empty[DirectiveNotArchived]) /: activeTechniquesInError) {
+ val (atNotArchived, dirNotArchived) = activeTechniquesInError.foldLeft((Seq.empty[ActiveTechniqueNotArchived], Seq.empty[DirectiveNotArchived])) {
case ( (ats,dirs) , (at,dir) ) => (ats ++ at, dirs ++ dir)
}
(catInError, atNotArchived, dirNotArchived)
}

- (NotArchivedElements( Seq(), Seq(), Seq()) /: byCategories) {
+ byCategories.foldLeft(NotArchivedElements( Seq(), Seq(), Seq())) {
case (NotArchivedElements(cats, ats, dirs), (cat,at,dir)) => NotArchivedElements(cats++cat, ats++at, dirs++dir)
}
}
@@ -134,7 +134,7 @@ object ZipUtils {
val c = existing :+ Zippable(getPath(f), None)


- (c /: sortFile(f.listFiles)) { (seq,ff) =>
+ sortFile(f.listFiles).foldLeft(c) { (seq,ff) =>
recZippable(ff,seq)
}
} else {
@@ -135,8 +135,8 @@ case class RuleCategory(

// Merge current map with already merged map
def mergeChildMaps (
- currentMap : ChildMap
- , alreadyMerged : ChildMap
+ alreadyMerged : ChildMap
+ , currentMap : ChildMap
) : ChildMap = {
// get all distinct keys from both map
val keys = (currentMap.keys ++ alreadyMerged.keys).toList.distinct
@@ -163,7 +163,7 @@
val baseMap : ChildMap = Map((List.empty[RuleCategoryId]) -> (this :: Nil))

// fold all maps together
- (augmentedChildMap :\ baseMap) (mergeChildMaps _ )
+ augmentedChildMap.foldLeft(baseMap) (mergeChildMaps _ )
}
}
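The parameter swap in mergeChildMaps above goes hand in hand with the change of fold direction: `:\` (foldRight) feeds the combining function (element, accumulator) while `foldLeft` feeds it (accumulator, element), so keeping mergeChildMaps as the folded function means flipping its signature. A reduced sketch with a simplified map type standing in for ChildMap; the merge logic below is invented:

```scala
object MergeChildMapsSketch {
  // simplified stand-in for ChildMap (in the diff it is a Map keyed by List[RuleCategoryId])
  type ChildMap = Map[String, List[Int]]

  // foldRight-style combiner: (element, accumulator) => accumulator
  def mergeRight(currentMap: ChildMap, alreadyMerged: ChildMap): ChildMap =
    (currentMap.keySet ++ alreadyMerged.keySet)
      .map(k => k -> (currentMap.getOrElse(k, Nil) ++ alreadyMerged.getOrElse(k, Nil)))
      .toMap

  // foldLeft-style combiner: same body, flipped parameters, matching the swap in the diff
  def mergeLeft(alreadyMerged: ChildMap, currentMap: ChildMap): ChildMap =
    mergeRight(currentMap, alreadyMerged)

  def main(args: Array[String]): Unit = {
    val maps = List[ChildMap](Map("a" -> List(1)), Map("a" -> List(2), "b" -> List(3)))
    println(maps.foldRight(Map.empty: ChildMap)(mergeRight)) // Map(a -> List(1, 2), b -> List(3))
    println(maps.foldLeft(Map.empty: ChildMap)(mergeLeft))   // same keys, value order may differ
  }
}
```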

@@ -391,7 +391,7 @@ trait PromiseGenerationService {
_ = PolicyLogger.debug(s"RuleVals built in ${timeRuleVal} ms, start to expand their values.")

nodeContextsTime = System.currentTimeMillis
- activeNodeIds = (Set[NodeId]()/:ruleVals){case(s,r) => s ++ r.nodeIds}
+ activeNodeIds = ruleVals.foldLeft(Set[NodeId]()){case(s,r) => s ++ r.nodeIds}
nodeContexts <- getNodeContexts(activeNodeIds, allNodeInfos, groupLib, allLicenses, allParameters, globalAgentRun, globalComplianceMode, globalPolicyMode) ?~! "Could not get node interpolation context"
timeNodeContexts = (System.currentTimeMillis - nodeContextsTime)
_ = PolicyLogger.debug(s"Node contexts built in ${timeNodeContexts} ms, start to build new node configurations.")
@@ -222,7 +222,7 @@ class InterpolatedValueCompilerImpl extends RegexParsers with InterpolatedValueC
def parseToken(tokens:List[Token]): InterpolationContext => Box[String] = {
def build(context: InterpolationContext) = {
val init: Box[String] = Full("")
- ( init /: tokens){
+ tokens.foldLeft(init){
case (eb:EmptyBox, _ ) => eb
case (Full(str), token) => analyse(context, token) match {
case eb:EmptyBox => eb
@@ -263,7 +263,7 @@ final object MergePolicyService {
}

// group directives by non-multi-instance, multi-instance non-multi-policy, multi-instance-multi-policy
- val groupedDrafts = (GroupedDrafts(Map(),Map(),Set()) /: updatedTrackingKeyValues) { case(grouped, draft) =>
+ val groupedDrafts = updatedTrackingKeyValues.foldLeft(GroupedDrafts(Map(),Map(),Set())) { case(grouped, draft) =>
if(draft.technique.isMultiInstance) {
draft.technique.generationMode match {
case TechniqueGenerationMode.MultipleDirectives | TechniqueGenerationMode.MultipleDirectivesWithParameters =>
@@ -239,7 +239,7 @@ class SystemVariableServiceImpl(
}

//build the final string
- (""/:roles) { (x,y) => x + y }
+ roles.foldLeft("") { (x,y) => x + y }
} else {
""
}
@@ -106,7 +106,7 @@ trait DefaultStringQueryParser extends StringQueryParser {
)
}

- val lines = ( (Full(List()):Box[List[CriterionLine]]) /: query.criteria.toList ){
+ val lines = query.criteria.toList.foldLeft(Full(List()):Box[List[CriterionLine]]){
(opt,x) => opt.flatMap(l => parseLine(x).map( _::l ) )
} match {
case f@Failure(_,_,_) => return f
@@ -196,7 +196,7 @@ trait JsonQueryLexer extends QueryLexer {
// try to parse all lines. On the first parsing error (parseCrtierion returns Failure),
// stop and return a Failure
// if all parsing are OK, return a Full(list(criterionLine)
- ( (Full(List[StringCriterionLine]()):Box[List[StringCriterionLine]]) /: arr){
+ arr.foldLeft(Full(List[StringCriterionLine]()):Box[List[StringCriterionLine]]){
(opt,x) => opt.flatMap(l=> parseCriterion(x).map( _:: l))
} match {
case Full(l) => Full(l.reverse)