Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions Sources/SwiftDriver/Driver/CompilerMode.swift
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,10 @@ extension CompilerMode {
}
}

/// Whether this compilation mode is a batch compile, i.e. whether
/// batch-mode information has been computed for it.
public var isBatchCompile: Bool {
  return batchModeInfo != nil
}

// Whether this compilation mode supports the use of bridging pre-compiled
// headers.
public var supportsBridgingPCH: Bool {
Expand Down
36 changes: 29 additions & 7 deletions Sources/SwiftDriver/Driver/Driver.swift
Original file line number Diff line number Diff line change
Expand Up @@ -859,12 +859,14 @@ extension Driver {
recordedInputModificationDates: recordedInputModificationDates)
return
}

try performTheBuild(allJobs: jobs, forceResponseFiles: forceResponseFiles)

buildRecordInfo?.writeBuildRecord(
jobs,
incrementalCompilationState?.skippedCompilationInputs)
do {
defer {
buildRecordInfo?.writeBuildRecord(
jobs,
incrementalCompilationState?.skippedCompilationInputs)
}
try performTheBuild(allJobs: jobs, forceResponseFiles: forceResponseFiles)
}

// If requested, warn for options that weren't used by the driver after the build is finished.
if parsedOptions.hasArgument(.driverWarnUnusedOptions) {
Expand Down Expand Up @@ -896,8 +898,11 @@ extension Driver {
allJobs: [Job],
forceResponseFiles: Bool
) throws {
let continueBuildingAfterErrors = computeContinueBuildingAfterErrors()
try executor.execute(
workload: .init(allJobs, incrementalCompilationState),
workload: .init(allJobs,
incrementalCompilationState,
continueBuildingAfterErrors: continueBuildingAfterErrors),
delegate: createToolExecutionDelegate(),
numParallelJobs: numParallelJobs ?? 1,
forceResponseFiles: forceResponseFiles,
Expand Down Expand Up @@ -1414,8 +1419,25 @@ extension Driver {

return numJobs
}

/// Decides whether the build should keep scheduling jobs after a job fails.
///
/// Note: Batch mode handling of serialized diagnostics requires that all
/// batches get to run, in order to make sure that all diagnostics emitted
/// during the compilation end up in at least one serialized diagnostic file.
/// Therefore, treat batch mode as implying -continue-building-after-errors.
/// (This behavior could be limited to only when serialized diagnostics are
/// being emitted, but this seems more consistent and less surprising for
/// users.)
///
/// FIXME: We don't really need (or want) a full ContinueBuildingAfterErrors.
/// If we fail to precompile a bridging header, for example, there's no need
/// to go on to compilation of source files, and if compilation of source files
/// fails, we shouldn't try to link. Instead, we'd want to let all jobs finish
/// but not schedule any new ones.
private mutating func computeContinueBuildingAfterErrors() -> Bool {
  // Short-circuit on batch mode first, mirroring the original evaluation
  // order (parsedOptions is only consulted when not batch-compiling).
  if compilerMode.isBatchCompile {
    return true
  }
  return parsedOptions.contains(.continueBuildingAfterErrors)
}
}


extension Diagnostic.Message {
static func remark_max_determinism_overriding(_ option: Option) -> Diagnostic.Message {
.remark("SWIFTC_MAXIMUM_DETERMINISM overriding \(option.spelling)")
Expand Down
24 changes: 19 additions & 5 deletions Sources/SwiftDriver/Execution/DriverExecutor.swift
Original file line number Diff line number Diff line change
Expand Up @@ -47,12 +47,26 @@ public protocol DriverExecutor {
func description(of job: Job, forceResponseFiles: Bool) throws -> String
}

public enum DriverExecutorWorkload {
case all([Job])
case incremental(IncrementalCompilationState)
public struct DriverExecutorWorkload {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I do like this better, from a SPM compatibility perspective.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thanks!

/// Whether job failures should not cancel the rest of the build
/// (-continue-building-after-errors; batch mode also implies this —
/// see `Driver.computeContinueBuildingAfterErrors()`).
public let continueBuildingAfterErrors: Bool
/// The shape of the work to execute: either a fixed list of jobs, or an
/// incremental compilation whose jobs are determined by its state.
public enum Kind {
/// Run the given jobs.
case all([Job])
/// Run jobs as directed by the incremental compilation state.
case incremental(IncrementalCompilationState)
}
public let kind: Kind

/// Creates a workload.
/// - Parameters:
///   - allJobs: Every planned job; used when there is no incremental
///     compilation state.
///   - incrementalCompilationState: When non-nil, the workload is
///     incremental and `allJobs` is not used.
///   - continueBuildingAfterErrors: Whether job failures should not cancel
///     the remaining build.
public init(_ allJobs: [Job],
            _ incrementalCompilationState: IncrementalCompilationState?,
            continueBuildingAfterErrors: Bool
) {
  self.continueBuildingAfterErrors = continueBuildingAfterErrors
  if let incrementalState = incrementalCompilationState {
    self.kind = .incremental(incrementalState)
  } else {
    self.kind = .all(allJobs)
  }
}

init(_ allJobs: [Job], _ incrementalCompilationState: IncrementalCompilationState?) {
self = incrementalCompilationState.map {.incremental($0)} ?? .all(allJobs)
/// Convenience factory: a non-incremental workload over `jobs` that does
/// not continue building after errors.
/// (Declaration-modifier order fixed: access level conventionally precedes
/// `static` in Swift.)
public static func all(_ jobs: [Job]) -> Self {
  .init(jobs, nil, continueBuildingAfterErrors: false)
}
}

Expand Down
2 changes: 1 addition & 1 deletion Sources/SwiftDriver/Jobs/Planning.swift
Original file line number Diff line number Diff line change
Expand Up @@ -545,7 +545,7 @@ extension Driver {
/// So, in order to avoid making jobs and rebatching, the code would have to just get outputs for each
/// compilation. But `compileJob` intermixes the output computation with other stuff.
mutating func formBatchedJobs(_ jobs: [Job], forIncremental: Bool) throws -> [Job] {
guard let _ = compilerMode.batchModeInfo else {
guard compilerMode.isBatchCompile else {
// Don't even go through the logic so as to not print out confusing
// "batched foobar" messages.
return jobs
Expand Down
40 changes: 34 additions & 6 deletions Sources/SwiftDriverExecution/MultiJobExecutor.swift
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,12 @@ public final class MultiJobExecutor {
/// any newly-required job. Set only once.
private(set) var executeAllJobsTaskBuildEngine: LLTaskBuildEngine? = nil

/// If a job fails, the driver needs to stop running jobs.
private(set) var isBuildCancelled = false

/// The value of the -continue-building-after-errors option: when true,
/// job failures do not cancel the remaining build.
let continueBuildingAfterErrors: Bool


init(
argsResolver: ArgsResolver,
Expand All @@ -106,7 +112,8 @@ public final class MultiJobExecutor {
producerMap: self.producerMap,
primaryIndices: self.primaryIndices,
postCompileIndices: self.postCompileIndices,
incrementalCompilationState: self.incrementalCompilationState
incrementalCompilationState: self.incrementalCompilationState,
continueBuildingAfterErrors: self.continueBuildingAfterErrors
) = Self.fillInJobsAndProducers(workload)

self.argsResolver = argsResolver
Expand All @@ -131,20 +138,21 @@ public final class MultiJobExecutor {
producerMap: [VirtualPath: Int],
primaryIndices: Range<Int>,
postCompileIndices: Range<Int>,
incrementalCompilationState: IncrementalCompilationState?)
incrementalCompilationState: IncrementalCompilationState?,
continueBuildingAfterErrors: Bool)
{
var jobs = [Job]()
var producerMap = [VirtualPath: Int]()
let primaryIndices, postCompileIndices: Range<Int>
let incrementalCompilationState: IncrementalCompilationState?
switch workload {
switch workload.kind {
case let .incremental(ics):
incrementalCompilationState = ics
primaryIndices = Self.addJobs(
ics.mandatoryPreOrCompileJobsInOrder,
to: &jobs,
producing: &producerMap
)
)
postCompileIndices = Self.addJobs(
ics.postCompileJobs,
to: &jobs,
Expand All @@ -161,7 +169,8 @@ public final class MultiJobExecutor {
producerMap: producerMap,
primaryIndices: primaryIndices,
postCompileIndices: postCompileIndices,
incrementalCompilationState: incrementalCompilationState)
incrementalCompilationState: incrementalCompilationState,
continueBuildingAfterErrors: workload.continueBuildingAfterErrors)
}

/// Allow for dynamically adding jobs, since some compile jobs are added dynamically.
Expand Down Expand Up @@ -225,6 +234,19 @@ public final class MultiJobExecutor {
executeAllJobsTaskBuildEngine!.taskNeedsInput(key, inputID: index)
}
}

/// Inspects a finished job's result and marks the build as cancelled when
/// the job failed and -continue-building-after-errors is not in effect.
/// A job killed by a signal always cancels the build (non-Windows only,
/// since `.signalled` does not exist there).
fileprivate func cancelBuildIfNeeded(_ result: ProcessResult) {
  switch result.exitStatus {
  case .terminated(let exitCode):
    // A non-zero exit cancels the build unless the user (or batch mode)
    // asked to keep going after errors.
    if exitCode != EXIT_SUCCESS && !continueBuildingAfterErrors {
      isBuildCancelled = true
    }
#if !os(Windows)
  case .signalled:
    isBuildCancelled = true
#endif
  default:
    break
  }
}
}

/// The work to be done.
Expand Down Expand Up @@ -426,8 +448,8 @@ class ExecuteJobRule: LLBuildRule {
rememberIfInputSucceeded(engine, value: value)
}

/// Called when the build engine thinks all inputs are available in order to run the job.
override func inputsAvailable(_ engine: LLTaskBuildEngine) {
// Return early if any of the inputs failed.
guard allInputsSucceeded else {
return engine.taskIsComplete(DriverBuildValue.jobExecution(success: false))
}
Expand Down Expand Up @@ -464,6 +486,10 @@ class ExecuteJobRule: LLBuildRule {
}

private func executeJob(_ engine: LLTaskBuildEngine) {
if context.isBuildCancelled {
engine.taskIsComplete(DriverBuildValue.jobExecution(success: false))
return
}
let context = self.context
let resolver = context.argsResolver
let job = myJob
Expand Down Expand Up @@ -514,6 +540,8 @@ class ExecuteJobRule: LLBuildRule {
context.executorDelegate.jobFinished(job: job, result: result, pid: pid)
}
value = .jobExecution(success: success)

context.cancelBuildIfNeeded(result)
} catch {
if error is DiagnosticData {
context.diagnosticsEngine.emit(error)
Expand Down
28 changes: 28 additions & 0 deletions Tests/SwiftDriverTests/SwiftDriverTests.swift
Original file line number Diff line number Diff line change
Expand Up @@ -1471,6 +1471,34 @@ final class SwiftDriverTests: XCTestCase {
}
}

/// Verifies that -enable-batch-mode implies continue-building-after-errors:
/// the mock executor asserts the flag on the workload it receives.
func testBatchModeContinueAfterErrors() throws {
  // Intentionally skipped; the code below is kept compiling for when the
  // -driver-use-frontend-path fix lands.
  throw XCTSkip("This test requires the fix to honoring -driver-use-frontend-path")
  struct MockExecutor: DriverExecutor {
    let resolver = try! ArgsResolver(fileSystem: localFileSystem)

    /// Thrown by any executor entry point the test does not expect to reach.
    struct ShouldNeverGetHere: LocalizedError {}

    func execute(job: Job, forceResponseFiles: Bool, recordedInputModificationDates: [TypedVirtualPath : Date]) throws -> ProcessResult {
      throw ShouldNeverGetHere()
    }
    func execute(workload: DriverExecutorWorkload,
                 delegate: JobExecutionDelegate,
                 numParallelJobs: Int,
                 forceResponseFiles: Bool,
                 recordedInputModificationDates: [TypedVirtualPath : Date]) throws {
      // The assertion under test: batch mode must turn the flag on.
      XCTAssert(workload.continueBuildingAfterErrors)
    }
    func checkNonZeroExit(args: String..., environment: [String : String]) throws -> String {
      throw ShouldNeverGetHere()
    }
    func description(of job: Job, forceResponseFiles: Bool) throws -> String {
      throw ShouldNeverGetHere()
    }
  }

  // Discard the driver instead of binding it to an unused `driver` local,
  // which produced an unused-variable warning.
  // NOTE(review): only the Driver initializer runs here; nothing appears to
  // invoke MockExecutor.execute(workload:) — confirm the skipped test body
  // is complete once the skip is removed.
  _ = try Driver(args: ["swiftc", "foo1.swift", "bar1.swift", "-enable-batch-mode", "-driver-use-frontend-path", "/bin/echo"], executor: MockExecutor())
}

func testSingleThreadedWholeModuleOptimizationCompiles() throws {
var driver1 = try Driver(args: ["swiftc", "-whole-module-optimization", "foo.swift", "bar.swift", "-module-name", "Test", "-target", "x86_64-apple-macosx10.15", "-emit-module-interface", "-emit-objc-header-path", "Test-Swift.h", "-emit-private-module-interface-path", "Test.private.swiftinterface"])
let plannedJobs = try driver1.planBuild()
Expand Down