diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 507c288bcf..e7384568ec 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -132,7 +132,7 @@ jobs:
python-version: 3.7
- uses: actions/download-artifact@v3
with:
- name: build-artifacts
+ name: artifact-onefuzztypes
path: artifacts
- name: Build
shell: bash
@@ -157,7 +157,7 @@ jobs:
cp dist/onefuzz.exe ${GITHUB_WORKSPACE}/artifacts/windows-cli/
- uses: actions/upload-artifact@v3
with:
- name: build-artifacts
+ name: artifact-cli
path: artifacts
- name: lint
shell: bash
@@ -220,7 +220,7 @@ jobs:
- run: src/ci/onefuzztypes.sh
- uses: actions/upload-artifact@v3
with:
- name: build-artifacts
+ name: artifact-onefuzztypes
path: artifacts
proxy:
runs-on: ubuntu-20.04
@@ -239,7 +239,7 @@ jobs:
- run: src/ci/proxy.sh
- uses: actions/upload-artifact@v3
with:
- name: build-artifacts
+ name: artifact-proxy
path: artifacts
service:
runs-on: ubuntu-22.04
@@ -314,19 +314,31 @@ jobs:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v3
+ - uses: actions/cache@v3
+ id: cache-build
+ with:
+ key: afl|${{runner.os}}-${{runner.arch}}|${{ hashFiles('src/ci/afl.sh') }}
+ path: artifacts
- run: src/ci/afl.sh
+ if: steps.cache-build.outputs.cache-hit != 'true'
- uses: actions/upload-artifact@v3
with:
- name: build-artifacts
+ name: artifact-afl
path: artifacts
aflpp:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v3
+ - uses: actions/cache@v3
+ id: cache-build
+ with:
+ key: aflpp|${{runner.os}}-${{runner.arch}}|${{ hashFiles('src/ci/aflpp.sh') }}
+ path: artifacts
- run: src/ci/aflpp.sh
+ if: steps.cache-build.outputs.cache-hit != 'true'
- uses: actions/upload-artifact@v3
with:
- name: build-artifacts
+ name: artifact-aflpp
path: artifacts
bicep-check:
name: Check Bicep files
@@ -352,7 +364,7 @@ jobs:
shell: bash
- uses: actions/upload-artifact@v3
with:
- name: build-artifacts
+ name: artifact-dotnet-fuzzing-tools-linux
path: artifacts
dotnet-fuzzing-tools-windows:
runs-on: windows-2022
@@ -367,7 +379,7 @@ jobs:
shell: pwsh
- uses: actions/upload-artifact@v3
with:
- name: build-artifacts
+ name: artifact-dotnet-fuzzing-tools-windows
path: artifacts
radamsa-linux:
runs-on: ubuntu-20.04
@@ -378,13 +390,13 @@ jobs:
with:
# key on the shell script only: this script fixes the
# version to a particular commit, so if it changes we need to rebuild
- key: radamsa-linux-${{ hashFiles('src/ci/radamsa-linux.sh') }}
+ key: radamsa|${{runner.os}}-${{runner.arch}}|${{ hashFiles('src/ci/radamsa-linux.sh') }}
path: artifacts
- run: src/ci/radamsa-linux.sh
if: steps.cache-radamsa-build-linux.outputs.cache-hit != 'true'
- uses: actions/upload-artifact@v3
with:
- name: build-artifacts
+ name: artifact-radamsa-linux
path: artifacts
radamsa-win64:
runs-on: windows-2019
@@ -395,24 +407,25 @@ jobs:
with:
# key on the shell script only: this script fixes the
# version to a particular commit, so if it changes we need to rebuild
- key: radamsa-windows-${{ hashFiles('src/ci/radamsa-windows.sh') }}
+ key: radamsa|${{runner.os}}-${{runner.arch}}|${{ hashFiles('src/ci/radamsa-windows.sh') }}
path: artifacts
- run: c:\msys64\usr\bin\bash src/ci/radamsa-windows.sh
if: steps.cache-radamsa-build-windows.outputs.cache-hit != 'true'
- uses: actions/upload-artifact@v3
with:
- name: build-artifacts
+ name: artifact-radamsa-windows
path: artifacts
package:
needs:
- agent
- azcopy
- cli
- - onefuzztypes
- proxy
- service
- afl
- aflpp
+ - dotnet-fuzzing-tools-linux
+ - dotnet-fuzzing-tools-windows
- radamsa-linux
- radamsa-win64
runs-on: ubuntu-20.04
@@ -422,6 +435,38 @@ jobs:
with:
name: build-artifacts
path: artifacts
+ - uses: actions/download-artifact@v3
+ with:
+ name: artifact-cli
+ path: artifacts
+ - uses: actions/download-artifact@v3
+ with:
+ name: artifact-proxy
+ path: artifacts
+ - uses: actions/download-artifact@v3
+ with:
+ name: artifact-radamsa-linux
+ path: artifacts
+ - uses: actions/download-artifact@v3
+ with:
+ name: artifact-radamsa-windows
+ path: artifacts
+ - uses: actions/download-artifact@v3
+ with:
+ name: artifact-afl
+ path: artifacts
+ - uses: actions/download-artifact@v3
+ with:
+ name: artifact-aflpp
+ path: artifacts
+ - uses: actions/download-artifact@v3
+ with:
+ name: artifact-dotnet-fuzzing-tools-linux
+ path: artifacts
+ - uses: actions/download-artifact@v3
+ with:
+ name: artifact-dotnet-fuzzing-tools-windows
+ path: artifacts
- uses: actions/setup-python@v4
with:
python-version: 3.7
@@ -436,6 +481,7 @@ jobs:
isort --profile black . --check
black . --check
flake8 *.py
+ rm -r .mypy_cache
- name: Package Onefuzz
run: |
set -ex
@@ -540,7 +586,8 @@ jobs:
- uses: actions/upload-artifact@v3
with:
- name: integration-test-artifacts
+ # NB: this name is used by check-pr.py
+ name: artifact-integration-tests-linux
path: src/integration-tests/artifacts
build-integration-tests-windows:
runs-on: windows-2019
@@ -623,52 +670,39 @@ jobs:
shell: powershell
- uses: actions/upload-artifact@v3
with:
- name: integration-test-artifacts
+ # NB: this name is used by check-pr.py
+ name: artifact-integration-tests-windows
path: src/integration-tests/artifacts
integration-tests-linux:
runs-on: ubuntu-20.04
needs:
- build-integration-tests-linux
- dotnet-fuzzing-tools-linux
- # even though this job doesn't use the artifacts for these other jobs,
- # we must include them or we get spurious failures when the download-artifact
- # step tries to download the named artifact which includes files from
- # all of these jobs
- - agent
- - azcopy
- - cli
- - onefuzztypes
- - proxy
- - service
- - afl
- - aflpp
- - radamsa-linux
- - radamsa-win64
steps:
- uses: actions/checkout@v3
- uses: actions/download-artifact@v3
with:
- name: build-artifacts
- path: build-artifacts
+ name: artifact-dotnet-fuzzing-tools-linux
+ path: dotnet-fuzzing-tools-linux
- uses: actions/download-artifact@v3
with:
- name: integration-test-artifacts
- path: integration-test-artifacts
+ name: artifact-integration-tests-linux
+ path: integration-tests-linux
- name: test
shell: bash
run: |
set -ex -o pipefail
# Must be absolute paths.
- export GOODBAD_DOTNET="${GITHUB_WORKSPACE}/integration-test-artifacts/GoodBadDotnet"
+ export GOODBAD_DOTNET="${GITHUB_WORKSPACE}/integration-tests-linux/GoodBadDotnet"
- export LIBFUZZER_DOTNET="${GITHUB_WORKSPACE}/build-artifacts/third-party/dotnet-fuzzing-linux/libfuzzer-dotnet/libfuzzer-dotnet"
+ export LIBFUZZER_DOTNET="${GITHUB_WORKSPACE}/dotnet-fuzzing-tools-linux/third-party/dotnet-fuzzing-linux/libfuzzer-dotnet/libfuzzer-dotnet"
chmod +x $LIBFUZZER_DOTNET
- export LIBFUZZER_DOTNET_LOADER="${GITHUB_WORKSPACE}/build-artifacts/third-party/dotnet-fuzzing-linux/LibFuzzerDotnetLoader/LibFuzzerDotnetLoader"
+ export LIBFUZZER_DOTNET_LOADER="${GITHUB_WORKSPACE}/dotnet-fuzzing-tools-linux/third-party/dotnet-fuzzing-linux/LibFuzzerDotnetLoader/LibFuzzerDotnetLoader"
chmod +x $LIBFUZZER_DOTNET_LOADER
- export SHARPFUZZ="${GITHUB_WORKSPACE}/build-artifacts/third-party/dotnet-fuzzing-linux/sharpfuzz/SharpFuzz.CommandLine"
+ export SHARPFUZZ="${GITHUB_WORKSPACE}/dotnet-fuzzing-tools-linux/third-party/dotnet-fuzzing-linux/sharpfuzz/SharpFuzz.CommandLine"
chmod +x $SHARPFUZZ
./src/ci/test-libfuzzer-dotnet.sh
diff --git a/src/ApiService/ApiService/ApiService.csproj b/src/ApiService/ApiService/ApiService.csproj
index c41a852def..38c68b3919 100644
--- a/src/ApiService/ApiService/ApiService.csproj
+++ b/src/ApiService/ApiService/ApiService.csproj
@@ -11,6 +11,7 @@
+
diff --git a/src/ApiService/ApiService/Functions/Jobs.cs b/src/ApiService/ApiService/Functions/Jobs.cs
index 3064e550dd..261fb237c2 100644
--- a/src/ApiService/ApiService/Functions/Jobs.cs
+++ b/src/ApiService/ApiService/Functions/Jobs.cs
@@ -46,14 +46,16 @@ public class Jobs {
var job = new Job(
JobId: Guid.NewGuid(),
State: JobState.Init,
- Config: cfg) {
- UserInfo = userInfo.UserInfo,
- };
+ Config: cfg,
+ UserInfo: new(
+ ObjectId: userInfo.UserInfo.ObjectId,
+ ApplicationId: userInfo.UserInfo.ApplicationId));
// create the job logs container
var metadata = new Dictionary<string, string>{
{ "container_type", "logs" }, // TODO: use ContainerType.Logs enum somehow; needs snake case name
};
+
var containerName = Container.Parse($"logs-{job.JobId}");
var containerSas = await _context.Containers.CreateContainer(containerName, StorageType.Corpus, metadata);
if (containerSas is null) {
@@ -79,9 +81,9 @@ public class Jobs {
),
"job");
}
- await _context.Events.SendEvent(new EventJobCreated(job.JobId, job.Config, job.UserInfo));
- return await RequestHandling.Ok(req, JobResponse.ForJob(job));
+ await _context.Events.SendEvent(new EventJobCreated(job.JobId, job.Config, job.UserInfo));
+ return await RequestHandling.Ok(req, JobResponse.ForJob(job, taskInfo: null));
}
private async Task Delete(HttpRequestData req) {
@@ -111,7 +113,7 @@ public class Jobs {
}
}
- return await RequestHandling.Ok(req, JobResponse.ForJob(job));
+ return await RequestHandling.Ok(req, JobResponse.ForJob(job, taskInfo: null));
}
private async Task Get(HttpRequestData req) {
@@ -135,11 +137,10 @@ public class Jobs {
// TODO: search.WithTasks is not checked in Python code?
var taskInfo = await _context.TaskOperations.SearchStates(jobId).Select(TaskToJobTaskInfo).ToListAsync();
- job = job with { TaskInfo = taskInfo };
- return await RequestHandling.Ok(req, JobResponse.ForJob(job));
+ return await RequestHandling.Ok(req, JobResponse.ForJob(job, taskInfo));
}
var jobs = await _context.JobOperations.SearchState(states: search.State ?? Enumerable.Empty<JobState>()).ToListAsync();
- return await RequestHandling.Ok(req, jobs.Select(j => JobResponse.ForJob(j)));
+ return await RequestHandling.Ok(req, jobs.Select(j => JobResponse.ForJob(j, taskInfo: null)));
}
}
diff --git a/src/ApiService/ApiService/Functions/Node.cs b/src/ApiService/ApiService/Functions/Node.cs
index 92d9536036..6d8ad921bb 100644
--- a/src/ApiService/ApiService/Functions/Node.cs
+++ b/src/ApiService/ApiService/Functions/Node.cs
@@ -59,7 +59,7 @@ public class Node {
_context.NodeMessageOperations.GetMessages(machineId).ToListAsync().AsTask());
var commands = messages.Select(m => m.Message).ToList();
- return await RequestHandling.Ok(req, NodeToNodeSearchResult(node with { Tasks = tasks, Messages = commands }));
+ return await RequestHandling.Ok(req, NodeToNodeSearchResult(node, tasks, commands));
}
var nodes = await _context.NodeOperations.SearchStates(
@@ -67,10 +67,10 @@ public class Node {
poolName: search.PoolName,
scalesetId: search.ScalesetId).ToListAsync();
- return await RequestHandling.Ok(req, nodes.Select(NodeToNodeSearchResult));
+ return await RequestHandling.Ok(req, nodes.Select(x => NodeToNodeSearchResult(x, null, null)));
}
- private static NodeSearchResult NodeToNodeSearchResult(Service.Node node) {
+ private static NodeSearchResult NodeToNodeSearchResult(Service.Node node, List? tasks, List? messages) {
return new NodeSearchResult(
PoolId: node.PoolId,
PoolName: node.PoolName,
@@ -82,7 +82,9 @@ public class Node {
ScalesetId: node.ScalesetId,
ReimageRequested: node.ReimageRequested,
DeleteRequested: node.DeleteRequested,
- DebugKeepNode: node.DebugKeepNode);
+ DebugKeepNode: node.DebugKeepNode,
+ Tasks: tasks,
+ Messages: messages);
}
private async Async.Task Patch(HttpRequestData req) {
diff --git a/src/ApiService/ApiService/Functions/TimerRetention.cs b/src/ApiService/ApiService/Functions/TimerRetention.cs
index bcc58d9af2..284dcdbfb3 100644
--- a/src/ApiService/ApiService/Functions/TimerRetention.cs
+++ b/src/ApiService/ApiService/Functions/TimerRetention.cs
@@ -71,45 +71,6 @@ public class TimerRetention {
}
}
- await foreach (var job in _jobOps.QueryAsync(Query.And(timeFilter, Query.EqualEnum("state", JobState.Enabled)))) {
- if (job.UserInfo is not null && job.UserInfo.Upn is not null) {
- _log.LogInformation("removing PII from job {JobId}", job.JobId);
- var userInfo = job.UserInfo with { Upn = null };
- var updatedJob = job with { UserInfo = userInfo };
- var r = await _jobOps.Replace(updatedJob);
- if (!r.IsOk) {
- _log.AddHttpStatus(r.ErrorV);
- _log.LogError("Failed to save job {JobId}", updatedJob.JobId);
- }
- }
- }
-
- await foreach (var task in _taskOps.QueryAsync(Query.And(timeFilter, Query.EqualEnum("state", TaskState.Stopped)))) {
- if (task.UserInfo is not null && task.UserInfo.Upn is not null) {
- _log.LogInformation("removing PII from task {TaskId}", task.TaskId);
- var userInfo = task.UserInfo with { Upn = null };
- var updatedTask = task with { UserInfo = userInfo };
- var r = await _taskOps.Replace(updatedTask);
- if (!r.IsOk) {
- _log.AddHttpStatus(r.ErrorV);
- _log.LogError("Failed to save task {TaskId}", updatedTask.TaskId);
- }
- }
- }
-
- await foreach (var repro in _reproOps.QueryAsync(timeFilter)) {
- if (repro.UserInfo is not null && repro.UserInfo.Upn is not null) {
- _log.LogInformation("removing PII from repro: {VmId}", repro.VmId);
- var userInfo = repro.UserInfo with { Upn = null };
- var updatedRepro = repro with { UserInfo = userInfo };
- var r = await _reproOps.Replace(updatedRepro);
- if (!r.IsOk) {
- _log.AddHttpStatus(r.ErrorV);
- _log.LogError("Failed to save repro {VmId}", updatedRepro.VmId);
- }
- }
- }
-
//delete Task queues for tasks that do not exist in the table (manually deleted from the table)
//delete Pool queues for pools that were deleted before https://github.com/microsoft/onefuzz/issues/2430 got fixed
await foreach (var q in _queue.ListQueues(StorageType.Corpus)) {
diff --git a/src/ApiService/ApiService/OneFuzzTypes/Events.cs b/src/ApiService/ApiService/OneFuzzTypes/Events.cs
index 2726a9b0a0..d81e083db4 100644
--- a/src/ApiService/ApiService/OneFuzzTypes/Events.cs
+++ b/src/ApiService/ApiService/OneFuzzTypes/Events.cs
@@ -106,7 +106,7 @@ public class EventTypeProvider : ITypeProvider {
public record EventTaskStopped(
Guid JobId,
Guid TaskId,
- UserInfo? UserInfo,
+ StoredUserInfo? UserInfo,
TaskConfig Config
) : BaseEvent();
@@ -115,7 +115,7 @@ public record EventTaskFailed(
Guid JobId,
Guid TaskId,
Error Error,
- UserInfo? UserInfo,
+ StoredUserInfo? UserInfo,
TaskConfig Config
) : BaseEvent();
@@ -124,7 +124,7 @@ TaskConfig Config
public record EventJobCreated(
Guid JobId,
JobConfig Config,
- UserInfo? UserInfo
+ StoredUserInfo? UserInfo
) : BaseEvent();
@@ -139,7 +139,7 @@ public record JobTaskStopped(
public record EventJobStopped(
Guid JobId,
JobConfig Config,
- UserInfo? UserInfo,
+ StoredUserInfo? UserInfo,
List<JobTaskStopped> TaskInfo
) : BaseEvent(), ITruncatable {
public BaseEvent Truncate(int maxLength) {
@@ -155,7 +155,7 @@ public record EventTaskCreated(
Guid JobId,
Guid TaskId,
TaskConfig Config,
- UserInfo? UserInfo
+ StoredUserInfo? UserInfo
) : BaseEvent();
[EventType(EventType.TaskStateUpdated)]
@@ -375,7 +375,7 @@ public DownloadableEventMessage(Guid EventId, EventType EventType, BaseEvent Eve
public record EventMessage(
Guid EventId,
EventType EventType,
- [property: TypeDiscrimnatorAttribute("EventType", typeof(EventTypeProvider))]
+ [property: TypeDiscrimnator("EventType", typeof(EventTypeProvider))]
[property: JsonConverter(typeof(BaseEventConverter))]
BaseEvent Event,
Guid InstanceId,
@@ -401,7 +401,7 @@ public record EventMessage(
public class BaseEventConverter : JsonConverter<BaseEvent> {
public override BaseEvent? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) {
- return null;
+ throw new NotSupportedException("BaseEvent cannot be read");
}
public override void Write(Utf8JsonWriter writer, BaseEvent value, JsonSerializerOptions options) {
diff --git a/src/ApiService/ApiService/OneFuzzTypes/Model.cs b/src/ApiService/ApiService/OneFuzzTypes/Model.cs
index 671305c41e..7bbc24127a 100644
--- a/src/ApiService/ApiService/OneFuzzTypes/Model.cs
+++ b/src/ApiService/ApiService/OneFuzzTypes/Model.cs
@@ -39,8 +39,8 @@ public record TaskHeartbeatEntry(
Guid TaskId,
Guid? JobId,
Guid MachineId,
- HeartbeatData[] Data
- );
+ HeartbeatData[] Data);
+
public record NodeHeartbeatEntry(Guid NodeId, HeartbeatData[] Data);
public record NodeCommandStopIfFree();
@@ -51,7 +51,6 @@ public record StopTaskNodeCommand(Guid TaskId);
public record NodeCommandAddSshKey(string PublicKey);
-
public record NodeCommand
(
[property: JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
@@ -114,11 +113,7 @@ public record Node
bool DeleteRequested = false,
bool DebugKeepNode = false,
bool Managed = true
-) : StatefulEntityBase<NodeState>(State) {
-
- public List? Tasks { get; set; }
- public List? Messages { get; set; }
-}
+) : StatefulEntityBase<NodeState>(State) { }
public record Forward
@@ -287,7 +282,7 @@ public record Task(
ISecret? Auth = null,
DateTimeOffset? Heartbeat = null,
DateTimeOffset? EndTime = null,
- UserInfo? UserInfo = null) : StatefulEntityBase<TaskState>(State) {
+ StoredUserInfo? UserInfo = null) : StatefulEntityBase<TaskState>(State) {
}
public record TaskEvent(
@@ -727,7 +722,7 @@ public record Repro(
Error? Error = null,
string? Ip = null,
DateTimeOffset? EndTime = null,
- UserInfo? UserInfo = null
+ StoredUserInfo? UserInfo = null
) : StatefulEntityBase<ReproState>(State);
// TODO: Make this >1 and < 7*24 (more than one hour, less than seven days)
@@ -908,12 +903,13 @@ public record Job(
[PartitionKey][RowKey] Guid JobId,
JobState State,
JobConfig Config,
+ StoredUserInfo? UserInfo,
string? Error = null,
DateTimeOffset? EndTime = null
-) : StatefulEntityBase<JobState>(State) {
- public List<JobTaskInfo>? TaskInfo { get; set; }
- public UserInfo? UserInfo { get; set; }
-}
+) : StatefulEntityBase<JobState>(State) { }
+
+// This is like UserInfo but lacks the UPN:
+public record StoredUserInfo(Guid? ApplicationId, Guid? ObjectId);
public record Nsg(string Name, Region Region) {
public static Nsg ForRegion(Region region)
diff --git a/src/ApiService/ApiService/OneFuzzTypes/Responses.cs b/src/ApiService/ApiService/OneFuzzTypes/Responses.cs
index ee78f7b3cd..d9c3dd48af 100644
--- a/src/ApiService/ApiService/OneFuzzTypes/Responses.cs
+++ b/src/ApiService/ApiService/OneFuzzTypes/Responses.cs
@@ -33,8 +33,9 @@ public record NodeSearchResult(
ScalesetId? ScalesetId,
bool ReimageRequested,
bool DeleteRequested,
- bool DebugKeepNode
-) : BaseResponse();
+ bool DebugKeepNode,
+ List? Tasks,
+ List? Messages) : BaseResponse();
public record TaskSearchResult(
Guid JobId,
@@ -46,7 +47,7 @@ public record TaskSearchResult(
Authentication? Auth,
DateTimeOffset? Heartbeat,
DateTimeOffset? EndTime,
- UserInfo? UserInfo,
+ StoredUserInfo? UserInfo,
List<TaskEventSummary> Events,
List<NodeAssignment> Nodes,
[property: JsonPropertyName("Timestamp")] // must retain capital T for backcompat
@@ -96,18 +97,20 @@ public record JobResponse(
string? Error,
DateTimeOffset? EndTime,
List<JobTaskInfo>? TaskInfo,
+ StoredUserInfo? UserInfo,
[property: JsonPropertyName("Timestamp")] // must retain capital T for backcompat
DateTimeOffset? Timestamp
// not including UserInfo from Job model
) : BaseResponse() {
- public static JobResponse ForJob(Job j)
+ public static JobResponse ForJob(Job j, List<JobTaskInfo>? taskInfo)
=> new(
JobId: j.JobId,
State: j.State,
Config: j.Config,
Error: j.Error,
EndTime: j.EndTime,
- TaskInfo: j.TaskInfo,
+ TaskInfo: taskInfo,
+ UserInfo: j.UserInfo,
Timestamp: j.Timestamp
);
}
@@ -237,7 +240,7 @@ public record ReproVmResponse(
Error? Error = null,
string? Ip = null,
DateTimeOffset? EndTime = null,
- UserInfo? UserInfo = null
+ StoredUserInfo? UserInfo = null
) : BaseResponse() {
public static ReproVmResponse FromRepro(Repro repro, Authentication? auth) {
diff --git a/src/ApiService/ApiService/OneFuzzTypes/Webhooks.cs b/src/ApiService/ApiService/OneFuzzTypes/Webhooks.cs
index 5f2a5660ef..550ae4a129 100644
--- a/src/ApiService/ApiService/OneFuzzTypes/Webhooks.cs
+++ b/src/ApiService/ApiService/OneFuzzTypes/Webhooks.cs
@@ -39,7 +39,7 @@ WebhookMessage Data
public record WebhookMessageLog(
[RowKey] Guid EventId,
EventType EventType,
- [property: TypeDiscrimnatorAttribute("EventType", typeof(EventTypeProvider))]
+ [property: TypeDiscrimnator("EventType", typeof(EventTypeProvider))]
[property: JsonConverter(typeof(BaseEventConverter))]
BaseEvent Event,
Guid InstanceId,
diff --git a/src/ApiService/ApiService/onefuzzlib/ReproOperations.cs b/src/ApiService/ApiService/onefuzzlib/ReproOperations.cs
index ec5c66cd93..cd3e289402 100644
--- a/src/ApiService/ApiService/onefuzzlib/ReproOperations.cs
+++ b/src/ApiService/ApiService/onefuzzlib/ReproOperations.cs
@@ -351,7 +351,7 @@ await _context.ReproOperations.GetSetupContainer(repro)
Os: task.Os,
Auth: new SecretAddress(auth),
EndTime: DateTimeOffset.UtcNow + TimeSpan.FromHours(config.Duration),
- UserInfo: userInfo);
+ UserInfo: new(ObjectId: userInfo.ObjectId, ApplicationId: userInfo.ApplicationId));
var r = await _context.ReproOperations.Insert(vm);
if (!r.IsOk) {
diff --git a/src/ApiService/ApiService/onefuzzlib/TaskOperations.cs b/src/ApiService/ApiService/onefuzzlib/TaskOperations.cs
index 18e4af9c86..a7a4cd0ebb 100644
--- a/src/ApiService/ApiService/onefuzzlib/TaskOperations.cs
+++ b/src/ApiService/ApiService/onefuzzlib/TaskOperations.cs
@@ -215,14 +215,19 @@ public TaskOperations(ILogger log, IMemoryCache cache, IOnefuzzC
return OneFuzzResult.Error(ErrorCode.INVALID_CONFIGURATION, "task must have vm or pool");
}
- var task = new Task(jobId, Guid.NewGuid(), TaskState.Init, os, config, UserInfo: userInfo);
+ var storedUserInfo = new StoredUserInfo(
+ ApplicationId: userInfo.ApplicationId,
+ ObjectId: userInfo.ObjectId);
+
+ var task = new Task(jobId, Guid.NewGuid(), TaskState.Init, os, config, UserInfo: storedUserInfo);
var r = await _context.TaskOperations.Insert(task);
if (!r.IsOk) {
_logTracer.AddHttpStatus(r.ErrorV);
_logTracer.LogError("failed to insert task {TaskId}", task.TaskId);
}
- await _context.Events.SendEvent(new EventTaskCreated(jobId, task.TaskId, config, userInfo));
+
+ await _context.Events.SendEvent(new EventTaskCreated(jobId, task.TaskId, config, storedUserInfo));
_logTracer.LogInformation("created task {JobId} {TaskId} {TaskType}", jobId, task.TaskId, task.Config.Task.Type);
return OneFuzzResult.Ok(task);
diff --git a/src/ApiService/ApiService/onefuzzlib/notifications/JinjaTemplateAdapter.cs b/src/ApiService/ApiService/onefuzzlib/notifications/JinjaTemplateAdapter.cs
index fa5c07c480..38acbc5970 100644
--- a/src/ApiService/ApiService/onefuzzlib/notifications/JinjaTemplateAdapter.cs
+++ b/src/ApiService/ApiService/onefuzzlib/notifications/JinjaTemplateAdapter.cs
@@ -196,7 +196,7 @@ public class JinjaTemplateAdapter {
new SecretValue(new Authentication("password", "public key", "private key")),
DateTimeOffset.UtcNow,
DateTimeOffset.UtcNow,
- new UserInfo(Guid.NewGuid(), Guid.NewGuid(), "upn")
+ new(Guid.NewGuid(), Guid.NewGuid())
);
var job = new Job(
@@ -209,6 +209,7 @@ public class JinjaTemplateAdapter {
duration,
"logs"
),
+ null,
"some error",
DateTimeOffset.UtcNow
);
diff --git a/src/ApiService/ApiService/onefuzzlib/orm/EntityConverter.cs b/src/ApiService/ApiService/onefuzzlib/orm/EntityConverter.cs
index ddc01f34c2..f1ae42dfa3 100644
--- a/src/ApiService/ApiService/onefuzzlib/orm/EntityConverter.cs
+++ b/src/ApiService/ApiService/onefuzzlib/orm/EntityConverter.cs
@@ -3,6 +3,7 @@
using System.Reflection;
using System.Text.Json;
using System.Text.Json.Serialization;
+using System.Threading.Tasks;
using Azure;
using Azure.Data.Tables;
@@ -12,8 +13,7 @@ public abstract record EntityBase {
[JsonIgnore]
public ETag? ETag { get; set; }
- [JsonPropertyName("Timestamp")]
- // this needs to be serialized with a capital T for backwards compat
+ [JsonIgnore]
public DateTimeOffset? Timestamp { get; set; }
// https://docs.microsoft.com/en-us/rest/api/storageservices/designing-a-scalable-partitioning-strategy-for-azure-table-storage#yyy
@@ -49,6 +49,7 @@ public class SkipRenameAttribute : Attribute { }
public class RowKeyAttribute : Attribute { }
[AttributeUsage(AttributeTargets.Parameter)]
public class PartitionKeyAttribute : Attribute { }
+
[AttributeUsage(AttributeTargets.Property)]
public class TypeDiscrimnatorAttribute : Attribute {
public string FieldName { get; }
@@ -166,8 +167,7 @@ public class EntityConverter {
return _cache.GetOrAdd(typeof(T), type => {
var constructor = type.GetConstructors()[0];
var parameterInfos = constructor.GetParameters();
- var parameters =
- parameterInfos.SelectMany(GetEntityProperties).ToArray();
+ var parameters = parameterInfos.SelectMany(GetEntityProperties).ToArray();
return new EntityInfo(typeof(T), parameters.ToLookup(x => x.name), BuildConstructerFrom(constructor));
});
@@ -177,6 +177,48 @@ public class EntityConverter {
public static T? FromJsonString<T>(string value) => JsonSerializer.Deserialize<T>(value, _options);
+ private async ValueTask<(string, object?)> PropertyToColumnValue(EntityProperty prop, object? value) {
+ if (value == null) {
+ return (prop.columnName, null);
+ }
+
+ if (prop.kind == EntityPropertyKind.PartitionKey || prop.kind == EntityPropertyKind.RowKey) {
+ return (prop.columnName, value?.ToString());
+ }
+
+ if (prop.type == typeof(Guid) || prop.type == typeof(Guid?) || prop.type == typeof(Uri)) {
+ return (prop.columnName, value?.ToString());
+ }
+
+ if (prop.type == typeof(bool)
+ || prop.type == typeof(bool?)
+ || prop.type == typeof(string)
+ || prop.type == typeof(DateTime)
+ || prop.type == typeof(DateTime?)
+ || prop.type == typeof(DateTimeOffset)
+ || prop.type == typeof(DateTimeOffset?)
+ || prop.type == typeof(int)
+ || prop.type == typeof(int?)
+ || prop.type == typeof(long)
+ || prop.type == typeof(long?)
+ || prop.type == typeof(double)
+ || prop.type == typeof(double?)) {
+ return (prop.columnName, value);
+ }
+
+ // if prop.type is a SecretData
+ if (typeof(ISecret).IsAssignableFrom(prop.type)) {
+ var secret = (ISecret)value;
+ if (!secret.IsHIddden) {
+ var kv = await _secretsOperations.StoreSecret(secret);
+ value = new SecretAddress