OrcCommand: GetThis: use Archive7z
fabienfl-orc committed Nov 9, 2020
1 parent b2628c6 commit 068ae22
Showing 2 changed files with 81 additions and 130 deletions.
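In short, this commit drops the ArchiveCreate plumbing in GetThis and drives the new Archive::Appender<Archive::Archive7z> directly. A minimal sketch of the appender lifecycle, assembled from the hunks below (error handling trimmed; output, stream and onArchived are placeholders, not names from the diff):

    #include "Archive/CompressionLevel.h"
    #include "Archive/Appender.h"
    #include "Archive/7z/Archive7z.h"

    using namespace Orc;

    std::error_code ec;
    auto level = Archive::ToCompressionLevel(output.Compression, ec);

    // A 7z archiver wrapped by an appender that queues items and writes them in batches.
    Archive::Archive7z archiver(Archive::Format::k7z, level, output.Password);
    auto appender = Archive::Appender<Archive::Archive7z>::Create(
        std::move(archiver), std::filesystem::path(output.Path), 1024 * 1024 * 50, ec);

    // One Archive::Item per stream; the optional callback fires once the item has been archived.
    appender->Add(std::make_unique<Archive::Item>(stream, L"sample.bin", onArchived));

    appender->Flush(ec);  // write queued items to the archive
    appender->Close(ec);  // finalize the 7z file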
22 changes: 15 additions & 7 deletions src/OrcCommand/GetThis.h
@@ -21,6 +21,7 @@
#include <string>

#include <boost/logic/tribool.hpp>

#include "ConfigFileReader.h"
#include "ConfigFileReader.h"
#include "MFTWalker.h"
@@ -35,6 +36,9 @@
#include "CryptoHashStream.h"
#include "FuzzyHashStream.h"

#include "Archive/Appender.h"
#include "Archive/7z/Archive7z.h"

#pragma managed(push, off)

constexpr auto GETTHIS_DEFAULT_MAXTOTALBYTES = (100 * 1024 * 1024);  // 100MB
@@ -162,10 +166,10 @@ class ORCUTILS_API Main : public UtilitiesMain
ULONGLONG SampleSize = 0LL;
FILETIME CollectionDate;

LONGLONG VolumeSerial; // |
MFT_SEGMENT_REFERENCE FRN; // |--> Uniquely identifies a data stream to collect
USHORT InstanceID; // |
size_t AttributeIndex = 0; // AttributeIndex because each attribute from the match is a sample
LONGLONG VolumeSerial;
MFT_SEGMENT_REFERENCE FRN;
USHORT InstanceID;
size_t AttributeIndex = 0;
ContentSpec Content;
std::shared_ptr<CryptoHashStream> HashStream;
std::shared_ptr<FuzzyHashStream> FuzzyHashStream;
@@ -320,7 +324,7 @@ class ORCUTILS_API Main : public UtilitiesMain
const std::wstring ComputerName;
Limits GlobalLimits;
std::unordered_set<std::wstring> SampleNames;
std::shared_ptr<Orc::ArchiveCreate> m_compressor;
std::unique_ptr<Archive::Appender<Archive::Archive7z>> m_compressor;
std::shared_ptr<Orc::TableOutput::IStreamWriter> m_tableWriter;

HRESULT ConfigureSampleStreams(SampleRef& sample) const;
@@ -335,8 +339,10 @@

using SampleWrittenCb = std::function<void(const SampleRef&, HRESULT hrWrite)>;

HRESULT
WriteSample(ArchiveCreate& compressor, std::unique_ptr<SampleRef> sample, SampleWrittenCb writtenCb = {}) const;
HRESULT WriteSample(
Archive::Appender<Archive::Archive7z>& compressor,
std::unique_ptr<SampleRef> pSample,
SampleWrittenCb writtenCb = {}) const;

HRESULT WriteSample(
const std::filesystem::path& outputDir,
@@ -396,5 +402,7 @@ class ORCUTILS_API Main : public UtilitiesMain
HRESULT CloseArchiveOutput();
HRESULT CloseDirectoryOutput();
};

} // namespace Command::GetThis

} // namespace Orc
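Net effect on the header, with the diff context stripped (only the declarations touched by this commit, as they appear in the hunks above):

    #include "Archive/Appender.h"
    #include "Archive/7z/Archive7z.h"

    // The sample compressor is now owned as a 7z appender instead of an ArchiveCreate.
    std::unique_ptr<Archive::Appender<Archive::Archive7z>> m_compressor;

    // WriteSample takes the appender by reference.
    HRESULT WriteSample(
        Archive::Appender<Archive::Archive7z>& compressor,
        std::unique_ptr<SampleRef> pSample,
        SampleWrittenCb writtenCb = {}) const;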
189 changes: 66 additions & 123 deletions src/OrcCommand/GetThis_Run.cpp
@@ -1,9 +1,10 @@
//
// SPDX-License-Identifier: LGPL-2.1-or-later
//
// Copyright © 2011-2019 ANSSI. All Rights Reserved.
// Copyright © 2011-2020 ANSSI. All Rights Reserved.
//
// Author(s): Jean Gautier (ANSSI)
// fabienfl (ANSSI)
//

#include "stdafx.h"
@@ -36,7 +37,10 @@

#include "NtfsDataStructures.h"

using namespace std;
#include "Archive/CompressionLevel.h"
#include "Archive/Appender.h"
#include "Archive/7z/Archive7z.h"

namespace fs = std::filesystem;

using namespace Orc;
@@ -50,47 +54,26 @@ enum class CompressorFlags : uint32_t
kComputeHash = 1
};

std::shared_ptr<ArchiveCreate>
CreateCompressor(const OutputSpec& outputSpec, CompressorFlags flags, HRESULT& hr, logger& _L_)
std::unique_ptr<Archive::Appender<Archive::Archive7z>> CreateCompressor(const OutputSpec& outputSpec)
{
const bool computeHash = (static_cast<uint32_t>(flags) & static_cast<uint32_t>(CompressorFlags::kComputeHash));

auto compressor = ArchiveCreate::MakeCreate(outputSpec.ArchiveFormat, _L_, computeHash);
if (compressor == nullptr)
{
hr = E_POINTER;
log::Error(_L_, hr, L"Failed calling MakeCreate for archive '%s'\r\n", outputSpec.Path.c_str());
return nullptr;
}
using namespace Archive;

hr = compressor->InitArchive(outputSpec.Path);
if (FAILED(hr))
std::error_code ec;
auto compressionLevel = ToCompressionLevel(outputSpec.Compression, ec);
if (ec)
{
log::Error(_L_, hr, L"Failed to initialize archive '%s'\r\n", outputSpec.Path.c_str());
return nullptr;
return {};
}

if (!outputSpec.Password.empty())
{
hr = compressor->SetPassword(outputSpec.Password);
if (FAILED(hr))
{
log::Error(_L_, hr, L"Failed to set password for '%s'\r\n", outputSpec.Path.c_str());
return nullptr;
}
}
Archive::Archive7z archiver(Archive::Format::k7z, compressionLevel, outputSpec.Password);

hr = compressor->SetCompressionLevel(outputSpec.Compression);
if (FAILED(hr))
auto appender = Appender<Archive7z>::Create(std::move(archiver), fs::path(outputSpec.Path), 1024 * 1024 * 50, ec);
if (ec)
{
log::Error(_L_, hr, L"Failed to set compression level for '%s'\r\n", outputSpec.Path.c_str());
return nullptr;
return {};
}

compressor->SetCallback(
[&_L_](const OrcArchive::ArchiveItem& item) { log::Info(_L_, L"\t%s\r\n", item.Path.c_str()); });

return compressor;
return appender;
}

std::shared_ptr<TableOutput::IStreamWriter> CreateCsvWriter(
@@ -129,33 +112,39 @@ std::shared_ptr<TableOutput::IStreamWriter> CreateCsvWriter(
return csvWriter;
}

std::shared_ptr<TemporaryStream> CreateLogStream(const std::filesystem::path& out, HRESULT& hr, logger& _L_)
void CompressTable(
const std::unique_ptr<Archive::Appender<Archive::Archive7z>>& compressor,
const std::shared_ptr<TableOutput::IStreamWriter>& tableWriter)
{
auto logWriter = std::make_shared<LogFileWriter>(0x1000);
logWriter->SetConsoleLog(_L_->ConsoleLog());
logWriter->SetDebugLog(_L_->DebugLog());
logWriter->SetVerboseLog(_L_->VerboseLog());

auto logStream = std::make_shared<TemporaryStream>(logWriter);
std::error_code ec;

hr = logStream->Open(out.parent_path(), out.filename(), 5 * 1024 * 1024);
HRESULT hr = tableWriter->Flush();
if (FAILED(hr))
{
log::Error(_L_, hr, L"Failed to create temp stream\r\n");
return nullptr;
Log::Error(L"Failed to flush csv writer (code: {:#x})", hr);
}

auto tableStream = tableWriter->GetStream();
if (tableStream == nullptr || tableStream->GetSize() == 0)
{
return;
}

hr = _L_->LogToStream(logStream);
hr = tableStream->SetFilePointer(0, FILE_BEGIN, nullptr);
if (FAILED(hr))
{
log::Error(_L_, hr, L"Failed to initialize temp logging\r\n");
return nullptr;
Log::Error(L"Failed to rewind csv stream (code: {:#x})", hr);
}

return logStream;
auto item = std::make_unique<Archive::Item>(tableStream, L"GetThis.csv");
compressor->Add(std::move(item));
if (ec)
{
Log::Error(L"Failed to add GetThis.csv (code: {:#x})", ec.value());
}
}
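This helper replaces the CSV handling that previously lived inline in CloseArchiveOutput(); as the hunk near the end of this file shows, it is now called once during shutdown, between flushing and closing the appender:

    // In Main::CloseArchiveOutput(), per the hunk below:
    std::error_code ec;
    m_compressor->Flush(ec);                       // flush queued sample streams first
    ::CompressTable(m_compressor, m_tableWriter);  // then append GetThis.csv
    m_compressor->Close(ec);                       // finalize the archive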

std::wstring RetrieveComputerName(const std::wstring& defaultName, logger& _L_)
std::wstring RetrieveComputerName(const std::wstring& defaultName)
{
std::wstring name;

@@ -594,6 +583,11 @@ HRESULT Main::ConfigureSampleStreams(SampleRef& sample) const
_ASSERT(sample.Matches.front()->MatchingAttributes[sample.AttributeIndex].DataStream->IsOpen() == S_OK);

auto& dataStream = sample.Matches.front()->MatchingAttributes[sample.AttributeIndex].DataStream;
hr = dataStream->SetFilePointer(0, FILE_BEGIN, NULL);
if (FAILED(hr))
{
return hr;
}

// Streams are initially at EOF

@@ -764,28 +758,29 @@ Main::AddSampleRefToCSV(ITableOutput& output, const Main::SampleRef& sample) con
return S_OK;
}

HRESULT
Main::WriteSample(ArchiveCreate& compressor, std::unique_ptr<SampleRef> pSample, SampleWrittenCb writtenCb) const
HRESULT Main::WriteSample(
Archive::Appender<Archive::Archive7z>& compressor,
std::unique_ptr<SampleRef> pSample,
SampleWrittenCb writtenCb) const
{
auto sample = std::shared_ptr<SampleRef>(std::move(pSample));

const auto onItemArchivedCb = [this, sample, writtenCb](HRESULT hrArchived) {
const auto onItemArchivedCb = [this, sample, writtenCb](const std::error_code& ec) {
FinalizeHashes(*sample);

HRESULT hrTable = AddSampleRefToCSV(*m_tableWriter, *sample);
if (FAILED(hrTable))
{
log::Error(
_L_,
hrTable,
L"Failed to add sample %s metadata to csv\r\n",
sample->Matches.front()->MatchingNames.front().FullPathName.c_str());
Log::Error(
L"Failed to add sample '{}' metadata to csv (code: {:#x})",
sample->Matches.front()->MatchingNames.front().FullPathName,
hrTable);
}

if (writtenCb)
{
HRESULT hr = E_FAIL;
if (SUCCEEDED(hrTable) && SUCCEEDED(hrArchived))
if (SUCCEEDED(hrTable) && !ec)
{
hr = S_OK;
}
@@ -796,19 +791,14 @@ Main::WriteSample(ArchiveCreate& compressor, std::unique_ptr<SampleRef> pSample,

if (sample->IsOfflimits())
{
onItemArchivedCb(S_OK);
onItemArchivedCb({});
return S_OK;
}

HRESULT hr = compressor.AddStream(
sample->SampleName.c_str(), sample->SourcePath.c_str(), sample->CopyStream, onItemArchivedCb);
if (FAILED(hr))
{
log::Error(_L_, hr, L"Failed to add to archive the sample '%s'\r\n", sample->SampleName.c_str());
// No need to call 'onItemArchivedCb' as it is 'AddStream' responsability
}
auto item = std::make_unique<Archive::Item>(sample->CopyStream, sample->SampleName, std::move(onItemArchivedCb));
compressor.Add(std::move(item));

return hr;
return S_OK;
}
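The call sites of WriteSample are unchanged by this commit and not shown in the diff; a hypothetical caller passing a completion callback could look like this (sample construction elided; names other than SampleRef, SampleName and m_compressor are illustrative):

    // Hypothetical usage, not part of this diff.
    auto sample = std::make_unique<SampleRef>();
    // ... fill in SampleName, CopyStream, Matches ...

    hr = WriteSample(*m_compressor, std::move(sample), [](const SampleRef& s, HRESULT hrWrite) {
        if (FAILED(hrWrite))
        {
            Log::Error(L"Failed to archive sample '{}' (code: {:#x})", s.SampleName, hrWrite);
        }
    });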

HRESULT Main::WriteSample(
@@ -901,14 +891,7 @@ HRESULT Main::InitArchiveOutput()
}
}

::CreateLogStream(tempDir / L"GetThisLogStream", hr, _L_);
if (FAILED(hr))
{
log::Error(_L_, hr, L"Failed to create log stream\r\n");
return hr;
}

m_compressor = ::CreateCompressor(config.Output, CompressorFlags::kNone, hr, _L_);
m_compressor = ::CreateCompressor(config.Output);
if (m_compressor == nullptr)
{
Log::Error(L"Failed to create compressor");
@@ -931,60 +914,20 @@ HRESULT Main::CloseArchiveOutput()
_ASSERT(m_compressor);
_ASSERT(m_tableWriter);

m_compressor->FlushQueue();

HRESULT hr = m_tableWriter->Flush();
if (FAILED(hr))
{
log::Error(_L_, hr, L"Failed to flush csv writer\r\n");
}

auto tableStream = m_tableWriter->GetStream();
if (tableStream && tableStream->GetSize())
{
hr = tableStream->SetFilePointer(0, FILE_BEGIN, nullptr);
if (FAILED(hr))
{
log::Error(_L_, hr, L"Failed to rewind csv stream\r\n");
}

hr = m_compressor->AddStream(L"GetThis.csv", L"GetThis.csv", tableStream);
if (FAILED(hr))
{
log::Error(_L_, hr, L"Failed to add GetThis.csv\r\n");
}
}

auto logStream = _L_->GetByteStream();
_L_->CloseLogToStream(false);
std::error_code ec;

if (logStream && logStream->GetSize() > 0LL)
m_compressor->Flush(ec);
if (ec)
{
hr = logStream->SetFilePointer(0, FILE_BEGIN, nullptr);
if (FAILED(hr))
{
log::Error(_L_, hr, L"Failed to rewind log stream\r\n");
}

hr = m_compressor->AddStream(L"GetThis.log", L"GetThis.log", logStream);
if (FAILED(hr))
{
log::Error(_L_, hr, L"Failed to add GetThis.log\r\n");
}
Log::Error(L"Failed to compress '{}' (code: {:#x})", config.Output.Path, ec.value());
}

hr = m_compressor->Complete();
if (FAILED(hr))
{
log::Error(_L_, hr, L"Failed to complete %s\r\n", config.Output.Path.c_str());
return hr;
}
::CompressTable(m_compressor, m_tableWriter);

hr = tableStream->Close();
if (FAILED(hr))
m_compressor->Close(ec);
if (ec)
{
log::Error(_L_, hr, L"Failed to close csv writer\r\n");
return hr;
Log::Error(L"Failed to close archive (code: {:#x})", ec.value());
}

return S_OK;
Expand Down
