Showing 856 changed files with 60,922 additions and 20,968 deletions.
117 changes: 47 additions & 70 deletions .github/workflows/commit-access-review.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,57 +62,9 @@ def __repr__(self):
)


def run_graphql_query(
    query: str, variables: dict, token: str, retry: bool = True
) -> dict:
    """
    Submit a GraphQL query to the GitHub API and return the results.

    :param query: the GraphQL query string to execute.
    :param variables: variables referenced by ``query``.
    :param token: a GitHub API token used for authorization.
    :param retry: when True, retry the request one more time on a non-200
        response before raising.
    :returns: the ``data`` field of the GraphQL response as a dictionary.
    :raises Exception: if the request fails and the retry is exhausted.
    """
    session = requests.Session()
    # Retry transient gateway timeouts (504) with exponential backoff at the
    # transport level; the application-level retry is handled further below.
    retries = requests.adapters.Retry(total=8, backoff_factor=2, status_forcelist=[504])
    session.mount("https://", requests.adapters.HTTPAdapter(max_retries=retries))

    headers = {
        "Authorization": "bearer {}".format(token),
        # See
        # https://github.blog/2021-11-16-graphql-global-id-migration-update/
        "X-Github-Next-Global-ID": "1",
    }
    response = session.post(
        url="https://api.github.com/graphql",
        json={"query": query, "variables": variables},
        headers=headers,
    )

    # If we are close to exhausting the API rate limit, wait until the
    # advertised reset time before returning so subsequent calls succeed.
    rate_limit = response.headers.get("X-RateLimit-Remaining")
    if rate_limit and int(rate_limit) < 10:
        reset_time = int(response.headers["X-RateLimit-Reset"])
        while True:
            remaining = reset_time - int(time.time())
            if remaining <= 0:
                break
            print(
                "Waiting until rate limit reset",
                remaining,
                "seconds remaining",
            )
            # Sleep no longer than necessary: the original fixed 60 s sleep
            # could overshoot the reset time by up to a minute.
            time.sleep(min(60, remaining))

    if response.status_code == 200:
        payload = response.json()
        if "data" not in payload:
            # A payload without "data" means the query itself was rejected;
            # surface the error payload and abort.
            print(payload)
            sys.exit(1)
        return payload["data"]
    if retry:
        return run_graphql_query(query, variables, token, False)
    raise Exception(
        "Failed to run graphql query\nquery: {}\nerror: {}".format(
            query, response.json()
        )
    )


def check_manual_requests(start_date: datetime.datetime, token: str) -> list[str]:
def check_manual_requests(
gh: github.Github, start_date: datetime.datetime
) -> list[str]:
"""
Return a list of users who have been asked since ``start_date`` if they
want to keep their commit access.
Expand All @@ -137,18 +89,21 @@ def check_manual_requests(start_date: datetime.datetime, token: str) -> list[str
"""
formatted_start_date = start_date.strftime("%Y-%m-%dT%H:%M:%S")
variables = {
"query": f"type:issue created:>{formatted_start_date} org:llvm repo:llvm-project label:infrastructure:commit-access"
"query": f"type:issue created:>{formatted_start_date} org:llvm repo:llvm-project label:infra:commit-access"
}

data = run_graphql_query(query, variables, token)
res_header, res_data = gh._Github__requester.graphql_query(
query=query, variables=variables
)
data = res_data["data"]
users = []
for issue in data["search"]["nodes"]:
users.extend([user[1:] for user in re.findall("@[^ ,\n]+", issue["body"])])

return users


def get_num_commits(user: str, start_date: datetime.datetime, token: str) -> int:
def get_num_commits(gh: github.Github, user: str, start_date: datetime.datetime) -> int:
"""
    Get the number of commits that ``user`` has made since ``start_date``.
"""
Expand All @@ -166,7 +121,10 @@ def get_num_commits(user: str, start_date: datetime.datetime, token: str) -> int
}
"""

data = run_graphql_query(user_query, variables, token)
res_header, res_data = gh._Github__requester.graphql_query(
query=user_query, variables=variables
)
data = res_data["data"]
variables["user_id"] = data["user"]["id"]

query = """
Expand All @@ -193,7 +151,10 @@ def get_num_commits(user: str, start_date: datetime.datetime, token: str) -> int
}
"""
count = 0
data = run_graphql_query(query, variables, token)
res_header, res_data = gh._Github__requester.graphql_query(
query=query, variables=variables
)
data = res_data["data"]
for repo in data["organization"]["teams"]["nodes"][0]["repositories"]["nodes"]:
count += int(repo["ref"]["target"]["history"]["totalCount"])
if count >= User.THRESHOLD:
Expand All @@ -202,7 +163,7 @@ def get_num_commits(user: str, start_date: datetime.datetime, token: str) -> int


def is_new_committer_query_repo(
user: str, start_date: datetime.datetime, token: str
gh: github.Github, user: str, start_date: datetime.datetime
) -> bool:
"""
Determine if ``user`` is a new committer. A new committer can keep their
Expand All @@ -220,7 +181,10 @@ def is_new_committer_query_repo(
}
"""

data = run_graphql_query(user_query, variables, token)
res_header, res_data = gh._Github__requester.graphql_query(
query=user_query, variables=variables
)
data = res_data["data"]
variables["owner"] = "llvm"
variables["user_id"] = data["user"]["id"]
variables["start_date"] = start_date.strftime("%Y-%m-%dT%H:%M:%S")
Expand All @@ -245,7 +209,10 @@ def is_new_committer_query_repo(
}
"""

data = run_graphql_query(query, variables, token)
res_header, res_data = gh._Github__requester.graphql_query(
query=query, variables=variables
)
data = res_data["data"]
repo = data["organization"]["repository"]
commits = repo["ref"]["target"]["history"]["nodes"]
if len(commits) == 0:
Expand All @@ -256,18 +223,22 @@ def is_new_committer_query_repo(
return True


def is_new_committer(user: str, start_date: datetime.datetime, token: str) -> bool:
def is_new_committer(
gh: github.Github, user: str, start_date: datetime.datetime
) -> bool:
"""
    Wrapper around is_new_committer_query_repo to handle exceptions.
"""
try:
return is_new_committer_query_repo(user, start_date, token)
return is_new_committer_query_repo(gh, user, start_date)
except:
pass
return True


def get_review_count(user: str, start_date: datetime.datetime, token: str) -> int:
def get_review_count(
gh: github.Github, user: str, start_date: datetime.datetime
) -> int:
"""
Return the number of reviews that ``user`` has done since ``start_date``.
"""
Expand All @@ -286,11 +257,14 @@ def get_review_count(user: str, start_date: datetime.datetime, token: str) -> in
"query": f"type:pr commenter:{user} -author:{user} merged:>{formatted_start_date} org:llvm",
}

data = run_graphql_query(query, variables, token)
res_header, res_data = gh._Github__requester.graphql_query(
query=query, variables=variables
)
data = res_data["data"]
return int(data["search"]["issueCount"])


def count_prs(triage_list: dict, start_date: datetime.datetime, token: str):
def count_prs(gh: github.Github, triage_list: dict, start_date: datetime.datetime):
"""
Fetch all the merged PRs for the project since ``start_date`` and update
``triage_list`` with the number of PRs merged for each user.
Expand Down Expand Up @@ -329,7 +303,10 @@ def count_prs(triage_list: dict, start_date: datetime.datetime, token: str):
has_next_page = True
while has_next_page:
print(variables)
data = run_graphql_query(query, variables, token)
res_header, res_data = gh._Github__requester.graphql_query(
query=query, variables=variables
)
data = res_data["data"]
for pr in data["search"]["nodes"]:
# Users can be None if the user has been deleted.
if not pr["author"]:
Expand Down Expand Up @@ -365,14 +342,14 @@ def main():

print("Start:", len(triage_list), "triagers")
# Step 0 Check if users have requested commit access in the last year.
for user in check_manual_requests(one_year_ago, token):
for user in check_manual_requests(gh, one_year_ago):
if user in triage_list:
print(user, "requested commit access in the last year.")
del triage_list[user]
print("After Request Check:", len(triage_list), "triagers")

# Step 1 count all PRs authored or merged
count_prs(triage_list, one_year_ago, token)
count_prs(gh, triage_list, one_year_ago)

print("After PRs:", len(triage_list), "triagers")

Expand All @@ -381,7 +358,7 @@ def main():

# Step 2 check for reviews
for user in list(triage_list.keys()):
review_count = get_review_count(user, one_year_ago, token)
review_count = get_review_count(gh, user, one_year_ago)
triage_list[user].add_reviewed(review_count)

print("After Reviews:", len(triage_list), "triagers")
Expand All @@ -391,7 +368,7 @@ def main():

# Step 3 check for number of commits
for user in list(triage_list.keys()):
num_commits = get_num_commits(user, one_year_ago, token)
num_commits = get_num_commits(gh, user, one_year_ago)
# Override the total number of commits to not double count commits and
# authored PRs.
triage_list[user].set_authored(num_commits)
Expand All @@ -401,7 +378,7 @@ def main():
# Step 4 check for new committers
for user in list(triage_list.keys()):
print("Checking", user)
if is_new_committer(user, one_year_ago, token):
if is_new_committer(gh, user, one_year_ago):
print("Removing new committer: ", user)
del triage_list[user]

Expand Down
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,8 @@ autoconf/autom4te.cache
# VS2017 and VSCode config files.
.vscode
.vs
# Zed config files
.zed
# pythonenv for github Codespaces
pythonenv*
# clangd index. (".clangd" is a config file now, thus trailing slash)
Expand Down
19 changes: 12 additions & 7 deletions bolt/include/bolt/Core/BinaryContext.h
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
#include "llvm/ADT/iterator.h"
#include "llvm/BinaryFormat/Dwarf.h"
#include "llvm/BinaryFormat/MachO.h"
#include "llvm/ExecutionEngine/Orc/SymbolStringPool.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/MC/MCCodeEmitter.h"
#include "llvm/MC/MCContext.h"
Expand Down Expand Up @@ -276,11 +277,10 @@ class BinaryContext {
void deregisterSectionName(const BinarySection &Section);

public:
static Expected<std::unique_ptr<BinaryContext>>
createBinaryContext(Triple TheTriple, StringRef InputFileName,
SubtargetFeatures *Features, bool IsPIC,
std::unique_ptr<DWARFContext> DwCtx,
JournalingStreams Logger);
static Expected<std::unique_ptr<BinaryContext>> createBinaryContext(
Triple TheTriple, std::shared_ptr<orc::SymbolStringPool> SSP,
StringRef InputFileName, SubtargetFeatures *Features, bool IsPIC,
std::unique_ptr<DWARFContext> DwCtx, JournalingStreams Logger);

/// Superset of compiler units that will contain overwritten code that needs
/// new debug info. In a few cases, functions may end up not being
Expand Down Expand Up @@ -372,6 +372,7 @@ class BinaryContext {
bool hasSymbolsWithFileName() const { return HasSymbolsWithFileName; }
void setHasSymbolsWithFileName(bool Value) { HasSymbolsWithFileName = Value; }

std::shared_ptr<orc::SymbolStringPool> getSymbolStringPool() { return SSP; }
/// Return true if relocations against symbol with a given name
/// must be created.
bool forceSymbolRelocations(StringRef SymbolName) const;
Expand Down Expand Up @@ -631,6 +632,8 @@ class BinaryContext {

std::unique_ptr<Triple> TheTriple;

std::shared_ptr<orc::SymbolStringPool> SSP;

const Target *TheTarget;

std::string TripleName;
Expand Down Expand Up @@ -807,8 +810,10 @@ class BinaryContext {

BinaryContext(std::unique_ptr<MCContext> Ctx,
std::unique_ptr<DWARFContext> DwCtx,
std::unique_ptr<Triple> TheTriple, const Target *TheTarget,
std::string TripleName, std::unique_ptr<MCCodeEmitter> MCE,
std::unique_ptr<Triple> TheTriple,
std::shared_ptr<orc::SymbolStringPool> SSP,
const Target *TheTarget, std::string TripleName,
std::unique_ptr<MCCodeEmitter> MCE,
std::unique_ptr<MCObjectFileInfo> MOFI,
std::unique_ptr<const MCAsmInfo> AsmInfo,
std::unique_ptr<const MCInstrInfo> MII,
Expand Down
22 changes: 12 additions & 10 deletions bolt/lib/Core/BinaryContext.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,7 @@ void BinaryContext::logBOLTErrorsAndQuitOnFatal(Error E) {
BinaryContext::BinaryContext(std::unique_ptr<MCContext> Ctx,
std::unique_ptr<DWARFContext> DwCtx,
std::unique_ptr<Triple> TheTriple,
std::shared_ptr<orc::SymbolStringPool> SSP,
const Target *TheTarget, std::string TripleName,
std::unique_ptr<MCCodeEmitter> MCE,
std::unique_ptr<MCObjectFileInfo> MOFI,
Expand All @@ -136,12 +137,12 @@ BinaryContext::BinaryContext(std::unique_ptr<MCContext> Ctx,
std::unique_ptr<MCDisassembler> DisAsm,
JournalingStreams Logger)
: Ctx(std::move(Ctx)), DwCtx(std::move(DwCtx)),
TheTriple(std::move(TheTriple)), TheTarget(TheTarget),
TripleName(TripleName), MCE(std::move(MCE)), MOFI(std::move(MOFI)),
AsmInfo(std::move(AsmInfo)), MII(std::move(MII)), STI(std::move(STI)),
InstPrinter(std::move(InstPrinter)), MIA(std::move(MIA)),
MIB(std::move(MIB)), MRI(std::move(MRI)), DisAsm(std::move(DisAsm)),
Logger(Logger), InitialDynoStats(isAArch64()) {
TheTriple(std::move(TheTriple)), SSP(std::move(SSP)),
TheTarget(TheTarget), TripleName(TripleName), MCE(std::move(MCE)),
MOFI(std::move(MOFI)), AsmInfo(std::move(AsmInfo)), MII(std::move(MII)),
STI(std::move(STI)), InstPrinter(std::move(InstPrinter)),
MIA(std::move(MIA)), MIB(std::move(MIB)), MRI(std::move(MRI)),
DisAsm(std::move(DisAsm)), Logger(Logger), InitialDynoStats(isAArch64()) {
RegularPageSize = isAArch64() ? RegularPageSizeAArch64 : RegularPageSizeX86;
PageAlign = opts::NoHugePages ? RegularPageSize : HugePageSize;
}
Expand All @@ -159,8 +160,9 @@ BinaryContext::~BinaryContext() {
/// Create BinaryContext for a given architecture \p ArchName and
/// triple \p TripleName.
Expected<std::unique_ptr<BinaryContext>> BinaryContext::createBinaryContext(
Triple TheTriple, StringRef InputFileName, SubtargetFeatures *Features,
bool IsPIC, std::unique_ptr<DWARFContext> DwCtx, JournalingStreams Logger) {
Triple TheTriple, std::shared_ptr<orc::SymbolStringPool> SSP,
StringRef InputFileName, SubtargetFeatures *Features, bool IsPIC,
std::unique_ptr<DWARFContext> DwCtx, JournalingStreams Logger) {
StringRef ArchName = "";
std::string FeaturesStr = "";
switch (TheTriple.getArch()) {
Expand Down Expand Up @@ -283,8 +285,8 @@ Expected<std::unique_ptr<BinaryContext>> BinaryContext::createBinaryContext(

auto BC = std::make_unique<BinaryContext>(
std::move(Ctx), std::move(DwCtx), std::make_unique<Triple>(TheTriple),
TheTarget, std::string(TripleName), std::move(MCE), std::move(MOFI),
std::move(AsmInfo), std::move(MII), std::move(STI),
std::move(SSP), TheTarget, std::string(TripleName), std::move(MCE),
std::move(MOFI), std::move(AsmInfo), std::move(MII), std::move(STI),
std::move(InstructionPrinter), std::move(MIA), nullptr, std::move(MRI),
std::move(DisAsm), Logger);

Expand Down
1 change: 1 addition & 0 deletions bolt/lib/Core/DebugNames.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -161,6 +161,7 @@ bool static canProcess(const DWARFUnit &Unit, const DIE &Die,
case dwarf::DW_TAG_structure_type:
case dwarf::DW_TAG_typedef:
case dwarf::DW_TAG_unspecified_type:
case dwarf::DW_TAG_union_type:
if (TagsOnly || Die.findAttribute(dwarf::Attribute::DW_AT_name))
return true;
return false;
Expand Down
3 changes: 2 additions & 1 deletion bolt/lib/Rewrite/DWARFRewriter.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1691,7 +1691,8 @@ namespace {
std::unique_ptr<BinaryContext>
createDwarfOnlyBC(const object::ObjectFile &File) {
return cantFail(BinaryContext::createBinaryContext(
File.makeTriple(), File.getFileName(), nullptr, false,
File.makeTriple(), std::make_shared<orc::SymbolStringPool>(),
File.getFileName(), nullptr, false,
DWARFContext::create(File, DWARFContext::ProcessDebugRelocations::Ignore,
nullptr, "", WithColor::defaultErrorHandler,
WithColor::defaultWarningHandler),
Expand Down
Loading