remove FIXME and (cosmicexplorer) comments (#6479)
### Problem

- `FIXME` is a bit judgy, while carrying no well-defined additional meaning beyond `TODO`.
- The `TODO(name)` syntax is (mostly) deprecated, and all of the instances of its use here have enough context on their own.
cosmicexplorer committed Sep 16, 2018
1 parent d6a9490 commit 606fc83
Showing 30 changed files with 39 additions and 46 deletions.
2 changes: 1 addition & 1 deletion build-support/bin/ci.sh
@@ -133,7 +133,7 @@ if [[ "${python_three:-false}" == "true" ]]; then
# TODO(John Sirois): Allow `<4` when the issues with `3.7` are fixed. See:
# https://github.com/pantsbuild/pants/issues/6363
export PANTS_PYTHON_SETUP_INTERPRETER_CONSTRAINTS='["CPython>=3.4,<3.7"]'
# FIXME: Clear interpreters, otherwise this constraint does not end up applying due to a cache
# TODO: Clear interpreters, otherwise this constraint does not end up applying due to a cache
# bug between the `./pants binary` and further runs.
./pants.pex clean-all
fi
@@ -47,9 +47,8 @@ def register_options(cls, register):
super(SiteGen, cls).register_options(register)
register('--config-path', type=list, help='Path to .json file describing site structure.')

# FIXME(cosmicexplorer): requiring these products ensures that the markdown
# and reference tasks run before this one, but we don't use those
# products.
# TODO: requiring these products ensures that the markdown and reference tasks run before this
# one, but we don't use those products.
@classmethod
def prepare(cls, options, round_manager):
round_manager.require(MarkdownToHtml.MARKDOWN_HTML_PRODUCT)
2 changes: 1 addition & 1 deletion src/python/pants/backend/native/config/environment.py
@@ -187,7 +187,7 @@ class LLVMCppToolchain(datatype([('cpp_toolchain', CppToolchain)])): pass
class GCCCppToolchain(datatype([('cpp_toolchain', CppToolchain)])): pass


# FIXME: make this an @rule, after we can automatically produce LibcDev and other subsystems in the
# TODO: make this an @rule, after we can automatically produce LibcDev and other subsystems in the
# v2 engine (see #5788).
class HostLibcDev(datatype(['crti_object', 'fingerprint'])):

2 changes: 1 addition & 1 deletion src/python/pants/backend/native/register.py
@@ -35,7 +35,7 @@ def build_file_aliases():


def register_goals():
# FIXME(#5962): register these under the 'compile' goal when we eliminate the product transitive
# TODO(#5962): register these under the 'compile' goal when we eliminate the product transitive
# dependency from export -> compile.
task(name='native-third-party-fetch', action=NativeExternalLibraryFetch).install('native-compile')
task(name='c-for-ctypes', action=CCompile).install('native-compile')
@@ -123,7 +123,7 @@ def cpp_compiler(self):
extra_args=[])


# FIXME(#5663): use this over the XCode linker!
# TODO(#5663): use this over the XCode linker!
@rule(Linker, [Select(Platform), Select(LLVM)])
def get_lld(platform, llvm):
return llvm.linker(platform)
@@ -19,7 +19,7 @@ def register_options(cls, register):
register('--fatal-warnings', type=bool, default=True, fingerprint=True, advanced=True,
help='The default for the "fatal_warnings" argument for targets of this language.')

# FIXME: use some more formal method of mirroring options between a target and a subsystem -- see
# TODO: use some more formal method of mirroring options between a target and a subsystem -- see
# pants.backend.jvm.subsystems.dependency_context.DependencyContext#defaulted_property()!
def get_subsystem_target_mirrored_field_value(self, field_name, target):
"""Get the attribute `field_name` from `target` if set, else from this subsystem's options."""
@@ -128,7 +128,7 @@ def include_dirs(self):

all_inc_dirs = base_inc_dirs
for d in base_inc_dirs:
# FIXME: what does this directory do?
# TODO: figure out what this directory does and why it's not already found by this compiler.
secure_inc_dir = os.path.join(d, 'secure')
if is_readable_dir(secure_inc_dir):
all_inc_dirs.append(secure_inc_dir)
3 changes: 2 additions & 1 deletion src/python/pants/backend/native/targets/native_artifact.py
@@ -28,6 +28,7 @@ def as_shared_lib(self, platform):
})

def _compute_fingerprint(self):
# FIXME: can we just use the __hash__ method here somehow?
# TODO: This fingerprint computation boilerplate is error-prone and could probably be
# streamlined, for simple payload fields.
hasher = sha1(self.lib_name.encode('utf-8'))
return hasher.hexdigest() if PY3 else hasher.hexdigest().decode('utf-8')
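
The TODO above notes that this per-field fingerprint boilerplate is error-prone. Purely as an illustration (not part of this commit), a shared helper along the following lines could centralize the hashing and the PY2/PY3 hexdigest handling; the helper name is hypothetical:

```python
import sys
from hashlib import sha1

PY3 = sys.version_info[0] >= 3


def fingerprint_of_text_fields(*fields):
  """Hypothetical helper: fingerprint a tuple of simple text payload fields."""
  hasher = sha1()
  for field in fields:
    hasher.update(field.encode('utf-8'))
  hexdigest = hasher.hexdigest()
  return hexdigest if PY3 else hexdigest.decode('utf-8')


# A payload's _compute_fingerprint() could then reduce to, e.g.:
#   return fingerprint_of_text_fields(self.lib_name)
```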
@@ -88,7 +88,7 @@ def linker(self):

@memoized_property
def platform(self):
# FIXME: convert this to a v2 engine dependency injection.
# TODO: convert this to a v2 engine dependency injection.
return Platform.create()

def _retrieve_single_product_at_target_base(self, product_mapping, target):
@@ -114,7 +114,7 @@ def execute(self):
if vt.valid:
shared_library = self._retrieve_shared_lib_from_cache(vt)
else:
# FIXME: We need to partition links based on proper dependency edges and not
# TODO: We need to partition links based on proper dependency edges and not
# perform a link to every native_external_library for all targets in the closure.
# https://github.com/pantsbuild/pants/issues/6178
link_request = self._make_link_request(
8 changes: 4 additions & 4 deletions src/python/pants/backend/native/tasks/native_compile.py
@@ -33,14 +33,14 @@ class NativeCompileRequest(datatype([
])): pass


# FIXME(#5950): perform all process execution in the v2 engine!
# TODO(#5950): perform all process execution in the v2 engine!
class ObjectFiles(datatype(['root_dir', 'filenames'])):

def file_paths(self):
return [os.path.join(self.root_dir, fname) for fname in self.filenames]


# FIXME: this is a temporary hack -- we could introduce something like a "NativeRequirement" with
# TODO: this is a temporary hack -- we could introduce something like a "NativeRequirement" with
# dependencies, header, object file, library name (more?) instead of using multiple products.
class NativeTargetDependencies(datatype(['native_deps'])): pass

@@ -160,7 +160,7 @@ def get_sources_headers_for_target(self, target):

# Unique file names are required because we just dump object files into a single directory, and
# the compiler will silently just produce a single object file if provided non-unique filenames.
# FIXME: add some shading to file names so we can remove this check.
# TODO: add some shading to file names so we can remove this check.
# NB: It shouldn't matter if header files have the same name, but this will raise an error in
# that case as well. We won't need to do any shading of header file names.
seen_filenames = defaultdict(list)
@@ -178,7 +178,7 @@ def get_sources_headers_for_target(self, target):

return [os.path.join(get_buildroot(), rel_root, src) for src in target_relative_sources]

# FIXME(#5951): expand `Executable` to cover argv generation (where an `Executable` is subclassed
# TODO(#5951): expand `Executable` to cover argv generation (where an `Executable` is subclassed
# to modify or extend the argument list, as declaratively as possible) to remove
# `extra_compile_args(self)`!
@abstractmethod
@@ -33,8 +33,7 @@ class ConanRequirement(datatype(['pkg_spec'])):
}

def parse_conan_stdout_for_pkg_sha(self, stdout):
# FIXME: properly regex this method.
# https://github.com/pantsbuild/pants/issues/6168
# TODO(#6168): properly regex this method.
pkg_line = stdout.split('Packages')[1]
collected_matches = [line for line in pkg_line.split() if self.pkg_spec in line]
pkg_sha = collected_matches[0].split(':')[1]
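
As the TODO(#6168) above suggests, this parse could use a regex instead of string splitting. A rough sketch only, assuming the conan output lists packages as `<pkg_spec>:<sha>` tokens after a `Packages` header; the standalone signature is illustrative:

```python
import re


def parse_conan_stdout_for_pkg_sha(pkg_spec, stdout):
  """Hypothetical regex-based parse of conan stdout for a package sha."""
  # Only consider the section after the 'Packages' header, as the current code does.
  packages_section = stdout.split('Packages', 1)[1]
  match = re.search(re.escape(pkg_spec) + r':(\S+)', packages_section)
  if match is None:
    raise ValueError('no package sha found for {!r} in conan output'.format(pkg_spec))
  return match.group(1)
```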
2 changes: 1 addition & 1 deletion src/python/pants/backend/native/tasks/native_task.py
@@ -9,7 +9,7 @@

class NativeTask(Task):

# FIXME(#5869): delete this when we can request Subsystems from options in tasks!
# TODO(#5869): delete this when we can request Subsystems from options in tasks!
def _request_single(self, product, subject):
# NB: This is not supposed to be exposed to Tasks yet -- see #4769 to track the status of
# exposing v2 products in v1 tasks.
2 changes: 1 addition & 1 deletion src/python/pants/binaries/binary_tool.py
@@ -20,7 +20,7 @@
logger = logging.getLogger(__name__)


# TODO(cosmicexplorer): Add integration tests for this file.
# TODO: Add integration tests for this file.
class BinaryToolBase(Subsystem):
"""Base class for subsytems that configure binary tools.
6 changes: 3 additions & 3 deletions src/python/pants/binaries/binary_util.py
@@ -315,9 +315,9 @@ def __init__(self, baseurls, binary_tool_fetcher, path_by_id=None,
'linux': lambda release, machine: ('linux', machine),
}

# FIXME(cosmicexplorer): we create a HostPlatform in this class instead of in the constructor
# because we don't want to fail until a binary is requested. The HostPlatform should be a
# parameter that gets lazily resolved by the v2 engine.
# TODO: we create a HostPlatform in this class instead of in the constructor because we don't want
# to fail until a binary is requested. The HostPlatform should be a parameter that gets lazily
# resolved by the v2 engine.
@memoized_method
def _host_platform(self):
uname_result = self._uname_func()
2 changes: 1 addition & 1 deletion src/python/pants/build_graph/target.py
@@ -680,7 +680,7 @@ def strict_dependencies(self, dep_context):
strict_deps = self._cached_strict_dependencies_map.get(dep_context, None)
if strict_deps is None:
default_predicate = self._closure_dep_predicate({self}, **dep_context.target_closure_kwargs)
# FIXME(#5977): this branch needs testing!
# TODO(#5977): this branch needs testing!
if not default_predicate:
def default_predicate(*args, **kwargs):
return True
3 changes: 1 addition & 2 deletions src/python/pants/engine/fs.py
@@ -124,8 +124,7 @@ class DirectoryToMaterialize(datatype([('path', text_type), ('directory_digest',
FilesContent = Collection.of(FileContent)


# TODO(cosmicexplorer): don't recreate this in python, get this from
# fs::EMPTY_DIGEST somehow.
# TODO: don't recreate this in python, get this from fs::EMPTY_DIGEST somehow.
_EMPTY_FINGERPRINT = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
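
For reference, this constant is the SHA-256 of empty content (presumably the same value the Rust-side `fs::EMPTY_DIGEST` holds), which is easy to sanity-check:

```python
import hashlib

assert hashlib.sha256(b'').hexdigest() == \
  'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
```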


2 changes: 1 addition & 1 deletion src/python/pants/fs/archive.py
@@ -122,7 +122,7 @@ def _invoke_xz(self, xz_input_file):
This allows streaming the decompressed tar archive directly into a tar decompression stream,
which is significantly faster in practice than making a temporary file.
"""
# FIXME: --threads=0 is supposed to use "the number of processor cores on the machine", but I
# TODO: --threads=0 is supposed to use "the number of processor cores on the machine", but I
# see no more than 100% cpu used at any point. This seems like it could be a bug? If performance
# is an issue, investigate further.
cmd = [self._xz_binary_path, '--decompress', '--stdout', '--keep', '--threads=0', xz_input_file]
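
The docstring above describes piping xz's output straight into a tar stream reader rather than writing a temporary file. A minimal standalone sketch of that pattern (the function name and error handling are illustrative, not the actual archiver implementation):

```python
import subprocess
import tarfile


def stream_extract_xz_tar(xz_binary_path, xz_input_file, dest_dir):
  """Illustrative sketch: stream `xz --decompress --stdout` into a tar reader."""
  cmd = [xz_binary_path, '--decompress', '--stdout', '--keep', '--threads=0', xz_input_file]
  process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
  # mode='r|' reads a non-seekable stream, so no temporary decompressed file is needed.
  with tarfile.open(mode='r|', fileobj=process.stdout) as tar:
    tar.extractall(dest_dir)
  if process.wait() != 0:
    raise Exception('xz failed with exit code {}'.format(process.returncode))
```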
2 changes: 1 addition & 1 deletion src/python/pants/goal/products.py
@@ -64,7 +64,7 @@ def add_for_targets(self, targets, products):
:API: public
"""
# FIXME: This is a temporary helper for use until the classpath has been split.
# TODO: This is a temporary helper for use until the classpath has been split.
for target in targets:
self.add_for_target(target, products)

3 changes: 1 addition & 2 deletions src/python/pants/option/parser_hierarchy.py
@@ -36,8 +36,7 @@ def all_enclosing_scopes(scope, allow_global=True):

_validate_full_scope(scope)

# TODO(cosmicexplorer): validate scopes here and/or in `enclosing_scope()`
# instead of assuming correctness.
# TODO: validate scopes here and/or in `enclosing_scope()` instead of assuming correctness.
def scope_within_range(tentative_scope):
if tentative_scope is None:
return False
2 changes: 1 addition & 1 deletion src/python/pants/option/scope.py
@@ -11,7 +11,7 @@
GLOBAL_SCOPE_CONFIG_SECTION = 'GLOBAL'


# FIXME: convert this to a datatype?
# TODO: convert this to a datatype?
class ScopeInfo(namedtuple('_ScopeInfo', ['scope', 'category', 'optionable_cls'])):
"""Information about a scope."""

1 change: 0 additions & 1 deletion src/python/pants/pantsd/watchman_launcher.py
@@ -27,7 +27,6 @@ def create(cls, bootstrap_options):
baseurls=bootstrap_options.binaries_baseurls,
binary_tool_fetcher=binary_tool_fetcher,
path_by_id=bootstrap_options.binaries_path_by_id,
# TODO(cosmicexplorer): do we need to test this?
allow_external_binary_tool_downloads=bootstrap_options.allow_external_binary_tool_downloads)

return WatchmanLauncher(
4 changes: 2 additions & 2 deletions src/python/pants/util/objects.py
@@ -72,8 +72,8 @@ def __new__(cls, *args, **kwargs):
except TypeError as e:
raise cls.make_type_error(e)

# TODO(cosmicexplorer): Make this kind of exception pattern (filter for
# errors then display them all at once) more ergonomic.
# TODO: Make this kind of exception pattern (filter for errors then display them all at once)
# more ergonomic.
type_failure_msgs = []
for field_name, field_constraint in fields_with_constraints.items():
field_value = getattr(this_object, field_name)
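
The TODO above concerns the "collect every failure, then raise once" pattern used here. A generic standalone sketch of that pattern (not pants' actual datatype machinery):

```python
def check_fields(values, constraints):
  """Illustrative sketch: validate all fields, then report every failure in one error."""
  failures = []
  for name, is_valid in constraints.items():
    value = values.get(name)
    if not is_valid(value):
      failures.append('field {!r} has invalid value {!r}'.format(name, value))
  if failures:
    raise TypeError('type check errors:\n  ' + '\n  '.join(failures))


# e.g. check_fields({'port': 'eighty'}, {'port': lambda v: isinstance(v, int)})
# raises a single TypeError listing every failing field.
```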
2 changes: 1 addition & 1 deletion src/python/pants/util/osutil.py
@@ -52,7 +52,7 @@ def known_os_names():
return reduce(set.union, OS_ALIASES.values())


# TODO(cosmicexplorer): use this as the default value for the global --binaries-path-by-id option!
# TODO: use this as the default value for the global --binaries-path-by-id option!
# panstd testing fails saying no run trackers were created when I tried to do this.
SUPPORTED_PLATFORM_NORMALIZED_NAMES = {
('linux', 'x86_64'): ('linux', 'x86_64'),
4 changes: 2 additions & 2 deletions src/rust/engine/fs/src/lib.rs
@@ -417,8 +417,8 @@ pub enum StrictGlobMatching {
}

impl StrictGlobMatching {
// TODO(cosmicexplorer): match this up with the allowed values for the GlobMatchErrorBehavior type
// in python somehow?
// TODO: match this up with the allowed values for the GlobMatchErrorBehavior type in python
// somehow!
pub fn create(behavior: &str) -> Result<Self, String> {
match behavior {
"ignore" => Ok(StrictGlobMatching::Ignore),
2 changes: 1 addition & 1 deletion src/rust/engine/src/context.rs
@@ -109,7 +109,7 @@ impl Core {
fs_pool: fs_pool.clone(),
runtime: runtime,
store: store,
// FIXME: Errors in initialization should definitely be exposed as python
// TODO: Errors in initialization should definitely be exposed as python
// exceptions, rather than as panics.
vfs: PosixFS::new(build_root, fs_pool, &ignore_patterns).unwrap_or_else(|e| {
panic!("Could not initialize VFS: {:?}", e);
4 changes: 2 additions & 2 deletions src/rust/engine/src/nodes.rs
@@ -1037,8 +1037,8 @@ impl Node for NodeKey {
fn typstr(tc: &TypeConstraint) -> String {
externs::key_to_str(&tc.0)
}
// FIXME(cosmicexplorer): these should all be converted to fmt::Debug implementations, and then
// this method can go away in favor of the auto-derived Debug for this type.
// TODO: these should all be converted to fmt::Debug implementations, and then this method can
// go away in favor of the auto-derived Debug for this type.
match self {
&NodeKey::DigestFile(ref s) => format!("DigestFile({:?})", s.0),
&NodeKey::ExecuteProcess(ref s) => format!("ExecuteProcess({:?}", s.0),
@@ -14,7 +14,7 @@ def platform_specific(normalized_os_name):

def decorator(test_fn):
def wrapper(self, *args, **kwargs):
# FIXME: This should be drawn from the v2 engine somehow.
# TODO: This should be drawn from the v2 engine somehow.
platform = Platform.create()

if platform.normalized_os_name == normalized_os_name:
3 changes: 1 addition & 2 deletions tests/python/pants_test/binaries/test_binary_tool.py
@@ -72,8 +72,7 @@ def _select_for_version(self, version):
return BinaryUtilFakeUname.Factory._create_for_cls(BinaryUtilFakeUname).select(binary_request)


# TODO(cosmicexplorer): these should have integration tests which use BinaryTool subclasses
# overriding archive_type
# TODO: these should have integration tests which use BinaryTool subclasses overriding archive_type.
class BinaryToolBaseTest(BaseTest):

def setUp(self):
2 changes: 1 addition & 1 deletion tests/python/pants_test/binaries/test_binary_util.py
@@ -32,7 +32,7 @@ def __str__(self):
return 'ExternalUrlGenerator(<example __str__()>)'


# TODO(cosmicexplorer): test requests with an archiver!
# TODO: test requests with an archiver!
class BinaryUtilTest(TestBase):
"""Tests binary_util's binaries_baseurls handling."""

2 changes: 0 additions & 2 deletions tests/python/pants_test/engine/test_isolated_process.py
@@ -217,8 +217,6 @@ def test_blows_up_on_invalid_args(self):
with self.assertRaises(TypeCheckError):
self._default_args_execute_process_request(argv=('1',), env=['foo', 'bar'])

# TODO(cosmicexplorer): we should probably check that the digest info in
# ExecuteProcessRequest is valid, beyond just checking if it's a string.
with self.assertRaisesRegexp(TypeCheckError, "env"):
ExecuteProcessRequest(
argv=('1',),
