Issue #155: Remove FIT_HAS_STATIC_LAMBDA configuration
pfultz2 committed Mar 27, 2016
1 parent 1a9b1ec commit 37355c5
Showing 9 changed files with 12 additions and 28 deletions.
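What the change amounts to, as far as the diff shows: the FIT_HAS_STATIC_LAMBDA feature macro is deleted from fit/lambda.hpp, and every #if FIT_HAS_STATIC_LAMBDA / #endif guard in the tests goes with it, so static lambdas are treated as always available. Below is a minimal sketch of the resulting usage, assuming only the headers and macros visible in the diff; it is not part of the commit.

#include <fit/lambda.hpp>

// No #if FIT_HAS_STATIC_LAMBDA guard is needed any more; the definition is
// unconditional. The macro and the lambda body mirror fit_sum_lambda from
// test/static_def/static_def.hpp further down.
FIT_STATIC_LAMBDA_FUNCTION(sum) = [](int x, int y)
{
    return x + y;
};

int main()
{
    return sum(1, 2) == 3 ? 0 : 1;
}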
2 changes: 0 additions & 2 deletions include/fit/lambda.hpp
@@ -69,8 +69,6 @@
#include <fit/detail/static_const_var.hpp>


#define FIT_HAS_STATIC_LAMBDA 1

#ifndef FIT_REWRITE_STATIC_LAMBDA
#ifdef _MSC_VER
#define FIT_REWRITE_STATIC_LAMBDA 1
3 changes: 1 addition & 2 deletions test/conditional.cpp
@@ -91,7 +91,7 @@ FIT_TEST_CASE()
FIT_TEST_CHECK(lam(t3()) == 3);
}
#endif
#if FIT_HAS_STATIC_LAMBDA

FIT_STATIC_LAMBDA_FUNCTION(static_fun) = fit::conditional(
[](t1)
{
@@ -113,7 +113,6 @@ FIT_TEST_CASE()
FIT_TEST_CHECK(static_fun(t2()) == 2);
FIT_TEST_CHECK(static_fun(t3()) == 3);
}
#endif

FIT_TEST_CASE()
{
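For context, the static_fun kept above is the usual fit::conditional pattern: each lambda handles one argument type and the first one that is callable with the argument wins. A small self-contained sketch of that pattern follows, with illustrative tag types and an assumed fit/conditional.hpp include path; only the FIT_STATIC_LAMBDA_FUNCTION plus fit::conditional combination is taken from the test.

#include <fit/conditional.hpp> // assumed header name
#include <fit/lambda.hpp>

struct t1 {};
struct t2 {};

// fit::conditional tries the lambdas in order and dispatches to the first
// one that accepts the argument type.
FIT_STATIC_LAMBDA_FUNCTION(static_fun) = fit::conditional(
    [](t1) { return 1; },
    [](t2) { return 2; }
);

int main()
{
    return (static_fun(t1()) == 1 && static_fun(t2()) == 2) ? 0 : 1;
}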
3 changes: 1 addition & 2 deletions test/lambda.cpp
@@ -6,7 +6,7 @@
#include <memory>
#include "test.hpp"

#if FIT_HAS_STATIC_LAMBDA

static constexpr auto add_one = FIT_STATIC_LAMBDA(int x)
{
return x + 1;
@@ -90,4 +90,3 @@ FIT_TEST_CASE()
}

}
#endif
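The lines kept above are the core of this test file: FIT_STATIC_LAMBDA yields a lambda that can initialize a constexpr variable at namespace scope, and after this commit that definition no longer sits behind a guard. A runnable reduction of just that fragment, with the surrounding test cases omitted:

#include <fit/lambda.hpp>

// Taken almost verbatim from the first lines of test/lambda.cpp above.
static constexpr auto add_one = FIT_STATIC_LAMBDA(int x)
{
    return x + 1;
};

int main()
{
    return add_one(2) == 3 ? 0 : 1;
}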
3 changes: 1 addition & 2 deletions test/match.cpp
@@ -74,7 +74,7 @@ FIT_TEST_CASE()
FIT_STATIC_TEST_CHECK(fit::reveal(fun)(1) == 1);
FIT_STATIC_TEST_CHECK(fit::reveal(fun)(foo()) == 2);
};
#if FIT_HAS_STATIC_LAMBDA

FIT_TEST_CASE()
{

@@ -86,7 +86,6 @@ FIT_TEST_CASE()
FIT_TEST_CHECK(lam(1) == 1);
FIT_TEST_CHECK(lam(foo()) == 2);
};
#endif

FIT_TEST_CASE()
{
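The checks kept above call fun through fit::reveal, whose valid calls simply forward to the wrapped function (its purpose is clearer diagnostics when a call is ill-formed). The definition of fun lies outside the shown hunks, so the sketch below reconstructs a plausible one with fit::match, which builds an overload set from several functions. The fun definition and the match.hpp / reveal.hpp include paths are assumptions; only the two reveal checks come from the diff.

#include <fit/match.hpp>  // assumed header names
#include <fit/reveal.hpp>
#include <fit/lambda.hpp>

struct foo {};

// Hypothetical overload set consistent with the checks above:
// an int maps to itself, a foo maps to 2.
FIT_STATIC_LAMBDA_FUNCTION(fun) = fit::match(
    [](int x) { return x; },
    [](foo) { return 2; }
);

int main()
{
    return (fit::reveal(fun)(1) == 1 && fit::reveal(fun)(foo()) == 2) ? 0 : 1;
}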
3 changes: 1 addition & 2 deletions test/reveal.cpp
@@ -38,7 +38,7 @@ FIT_TEST_CASE()
static_assert(!fit::is_callable<fit::reveal_adaptor<f_type>, int>::value, "Callable");
// fit::reveal(f)(1);
}
#if FIT_HAS_STATIC_LAMBDA

#ifndef _MSC_VER
static constexpr auto lam = fit::conditional(
FIT_STATIC_LAMBDA(t1)
@@ -99,7 +99,6 @@ FIT_TEST_CASE()

// fit::reveal(static_fun)(1);
}
#endif

struct integral_type
{
7 changes: 1 addition & 6 deletions test/static_def/static_def.cpp
@@ -15,7 +15,7 @@ extern void* f_sum_constexpr_fo_addr();

extern void* sum_var_addr();
extern void* sum_constexpr_fo_addr();
#if FIT_HAS_STATIC_LAMBDA

void* sum_lambda_addr()
{
return (void*)&fit_test::fit_sum_lambda;
@@ -24,7 +24,6 @@ void* sum_fo_addr()
{
return (void*)&fit_test::fit_sum_fo;
}
#endif

void* sum_var_addr()
{
@@ -37,13 +36,10 @@ void* sum_constexpr_fo_addr()

int main()
{
#if FIT_HAS_STATIC_LAMBDA
if (fit_test::fit_sum_fo(1, 2) != 3) printf("FAILED\n");
if (fit_test::fit_sum_lambda(1, 2) != 3) printf("FAILED\n");
#endif
if (fit_test::fit_sum(1, 2) != 3) printf("FAILED\n");

#if FIT_HAS_STATIC_LAMBDA
#if !FIT_NO_UNIQUE_STATIC_LAMBDA_FUNCTION_ADDR
if (sum_lambda_addr() != f_sum_lambda_addr()) printf("FAILED: Lambda\n");
if (sum_fo_addr() != f_sum_fo_addr()) printf("FAILED: Function object\n");
@@ -52,7 +48,6 @@ int main()
#if !FIT_NO_UNIQUE_STATIC_VAR
if (sum_var_addr() != f_sum_var_addr()) printf("FAILED: Lambda\n");
if (sum_constexpr_fo_addr() != f_sum_constexpr_fo_addr()) printf("FAILED: Function object\n");
#endif
#endif
return f();
}
7 changes: 3 additions & 4 deletions test/static_def/static_def.hpp
@@ -6,12 +6,11 @@
#include <fit/lambda.hpp>

namespace fit_test {
#if FIT_HAS_STATIC_LAMBDA

FIT_STATIC_LAMBDA_FUNCTION(fit_sum_lambda) = [](int x, int y)
{
return x + y;
};
#endif

struct fit_sum_f
{
@@ -20,9 +19,9 @@ struct fit_sum_f
return x + y;
}
};
#if FIT_HAS_STATIC_LAMBDA

FIT_STATIC_LAMBDA_FUNCTION(fit_sum_fo) = fit_sum_f();
#endif

FIT_STATIC_FUNCTION(fit_sum_constexpr_fo) = fit_sum_f();

FIT_DECLARE_STATIC_VAR(fit_sum_var, fit_sum_f);
8 changes: 2 additions & 6 deletions test/static_def/static_def2.cpp
@@ -13,7 +13,7 @@ extern void* f_sum_constexpr_fo_addr();

extern void* sum_var_addr();
extern void* sum_constexpr_fo_addr();
#if FIT_HAS_STATIC_LAMBDA

void* f_sum_lambda_addr()
{
return (void*)&fit_test::fit_sum_lambda;
@@ -22,7 +22,7 @@ void* f_sum_fo_addr()
{
return (void*)&fit_test::fit_sum_fo;
}
#endif

void* f_sum_var_addr()
{
return (void*)&fit_test::fit_sum_var;
@@ -34,13 +34,10 @@ void* f_sum_constexpr_fo_addr()

int f()
{
#if FIT_HAS_STATIC_LAMBDA
if (fit_test::fit_sum_fo(1, 2) != 3) printf("FAILED\n");
if (fit_test::fit_sum_lambda(1, 2) != 3) printf("FAILED\n");
#endif
if (fit_test::fit_sum(1, 2) != 3) printf("FAILED\n");

#if FIT_HAS_STATIC_LAMBDA
#if !FIT_NO_UNIQUE_STATIC_LAMBDA_FUNCTION_ADDR
if (sum_lambda_addr() != f_sum_lambda_addr()) printf("FAILED: Lambda\n");
if (sum_fo_addr() != f_sum_fo_addr()) printf("FAILED: Function object\n");
@@ -49,7 +46,6 @@ int f()
#if !FIT_NO_UNIQUE_STATIC_VAR
if (sum_var_addr() != f_sum_var_addr()) printf("FAILED: Lambda\n");
if (sum_constexpr_fo_addr() != f_sum_constexpr_fo_addr()) printf("FAILED: Function object\n");
#endif
#endif
return 0;
}
4 changes: 2 additions & 2 deletions test/unpack.cpp
@@ -112,7 +112,7 @@ FIT_TEST_CASE()
static_assert(fit::is_unpackable<tuple_type&&>::value, "Not unpackable");

}
#if FIT_HAS_STATIC_LAMBDA

FIT_STATIC_AUTO lambda_unary_unpack = fit::unpack(FIT_STATIC_LAMBDA(int x)
{
return x;
@@ -127,7 +127,7 @@ FIT_TEST_CASE()
{
FIT_TEST_CHECK(3 == lambda_unary_unpack(fit::pack_decay(3)));
}
#endif

struct unary_move
{
std::unique_ptr<int> i;
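Finally, lambda_unary_unpack above shows the adaptor this file tests: fit::unpack(f) accepts an unpackable sequence (a fit::pack, as in the test, or a std::tuple) and forwards its elements to f. A small sketch using std::tuple instead of the test's pack_decay, with an assumed fit/unpack.hpp include path:

#include <fit/unpack.hpp> // assumed header name
#include <tuple>

int main()
{
    // unary: one tuple element forwarded to a one-argument lambda
    auto unary = fit::unpack([](int x) { return x; });
    // binary: two tuple elements forwarded to a two-argument lambda
    auto binary = fit::unpack([](int x, int y) { return x + y; });
    return (unary(std::make_tuple(3)) == 3 &&
            binary(std::make_tuple(1, 2)) == 3) ? 0 : 1;
}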
