Add JuliaFormatter (#480)
odow committed Feb 23, 2022
1 parent 1f48d1f commit e3ca424
Showing 122 changed files with 5,606 additions and 2,804 deletions.
8 changes: 8 additions & 0 deletions .JuliaFormatter.toml
@@ -0,0 +1,8 @@
# Configuration file for JuliaFormatter.jl
# For more information, see: https://domluna.github.io/JuliaFormatter.jl/stable/config/

always_for_in = true
always_use_return = true
margin = 80
remove_extra_newlines = true
short_to_long_function_def = true
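
For context, these options also govern local runs: JuliaFormatter.jl picks up .JuliaFormatter.toml automatically when invoked from the repository root, so no options need to be repeated. A minimal sketch, assuming the package is installed in the active environment:

# Local formatting run; the config file above is discovered automatically.
using JuliaFormatter
format(".", verbose = true)  # rewrites files in place using the options above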
30 changes: 30 additions & 0 deletions .github/workflows/format_check.yml
@@ -0,0 +1,30 @@
name: format-check
on:
push:
branches:
- master
- release-*
pull_request:
types: [opened, synchronize, reopened]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: julia-actions/setup-julia@latest
with:
version: '1'
- uses: actions/checkout@v1
- name: Format check
shell: julia --color=yes {0}
run: |
using Pkg
Pkg.add(PackageSpec(name="JuliaFormatter", version="0.22.4"))
using JuliaFormatter
format(".", verbose=true)
out = String(read(Cmd(`git diff`)))
if isempty(out)
exit(0)
end
@error "Some files have not been formatted !!!"
write(stdout, out)
exit(1)
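
The check above can be reproduced outside CI to preview which files it would flag. A hedged sketch of the same steps run by hand, assuming git is on the PATH and the pinned JuliaFormatter version is installable:

# Local equivalent of the format-check job: format, then inspect the diff.
using Pkg
Pkg.add(PackageSpec(name = "JuliaFormatter", version = "0.22.4"))
using JuliaFormatter
format(".", verbose = true)
diff = read(`git diff`, String)   # non-empty output means files were reformatted
isempty(diff) || @warn "Formatting changes required" diff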
40 changes: 20 additions & 20 deletions benchmark/benchmarks.jl
@@ -1,7 +1,7 @@
using Pkg
tempdir = mktempdir()
Pkg.activate(tempdir)
Pkg.develop(PackageSpec(path=joinpath(@__DIR__, "..")))
Pkg.develop(PackageSpec(path = joinpath(@__DIR__, "..")))
Pkg.add(["BenchmarkTools", "PkgBenchmark", "MathOptInterface"])
Pkg.resolve()

@@ -13,26 +13,26 @@ const MOIU = MOI.Utilities

const SUITE = BenchmarkGroup()

problems = [
"constant_fix!_with_complex_numbers",
"affine_dot_multiply_atom",
"affine_hcat_atom",
"affine_trace_atom",
"exp_entropy_atom",
"exp_log_perspective_atom",
"socp_norm_2_atom",
"socp_quad_form_atom",
"socp_sum_squares_atom",
"lp_norm_inf_atom",
"lp_maximum_atom",
"sdp_and_exp_log_det_atom",
"sdp_norm2_atom",
"sdp_lambda_min_atom",
"sdp_sum_largest_eigs",
"mip_integer_variables",
]
problems = [
"constant_fix!_with_complex_numbers",
"affine_dot_multiply_atom",
"affine_hcat_atom",
"affine_trace_atom",
"exp_entropy_atom",
"exp_log_perspective_atom",
"socp_norm_2_atom",
"socp_quad_form_atom",
"socp_sum_squares_atom",
"lp_norm_inf_atom",
"lp_maximum_atom",
"sdp_and_exp_log_det_atom",
"sdp_norm2_atom",
"sdp_lambda_min_atom",
"sdp_sum_largest_eigs",
"mip_integer_variables",
]

SUITE["formulation"] = ProblemDepot.benchmark_suite(problems) do problem
model = MOIU.MockOptimizer(MOIU.Model{Float64}())
Convex.load_MOI_model!(model, problem)
return Convex.load_MOI_model!(model, problem)
end
8 changes: 4 additions & 4 deletions benchmark/pprinthelper.jl
@@ -7,16 +7,16 @@ function displayresult(result)
md = sprint(export_markdown, result)
md = replace(md, ":x:" => "")
md = replace(md, ":white_check_mark:" => "")
display(Markdown.parse(md))
return display(Markdown.parse(md))
end

function printnewsection(name)
println()
println()
println()
printstyled("" ^ displaysize(stdout)[2]; color=:blue)
println()
printstyled(name; bold=true)
printstyled(""^displaysize(stdout)[2]; color = :blue)
println()
printstyled(name; bold = true)
println()
return println()
end
6 changes: 4 additions & 2 deletions benchmark/pprintjudge.jl
@@ -2,8 +2,10 @@
# which is available under an MIT license (see LICENSE).
using PkgBenchmark
include("pprinthelper.jl")
group_target = PkgBenchmark.readresults(joinpath(@__DIR__, "result-target.json"))
group_baseline = PkgBenchmark.readresults(joinpath(@__DIR__, "result-baseline.json"))
group_target =
PkgBenchmark.readresults(joinpath(@__DIR__, "result-target.json"))
group_baseline =
PkgBenchmark.readresults(joinpath(@__DIR__, "result-baseline.json"))
judgement = judge(group_target, group_baseline)

displayresult(judgement)
6 changes: 1 addition & 5 deletions benchmark/runbenchmarks.jl
@@ -3,10 +3,6 @@
using PkgBenchmark
benchmarkpkg(
dirname(@__DIR__),
BenchmarkConfig(
env = Dict(
"JULIA_NUM_THREADS" => "1",
),
),
BenchmarkConfig(env = Dict("JULIA_NUM_THREADS" => "1")),
resultfile = joinpath(@__DIR__, "result.json"),
)
10 changes: 3 additions & 7 deletions benchmark/runjudge.jl
@@ -2,13 +2,9 @@
# which is available under an MIT license (see LICENSE).
using PkgBenchmark

mkconfig(; kwargs...) =
BenchmarkConfig(
env = Dict(
"JULIA_NUM_THREADS" => "1",
);
kwargs...
)
function mkconfig(; kwargs...)
return BenchmarkConfig(env = Dict("JULIA_NUM_THREADS" => "1"); kwargs...)
end

group_target = benchmarkpkg(
dirname(@__DIR__),
16 changes: 10 additions & 6 deletions docs/examples_literate/general_examples/basic_usage.jl
@@ -33,8 +33,8 @@ p.constraints += A * x <= b
p.constraints += [x >= 1; x <= 10; x[2] <= 5; x[1] + x[4] - x[2] <= 10]
solve!(p, solver)

println(round(p.optval, digits=2))
println(round.(evaluate(x), digits=2))
println(round(p.optval, digits = 2))
println(round.(evaluate(x), digits = 2))
println(evaluate(x[1] + x[4] - x[2]))

# ### Matrix Variables and promotions
@@ -55,7 +55,7 @@ y = Variable()
## X is a 2 x 2 variable, and y is scalar. X' + y promotes y to a 2 x 2 variable before adding them
p = minimize(norm(vec(X)) + y, 2 * X <= 1, X' + y >= 1, X >= 0, y >= 0)
solve!(p, solver)
println(round.(evaluate(X), digits=2))
println(round.(evaluate(X), digits = 2))
println(evaluate(y))
p.optval

@@ -72,16 +72,20 @@ p.optval
#

x = Variable(4)
p = satisfy(norm(x) <= 100, exp(x[1]) <= 5, x[2] >= 7, geomean(x[3], x[4]) >= x[2])
p = satisfy(
norm(x) <= 100,
exp(x[1]) <= 5,
x[2] >= 7,
geomean(x[3], x[4]) >= x[2],
)
solve!(p, solver)
println(p.status)
evaluate(x)

# ### SDP cone and Eigenvalues


y = Semidefinite(2)
p = maximize(eigmin(y), tr(y)<=6)
p = maximize(eigmin(y), tr(y) <= 6)
solve!(p, solver)
p.optval

25 changes: 16 additions & 9 deletions docs/examples_literate/general_examples/chebyshev_center.jl
@@ -11,9 +11,9 @@
using Convex, LinearAlgebra, SCS

# Generate the input data
a1 = [ 2; 1];
a2 = [ 2; -1];
a3 = [-1; 2];
a1 = [2; 1];
a2 = [2; -1];
a3 = [-1; 2];
a4 = [-1; -2];
b = ones(4, 1);

@@ -29,12 +29,19 @@ solve!(p, MOI.OptimizerWithAttributes(SCS.Optimizer, "verbose" => 0))
p.optval

# Generate the figure
x = range(-1.5, stop=1.5, length=100);
x = range(-1.5, stop = 1.5, length = 100);
theta = 0:pi/100:2*pi;
using Plots
plot(x, x -> -x * a1[1] / a1[2] + b[1] / a1[2])
plot!(x, x -> -x * a2[1]/ a2[2] + b[2] / a2[2])
plot!(x, x -> -x * a3[1]/ a3[2] + b[3] / a3[2])
plot!(x, x -> -x * a4[1]/ a4[2] + b[4] / a4[2])
plot!(evaluate(x_c)[1] .+ evaluate(r) * cos.(theta), evaluate(x_c)[2] .+ evaluate(r) * sin.(theta), linewidth = 2)
plot!(title ="Largest Euclidean ball lying in a 2D polyhedron", legend = nothing)
plot!(x, x -> -x * a2[1] / a2[2] + b[2] / a2[2])
plot!(x, x -> -x * a3[1] / a3[2] + b[3] / a3[2])
plot!(x, x -> -x * a4[1] / a4[2] + b[4] / a4[2])
plot!(
evaluate(x_c)[1] .+ evaluate(r) * cos.(theta),
evaluate(x_c)[2] .+ evaluate(r) * sin.(theta),
linewidth = 2,
)
plot!(
title = "Largest Euclidean ball lying in a 2D polyhedron",
legend = nothing,
)
20 changes: 10 additions & 10 deletions docs/examples_literate/general_examples/control.jl
@@ -80,7 +80,6 @@
#
# The following code builds and solves our control example:


using Convex, SCS, Plots

## Some constraints on our motion
@@ -103,12 +102,13 @@ force = Variable(2, T - 1)
mu = 1

## Add constraints on our variables
constraints = Constraint[ position[:, i + 1] == position[:, i] + h * velocity[:, i] for i in 1 : T - 1]

constraints = Constraint[
position[:, i+1] == position[:, i] + h * velocity[:, i] for i in 1:T-1
]

for i in 1 : T - 1
acceleration = force[:, i]/mass + g - drag * velocity[:, i]
push!(constraints, velocity[:, i + 1] == velocity[:, i] + h * acceleration)
for i in 1:T-1
acceleration = force[:, i] / mass + g - drag * velocity[:, i]
push!(constraints, velocity[:, i+1] == velocity[:, i] + h * acceleration)
end

## Add position constraints
Expand All @@ -126,10 +126,10 @@ solve!(problem, MOI.OptimizerWithAttributes(SCS.Optimizer, "verbose" => 0))
# We can plot the trajectory taken by the object.

pos = evaluate(position)
plot([pos[1, 1]], [pos[2, 1]], st=:scatter, label="initial point")
plot!([pos[1, T]], [pos[2, T]], st=:scatter, label="final point")
plot!(pos[1, :], pos[2, :], label="trajectory")
plot([pos[1, 1]], [pos[2, 1]], st = :scatter, label = "initial point")
plot!([pos[1, T]], [pos[2, T]], st = :scatter, label = "final point")
plot!(pos[1, :], pos[2, :], label = "trajectory")

# We can also see how the magnitude of the force changes over time.

plot(vec(sum(evaluate(force).^2, dims=1)), label="force (magnitude)")
plot(vec(sum(evaluate(force) .^ 2, dims = 1)), label = "force (magnitude)")
54 changes: 30 additions & 24 deletions docs/examples_literate/general_examples/huber_regression.jl
@@ -14,8 +14,8 @@ end
# Generate data for Huber regression.
using Random
Random.seed!(1);
number_samples = round(Int,1.5*n);
beta_true = 5*randn(n);
number_samples = round(Int, 1.5 * n);
beta_true = 5 * randn(n);
X = randn(n, number_samples);
Y = zeros(number_samples);
v = randn(number_samples);
@@ -28,43 +28,49 @@ using Convex, SCS, Distributions
lsq_data = zeros(number_tests);
huber_data = zeros(number_tests);
prescient_data = zeros(number_tests);
p_vals = range(0, stop=0.15, length=number_tests);
for i=1:length(p_vals)
p = p_vals[i];
p_vals = range(0, stop = 0.15, length = number_tests);
for i in 1:length(p_vals)
p = p_vals[i]
## Generate the sign changes.
factor = 2 * rand(Binomial(1, 1-p), number_samples) .- 1;
Y = factor .* X' * beta_true + v;
factor = 2 * rand(Binomial(1, 1 - p), number_samples) .- 1
Y = factor .* X' * beta_true + v

## Form and solve a standard regression problem.
beta = Variable(n);
fit = norm(beta - beta_true) / norm(beta_true);
cost = norm(X' * beta - Y);
prob = minimize(cost);
solve!(prob, MOI.OptimizerWithAttributes(SCS.Optimizer, "verbose" => 0));
lsq_data[i] = evaluate(fit);
beta = Variable(n)
fit = norm(beta - beta_true) / norm(beta_true)
cost = norm(X' * beta - Y)
prob = minimize(cost)
solve!(prob, MOI.OptimizerWithAttributes(SCS.Optimizer, "verbose" => 0))
lsq_data[i] = evaluate(fit)

## Form and solve a prescient regression problem,
## i.e., where the sign changes are known.
cost = norm(factor .* (X'*beta) - Y);
solve!(minimize(cost), MOI.OptimizerWithAttributes(SCS.Optimizer, "verbose" => 0))
prescient_data[i] = evaluate(fit);
cost = norm(factor .* (X' * beta) - Y)
solve!(
minimize(cost),
MOI.OptimizerWithAttributes(SCS.Optimizer, "verbose" => 0),
)
prescient_data[i] = evaluate(fit)

## Form and solve the Huber regression problem.
cost = sum(huber(X' * beta - Y, 1));
solve!(minimize(cost), MOI.OptimizerWithAttributes(SCS.Optimizer, "verbose" => 0))
huber_data[i] = evaluate(fit);
cost = sum(huber(X' * beta - Y, 1))
solve!(
minimize(cost),
MOI.OptimizerWithAttributes(SCS.Optimizer, "verbose" => 0),
)
huber_data[i] = evaluate(fit)
end

#-
using Plots

plot(p_vals, huber_data, label="Huber", xlabel="p", ylabel="Fit" )
plot!(p_vals, lsq_data, label="Least squares")
plot!(p_vals, prescient_data, label="Prescient")
plot(p_vals, huber_data, label = "Huber", xlabel = "p", ylabel = "Fit")
plot!(p_vals, lsq_data, label = "Least squares")
plot!(p_vals, prescient_data, label = "Prescient")
#-

## Plot the relative reconstruction error for Huber and prescient regression,
## zooming in on smaller values of p.
indices = findall(p_vals .<= 0.08);
plot(p_vals[indices], huber_data[indices], label="Huber")
plot!(p_vals[indices], prescient_data[indices], label="Prescient")
plot(p_vals[indices], huber_data[indices], label = "Huber")
plot!(p_vals[indices], prescient_data[indices], label = "Prescient")
