introduce evaluate function, add pretty print
davidlizeng committed Aug 18, 2014
1 parent 2e3ff71 commit 9971a53
Showing 21 changed files with 124 additions and 76 deletions.
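
The change that gives this commit its name runs through every file below: the solved value of a Variable is now read through a new evaluate function instead of the .value field, and the examples drop the elementwise .+ and .* operators in favor of plain + and *. A minimal before/after sketch of the pattern (the data and variable names here are illustrative only, and it is assumed that evaluate returns the same numeric value the .value field used to hold):

    using LinearLeastSquares

    # hypothetical data, for illustration only
    x_data = rand(20, 1);
    y_data = 3 * x_data + 1;

    slope = Variable();
    offset = Variable();
    optval = minimize!(sum_squares(offset + x_data * slope - y_data));

    # before this commit: read the solution off the .value field,
    # combining it with raw arrays via elementwise operators
    #   line = offset.value .+ x_data .* slope.value;

    # after this commit: read it through evaluate()
    line = evaluate(offset) + x_data * evaluate(slope);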
Binary file modified docs/_build/doctrees/environment.pickle
Binary file modified docs/_build/doctrees/index.doctree
Binary file modified docs/_build/doctrees/julia_examples.doctree
Binary file modified docs/_build/doctrees/julia_tutorial.doctree
28 changes: 14 additions & 14 deletions docs/_build/html/_sources/julia_examples.txt
2 changes: 1 addition & 1 deletion docs/_build/html/_sources/julia_tutorial.txt
28 changes: 14 additions & 14 deletions docs/_build/html/julia_examples.html
2 changes: 1 addition & 1 deletion docs/_build/html/julia_tutorial.html
2 changes: 1 addition & 1 deletion docs/_build/html/searchindex.js

Large diffs are not rendered by default.

28 changes: 14 additions & 14 deletions docs/julia_examples.rst
@@ -72,15 +72,15 @@ We can now frame this problem in Julia code and solve our problem using LLS:
slope = Variable();
offset = Variable();
-line = offset .+ x_data * slope;
+line = offset + x_data * slope;
residuals = line - y_data;
error = sum_squares(residuals);
optval = minimize!(error);
# plot the data and the line
t = [0; 5; 0.1];
plt.plot(x_data, y_data, "ro");
-plt.plot(t, slope.value .* t .+ offset.value);
+plt.plot(t, evaluate(slope) * t + evaluate(offset));
plt.xlabel("x");
plt.ylabel("y");
plt.show();
@@ -113,9 +113,9 @@ Here is the Julia code to solve this problem using LLS and plot the quadratic:
quadratic_coeff = Variable();
slope = Variable();
offset = Variable();
-quadratic = offset .+ x_data * slope + quadratic_coeff * x_data .^ 2;
+quadratic = offset + x_data * slope + quadratic_coeff * x_data .^ 2;
residuals = quadratic - y_data;
-error = sum_squares(error);
+error = sum_squares(residuals);
optval = minimize!(error);
# Create some evenly spaced points for plotting, again replicate powers
@@ -124,7 +124,7 @@ Here is the Julia code to solve this problem using LLS and plot the quadratic:
# Plot our regressed function
plt.plot(x_data, y_data, "ro")
-plt.plot(t, offset.value .+ t .* slope.value .+ t_squared * quadratic.value, "b")
+plt.plot(t, evaluate(offset) + t * evaluate(slope) + t_squared * evaluate(quadratic_coeff), "b")
plt.xlabel("x")
plt.ylabel("y")
plt.show()
@@ -254,8 +254,8 @@ outcome:
optval = minimize!(mu * sum_squares(velocity) + sum_squares(force), constraints);
-plt.plot(position.value[1, 1:2:T]', position.value[2, 1:2:T]', "r-", linewidth=1.5)
-plt.quiver(position.value[1, 1:4:T], position.value[2, 1:4:T], force.value[1, 1:4:T-1]/2, force.value[2, 1:4:T-1]/2, width=0.002)
+plt.plot(evaluate(position)[1, 1:2:T]', evaluate(position)[2, 1:2:T]', "r-", linewidth=1.5)
+plt.quiver(evaluate(position)[1, 1:4:T], evaluate(position)[2, 1:4:T], evaluate(force)[1, 1:4:T-1]/2, evaluate(force)[2, 1:4:T-1]/2, width=0.002)
plt.plot(0, 0, "bo", markersize=10)
plt.plot(final_position[1], final_position[2], "go", markersize=10)
plt.xlim([-5, 15])
@@ -334,7 +334,7 @@ The code and data for this example can be found `here <https://github.com/davidl
objective = sum_squares(line_mat * x - line_vals);
optval = minimize!(objective);
-plt.imshow(reshape(x.value, img_size,img_size), cmap = get_cmaps()[29])
+plt.imshow(reshape(evaluate(x), img_size,img_size), cmap = get_cmaps()[29])
The final result of the tomography will look something like

@@ -427,10 +427,10 @@ Here is the LLS code:
# print out the 5 words most indicative of sports and nonsports
words = String[];
f = open("largeCorpusfeatures.txt");
-for i = 1:length(w.value)
+for i = 1:length(evaluate(w))
push!(words, readline(f))
end
-indices = sortperm(vec(w.value));
+indices = sortperm(vec(evaluate(w)));
for i = 1:5
print(words[indices[i]])
end
@@ -506,7 +506,7 @@ The following code uses LLS to find and plot the model:
smoothing = 100;
smooth_objective = sum_squares(yearly[1 : n - 1] - yearly[2 : n]);
optval = minimize!(sum_squares(temps - yearly) + smoothing * smooth_objective, eq_constraints);
-residuals = temps - yearly.value;
+residuals = temps - evaluate(yearly);
# Plot smooth fit
plt.figure(1)
@@ -561,7 +561,7 @@ against the actual residual temperatures:
# plot autoregressive fit of daily fluctuations for first few days
plt.figure(3)
plt.plot(residuals[ar_len + 1 : ar_len + 100], color="g")
-plt.plot(residuals_mat[1:100, :] * ar_coef.value, color="r")
+plt.plot(residuals_mat[1:100, :] * evaluate(ar_coef), color="r")
plt.title("Autoregressive Fit of Residuals")
@@ -576,8 +576,8 @@ Melbourne:
# plot final fit of data
plt.figure(4)
plt.plot(temps)
-total_estimate = yearly.value
-total_estimate[ar_len + 1 : end] += residuals_mat * ar_coef.value
+total_estimate = evaluate(yearly)
+total_estimate[ar_len + 1 : end] += residuals_mat * evaluate(ar_coef)
plt.plot(total_estimate, color="r", alpha=0.5)
plt.title("Total Fit of Data")
plt.xlim([0, n])
2 changes: 1 addition & 1 deletion docs/julia_tutorial.rst
@@ -15,7 +15,7 @@ To install LLS, simply open up the Julia shell and run the command:

.. code-block:: none
-Pkg.clone("https://github.com/davidlizeng/LinearLeastSquares.jl.git")
+Pkg.add("LinearLeastSquares")
To use LLS in Julia, run the following command to import the library:

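With the tutorial change above, installation now goes through the package registry rather than a direct git clone. A sketch of the updated workflow (assuming the package is registered under the name passed to Pkg.add):

    Pkg.add("LinearLeastSquares")   # install once, from the Julia package registry
    using LinearLeastSquares        # then load it in each session
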
4 changes: 2 additions & 2 deletions examples/control/control.jl
@@ -49,8 +49,8 @@ constraints += velocity[:, T] == 0;
optval = minimize!(mu * sum_squares(velocity) + sum_squares(force), constraints);


-plt.plot(position.value[1, 1:2:T]', position.value[2, 1:2:T]', "r-", linewidth=1.5)
-plt.quiver(position.value[1, 1:4:T], position.value[2, 1:4:T], force.value[1, 1:4:T-1]/2, force.value[2, 1:4:T-1]/2, width=0.002)
+plt.plot(evaluate(position)[1, 1:2:T]', evaluate(position)[2, 1:2:T]', "r-", linewidth=1.5)
+plt.quiver(evaluate(position)[1, 1:4:T], evaluate(position)[2, 1:4:T], evaluate(force)[1, 1:4:T-1]/2, evaluate(force)[2, 1:4:T-1]/2, width=0.002)
plt.plot(0, 0, "bo", markersize=10)
plt.plot(final_position[1], final_position[2], "go", markersize=10)
plt.xlim([-5, 15])
8 changes: 4 additions & 4 deletions examples/machine_learning/document_classification.jl
@@ -39,10 +39,10 @@ optval = minimize!(objective);
# print out the 5 words most indicative of sports and nonsports
words = String[];
f = open("largeCorpusfeatures.txt");
-for i = 1:length(w.value)
+for i = 1:length(evaluate(w))
push!(words, readline(f))
end
-indices = sortperm(vec(full(w.value)));
+indices = sortperm(vec(evaluate(w)));
for i = 1:5
print(words[indices[i]])
end
@@ -51,9 +51,9 @@ for i = 0:4
end

# calculate training error
-yhat = sign(trainDocuments * w.value .+ v.value);
+yhat = sign(trainDocuments * evaluate(w) + evaluate(v));
trainCE = 1/size(trainClasses,1)*sum(trainClasses .!= yhat)

# calculate performance of our classifier on the test set
-yhat2 = sign(testDocuments * w.value .+ v.value);
+yhat2 = sign(testDocuments * evaluate(w) + evaluate(v));
testCE = 1/size(testClasses,1)*sum(testClasses .!= yhat2)
12 changes: 7 additions & 5 deletions examples/simple_lin_and_quad_reg/linear_regression.jl
@@ -3,6 +3,7 @@
# Translated into LinearLeastSquares.jl by Karanveer Mohan and David Zeng

using LinearLeastSquares
+import PyPlot.plt
# Set the random seed to get consistent data
srand(1)

@@ -19,14 +19,15 @@ y_data = x_data_expanded * true_coeffs + 0.5 * rand(n, 1);

slope = Variable();
offset = Variable();
-optval = minimize!(sum_squares(offset + x_data * slope - y_data));
-println("Slope = $(slope.value[1, 1]), offset = $(offset.value[1, 1])");
+line = offset + x_data * slope;
+residuals = line - y_data;
+error = sum_squares(residuals);
+optval = minimize!(error);

-import PyPlot.plt
-# Print results and plot
+# plot the data and the line
t = [0; 5; 0.1];
plt.plot(x_data, y_data, "ro");
-plt.plot(t, slope.value[1, 1] * t .+ offset.value[1, 1]);
+plt.plot(t, evaluate(slope) .* t .+ evaluate(offset));
plt.xlabel("x");
plt.ylabel("y");
plt.show();
