Permalink
Browse files

Fixed search API bug: NSFW results were missing from Reddit searches (now sends the over18 cookie on search requests and the include_over_18 query parameter)

  • Loading branch information...
ianmcxa authored and QuantumBadger committed Apr 21, 2018
1 parent 0bb88cc commit 88f06e64a28f02bdedc37cafebe2186066160de9
@@ -23,7 +23,7 @@ dependencies {
implementation 'com.android.support:appcompat-v7:27.1.1'
implementation 'com.android.support:recyclerview-v7:27.1.1'
implementation 'com.android.support:design:27.1.1'
// Single OkHttp declaration: the stale 3.9.1 entry (left over from the diff)
// was removed so the same artifact is not declared twice.
implementation 'com.squareup.okhttp3:okhttp:3.10.0'
implementation 'info.guardianproject.netcipher:netcipher:1.2.1'
testImplementation 'junit:junit:4.12'
@@ -28,14 +28,11 @@
import java.util.List;
public abstract class HTTPBackend {
private static boolean useJavaBackend = false;
/**
* Factory method can read configuration information to choose a backend
*/
/**
 * Factory method returning the HTTP backend used for all requests.
 *
 * <p>Always returns the OkHttp-based backend. The old branch returning
 * {@code new JavaHTTPBackend()} was removed (that class was deleted in this
 * change), and the duplicated return statement left behind by the diff was
 * unreachable and would not compile.
 *
 * @return the shared {@code OKHTTPBackend} instance
 */
public static HTTPBackend getBackend() {
	return OKHTTPBackend.getHttpBackend();
}
public static class RequestDetails {

This file was deleted.

(The diff for the deleted file could not be rendered.)
@@ -18,9 +18,14 @@
package org.quantumbadger.redreader.http.okhttp;
import android.content.Context;
import android.util.Log;
import okhttp3.CacheControl;
import okhttp3.Call;
import okhttp3.ConnectionPool;
import okhttp3.Cookie;
import okhttp3.CookieJar;
import okhttp3.HttpUrl;
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.RequestBody;
@@ -35,6 +40,8 @@
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
@@ -45,9 +52,37 @@
private static HTTPBackend httpBackend;
private OKHTTPBackend() {
final OkHttpClient.Builder builder = new OkHttpClient.Builder();
// here we set the over18 cookie and return it whenever the url contains search
// this is necessary to get the reddit API to return NSFW search results
final List<Cookie> list = new ArrayList<>();
Cookie.Builder cookieBuilder = new Cookie.Builder();
cookieBuilder.domain("reddit.com");
cookieBuilder.name("over18");
cookieBuilder.value("1");
cookieBuilder.path("/");
list.add(cookieBuilder.build());
// Cookie jar that injects the over18=1 opt-in cookie on search requests only,
// so the reddit API returns NSFW search results (see the comment where `list`
// is built above). Cookies set by responses are intentionally discarded.
final CookieJar cookieJar = new CookieJar() {
@Override
public void saveFromResponse(HttpUrl url, List<Cookie> cookies) {
// Intentionally empty: response cookies are never stored.
}
@Override
public List<Cookie> loadForRequest(HttpUrl url) {
// NOTE(review): contains("search") matches anywhere in the URL string —
// TODO confirm this cannot false-positive on non-search endpoints.
if (url.toString().contains("search"))
return list;
else return Collections.emptyList();
}
};
builder.cookieJar(cookieJar);
if(TorCommon.isTorEnabled()) {
Proxy tor = new Proxy(Proxy.Type.HTTP, new InetSocketAddress("127.0.0.1", 8118));
//SOCKS appears to be broken for now, Relevant: https://github.com/square/okhttp/issues/2315
@@ -107,47 +142,50 @@ public Request prepareRequest(final Context context, final RequestDetails detail
// NOTE(review): this span is a rendered diff with the pre-change and
// post-change bodies of executeInThisThread interleaved — `response`,
// `status`, `body`, `bodyStream` and `bodyBytes` are each declared twice,
// and a trailing catch block from the removed outer try survives near the
// end. It will not compile as-is; reconstruct the post-change version from
// the original commit before using this code.
public void executeInThisThread(final Listener listener) {
final Call call = mClient.newCall(builder.build());
Log.d("OK", "calling: " + call.request().url());
callRef.set(call);
// --- apparently the OLD version: outer try wrapping execute() ---
try {
final Response response;
try {
response = call.execute();
} catch(IOException e) {
listener.onError(CacheRequest.REQUEST_FAILURE_CONNECTION, e, null);
return;
}
final int status = response.code();
// --- apparently the NEW version: execute() with separate IOException /
// Throwable handlers and debug logging ---
final Response response;
if(status == 200 || status == 202) {
try {
response = call.execute();
} catch(IOException e) {
listener.onError(CacheRequest.REQUEST_FAILURE_CONNECTION, e, null);
Log.d("OK", "request didn't even connect: " + e.getMessage());
return;
} catch (Throwable t) {
listener.onError(CacheRequest.REQUEST_FAILURE_CONNECTION, t, null);
Log.d("OK", "request didn't even connect: " + t.getMessage());
return;
}
// NEW version extracts the body before checking the status code.
final ResponseBody body = response.body();
final InputStream bodyStream;
final Long bodyBytes;
final int status = response.code();
Log.d("OK", "request got status: " + status);
if(body != null) {
bodyStream = body.byteStream();
bodyBytes = body.contentLength();
} else {
// TODO error
bodyStream = null;
bodyBytes = null;
}
if(status == 200 || status == 202) {
final String contentType = response.header("Content-Type");
// --- apparently the OLD version: body extraction inside the success branch ---
final ResponseBody body = response.body();
final InputStream bodyStream;
final Long bodyBytes;
listener.onSuccess(contentType, bodyBytes, bodyStream);
if(body != null) {
bodyStream = body.byteStream();
bodyBytes = body.contentLength();
} else {
listener.onError(CacheRequest.REQUEST_FAILURE_REQUEST, null, status);
// TODO error
bodyStream = null;
bodyBytes = null;
}
// --- stray catch-all from the removed OLD outer try ---
} catch(Throwable t) {
listener.onError(CacheRequest.REQUEST_FAILURE_CONNECTION, t, null);
final String contentType = response.header("Content-Type");
listener.onSuccess(contentType, bodyBytes, bodyStream);
} else {
listener.onError(CacheRequest.REQUEST_FAILURE_REQUEST, null, status);
}
}
@@ -73,7 +73,7 @@ public SearchPostListURL sort(PostSort newOrder) {
// NOTE(review): rendered diff fragment — the two builder.scheme(...) lines
// below are the before/after of a one-line change (getDomain() replaced by
// getHumanReadableDomain()); only the second should remain in real code.
public Uri generateJsonUri() {
Uri.Builder builder = new Uri.Builder();
builder.scheme(Constants.Reddit.getScheme()).authority(Constants.Reddit.getDomain());
builder.scheme(Constants.Reddit.getScheme()).authority(Constants.Reddit.getHumanReadableDomain());
if(subreddit != null) {
builder.encodedPath("/r/");
// NOTE(review): the diff elides the method body between file lines ~80 and
// ~115 of the original source; the hunk header below marks the gap.
@@ -115,6 +115,10 @@ public Uri generateJsonUri() {
builder.appendEncodedPath(".json");
// if the user doesn't have NSFW content disabled, it won't show up anyway
// leaving this on by default doesn't hurt
builder.appendQueryParameter("include_over_18", "on");
return builder.build();
}

0 comments on commit 88f06e6

Please sign in to comment.