added debug option

1 parent baeae38 commit f37cc9791cd0456810729865c121e0361931c665 committed Feb 13, 2010
Showing with 50 additions and 8 deletions.
  1. +4 −0 cache/6b708baed2f5f88b9cf2e5bd3ab0eeca.cache
  2. +23 −0 example.php
  3. +2 −1 readme.markdown
  4. +21 −7 simpleCachedCurl.inc.php
cache/6b708baed2f5f88b9cf2e5bd3ab0eeca.cache
@@ -0,0 +1,4 @@
+s:2676:"<?xml version="1.0" encoding="UTF-8"?>
+<query xmlns:yahoo="http://www.yahooapis.com/v1/base.rng" yahoo:count="10" yahoo:created="2010-02-13T08:05:17Z" yahoo:lang="en-US" yahoo:updated="2010-02-13T08:05:17Z" yahoo:uri="http://query.yahooapis.com/v1/yql?q=select+status.text%2C+status.id+from+xml+where+url+%3D+%22http%3A%2F%2Ftwitter.com%2Fstatuses%2Fuser_timeline%2Fginader.xml%3Fcount%3D10%22"><diagnostics><publiclyCallable>true</publiclyCallable><url execution-time="960" proxy="DEFAULT"><![CDATA[http://twitter.com/statuses/user_timeline/ginader.xml?count=10]]></url><user-time>974</user-time><service-time>960</service-time><build-version>4265</build-version></diagnostics><results><statuses><status><id>9050894213</id><text>oh boy - it's 2:15am and we just finished recording the podcast. Now off to bed. Good night @tcaspers and @73inches :-)</text></status></statuses><statuses><status><id>9048491972</id><text>now getting ready for the second take on recording the @technikwuerze about WCAG 2 with @73inches and @tcaspers - please Skype pleeese work</text></status></statuses><statuses><status><id>9048418011</id><text>watched the olympic opening unfold beautifully to then painfully fail in the end - sorry for the Canadians...</text></status></statuses><statuses><status><id>9037473557</id><text>@dajobe did you find a way to watch the opening ceremony already? Or do I really have to way another hour before it runs here at 7:30?</text></status></statuses><statuses><status><id>9020138990</id><text>@djesse we're talking about WCAG2 :-)</text></status></statuses><statuses><status><id>9019017590</id><text>thanks to Skype it seems like I'll have to wait until tonight at 12 to try to record the next try to @technikwuerze podcast - oh well...</text></status></statuses><statuses><status><id>8991713045</id><text>I'm at Dishdash Restaurant (190 S Murphy Avenue, at Washington, Sunnyvale). http://4sq.com/6OTCLZ</text></status></statuses><statuses><status><id>8983225589</id><text>interesting read: Target.com Accessibility - http://webaim.org/blog/target-com-accessibility/ (via @jared_w_smith)</text></status></statuses><statuses><status><id>8968244734</id><text>@aphillipo well echofon claims to have unread sync between their mac and iphone app but it doesn't work. Twitter should really handle that.</text></status></statuses><statuses><status><id>8965883294</id><text>just stopped his echofon experiment. The 2 features it has more than tweetie (unread sync and notifications) don't really work. Back now.</text></status></statuses></results></query><!-- total: 975 -->
+<!-- yqlengine1.pipes.ch1.yahoo.com uncompressed/chunked Sat Feb 13 20:05:16 GMT 2010 -->
+";
example.php
@@ -0,0 +1,23 @@
+<?php
+include 'simpleCachedCurl.inc.php';
+?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN"
+ "http://www.w3.org/TR/html4/strict.dtd">
+
+<html lang="en">
+<head>
+ <meta http-equiv="Content-Type" content="text/html; charset=utf-8">
+ <title>simpleCachedCurl Example YQL call for twitter</title>
+ <meta name="author" content="Dirk Ginader">
+</head>
+<body>
+<?php
+
+$url = 'http://query.yahooapis.com/v1/public/yql?q=select%20status.text%2C%20status.id%20from%20xml%20where%20url%20%3D%20%22http%3A%2F%2Ftwitter.com%2Fstatuses%2Fuser_timeline%2Fginader.xml%3Fcount%3D10%22&format=xml&env=store%3A%2F%2Fdatatables.org%2Falltableswithkeys';
+$expires = 60; // 1 minute
+$rawData = simpleCachedCurl($url,$expires);
+echo "<pre>";print_r($rawData);echo"</pre>";
+
+?>
+</body>
+</html>
readme.markdown
@@ -5,8 +5,9 @@ http://ginader.com
very simple wrapper for cURL that creates a local file cache
usage: create a folder named "cache" in the same folder as this file and chmod it 777
-call this function with 2 parameters:
+call this function with 2 required and 1 optional parameter:
$url (string) the URL of the data that you would like to load
  $expires (integer) the number of seconds the cache should stay valid
+ $debug (boolean, optional) write debug information for troubleshooting
returns either the raw cURL data or false if request fails and no cache is available
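
A minimal call following the readme, assuming the "cache" folder already exists and is writable; http://example.org/feed.xml is a placeholder URL, and the optional third argument is only needed while troubleshooting:

<?php
include 'simpleCachedCurl.inc.php';

$data = simpleCachedCurl('http://example.org/feed.xml', 300); // cache for 5 minutes
if ($data === false) {
    echo 'request failed and no cache was available';
}
?>
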
simpleCachedCurl.inc.php
@@ -16,30 +16,44 @@
returns either the raw cURL data or false if request fails and no cache is available
*/
-function simpleCachedCurl($url,$expires){
+function simpleCachedCurl($url,$expires,$debug=false){
+ if($debug){
+ echo "simpleCachedCurl debug:<br>";
+ }
$hash = md5($url);
$filename = dirname(__FILE__).'/cache/' . $hash . '.cache';
$changed = filemtime($filename);
$now = time();
- $diff = $now - $changed;
+ $diff = $now - $changed;
if ( !$changed || ($diff > $expires) ) {
- // no cache or expired --> make new request
+ if($debug){
+ echo "no cache or expired --> make new request<br>";
+ }
$ch = curl_init();
curl_setopt($ch, CURLOPT_URL, $url);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
$rawData = curl_exec($ch);
curl_close($ch);
if(!$rawData){
- // request failed and we have no cache --> fail
+ if($debug){
+ echo "request failed and we have no cache --> fail<br>";
+ }
return false;
}
- // we got a return --> save it to cache
+ if($debug){
+ echo "we got a return --> save it to cache<br>";
+ }
$cache = fopen($filename, 'wb');
- fwrite($cache, serialize($rawData));
+ $write = fwrite($cache, serialize($rawData));
+ if($debug && !$write){
+ echo "writing to $filename failed. Make the folder '".dirname(__FILE__).'/cache/'."' is writeable (chmod 777)<br>";
+ }
fclose($cache);
return $rawData;
}
- // yay we hit the cache --> read it
+ if($debug){
+ echo "yay we hit the cache --> read it<br>";
+ }
$cache = unserialize(file_get_contents($filename));
return $cache;
}
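
With the new flag set, the function echoes its progress inline, which makes a missing or non-writable cache folder easy to spot; a short sketch using the Twitter URL from the cached response above (the debug output is plain HTML, so this belongs on a test page, not in production):

<?php
include 'simpleCachedCurl.inc.php';

// third argument enables debug output: "simpleCachedCurl debug:" followed by either
// "no cache or expired --> make new request" or "yay we hit the cache --> read it"
$raw = simpleCachedCurl('http://twitter.com/statuses/user_timeline/ginader.xml?count=10', 60, true);
?>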
