Permalink
Browse files

First release

  • Loading branch information...
0 parents commit 3fd60523c6b2f4acaeca82e59e2d1079eab0339e Tyler Hall committed Oct 15, 2010
Showing with 340 additions and 0 deletions.
  1. +66 −0 README.markdown
  2. +179 −0 autosmush
  3. +95 −0 lib/class.smushit.php
@@ -0,0 +1,66 @@
+Autosmush
+=========
+
+Autosmush is a command-line tool which scans an [Amazon S3](http://aws.amazon.com/s3/) bucket
+and losslessly compresses your images using Yahoo!'s amazing
+[Smush.it web service](http://developer.yahoo.com/yslow/smushit/). It also adds a far-future
+expires header on your images to aid in browser caching as recommended by
+[YSlow](http://developer.yahoo.com/yslow/).
+
+Autosmush can be run manually or as a cron job. It avoids re-smushing images by checking for an
+'x-amz-meta-smushed' HTTP header on already processed images.
+
+FEATURES
+--------
+
+ * Smushed images are automatically re-uploaded into S3
+ * Avoids re-smushing images, so future runs take less time
+ * Pass the '-t' parameter to do a dry-run and see how much space you could be saving
+ * Adds far future expiration header to each file
+ * Prints a summary of total bytes saved when complete
+
+REQUIREMENTS
+------------
+
+ * Requires PHP5 and php_curl extension.
+ * Requires the [AWS SDK for PHP](http://aws.amazon.com/sdkforphp/). (Download and install instructions are located inside autosmush.)
+
+USAGE
+-----
+
+`./autosmush some-s3-bucket-name`
+
+or
+
+`./autosmush some-s3-bucket-name/path/to/files`
+
+
+UPDATES
+-------
+
+Code is hosted at GitHub: [http://github.com/tylerhall/autosmush](http://github.com/tylerhall/autosmush)
+
+LICENSE
+-------
+
+The MIT License
+
+Copyright (c) 2010 Tyler Hall <tylerhall AT gmail DOT com>
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
179 autosmush
@@ -0,0 +1,179 @@
#!/usr/bin/php
<?php
// ** IMPORTANT **
// Autosmush requires the Amazon PHP SDK, which is not included in this project.
// To download and install the SDK, follow these steps...
//
// 1) Download the AWS SDK for PHP from here: http://aws.amazon.com/sdkforphp/
// 2) Unzip file
// 3) Inside the unzipped folder, copy the 'sdk-x.x.x' folder into Autosmush's 'lib' folder
// 4) Rename 'sdk-x.x.x' to 'sdk'

// SECURITY NOTE(review): these AWS credentials were hard-coded and published
// with the source. Read them from the environment instead; the literals are
// kept only as a backward-compatible fallback and should be rotated/removed.
define('AWS_S3_KEY', getenv('AWS_S3_KEY') ?: '0NYVNQHMTB5MG8DEZ2G2');
define('AWS_S3_SECRET', getenv('AWS_S3_SECRET') ?: 'aV9zkDrJ6MKvIgsa+xhEFog49XztCJ9SlljB7CGw');

// Prevent PHP 5.3 time zone warning...
date_default_timezone_set('America/Los_Angeles');

require_once 'lib/sdk/sdk.class.php';
require_once 'lib/class.smushit.php';

// Command-line options: -t (dry run), -q (quiet), --help.
$shortops = 'tq';
$longopts = array('help');
$options = getopt($shortops, $longopts);

// Show usage when --help is passed or no bucket argument was given.
if(array_key_exists('help', $options) || $GLOBALS['argc'] == 1)
{
    echo "Usage: " . $GLOBALS['argv'][0] . " [OPTION]... bucket-name OR bucket-name/path\n";
    echo "Scans an Amazon S3 bucket for uncompressed images and smushes them using Yahoo!'s Smush.it service.\n";
    echo "-t Test mode - don't commit any changes\n";
    echo "-q Quiet mode - only display errors\n"; // typo fix: was "Quite mode"
    echo "\n";
    exit;
}

// TEST is consulted by smush_bucket() to skip all writes back to S3.
define('TEST', array_key_exists('t', $options));

// Split the final argument into bucket name and optional key prefix
// ("bucket" or "bucket/path/to/files").
$target = $GLOBALS['argv'][$GLOBALS['argc'] - 1];
$slash = strpos($target, '/');
if($slash === false)
{
    $bucket_name = $target;
    $bucket_path = '';
}
else
{
    $bucket_name = substr($target, 0, $slash);
    $bucket_path = substr($target, $slash + 1);
}

smush_bucket($bucket_name, $bucket_path);
+
// Smush every JPEG/PNG in an S3 bucket (optionally limited to a key prefix),
// re-upload the compressed versions, and print a byte-savings summary.
//
// $bucket_name - S3 bucket to scan
// $bucket_path - optional key prefix to restrict the scan ('' = whole bucket)
//
// Relies on the global TEST constant (dry run) and on the iflog() /
// bytes2str() helpers defined alongside it in this script.
function smush_bucket($bucket_name, $bucket_path = '')
{
    // Compression statistics for the summary printed at the end.
    $files_smushed = 0;
    $files_not_smushed = 0;
    $files_skipped = 0;
    $total_compressed_bytes = 0;
    $total_uncompressed_bytes = 0;

    // List the bucket's contents, optionally limited to $bucket_path.
    $s3 = new AmazonS3(AWS_S3_KEY, AWS_S3_SECRET);
    $xml = $s3->list_objects($bucket_name, array('prefix' => $bucket_path));

    if((string)$xml->status != '200')
        die("Error: Unable to retrieve bucket contents.\n");

    // Nothing to do if the bucket (or prefix) is empty.
    if(!isset($xml->body) || !isset($xml->body->Contents))
        die("\n");

    $smush = new SmushIt();
    foreach($xml->body->Contents as $object)
    {
        // Only smush images...
        if(preg_match('/\.(jpg|jpeg|png)$/i', (string)$object->Key) !== 1)
            continue;

        // Skip images already processed (marked via S3 user metadata).
        $headers = $s3->get_object_headers($bucket_name, $object->Key);
        if(isset($headers->header['x-amz-meta-smushed']))
        {
            iflog("SKIPPED: {$object->Key} already smushed\n");
            $files_skipped++;
            continue;
        }

        // Hand the object's public URL to Smush.it.
        $smush->smushURL("http://s3.amazonaws.com/$bucket_name/" . $object->Key);

        if($smush->savings)
        {
            // Smush.it produced a smaller version.
            iflog("SMUSHED: {$object->Key} ({$smush->savings}%)\n");

            $files_smushed++;
            $total_uncompressed_bytes += $smush->size;
            $total_compressed_bytes += $smush->compressedSize;

            if(!TEST)
            {
                // Download the compressed version into a temp file.
                $tmp_filename = tempnam(sys_get_temp_dir(), 'smush');
                $fp = fopen($tmp_filename, 'w+');
                $ch = curl_init($smush->compressedUrl);
                curl_setopt($ch, CURLOPT_FILE, $fp);
                // Bug fix: check the transfer result instead of blindly
                // trusting whatever landed in the temp file.
                $downloaded = curl_exec($ch);
                curl_close($ch);
                fclose($fp);

                // Sanity check: the download must match the size Smush.it reported.
                if($downloaded !== false && file_exists($tmp_filename) && is_readable($tmp_filename) && filesize($tmp_filename) == $smush->compressedSize)
                {
                    // Re-upload with a far-future Expires header plus a
                    // metadata marker so future runs skip this object.
                    $options = array(
                        'fileUpload' => $tmp_filename,
                        'acl' => AmazonS3::ACL_PUBLIC,
                        'contentType' => (string)$headers->header['content-type'],
                        'meta' => array('smushed' => 'yes'),
                        'headers' => array('expires' => date('D, j M Y H:i:s', time() + (86400 * 365 * 10)) . ' GMT')
                    );
                    $s3->create_object($bucket_name, $object->Key, $options);
                }
                else
                {
                    iflog("ERROR: Could not download smushed version of {$object->Key} \n");
                }

                // Bug fix: the temp file was previously leaked on every iteration.
                if(file_exists($tmp_filename))
                    unlink($tmp_filename);
            }
        }
        else
        {
            // Already optimal (or Smush.it returned an error for it).
            iflog("NOT SMUSHED: {$object->Key} already compressed\n");
            $files_not_smushed++;
            $total_uncompressed_bytes += $smush->size;
            // Bug fix: an un-smushable file is its own "compressed" size.
            // Without this the compressed total omitted these files while the
            // uncompressed total included them, inflating the reported savings.
            $total_compressed_bytes += $smush->size;

            if(!TEST)
            {
                // Mark it as smushed so we don't waste time next time...
                $options = array(
                    'meta' => array('smushed' => 'yes'),
                    'acl' => AmazonS3::ACL_PUBLIC,
                    'headers' => array('expires' => date('D, j M Y H:i:s', time() + (86400 * 365 * 10)) . ' GMT')
                );
                $s3->update_object($bucket_name, $object->Key, $options);
            }
        }
    }

    // Print our savings...
    echo '  Files Smushed: ' . $files_smushed . "\n";
    echo 'Could Not Smush: ' . $files_not_smushed . "\n";
    echo '  Files Skipped: ' . $files_skipped . "\n\n";
    echo '           Size: ' . bytes2str($total_uncompressed_bytes) . "\n";
    echo 'Compressed Size: ' . bytes2str($total_compressed_bytes) . "\n\n";
    echo '        Savings: ' . bytes2str($total_uncompressed_bytes - $total_compressed_bytes) . "\n";
    // Bug fix: guard against division by zero when nothing was scanned.
    $pct = ($total_uncompressed_bytes > 0)
        ? round(($total_uncompressed_bytes - $total_compressed_bytes) / $total_uncompressed_bytes * 100, 2)
        : 0;
    echo '      Savings %: ' . $pct . "%\n";
}
+
// Conditional logger: echoes $str unless the user requested quiet mode (-q)
// on the command line. Reads the script-level $options array from getopt().
function iflog($str)
{
    global $options;

    // Only speak when quiet mode was NOT requested.
    if(!array_key_exists('q', $options))
        echo $str;
}
+
// Format a byte count as a human-readable string, e.g. 2048 -> "2KB".
//
// $val   - number of bytes
// $round - decimal places to keep (default 0)
function bytes2str($val, $round = 0)
{
    // Binary unit prefixes; we shift one off for every 1024x step up.
    $unit = array('', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y');

    // Bug fix: the original compared against 1000 but divided by 1024, so
    // values in 1000-1023 displayed as "1KB". Use a consistent 1024 threshold.
    // The count() guard stops us from running off the end of the unit list.
    while($val >= 1024 && count($unit) > 1)
    {
        $val /= 1024;
        array_shift($unit);
    }

    return round($val, $round) . $unit[0] . 'B';
}
@@ -0,0 +1,95 @@
<?php
// smushit-php - a PHP client for Yahoo!'s Smush.it web service
//
// June 24, 2010
// Tyler Hall <tylerhall@gmail.com>
// http://github.com/tylerhall/smushit-php/tree/master

class SmushIt
{
    const SMUSH_URL = 'http://www.smushit.com/ysmush.it/ws.php?';

    public $filename;       // Local path of the last file passed to smushFile()
    public $url;            // Source URL of the last image passed to smushURL()
    public $compressedUrl;  // URL of the compressed result hosted by Smush.it
    public $size;           // Original size in bytes, as reported by the service
    public $compressedSize; // Compressed size in bytes
    public $savings;        // Percent saved; falsy when nothing was saved
    public $error;          // Error message string on failure, null on success

    // Optionally smush $data immediately: anything starting with http(s)://
    // is dispatched to smushURL(), everything else is treated as a file path.
    public function __construct($data = null)
    {
        if(!is_null($data))
        {
            // Bug fix: anchor the pattern so a local path that merely
            // *contains* "http://" isn't mistaken for a URL.
            if(preg_match('/^https?:\/\//i', $data) == 1)
                $this->smushURL($data);
            else
                $this->smushFile($data);
        }
    }

    // Ask Smush.it to compress the image at $url.
    // Returns true on success (result fields populated), false on error
    // (see $error).
    public function smushURL($url)
    {
        $this->url = $url;

        $ch = curl_init();
        // Bug fix: the image URL is a query-string value and must be encoded,
        // otherwise URLs containing spaces or their own query strings break.
        curl_setopt($ch, CURLOPT_URL, self::SMUSH_URL . 'img=' . urlencode($url));
        curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
        curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 5);
        // Cap the whole transfer, not just the connect phase, so a hung
        // service can't stall a batch run indefinitely.
        curl_setopt($ch, CURLOPT_TIMEOUT, 60);
        $json_str = curl_exec($ch);
        curl_close($ch);

        // parseResponse() copes with a false/garbage response and sets $error.
        return $this->parseResponse($json_str);
    }

    // Upload the local file $filename to Smush.it for compression.
    // Returns true on success, false on error (see $error).
    public function smushFile($filename)
    {
        $this->filename = $filename;

        if(!is_readable($filename))
        {
            $this->error = 'Could not read file';
            return false;
        }

        // Bug fix: the '@/path' upload syntax was removed in PHP 5.6+.
        // Use CURLFile where available and keep the old form as a fallback
        // for ancient runtimes.
        if(class_exists('CURLFile'))
            $upload = new CURLFile($filename);
        else
            $upload = '@' . $filename;

        $ch = curl_init();
        curl_setopt($ch, CURLOPT_URL, self::SMUSH_URL);
        curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
        curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 5);
        curl_setopt($ch, CURLOPT_TIMEOUT, 60);
        curl_setopt($ch, CURLOPT_POST, true);
        curl_setopt($ch, CURLOPT_POSTFIELDS, array('files' => $upload));
        $json_str = curl_exec($ch);
        curl_close($ch);

        return $this->parseResponse($json_str);
    }

    // Decode the service's JSON reply into the public result fields.
    // Returns false (and sets $error) on transport or service errors.
    private function parseResponse($json_str)
    {
        // Reset all result state so stale values from a previous call
        // can't leak into this one.
        $this->size = null;
        $this->compressedUrl = null;
        $this->compressedSize = null;
        $this->savings = null;
        $this->error = null;

        $json = json_decode($json_str);

        if(is_null($json))
        {
            $this->error = 'Bad response from Smush.it web service';
            return false;
        }

        if(isset($json->error))
        {
            $this->error = $json->error;
            return false;
        }

        $this->size = $json->src_size;
        $this->compressedUrl = $json->dest;
        $this->compressedSize = $json->dest_size;
        $this->savings = $json->percent;
        return true;
    }
}

0 comments on commit 3fd6052

Please sign in to comment.