#!/usr/bin/env bash
# keyword-syncer - exports and imports custom search engines (the "keywords"
# table in the "Web Data" sqlite database) of Google's Chrome browser.
usage(){
    # Print the help text to stdout and exit.
    # Arguments: $1 - exit status (defaults to 0 for a plain help request)
    cat << ECHO
keyword-syncer - exports and imports custom search engines in Google's Chrome browser
Usage:
$0 [options] profile_path

Summary:
The profile path can be either a directory that contains a Google Chrome
"Web Data" file, or the file itself. The file exported from this tool can be
edited as a spreadsheet. Currently the only import options are to append to
the existing table entries or to replace them all.
-h Display this information.
-e Export to a csv file
-i csv_file Import contents of a csv file
-a Atomic import. Nuke existing entries. (backup will be exported)

Acknowledgments:
Copyright (c) 2010 Richard Bronosky
Offered under the terms of the MIT License.
http://www.opensource.org/licenses/mit-license.php
Created while employed by CMGdigital
ECHO

    exit "${1:-0}"
}

export_keywords(){
    # Dump the whole keywords table to a CSV file with a header row.
    # Globals:   DB_FILE (read) - path to the Chrome "Web Data" sqlite database
    # Arguments: $1 - path of the CSV file to (over)write
    OUT="$1"
    # The > redirection creates/truncates $OUT; the original's separate
    # 'echo -n > "$OUT"' was redundant and has been dropped.
    sqlite3 "$DB_FILE" > "$OUT" << SQL
.headers ON
.mode csv
select * from keywords;
SQL
}

import_keywords(){
# Import a CSV of keyword rows into the keywords table via a staging
# table (keywordsImport), so the CSV's header row can be skipped and the
# id column can be left for sqlite to assign.
# Globals:   DB_FILE (read) - path to the Chrome "Web Data" sqlite database
# Arguments: $1 - path of the CSV file to import
IN="$1"
echo "Importing file: $IN"
echo "DROP TABLE IF EXISTS keywordsImport;" | sqlite3 "$DB_FILE"
# create a table (keywordsImport) like an existing table (keywords).
echo ".schema keywords" | sqlite3 "$DB_FILE" | sed 's/TABLE "keywords"/TABLE "keywordsImport"/' | sqlite3 "$DB_FILE"
# trim headers if needed
# NOTE(review): mktemp -t takes a template prefix on BSD/macOS but a
# directory flag on GNU, and 'basename -s' is non-POSIX — portability of
# this line depends on the platform; confirm before relying on it.
TEMP=$(mktemp -t $(basename -s .sh $0))
# Drop the CSV's header row when columns 2 and 3 look like the expected
# header; pass every other line through unchanged.
cat "$IN" | awk -F, 'NR==1 && $2=="short_name" && $3=="keyword" {next} {print}' > $TEMP
# collect field list
# Insert and delete a dummy row so the SELECT emits exactly one header
# line; sed then strips the leading "id," and quits after that first
# line, leaving the comma-separated column list minus the id column.
fields=$(
sqlite3 "$DB_FILE" <<- SQL | sed 's/[iI][dD],//;q'
.separator ","
.headers ON
INSERT INTO keywordsImport (id,short_name,keyword,favicon_url,url) values (0,0,0,0,0);
select * from keywordsImport;
DELETE from keywordsImport;
SQL
)
    # import the temporary data file
# Load the trimmed CSV into the staging table, copy every non-id column
# into the real keywords table, then drop the staging table.
sqlite3 "$DB_FILE" <<- SQL
.separator ","
.import $TEMP keywordsImport
INSERT INTO keywords ($fields) SELECT $fields from keywordsImport;
DROP TABLE IF EXISTS keywordsImport;
SQL
    # remove the temporary data file
    [[ -f "$TEMP" ]] && rm "$TEMP"
}

backup_keywords(){
    # Snapshot the keywords table to a timestamped CSV in the current
    # directory (used as a safety net before destructive operations).
    local stamp
    stamp=$(date +%s)
    OUTPUT_FILE="${PWD}/keywords.${stamp}.csv"
    echo "Creating backup file: $OUTPUT_FILE"
    export_keywords "$OUTPUT_FILE"
}

truncate_keywords(){
    # Ask for a single-keystroke confirmation, then delete every row
    # from the keywords table. Any key other than y/Y aborts silently.
    local response
    echo
    read -sn 1 -p "Are you sure you want to delete all existing keyword searches? [y/N] " response
    echo
    case $response in
        [yY] ) sqlite3 "$DB_FILE" <<< "DELETE FROM keywords;" ;;
    esac
}

opt_export(){
    # Handler for -e: dump the keywords table to ./keywords.csv.
    OUTPUT_FILE="${PWD}/keywords.csv"
    echo "Creating export file: $OUTPUT_FILE"
    export_keywords "$OUTPUT_FILE"
}

opt_atomic(){
    # Handler for -a: wipe all existing keyword entries (with an
    # interactive confirmation inside truncate_keywords).
    truncate_keywords
}

opt_import(){
    # Handler for -i: load the CSV named by IMPORT_FILE into the database.
    import_keywords "$IMPORT_FILE"
}

# --- option parsing -------------------------------------------------------
# Handlers are stored in a sparse array so they always run in a fixed,
# safe order — export(1), backup(2), truncate(3), import(4) — regardless
# of the order the flags appear on the command line.
# Fix: 'h' was missing from the optstring even though the case statement
# and the help text advertise -h, so getopts printed "illegal option"
# noise; -h now exits 0 as the help text implies, invalid flags exit 1.
while getopts "eai:h" opt; do
    case $opt in
        e ) # export
            operations[1]=opt_export
            ;;

        a ) # atomic
            operations[3]=opt_atomic
            ;;

        i ) # import (always preceded by a safety backup)
            IMPORT_FILE="$OPTARG"
            operations[2]=backup_keywords
            operations[4]=opt_import
            ;;

        h ) usage 0 ;;
        \? ) usage 1 ;;
    esac
done

# The first non-option argument is the profile path: either the
# "Web Data" sqlite file itself or the directory that contains it.
DB_FILE=${!OPTIND}
[[ -d "$DB_FILE" ]] && DB_FILE="${DB_FILE%/}/Web Data"
if [[ ! -f "$DB_FILE" ]]; then
    echo "Cannot read Web Data file: $DB_FILE" >&2
    exit 2
fi
echo "Using Web Data file: $DB_FILE"

# Run the scheduled handlers in index order. Each element is a single
# function name, so expand it as a scalar (the original's
# "${operation[@]}" worked only by accident).
for operation in "${operations[@]}"; do
    "$operation"
done