Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

checkpoint while this is working. i need to move things around again.

  • Loading branch information...
commit da279ff4ded5d0d95d1299b10ea3b38207c47726 1 parent 8d9329e
@robey authored
View
32 src/fauna.coffee
@@ -1,12 +1,17 @@
+event_sequence = require("./fauna/event_sequence")
+exports.EventSequence = event_sequence.EventSequence
+
+fauna_client = require("./fauna/fauna_client")
+exports.FaunaClient = fauna_client.FaunaClient
+
+rest = require("./fauna/rest")
+exports.Rest = rest.Rest
+exports.RestError = rest.RestError
schema = require("./fauna/schema")
exports.Class = schema.Class
exports.Schema = schema.Schema
-fauna_client = require("./fauna/fauna_client")
-exports.EventSequence = fauna_client.EventSequence
-exports.FaunaClient = fauna_client.FaunaClient
-
# useful for debugging
util = require 'util'
exports.dump = (x) -> util.inspect(x, false, null, true)
@@ -18,17 +23,6 @@ exports.dump = (x) -> util.inspect(x, false, null, true)
Q = require 'q'
util = require 'util'
-rest = require("./rest")
-Rest = rest.Rest
-RestError = rest.RestError
-
-schema = require("./schema")
-Class = schema.Class
-Schema = schema.Schema
-Data = schema.Data
-Reference = schema.Reference
-EventSet = schema.EventSet
-
dump = (x) -> util.inspect(x, false, null, true)
@@ -284,13 +278,5 @@ class Fauna
@delete("instances/#{ref}")
-exports.Rest = Rest
-exports.RestError = RestError
exports.Fauna = Fauna
exports.FaunaError = FaunaError
-
-exports.Class = Class
-exports.Schema = Schema
-exports.Reference = Reference
-exports.Data = Data
-exports.EventSet = EventSet
View
68 src/fauna/event_sequence.coffee
@@ -0,0 +1,68 @@
+topoSort = (graph) ->
+ ###
+  given a graph of { id: [id] } where each key maps to a list of the keys
+  it depends on, return a sorted array of ids in dependency order.
+ ###
+ rv = []
+ # make a copy of 'graph' so we don't destroy it. it's mutable in JS.
+ work = {}
+ for k, v of graph then work[k] = v
+ recursing = {}
+ visit = (id) ->
+ if recursing[id]? then throw new Error("Dependency graph has a cycle")
+ recursing[id] = true
+ for d in work[id] then if work[d]? then visit(d)
+ delete work[id]
+ delete recursing[id]
+ rv.push id
+ while Object.keys(work).length > 0
+ visit(Object.keys(work)[0])
+ rv
+
+
+class EventSequence
+ constructor: (@schema, data) ->
+ @events = []
+ if data?
+ @before = data.resource.before
+ if not @before? then @before = 0
+ @after = data.resource.after
+ @unpack(data)
+ else
+ @before = 0
+ @after = 0
+
+ # unpack a response of { resource:{}, references:{} }
+ unpack: (data) ->
+ # first, sort the attached 'references' by dependency.
+ dependencies = {}
+ for refid, struct of data.references
+ dependencies[refid] = []
+ for k, v of struct.references
+ dependencies[refid].push.apply(dependencies[refid], if v instanceof Array then v else [v])
+ # then, inflate each reference.
+ references = {}
+ for refid in topoSort(dependencies)
+ struct = data.references[refid]
+ newrefs = {}
+ for k, v of struct.references
+ v = if v instanceof Array then v.map((x) -> references[x]) else references[v]
+ newrefs[k] = v
+ struct.references = newrefs
+ references[refid] = @schema.inflate(struct)
+ # finally, fill in the references in the event set.
+ @events = data.resource.events
+ for event in @events
+ refid = event.resource
+ if refid? and references[refid]? then event.resource = references[refid]
+
+ # return only the objects with a "create" event and no corresponding "delete" event.
+ # objects are ordered newest to oldest.
+ toArray: ->
+ rv = []
+ for event in @events then if event.action == "create" then rv.push(event.resource)
+ for event in @events then if event.action == "delete"
+ rv = (x for x in rv when x._fauna.id != event.resource._fauna.id)
+ rv
+
+exports.EventSequence = EventSequence
View
115 src/fauna/fauna_client.coffee
@@ -1,80 +1,34 @@
Q = require 'q'
util = require 'util'
+event_sequence = require("./event_sequence")
rest = require("./rest")
schema = require("./schema")
+EventSequence = event_sequence.EventSequence
+Rest = rest.Rest
+Schema = schema.Schema
-topoSort = (graph) ->
- ###
- given a graph of { id: [id] } where each key refers to a list of the keys
- dependent on it, return a sorted array of ids in dependency order.
- ###
- rv = []
- # make a copy of 'graph' so we don't destroy it. it's mutable in JS.
- work = {}
- for k, v of graph then work[k] = v
- recursing = {}
- visit = (id) ->
- if recursing[id]? then throw new Error("Dependency graph has a cycle")
- recursing[id] = true
- for d in work[id] then if work[d]? then visit(d)
- delete work[id]
- delete recursing[id]
- rv.push id
- while Object.keys(work).length > 0
- visit(Object.keys(work)[0])
- rv
-
-
-class EventSequence
- constructor: (@schema, data) ->
- @events = []
- if data?
- @before = data.resource.before
- if not @before? then @before = 0
- @after = data.resource.after
- @unpack(data)
- else
- @before = 0
- @after = 0
-
- # unpack a response of { resource:{}, references:{} }
- unpack: (data) ->
- # first, sort the attached 'references' by dependency.
- dependencies = {}
- for refid, struct of data.references
- dependencies[refid] = []
- for k, v of struct.references
- dependencies[refid].push.apply(dependencies[refid], if v instanceof Array then v else [v])
- # then, inflate each reference.
- references = {}
- for refid in topoSort(dependencies)
- struct = data.references[refid]
- newrefs = {}
- for k, v of struct.references
- v = if v instanceof Array then v.map((x) -> references[x]) else references[v]
- newrefs[k] = v
- struct.references = newrefs
- references[refid] = @schema.inflate(struct, @client)
- # finally, fill in the references in the event set.
- @events = data.resource.events
- for event in @events
- refid = event.resource
- if refid? and references[refid]? then event.resource = references[refid]
-
- # return only the objects with a "create" event and no corresponding "delete" event.
- # objects are ordered oldest to newest.
- toArray: ->
- rv = []
- for event in @events then if event.action == "create" then rv.unshift(event.resource)
- for event in @events then if event.action == "delete"
- rv = (x for x in rv when x._fauna.id != event.resource._fauna.id)
- rv
-
-exports.EventSequence = EventSequence
+dump = (x) -> util.inspect(x, false, null, true)
+# ----- helper decorators
+
+requireOwner = (f) ->
+ ->
+ if not (@ownerAuthentication.username? and @ownerAuthentication.password?)
+ return Q.reject(new Error("Requires authentication as owner"))
+ f.bind(@)()
+
+asEventArray = (f) ->
+ ->
+ f.bind(@)().then (data) =>
+ (new EventSequence(@schema, data)).toArray()
+
+asObject = (f) ->
+ ->
+ f.bind(@)().then (data) => @schema.inflate(data)
+
class FaunaClient
constructor: ->
# three kinds of authentication: owner, publisher, client, user
@@ -84,6 +38,9 @@ class FaunaClient
@userToken = null
# all js/json object transformations use the schema:
@schema = new Schema(@)
+ # hook up nested namespaces
+ @publisherKeys = new _PublisherKeys
+ for k, v of @publisherKeys then @publisherKeys[k] = v.bind(@)
debug: (message) -> Rest.debug(message)
@@ -101,30 +58,24 @@ class FaunaClient
urlFor: (path) ->
"#{@protocol}://#{encodeURIComponent(@username)}:#{escape(@password)}@#{@hostname}/#{@apiVersion}/#{path}"
- op: (method, path, data) ->
- options.url = @urlFor(path)
+ rest: (method, path, data) ->
+ options = { url: @urlFor(path) }
if data? then options.body = JSON.stringify(data)
Rest.op(method, options).then (body) ->
if body? then JSON.parse(body) else null
-
+ setOwnerAuth: (username, password) ->
+ @ownerAuthentication.username = username
+ @ownerAuthentication.password = password
-
- publisherKeys:
- get: -> @requireOwner => @asEventArray => @op("get", "keys/publisher")
+ class _PublisherKeys
+ get: requireOwner asEventArray -> @rest("get", "keys/publisher")
+ create: requireOwner -> @rest("post", "keys/publisher")
- # ----- helper decorators
- requireOwner: (f) ->
- if not @ownerAuthentication? then return Q.reject(new Error("Requires authentication as owner"))
- f()
-
- asEventArray: (f) ->
- f().then (data) =>
- (new EventSequence(@schema, data)).toArray()
View
2  src/fauna/schema.coffee
@@ -132,7 +132,7 @@ class Schema
obj[k] = resource.data[k]
else if fieldType == "reference"
ref = resource.references[k]
- if ref? then obj[k] = @inflate(ref)
+ if ref? then obj[k] = (if ref._fauna? then ref else @inflate(ref))
obj
deflate: (obj) ->
View
3  test/test_event_sequence.coffee
@@ -56,8 +56,7 @@ describe "EventSequence", ->
@field "name"
schema.addPrototypes Cat
list = (new fauna.EventSequence(schema, data2)).toArray()
- list[0].name.should.eql("Simba")
- list[1].name.should.eql("Spooky")
+ list.map((x) -> x.name).should.eql [ "Spooky", "Simba" ]
View
135 test/test_fauna.coffee
@@ -1,135 +0,0 @@
-
-should = require 'should'
-util = require 'util'
-
-fauna = require("../lib/fauna")
-Q = require("q")
-Rest = fauna.Rest
-Fauna = fauna.Fauna
-
-dump = (x) -> util.inspect(x, false, null, true)
-
-withSuccessfulRequest = (json, f) ->
- Rest.withRequestHandler(((options) -> [ null, 200, JSON.stringify(json) ]), f)
-
-futureTest = (f) ->
- (done) ->
- f().then((-> done()), ((error) -> done(error)))
-
-
-# some test data
-message_1 =
- ref: "classes/messages/1"
- class: "classes/messages"
- ts: 900000
- references: {}
- data: {}
- deleted: false
-message_2 =
- ref: "classes/messages/2"
- class: "classes/messages"
- ts: 930000
- references: {}
- data: {}
- deleted: false
-message_3 =
- ref: "classes/messages/3"
- class: "classes/messages"
- ts: 991000
- references: { "author": "users/11" }
- data: {}
- deleted: false
-user_11 =
- ref: "users/11"
- class: "users"
- ts: 999000
- references: {}
- data: { email: "root@example.com" }
- deleted: false
-
-
-describe "Fauna", ->
- it "can GET", futureTest ->
- f = new Fauna("robey", "xyzzy")
- r = -> f.get("help")
- withSuccessfulRequest({ okay: true }, r).then ([ resp, requests ]) ->
- resp.should.eql({ okay: true })
-
- it "collapses events", ->
- response =
- resource:
- ref: "classes/messages"
- after: 930000
- events: [
- { ts: 930000, action: "create", resource: "classes/messages/2", set: "classes/messages" }
- { ts: 900000, action: "create", resource: "classes/messages/1", set: "classes/messages" }
- ]
- references:
- "classes/messages/1": message_1
- "classes/messages/2": message_2
- f = new Fauna()
- f.collapseEvents(response).should.eql {
- before: 0,
- after: 930000,
- items: [
- { _fauna: { id: "classes/messages/2", className: "messages", ts: new Date(930), fauna: f } }
- { _fauna: { id: "classes/messages/1", className: "messages", ts: new Date(900), fauna: f } }
- ]
- }
-
- it "unpacks schemas", ->
- class Message extends fauna.Class
- @reference "author"
- getAuthor: -> @author
- class User extends fauna.Class
- @field "email"
- response =
- resource:
- ref: "classes/messages"
- events: [
- { ts: 991000, action: "create", resource: "classes/messages/3", set: "classes/messages" }
- ]
- references:
- "classes/messages/3": message_3
- "users/11": user_11
- f = new Fauna()
- f.addClasses Message, User
- page = f.collapseEvents(response)
- (page.items[0] instanceof Message).should.equal(true)
- page.items.should.eql [
- {
- _fauna: { id: "classes/messages/3", className: "messages", ts: new Date(991), fauna: f }
- author:
- _fauna: { id: "users/11", className: "users", ts: new Date(999), fauna: f }
- email: "root@example.com"
- }
- ]
- page.items[0].getAuthor().email.should.eql("root@example.com")
-
- it "can handle 204 responses", futureTest ->
- f = new Fauna()
- handler = (options) -> [ null, 204, undefined ]
- r = -> f.deleteUser("users/9")
- Rest.withRequestHandler(handler, r).then ([ resp, requests ]) ->
- resp?.should.equal(false)
-
- it "can setup a schema", futureTest ->
- class Fish extends fauna.Class
- @eventSet "scales"
- handler = (options) ->
- if options.url.match(/classes\/fishs\/config/)
- [ null, 200, JSON.stringify(resource: { ref: "classes/fishs/config" }) ]
- else
- [ null, 200, JSON.stringify(resource: { ref: "classes/fishs/sets/scales/config" }) ]
-
- f = new Fauna()
- f.addClasses Fish
- r = -> f.setupSchemas()
- Rest.withRequestHandler(handler, r).then ([ resp, requests ]) ->
- resp.should.eql [
- {
- "class": { ref: "classes/fishs/config" },
- event_sets: [ { ref: "classes/fishs/sets/scales/config" } ]
- }
- ]
- requests.length.should.equal(2)
View
117 test/test_fauna_client.coffee
@@ -0,0 +1,117 @@
+should = require 'should'
+Q = require 'q'
+util = require 'util'
+
+fauna = require("../lib/fauna")
+
+withSuccessfulRequest = (json, f) ->
+ fauna.Rest.withRequestHandler(((options) -> [ null, 200, JSON.stringify(json) ]), f)
+
+futureTest = (f) ->
+ (done) ->
+ f().then((-> done()), ((error) -> done(error)))
+
+
+describe "FaunaClient", ->
+ it "requires auth sometimes", futureTest ->
+ f = new fauna.FaunaClient()
+ f.publisherKeys.get()
+ .then (x) ->
+ throw new Error("Should not succeed")
+ .fail (error) ->
+ error.message.should.match(/Requires authentication/)
+
+
+ describe "publisherKeys", ->
+ it "get", futureTest ->
+ f = new fauna.FaunaClient()
+ f.setOwnerAuth("u", "p")
+ r = -> f.publisherKeys.get()
+ withSuccessfulRequest(JSON.parse(data1), r).then ([ resp, requests ]) ->
+ requests.length.should.eql(1)
+ requests[0].url.should.match(/keys\/publisher/)
+ resp.map((item) -> item.key).should.eql [
+ "AQAAayWp_qAAAQBrJamcsAABzAFAUI2ckXGpAt2VjWsyiA"
+ "AQAAayWp97AEAQBrJamcsAABlnqXMsjdfw3kJU44o1dpDg"
+ "AQAAayWpw9AAAQBrJamcsAABfoxWbkY-6Vd8d_er_VP_NA"
+ ]
+
+ it "create", futureTest ->
+ f = new fauna.FaunaClient()
+ f.setOwnerAuth("u", "p")
+ r = -> f.publisherKeys.create()
+ withSuccessfulRequest(JSON.parse(data2), r).then ([ resp, requests ]) ->
+ requests.length.should.eql(1)
+ requests[0].url.should.match(/keys\/publisher/)
+ console.log fauna.dump(resp)
+ resp.key.should.eql "AQAAayWp97AEAQBrJamcsAABlnqXMsjdfw3kJU44o1dpDg"
+
+
+# test data from the fauna documentation for GET keys/publisher
+data1 = """
+{
+ "resource" : {
+ "ref" : "keys/publisher",
+ "class" : "sets",
+ "after" : 9223372036854775,
+ "creates" : 3,
+ "updates" : 0,
+ "deletes" : 0,
+ "events" : [
+ {
+ "ts" : 1365020937405000,
+ "action" : "create",
+ "resource" : "keys/publisher/30159234559639553",
+ "set" : "keys/publisher"
+ },
+ {
+ "ts" : 1365020937292002,
+ "action" : "create",
+ "resource" : "keys/publisher/30159234443248641",
+ "set" : "keys/publisher"
+ },
+ {
+ "ts" : 1365020936500000,
+ "action" : "create",
+ "resource" : "keys/publisher/30159233572929537",
+ "set" : "keys/publisher"
+ }
+ ]
+ },
+ "references" : {
+ "keys/publisher/30159234559639553" : {
+ "ref" : "keys/publisher/30159234559639553",
+ "class" : "keys/publisher",
+ "ts" : 1365020937405000,
+ "key" : "AQAAayWp_qAAAQBrJamcsAABzAFAUI2ckXGpAt2VjWsyiA",
+ "deleted" : false
+ },
+ "keys/publisher/30159234443248641" : {
+ "ref" : "keys/publisher/30159234443248641",
+ "class" : "keys/publisher",
+ "ts" : 1365020937292002,
+ "key" : "AQAAayWp97AEAQBrJamcsAABlnqXMsjdfw3kJU44o1dpDg",
+ "deleted" : false
+ },
+ "keys/publisher/30159233572929537" : {
+ "ref" : "keys/publisher/30159233572929537",
+ "class" : "keys/publisher",
+ "ts" : 1365020936500000,
+ "key" : "AQAAayWpw9AAAQBrJamcsAABfoxWbkY-6Vd8d_er_VP_NA",
+ "deleted" : false
+ }
+ }
+}
+"""
+
+data2 = """
+{
+ "resource" : {
+ "ref" : "keys/publisher/30159234443248641",
+ "class" : "keys/publisher",
+ "ts" : 1365020937292002,
+ "key" : "AQAAayWp97AEAQBrJamcsAABlnqXMsjdfw3kJU44o1dpDg",
+ "deleted" : false
+ }
+}
+"""
Please sign in to comment.
Something went wrong with that request. Please try again.