[enhance] github: Added remaining updated files for github V3.

commit 5aa653ce1967e26709a69f4010901e7c91a20354 1 parent b368d8b
@nrs135 nrs135 authored
9 lib/stdlib/apis/github/auth/auth.opa
@@ -21,7 +21,7 @@
* @destination public
*/
-//package stdlib.apis.github.auth
+package stdlib.apis.github.auth
import stdlib.apis.common
import stdlib.apis.github.lib
@@ -34,6 +34,9 @@ type GHAuth.scope =
{user} /** DB read/write access to profile info only. */
/ {public_repo} /** DB read/write access, and Git read access to public repos. */
/ {repo} /** DB read/write access, and Git read access to public and private repos */
+ / {repo_status} /** Read/write access to public and private repo statuses.
+ Does not include access to code - use repo for that. */
+ / {delete_repo} /** Delete access to adminable repositories. */
/ {gist} /** Write access to gists. */
@private GHAp = {{
@@ -43,6 +46,8 @@ type GHAuth.scope =
| {user} -> "user"
| {public_repo} -> "public_repo"
| {repo} -> "repo"
+ | {repo_status} -> "repo:status"
+ | {delete_repo} -> "delete_repo"
| {gist} -> "gist"
}}
@@ -72,7 +77,7 @@ GHAuth(conf:GHAuth.conf) = {{
("code", code)]
match GHLib.full_post(base, path, data, some) with
| {some=c} ->
- token = AL.get_field(AL.get_data(c), "access_token")
+ token = AL.get_field(AL.get_data(c.content), "access_token")
if token == "" then {none}
else {some=token}
| _ -> {none}
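
Usage sketch (not part of this commit): with the two new scope cases, a caller can request status and deletion rights alongside the existing scopes. This hunk does not show the OAuth helper that consumes the list, so only the scope value itself is illustrated; the private serializer above maps {repo_status} to "repo:status".

    // Sketch only: a scope list including the new V3 scopes (placeholder code).
    demo_scopes() =
      [ {repo}, {repo_status}, {delete_repo}, {gist} ] : list(GHAuth.scope)
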
161 lib/stdlib/apis/github/commit/commit.opa
@@ -1,161 +0,0 @@
-/*
- Copyright © 2011 MLstate
-
- This file is part of Opa.
-
- Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
- The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-*/
-/*
- * Author : Nicolas Glondu <nicolas.glondu@mlstate.com>
- **/
-
-/**
- * GitHub commit API module
- *
- * @category api
- * @author Nicolas Glondu, 2011
- * @destination public
- */
-
-//package stdlib.apis.github.commit
-import stdlib.apis.github
-import stdlib.apis.github.lib
-
-/* Types returned by API */
-
-type GitHub.commit = {
- id : string
- parents : list(string)
- author : GitHub.commit_user
- message : string
- url : string
- commited_date : Date.date
- authored_date : Date.date
- tree : string
- committer : GitHub.commit_user
-}
-
-type GitHub.commit_more = {
- base : GitHub.commit
- modified : list((string,string))
- removed : list(string)
-}
-
-@private GHCp = {{
-
- @private GP = GHParse
-
- get_commit_internal(m) =
- parents = List.filter_map(
- j -> match j:RPC.Json.json with
- | {Record=r} ->
- match List.assoc("id",r) with
- | {some={String=s}} -> some(s)
- | {some=v} -> some(Json.serialize(v))
- | {none} -> none
- end
- | _ -> none,
- m.list("parents"))
- { id = m.str("id")
- parents = parents
- author = GP.get_commit_user(m.record("author"))
- message = m.str("message")
- url = m.str("url")
- commited_date = m.date("commited_date")
- authored_date = m.date("authored_date")
- tree = m.str("tree")
- committer = GP.get_commit_user(m.record("committer"))
- } : GitHub.commit
-
- get_commit(srcmap) =
- m = GP.map_funs(srcmap)
- if m.exists("id") then
- some(get_commit_internal(m))
- else none
-
- get_commit_more(srcmap) =
- m = GP.map_funs(srcmap)
- if m.exists("id") then
- base = get_commit_internal(m)
- removed = m.list("removed")
- |> List.filter_map(
- x ->
- match x with
- | {String=s} -> some(s)
- | _ -> none,
- _ )
- modified = m.list("modified")
- |> List.fold(
- e, acc ->
- match e:RPC.Json.json with
- | {Record=r} ->
- List.fold(
- (k,v), acc ->
- vv = match v:RPC.Json.json with
- | {String=s} -> s
- | _ -> Json.serialize(v)
- List.add((k,vv),acc),
- r, acc
- )
- | _ -> acc,
- _, [])
- some(~{base modified removed}:GitHub.commit_more)
- else none
-
- multiple_commits(res) =
- GP.dewrap_list(res, "commits", get_commit)
-
- one_commit_more(res) =
- GP.dewrap_obj(res, "commit", get_commit_more)
-
-}}
-
-GHCommit = {{
-
- @private p(o:string,r:string) = "{o}/{r}"
-
- /**
- * Gets a list of commits on a branch of a repo
- *
- * @param owner Owner of the repo
- * @param repo Repository to get commits from
- * @param branch Branch to get commits from
- * @param page Page of commits displayed. First page is 1. If a value inferior to 1 is provided, it is considered as 1.
- */
- get_commits(owner, repo, branch:string, page:int) =
- path = "/commits/list/{p(owner,repo)}/{branch}"
- /* Force the min value of page to 1, GitHub also does
- it on its side anyway. */
- page = if page < 1 then 1 else page
- data = [("page",Int.to_string(page))]
- GHLib.api_get(path, data, GHCp.multiple_commits)
-
- /**
- * Gets a list of commit on a file on a branch of a repo
- *
- * @param owner Owner of the repo
- * @param repo Repository to get commits from
- * @param branch Branch to get commits from
- * @param file File to follow
- * @param page Page of commits displayed. First page is 1. If a value inferior to 1 is provided, it is considered as 1.
- */
- get_file_commits(owner, repo, branch:string, file:string, page) =
- path = "/commits/list/{p(owner,repo)}/{branch}/{file}"
- /* Force the min value of page to 1, GitHub also does
- it on its side anyway. */
- page = if page < 1 then 1 else page
- data = [("page",Int.to_string(page))]
- GHLib.api_get(path, data, GHCp.multiple_commits)
-
- /**
- * Gets detailled information about one commit
- */
- get_commit(owner, repo, sha:string) =
- path = "/commits/show/{p(owner,repo)}/{sha}"
- GHLib.api_get(path, [], GHCp.one_commit_more)
-
-}}
416 lib/stdlib/apis/github/events/events.opa
@@ -0,0 +1,416 @@
+/*
+ Copyright © 2011 MLstate
+
+ This file is part of Opa.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+/*
+ * Author : Nicolas Glondu <nicolas.glondu@mlstate.com>
+ **/
+
+package stdlib.apis.github.events
+import stdlib.apis.github
+import stdlib.apis.github.lib
+
+/**
+ * GitHub user API module
+ *
+ * @category api
+ * @author Nicolas Glondu, 2011
+ * @destination public
+ */
+
+type GitHub.page = {
+ page_name : string
+ title : string
+ action : string
+ sha : string
+ html_url : string
+}
+
+type GitHub.push_event = {
+ head : string
+ ref : string
+ size : int
+ commits : list({sha : string
+ message : string
+ author : { name : string
+ email : string
+ }
+ url : string
+ distinct : bool
+ })
+}
+
+type GitHub.payload =
+ { repo_comment:GitHub.repo_comment } // CommitCommentEvent
+ / { ref_type:string ref:string master_branch:string description:string } // CreateEvent
+ / { ref_type:string ref:string } // DeleteEvent
+ / { download:GitHub.download } // DownloadEvent
+ / { target:GitHub.user } // FollowEvent
+ / { forkee:GitHub.repository } // ForkEvent
+ / { head:string before:string after:string } // ForkApplyEvent
+ / { action:string gist:GitHub.gist } // GistEvent
+ / { pages:list(GitHub.page) } // GollumEvent
+ / { action:string issue:GitHub.issue comment:GitHub.issue_comment } // IssueCommentEvent
+ / { action:string issue:GitHub.issue } // IssuesEvent
+ / { member:GitHub.user action:string } // MemberEvent
+ / { empty } // PublicEvent - empty payload
+ / { action:string number:int pull_request:GitHub.pull_req } // PullRequestEvent
+ / { prr_comment:GitHub.repo_comment } // PullRequestReviewCommentEvent // ??? check this, is this repo_comment?
+ / { push_event : GitHub.push_event } // PushEvent
+ / { team : GitHub.id_name_url user:GitHub.user repo:GitHub.repository } // TeamAddEvent
+ / { action:string } // WatchEvent
+ / { raw : list((string,RPC.Json.json)) }
+
+type GitHub.event = {
+ `type` : string
+ `public` : bool
+ payload : GitHub.payload
+ repo : GitHub.id_name_url
+ actor : GitHub.short_user
+ org : GitHub.short_user
+ created_at : Date.date
+ id : int
+}
+
+@private GHEp = {{
+
+ @private GP = GHParse
+
+// { comment:GitHub.repo_comment } // CommitCommentEvent
+
+ get_commitcommentevent(srcmap) : option(GitHub.payload) =
+ m = GP.map_funs(srcmap)
+ match (GP.get_rec(m, "comment", GP.get_repo_comment)) with
+ | {some=repo_comment} ->
+ res = {
+ repo_comment = repo_comment
+ } : GitHub.payload
+ {some=res}
+ | _ -> none
+
+// / { ref_type:string ref:string master_branch:string description:string } // CreateEvent
+
+ get_createevent(srcmap) : option(GitHub.payload) =
+ m = GP.map_funs(srcmap)
+ if m.exists("ref")
+ then
+ res = {
+ ref = m.str("ref")
+ master_branch = m.str("master_branch")
+ ref_type = m.str("ref_type")
+ description = m.str("description")
+ } : GitHub.payload
+ {some=res}
+ else none
+
+// / { ref_type:string ref:string } // DeleteEvent
+
+ get_deleteevent(srcmap) : option(GitHub.payload) =
+ m = GP.map_funs(srcmap)
+ if m.exists("ref")
+ then
+ res = {
+ ref = m.str("ref")
+ ref_type = m.str("ref_type")
+ } : GitHub.payload
+ {some=res}
+ else none
+
+// / { download:GitHub.download } // DownloadEvent
+
+ get_downloadevent(srcmap) : option(GitHub.payload) =
+ m = GP.map_funs(srcmap)
+ match (GP.get_rec(m, "download", GP.get_download)) with
+ | {some=download} ->
+ res = {
+ download = download
+ } : GitHub.payload
+ {some=res}
+ | _ -> none
+
+// / { target:GitHub.user } // FollowEvent
+
+ get_followevent(srcmap) : option(GitHub.payload) =
+ m = GP.map_funs(srcmap)
+ match (GP.get_rec(m, "target", GP.get_user)) with
+ | {some=target} ->
+ res = {
+ target = target
+ } : GitHub.payload
+ {some=res}
+ | _ -> none
+
+// / { forkee:GitHub.repository } // ForkEvent
+
+ get_forkevent(srcmap) : option(GitHub.payload) =
+ m = GP.map_funs(srcmap)
+ match (GP.get_rec(m, "forkee", GP.get_repo(false))) with
+ | {some=forkee} ->
+ res = {
+ forkee = forkee
+ } : GitHub.payload
+ {some=res}
+ | _ -> none
+
+// / { head:string before:string after:string } // ForkApplyEvent
+
+ get_forkapplyevent(srcmap) : option(GitHub.payload) =
+ m = GP.map_funs(srcmap)
+ if m.exists("head")
+ then
+ res = {
+ head = m.str("head")
+ before = m.str("before")
+ after = m.str("after")
+ } : GitHub.payload
+ {some=res}
+ else none
+
+// / { action:string gist:GitHub.gist } // GistEvent
+
+ get_gistevent(srcmap) : option(GitHub.payload) =
+ m = GP.map_funs(srcmap)
+ match (GP.get_rec(m, "gist", GP.get_gist)) with
+ | {some=gist} ->
+ res = {
+ action = m.str("action")
+ gist = gist
+ } : GitHub.payload
+ {some=res}
+ | _ -> none
+
+// / { pages:list(GitHub.page) } // GollumEvent
+
+ get_page(srcmap) =
+ m = GP.map_funs(srcmap)
+ if m.exists("page_name")
+ then
+ page = {
+ page_name = m.str("page_name")
+ title = m.str("title")
+ action = m.str("action")
+ sha = m.str("sha")
+ html_url = m.str("html_url")
+ } : GitHub.page
+ {some=page}
+ else none
+
+ get_gollumevent(srcmap) : option(GitHub.payload) =
+ m = GP.map_funs(srcmap)
+ match (GP.get_list(m, "pages", get_page)) with
+ | [] -> none
+ | pages ->
+ res = {
+ pages = pages
+ } : GitHub.payload
+ {some=res}
+
+// / { action:string issue:GitHub.issue comment:GitHub.issue_comment } // IssueCommentEvent
+
+ get_issuecommentevent(srcmap) : option(GitHub.payload) =
+ m = GP.map_funs(srcmap)
+ match (GP.get_rec(m, "issue", GP.get_issue),
+ GP.get_rec(m, "comment", GP.get_issue_comment)) with
+ | ({some=issue},{some=comment}) ->
+ res = {
+ action = m.str("action")
+ issue = issue
+ comment = comment
+ } : GitHub.payload
+ {some=res}
+ | _ -> none
+
+// / { action:string issue:GitHub.issue } // IssuesEvent
+
+ get_issuesevent(srcmap) : option(GitHub.payload) =
+ m = GP.map_funs(srcmap)
+ match (GP.get_rec(m, "issue", GP.get_issue)) with
+ | {some=issue} ->
+ res = {
+ action = m.str("action")
+ issue = issue
+ } : GitHub.payload
+ {some=res}
+ | _ -> none
+
+// / { member:GitHub.user action:string } // MemberEvent
+
+ get_memberevent(srcmap) : option(GitHub.payload) =
+ m = GP.map_funs(srcmap)
+ match (GP.get_rec(m, "member", GP.get_user)) with
+ | {some=member} ->
+ res = {
+ action = m.str("action")
+ member = member
+ } : GitHub.payload
+ {some=res}
+ | _ -> none
+
+// / { empty } // PublicEvent - empty payload
+
+ get_publicevent(_) : option(GitHub.payload) = {some={empty}}
+
+// / { action:string number:int pull_request:GitHub.pull_req } // PullRequestEvent
+
+ get_pull_requestevent(srcmap) : option(GitHub.payload) =
+ m = GP.map_funs(srcmap)
+ match (GP.get_rec(m, "pull_request", GP.get_pull_req)) with
+ | {some=pull_request} ->
+ res = {
+ action = m.str("action")
+ number = m.int("number")
+ pull_request = pull_request
+ } : GitHub.payload
+ {some=res}
+ | _ -> none
+
+// / { prr_comment:GitHub.repo_comment } // PullRequestReviewCommentEvent // ??? check this, is this repo_comment?
+
+ get_pullrequestreviewcommentevent(srcmap) : option(GitHub.payload) =
+ m = GP.map_funs(srcmap)
+ match (GP.get_rec(m, "comment", GP.get_repo_comment)) with
+ | {some=prr_comment} ->
+ res = {
+ prr_comment = prr_comment
+ } : GitHub.payload
+ {some=res}
+ | _ -> none
+
+// / { push_event : GitHub.push_event } // PushEvent
+
+ get_name_email(srcmap) =
+ m = GP.map_funs(srcmap)
+ { name = m.str("name")
+ email = m.str("email")
+ }
+
+ get_event_commit(srcmap) =
+ m = GP.map_funs(srcmap)
+ if m.exists("message")
+ then
+ res = {
+ sha = m.str("sha")
+ message = m.str("message")
+ author = GP.get_rec(m, "author", get_name_email)
+ url = m.str("url")
+ distinct = m.bool("distinct")
+ }
+ {some=res}
+ else none
+
+ get_pushevent(srcmap) : option(GitHub.payload) =
+ m = GP.map_funs(srcmap)
+ if m.exists("commits")
+ then
+ res = {
+ head = m.str("head")
+ ref = m.str("ref")
+ size = m.int("size")
+ commits = GP.get_list(m, "commits", get_event_commit)
+ } : GitHub.push_event
+ {some={push_event=res}}
+ else none
+
+// / { team : GitHub.id_name_url user:GitHub.user repo:GitHub.repository } // TeamAddEvent
+
+ get_teamaddevent(srcmap) : option(GitHub.payload) =
+ m = GP.map_funs(srcmap)
+ match (GP.get_rec(m, "team", GP.get_id_name_url),
+ GP.get_rec(m, "user", GP.get_user),
+ GP.get_rec(m, "repo", GP.get_repo(true))) with
+ | (team,{some=user},{some=repo}) ->
+ res = {
+ team = team
+ user = user
+ repo = repo
+ } : GitHub.payload
+ {some=res}
+ | _ -> none
+
+// / { action:string } // WatchEvent
+
+ get_watchevent(srcmap) : option(GitHub.payload) =
+ m = GP.map_funs(srcmap)
+ if m.exists("action")
+ then
+ res = {
+ action = m.str("action")
+ } : GitHub.payload
+ {some=res}
+ else none
+
+ get_payload(m, typ) : GitHub.payload =
+ match m.record("payload") with
+ | {Record=[]} -> {raw=[]}
+ | {Record=r} ->
+ match typ with
+ | "CommitCommentEvent" -> Option.default({raw=r},get_commitcommentevent({Record=r}))
+ | "CreateEvent" -> Option.default({raw=r},get_createevent({Record=r}))
+ | "DeleteEvent" -> Option.default({raw=r},get_deleteevent({Record=r}))
+ | "DownloadEvent" -> Option.default({raw=r},get_downloadevent({Record=r}))
+ | "FollowEvent" -> Option.default({raw=r},get_followevent({Record=r}))
+ | "ForkEvent" -> Option.default({raw=r},get_forkevent({Record=r}))
+ | "ForkApplyEvent" -> Option.default({raw=r},get_forkapplyevent({Record=r}))
+ | "GistEvent" -> Option.default({raw=r},get_gistevent({Record=r}))
+ | "GollumEvent" -> Option.default({raw=r},get_gollumevent({Record=r}))
+ | "IssueCommentEvent" -> Option.default({raw=r},get_issuecommentevent({Record=r}))
+ | "IssuesEvent" -> Option.default({raw=r},get_issuesevent({Record=r}))
+ | "MemberEvent" -> Option.default({raw=r},get_memberevent({Record=r}))
+ | "PublicEvent" -> Option.default({raw=r},get_publicevent({Record=r}))
+ | "PullRequestEvent" -> Option.default({raw=r},get_pull_requestevent({Record=r}))
+ | "PullRequestReviewCommentEvent" -> Option.default({raw=r},get_pullrequestreviewcommentevent({Record=r}))
+ | "PushEvent" -> Option.default({raw=r},get_pushevent({Record=r}))
+ | "TeamAddEvent" -> Option.default({raw=r},get_teamaddevent({Record=r}))
+ | "WatchEvent" -> Option.default({raw=r},get_watchevent({Record=r}))
+ | _ -> {raw=r}
+ end
+ | _ -> {raw=[]}
+
+ // NOTE: the spec says "All Events have the same response format" which is
+ // complete nonsense. See the output from list_repo_issue_events.
+ get_event(srcmap) =
+ m = GP.map_funs(srcmap)
+ if m.exists("id")
+ then
+ typ = m.str("type")
+ res = {
+ `type` = typ
+ `public` = m.bool("public")
+ payload = get_payload(m, typ)
+ repo = GP.get_rec(m, "repo", GP.get_id_name_url)
+ actor = GP.get_rec(m, "actor", GP.get_short_user)
+ org = GP.get_rec(m, "org", GP.get_short_user)
+ created_at = m.date("created_at")
+ id = GP.get_id(m)
+ } : GitHub.event
+ {some=res}
+ else {none}
+
+ multiple_events(res:WebClient.success(string)) = GP.dewrap_whole_list(res, get_event)
+
+}}
+
+GHEvents = {{
+
+ @private list_events_generic(token, path) =
+ GHLib.api_get_full(path, token, [], GHEp.multiple_events)
+
+ list_public_events(token) = list_events_generic(token, "/events")
+ list_repo_events(token, user:string, repo:string) = list_events_generic(token, "/repos/{user}/{repo}/events")
+ list_repo_issue_events(token, user:string, repo:string) = list_events_generic(token, "/repos/{user}/{repo}/issues/events")
+ list_network_public_events(token, user:string, repo:string) = list_events_generic(token, "/networks/{user}/{repo}/events")
+ list_org_public_events(token, org:string) = list_events_generic(token, "/orgs/{org}/events")
+ list_user_received_events(token, user:string) = list_events_generic(token, "/users/{user}/received_events")
+ list_user_received_public_events(token, user:string) = list_events_generic(token, "/users/{user}/received_events/public")
+ list_user_performed_events(token, user:string) = list_events_generic(token, "/users/{user}/events")
+ list_user_performed_public_events(token, user:string) = list_events_generic(token, "/users/{user}/events/public")
+ list_organization_events(token, user:string, org:string) = list_events_generic(token, "/users/{user}/events/orgs/{org}")
+
+}}
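
Usage sketch (not part of this commit): the new GHEvents entry points all delegate to list_events_generic, so calling them is just a matter of supplying a token and the path parameters. The owner, repo and org names below are placeholders.

    // Sketch only: list events for a repo and for an organization
    // ("opalang", "opa" and "MLstate" are placeholder names).
    demo_events(token) =
      repo_events = GHEvents.list_repo_events(token, "opalang", "opa")
      org_events  = GHEvents.list_org_public_events(token, "MLstate")
      (repo_events, org_events)
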
158 lib/stdlib/apis/github/gist/gist.opa
@@ -21,7 +21,7 @@
* @destination public
*/
-//package stdlib.apis.github.gist
+package stdlib.apis.github.gist
import stdlib.apis.github
import stdlib.apis.github.lib
@@ -29,21 +29,31 @@ import stdlib.apis.github.lib
type GitHub.gist_comment = {
id : int
- user : string
- created_at : Date.date
- updated_at : Date.date
+ url : string
body : string
+ user : GitHub.short_user
+ created_at : Date.date
}
-type GitHub.gist = {
- owner : string
- public : bool
- repo : string
- created_at : Date.date
- description : string
- files : list(string)
- comments : list(GitHub.gist_comment)
-}
+/**
+ * Type of a GitHub gist provided to [GHGist.list_gists]
+ */
+type GHGist.t =
+ { self : string } /** Gists of authenticated user (self="" means public gists) */
+ / { user : string } /** Gists of given user */
+ / { `public` } /** All public gists */
+ / { starred : string} /** Authenticated user's starred gists */
+
+/**
+ * Type of a GitHub gist file update provided to [GHGist.edit_gist]
+ */
+type GHGist.file_update =
+ (string,
+ { update : {content : string} }
+ / { modify : {filename : string
+ content : string } }
+ / { delete })
+
@private GHGp = {{
@@ -51,61 +61,91 @@ type GitHub.gist = {
get_comment(srcmap) =
m = GP.map_funs(srcmap)
- { id = m.int("id")
- user = m.str("user")
- created_at = m.date("created_at")
- updated_at = m.date("updated_at")
- body = m.str("body")
- } : GitHub.gist_comment
-
- get_gist(srcmap) =
- m = GP.map_funs(srcmap)
- if m.exists("repo") then
- files = List.map(
- v -> match v:RPC.Json.json with
- | {String=s} -> s
- | _ -> Json.serialize(v),
- m.list("files"))
- comments = List.filter_map(
- v -> match v:RPC.Json.json with
- | {Record=_} -> some(get_comment(v))
- | _ -> none,
- m.list("comments"))
- res = {
- owner = m.str("owner")
- public = m.bool("public")
- repo = m.str("repo")
- created_at = m.date("created_at")
- description = m.str("description")
- files = files
- comments = comments
- } : GitHub.gist
- some(res)
+ if m.exists("id")
+ then
+ comment = {
+ id = GP.get_id(m)
+ url = m.str("url")
+ body = m.str("body")
+ user = GP.get_rec(m, "user", GP.get_short_user)
+ created_at = m.date("created_at")
+ } : GitHub.gist_comment
+ {some=comment}
else none
- multiple_gists(res) =
- GP.dewrap_list(res, "gists", get_gist)
+ one_gist(res) = GP.dewrap_whole_obj(res, GP.get_gist)
+ multiple_gists(res) = GP.dewrap_whole_list(res, GP.get_gist)
+
+ one_comment(res) = GP.dewrap_whole_obj(res, get_comment)
+ multiple_comments(res) = GP.dewrap_whole_list(res, get_comment)
}}
GHGist = {{
- @private host = "https://gist.github.com/api/v1/json"
+ @private GP = GHParse
+
+ @private gist_path(gist:GHGist.t) =
+ match gist with
+ | {~user} -> ("/users/{user}/gists","")
+ | {~self} -> ("/gists",self)
+ | {`public`} -> ("/gists/public","")
+ | {~starred} -> ("/gists/starred",starred)
+
+ list_gists(gist:GHGist.t) =
+ (path, token) = gist_path(gist)
+ GHLib.api_get_full(path, token, [], GHGp.multiple_gists)
+
+ get_gist(gist_id:int) =
+ path = "/gists/{gist_id}"
+ GHLib.api_get_full(path, "", [], GHGp.one_gist)
+
+ create_gist(token:string, description:option(string), public:bool, files:list((string, string))) =
+ files = List.to_string_using("\{","}",",",List.map(((fn,cn) -> "\"{fn}\":\{\"content\":\"{cn}\"}"),files))
+ json = GHLib.mkopts([{sopt=("description",description)},{breq=("public",public)},{req=("files",files)}])
+ GHLib.api_post_string("/gists", token, json, GHGp.one_gist)
+
+ edit_gist(token:string, id:int, description:option(string), files:list(GHGist.file_update)) =
+ files = List.to_string_using("\{","}",",",
+ List.map(((fn,fu) ->
+ match fu with
+ | {update=~{content}} ->
+ "\"{fn}\":\{\"content\":\"{content}\"}"
+ | {modify=~{filename content}} ->
+ "\"{fn}\":\{\"filename\":\"{filename}\",\"content\":\"{content}\"}"
+ | {delete} ->
+ "\"{fn}\":null}"
+ ),files))
+ json = GHLib.mkopts([{sopt=("description",description)},{req=("files",files)}])
+ GHLib.api_patch_string("/gists/{id}", token, json, GHGp.one_gist)
+
+ star_gist(token:string, id:int) = GHLib.api_put_string("/gists/{id}/star", token, "", GP.expect_204)
+
+ unstar_gist(token:string, id:int) = GHLib.api_delete_string("/gists/{id}/star", token, "", GP.expect_204)
+
+ is_gist_starred(token:string, id:int) = GHLib.api_get_full("/gists/{id}/star", token, [], GP.expect_204_404)
+
+ fork_gist(token:string, id:int) = GHLib.api_post_string("/gists/{id}/fork", token, "", GHGp.one_gist)
+
+ delete_gist(token:string, id:int) = GHLib.api_delete_string("/gists/{id}", token, "", GP.expect_204)
+
+ gist_comments(token:string, id:int) = GHLib.api_get_full("/gists/{id}/comments", token, [], GHGp.multiple_comments)
+
+ get_comment(token:string, id:int) = GHLib.api_get_full("/gists/comments/{id}", token, [], GHGp.one_comment)
+
+ create_comment(token:string, gist_id:int, body) =
+ json = GHLib.mkopts([{sreq=("body",body)}])
+ GHLib.api_post_string("/gists/{gist_id}/comments", token, json, GHGp.one_comment)
- get_gist(gist_id:string) =
- path = "{host}/{gist_id}"
- r = GHLib.api_get_full(path, [], GHGp.multiple_gists)
- match r with
- | {some=l} ->
- if l == [] then none else some(List.head(l))
- | _ -> none
+ edit_comment(token:string, gist_id:int, body) =
+ json = GHLib.mkopts([{sreq=("body",body)}])
+ GHLib.api_patch_string("/gists/comments/{gist_id}", token, json, GHGp.one_comment)
- get_user_gists(user:string) =
- path = "{host}/gists/{user}"
- GHLib.api_get_full(path, [], GHGp.multiple_gists)
+ delete_comment(token:string, id:int) = GHLib.api_delete_string("/gists/comments/{id}", token, "", GP.expect_204)
- get_gist_file(gist_id:string, filename:string) =
- path = "https://raw.github.com/gist/{gist_id}/{filename}"
- GHLib.api_get_full(path, [], some)
+ // TODO: file urls can be found in the gist data
+ //get_gist_file(gist_id:string, filename:string) =
+ // path = "https://raw.github.com/gist/{gist_id}/{filename}"
+ // GHLib.api_get_full(path, "", [], some)
}}
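
Usage sketch (not part of this commit): creating a gist now goes through a JSON body built with GHLib.mkopts instead of the old v1 gist host. The token, description, file contents and gist id below are placeholders.

    // Sketch only: create a public gist with one file, then star an existing gist
    // (the gist id 1234 and the file contents are placeholders).
    demo_gist(token) =
      created = GHGist.create_gist(token, {some="demo gist"}, true,
                                   [("hello.txt", "Hello from Opa")])
      starred = GHGist.star_gist(token, 1234)
      (created, starred)
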
280 lib/stdlib/apis/github/git_data/git_data.opa
@@ -0,0 +1,280 @@
+/*
+ Copyright © 2012 MLstate
+
+ This file is part of Opa.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+*/
+/*
+ * Author : Nicolas Glondu <nicolas.glondu@mlstate.com>
+ **/
+
+/**
+ * GitHub gist API module
+ *
+ * @category api
+ * @author Nicolas Glondu, 2011
+ * @destination public
+ */
+
+package stdlib.apis.github.git_data
+import stdlib.apis.github
+import stdlib.apis.github.lib
+
+/* Types returned by API */
+
+
+type GitHub.gitdata_blob = {
+ content : string
+ encoding : string
+ sha : string
+ size : int
+}
+
+type GitHub.gitdata_sha = {
+ sha : string
+}
+
+type GitHub.gitdata_object = {
+ `type` : string
+ sha : string
+ url : string
+}
+
+type GitHub.gitdata_reference = {
+ ref : string
+ url : string
+ object : option(GitHub.gitdata_object)
+}
+
+type GitHub.gitdata_tag = {
+ tag : string
+ sha : string
+ url : string
+ message : string
+ tagger : option(GitHub.commit_user)
+ object : option(GitHub.gitdata_object)
+}
+
+type GitHub.gitdata_tree_element = {
+ path : string
+ mode : string
+ `type` : string
+ size : int
+ sha : string
+ url : string
+}
+
+type GitHub.gitdata_tree = {
+ sha : string
+ url : string
+ tree : list(GitHub.gitdata_tree_element)
+}
+
+type GitHub.gitdata_sha_or_content =
+ {sha:string}
+ / {content:string}
+
+type GitHub.gitdata_btc = {blob} / {tree} / {commit}
+
+type GitHub.gitdata_tree_element_param = {
+ path : string
+ mode : string
+ `type` : GitHub.gitdata_btc
+ sha_or_content : GitHub.gitdata_sha_or_content
+}
+
+type GitHub.gitdata_tree_param = list(GitHub.gitdata_tree_element_param)
+
+@private GHGDp = {{
+
+ @private GP = GHParse
+
+ get_gitdata_blob(srcmap) =
+ m = GP.map_funs(srcmap)
+ if m.exists("content")
+ then
+ res = {
+ content = m.str("content")
+ encoding = m.str("encoding")
+ sha = m.str("sha")
+ size = m.int("size")
+ } : GitHub.gitdata_blob
+ {some=res}
+ else {none}
+
+ get_gitdata_sha(srcmap) =
+ m = GP.map_funs(srcmap)
+ if m.exists("sha")
+ then
+ res = {
+ sha = m.str("sha")
+ } : GitHub.gitdata_sha
+ {some=res}
+ else {none}
+
+ get_gitdata_object(srcmap) =
+ m = GP.map_funs(srcmap)
+ if m.exists("type")
+ then
+ res = {
+ `type` = m.str("type")
+ sha = m.str("sha")
+ url = m.str("url")
+ } : GitHub.gitdata_object
+ {some=res}
+ else {none}
+
+ get_reference(srcmap) =
+ m = GP.map_funs(srcmap)
+ if m.exists("ref")
+ then
+ res = {
+ ref = m.str("ref")
+ url = m.str("url")
+ object = GP.get_rec(m, "object", get_gitdata_object)
+ } : GitHub.gitdata_reference
+ {some=res}
+ else {none}
+
+ get_tag(srcmap) =
+ m = GP.map_funs(srcmap)
+ if m.exists("ref")
+ then
+ res = {
+ tag = m.str("tag")
+ sha = m.str("sha")
+ url = m.str("url")
+ message = m.str("message")
+ tagger = GP.get_rec(m, "tagger", GP.get_commit_user_opt)
+ object = GP.get_rec(m, "object", get_gitdata_object)
+ } : GitHub.gitdata_tag
+ {some=res}
+ else {none}
+
+ get_tree_element(srcmap) =
+ m = GP.map_funs(srcmap)
+ if m.exists("path")
+ then
+ res = {
+ path = m.str("path")
+ mode = m.str("mode")
+ `type` = m.str("type")
+ size = m.int("size")
+ sha = m.str("sha")
+ url = m.str("url")
+ } : GitHub.gitdata_tree_element
+ {some=res}
+ else {none}
+
+ get_tree(srcmap) =
+ m = GP.map_funs(srcmap)
+ if m.exists("tree")
+ then
+ res = {
+ sha = m.str("sha")
+ url = m.str("url")
+ tree = List.filter_map(get_tree_element,m.list("tree"))
+ } : GitHub.gitdata_tree
+ {some=res}
+ else {none}
+
+ one_blob(res) = GP.dewrap_whole_obj(res, get_gitdata_blob)
+ one_sha(res) = GP.dewrap_whole_obj(res, get_gitdata_sha)
+
+ one_commit(res) = GP.dewrap_whole_obj(res, GP.get_commit_opt)
+
+ one_reference(res) = GP.dewrap_whole_obj(res, get_reference)
+ multiple_references(res) = GP.dewrap_whole_list(res, get_reference)
+
+ one_tag(res) = GP.dewrap_whole_obj(res, get_tag)
+
+ one_tree(res) = GP.dewrap_whole_obj(res, get_tree)
+
+}}
+
+// TODO: Big function to update a file in a repo, see http://developer.github.com/v3/git/
+
+GHGitData = {{
+
+ @private GP = GHParse
+
+ get_blob(token:string, user:string, repo:string, sha:string) =
+ GHLib.api_get_full("/repos/{user}/{repo}/git/blobs/{sha}", token, [], GHGDp.one_blob)
+
+ create_blob(token:string, encoding:GitHub.encoding, user:string, repo:string, content:string) =
+ json = GHLib.mkopts([{sreq=("content",content)},{rcst=("encoding",GHLib.string_of_encoding,encoding)}])
+ GHLib.api_post_string("/repos/{user}/{repo}/git/blobs", token, json, GHGDp.one_sha)
+
+ get_commit(token:string, user:string, repo:string, sha:string) =
+ GHLib.api_get_full("/repos/{user}/{repo}/git/commits/{sha}", token, [], GHGDp.one_commit)
+
+ @private commit_user_to_json(cu:option(GitHub.commit_user)) =
+ match cu with
+ | {some=cu} -> [("name","\"{cu.name}\""),("email","\"{cu.email}\""),("date","\"{GHParse.date_to_string(cu.date)}\"")]
+ | {none} -> []
+
+ create_commit(token:string, user:string, repo:string,
+ message:string, tree:string, parents:list(string),
+ author:option(GitHub.commit_user), committer:option(GitHub.commit_user)) =
+ json = GHLib.mkopts([{sreq=("message",message)},{obj=("author",commit_user_to_json(author))},
+ {obj=("committer",commit_user_to_json(committer))},{lst=("parents",parents)},
+ {sreq=("tree",tree)}])
+ GHLib.api_post_string("/repos/{user}/{repo}/git/commits", token, json, GHGDp.one_commit)
+
+ get_reference(token:string, user:string, repo:string, branch:string) =
+ GHLib.api_get_full("/repos/{user}/{repo}/git/refs/heads/{branch}", token, [], GHGDp.one_reference)
+
+ get_all_references(token:string, user:string, repo:string, subname:string) =
+ GHLib.api_get_full("/repos/{user}/{repo}/git/refs/{subname}", token, [], GHGDp.multiple_references)
+
+ create_reference(token:string, user:string, repo:string, ref:string, sha:string) =
+ json = GHLib.mkopts([{sreq=("ref",ref)},{sreq=("sha",sha)}])
+ GHLib.api_post_string("/repos/{user}/{repo}/git/refs", token, json, GHGDp.one_reference)
+
+ update_reference(token:string, user:string, repo:string, ref:string, sha:string, force:bool) =
+ json = GHLib.mkopts([{sreq=("sha",sha)},{breq=("force",force)}])
+ GHLib.api_post_string("/repos/{user}/{repo}/git/refs/{ref}", token, json, GHGDp.one_reference)
+
+ delete_reference(token:string, user:string, repo:string, ref:string) =
+ GHLib.api_delete_string("/repos/{user}/{repo}/git/refs/{ref}", token, "", GP.expect_204)
+
+ get_tag(token:string, user:string, repo:string, sha:string) =
+ GHLib.api_get_full("/repos/{user}/{repo}/git/refs/tags/{sha}", token, [], GHGDp.one_tag)
+
+ create_tag(token:string, user:string, repo:string,
+ tag:string, message:string, object:string, typ:string, tagger:option(GitHub.commit_user)) =
+ json = GHLib.mkopts([{sreq=("tag",tag)},{sreq=("message",message)},{sreq=("object",object)},
+ {sreq=("type",typ)},{obj=("tagger",commit_user_to_json(tagger))}])
+ GHLib.api_post_string("/repos/{user}/{repo}/git/tags", token, json, GHGDp.one_tag)
+
+ get_tree(token:string, user:string, repo:string, sha:string, recursively:bool) =
+ data = if recursively then [("recursive","1")] else []
+ GHLib.api_get_full("/repos/{user}/{repo}/git/trees/{sha}", token, data, GHGDp.one_tree)
+
+ @private string_of_btc(btc:GitHub.gitdata_btc) =
+ match btc with
+ | {blob} -> "blob"
+ | {tree} -> "tree"
+ | {commit} -> "commit"
+
+ @private string_of_sha_or_content(soc:GitHub.gitdata_sha_or_content) =
+ match soc with
+ | {~sha} -> ("sha",sha)
+ | {~content} -> ("content",content)
+
+ @private tree_param_to_json(tp:GitHub.gitdata_tree_element_param) =
+ GHLib.mksobj([("path",tp.path),
+ ("mode",tp.mode),
+ ("type",string_of_btc(tp.`type`)),
+ string_of_sha_or_content(tp.sha_or_content)])
+
+ create_tree(token:string, user:string, repo:string, base_tree:option(string), tree:GitHub.gitdata_tree_param) =
+ json = GHLib.mkopts([{sopt=("base_tree",base_tree)},{lst=("tree",List.map(tree_param_to_json,tree))}])
+ GHLib.api_post_string("/repos/{user}/{repo}/git/trees", token, json, GHGDp.one_tree)
+
+}}
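
Usage sketch (not part of this commit): the low-level Git data calls compose naturally, for example creating a blob and then reading a tree. The owner, repo and sha below are placeholders.

    // Sketch only: create a UTF-8 blob and fetch a tree recursively
    // ("opalang", "opa" and "0a1b2c3d" are placeholder values).
    demo_git_data(token) =
      blob = GHGitData.create_blob(token, {utf8}, "opalang", "opa", "file contents")
      tree = GHGitData.get_tree(token, "opalang", "opa", "0a1b2c3d", true)
      (blob, tree)
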
374 lib/stdlib/apis/github/issue/issue.opa
@@ -21,183 +21,259 @@
* @destination public
*/
-//package stdlib.apis.github.issue
+package stdlib.apis.github.issue
import stdlib.apis.github
import stdlib.apis.github.lib
/* Types returned by API */
-type GitHub.issue = {
- user : string
- gravatar_id : string
- created_at : Date.date
- updated_at : Date.date
- state : {open}/{closed}
- number : int
- votes : int
- position : float
- title : string
- body : string
- labels : list(string)
+type GitHub.issue_event = {
+ url : string
+ actor : GitHub.short_user
+ event : string
+ commit_id : string
+ created_at : Date.date
+ issue : option(GitHub.issue)
}
-type GitHub.issue_comment = {
- id : int
- user : string
- gravatar_id : string
- created_at : Date.date
- updated_at : Date.date
- body : string
+type GitHub.issue_params = {
+ filter : {assigned} / {created} / {mentioned} / {subscribed}
+ state : {open} / {closed}
+ labels : list(string)
+ sort : {created} / {updated} / {comments}
+ direction : {asc} / {desc}
+ since : option(Date.date)
}
-@private GHIp = {{
+type GitHub.valnonestar('a) = {val:'a} / {none} / {star}
- state_to_string(s) =
- match s : {open}/{closed} with
- | {open} -> "open" | {closed} -> "closed"
+@private GHIp = {{
@private GP = GHParse
- get_issue(srcmap) =
+ get_event(srcmap) =
m = GP.map_funs(srcmap)
- if m.exists("title") then
- state = match m.str("state") with
- | "closed" -> {closed}
- | "open" | _ -> {open}
- labels = List.filter_map(
- v -> match v:RPC.Json.json with
- | {String=s} -> some(s) | _ -> none,
- m.list("labels"))
+ if m.exists("event") then
res = {
- user = m.str("user")
- gravatar_id = m.str("gravatar_id")
- created_at = m.date("created_at")
- updated_at = m.date("updated_at")
- state = state
- number = m.int("number")
- votes = m.int("votes")
- position = m.float("position")
- title = m.str("title")
- body = m.str("body")
- labels = labels
- } : GitHub.issue
+ url = m.str("url")
+ actor = GP.get_rec(m, "actor", GP.get_short_user)
+ event = m.str("event")
+ commit_id = m.str("commit_id")
+ created_at = m.date("created_at")
+ issue = GP.get_rec(m, "issue", GP.get_issue)
+ } : GitHub.issue_event
some(res)
else none
- get_comment(srcmap) =
- m = GP.map_funs(srcmap)
- if m.exists("id") then
- res = {
- id = m.int("id")
- user = m.str("user")
- gravatar_id = m.str("gravatar_id")
- created_at = m.date("created_at")
- updated_at = m.date("updated_at")
- body = m.str("body")
- } : GitHub.issue_comment
- some(res)
- else none
+ one_issue(res) = GP.dewrap_whole_obj(res, GP.get_issue)
+ multiple_issues(res) = GP.dewrap_whole_list(res, GP.get_issue)
- one_issue(res) =
- GP.dewrap_obj(res, "issue", get_issue)
+ one_issue_comment(res) = GP.dewrap_whole_obj(res, GP.get_issue_comment)
+ multiple_comments(res) = GP.dewrap_whole_list(res, GP.get_issue_comment)
- multiple_issues(res) =
- GP.dewrap_list(res, "issues", get_issue)
+ one_event(res) = GP.dewrap_whole_obj(res, get_event)
+ multiple_events(res) = GP.dewrap_whole_list(res, get_event)
- one_comment(res) =
- GP.dewrap_obj(res, "comment", get_comment)
+ one_label(res) = GP.dewrap_whole_obj(res, GP.get_label)
+ multiple_labels(res) = GP.dewrap_whole_list(res, GP.get_label)
- comments(res) =
- GP.dewrap_list(res, "comments", get_comment)
+ one_milestone(res) = GP.dewrap_whole_obj(res, GP.get_milestone)
+ multiple_milestones(res) = GP.dewrap_whole_list(res, GP.get_milestone)
labels(res) = GP.multiple_strings(res, "labels")
}}
+type GitHub.issue_filter = {assigned} / {created} / {mentioned} / {subscribed}
+type GitHub.issue_sort = {created} / {updated} / {comments}
+type GitHub.issue_sort2 = {due_date} / {completeness}
+
GHIssue = {{
- @private p(o:string,r:string) = "{o}/{r}"
-
- search(owner, repo, state, query:string) =
- st = GHIp.state_to_string(state)
- path = "/issues/search/{p(owner,repo)}/{st}/{query}"
- GHLib.api_get(path, [], GHIp.multiple_issues)
-
- list_by_state(owner, repo, state) =
- st = GHIp.state_to_string(state)
- path = "/issues/list/{p(owner,repo)}/{st}"
- GHLib.api_get(path, [], GHIp.multiple_issues)
-
- list_by_label(owner, repo, label:string) =
- path = "/issues/list/{p(owner,repo)}/label/{label}"
- GHLib.api_get(path, [], GHIp.multiple_issues)
-
- get_issue(owner, repo, number:int) =
- path = "/issues/show/{p(owner,repo)}/{number}"
- GHLib.api_get(path, [], GHIp.one_issue)
-
- /**
- * Opens a new issue
- *
- * @param owner Owner of the targeted repo
- * @param repo Targeted repo
- * @param title Title of the new issue
- * @param body Body of the new issue
- * @param token Access token of user
- */
- open_issue(owner, repo, title, body, token) =
- path = "/issues/open/{p(owner,repo)}"
- data = [("access_token",token),
- ("title",title), ("body",body)]
- GHLib.api_post(path, data, GHIp.one_issue)
-
- /**
- * Edits an existing issue
- *
- * @param owner Owner of the targeted repo
- * @param repo Targeted repo
- * @param number ID number of the issue
- * @param title New title of the issue (leave blanck to keep old title)
- * @param body New body of the issue (leave blanck to keep old body)
- * @param token Access token of user
- */
- edit_issue(owner, repo, number:int, title, body, token) =
- path = "/issues/edit/{p(owner,repo)}/{number}"
- data = [("access_token",token),
- ("title",title), ("body",body)]
- GHLib.api_post(path, data, GHIp.one_issue)
-
- close_issue(owner, repo, number:int, token) =
- path = "/issues/close/{p(owner,repo)}/{number}"
- data = [("access_token",token)]
- GHLib.api_post(path, data, GHIp.one_issue)
-
- reopen_issue(owner, repo, number:int, token) =
- path = "/issues/reopen/{p(owner,repo)}/{number}"
- data = [("access_token",token)]
- GHLib.api_post(path, data, GHIp.one_issue)
-
- list_project_labels(owner, repo) =
- path = "/issues/labels/{p(owner,repo)}"
- GHLib.api_get(path, [], GHIp.labels)
-
- add_label(owner, repo, label:string, number:int, token) =
- path = "/issues/label/add/{p(owner,repo)}/{label}/{number}"
- data = [("access_token",token)]
- GHLib.api_post(path, data, GHIp.labels)
-
- remove_label(owner, repo, label:string, number:int, token) =
- path = "/issues/label/remove/{p(owner,repo)}/{label}/{number}"
- data = [("access_token",token)]
- GHLib.api_post(path, data, GHIp.labels)
-
- get_issue_comments(owner, repo, number:int) =
- path = "/issues/comments/{p(owner,repo)}/{number}"
- GHLib.api_get(path, [], GHIp.comments)
-
- comment(owner, repo, number:int, comment, token) =
- path = "/issues/comment/{p(owner,repo)}/{number}"
- data = [("access_token",token), ("comment",comment)]
- GHLib.api_post(path, data, GHIp.one_comment)
+ @private GP = GHParse
+
+ @private opt_valnonestar(name:string, vns:option(GitHub.valnonestar('a))) =
+ match vns with
+ | {some={~val}} -> [(name,"{val}")]
+ | {some={none}} -> [(name,"none")]
+ | {some={star}} -> [(name,"*")]
+ | {none} -> []
+
+ @private opt_filter(filter) =
+ match filter with
+ | {some=filter} ->
+ filter =
+ match filter with
+ | {assigned} -> "assigned"
+ | {created} -> "created"
+ | {mentioned} -> "mentioned"
+ | {subscribed} -> "subscribed"
+ end
+ [("filter",filter)]
+ | _ -> []
+
+ @private opt_labels(labels) =
+ match labels with
+ | [_|_] -> [("labels",String.concat(",",labels))]
+ | _ -> []
+
+ @private opt_sort(sort) =
+ match sort with
+ | {some=sort} ->
+ sort =
+ match sort with
+ | {created} -> "created"
+ | {updated} -> "updated"
+ | {comments} -> "comments"
+ end
+ [("sort",sort)]
+ | _ -> []
+
+ @private opt_sort2(sort) =
+ match sort with
+ | {some=sort} ->
+ sort =
+ match sort with
+ | {due_date} -> "due_date"
+ | {completeness} -> "completeness"
+ end
+ [("sort",sort)]
+ | _ -> []
+
+ @private opt_since(since) =
+ match since with
+ | {some=since} -> [("since",GHParse.date_to_string(since))]
+ | _ -> []
+
+ @private opt_str(name:string, str:option(string)) =
+ match str with
+ | {some=str} -> [(name,str)]
+ | {none} -> []
+
+ list_issues(token:string,
+ filter:option(GitHub.issue_filter), state:option(GitHub.state), labels:list(string),
+ sort:option(GitHub.issue_sort), direction:option(GitHub.direction), since:option(Date.date)) =
+ options =
+ List.flatten([opt_filter(filter),GHLib.opt_state(state),opt_labels(labels),
+ opt_sort(sort),GHLib.opt_direction(direction),opt_since(since)])
+ GHLib.api_get_full("/issues", token, options, GHIp.multiple_issues)
+
+ list_issues_repo(token:string, user:string, repo:string,
+ milestone:option(GitHub.valnonestar(int)), state:option(GitHub.state),
+ assignee:option(GitHub.valnonestar(string)), mentioned:option(string), labels:list(string),
+ sort:option(GitHub.issue_sort), direction:option(GitHub.direction), since:option(Date.date)) =
+ options =
+ List.flatten([opt_valnonestar("milestone",milestone),GHLib.opt_state(state),opt_valnonestar("assignee",assignee),
+ opt_str("mentioned",mentioned),opt_labels(labels),opt_sort(sort),GHLib.opt_direction(direction),
+ opt_since(since)])
+ GHLib.api_get_full("/repos/{user}/{repo}/issues", token, options, GHIp.multiple_issues)
+
+ get_issue(token:string, user:string, repo:string, number:int) =
+ GHLib.api_get_full("/repos/{user}/{repo}/issues/{number}", token, [], GHIp.one_issue)
+
+ create_issue(token:string, user:string, repo:string,
+ title:string, body:option(string), assignee:option(string), milestone:option(int), labels:list(string)) =
+ json = GHLib.mkopts([{sreq=("title",title)},{sopt=("body",body)},{sopt=("assignee",assignee)},
+ {iopt=("milestone",milestone)},{lst=("labels",labels)}])
+ GHLib.api_post_string("/repos/{user}/{repo}/issues", token, json, GHIp.one_issue)
+
+ edit_issue(token:string, user:string, repo:string,
+ title:option(string), body:option(string), assignee:option(string),
+ state:option(GitHub.state), milestone:option(int), labels:list(string)) =
+ json = GHLib.mkopts([{sopt=("title",title)},{sopt=("body",body)},{sopt=("assignee",assignee)},
+ {ocst=("state",GHLib.string_of_state,state)},{iopt=("milestone",milestone)},{lst=("labels",labels)}])
+ GHLib.api_patch_string("/repos/{user}/{repo}/issues", token, json, GHIp.one_issue)
+
+ list_assignees(token:string, user:string, repo:string) =
+ GHLib.api_get_full("/repos/{user}/{repo}/assignees", token, [], GP.multiple_short_users)
+
+ check_assignee(token:string, user:string, repo:string, assignee:string) =
+ GHLib.api_get_full("/repos/{user}/{repo}/assignees/{assignee}", token, [], GP.expect_204_404)
+
+ list_comments(token:string, user:string, repo:string, number:int) =
+ GHLib.api_get_full("/repos/{user}/{repo}/issues/{number}/comments", token, [], GHIp.multiple_comments)
+
+ get_comment(token:string, user:string, repo:string, id:int) =
+ GHLib.api_get_full("/repos/{user}/{repo}/issues/comments/{id}", token, [], GHIp.one_issue_comment)
+
+ create_comment(token:string, user:string, repo:string, number:int, body:string) =
+ json = GHLib.mkopts([{sreq=("body",body)}])
+ GHLib.api_post_string("/repos/{user}/{repo}/issues/{number}/comments", token, json, GHIp.one_issue_comment)
+
+ edit_comment(token:string, user:string, repo:string, body:string, id:int) =
+ json = GHLib.mkopts([{sreq=("body",body)}])
+ GHLib.api_post_string("/repos/{user}/{repo}/issues/comments/{id}", token, json, GHIp.one_issue_comment)
+
+ delete_comment(token:string, user:string, repo:string, id:int) =
+ GHLib.api_delete_string("/repos/{user}/{repo}/issues/comments/{id}", token, "", GP.expect_204)
+
+ list_issue_events(token:string, user:string, repo:string, issue_number:int) =
+ GHLib.api_get_full("/repos/{user}/{repo}/issues/{issue_number}/events", token, [], GHIp.multiple_events)
+
+ list_repo_events(token:string, user:string, repo:string) =
+ GHLib.api_get_full("/repos/{user}/{repo}/issues/events", token, [], GHIp.multiple_events)
+
+ get_event(token:string, user:string, repo:string, id:int) =
+ GHLib.api_get_full("/repos/{user}/{repo}/issues/events/{id}", token, [], GHIp.one_event)
+
+ list_labels(token:string, user:string, repo:string) =
+ GHLib.api_get_full("/repos/{user}/{repo}/labels", token, [], GHIp.multiple_labels)
+
+ get_label(token:string, user:string, repo:string, name:string) =
+ GHLib.api_get_full("/repos/{user}/{repo}/labels/{name}", token, [], GHIp.one_label)
+
+ create_label(token:string, user:string, repo:string, name:string, color:string) =
+ json = GHLib.mkopts([{sreq=("name",name)},{sreq=("color",color)}])
+ GHLib.api_post_string("/repos/{user}/{repo}/labels", token, json, GHIp.one_label)
+
+ update_label(token:string, user:string, repo:string, name:string, color:string) =
+ json = GHLib.mkopts([{sreq=("name",name)},{sreq=("color",color)}])
+ GHLib.api_patch_string("/repos/{user}/{repo}/labels/{name}", token, json, GHIp.one_label)
+
+ delete_label(token:string, user:string, repo:string, name:string) =
+ GHLib.api_delete_string("/repos/{user}/{repo}/labels/{name}", token, "", GP.expect_204)
+
+ list_issue_labels(token:string, user:string, repo:string, number:int) =
+ GHLib.api_get_full("/repos/{user}/{repo}/issues/{number}/labels", token, [], GHIp.multiple_labels)
+
+ add_issue_labels(token:string, user:string, repo:string, number:int, labels:list(string)) =
+ json = GHLib.mkslst(labels)
+ GHLib.api_post_string("/repos/{user}/{repo}/issues/{number}/labels", token, json, GHIp.multiple_labels)
+
+ remove_issue_label(token:string, user:string, repo:string, number:int, name:string) =
+ GHLib.api_delete_string("/repos/{user}/{repo}/issues/{number}/labels/{name}", token, "", GHIp.multiple_labels)
+
+ replace_issue_labels(token:string, user:string, repo:string, number:int, labels:list(string)) =
+ json = GHLib.mkslst(labels)
+ GHLib.api_put_string("/repos/{user}/{repo}/issues/{number}/labels", token, json, GHIp.multiple_labels)
+
+ remove_all_issue_labels(token:string, user:string, repo:string, number:int) =
+ GHLib.api_delete_string("/repos/{user}/{repo}/issues/{number}/labels", token, "", GP.expect_204)
+
+ get_milestone_issue_labels(token:string, user:string, repo:string, number:int) =
+ GHLib.api_get_full("/repos/{user}/{repo}/milestones/{number}/labels", token, [], GHIp.multiple_labels)
+
+ list_milestones(token:string, user:string, repo:string,
+ state:option(GitHub.state), sort:option(GitHub.issue_sort2), direction:option(GitHub.direction)) =
+ options = List.flatten([GHLib.opt_state(state),opt_sort2(sort),GHLib.opt_direction(direction)])
+ GHLib.api_get_full("/repos/{user}/{repo}/milestones", token, options, GHIp.multiple_issues)
+
+ get_milestone(token:string, user:string, repo:string, number:int) =
+ GHLib.api_get_full("/repos/{user}/{repo}/milestones/{number}", token, [], GHIp.one_milestone)
+
+ create_milestone(token:string, user:string, repo:string,
+ title:string, state:option(GitHub.state), description:option(string), due_on:option(Date.date)) =
+ json = GHLib.mkopts([{sreq=("title",title)},{ocst=("state",GHLib.string_of_state,state)},
+ {sopt=("description",description)},{sopt=("due_on",Option.map(GHParse.date_to_string,due_on))}])
+ GHLib.api_post_string("/repos/{user}/{repo}/milestones", token, json, GHIp.one_milestone)
+
+ update_milestone(token:string, user:string, repo:string, number:int,
+ title:option(string), state:option(GitHub.state), description:option(string), due_on:option(Date.date)) =
+ json = GHLib.mkopts([{sopt=("title",title)},{ocst=("state",GHLib.string_of_state,state)},
+ {sopt=("description",description)},{sopt=("due_on",Option.map(GHParse.date_to_string,due_on))}])
+ GHLib.api_patch_string("/repos/{user}/{repo}/milestones/{number}", token, json, GHIp.one_milestone)
+
+ delete_milestone(token:string, user:string, repo:string, number:int) =
+ GHLib.api_delete_string("/repos/{user}/{repo}/milestones/{number}", token, "", GP.expect_204)
}}
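
Usage sketch (not part of this commit): the rewritten issue module takes explicit option and list arguments instead of the old url-encoded form data. The owner, repo and issue number below are placeholders.

    // Sketch only: open an issue, then add a label to issue number 1
    // ("opalang", "opa" and the issue number are placeholder values).
    demo_issue(token) =
      created  = GHIssue.create_issue(token, "opalang", "opa", "Found a bug",
                                      {some="Steps to reproduce go here"}, {none}, {none}, ["bug"])
      labelled = GHIssue.add_issue_labels(token, "opalang", "opa", 1, ["confirmed"])
      (created, labelled)
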
1,016 lib/stdlib/apis/github/lib/lib.opa
@@ -21,7 +21,7 @@
* @destination private
*/
-//package stdlib.apis.github.lib
+package stdlib.apis.github.lib
import stdlib.apis.common
import stdlib.apis.github
@@ -39,88 +39,230 @@ type GHParse.map = {
GHLib = {{
@private AL = API_libs
- @private host = "https://github.com/api/v2/json"
+ @private host = "https://api.github.com"
+
+ string_of_encoding(encoding:GitHub.encoding) =
+ match encoding with
+ | {utf8} -> "utf-8"
+ | {base64} -> "base64"
+
+ encoding_of_string(str:string) : GitHub.encoding =
+ match str with
+ | "utf8" -> {utf8}
+ | "base64" -> {base64}
+ | _ -> {utf8}
+
+ string_of_state(s:GitHub.state) =
+ match s with
+ | {open} -> "open"
+ | {closed} -> "closed"
+
+ string_of_direction(direction:GitHub.direction) =
+ match direction with
+ | {asc} -> "asc"
+ | {desc} -> "desc"
generic_build_path(path, options) =
- if options == [] then path
+ if options == []
+ then path
else "{path}?{API_libs.form_urlencode(options)}"
+ get_final_path(base, path, token, options) =
+ match token with
+ | "" -> generic_build_path("{base}{path}", options)
+ | _ -> generic_build_path("{base}{path}", options++[("access_token",token)])
+
+ userpath(user:GitHub.user_id,what) =
+ what = if what == "" then "" else "/{what}"
+ match user with
+ | ~{self} -> ("/user{what}", [("access_token", self)])
+ | ~{login} -> ("/users/{login}{what}", [])
+
add_if(k,v,l) =
if v == "" then l else List.add((k, v), l)
+ mkopt(name,raw) = "\"{name}\":{raw}"
+ mksopt(name,opt:string) = "\"{name}\":\"{opt}\""
+ mkiopt(name,opt:int) = "\"{name}\":{opt}"
+ mkbopt(name,opt:bool) = "\"{name}\":{opt}"
+ mksub(l:list((string,string))) = String.concat(",",List.map(((n,v) -> mkopt(n,v)),l))
+ mkssub(l:list((string,string))) = String.concat(",",List.map(((n,v) -> mksopt(n,v)),l))
+ mksobj(l:list((string,string))) = List.to_string_using("\{","}",",",List.map(((n,v) -> mksopt(n,v)),l))
+ mklst(l:list(string),mk) = List.to_string_using("[","]",",",List.map((e -> mk(e)),l))
+ mkslst(l) = mklst(l,(s -> "\"{s}\""))
+
+ mkopts(opts) =
+ do_lst(n,l,mk) =
+ match l with
+ | [] -> {none}
+ | _ -> {some="\"{n}\":[{String.concat(",",List.map((e -> mk(e)),l))}]"}
+ end
+ do_obj(n,o,mk) =
+ match o with
+ | [] -> {none}
+ | _ -> {some="\"{n}\":\{{String.concat(",",List.map(((n,v) -> mk(n,v)),o))}}"}
+ end
+ List.to_string_using("\{","}",",",
+ List.filter_map((opt ->
+ match opt with
+ | {req=(n,r)} -> {some=mkopt(n,r)}
+ | {sreq=(n,v)} -> {some=mksopt(n,v)}
+ | {ireq=(n,v)} -> {some=mkiopt(n,v)}
+ | {breq=(n,v)} -> {some=mkbopt(n,v)}
+ | {rcst=(n,f,v)} -> {some=mksopt(n,f(v))}
+ | {opt=(n,o)} -> Option.map((v -> mkopt(n,v)),o)
+ | {sopt=(n,o)} -> Option.map((v -> mksopt(n,v)),o)
+ | {iopt=(n,o)} -> Option.map((v -> mkiopt(n,v)),o)
+ | {bopt=(n,o)} -> Option.map((v -> mkbopt(n,v)),o)
+ | {ocst=(n,f,o)} -> Option.map((v -> mksopt(n,f(v))),o)
+ | {lst=(n,l)} -> do_lst(n,l,(v -> v))
+ | {slst=(n,l)} -> do_lst(n,l,(s -> "\"{s}\""))
+ | {obj=(n,o)} -> do_obj(n,o,mkopt)
+ ),opts))
+
+ opt_generic(string_of:'a -> string,name:string,opt:option('a)) : list((string,string)) =
+ match opt with
+ | {some=opt} -> [(name,string_of(opt))]
+ | _ -> []
+ opt_string = opt_generic((s -> s),_,_)
+ opt_int = opt_generic(Int.to_string,_,_)
+ opt_bool = opt_generic(Bool.to_string,_,_)
+ opt_state = opt_generic(string_of_state,"state",_)
+ opt_direction = opt_generic(string_of_direction,"direction",_)
+
/* Temporary duplicate while parse functions are built */
- api_get_full(full_path, data, parse_fun) =
- final_path = generic_build_path(full_path, data)
- //do jlog("GET {final_path}")
+ @private api_get_full_(base, path, token, options, parse_fun) =
+ final_path = get_final_path(base, path, token, options)
+ do Log.info("github","GET {final_path}")
match Uri.of_string(final_path) with
| {none} -> none
| {some=uri} ->
match WebClient.Get.try_get(uri) with
| {failure=_} -> none
| {success=s} ->
- //do jlog(s.content)
- parse_fun(s.content)
+ do Log.info("github","code={s.code} content={if s.content == "" then "none" else s.content}")
+ parse_fun(s)
end
+ api_get_full(path, token, options, parse_fun) = api_get_full_(host, path, token, options, parse_fun)
+ api_get(path:string, options, parse_fun) = api_get_full_(host, path, "", options, parse_fun)
+
+ mtjson = "application/json"
+ mtform = "application/x-www-form-urlencoded"
- api_get(path:string, data, parse_fun) =
- full_path = "{host}{path}"
- api_get_full(full_path, data, parse_fun)
-
- /**
- * Shortcut for GET requests with only a token
- */
- api_get_logged(path, token, parse_fun) =
- data = if token == "" then []
- else [("access_token", token)]
- api_get(path, data, parse_fun)
-
- full_post(base, path, data, parse_fun) =
- txtdata = AL.form_urlencode(data)
- //do jlog("POST {txtdata} on {base}{path}")
- match Uri.of_string("{base}{path}") with
+ @private full_post_string(base, path, token, mimetype, data, parse_fun) =
+ final_path = get_final_path(base, path, token, [])
+ match Uri.of_string(final_path) with
| {none} -> none
| {some=uri} ->
- match WebClient.Post.try_post(uri,txtdata) with
+ do Log.info("github","POST {data} on {uri}")
+ options = { WebClient.Post.default_options with ~mimetype content={some=data} }
+ match WebClient.Post.try_post_with_options(uri,options) with
| {failure=_} -> none
| {success=s} ->
- //do jlog(s.content)
- parse_fun(s.content)
+ do Log.info("github","code={s.code} content={if s.content == "" then "none" else s.content}")
+ parse_fun(s)
end
-
+ full_post(base, path, data, parse_fun) = full_post_string(base, path, "", mtform, AL.form_urlencode(data), parse_fun)
+ full_post_forms(base, path, token, forms, parse_fun) =
+ bound = Random.string(20)
+ content_type = "multipart/form-data; boundary={bound}"
+ forms = List.map((f ->
+ match f with
+ | {form=(name, content)} ->
+ "--{bound}\r\nContent-Disposition: form-data; name=\"{name}\"\r\n\r\n{content}\r\n"
+ | {file=(name, filename, content_type, content)} ->
+ "--{bound}\r\nContent-Disposition: form-data; name=\"{name}\"; filename=\"{filename}\"\r\nContent-Type={content_type}\r\n\r\n{content}\r\n"
+ ),forms)
+ content = String.concat("",List.append(forms,["--{bound}--\r\n"]))
+ full_post_string(base, path, token, content_type, content, parse_fun)
api_post = full_post(host, _, _, _)
+ api_post_string = full_post_string(host, _, _, mtform, _, _)
- /**
- * Shortcut for POST requests with only a token
- */
- api_post_logged(path, token, parse_fun) =
- data = [("access_token", token)]
- api_post(path, data, parse_fun)
+ @private full_put_string(base, path, token, data, parse_fun) =
+ final_path = get_final_path(base, path, token, [])
+ match Uri.of_string(final_path) with
+ | {none} -> none
+ | {some=uri} ->
+ do Log.info("github","PUT {data} on {uri}")
+ match WebClient.Put.try_put(uri,data) with
+ | {failure=_} -> none
+ | {success=s} ->
+ do Log.info("github","code={s.code} content={if s.content == "" then "none" else s.content}")
+ parse_fun(s)
+ end
+ full_put(base, path, data, parse_fun) = full_put_string(base, path, "", AL.form_urlencode(data), parse_fun)
+ api_put = full_put(host, _, _, _)
+ api_put_string = full_put_string(host, _, _, _, _)
- /* For v3 api */
+ @private full_delete_string(base, path, token, data:string, parse_fun) =
+ final_path = get_final_path(base, path, token, [])
+ match Uri.of_string(final_path) with
+ | {none} -> none
+ | {some=uri} ->
+ do Log.info("github","DELETE {data} on {uri}")
+ // IMPORTANT: this is needed because the node.js client inserts "Transfer-Encoding: chunked"
+ // into the headers for DELETE, which confuses the GitHub server.
+ options = { default_options("DELETE") with custom_headers=["Transfer-Encoding: identity"] }
+ match try_delete_content(uri,data,options) with
+ | {failure=_} -> none
+ | {success=s} ->
+ do Log.info("github","code={s.code} content={if s.content == "" then "none" else s.content}")
+ parse_fun(s)
+ end
+ full_delete(base, path, data, parse_fun) = full_delete_string(base, path, "", AL.form_urlencode(data), parse_fun)
+ api_delete = full_delete(host, _, _, _)
+ api_delete_string = full_delete_string(host, _, _, _, _)
+
+ // WebClient doesn't have PATCH, yet
+ @private default_options(op) : WebClient.Generic.options =
+ { operation = op
+ auth = {none}
+ custom_headers = []
+ custom_agent = {none}
+ redirect = {none}
+ timeout_sec = {some = 36.}
+ ssl_key = {none}
+ ssl_policy = {none}
+ }
+ @private try_generic(op:string, location:Uri.uri, content:string): WebClient.result(string) =
+ try_generic_with_options(op, location, content, default_options(op))
+ @private try_generic_with_options(op:string, location:Uri.uri,
+ content:string, options:WebClient.Generic.options): WebClient.result(string) =
+ @callcc(k ->
+ on_result(x) = Continuation.return(k, x)
+ try_generic_with_options_async(op, location, content, options, on_result))
+ @private try_generic_with_options_async(op:string, location:Uri.uri, content:string, options:WebClient.Generic.options,
+ on_result: WebClient.result(string) -> void): void =
+ generic_options = { options with
+ operation = op
+ custom_headers = ["Content-Length: {String.length(content)}"]++options.custom_headers
+ }
+ on_success(x) = on_result({success = x})
+ on_failure(x) = on_result({failure = x})
+ WebClient.Generic.try_request_with_options_async(location, op, generic_options, {some=content}, on_success, on_failure)
- @private basev3 = "https://api.github.com"
+ try_patch(location, content) = try_generic("PATCH", location, content)
+ try_delete_content(location, content, options) = try_generic_with_options("DELETE", location, content, options)
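+ // Usage sketch (uri and fields are hypothetical):
+ //   body = {Record=[("title", {String="new title"})]} : RPC.Json.json |> Json.serialize
+ //   try_patch(uri, body)
+ // Both helpers go through WebClient.Generic with an explicit Content-Length header;
+ // try_delete_content additionally takes options so a DELETE carrying a body can override headers.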
- v3_put(path, data, token:option(string), parse_fun) =
- txtdata =
- {Record=data}:RPC.Json.json |> Json.serialize
- //do jlog("PUT {txtdata} on {basev3}{path}")
- match Uri.of_string("{basev3}{path}") with
+ @private full_patch_string(base, path, token, data:string, parse_fun) =
+ final_path = get_final_path(base, path, token, [])
+ match Uri.of_string(final_path) with
| {none} -> none
| {some=uri} ->
- options =
- { WebClient.Put.default_options with
- auth = Option.map(x->"token {x}", token) }
- match WebClient.Put.try_put_with_options(uri,txtdata,options) with
+ do Log.info("github","PATCH {data} on {uri}")
+ match try_patch(uri,data) with
| {failure=_} -> none
| {success=s} ->
- //do jlog(s.content)
- parse_fun(s.content)
+ do Log.info("github","code={s.code} content={if s.content == "" then "none" else s.content}")
+ parse_fun(s)
end
+ full_patch(base, path, data, parse_fun) = full_patch_string(base, path, "", AL.form_urlencode(data), parse_fun)
+ api_patch = full_patch(host, _, _, _)
+ api_patch_string = full_patch_string(host, _, _, _, _)
}}
-
GHParse = {{
parse_date(str) =
@@ -155,6 +297,18 @@ GHParse = {{
do_shift(true,8,0)(Date.build({year=y month=m day=d}))
Parser.try_parse(p, str)
+ @private tzfmt = Date.generate_printer("%z")
+ @private fmt = Date.generate_printer("%FT%T")
+
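+ // Formats a date as e.g. "2012-09-19T10:36:32+02:00" (example value): the "%z" printer emits
+ // the zone offset without a colon ("+0200" or "0200"), so one is spliced in before the minutes.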
+ date_to_string(d:Date.date) =
+ tz = Date.to_formatted_string(tzfmt, d)
+ tz =
+ match String.length(tz) with
+ | 4 -> String.sub(0,2,tz)^":"^String.sub(2,2,tz)
+ | 5 -> String.sub(0,3,tz)^":"^String.sub(3,2,tz)
+ | _ -> tz
+ "{Date.to_formatted_string(fmt, d)}{tz}"
+
map_funs(srcmap) =
map = JsonOpa.record_fields(srcmap) ? Map.empty
str = API_libs_private.map_get_string(_, map)
@@ -174,8 +328,42 @@ GHParse = {{
map = JsonOpa.record_fields(srcmap) ? Map.empty
{some=Map.fold(aux,map,[])}
- dewrap_obj(res, main_key, process) =
- match Json.of_string(res) with
+ add_option(o, l) = match o with | {some=e} -> [e|l] | {none} -> l
+
+ get_str_opt(json) =
+ match json with
+ | {String=s} -> {some=s}
+ | _ -> {none}
+
+ get_rec_opt(m:GHParse.map, name, get) =
+ if m.exists(name)
+ then
+ match m.record(name) with
+ | {Record=[]} -> none
+ | {Record=r} -> {some=get({Record=r})}
+ | _ -> none
+ else none
+
+ get_rec(m:GHParse.map, name, get) = get(m.record(name))
+
+ get_list(m:GHParse.map, name, get) = List.fold((r, acc -> add_option(get(r),acc)),m.list(name),[])
+
+ get_raw(m:GHParse.map, name) =
+ match m.record(name) with
+ | {Record=[]} -> []
+ | {Record=r} -> r
+ | _ -> []
+
+ xhtml(res:WebClient.success(string)) = {some=Xhtml.of_string(res.content)}
+
+ dewrap_whole_obj(res:WebClient.success(string), process) =
+ match Json.of_string(res.content) with
+ | {some={Record=r}} -> process({Record=r})
+ | _ -> {none}
+
+ // Still needed - see legacy search functions
+ dewrap_obj(res:WebClient.success(string), main_key, process) =
+ match Json.of_string(res.content) with
| {some={Record=r}} ->
match List.assoc(main_key, r) with
| {some={Record=o}} -> process({Record=o})
@@ -183,11 +371,21 @@ GHParse = {{
end
| _ -> {none}
- dewrap_list(res, main_key, process) =
+ dewrap_whole_list(res:WebClient.success(string), process) =
+ aux(e, acc) =
+ match process(e) with
+ | {some=s} -> List.add(s,acc)
+ | _ -> acc
+ match Json.of_string(res.content) with
+ | {some={List=l}} -> {some=List.fold(aux,l,[])}
+ | _ -> {none}
+
+ // Still needed - see legacy search functions
+ dewrap_list(res:WebClient.success(string), main_key, process) =
aux(e, acc) =
match process(e) with
| {some=s} -> List.add(s,acc) | _ -> acc
- match Json.of_string(res) with
+ match Json.of_string(res.content) with
| {some={Record=r}} ->
match List.assoc(main_key, r) with
| {some={List=l}} -> {some=List.fold(aux,l,[])}
@@ -195,13 +393,29 @@ GHParse = {{
end
| _ -> {none}
- multiple_strings(res, main_key) =
- process(e) = match e with
- | {String=s} -> {some=s} | _ -> {none}
- dewrap_list(res, main_key, process)
+ multiple_strings(res:WebClient.success(string), _) =
+ process(e) =
+ match e with
+ | {String=s} -> {some=s}
+ | _ -> {none}
+ dewrap_whole_list(res, process)
+
+ list_strings(res:WebClient.success(string)) =
+ process(e) =
+ match e with
+ | {String=s} -> {some=s}
+ | _ -> {none}
+ dewrap_whole_list(res, process)
+
+ multiple_objects(res:WebClient.success(string), process) =
+ process_el(e) =
+ match e with
+ | {Record=r} -> process({Record=r})
+ | _ -> {none}
+ dewrap_whole_list(res, process_el)
- one_string(res, main_key) =
- match Json.of_string(res) with
+ one_string(res:WebClient.success(string), main_key) =
+ match Json.of_string(res.content) with
| {some={Record=r}} ->
match List.assoc(main_key, r) with
| {some={String=s}} -> {some=s}
@@ -211,20 +425,63 @@ GHParse = {{
/* Parsers used in several sub-libs */
+ expect_code(codes:list((int,'a)), res:WebClient.success(string)) : option('a) =
+ match List.assoc(res.code, codes) with
+ | {some=v} -> {some=v}
+ | {none} ->
+ do Log.info("github","expect_code: bad code {res.code} (expected {String.concat(",",List.map(((c,_) -> "{c}"),codes))})")
+ none
+
+ expect_201 = expect_code([(201,{})],_)
+ expect_204 = expect_code([(204,{})],_)
+ expect_302 = expect_code([(302,{})],_)
+ expect_204_404 = expect_code([(204,true),(404,false)],_)
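+ // Shortcuts for endpoints that answer with a bare status code: expect_204 succeeds only on
+ // "204 No Content", expect_204_404 maps 204 to true and 404 to false (GitHub's yes/no checks,
+ // e.g. "is starred"), and any other code is logged and discarded.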
+
get_commit_user(srcmap) =
m = map_funs(srcmap)
{ name = m.str("name")
- login = m.str("login")
email = m.str("email")
+ date = m.date("date")
} : GitHub.commit_user
+ get_commit_user_opt(srcmap) = {some=get_commit_user(srcmap)}
+
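+ // Ids arrive either as JSON ints or as legacy strings such as "user-1378405"; for strings the
+ // numeric suffix after the last "-" is kept, and anything unparseable falls back to 0.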
+ get_id(m) =
+ match m.record("id") with
+ | {Int=i} -> i
+ | {String="user-"} -> 0
+ | {String=s} ->
+ get_int(s) = Parser.try_parse_opt(parser i=Rule.natural -> {some=i},s)
+ Option.default(0,get_int(List.head(List.rev(String.explode("-",s)))))
+ | _ -> 0
+
+ get_short_user_opt(srcmap) =
+ m = map_funs(srcmap)
+ if m.exists("login")
+ then
+ res = {
+ id = get_id(m)
+ login = m.str("login")
+ gravatar_id = m.str("gravatar_id")
+ avatar_url = m.str("avatar_url")
+ url = m.str("url")
+ `type` = m.str("type")
+ contributions = m.int("contributions")
+ } : GitHub.short_user
+ {some=res}
+ else none
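+ // Sentinel returned by get_short_user when the JSON carries no "login" field (id = -1, `type` = "missing").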
+ default_short_user = { id=-1; login="unknown user"; url=""; gravatar_id=""; avatar_url=""; `type`="missing"; contributions=-1 }
+ get_short_user(srcmap) = Option.default(default_short_user,get_short_user_opt(srcmap))
+
get_public_key(srcmap) =
m = map_funs(srcmap)
if m.exists("key") then
res = {
- title = m.str("title")
- id = m.int("id")
- key = m.str("key")
+ id = get_id(m)
+ key = m.str("key")
+ title = m.str("title")
+ url = m.str("url")
+ verified = m.bool("verified")
} : GitHub.public_key
{some=res}
else {none}
@@ -233,74 +490,633 @@ GHParse = {{
m = map_funs(srcmap)
if force_some || m.exists("name") then
res = {
- name = m.str("name")
- owner = m.str("owner")
- homepage = m.str("homepage")
url = m.str("url")
+ html_url = m.str("html_url")
+ clone_url = m.str("clone_url")
+ git_url = m.str("git_url")
+ ssh_url = m.str("ssh_url")
+ svn_url = m.str("svn_url")
+ mirror_url = m.str("mirror_url")
+ id = get_id(m)
+ owner = get_rec(m, "owner", get_short_user)
+ name = m.str("name")
+ full_name = m.str("full_name")
description = m.str("description")
+ homepage = m.str("homepage")
language = m.str("language")
- created_at = m.date("created_at")
- pushed_at = m.date("pushed_at")
- size = m.int("size")
private = m.bool("private")
fork = m.bool("fork")
forks = m.int("forks")
watchers = m.int("watchers")
- has_downloads = m.bool("has_downloads")
- has_wiki = m.bool("has_wiki")
- has_issues = m.bool("has_issues")
+ size = m.int("size")
+ master_branch = m.str("master_branch")
open_issues = m.int("open_issues")
+ pushed_at = m.date("pushed_at")
+ created_at = m.date("created_at")
+ updated_at = m.date("updated_at")
+ organization = get_rec(m, "organization", get_short_user_opt)
+ parent = get_rec(m, "parent", get_repo(false))
+ source = get_rec(m, "source", get_repo(false))
+ has_issues = if m.exists("has_issues") then {some=m.bool("has_issues")} else {none}
+ has_wiki = if m.exists("has_wiki") then {some=m.bool("has_wiki")} else {none}
+ has_downloads = if m.exists("has_downloads") then {some=m.bool("has_downloads")} else {none}
} : GitHub.repository
{some=res}
else {none}
+ get_repo_comment(srcmap) =
+ m = map_funs(srcmap)
+ if m.exists("id")
+ then
+ comment = {
+ html_url = m.str("html_url")
+ url = m.str("url")
+ id = get_id(m)
+ body = m.str("body")
+ path = m.str("path")
+ position = m.int("position")
+ line = m.int("line")
+ commit_id = m.str("commit_id")
+ user = get_rec(m, "user", get_short_user)
+ created_at = m.date("created_at")
+ updated_at = m.date("updated_at")
+ } : GitHub.repo_comment
+ {some=comment}
+ else none
+
+ get_download(srcmap) =
+ m = map_funs(srcmap)
+ if m.exists("id")
+ then
+ download = {
+ url = m.str("url")
+ html_url = m.str("html_url")
+ id = get_id(m)
+ name = m.str("name")
+ description = m.str("description")
+ size = m.int("size")
+ download_count = m.int("download_count")
+ content_type = m.str("content_type")
+ } : GitHub.download
+ {some=download}
+ else none
+
+ get_gist_file(srcmap) =
+ m = map_funs(srcmap)
+ if m.exists("filename")
+ then
+ file = {
+ size = m.int("size")
+ filename = m.str("filename")
+ raw_url = m.str("raw_url")
+ content = m.str("content")
+ }
+ {some=file}
+ else none
+
+ get_gist_fork(srcmap) =
+ m = map_funs(srcmap)
+ if m.exists("user")
+ then
+ user = get_rec(m, "user", get_short_user)
+ fork = {
+ user = user
+ url = m.str("url")
+ created_at = m.date("created_at")
+ }
+ {some=fork}
+ else none
+
+ get_gist_change_status(srcmap) =
+ m = map_funs(srcmap)
+ if m.exists("total")
+ then
+ cs = {
+ deletions = m.int("deletions")
+ additions = m.int("additions")
+ total = m.int("total")
+ }
+ {some=cs}
+ else none
+
+ get_gist_hist(srcmap) =
+ m = map_funs(srcmap)
+ if m.exists("user")
+ then
+ user = get_rec(m, "user", get_short_user)
+ change_status =
+ match m.record("change_status") with
+ | {Record=r} -> get_gist_change_status({Record=r})
+ | _ -> none
+ history = {
+ user = user
+ url = m.str("url")
+ change_status = change_status
+ version = m.str("version")
+ committed_at = m.date("committed_at")
+ }
+ {some=history}
+ else none
+
+ get_gist(srcmap) =
+ m = map_funs(srcmap)
+ if m.exists("id")
+ then
+ user = get_rec(m, "user", get_short_user)
+ files =
+ match m.record("files") with
+ | {Record=r} ->
+ List.fold(((filename, file), acc ->
+ match get_gist_file(file) with
+ | {some=file} -> [(filename,file)|acc]
+ | {none} -> acc),r,[])
+ | _ -> []
+ forks = get_list(m, "forks", get_gist_fork)
+ history = get_list(m, "history", get_gist_hist)
+ res = {
+ id = get_id(m)
+ `public` = m.bool("public")
+ description = m.str("description")
+ user = user
+ url = m.str("url")
+ html_url = m.str("html_url")
+ git_push_url = m.str("git_push_url")
+ git_pull_url = m.str("git_pull_url")
+ comments = m.int("comments")
+ created_at = m.date("created_at")
+ updated_at = m.date("updated_at")
+ files = files
+ forks = forks
+ history = history
+ } : GitHub.gist
+ some(res)
+ else none
+
+ get_state(m) =
+ match m.str("state") with
+ | "closed" -> {closed}
+ | "open" | _ -> {open}
+
+ get_label(srcmap) =
+ m = map_funs(srcmap)
+ if m.exists("name") then
+ res = {
+ name = m.str("name")
+ url = m.str("url")
+ color = m.str("color")
+ } : GitHub.label
+ some(res)
+ else none
+
+/* milestone
+{"open_issues":1,
+ "number":1,
+ "due_on":null,
+ "closed_issues":0,
+ "creator":{"avatar_url":"https://sec...",
+ "login":"nrs135",
+ "gravatar_id":"b0df98244f9039e7546c9d08313cfbf8",
+ "id":1378405,
+ "url":"https://api.github.com/users/nrs135"},
+ "description":null,
+ "created_at":"2012-09-19T10:36:32Z",
+ "title":"First milestone",
+ "id":177762,
+ "state":"open",
+ "url":"https://api.github.com/repos/nrs135/TestGitHubAPIRepo/milestones/1"
+}
+*/
+
+ get_milestone(srcmap) =
+ m = map_funs(srcmap)
+ if m.exists("title") then
+ res = {
+ url = m.str("url")
+ number = m.int("number")
+ state = get_state(m)
+ title = m.str("title")
+ description = m.str("description")
+ creator = get_rec(m, "creator", get_short_user)
+ open_issues = m.int("open_issues")
+ closed_issues = m.int("closed_issues")
+ created_at = m.date("created_at")
+ due_on = m.date("due_on")
+ } : GitHub.milestone
+ some(res)
+ else none
+
+ get_pull_request(srcmap) =
+ m = map_funs(srcmap)
+ if m.exists("html_url") then
+ res = {
+ html_url = m.str("html_url")
+ diff_url = m.str("diff_url")
+ patch_url = m.str("patch_url")
+ } : GitHub.pull_request
+ some(res)
+ else none
+
+ get_issue(srcmap) =
+ m = map_funs(srcmap)
+ if m.exists("title") then
+ res = {
+ url = m.str("url")
+ html_url = m.str("html_url")
+ number = m.int("number")
+ state = get_state(m)
+ title = m.str("title")
+ body = m.str("body")
+ user = get_rec(m, "user", get_short_user)
+ labels = get_list(m, "labels", get_label)
+ assignee = get_rec(m, "assignee", get_short_user)
+ milestone = get_rec(m, "milestone", get_milestone)
+ comments = m.int("comments")
+ pull_request = get_rec(m, "pull_request", get_pull_request)
+ closed_at = m.date("closed_at")
+ created_at = m.date("created_at")
+ updated_at = m.date("updated_at")
+ } : GitHub.issue
+ some(res)
+ else none
+
+ get_issue_comment(srcmap) =
+ m = map_funs(srcmap)
+ if m.exists("id") then
+ res = {
+ id = get_id(m)
+ url = m.str("url")
+ body = m.str("body")
+ user = get_rec(m, "user", get_short_user)
+ created_at = m.date("created_at")
+ updated_at = m.date("updated_at")
+ } : GitHub.issue_comment
+ some(res)
+ else none
+
+ get_ref(srcmap) =
+ m = map_funs(srcmap)
+ { label = m.str("label")
+ ref = m.str("ref")
+ sha = m.str("sha")
+ repository = m.record("repository") |> get_repo(true) |> Option.get
+ user = get_short_user(m.record("user"))
+ } : GitHub.ref
+
+/*
+[{"parents":[{"sha":"0136294cf02206767036865c7b9c4bee83372827",
+ "url":"https://api.github.com/repos/hbbio/webshell/commits/0136294cf02206767036865c7b9c4bee83372827"}],
+ "sha":"df99eaa753bdeb65c735ead68554eb4e67720358",
+ "committer":{"avatar_url":"https://secure.gravatar.com/avatar/b0df98244f9039e7546c9d08313cfbf8?d=https://a248.e.akamai.net/assets.github.com%2Fimages%2Fgravatars%2Fgravatar-user-420.png",
+ "login":"nrs135",
+ "gravatar_id":"b0df98244f9039e7546c9d08313cfbf8",
+ "id":1378405,
+ "url":"https://api.github.com/users/nrs135"},
+ "commit":{"comment_count":0,
+ "tree":{"sha":"21536c9aa218d12272c3d53b91e60d6e46a59827",
+ "url":"https://api.github.com/repos/hbbio/webshell/git/trees/21536c9aa218d12272c3d53b91e60d6e46a59827"},
+ "message":"Added Blekko search.",
+ "committer":{"date":"2012-02-09T00:17:45-08:00",
+ "email":"norman.scaife@mlstate.com",
+ "name":"Norman Scaife"},
+ "url":"https://api.github.com/repos/hbbio/webshell/git/commits/df99eaa753bdeb65c735ead68554eb4e67720358",
+ "author":{"date":"2012-02-09T00:17:45-08:00",
+ "email":"norman.scaife@mlstate.com",
+ "name":"Norman Scaife"}},
+ "author":{"avatar_url":"https://secure.gravatar.com/avatar/b0df98244f9039e7546c9d08313cfbf8?d=https://a248.e.akamai.net/assets.github.com%2Fimages%2Fgravatars%2Fgravatar-user-420.png",
+ "login":"nrs135",
+ "gravatar_id":"b0df98244f9039e7546c9d08313cfbf8",
+ "id":1378405,
+ "url":"https://api.github.com/users/nrs135"},
+ "url":"https://api.github.com/repos/hbbio/webshell/commits/df99eaa753bdeb65c735ead68554eb4e67720358"}]
+*/
+
+ get_stats(srcmap) =
+ m = map_funs(srcmap)
+ {
+ additions = m.int("additions")
+ deletions = m.int("deletions")
+ total = m.int("total")
+ } : GitHub.stats
+
+ @private get_full_commit_(m) =
+ {
+ sha = m.str("sha")
+ commit = get_rec(m, "commit", get_commit)
+ author = get_rec(m, "author", get_short_user)
+ parents = get_list(m, "parents", get_url_sha_opt)
+ url = m.str("url")
+ committer = get_rec(m, "committer", get_short_user)
+ stats = get_rec_opt(m, "stats", get_stats)
+ files = get_list(m, "files", get_file_opt)
+ } : GitHub.full_commit
+
+ get_full_commit(srcmap) = get_full_commit_(map_funs(srcmap))
+
+ get_full_commit_opt(srcmap) =
+ m = map_funs(srcmap)
+ if m.exists("commit")
+ then {some=get_full_commit_(m)}
+ else none
+
+/*
+[{"head":{"user":{"gravatar_id":"f6ce56b7883a131667475cc2f9a17d2a",
+ "login":"Aqua-Ye",
+ "id":85234,
+ "url":"https://api.github.com/users/Aqua-Ye",
+ "avatar_url":"https://secu..."},
+ "sha":"c544aa29f1c379a22ca3e131981f183ccaf03c29",
+ "ref":"master",
+ "label":"Aqua-Ye:master",
+ "repo":{"forks_count":0,
+ "has_wiki":true,
+ "html_url":"https://github.com/Aqua-Ye/webshell",
+ "watchers":0,
+ "mirror_url":null,
+ "watchers_count":0,
+ "svn_url":"https://github.com/Aqua-Ye/webshell",
+ "owner":{"gravatar_id":"f6ce56b7883a131667475cc2f9a17d2a",
+ "login":"Aqua-Ye",
+ "id":85234,
+ "url":"https://api.github.com/users/Aqua-Ye",
+ "avatar_url":"https://secur..."}
+ "description":""
+ "pushed_at":"2012-08-29T13:56:49Z"
+ "forks":0
+ "open_issues":0
+ "open_issues_count":0
+ "has_issues":false
+ "created_at":"2012-08-29T09:59:52Z"
+ "clone_url":"https://github.com/Aqua-Ye/webshell.git"
+ "language":"Opa"
+ "has_downloads":true
+ "size":120
+ "fork":true
+ "ssh_url":"git@github.com:Aqua-Ye/webshell.git"
+ "updated_at":"2012-08-29T13:56:50Z"
+ "full_name":"Aqua-Ye/webshell"
+ "name":"webshell"
+ "private":false
+ "id":5598719
+ "homepage":""
+ "git_url":"git://github.com/Aqua-Ye/webshell.git"
+ "url":"https://api.github.com/repos/Aqua-Ye/webshell"}},
+ "body":"",
+ "user":{"gravatar_id":"f6ce56b7883a131667475cc2f9a17d2a",
+ "login":"Aqua-Ye",
+ "id":85234,
+ "url":"https://api.github.com/users/Aqua-Ye",
+ "avatar_url":"https://secu..."},
+ "html_url":"https://github.com/hbbio/webshell/pull/6",
+ "merged_at":null,
+ "_links":{"self":{"href":"https://api.github.com/repos/hbbio/webshell/pulls/6"},
+ "issue":{"href":"https://api.github.com/repos/hbbio/webshell/issues/6"},
+ "comments":{"href":"https://api.github.com/repos/hbbio/webshell/issues/6/comments"},
+ "review_comments":{"href":"https://api.github.com/repos/hbbio/webshell/pulls/6/comments"},
+ "html":{"href":"https://github.com/hbbio/webshell/pull/6"}},
+ "closed_at":null,
+ "created_at":"2012-08-29T13:59:53Z",
+ "assignee":null,
+ "title":"Fixed compilation and run with Opa 1.0.5",
+ "patch_url":"https://github.com/hbbio/webshell/pull/6.patch",
+ "diff_url":"https://github.com/hbbio/webshell/pull/6.diff",
+ "base":{"user":null,
+ "sha":"1f87ee5a9b2a862d24b4f5e17342e7603a4672f2",
+ "ref":"master",
+ "label":"hbbio:master",
+ "repo":{"forks_count":3,
+ "has_wiki":true,
+ "html_url":"https://github.com/hbbio/webshell",
+ "watchers":13,
+ "mirror_url":null,
+ "watchers_count":13,
+ "svn_url":"https://github.com/hbbio/webshell",
+ "owner":{"gravatar_id":"f9492f8c897f46459626d8bd44cc8b9f",
+ "login":"hbbio",
+ "id":808274,
+ "url":"https://api.github.com/users/hbbio",
+ "avatar_url":"https://sec..."},
+ "description":"",
+ "pushed_at":"2012-05-09T10:46:44Z",
+ "forks":3,
+ "open_issues":5,
+ "open_issues_count":5,
+ "has_issues":true,
+ "created_at":"2011-12-15T21:29:00Z",
+ "clone_url":"https://github.com/hbbio/webshell.git",
+ "language":"Opa",
+ "has_downloads":true,
+ "size":124,
+ "fork":false,
+ "ssh_url":"git@github.com:hbbio/webshell.git",
+ "updated_at":"2012-09-13T01:20:05Z",
+ "full_name":"hbbio/webshell",
+ "name":"webshell",
+ "private":false,
+ "id":2990713,
+ "homepage":"",
+ "git_url":"git://github.com/hbbio/webshell.git",
+ "url":"https://api.github.com/repos/hbbio/webshell"}},
+ "state":"open",
+ "number":6,
+ "updated_at":"2012-08-29T13:59:53Z",
+ "milestone":null,
+ "id":2193062,
+ "issue_url":"https://github.com/hbbio/webshell/issues/6",
+ "url":"https://api.github.com/repos/hbbio/webshell/pulls/6"}
+]
+*/
+
+ get_pull_req_int(m) =
+ state =
+ match m.str("state") with
+ | "open" -> {open}
+ | "closed" -> {closed=m.date("closed_at")}
+ | x -> {other=x}
+ {
+ url = m.str("url")
+ html_url = m.str("html_url")
+ diff_url = m.str("diff_url")
+ patch_url = m.str("patch_url")
+ issue_url = m.str("issue_url")
+ number = m.int("number