Skip to content

Commit

Permalink
Merge pull request #60 from ibacher/fix_tests
Browse files · Browse the repository at this point in the history
Fix tests
  • Loading branch information
ibacher committed Jul 9, 2019
2 parents af5fa73 + e930f61 commit 22d22e1
Show file tree
Hide file tree
Showing 8 changed files with 21 additions and 34 deletions.
2 changes: 1 addition & 1 deletion examples/literate_src/1_pubmed_search_and_save.jl
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ all_pmids(conn_sqlite)
tables = ["author_ref", "mesh_desc", "mesh_qual", "mesh_heading"]
for t in tables
query_str = "SELECT * FROM $t LIMIT 5;"
-        q = SQLite.query(conn_sqlite, query_str)
+        q = SQLite.Query(conn_sqlite, query_str)
println(q)
end

Expand Down
3 changes: 2 additions & 1 deletion examples/runexamples.jl
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
all_examples = [
("literate_src/1_pubmed_search_and_save.jl", " Running Example: Search and Save"),
-    ("literate_src/2_umls_map_and_filter.jl", " Running Example: MeSH/UMLS Map and Filter"),
+    # TODO how can we run UMLS examples?
+    # ("literate_src/2_umls_map_and_filter.jl", " Running Example: MeSH/UMLS Map and Filter"),
("literate_src/4_pubmed_export_citations.jl", " Running Example: Export Citations"),
("literate_src/5_load_medline.jl", " Running Example: Load MEDLINE")
]
Expand Down
3 changes: 1 addition & 2 deletions src/PubMed/pubmed_sql_utils.jl
Original file line number Diff line number Diff line change
Expand Up @@ -511,8 +511,7 @@ function db_insert!(db::MySQL.Connection, csv_path::String = pwd(), csv_prefix::
path = joinpath(csv_path, "$(csv_prefix)$(table).csv")
drop_csv && push!(paths,path)

-        headers = CSV.read(path, DataFrame, rows = 1)
-        # return headers
+        headers = CSV.read(path, limit=1)

cols = String.(getfield(headers, :colindex).names)
if !col_match(db, table, cols)
Expand Down
14 changes: 9 additions & 5 deletions test/ct.jl
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,16 @@ using ZipFile

#test all files are .xml
not_xml = false
-r = ZipFile.Reader(fout);
-for f in r.files
-    if !occursin(r"^.*\.xml.*$", f.name)
-        not_xml = true
-        break
+r = ZipFile.Reader(fout)
+try
+    for f in r.files
+        if !occursin(r"^.*\.xml.*$", f.name)
+            not_xml = true
+            break
+        end
     end
+finally
+    close(r)
 end

@test not_xml == false
Expand Down
7 changes: 1 addition & 6 deletions test/dbutils_sqlite.jl
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,7 @@ using SQLite

@testset "SQLite DBUtils" begin

-    db_path = "test_db.sqlite"
-    conn = SQLite.DB(db_path)
+    conn = SQLite.DB()
PubMed.create_tables!(conn)

#check collection of tables
Expand All @@ -19,8 +18,4 @@ using SQLite

@test length(sel[1]) == 1
@test sel[1][1] == 1234

-    if isfile(db_path)
-        rm(db_path)
-    end
end
12 changes: 1 addition & 11 deletions test/processes_sqlite.jl
Original file line number Diff line number Diff line change
Expand Up @@ -9,14 +9,8 @@ const verbose = false
const umls_user = get(ENV, "UMLS_USER", "")
const umls_pswd = get(ENV, "UMLS_PSSWD", "")
#************************ SQLite **************************
-const db_path="./test_processes.sqlite"
 #***************************************************************************

-if isfile(db_path)
-    rm(db_path)
-end
-
-const conn_sql = SQLite.DB(db_path)
+const conn_sql = SQLite.DB()
PubMed.create_tables!(conn_sql)

@testset "Save and Search" begin
Expand Down Expand Up @@ -63,9 +57,5 @@ global credentials_set = get(ENV, "TRAVIS_SECURE_ENV_VARS", "true")=="true" && u

end

-# remove temp files
-if isfile(db_path)
-    rm(db_path)
-end
println("------------End Test Processes SQLite-----------")
println("------------------------------------------------")
9 changes: 1 addition & 8 deletions test/pubmed.jl
Original file line number Diff line number Diff line change
Expand Up @@ -154,9 +154,7 @@ import Base.parse
println("-----------------------------------------")
println(" Testing SQLite Saving")

-    db_path = "./test_db.db"
-
-    conn = SQLite.DB(db_path)
+    conn = SQLite.DB()
PubMed.create_tables!(conn)
PubMed.save_efetch!(conn, efetch_doc,false, true)

Expand Down Expand Up @@ -187,11 +185,6 @@ import Base.parse
count = q[1][1]
@test count > 0
end

-    # remove temp files
-    if isfile(db_path)
-        rm(db_path)
-    end
end


Expand Down
5 changes: 5 additions & 0 deletions test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,11 @@
#
using Test

+# Hack: force the loading of MbedTLS as otherwise it conflicts with the version in LibCURL
+# Entropy() was chosen as a fairly quick exported function
+using MbedTLS
+MbedTLS.Entropy()

using XMLDict
using BioMedQuery.Processes
using BioMedQuery.PubMed
Expand Down

0 comments on commit 22d22e1

Please sign in to comment.