/
test_client.py
1604 lines (1455 loc) · 68.7 KB
/
test_client.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
The obspy.clients.fdsn.client test suite.
:copyright:
The ObsPy Development Team (devs@obspy.org)
:license:
GNU Lesser General Public License, Version 3
(https://www.gnu.org/copyleft/lesser.html)
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from future.builtins import * # NOQA
from future.utils import PY2
import io
import os
import re
import sys
import unittest
import warnings
from difflib import Differ
if PY2:
import urllib2 as urllib_request
else:
import urllib.request as urllib_request
import lxml
import numpy as np
import requests
from obspy import UTCDateTime, read, read_inventory, Stream, Trace
from obspy.core.compatibility import mock, RegExTestCase
from obspy.core.util.base import NamedTemporaryFile
from obspy.clients.fdsn import Client, RoutingClient
from obspy.clients.fdsn.client import (build_url, parse_simple_xml,
get_bulk_string)
from obspy.clients.fdsn.header import (DEFAULT_USER_AGENT, URL_MAPPINGS,
FDSNException, FDSNRedirectException,
FDSNNoDataException, DEFAULT_SERVICES)
from obspy.core.inventory import Response
from obspy.geodetics import locations2degrees
USER_AGENT = "ObsPy (test suite) " + " ".join(DEFAULT_USER_AGENT.split())
def _normalize_stats(obj):
    """
    Strip volatile entries from trace stats so downloaded data can be
    compared against reference files on disk.

    Accepts a single trace-like object, or a Stream in which case every
    contained trace is normalized recursively.
    """
    if isinstance(obj, Stream):
        for trace in obj:
            _normalize_stats(trace)
        return
    stats = obj.stats
    if "processing" in stats:
        del stats["processing"]
    if "_fdsnws_dataselect_url" in stats:
        del stats["_fdsnws_dataselect_url"]
def failmsg(got, expected, ignore_lines=()):
    """
    Create message on difference between objects.

    If both are strings create a line-by-line diff, otherwise create info on
    both using str().

    For diffs, lines that contain any string given in ``ignore_lines`` will
    be excluded from the comparison.

    :param got: Actual result.
    :param expected: Expected result.
    :param ignore_lines: Iterable of substrings; any line containing one of
        them is dropped before diffing. (Default changed from a mutable
        ``[]`` to an immutable tuple; it is only iterated, never mutated,
        so behavior is unchanged.)
    :returns: Empty string if there is no relevant difference, otherwise a
        human-readable message.
    """
    if isinstance(got, str) and isinstance(expected, str):
        # Drop ignored lines on both sides before comparing.
        got = [line for line in got.splitlines(True)
               if all(x not in line for x in ignore_lines)]
        expected = [line for line in expected.splitlines(True)
                    if all(x not in line for x in ignore_lines)]
        diff = Differ().compare(got, expected)
        # Keep only actual difference markers ('-', '+', '?'); unchanged
        # lines start with two spaces.
        diff = "".join(line for line in diff if line[0] in "-+?")
        if diff:
            return "\nDiff:\n%s" % diff
        return ""
    return "\nGot:\n%s\nExpected:\n%s" % (str(got), str(expected))
def normalize_version_number(string):
    """
    Returns input string with version numbers normalized for testing
    purposes.

    Version numbers of the form ``vMAJOR.MINOR.PATCH`` are replaced by
    ``"vX.X.X"``, all commas are removed, and the result is returned as a
    list of whitespace-stripped lines. (The previous docstring claimed the
    words were sorted — no sorting happens.)
    """
    pattern = r'v[0-9]+\.[0-9]+\.[0-9]+'
    cleaned = re.sub(pattern, "vX.X.X", string).replace(",", "")
    return [line.strip() for line in cleaned.splitlines()]
class ClientTestCase(RegExTestCase):
"""
Test cases for obspy.clients.fdsn.client.Client.
"""
@classmethod
def setUpClass(cls):
    """Set up shared fixtures once for the whole test case."""
    # directory where the test files are located
    cls.path = os.path.dirname(__file__)
    cls.datapath = os.path.join(cls.path, "data")
    # Shared clients pointing at the live IRIS FDSN web services: one
    # anonymous, one with the public demo credentials (used by the tests
    # exercising the "queryauth" endpoints).
    cls.client = Client(base_url="IRIS", user_agent=USER_AGENT)
    cls.client_auth = \
        Client(base_url="IRIS", user_agent=USER_AGENT,
               user="nobody@iris.edu", password="anonymous")
def test_empty_bulk_string(self):
    """
    Makes sure an exception is raised if an empty bulk string would be
    produced (e.g. empty list as input for `get_bulk_string()`)
    """
    expected = ("Empty 'bulk' parameter potentially leading to a FDSN "
                "request of all available data")
    # Empty list, empty string and None must all be rejected.
    for empty_value in ([], '', None):
        with self.assertRaises(FDSNException) as ctx:
            get_bulk_string(bulk=empty_value, arguments={})
        self.assertEqual(ctx.exception.args[0], expected)
def test_validate_base_url(self):
    """
    Tests the _validate_base_url() method.
    """
    # All shortcut mappings plus a selection of hostnames, IPv4 and
    # IPv6 literals must validate.
    valid_urls = list(URL_MAPPINGS.values()) + [
        "http://arclink.ethz.ch",
        "http://example.org",
        "https://webservices.rm.ingv.it",
        "http://localhost:8080/test/",
        "http://93.63.40.85/",
        "http://[::1]:80/test/",
        "http://[2001:db8:85a3:8d3:1319:8a2e:370:7348]",
        "http://[2001:db8::ff00:42:8329]",
        "http://[::ffff:192.168.89.9]",
        "http://jane",
        "http://localhost",
    ]
    # Malformed hosts, IPv4 and IPv6 literals must be rejected.
    invalid_urls = [
        "http://",
        "http://127.0.1",
        "http://127.=.0.1",
        "http://127.0.0.0.1",
        "http://[]",
        "http://[1]",
        "http://[1:2]",
        "http://[1::2::3]",
        "http://[1::2:3::4]",
        "http://[1:2:2:4:5:6:7]",
    ]
    for url in valid_urls:
        self.assertEqual(self.client._validate_base_url(url), True)
    for url in invalid_urls:
        self.assertEqual(self.client._validate_base_url(url), False)
def test_url_building(self):
    """
    Tests the build_url() functions.

    Covers application.wadl URLs, single and multiple query parameters
    and the error raised for an unknown service name.
    """
    # Application WADL
    self.assertEqual(
        build_url("http://service.iris.edu", "dataselect", 1,
                  "application.wadl"),
        "http://service.iris.edu/fdsnws/dataselect/1/application.wadl")
    self.assertEqual(
        build_url("http://service.iris.edu", "event", 1,
                  "application.wadl"),
        "http://service.iris.edu/fdsnws/event/1/application.wadl")
    self.assertEqual(
        build_url("http://service.iris.edu", "station", 1,
                  "application.wadl"),
        "http://service.iris.edu/fdsnws/station/1/application.wadl")
    # Test one parameter.
    self.assertEqual(
        build_url("http://service.iris.edu", "dataselect", 1,
                  "query", {"network": "BW"}),
        "http://service.iris.edu/fdsnws/dataselect/1/query?network=BW")
    self.assertEqual(
        build_url("http://service.iris.edu", "dataselect", 1,
                  "queryauth", {"network": "BW"}),
        "http://service.iris.edu/fdsnws/dataselect/1/queryauth?network=BW")
    # Test two parameters. Note random order, two possible results.
    self.assertTrue(
        build_url("http://service.iris.edu", "dataselect", 1,
                  "query", {"net": "A", "sta": "BC"}) in
        ("http://service.iris.edu/fdsnws/dataselect/1/query?net=A&sta=BC",
         "http://service.iris.edu/fdsnws/dataselect/1/query?sta=BC&net=A"))
    # A wrong service raises a ValueError
    self.assertRaises(ValueError, build_url, "http://service.iris.edu",
                      "obspy", 1, "query")
def test_location_parameters(self):
    """
    Tests how the variety of location values are handled.

    Why location? Mostly because it is one tricky parameter. It is not
    uncommon to assume that a non-existent location is "--", but in reality
    "--" is "<space><space>". This substitution exists because mostly
    because various applications have trouble digesting spaces (spaces in
    the URL, for example).

    The confusion begins when location is treated as empty instead, which
    would imply "I want all locations" instead of "I only want locations of
    <space><space>"
    """
    # requests with no specified location should be treated as a wildcard
    self.assertFalse(
        "--" in build_url("http://service.iris.edu", "station", 1,
                          "query", {"network": "IU", "station": "ANMO",
                                    "starttime": "2013-01-01"}))
    # location of " " is the same as "--"
    self.assertEqual(
        build_url("http://service.iris.edu", "station", 1,
                  "query", {"location": " "}),
        "http://service.iris.edu/fdsnws/station/1/query?location=--")
    # wildcard locations are valid. Will be encoded.
    self.assertEqual(
        build_url("http://service.iris.edu", "station", 1,
                  "query", {"location": "*"}),
        "http://service.iris.edu/fdsnws/station/1/query?location=%2A")
    self.assertEqual(
        build_url("http://service.iris.edu", "station", 1,
                  "query", {"location": "A?"}),
        "http://service.iris.edu/fdsnws/station/1/query?location=A%3F")
    # lists are valid, including <space><space> lists. Again encoded
    # result.
    self.assertEqual(
        build_url("http://service.iris.edu", "station", 1,
                  "query", {"location": " ,1?,?0"}),
        "http://service.iris.edu/fdsnws/station/1/query?"
        "location=--%2C1%3F%2C%3F0")
    self.assertEqual(
        build_url("http://service.iris.edu", "station", 1,
                  "query", {"location": "1?,--,?0"}),
        "http://service.iris.edu/fdsnws/station/1/query?"
        "location=1%3F%2C--%2C%3F0")
    # Test all three special cases with empty parameters into lists.
    self.assertEqual(
        build_url("http://service.iris.edu", "station", 1,
                  "query", {"location": " ,AA,BB"}),
        "http://service.iris.edu/fdsnws/station/1/query?"
        "location=--%2CAA%2CBB")
    self.assertEqual(
        build_url("http://service.iris.edu", "station", 1,
                  "query", {"location": "AA, ,BB"}),
        "http://service.iris.edu/fdsnws/station/1/query?"
        "location=AA%2C--%2CBB")
    self.assertEqual(
        build_url("http://service.iris.edu", "station", 1,
                  "query", {"location": "AA,BB, "}),
        "http://service.iris.edu/fdsnws/station/1/query?"
        "location=AA%2CBB%2C--")
    # The location parameter is also passed through the
    # _create_url_from_parameters() method and thus has to survive it!
    # This guards against a regression where all empty location codes
    # where removed by this function!
    for service in ["station", "dataselect"]:
        for loc in ["", " ", " ", "--", b"", b" ", b" ", b"--",
                    u"", u" ", u" ", u"--"]:
            self.assertIn(
                "location=--",
                self.client._create_url_from_parameters(
                    service, [],
                    {"location": loc, "starttime": 0, "endtime": 1}))
    # Also check the full call with a mock test.
    # Patching Client._download means no network traffic happens; only
    # the URL handed to it (first positional arg) is inspected.
    for loc in ["", " ", " ", "--", b"", b" ", b" ", b"--",
                u"", u" ", u" ", u"--"]:
        with mock.patch("obspy.clients.fdsn.Client._download") as p:
            self.client.get_stations(0, 0, location=loc,
                                     filename=mock.Mock())
        self.assertEqual(p.call_count, 1)
        self.assertIn("location=--", p.call_args[0][0])
        with mock.patch("obspy.clients.fdsn.Client._download") as p:
            self.client.get_waveforms(1, 2, loc, 4, 0, 0,
                                      filename=mock.Mock())
        self.assertEqual(p.call_count, 1)
        self.assertIn("location=--", p.call_args[0][0])
def test_url_building_with_auth(self):
    """
    Tests the Client._build_url() method with authentication.

    Necessary on top of test_url_building test case because clients with
    authentication have to build different URLs for dataselect.
    """
    # Anonymous client -> plain "query" endpoint.
    self.assertEqual(
        self.client._build_url("dataselect", "query", {'net': "BW"}),
        "http://service.iris.edu/fdsnws/dataselect/1/query?net=BW")
    # Authenticated client -> "queryauth" endpoint.
    self.assertEqual(
        self.client_auth._build_url("dataselect", "query", {'net': "BW"}),
        "http://service.iris.edu/fdsnws/dataselect/1/"
        "queryauth?net=BW")
def test_set_credentials(self):
    """
    Test for issue #2146

    When setting credentials not during `__init__` but using
    `set_credentials`, waveform queries should still properly go to
    "queryauth" endpoint.
    """
    user = "nobody@iris.edu"
    password = "anonymous"
    client = Client(base_url="IRIS", user_agent=USER_AGENT)
    client.set_credentials(user=user, password=password)
    self.assertEqual(
        client._build_url("dataselect", "query", {'net': "BW"}),
        "http://service.iris.edu/fdsnws/dataselect/1/"
        "queryauth?net=BW")
    # more basic test: check that set_credentials has set Client.user
    # (which is tested when checking which endpoint to use, query or
    # queryauth)
    self.assertEqual(client.user, user)
def test_trim_stream_after_get_waveform(self):
    """
    Tests that stream is properly trimmed to user requested times after
    fetching from datacenter, see #1887
    """
    # Explicit service mapping so the request always goes to this exact
    # dataselect endpoint regardless of default base URL handling.
    c = Client(
        service_mappings={'dataselect':
                          'http://eida.ipgp.fr/fdsnws/dataselect/1'})
    starttime = UTCDateTime('2016-11-01T00:00:00')
    endtime = UTCDateTime('2016-11-01T00:00:10')
    stream = c.get_waveforms('G', 'PEL', '*', 'LHZ', starttime, endtime)
    # The returned trace must start/end exactly at the requested times.
    self.assertEqual(starttime, stream[0].stats.starttime)
    self.assertEqual(endtime, stream[0].stats.endtime)
def test_service_discovery_iris(self):
    """
    Tests the automatic discovery of services with the IRIS endpoint. The
    test parameters are taken from IRIS' website.

    This will have to be adjusted once IRIS changes their implementation.
    """
    client = self.client
    # Discovered service names: the three FDSN web services plus the two
    # event metadata listings.
    self.assertEqual(set(client.services.keys()),
                     set(("dataselect", "event", "station",
                          "available_event_contributors",
                          "available_event_catalogs")))
    # The test sets are copied from the IRIS webpage.
    self.assertEqual(
        set(client.services["dataselect"].keys()),
        set(("starttime", "endtime", "network", "station", "location",
             "channel", "quality", "minimumlength", "longestonly")))
    self.assertEqual(
        set(client.services["station"].keys()),
        set(("starttime", "endtime", "startbefore", "startafter",
             "endbefore", "endafter", "network", "station", "location",
             "channel", "minlatitude", "maxlatitude", "minlongitude",
             "maxlongitude", "latitude", "longitude", "minradius",
             "maxradius", "level", "includerestricted", "format",
             "includeavailability", "updatedafter", "matchtimeseries")))
    self.assertEqual(
        set(client.services["event"].keys()),
        set(("starttime", "endtime", "minlatitude", "maxlatitude",
             "minlongitude", "maxlongitude", "latitude", "longitude",
             "maxradius", "minradius", "mindepth", "maxdepth",
             "minmagnitude", "maxmagnitude",
             "magnitudetype", "format",
             "catalog", "contributor", "limit", "offset", "orderby",
             "updatedafter", "includeallorigins", "includeallmagnitudes",
             "includearrivals", "eventid",
             "originid"  # XXX: This is currently just specified in the
                         # WADL.
             )))
    # Also check an exemplary value in more detail.
    minradius = client.services["event"]["minradius"]
    self.assertEqual(minradius["default_value"], 0.0)
    self.assertEqual(minradius["required"], False)
    self.assertEqual(minradius["doc"], "")
    self.assertEqual(minradius["doc_title"], "Specify minimum distance "
                     "from the geographic point defined by latitude and "
                     "longitude")
    self.assertEqual(minradius["type"], float)
    self.assertEqual(minradius["options"], [])
def test_iris_event_catalog_availability(self):
    """
    Tests the parsing of the available event catalogs.
    """
    available = set(self.client.services["available_event_catalogs"])
    self.assertEqual(available, {"GCMT", "ISC", "NEIC PDE"})
def test_iris_event_contributors_availability(self):
    """
    Tests the parsing of the available event contributors.
    """
    # Fetch the authoritative contributor list directly from the service
    # and compare it against what the client parsed during discovery.
    response = requests.get(
        'http://service.iris.edu/fdsnws/event/1/contributors')
    xml = lxml.etree.fromstring(response.content)
    expected = {
        elem.text for elem in xml.xpath('/Contributors/Contributor')}
    # check that we have some values in there
    self.assertTrue(len(expected) > 5)
    self.assertEqual(
        set(self.client.services["available_event_contributors"]),
        expected)
def test_discover_services_defaults(self):
    """
    A Client initialized with _discover_services=False shouldn't perform
    any services/WADL queries on the endpoint, and should show only the
    default service parameters.
    """
    no_discovery_client = Client(base_url="IRIS", user_agent=USER_AGENT,
                                 _discover_services=False)
    self.assertEqual(no_discovery_client.services, DEFAULT_SERVICES)
def test_simple_xml_parser(self):
    """
    Tests the simple XML parsing helper function.
    """
    # Key is the lower-cased root tag, value the set of child element
    # texts; note the <total> counter is not expected in the result set.
    catalogs = parse_simple_xml("""
        <?xml version="1.0"?>
        <Catalogs>
          <total>6</total>
          <Catalog>ANF</Catalog>
          <Catalog>GCMT</Catalog>
          <Catalog>TEST</Catalog>
          <Catalog>ISC</Catalog>
          <Catalog>UofW</Catalog>
          <Catalog>NEIC PDE</Catalog>
        </Catalogs>""")
    self.assertEqual(catalogs, {"catalogs": set(("ANF", "GCMT", "TEST",
                                                 "ISC", "UofW",
                                                 "NEIC PDE"))})
def test_iris_example_queries_event(self):
    """
    Tests the (sometimes modified) example queries given on the IRIS
    web page.

    Used to be tested against files but that was not maintainable. It
    now tests if the queries return what was asked for.
    """
    client = self.client
    # Event id query.
    cat = client.get_events(eventid=609301)
    self.assertEqual(len(cat), 1)
    self.assertIn("609301", cat[0].resource_id.id)
    # Temporal query.
    cat = client.get_events(
        starttime=UTCDateTime("2001-01-07T01:00:00"),
        endtime=UTCDateTime("2001-01-07T01:05:00"), catalog="ISC")
    self.assertGreater(len(cat), 0)
    for event in cat:
        self.assertEqual(event.origins[0].extra.catalog.value, "ISC")
        self.assertGreater(event.origins[0].time,
                           UTCDateTime("2001-01-07T01:00:00"))
        self.assertGreater(UTCDateTime("2001-01-07T01:05:00"),
                           event.origins[0].time)
    # Misc query.
    cat = client.get_events(
        starttime=UTCDateTime("2001-01-07T14:00:00"),
        endtime=UTCDateTime("2001-01-08T00:00:00"), minlatitude=15,
        maxlatitude=40, minlongitude=-170, maxlongitude=170,
        includeallmagnitudes=True, minmagnitude=4, orderby="magnitude")
    self.assertGreater(len(cat), 0)
    for event in cat:
        self.assertGreater(event.origins[0].time,
                           UTCDateTime("2001-01-07T14:00:00"))
        self.assertGreater(UTCDateTime("2001-01-08T00:00:00"),
                           event.origins[0].time)
        # Latitude bounds (15 to 40, with a small tolerance).
        self.assertGreater(event.origins[0].latitude, 14.9)
        self.assertGreater(40.1, event.origins[0].latitude)
        # BUG FIX: the next two assertions previously re-checked
        # ``latitude`` against the longitude bounds (-170.1/170.1), which
        # is vacuous for any latitude and left the longitude constraint
        # untested. Check longitude instead.
        self.assertGreater(event.origins[0].longitude, -170.1)
        self.assertGreater(170.1, event.origins[0].longitude)
        # events returned by FDSNWS can contain many magnitudes with a wide
        # range, and currently (at least for IRIS) the magnitude threshold
        # sent to the server checks if at least one magnitude matches, it
        # does not only check the preferred magnitude..
        self.assertTrue(any(m.mag >= 3.999 for m in event.magnitudes))
def test_iris_example_queries_station(self):
    """
    Tests the (sometimes modified) example queries given on IRIS webpage.

    This test used to download files but that is almost impossible to
    keep up to date - thus it is now a bit smarter and tests the
    returned inventory in different ways.
    """
    client = self.client
    # Radial query.
    inv = client.get_stations(latitude=-56.1, longitude=-26.7,
                              maxradius=15)
    self.assertGreater(len(inv.networks), 0)  # at least one network
    for net in inv:
        self.assertGreater(len(net.stations), 0)  # at least one station
        for sta in net:
            dist = locations2degrees(sta.latitude, sta.longitude,
                                     -56.1, -26.7)
            # small tolerance for WGS84.
            self.assertGreater(15.1, dist, "%s.%s" % (net.code,
                                                      sta.code))
    # Misc query.
    # NOTE: minlongitude (170) > maxlongitude (-170), i.e. the search box
    # crosses the dateline - hence the assertFalse on the longitude range
    # further down.
    inv = client.get_stations(
        startafter=UTCDateTime("2003-01-07"),
        endbefore=UTCDateTime("2011-02-07"), minlatitude=15,
        maxlatitude=55, minlongitude=170, maxlongitude=-170, network="IM")
    self.assertGreater(len(inv.networks), 0)  # at least one network
    for net in inv:
        self.assertGreater(len(net.stations), 0)  # at least one station
        for sta in net:
            msg = "%s.%s" % (net.code, sta.code)
            self.assertGreater(sta.start_date, UTCDateTime("2003-01-07"),
                               msg)
            if sta.end_date is not None:
                self.assertGreater(UTCDateTime("2011-02-07"), sta.end_date,
                                   msg)
            self.assertGreater(sta.latitude, 14.9, msg)
            self.assertGreater(55.1, sta.latitude, msg)
            self.assertFalse(-170.1 <= sta.longitude <= 170.1, msg)
            self.assertEqual(net.code, "IM", msg)
    # Simple query
    inv = client.get_stations(
        starttime=UTCDateTime("2000-01-01"),
        endtime=UTCDateTime("2001-01-01"), net="IU", sta="ANMO")
    self.assertGreater(len(inv.networks), 0)  # at least one network
    for net in inv:
        self.assertGreater(len(net.stations), 0)  # at least one station
        for sta in net:
            self.assertGreater(UTCDateTime("2001-01-01"), sta.start_date)
            if sta.end_date is not None:
                self.assertGreater(sta.end_date, UTCDateTime("2000-01-01"))
            self.assertEqual(net.code, "IU")
            self.assertEqual(sta.code, "ANMO")
    # Station wildcard query.
    inv = client.get_stations(
        starttime=UTCDateTime("2000-01-01"),
        endtime=UTCDateTime("2002-01-01"), network="IU", sta="A*",
        location="00")
    self.assertGreater(len(inv.networks), 0)  # at least one network
    for net in inv:
        self.assertGreater(len(net.stations), 0)  # at least one station
        for sta in net:
            self.assertGreater(UTCDateTime("2002-01-01"), sta.start_date)
            if sta.end_date is not None:
                self.assertGreater(sta.end_date, UTCDateTime("2000-01-01"))
            self.assertEqual(net.code, "IU")
            self.assertTrue(sta.code.startswith("A"))
def test_iris_example_queries_dataselect(self):
    """
    Tests the (sometimes modified) example queries given on IRIS webpage.

    Each query is compared against a reference MiniSEED file on disk,
    both when returned as a Stream and when written directly to a file.
    """
    client = self.client
    queries = [
        ("IU", "ANMO", "00", "BHZ",
         UTCDateTime("2010-02-27T06:30:00.000"),
         UTCDateTime("2010-02-27T06:40:00.000")),
        ("IU", "A*", "*", "BHZ",
         UTCDateTime("2010-02-27T06:30:00.000"),
         UTCDateTime("2010-02-27T06:31:00.000")),
        ("IU", "A??", "*0", "BHZ",
         UTCDateTime("2010-02-27T06:30:00.000"),
         UTCDateTime("2010-02-27T06:31:00.000")),
    ]
    result_files = ["dataselect_example.mseed",
                    "dataselect_example_wildcards.mseed",
                    "dataselect_example_mixed_wildcards.mseed",
                    ]
    for query, filename in zip(queries, result_files):
        # test output to stream
        got = client.get_waveforms(*query)
        # Assert that the meta-information about the provider is stored.
        for tr in got:
            self.assertEqual(
                tr.stats._fdsnws_dataselect_url,
                client.base_url + "/fdsnws/dataselect/1/query")
        # Remove fdsnws URL as it is not in the data from the disc.
        for tr in got:
            del tr.stats._fdsnws_dataselect_url
        file_ = os.path.join(self.datapath, filename)
        expected = read(file_)
        # The client trims by default.
        _normalize_stats(got)
        self.assertEqual(got, expected, "Dataselect failed for query %s" %
                         repr(query))
        # test output to file
        with NamedTemporaryFile() as tf:
            client.get_waveforms(*query, filename=tf.name)
            with open(tf.name, 'rb') as fh:
                got = fh.read()
            with open(file_, 'rb') as fh:
                expected = fh.read()
        self.assertEqual(got, expected, "Dataselect failed for query %s" %
                         repr(query))
def test_authentication(self):
    """
    Test dataselect with authentication.
    """
    # Same example query as the plain dataselect test, but issued through
    # the authenticated client (i.e. the "queryauth" endpoint).
    query = ("IU", "ANMO", "00", "BHZ",
             UTCDateTime("2010-02-27T06:30:00.000"),
             UTCDateTime("2010-02-27T06:40:00.000"))
    got = self.client_auth.get_waveforms(*query)
    expected = read(
        os.path.join(self.datapath, "dataselect_example.mseed"))
    _normalize_stats(got)
    self.assertEqual(got, expected, failmsg(got, expected))
def test_iris_example_queries_event_discover_services_false(self):
    """
    Tests the (sometimes modified) example queries given on the IRIS
    web page, without service discovery.

    Used to be tested against files but that was not maintainable. It
    now tests if the queries return what was asked for.
    """
    client = Client(base_url="IRIS", user_agent=USER_AGENT,
                    _discover_services=False)
    # Event id query.
    cat = client.get_events(eventid=609301)
    self.assertEqual(len(cat), 1)
    self.assertIn("609301", cat[0].resource_id.id)
    # Temporal query.
    cat = client.get_events(
        starttime=UTCDateTime("2001-01-07T01:00:00"),
        endtime=UTCDateTime("2001-01-07T01:05:00"), catalog="ISC")
    self.assertGreater(len(cat), 0)
    for event in cat:
        self.assertEqual(event.origins[0].extra.catalog.value, "ISC")
        self.assertGreater(event.origins[0].time,
                           UTCDateTime("2001-01-07T01:00:00"))
        self.assertGreater(UTCDateTime("2001-01-07T01:05:00"),
                           event.origins[0].time)
    # Misc query.
    cat = client.get_events(
        starttime=UTCDateTime("2001-01-07T14:00:00"),
        endtime=UTCDateTime("2001-01-08T00:00:00"), minlatitude=15,
        maxlatitude=40, minlongitude=-170, maxlongitude=170,
        includeallmagnitudes=True, minmagnitude=4, orderby="magnitude")
    self.assertGreater(len(cat), 0)
    for event in cat:
        self.assertGreater(event.origins[0].time,
                           UTCDateTime("2001-01-07T14:00:00"))
        self.assertGreater(UTCDateTime("2001-01-08T00:00:00"),
                           event.origins[0].time)
        # Latitude bounds (15 to 40, with a small tolerance).
        self.assertGreater(event.origins[0].latitude, 14.9)
        self.assertGreater(40.1, event.origins[0].latitude)
        # BUG FIX: previously these re-checked ``latitude`` against the
        # longitude bounds, which is vacuous; check longitude instead.
        self.assertGreater(event.origins[0].longitude, -170.1)
        self.assertGreater(170.1, event.origins[0].longitude)
        # events returned by FDSNWS can contain many magnitudes with a wide
        # range, and currently (at least for IRIS) the magnitude threshold
        # sent to the server checks if at least one magnitude matches, it
        # does not only check the preferred magnitude..
        self.assertTrue(any(m.mag >= 3.999 for m in event.magnitudes))
def test_iris_example_queries_station_discover_services_false(self):
    """
    Tests the (sometimes modified) example queries given on IRIS webpage,
    without service discovery.

    This test used to download files but that is almost impossible to
    keep up to date - thus it is now a bit smarter and tests the
    returned inventory in different ways.
    """
    client = Client(base_url="IRIS", user_agent=USER_AGENT,
                    _discover_services=False)
    # Radial query.
    inv = client.get_stations(latitude=-56.1, longitude=-26.7,
                              maxradius=15)
    self.assertGreater(len(inv.networks), 0)  # at least one network
    for net in inv:
        self.assertGreater(len(net.stations), 0)  # at least one station
        for sta in net:
            dist = locations2degrees(sta.latitude, sta.longitude,
                                     -56.1, -26.7)
            # small tolerance for WGS84.
            self.assertGreater(15.1, dist, "%s.%s" % (net.code,
                                                      sta.code))
    # Misc query.
    # NOTE: minlongitude (170) > maxlongitude (-170), i.e. the search box
    # crosses the dateline - hence the assertFalse on the longitude range
    # further down.
    inv = client.get_stations(
        startafter=UTCDateTime("2003-01-07"),
        endbefore=UTCDateTime("2011-02-07"), minlatitude=15,
        maxlatitude=55, minlongitude=170, maxlongitude=-170, network="IM")
    self.assertGreater(len(inv.networks), 0)  # at least one network
    for net in inv:
        self.assertGreater(len(net.stations), 0)  # at least one station
        for sta in net:
            msg = "%s.%s" % (net.code, sta.code)
            self.assertGreater(sta.start_date, UTCDateTime("2003-01-07"),
                               msg)
            if sta.end_date is not None:
                self.assertGreater(UTCDateTime("2011-02-07"), sta.end_date,
                                   msg)
            self.assertGreater(sta.latitude, 14.9, msg)
            self.assertGreater(55.1, sta.latitude, msg)
            self.assertFalse(-170.1 <= sta.longitude <= 170.1, msg)
            self.assertEqual(net.code, "IM", msg)
    # Simple query
    inv = client.get_stations(
        starttime=UTCDateTime("2000-01-01"),
        endtime=UTCDateTime("2001-01-01"), net="IU", sta="ANMO")
    self.assertGreater(len(inv.networks), 0)  # at least one network
    for net in inv:
        self.assertGreater(len(net.stations), 0)  # at least one station
        for sta in net:
            self.assertGreater(UTCDateTime("2001-01-01"), sta.start_date)
            if sta.end_date is not None:
                self.assertGreater(sta.end_date, UTCDateTime("2000-01-01"))
            self.assertEqual(net.code, "IU")
            self.assertEqual(sta.code, "ANMO")
    # Station wildcard query.
    inv = client.get_stations(
        starttime=UTCDateTime("2000-01-01"),
        endtime=UTCDateTime("2002-01-01"), network="IU", sta="A*",
        location="00")
    self.assertGreater(len(inv.networks), 0)  # at least one network
    for net in inv:
        self.assertGreater(len(net.stations), 0)  # at least one station
        for sta in net:
            self.assertGreater(UTCDateTime("2002-01-01"), sta.start_date)
            if sta.end_date is not None:
                self.assertGreater(sta.end_date, UTCDateTime("2000-01-01"))
            self.assertEqual(net.code, "IU")
            self.assertTrue(sta.code.startswith("A"))
def test_iris_example_queries_dataselect_discover_services_false(self):
    """
    Tests the (sometimes modified) example queries given on IRIS webpage,
    without discovering services first.

    Each query is compared against a reference MiniSEED file on disk,
    both when returned as a Stream and when written directly to a file.
    """
    client = Client(base_url="IRIS", user_agent=USER_AGENT,
                    _discover_services=False)
    queries = [
        ("IU", "ANMO", "00", "BHZ",
         UTCDateTime("2010-02-27T06:30:00.000"),
         UTCDateTime("2010-02-27T06:40:00.000")),
        ("IU", "A*", "*", "BHZ",
         UTCDateTime("2010-02-27T06:30:00.000"),
         UTCDateTime("2010-02-27T06:31:00.000")),
        ("IU", "A??", "*0", "BHZ",
         UTCDateTime("2010-02-27T06:30:00.000"),
         UTCDateTime("2010-02-27T06:31:00.000")),
    ]
    result_files = ["dataselect_example.mseed",
                    "dataselect_example_wildcards.mseed",
                    "dataselect_example_mixed_wildcards.mseed",
                    ]
    for query, filename in zip(queries, result_files):
        # test output to stream
        got = client.get_waveforms(*query)
        # Assert that the meta-information about the provider is stored.
        for tr in got:
            self.assertEqual(
                tr.stats._fdsnws_dataselect_url,
                client.base_url + "/fdsnws/dataselect/1/query")
        # Remove fdsnws URL as it is not in the data from the disc.
        for tr in got:
            del tr.stats._fdsnws_dataselect_url
        file_ = os.path.join(self.datapath, filename)
        expected = read(file_)
        _normalize_stats(got)
        self.assertEqual(got, expected, "Dataselect failed for query %s" %
                         repr(query))
        # test output to file
        with NamedTemporaryFile() as tf:
            client.get_waveforms(*query, filename=tf.name)
            with open(tf.name, 'rb') as fh:
                got = fh.read()
            with open(file_, 'rb') as fh:
                expected = fh.read()
        self.assertEqual(got, expected, "Dataselect failed for query %s" %
                         repr(query))
def test_conflicting_params(self):
    """
    Giving the same parameter in both its long and short form (here
    ``network`` and ``net``) must raise an error rather than silently
    picking one of the two.
    """
    self.assertRaises(FDSNException, self.client.get_stations,
                      network="IU", net="IU")
def test_help_function_with_iris(self):
    """
    Tests the help function with the IRIS example.

    This will have to be adopted any time IRIS changes their
    implementation.
    """
    try:
        client = self.client
        # Capture output: client.help() prints to stdout, so stdout is
        # temporarily swapped for a StringIO buffer.
        tmp = io.StringIO()
        sys.stdout = tmp
        client.help("event")
        got = sys.stdout.getvalue()
        sys.stdout = sys.__stdout__
        tmp.close()
        filename = "event_helpstring.txt"
        with open(os.path.join(self.datapath, filename)) as fh:
            expected = fh.read()
        # allow for changes in version number..
        got = normalize_version_number(got)
        expected = normalize_version_number(expected)
        # catalogs/contributors are checked in separate tests
        self.assertTrue(got[-2].startswith('Available catalogs:'))
        self.assertTrue(got[-1].startswith('Available contributors:'))
        got = got[:-2]
        expected = expected[:-2]
        for line_got, line_expected in zip(got, expected):
            self.assertEqual(line_got, line_expected)
        # Reset. Creating a new one is faster then clearing the old one.
        tmp = io.StringIO()
        sys.stdout = tmp
        client.help("station")
        got = sys.stdout.getvalue()
        sys.stdout = sys.__stdout__
        tmp.close()
        filename = "station_helpstring.txt"
        with open(os.path.join(self.datapath, filename)) as fh:
            expected = fh.read()
        got = normalize_version_number(got)
        expected = normalize_version_number(expected)
        self.assertEqual(got, expected, failmsg(got, expected))
        # Reset.
        tmp = io.StringIO()
        sys.stdout = tmp
        client.help("dataselect")
        got = sys.stdout.getvalue()
        sys.stdout = sys.__stdout__
        tmp.close()
        filename = "dataselect_helpstring.txt"
        with open(os.path.join(self.datapath, filename)) as fh:
            expected = fh.read()
        got = normalize_version_number(got)
        expected = normalize_version_number(expected)
        self.assertEqual(got, expected, failmsg(got, expected))
    finally:
        # Always restore the real stdout, even when an assertion fails.
        sys.stdout = sys.__stdout__
def test_str_method(self):
got = str(self.client)
expected = (
"FDSN Webservice Client (base url: http://service.iris.edu)\n"
"Available Services: 'dataselect' (v1.0.0), 'event' (v1.0.6), "
"'station' (v1.0.7), 'available_event_catalogs', "
"'available_event_contributors'\n\n"
"Use e.g. client.help('dataselect') for the\n"
"parameter description of the individual services\n"
"or client.help() for parameter description of\n"
"all webservices.")
got = normalize_version_number(got)
expected = normalize_version_number(expected)
self.assertEqual(got, expected, failmsg(got, expected))
def test_dataselect_bulk(self):
"""
Test bulk dataselect requests, POSTing data to server. Also tests
authenticated bulk request.
"""
clients = [self.client, self.client_auth]
file = os.path.join(self.datapath, "bulk.mseed")
expected = read(file)
# test cases for providing lists of lists
# Deliberately requesting data that overlap the end-time of a channel.
# TA.A25A..BHZ ends at 2011-07-22T14:50:25.5
bulk = (("TA", "A25A", "", "BHZ",
UTCDateTime("2011-07-22T14:50:23"),
UTCDateTime("2011-07-22T14:50:29")),
("TA", "A25A", "", "BHE",
UTCDateTime("2010-03-25T00:00:00"),
UTCDateTime("2010-03-25T00:00:06")),
("IU", "ANMO", "*", "HHZ",
UTCDateTime("2010-03-25T00:00:00"),
UTCDateTime("2010-03-25T00:00:08")))
# As of 03 December 2018, it looks like IRIS is ignoring minimumlength?
params = dict(quality="B", longestonly=False, minimumlength=5)
for client in clients:
# test output to stream
got = client.get_waveforms_bulk(bulk, **params)
# Remove fdsnws URL as it is not in the data from the disc.
for tr in got:
del tr.stats._fdsnws_dataselect_url
self.assertEqual(got, expected, failmsg(got, expected))
# test output to file
with NamedTemporaryFile() as tf:
client.get_waveforms_bulk(bulk, filename=tf.name, **params)
got = read(tf.name)
self.assertEqual(got, expected, failmsg(got, expected))
# test cases for providing a request string
bulk = ("quality=B\n"
"longestonly=false\n"
"minimumlength=5\n"
"TA A25A -- BHZ 2010-03-25T00:00:00 2010-03-25T00:00:04\n"
"TA A25A -- BHE 2010-03-25T00:00:00 2010-03-25T00:00:06\n"
"IU ANMO * HHZ 2010-03-25T00:00:00 2010-03-25T00:00:08\n")
for client in clients:
# test output to stream
got = client.get_waveforms_bulk(bulk)
# Assert that the meta-information about the provider is stored.
for tr in got:
if client.user:
self.assertEqual(
tr.stats._fdsnws_dataselect_url,
client.base_url + "/fdsnws/dataselect/1/queryauth")
else:
self.assertEqual(
tr.stats._fdsnws_dataselect_url,
client.base_url + "/fdsnws/dataselect/1/query")
# Remove fdsnws URL as it is not in the data from the disc.
for tr in got:
del tr.stats._fdsnws_dataselect_url
self.assertEqual(got, expected, failmsg(got, expected))
# test output to file
with NamedTemporaryFile() as tf:
client.get_waveforms_bulk(bulk, filename=tf.name)
got = read(tf.name)
self.assertEqual(got, expected, failmsg(got, expected))
# test cases for providing a file name
for client in clients:
with NamedTemporaryFile() as tf:
with open(tf.name, "wt") as fh:
fh.write(bulk)
got = client.get_waveforms_bulk(bulk)
# Remove fdsnws URL as it is not in the data from the disc.
for tr in got:
del tr.stats._fdsnws_dataselect_url
self.assertEqual(got, expected, failmsg(got, expected))
# test cases for providing a file-like object
for client in clients:
got = client.get_waveforms_bulk(io.StringIO(bulk))
# Remove fdsnws URL as it is not in the data from the disc.
for tr in got:
del tr.stats._fdsnws_dataselect_url
self.assertEqual(got, expected, failmsg(got, expected))
def test_station_bulk(self):
"""
Test bulk station requests, POSTing data to server. Also tests
authenticated bulk request.
Does currently only test reading from a list of list. The other
input types are tested with the waveform bulk downloader and thus
should work just fine.
"""
clients = [self.client, self.client_auth]
# test cases for providing lists of lists
starttime = UTCDateTime(1990, 1, 1)
endtime = UTCDateTime(1990, 1, 1) + 10
bulk = [
["IU", "ANMO", "", "BHE", starttime, endtime],
["IU", "CCM", "", "BHZ", starttime, endtime],
["IU", "COR", "", "UHZ", starttime, endtime],
["IU", "HRV", "", "LHN", starttime, endtime],
]
for client in clients:
# Test with station level.
inv = client.get_stations_bulk(bulk, level="station")
# Test with output to file.
with NamedTemporaryFile() as tf:
client.get_stations_bulk(
bulk, filename=tf.name, level="station")
inv2 = read_inventory(tf.name, format="stationxml")