-
Notifications
You must be signed in to change notification settings - Fork 529
/
client.py
2110 lines (1886 loc) · 88.5 KB
/
client.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
FDSN Web service client for ObsPy.
:copyright:
The ObsPy Development Team (devs@obspy.org)
:license:
GNU Lesser General Public License, Version 3
(https://www.gnu.org/copyleft/lesser.html)
"""
import collections.abc
import copy
import gzip
import io
import os
import re
from socket import timeout as socket_timeout
import textwrap
import threading
import warnings
from collections import OrderedDict
from http.client import HTTPException, IncompleteRead
from urllib.parse import urlparse
from lxml import etree
import obspy
from obspy import UTCDateTime, read_inventory
from .header import (DEFAULT_PARAMETERS, DEFAULT_USER_AGENT, FDSNWS,
OPTIONAL_PARAMETERS, PARAMETER_ALIASES,
URL_DEFAULT_SUBPATH, URL_MAPPINGS, URL_MAPPING_SUBPATHS,
WADL_PARAMETERS_NOT_TO_BE_PARSED, DEFAULT_SERVICES,
FDSNException, FDSNRedirectException, FDSNNoDataException,
FDSNTimeoutException,
FDSNNoAuthenticationServiceException,
FDSNBadRequestException, FDSNNoServiceException,
FDSNInternalServerException,
FDSNNotImplementedException,
FDSNBadGatewayException,
FDSNTooManyRequestsException,
FDSNRequestTooLargeException,
FDSNServiceUnavailableException,
FDSNUnauthorizedException,
FDSNForbiddenException,
FDSNDoubleAuthenticationException,
FDSNInvalidRequestException)
from .wadl_parser import WADLParser
from urllib.parse import urlencode
import urllib.request as urllib_request
import queue
# Default FDSN major version for each web service; individual versions can be
# overridden per client via the ``major_versions`` argument of ``Client``.
DEFAULT_SERVICE_VERSIONS = {'dataselect': 1, 'station': 1, 'event': 1}
class CustomRedirectHandler(urllib_request.HTTPRedirectHandler):
    """
    Redirect handler that also follows redirects for POST requests.

    The standard library by default only redirects GET and HEAD; FDSN bulk
    requests are POSTs, so those must be redirected as well, including the
    request body.
    """
    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """
        Adapted from the standard library implementation.

        Treat GET, HEAD and POST identically for the usual redirect status
        codes; anything else is surfaced as an ``HTTPError``.
        """
        allowed_codes = (301, 302, 303, 307)
        allowed_methods = ("GET", "HEAD", "POST")
        method = req.get_method()
        if code not in allowed_codes or method not in allowed_methods:
            raise urllib_request.HTTPError(
                req.full_url, code, msg, headers, fp)

        # Be conciliant with URIs containing a space.
        target = newurl.replace(' ', '%20')

        # Strip entity headers; the redirected request carries its own.
        stripped = ("content-length", "content-type")
        kept_headers = {
            key: value for key, value in req.headers.items()
            if key.lower() not in stripped}

        # Unlike the standard library, also forward the request body so the
        # redirected POST stays a POST with the same payload.
        return urllib_request.Request(
            target, headers=kept_headers, data=req.data,
            origin_req_host=req.origin_req_host, unverifiable=True)
class NoRedirectionHandler(urllib_request.HTTPRedirectHandler):
    """
    Redirect handler that refuses to follow any redirect.

    Installed when credentials are configured, so that username/password are
    never silently re-sent to a different host.
    """
    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """
        Adapted from the standard library implementation; always raises.
        """
        explanation = (
            "Requests with credentials (username, password) are not being "
            "redirected by default to improve security. To force redirects "
            "and if you trust the data center, set `force_redirect` to True "
            "when initializing the Client.")
        raise FDSNRedirectException(explanation)
class Client(object):
    """
    FDSN Web service request client.
    For details see the :meth:`~obspy.clients.fdsn.client.Client.__init__()`
    method.
    """
    # Dictionary caching any discovered service. Therefore repeatedly
    # initializing a client with the same base URL is cheap.
    __service_discovery_cache = {}
    #: Regex for UINT8 (a decimal number 0-255, i.e. one IPv4 component)
    RE_UINT8 = r'(?:25[0-5]|2[0-4]\d|[0-1]?\d{1,2})'
    #: Regex for HEX4 (one 16-bit group of an IPv6 address).
    #: NOTE(review): the character classes contain a literal ',' so commas
    #: also match -- presumably unintended, merely over-permissive; confirm.
    RE_HEX4 = r'(?:[\d,a-f]{4}|[1-9,a-f][0-9,a-f]{0,2}|0)'
    #: Regex for IPv4 (four dot-separated UINT8 components)
    RE_IPv4 = r'(?:' + RE_UINT8 + r'(?:\.' + RE_UINT8 + r'){3})'
    #: Regex for IPv6 (bracketed; full form, '::'-compressed forms and the
    #: IPv4-embedded form)
    RE_IPv6 = \
        r'(?:\[' + RE_HEX4 + r'(?::' + RE_HEX4 + r'){7}\]' + \
        r'|\[(?:' + RE_HEX4 + r':){0,5}' + RE_HEX4 + r'::\]' + \
        r'|\[::' + RE_HEX4 + r'(?::' + RE_HEX4 + r'){0,5}\]' + \
        r'|\[::' + RE_HEX4 + r'(?::' + RE_HEX4 + r'){0,3}:' + RE_IPv4 + \
        r'\]' + \
        r'|\[' + RE_HEX4 + r':' + \
        r'(?:' + RE_HEX4 + r':|:' + RE_HEX4 + r'){0,4}' + \
        r':' + RE_HEX4 + r'\])'
    #: Regex for checking the validity of URLs: http(s) scheme, then an
    #: IPv4/IPv6 address, 'localhost', a bare label or a dotted domain name,
    #: followed by an optional port and path. Anchored at the end only;
    #: intended for use with :func:`re.match` (anchored at the start).
    URL_REGEX = r'https?://' + \
        r'(' + RE_IPv4 + \
        r'|' + RE_IPv6 + \
        r'|localhost' + \
        r'|\w(?:[\w-]*\w)?' + \
        r'|(?:\w(?:[\w-]{0,61}[\w])?\.){1,}([a-z][a-z0-9-]{1,62}))' + \
        r'(?::\d{2,5})?' + \
        r'(/[\w\.-]+)*/?$'
@classmethod
def _validate_base_url(cls, base_url):
if re.match(cls.URL_REGEX, base_url, re.IGNORECASE):
return True
else:
return False
    def __init__(self, base_url="EARTHSCOPE", major_versions=None, user=None,
                 password=None, user_agent=DEFAULT_USER_AGENT, debug=False,
                 timeout=120, service_mappings=None, force_redirect=False,
                 eida_token=None, _discover_services=True):
        """
        Initializes an FDSN Web Service client.

        >>> client = Client("EARTHSCOPE")
        >>> print(client)  # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
        FDSN Webservice Client (base url: http://service.iris.edu)
        Available Services: 'dataselect' (v...), 'event' (v...),
        'station' (v...), 'available_event_catalogs',
        'available_event_contributors'
        Use e.g. client.help('dataselect') for the
        parameter description of the individual services
        or client.help() for parameter description of
        all webservices.

        :type base_url: str
        :param base_url: Base URL of FDSN web service compatible server
            (e.g. "http://service.iris.edu") or key string for recognized
            server (one of %s).
        :type major_versions: dict
        :param major_versions: Allows to specify custom major version numbers
            for individual services (e.g.
            `major_versions={'station': 2, 'dataselect': 3}`), otherwise the
            latest version at time of implementation will be used.
        :type user: str
        :param user: User name of HTTP Digest Authentication for access to
            restricted data.
        :type password: str
        :param password: Password of HTTP Digest Authentication for access to
            restricted data.
        :type user_agent: str
        :param user_agent: The user agent for all requests.
        :type debug: bool
        :param debug: Debug flag.
        :type timeout: float
        :param timeout: Maximum time (in seconds) to wait for a single request
            to receive the first byte of the response (after which an
            exception is raised).
        :type service_mappings: dict
        :param service_mappings: For advanced use only. Allows the direct
            setting of the endpoints of the different services. (e.g.
            ``service_mappings={'station': 'http://example.com/test/stat/1'}``)
            Valid keys are ``event``, ``station``, and ``dataselect``. This
            will overwrite the ``base_url`` and ``major_versions`` arguments.
            For all services not specified, the default locations
            indicated by ``base_url`` and ``major_versions`` will be used. Any
            service that is manually specified as ``None`` (e.g.
            ``service_mappings={'event': None}``) will be deactivated.
        :type force_redirect: bool
        :param force_redirect: By default the client will follow all HTTP
            redirects as long as no credentials (username and password)
            are given. If credentials are given it will raise an exception
            when a redirect is discovered. This is done to improve security.
            Settings this flag to ``True`` will force all redirects to be
            followed even if credentials are given.
        :type eida_token: str
        :param eida_token: Token for EIDA authentication mechanism, see
            http://geofon.gfz-potsdam.de/waveform/archive/auth/index.php. If a
            token is provided, options ``user`` and ``password`` must not be
            used. This mechanism is only available on select EIDA nodes. The
            token can be provided in form of the PGP message as a string, or
            the filename of a local file with the PGP message in it.
        :type _discover_services: bool
        :param _discover_services: By default the client will query
            information about the FDSN endpoint when it is instantiated. In
            certain cases, this may place a heavy load on the FDSN service
            provider. If set to ``False``, no service discovery is performed
            and default parameter support is assumed. This parameter is
            experimental and will likely be removed in the future.
        """
        self.debug = debug
        self.user = user
        self.timeout = timeout
        self._force_redirect = force_redirect
        # Cache for the webservice versions. This makes interactive use of
        # the client more convenient.
        self.__version_cache = {}
        # Resolve a shortcut key (e.g. "EARTHSCOPE") to its real base URL and
        # the matching URL subpath, if one is known.
        if base_url.upper() in URL_MAPPINGS:
            url_mapping = base_url.upper()
            base_url = URL_MAPPINGS[url_mapping]
            url_subpath = URL_MAPPING_SUBPATHS.get(
                url_mapping, URL_DEFAULT_SUBPATH)
        else:
            # A purely alphabetic string that is not a known key is almost
            # certainly a mistyped shortcut rather than a URL.
            if base_url.isalpha():
                msg = "The FDSN service shortcut `{}` is unknown."\
                    .format(base_url)
                raise ValueError(msg)
            url_subpath = URL_DEFAULT_SUBPATH
        # Make sure the base_url does not end with a slash.
        base_url = base_url.strip("/")
        # Catch invalid URLs to avoid confusing error messages
        if not self._validate_base_url(base_url):
            msg = "The FDSN service base URL `{}` is not a valid URL."\
                .format(base_url)
            raise ValueError(msg)
        self.base_url = base_url
        self.url_subpath = url_subpath
        # Build the urllib opener (auth + redirect policy).
        self._set_opener(user, password)
        self.request_headers = {"User-Agent": user_agent}
        # Avoid mutable kwarg.
        if major_versions is None:
            major_versions = {}
        # Make a copy to avoid overwriting the default service versions.
        self.major_versions = DEFAULT_SERVICE_VERSIONS.copy()
        self.major_versions.update(major_versions)
        # Avoid mutable kwarg.
        if service_mappings is None:
            service_mappings = {}
        self._service_mappings = service_mappings
        if self.debug is True:
            print("Base URL: %s" % self.base_url)
            if self._service_mappings:
                print("Custom service mappings:")
                for key, value in self._service_mappings.items():
                    print("\t%s: '%s'" % (key, value))
            print("Request Headers: %s" % str(self.request_headers))
        if _discover_services:
            self._discover_services()
        else:
            self.services = DEFAULT_SERVICES
        # Use EIDA token if provided - this requires setting new url openers.
        #
        # This can only happen after the services have been discovered as
        # the clients needs to know if the fdsnws implementation has support
        # for the EIDA token system.
        #
        # This is a non-standard feature but we support it, given the number
        # of EIDA nodes out there.
        if eida_token is not None:
            # Make sure user/pw are not also given.
            if user is not None or password is not None:
                msg = ("EIDA authentication token provided, but "
                       "user and password are also given.")
                raise FDSNDoubleAuthenticationException(msg)
            self.set_eida_token(eida_token)
@property
def _has_eida_auth(self):
return self.services.get('eida-auth', False)
def set_credentials(self, user, password):
"""
Set user and password resulting in subsequent web service
requests for waveforms being authenticated for potential access to
restricted data.
This will overwrite any previously set-up credentials/authentication.
:type user: str
:param user: User name of credentials.
:type password: str
:param password: Password for given user name.
"""
self.user = user
self._set_opener(user, password)
def set_eida_token(self, token, validate=True):
"""
Fetch user and password from the server using the provided token,
resulting in subsequent web service requests for waveforms being
authenticated for potential access to restricted data.
This only works for select EIDA nodes and relies on the auth mechanism
described here:
http://geofon.gfz-potsdam.de/waveform/archive/auth/index.php
This will overwrite any previously set-up credentials/authentication.
:type token: str
:param token: Token for EIDA authentication mechanism, see
http://geofon.gfz-potsdam.de/waveform/archive/auth/index.php.
This mechanism is only available on select EIDA nodes. The token
can be provided in form of the PGP message as a string, or the
filename of a local file with the PGP message in it.
:type validate: bool
:param validate: Whether to sanity check the token before sending it to
the EIDA server or not.
"""
user, password = self._resolve_eida_token(token, validate=validate)
self.set_credentials(user, password)
def _set_opener(self, user, password):
# Only add the authentication handler if required.
handlers = []
if user is not None and password is not None:
# Create an OpenerDirector for HTTP Digest Authentication
password_mgr = urllib_request.HTTPPasswordMgrWithDefaultRealm()
password_mgr.add_password(None, self.base_url, user, password)
handlers.append(urllib_request.HTTPDigestAuthHandler(password_mgr))
if (user is None and password is None) or self._force_redirect is True:
# Redirect if no credentials are given or the force_redirect
# flag is True.
handlers.append(CustomRedirectHandler())
else:
handlers.append(NoRedirectionHandler())
# Don't install globally to not mess with other codes.
self._url_opener = urllib_request.build_opener(*handlers)
if self.debug:
print('Installed new opener with handlers: {!s}'.format(handlers))
    def _resolve_eida_token(self, token, validate=True):
        """
        Use the token to get credentials.

        Sends the (PGP-message) token to the node's
        ``/dataselect/1/auth`` endpoint over https and returns the
        ``(user, password)`` pair the server answers with.

        :type token: str
        :param token: PGP message string, or the name of a local file
            containing it.
        :type validate: bool
        :param validate: Whether to sanity check the token before sending it.
        :raises FDSNNoAuthenticationServiceException: if the discovered
            services do not advertise EIDA auth support.
        :raises ValueError: if validation of the token fails.
        """
        if not self._has_eida_auth:
            msg = ("EIDA token authentication requested but service at '{}' "
                   "does not specify /dataselect/auth in the "
                   "dataselect/application.wadl.").format(self.base_url)
            raise FDSNNoAuthenticationServiceException(msg)
        token_file = None
        # check if there's a local file that matches the provided string
        if os.path.isfile(token):
            token_file = token
            with open(token_file, 'rb') as fh:
                token = fh.read().decode()
        # sanity check on the token (after the file read, so a file path is
        # validated by its *contents*, not its name)
        if validate:
            if not _validate_eida_token(token):
                if token_file:
                    msg = ("Read EIDA token from file '{}' but it does not "
                           "seem to contain a valid PGP message.").format(
                        token_file)
                else:
                    msg = ("EIDA token does not seem to be a valid PGP "
                           "message. If you passed a filename, make sure the "
                           "file actually exists.")
                raise ValueError(msg)
        # force https so that we don't send around tokens unsecurely
        url = 'https://{}{}/dataselect/1/auth'.format(
            urlparse(self.base_url).netloc + urlparse(self.base_url).path,
            self.url_subpath)
        # paranoid: check again that we only send the token to https
        if urlparse(url).scheme != "https":
            msg = 'This should not happen, please file a bug report.'
            raise Exception(msg)
        # Already does the error checking with fdsnws semantics.
        response = self._download(url=url, data=token.encode(),
                                  use_gzip=True, return_string=True,
                                  content_type='application/octet-stream')
        # The server is expected to answer with "user:password".
        user, password = response.decode().split(':')
        if self.debug:
            print('Got temporary user/pw: {}/{}'.format(user, password))
        return user, password
    def get_events(self, starttime=None, endtime=None, minlatitude=None,
                   maxlatitude=None, minlongitude=None, maxlongitude=None,
                   latitude=None, longitude=None, minradius=None,
                   maxradius=None, mindepth=None, maxdepth=None,
                   minmagnitude=None, maxmagnitude=None, magnitudetype=None,
                   eventtype=None, includeallorigins=None,
                   includeallmagnitudes=None, includearrivals=None,
                   eventid=None, limit=None, offset=None, orderby=None,
                   catalog=None, contributor=None, updatedafter=None,
                   filename=None, **kwargs):
        """
        Query the event service of the client.

        >>> client = Client("EARTHSCOPE")
        >>> cat = client.get_events(eventid=609301)
        >>> print(cat)
        1 Event(s) in Catalog:
        1997-10-14T09:53:11.070000Z | -22.145, -176.720 | 7.8 ...

        The return value is a :class:`~obspy.core.event.Catalog` object
        which can contain any number of events.

        >>> t1 = UTCDateTime("2001-01-07T00:00:00")
        >>> t2 = UTCDateTime("2001-01-07T03:00:00")
        >>> cat = client.get_events(starttime=t1, endtime=t2, minmagnitude=4,
        ...                         catalog="ISC")
        >>> print(cat)
        3 Event(s) in Catalog:
        2001-01-07T02:55:59.290000Z | +9.801, +76.548 | 4.9 ...
        2001-01-07T02:35:35.170000Z | -21.291, -68.308 | 4.4 ...
        2001-01-07T00:09:25.630000Z | +22.946, -107.011 | 4.0 ...

        :type starttime: :class:`~obspy.core.utcdatetime.UTCDateTime`,
            optional
        :param starttime: Limit to events on or after the specified start
            time.
        :type endtime: :class:`~obspy.core.utcdatetime.UTCDateTime`, optional
        :param endtime: Limit to events on or before the specified end time.
        :type minlatitude: float, optional
        :param minlatitude: Limit to events with a latitude larger than the
            specified minimum.
        :type maxlatitude: float, optional
        :param maxlatitude: Limit to events with a latitude smaller than the
            specified maximum.
        :type minlongitude: float, optional
        :param minlongitude: Limit to events with a longitude larger than the
            specified minimum.
        :type maxlongitude: float, optional
        :param maxlongitude: Limit to events with a longitude smaller than
            the specified maximum.
        :type latitude: float, optional
        :param latitude: Specify the latitude to be used for a radius search.
        :type longitude: float, optional
        :param longitude: Specify the longitude to be used for a radius
            search.
        :type minradius: float, optional
        :param minradius: Limit to events within the specified minimum number
            of degrees from the geographic point defined by the latitude and
            longitude parameters.
        :type maxradius: float, optional
        :param maxradius: Limit to events within the specified maximum number
            of degrees from the geographic point defined by the latitude and
            longitude parameters.
        :type mindepth: float, optional
        :param mindepth: Limit to events with depth, in kilometers, larger
            than the specified minimum.
        :type maxdepth: float, optional
        :param maxdepth: Limit to events with depth, in kilometers, smaller
            than the specified maximum.
        :type minmagnitude: float, optional
        :param minmagnitude: Limit to events with a magnitude larger than the
            specified minimum.
        :type maxmagnitude: float, optional
        :param maxmagnitude: Limit to events with a magnitude smaller than
            the specified maximum.
        :type magnitudetype: str, optional
        :param magnitudetype: Specify a magnitude type to use for testing the
            minimum and maximum limits.
        :type eventtype: str, optional
        :param eventtype: Limit to events with a specified event type.
            Multiple types are comma-separated (e.g.,
            ``"earthquake,quarry blast"``). Allowed values are from QuakeML.
            See :const:`obspy.core.event.header.EventType` for a list of
            allowed event types.
        :type includeallorigins: bool, optional
        :param includeallorigins: Specify if all origins for the event should
            be included, default is data center dependent but is suggested to
            be the preferred origin only.
        :type includeallmagnitudes: bool, optional
        :param includeallmagnitudes: Specify if all magnitudes for the event
            should be included, default is data center dependent but is
            suggested to be the preferred magnitude only.
        :type includearrivals: bool, optional
        :param includearrivals: Specify if phase arrivals should be included.
        :type eventid: str or int, optional
        :param eventid: Select a specific event by ID; event identifiers are
            data center specific (String or Integer).
        :type limit: int, optional
        :param limit: Limit the results to the specified number of events.
        :type offset: int, optional
        :param offset: Return results starting at the event count specified,
            starting at 1.
        :type orderby: str, optional
        :param orderby: Order the result by time or magnitude with the
            following possibilities:

            * time: order by origin descending time
            * time-asc: order by origin ascending time
            * magnitude: order by descending magnitude
            * magnitude-asc: order by ascending magnitude

        :type catalog: str, optional
        :param catalog: Limit to events from a specified catalog
        :type contributor: str, optional
        :param contributor: Limit to events contributed by a specified
            contributor.
        :type updatedafter: :class:`~obspy.core.utcdatetime.UTCDateTime`,
            optional
        :param updatedafter: Limit to events updated after the specified
            time.
        :type filename: str or file
        :param filename: If given, the downloaded data will be saved there
            instead of being parsed to an ObsPy object. Thus it will contain
            the raw data from the webservices.
        :rtype: :class:`~obspy.core.event.Catalog` or ``None``
        :returns: Catalog with the requested events, or ``None`` when
            ``filename`` was given.

        Any additional keyword arguments will be passed to the webservice as
        additional arguments. If you pass one of the default parameters and
        the webservice does not support it, a warning will be issued. Passing
        any non-default parameters that the webservice does not support will
        raise an error.
        """
        if "event" not in self.services:
            msg = "The current client does not have an event service."
            raise ValueError(msg)
        # ``setup_query_dict`` pulls this method's arguments out of
        # ``locals()`` by name, so the parameter names above must stay in
        # sync with the FDSN parameter names.
        locs = locals()
        setup_query_dict('event', locs, kwargs)
        url = self._create_url_from_parameters(
            "event", DEFAULT_PARAMETERS['event'], kwargs)
        data_stream = self._download(url)
        data_stream.seek(0, 0)
        if filename:
            # Save the raw server response without parsing it.
            self._write_to_file_object(filename, data_stream)
            data_stream.close()
        else:
            cat = obspy.read_events(data_stream, format="quakeml")
            data_stream.close()
            return cat
    def get_stations(self, starttime=None, endtime=None, startbefore=None,
                     startafter=None, endbefore=None, endafter=None,
                     network=None, station=None, location=None, channel=None,
                     minlatitude=None, maxlatitude=None, minlongitude=None,
                     maxlongitude=None, latitude=None, longitude=None,
                     minradius=None, maxradius=None, level=None,
                     includerestricted=None, includeavailability=None,
                     updatedafter=None, matchtimeseries=None, filename=None,
                     format=None, **kwargs):
        """
        Query the station service of the FDSN client.

        >>> client = Client("EARTHSCOPE")
        >>> starttime = UTCDateTime("2001-01-01")
        >>> endtime = UTCDateTime("2001-01-02")
        >>> inventory = client.get_stations(network="IU", station="A*",
        ...                                 starttime=starttime,
        ...                                 endtime=endtime)
        >>> print(inventory)  # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
        Inventory created at ...
            Created by: IRIS WEB SERVICE: fdsnws-station | version: ...
                    ...
            Sending institution: IRIS-DMC (IRIS-DMC)
            Contains:
                Networks (1):
                    IU
                Stations (3):
                    IU.ADK (Adak, Aleutian Islands, Alaska)
                    IU.AFI (Afiamalu, Samoa)
                    IU.ANMO (Albuquerque, New Mexico, USA)
                Channels (0):

        >>> inventory.plot()  # doctest: +SKIP

        .. plot::

            from obspy import UTCDateTime
            from obspy.clients.fdsn import Client
            client = Client()
            starttime = UTCDateTime("2001-01-01")
            endtime = UTCDateTime("2001-01-02")
            inventory = client.get_stations(network="IU", station="A*",
                                            starttime=starttime,
                                            endtime=endtime)
            inventory.plot()

        The result is an :class:`~obspy.core.inventory.inventory.Inventory`
        object which models a StationXML file.

        The ``level`` argument determines the amount of returned information.
        ``level="station"`` is useful for availability queries whereas
        ``level="response"`` returns the full response information for the
        requested channels. ``level`` can furthermore be set to ``"network"``
        and ``"channel"``.

        >>> inventory = client.get_stations(
        ...     starttime=starttime, endtime=endtime,
        ...     network="IU", sta="ANMO", loc="00", channel="*Z",
        ...     level="response")
        >>> print(inventory)  # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
        Inventory created at ...
            Created by: IRIS WEB SERVICE: fdsnws-station | version: ...
                    ...
            Sending institution: IRIS-DMC (IRIS-DMC)
            Contains:
                Networks (1):
                    IU
                Stations (1):
                    IU.ANMO (Albuquerque, New Mexico, USA)
                Channels (4):
                    IU.ANMO.00.BHZ, IU.ANMO.00.LHZ, IU.ANMO.00.UHZ,
                    IU.ANMO.00.VHZ

        >>> inventory[0].plot_response(min_freq=1E-4)  # doctest: +SKIP

        .. plot::

            from obspy import UTCDateTime
            from obspy.clients.fdsn import Client
            client = Client()
            starttime = UTCDateTime("2001-01-01")
            endtime = UTCDateTime("2001-01-02")
            inventory = client.get_stations(
                starttime=starttime, endtime=endtime,
                network="IU", sta="ANMO", loc="00", channel="*Z",
                level="response")
            inventory[0].plot_response(min_freq=1E-4)

        :type starttime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param starttime: Limit to metadata epochs starting on or after the
            specified start time.
        :type endtime: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param endtime: Limit to metadata epochs ending on or before the
            specified end time.
        :type startbefore: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param startbefore: Limit to metadata epochs starting before
            specified time.
        :type startafter: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param startafter: Limit to metadata epochs starting after specified
            time.
        :type endbefore: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param endbefore: Limit to metadata epochs ending before specified
            time.
        :type endafter: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param endafter: Limit to metadata epochs ending after specified
            time.
        :type network: str
        :param network: Select one or more network codes. Can be SEED network
            codes or data center defined codes. Multiple codes are
            comma-separated (e.g. ``"IU,TA"``).
        :type station: str
        :param station: Select one or more SEED station codes. Multiple codes
            are comma-separated (e.g. ``"ANMO,PFO"``).
        :type location: str
        :param location: Select one or more SEED location identifiers.
            Multiple identifiers are comma-separated (e.g. ``"00,01"``). As a
            special case ``"--"`` (two dashes) will be translated to a string
            of two space characters to match blank location IDs.
        :type channel: str
        :param channel: Select one or more SEED channel codes. Multiple codes
            are comma-separated (e.g. ``"BHZ,HHZ"``).
        :type minlatitude: float
        :param minlatitude: Limit to stations with a latitude larger than the
            specified minimum.
        :type maxlatitude: float
        :param maxlatitude: Limit to stations with a latitude smaller than
            the specified maximum.
        :type minlongitude: float
        :param minlongitude: Limit to stations with a longitude larger than
            the specified minimum.
        :type maxlongitude: float
        :param maxlongitude: Limit to stations with a longitude smaller than
            the specified maximum.
        :type latitude: float
        :param latitude: Specify the latitude to be used for a radius search.
        :type longitude: float
        :param longitude: Specify the longitude to be used for a radius
            search.
        :type minradius: float
        :param minradius: Limit results to stations within the specified
            minimum number of degrees from the geographic point defined by
            the latitude and longitude parameters.
        :type maxradius: float
        :param maxradius: Limit results to stations within the specified
            maximum number of degrees from the geographic point defined by
            the latitude and longitude parameters.
        :type level: str
        :param level: Specify the level of detail for the results ("network",
            "station", "channel", "response"), e.g. specify "response" to get
            full information including instrument response for each channel.
        :type includerestricted: bool
        :param includerestricted: Specify if results should include
            information for restricted stations.
        :type includeavailability: bool
        :param includeavailability: Specify if results should include
            information about time series data availability.
        :type updatedafter: :class:`~obspy.core.utcdatetime.UTCDateTime`
        :param updatedafter: Limit to metadata updated after specified date;
            updates are data center specific.
        :type matchtimeseries: bool
        :param matchtimeseries: Only include data for which matching time
            series data is available.
        :type filename: str or file
        :param filename: If given, the downloaded data will be saved there
            instead of being parsed to an ObsPy object. Thus it will contain
            the raw data from the webservices.
        :type format: str
        :param format: The format in which to request station information.
            ``"xml"`` (StationXML) or ``"text"`` (FDSN station text format).
            XML has more information but text is much faster.
        :rtype: :class:`~obspy.core.inventory.inventory.Inventory`
        :returns: Inventory with requested station information.

        Any additional keyword arguments will be passed to the webservice as
        additional arguments. If you pass one of the default parameters and
        the webservice does not support it, a warning will be issued. Passing
        any non-default parameters that the webservice does not support will
        raise an error.
        """
        if "station" not in self.services:
            msg = "The current client does not have a station service."
            raise ValueError(msg)
        # ``setup_query_dict`` pulls this method's arguments out of
        # ``locals()`` by name, so the parameter names above must stay in
        # sync with the FDSN parameter names (including ``format``, which
        # intentionally shadows the builtin).
        locs = locals()
        setup_query_dict('station', locs, kwargs)
        url = self._create_url_from_parameters(
            "station", DEFAULT_PARAMETERS['station'], kwargs)
        data_stream = self._download(url)
        data_stream.seek(0, 0)
        if filename:
            # Save the raw server response without parsing it.
            self._write_to_file_object(filename, data_stream)
            data_stream.close()
        else:
            # Pick the inventory parser matching the requested ``format``;
            # for unknown values fall back to read_inventory's own format
            # detection.
            if format is None or format == 'xml':
                inventory = read_inventory(data_stream, format='STATIONXML')
            elif format == 'text':
                inventory = read_inventory(data_stream, format='STATIONTXT')
            else:
                inventory = read_inventory(data_stream)
            data_stream.close()
            return inventory
def get_waveforms(self, network, station, location, channel, starttime,
endtime, quality=None, minimumlength=None,
longestonly=None, filename=None, attach_response=False,
**kwargs):
"""
Query the dataselect service of the client.
>>> client = Client("EARTHSCOPE")
>>> t1 = UTCDateTime("2010-02-27T06:30:00.000")
>>> t2 = t1 + 5
>>> st = client.get_waveforms("IU", "ANMO", "00", "LHZ", t1, t2)
>>> print(st) # doctest: +ELLIPSIS
1 Trace(s) in Stream:
IU.ANMO.00.LHZ | 2010-02-27T06:30:00.069538Z - ... | 1.0 Hz, 5 samples
The services can deal with UNIX style wildcards.
>>> st = client.get_waveforms("IU", "A*", "1?", "LHZ", t1, t2)
>>> print(st) # doctest: +ELLIPSIS
3 Trace(s) in Stream:
IU.ADK.10.LHZ | 2010-02-27T06:30:00.069538Z - ... | 1.0 Hz, 5 samples
IU.AFI.10.LHZ | 2010-02-27T06:30:00.069538Z - ... | 1.0 Hz, 5 samples
IU.ANMO.10.LHZ | 2010-02-27T06:30:00.069538Z - ... | 1.0 Hz, 5 samples
Use ``attach_response=True`` to automatically add response information
to each trace. This can be used to remove response using
:meth:`~obspy.core.stream.Stream.remove_response`.
>>> t = UTCDateTime("2012-12-14T10:36:01.6Z")
>>> st = client.get_waveforms("TA", "E42A", "*", "BH?", t+300, t+400,
... attach_response=True)
>>> st.remove_response(output="VEL") # doctest: +ELLIPSIS
<obspy.core.stream.Stream object at ...>
>>> st.plot() # doctest: +SKIP
.. plot::
from obspy import UTCDateTime
from obspy.clients.fdsn import Client
client = Client("EARTHSCOPE")
t = UTCDateTime("2012-12-14T10:36:01.6Z")
st = client.get_waveforms("TA", "E42A", "*", "BH?", t+300, t+400,
attach_response=True)
st.remove_response(output="VEL")
st.plot()
:type network: str
:param network: Select one or more network codes. Can be SEED network
codes or data center defined codes. Multiple codes are
comma-separated (e.g. ``"IU,TA"``). Wildcards are allowed.
:type station: str
:param station: Select one or more SEED station codes. Multiple codes
are comma-separated (e.g. ``"ANMO,PFO"``). Wildcards are allowed.
:type location: str
:param location: Select one or more SEED location identifiers. Multiple
identifiers are comma-separated (e.g. ``"00,01"``). Wildcards are
allowed.
:type channel: str
:param channel: Select one or more SEED channel codes. Multiple codes
are comma-separated (e.g. ``"BHZ,HHZ"``).
:type starttime: :class:`~obspy.core.utcdatetime.UTCDateTime`
:param starttime: Limit results to time series samples on or after the
specified start time
:type endtime: :class:`~obspy.core.utcdatetime.UTCDateTime`
:param endtime: Limit results to time series samples on or before the
specified end time
:type quality: str, optional
:param quality: Select a specific SEED quality indicator, handling is
data center dependent.
:type minimumlength: float, optional
:param minimumlength: Limit results to continuous data segments of a
minimum length specified in seconds.
:type longestonly: bool, optional
:param longestonly: Limit results to the longest continuous segment per
channel.
:type filename: str or file
:param filename: If given, the downloaded data will be saved there
instead of being parsed to an ObsPy object. Thus it will contain
the raw data from the webservices.
:type attach_response: bool
:param attach_response: Specify whether the station web service should
be used to automatically attach response information to each trace
in the result set. A warning will be shown if a response can not be
found for a channel. Does nothing if output to a file was
specified.
Any additional keyword arguments will be passed to the webservice as
additional arguments. If you pass one of the default parameters and the
webservice does not support it, a warning will be issued. Passing any
non-default parameters that the webservice does not support will raise
an error.
"""
if "dataselect" not in self.services:
msg = "The current client does not have a dataselect service."
raise ValueError(msg)
locs = locals()
setup_query_dict('dataselect', locs, kwargs)
# Special location handling. Convert empty strings to "--".
if "location" in kwargs and not kwargs["location"]:
kwargs["location"] = "--"
url = self._create_url_from_parameters(
"dataselect", DEFAULT_PARAMETERS['dataselect'], kwargs)
# Gzip not worth it for MiniSEED and most likely disabled for this
# route in any case.
data_stream = self._download(url, use_gzip=False)
data_stream.seek(0, 0)
if filename:
self._write_to_file_object(filename, data_stream)
data_stream.close()
else:
st = obspy.read(data_stream, format="MSEED")
data_stream.close()
if attach_response:
self._attach_responses(st)
self._attach_dataselect_url_to_stream(st)
st.trim(starttime, endtime)
return st
def _attach_responses(self, st):
"""
Helper method to fetch response via get_stations() and attach it to
each trace in stream.
"""
netids = {}
for tr in st:
if tr.id not in netids:
netids[tr.id] = (tr.stats.starttime, tr.stats.endtime)
continue
netids[tr.id] = (
min(tr.stats.starttime, netids[tr.id][0]),
max(tr.stats.endtime, netids[tr.id][1]))
inventories = []
for key, value in netids.items():
net, sta, loc, chan = key.split(".")
starttime, endtime = value
try:
inventories.append(self.get_stations(
network=net, station=sta, location=loc, channel=chan,
starttime=starttime, endtime=endtime, level="response"))
except Exception as e:
warnings.warn(str(e))
st.attach_response(inventories)
def get_waveforms_bulk(self, bulk, quality=None, minimumlength=None,
longestonly=None, filename=None,
attach_response=False, **kwargs):
r"""
Query the dataselect service of the client. Bulk request.
Send a bulk request for waveforms to the server. `bulk` can either be
specified as a filename, a file-like object or a string (with
information formatted according to the FDSN standard) or a list of
lists (each specifying network, station, location, channel, starttime
and endtime). See examples and parameter description for more
details.
`bulk` can be provided in the following forms:
(1) As a list of lists. Each list item has to be list of network,
station, location, channel, starttime and endtime.
(2) As a valid request string/file as defined in the
`FDSNWS documentation <https://www.fdsn.org/webservices/>`_.
The request information can be provided as a..
- a string containing the request information
- a string with the path to a local file with the request
- an open file handle (or file-like object) with the request
>>> client = Client("EARTHSCOPE")
>>> t1 = UTCDateTime("2010-02-27T06:30:00.000")
>>> t2 = t1 + 1
>>> t3 = t1 + 3
>>> bulk = [("IU", "ANMO", "*", "BHZ", t1, t2),
... ("IU", "AFI", "1?", "BHE", t1, t3),
... ("GR", "GRA1", "*", "BH*", t2, t3)]
>>> st = client.get_waveforms_bulk(bulk)
>>> print(st) # doctest: +ELLIPSIS
5 Trace(s) in Stream:
GR.GRA1..BHE | 2010-02-27T06:30:01... | 20.0 Hz, 40 samples
GR.GRA1..BHN | 2010-02-27T06:30:01... | 20.0 Hz, 40 samples
GR.GRA1..BHZ | 2010-02-27T06:30:01... | 20.0 Hz, 40 samples
IU.ANMO.00.BHZ | 2010-02-27T06:30:00... | 20.0 Hz, 20 samples
IU.ANMO.10.BHZ | 2010-02-27T06:30:00... | 40.0 Hz, 40 samples
>>> bulk = 'quality=B\n' + \
... 'longestonly=false\n' + \
... 'IU ANMO * BHZ 2010-02-27 2010-02-27T00:00:02\n' + \
... 'IU AFI 1? BHE 2010-02-27 2010-02-27T00:00:04\n' + \
... 'GR GRA1 * BH? 2010-02-27 2010-02-27T00:00:02\n'
>>> st = client.get_waveforms_bulk(bulk)
>>> print(st) # doctest: +ELLIPSIS
5 Trace(s) in Stream:
GR.GRA1..BHE | 2010-02-27T00:00:00... | 20.0 Hz, 40 samples
GR.GRA1..BHN | 2010-02-27T00:00:00... | 20.0 Hz, 40 samples
GR.GRA1..BHZ | 2010-02-27T00:00:00... | 20.0 Hz, 40 samples
IU.ANMO.00.BHZ | 2010-02-27T00:00:00... | 20.0 Hz, 40 samples
IU.ANMO.10.BHZ | 2010-02-27T00:00:00... | 40.0 Hz, 80 samples
>>> st = client.get_waveforms_bulk("/tmp/request.txt") \
... # doctest: +SKIP
>>> print(st) # doctest: +SKIP
5 Trace(s) in Stream:
GR.GRA1..BHE | 2010-02-27T00:00:00... | 20.0 Hz, 40 samples
GR.GRA1..BHN | 2010-02-27T00:00:00... | 20.0 Hz, 40 samples
GR.GRA1..BHZ | 2010-02-27T00:00:00... | 20.0 Hz, 40 samples
IU.ANMO.00.BHZ | 2010-02-27T00:00:00... | 20.0 Hz, 40 samples
IU.ANMO.10.BHZ | 2010-02-27T00:00:00... | 40.0 Hz, 80 samples
>>> t = UTCDateTime("2012-12-14T10:36:01.6Z")
>>> t1 = t + 300
>>> t2 = t + 400
>>> bulk = [("TA", "S42A", "*", "BHZ", t1, t2),
... ("TA", "W42A", "*", "BHZ", t1, t2),
... ("TA", "Z42A", "*", "BHZ", t1, t2)]
>>> st = client.get_waveforms_bulk(bulk, attach_response=True)
>>> st.remove_response(output="VEL") # doctest: +ELLIPSIS
<obspy.core.stream.Stream object at ...>
>>> st.plot() # doctest: +SKIP
.. plot::
from obspy import UTCDateTime
from obspy.clients.fdsn import Client
client = Client("EARTHSCOPE")
t = UTCDateTime("2012-12-14T10:36:01.6Z")
t1 = t + 300
t2 = t + 400
bulk = [("TA", "S42A", "*", "BHZ", t1, t2),
("TA", "W42A", "*", "BHZ", t1, t2),
("TA", "Z42A", "*", "BHZ", t1, t2)]
st = client.get_waveforms_bulk(bulk, attach_response=True)
st.remove_response(output="VEL")
st.plot()