diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java index 6fdc60fcb3394..8a12016025c3e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java @@ -98,7 +98,6 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.index.query.QueryBuilders.matchQuery; -import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -725,22 +724,26 @@ public void testFieldCaps() throws Exception { // end::field-caps-execute // tag::field-caps-response - assertThat(response.get().keySet(), contains("user")); - Map userResponse = response.getField("user"); - - assertThat(userResponse.keySet(), containsInAnyOrder("keyword", "text")); // <1> + Map userResponse = response.getField("user"); // <1> FieldCapabilities textCapabilities = userResponse.get("keyword"); - assertTrue(textCapabilities.isSearchable()); - assertFalse(textCapabilities.isAggregatable()); + boolean isSearchable = textCapabilities.isSearchable(); + boolean isAggregatable = textCapabilities.isAggregatable(); - assertArrayEquals(textCapabilities.indices(), // <2> - new String[]{"authors", "contributors"}); - assertNull(textCapabilities.nonSearchableIndices()); // <3> - assertArrayEquals(textCapabilities.nonAggregatableIndices(), // <4> - new String[]{"authors"}); + String[] indices = textCapabilities.indices(); // <2> + String[] nonSearchableIndices = textCapabilities.nonSearchableIndices(); // <3> + String[] nonAggregatableIndices = textCapabilities.nonAggregatableIndices();//<4> // end::field-caps-response + assertThat(userResponse.keySet(), containsInAnyOrder("keyword", "text")); + + assertTrue(isSearchable); + assertFalse(isAggregatable); + + assertArrayEquals(indices, new String[]{"authors", "contributors"}); + assertNull(nonSearchableIndices); + assertArrayEquals(nonAggregatableIndices, new String[]{"authors"}); + // tag::field-caps-execute-listener ActionListener listener = new ActionListener() { @Override diff --git a/docs/CHANGELOG.asciidoc b/docs/CHANGELOG.asciidoc deleted file mode 100644 index 6eb26fde8f9f8..0000000000000 --- a/docs/CHANGELOG.asciidoc +++ /dev/null @@ -1,257 +0,0 @@ -[[es-release-notes]] -= {es} Release Notes - -[partintro] --- -// To add a release, copy and paste the template text -// and add a link to the new section. Note that release subheads must -// be floated and sections cannot be empty. - -// Use these for links to issue and pulls. Note issues and pulls redirect one to -// each other on Github, so don't worry too much on using the right prefix. -:issue: https://github.com/elastic/elasticsearch/issues/ -:pull: https://github.com/elastic/elasticsearch/pull/ - -This section summarizes the changes in each release. - -* <> -* <> -* <> - --- - -//// -// To add a release, copy and paste the following text, uncomment the relevant -// sections, and add a link to the new section in the list of releases at the -// top of the page. Note that release subheads must be floated and sections -// cannot be empty. 
-// TEMPLATE: - -// [[release-notes-n.n.n]] -// == {es} n.n.n - -//[float] -[[breaking-n.n.n]] -//=== Breaking Changes - -//[float] -//=== Breaking Java Changes - -//[float] -//=== Deprecations - -//[float] -//=== New Features - -//[float] -//=== Enhancements - -//[float] -//=== Bug Fixes - -//[float] -//=== Regressions - -//[float] -//=== Known Issues - -//// - -[[release-notes-7.0.0]] -== {es} 7.0.0 - -coming[7.0.0] - -[float] -[[breaking-7.0.0]] -=== Breaking Changes - -<> ({pull}29609[#29609]) - -<> ({pull}29004[#29004]) -<> ({pull}29635[#29635]) - -<> ({pull}30185[#30185]) - -Machine Learning:: -* The `max_running_jobs` node property is removed in this release. Use the -`xpack.ml.max_open_jobs` setting instead. For more information, see <>. - -* <> ({pull}29601[#29601]) - -//[float] -//=== Breaking Java Changes - -[float] -=== Deprecations -Monitoring:: -* The `xpack.monitoring.collection.interval` setting can no longer be set to `-1` -to disable monitoring data collection. Use `xpack.monitoring.collection.enabled` -and set it to `false` (its default), which was added in 6.3.0. - -Security:: -* The fields returned as part of the mappings section by get index, get -mappings, get field mappings, and field capabilities API are now only the -ones that the user is authorized to access in case field level security is enabled. - -//[float] -//=== New Features - -//[float] -//=== Enhancements - -[float] -=== Bug Fixes - -Use date format in `date_range` mapping before fallback to default ({pull}29310[#29310]) - -Fix NPE in 'more_like_this' when field has zero tokens ({pull}30365[#30365]) - -Fixed prerelease version of elasticsearch in the `deb` package to sort before GA versions -({pull}29000[#29000]) - -Rollup:: -* Validate timezone in range queries to ensure they match the selected job when -searching ({pull}30338[#30338]) - -SQL:: -* Fix parsing of Dates containing milliseconds ({pull}30419[#30419]) - -[float] -=== Regressions -Fail snapshot operations early when creating or deleting a snapshot on a repository that has been -written to by an older Elasticsearch after writing to it with a newer Elasticsearch version. ({pull}30140[#30140]) - -Fix NPE when CumulativeSum agg encounters null value/empty bucket ({pull}29641[#29641]) -Do not fail snapshot when deleting a missing snapshotted file ({pull}30332[#30332]) - -//[float] -//=== Regressions - -//[float] -//=== Known Issues - -[[release-notes-6.4.0]] -== {es} 6.4.0 - -coming[6.4.0] - -//[float] -[[breaking-6.4.0]] -//=== Breaking Changes - -//[float] -//=== Breaking Java Changes - -[float] -=== Deprecations - -Deprecated multi-argument versions of the request methods in the RestClient. -Prefer the "Request" object flavored methods. ({pull}30315[#30315]) - -[float] -=== New Features - -The new <> field allows to know which fields -got ignored at index time because of the <> -option. ({pull}30140[#29658]) - -A new analysis plugin called `analysis_nori` that exposes the Lucene Korean -analysis module. ({pull}30397[#30397]) - -[float] -=== Enhancements - -{ref-64}/breaking_64_api_changes.html#copy-source-settings-on-resize[Allow -copying source settings on index resize operations] ({pull}30255[#30255], {pull}30404[#30404]) - -Added new "Request" object flavored request methods in the RestClient. Prefer -these instead of the multi-argument versions. ({pull}29623[#29623]) - -Added `setJsonEntity` to `Request` object so it is marginally easier to send JSON. 
({pull}30447[#30447]) -Watcher HTTP client used in watches now allows more parallel connections to the -same endpoint and evicts long running connections. ({pull}30130[#30130]) - -The cluster state listener to decide if watcher should be -stopped/started/paused now runs far less code in an executor but is more -synchronous and predictable. Also the trigger engine thread is only started on -data nodes. And the Execute Watch API can be triggered regardless is watcher is -started or stopped. ({pull}30118[#30118]) - -Added put index template API to the high level rest client ({pull}30400[#30400]) - -Add ability to filter coordinating-only nodes when interacting with cluster -APIs. ({pull}30313[#30313]) - -[float] -=== Bug Fixes - -Use date format in `date_range` mapping before fallback to default ({pull}29310[#29310]) - -Fix NPE in 'more_like_this' when field has zero tokens ({pull}30365[#30365]) - -Do not ignore request analysis/similarity settings on index resize operations when the source index already contains such settings ({pull}30216[#30216]) - -Fix NPE when CumulativeSum agg encounters null value/empty bucket ({pull}29641[#29641]) - -Machine Learning:: - -* Account for gaps in data counts after job is reopened ({pull}30294[#30294]) - -Add validation that geohashes are not empty and don't contain unsupported characters ({pull}30376[#30376]) - -Rollup:: -* Validate timezone in range queries to ensure they match the selected job when -searching ({pull}30338[#30338]) - -SQL:: -* Fix parsing of Dates containing milliseconds ({pull}30419[#30419]) - -Allocation:: - -Auto-expand replicas when adding or removing nodes to prevent shard copies from -being dropped and resynced when a data node rejoins the cluster ({pull}30423[#30423]) - -//[float] -//=== Regressions - -//[float] -//=== Known Issues - -[[release-notes-6.3.1]] -== Elasticsearch version 6.3.1 - -coming[6.3.1] - -//[float] -[[breaking-6.3.1]] -//=== Breaking Changes - -//[float] -//=== Breaking Java Changes - -//[float] -//=== Deprecations - -//[float] -//=== New Features - -//[float] -//=== Enhancements - -[float] -=== Bug Fixes - -Reduce the number of object allocations made by {security} when resolving the indices and aliases for a request ({pull}30180[#30180]) - -Respect accept header on requests with no handler ({pull}30383[#30383]) - -SQL:: -* Fix parsing of Dates containing milliseconds ({pull}30419[#30419]) - -//[float] -//=== Regressions - -//[float] -//=== Known Issues diff --git a/docs/java-rest/high-level/search/field-caps.asciidoc b/docs/java-rest/high-level/search/field-caps.asciidoc index fef30f629ca61..1f5b10ad034df 100644 --- a/docs/java-rest/high-level/search/field-caps.asciidoc +++ b/docs/java-rest/high-level/search/field-caps.asciidoc @@ -76,7 +76,7 @@ information about how each index contributes to the field's capabilities. -------------------------------------------------- include-tagged::{doc-tests}/SearchDocumentationIT.java[field-caps-response] -------------------------------------------------- -<1> The `user` field has two possible types, `keyword` and `text`. -<2> This field only has type `keyword` in the `authors` and `contributors` indices. -<3> Null, since the field is searchable in all indices for which it has the `keyword` type. -<4> The `user` field is not aggregatable in the `authors` index. \ No newline at end of file +<1> A map with entries for the field's possible types, in this case `keyword` and `text`. +<2> All indices where the `user` field has type `keyword`. 
+<3> The subset of these indices where the `user` field isn't searchable, or null if it's always searchable. +<4> Another subset of these indices where the `user` field isn't aggregatable, or null if it's always aggregatable. \ No newline at end of file diff --git a/docs/plugins/repository-gcs.asciidoc b/docs/plugins/repository-gcs.asciidoc index a51200fb7fef0..8cf2bc0a73c92 100644 --- a/docs/plugins/repository-gcs.asciidoc +++ b/docs/plugins/repository-gcs.asciidoc @@ -84,11 +84,7 @@ A service account file looks like this: "private_key_id": "...", "private_key": "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n", "client_email": "service-account-for-your-repository@your-project-id.iam.gserviceaccount.com", - "client_id": "...", - "auth_uri": "https://accounts.google.com/o/oauth2/auth", - "token_uri": "https://accounts.google.com/o/oauth2/token", - "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", - "client_x509_cert_url": "..." + "client_id": "..." } ---- // NOTCONSOLE @@ -178,6 +174,12 @@ are marked as `Secure`. a custom name can be useful to authenticate your cluster when requests statistics are logged in the Google Cloud Platform. Default to `repository-gcs` +`project_id`:: + + The Google Cloud project id. This will be automatically inferred from the credentials file but + can be specified explicitly. For example, it can be used to switch between projects when the + same credentials are usable for both the production and the development projects. + [[repository-gcs-repository]] ==== Repository Settings diff --git a/docs/reference/index-modules/translog.asciidoc b/docs/reference/index-modules/translog.asciidoc index b1eb36e346d9f..bed19bd5be1df 100644 --- a/docs/reference/index-modules/translog.asciidoc +++ b/docs/reference/index-modules/translog.asciidoc @@ -108,8 +108,8 @@ provide a command-line tool for this, `elasticsearch-translog`. [WARNING] The `elasticsearch-translog` tool should *not* be run while Elasticsearch is -running, and you will permanently lose the documents that were contained only in -the translog! +running. If you attempt to run this tool while Elasticsearch is running, you +will permanently lose the documents that were contained only in the translog! In order to run the `elasticsearch-translog` tool, specify the `truncate` subcommand as well as the directory for the corrupted translog with the `-d` diff --git a/docs/reference/index-shared4.asciidoc b/docs/reference/index-shared4.asciidoc index 3dfb3b641890f..3dc9e4f5e07cf 100644 --- a/docs/reference/index-shared4.asciidoc +++ b/docs/reference/index-shared4.asciidoc @@ -7,4 +7,4 @@ include::glossary.asciidoc[] include::release-notes/highlights.asciidoc[] -include::{docdir}/../CHANGELOG.asciidoc[] \ No newline at end of file +include::release-notes.asciidoc[] \ No newline at end of file diff --git a/docs/reference/release-notes/highlights-7.0.0.asciidoc b/docs/reference/release-notes/highlights-7.0.0.asciidoc index 1ea3d3fa3291e..d01d543c8257e 100644 --- a/docs/reference/release-notes/highlights-7.0.0.asciidoc +++ b/docs/reference/release-notes/highlights-7.0.0.asciidoc @@ -6,4 +6,4 @@ coming[7.0.0] -See also <> and <>. +See also <> and <>.
diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc index 1223e9a685a27..0ab4106c22c1f 100644 --- a/docs/reference/release-notes/highlights.asciidoc +++ b/docs/reference/release-notes/highlights.asciidoc @@ -1,5 +1,8 @@ [[release-highlights]] = {es} Release Highlights +++++ +Release Highlights +++++ [partintro] -- diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index e164a8553f81f..07ef4b4be5e62 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -22,38 +22,207 @@ esplugin { classname 'org.elasticsearch.repositories.gcs.GoogleCloudStoragePlugin' } -versions << [ - 'google': '1.23.0', -] - dependencies { - compile "com.google.apis:google-api-services-storage:v1-rev115-${versions.google}" - compile "com.google.api-client:google-api-client:${versions.google}" - compile "com.google.oauth-client:google-oauth-client:${versions.google}" - compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" - compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" - compile "commons-logging:commons-logging:${versions.commonslogging}" - compile "commons-codec:commons-codec:${versions.commonscodec}" - compile "com.google.http-client:google-http-client:${versions.google}" - compile "com.google.http-client:google-http-client-jackson2:${versions.google}" + compile 'com.google.cloud:google-cloud-storage:1.28.0' + compile 'com.google.cloud:google-cloud-core:1.28.0' + compile 'com.google.cloud:google-cloud-core-http:1.28.0' + compile 'com.google.auth:google-auth-library-oauth2-http:0.9.1' + compile 'com.google.auth:google-auth-library-credentials:0.9.1' + compile 'com.google.oauth-client:google-oauth-client:1.23.0' + compile 'com.google.http-client:google-http-client:1.23.0' + compile 'com.google.http-client:google-http-client-jackson:1.23.0' + compile 'com.google.http-client:google-http-client-jackson2:1.23.0' + compile 'com.google.http-client:google-http-client-appengine:1.23.0' + compile 'com.google.api-client:google-api-client:1.23.0' + compile 'com.google.api:gax:1.25.0' + compile 'com.google.api:gax-httpjson:0.40.0' + compile 'com.google.api:api-common:1.5.0' + compile 'com.google.api.grpc:proto-google-common-protos:1.8.0' + compile 'com.google.guava:guava:20.0' + compile 'com.google.apis:google-api-services-storage:v1-rev115-1.23.0' + compile 'org.codehaus.jackson:jackson-core-asl:1.9.13' + compile 'io.grpc:grpc-context:1.9.0' + compile 'io.opencensus:opencensus-api:0.11.1' + compile 'io.opencensus:opencensus-contrib-http-util:0.11.1' + compile 'org.threeten:threetenbp:1.3.6' } dependencyLicenses { - mapping from: /google-.*/, to: 'google' + mapping from: /google-cloud-.*/, to: 'google-cloud' + mapping from: /google-auth-.*/, to: 'google-auth' + mapping from: /google-http-.*/, to: 'google-http' + mapping from: /opencensus.*/, to: 'opencensus' } thirdPartyAudit.excludes = [ + // uses internal java api: sun.misc.Unsafe + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', 
+ 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', // classes are missing - 'com.google.common.base.Splitter', - 'com.google.common.collect.Lists', - 'javax.servlet.ServletContextEvent', - 'javax.servlet.ServletContextListener', - 'org.apache.avalon.framework.logger.Logger', - 'org.apache.log.Hierarchy', - 'org.apache.log.Logger', + 'com.google.appengine.api.datastore.Blob', + 'com.google.appengine.api.datastore.DatastoreService', + 'com.google.appengine.api.datastore.DatastoreServiceFactory', + 'com.google.appengine.api.datastore.Entity', + 'com.google.appengine.api.datastore.Key', + 'com.google.appengine.api.datastore.KeyFactory', + 'com.google.appengine.api.datastore.PreparedQuery', + 'com.google.appengine.api.datastore.Query', + 'com.google.appengine.api.memcache.Expiration', + 'com.google.appengine.api.memcache.MemcacheService', + 'com.google.appengine.api.memcache.MemcacheServiceFactory', + 'com.google.appengine.api.urlfetch.FetchOptions$Builder', + 'com.google.appengine.api.urlfetch.FetchOptions', + 'com.google.appengine.api.urlfetch.HTTPHeader', + 'com.google.appengine.api.urlfetch.HTTPMethod', + 'com.google.appengine.api.urlfetch.HTTPRequest', + 'com.google.appengine.api.urlfetch.HTTPResponse', + 'com.google.appengine.api.urlfetch.URLFetchService', + 'com.google.appengine.api.urlfetch.URLFetchServiceFactory', + 'com.google.gson.Gson', + 'com.google.gson.GsonBuilder', + 'com.google.gson.TypeAdapter', + 'com.google.gson.stream.JsonReader', + 'com.google.gson.stream.JsonWriter', + 'com.google.iam.v1.Binding$Builder', + 'com.google.iam.v1.Binding', + 'com.google.iam.v1.Policy$Builder', + 'com.google.iam.v1.Policy', + 'com.google.protobuf.AbstractMessageLite$Builder', + 'com.google.protobuf.AbstractParser', + 'com.google.protobuf.Any$Builder', + 'com.google.protobuf.Any', + 'com.google.protobuf.AnyOrBuilder', + 'com.google.protobuf.AnyProto', + 'com.google.protobuf.Api$Builder', + 'com.google.protobuf.Api', + 'com.google.protobuf.ApiOrBuilder', + 'com.google.protobuf.ApiProto', + 'com.google.protobuf.ByteString', + 'com.google.protobuf.CodedInputStream', + 'com.google.protobuf.CodedOutputStream', + 'com.google.protobuf.DescriptorProtos', + 'com.google.protobuf.Descriptors$Descriptor', + 'com.google.protobuf.Descriptors$EnumDescriptor', + 'com.google.protobuf.Descriptors$EnumValueDescriptor', + 'com.google.protobuf.Descriptors$FieldDescriptor', + 'com.google.protobuf.Descriptors$FileDescriptor$InternalDescriptorAssigner', + 'com.google.protobuf.Descriptors$FileDescriptor', + 'com.google.protobuf.Descriptors$OneofDescriptor', + 'com.google.protobuf.Duration$Builder', + 'com.google.protobuf.Duration', + 'com.google.protobuf.DurationOrBuilder', + 'com.google.protobuf.DurationProto', + 'com.google.protobuf.EmptyProto', + 'com.google.protobuf.Enum$Builder', + 'com.google.protobuf.Enum', + 'com.google.protobuf.EnumOrBuilder', + 'com.google.protobuf.ExtensionRegistry', + 'com.google.protobuf.ExtensionRegistryLite', + 'com.google.protobuf.FloatValue$Builder', + 'com.google.protobuf.FloatValue', + 'com.google.protobuf.FloatValueOrBuilder', + 'com.google.protobuf.GeneratedMessage$GeneratedExtension', + 'com.google.protobuf.GeneratedMessage', + 'com.google.protobuf.GeneratedMessageV3$Builder', + 'com.google.protobuf.GeneratedMessageV3$BuilderParent', + 'com.google.protobuf.GeneratedMessageV3$FieldAccessorTable', + 'com.google.protobuf.GeneratedMessageV3', 
+ 'com.google.protobuf.Internal$EnumLite', + 'com.google.protobuf.Internal$EnumLiteMap', + 'com.google.protobuf.Internal', + 'com.google.protobuf.InvalidProtocolBufferException', + 'com.google.protobuf.LazyStringArrayList', + 'com.google.protobuf.LazyStringList', + 'com.google.protobuf.MapEntry$Builder', + 'com.google.protobuf.MapEntry', + 'com.google.protobuf.MapField', + 'com.google.protobuf.Message', + 'com.google.protobuf.MessageOrBuilder', + 'com.google.protobuf.Parser', + 'com.google.protobuf.ProtocolMessageEnum', + 'com.google.protobuf.ProtocolStringList', + 'com.google.protobuf.RepeatedFieldBuilderV3', + 'com.google.protobuf.SingleFieldBuilderV3', + 'com.google.protobuf.Struct$Builder', + 'com.google.protobuf.Struct', + 'com.google.protobuf.StructOrBuilder', + 'com.google.protobuf.StructProto', + 'com.google.protobuf.Timestamp$Builder', + 'com.google.protobuf.Timestamp', + 'com.google.protobuf.TimestampProto', + 'com.google.protobuf.Type$Builder', + 'com.google.protobuf.Type', + 'com.google.protobuf.TypeOrBuilder', + 'com.google.protobuf.TypeProto', + 'com.google.protobuf.UInt32Value$Builder', + 'com.google.protobuf.UInt32Value', + 'com.google.protobuf.UInt32ValueOrBuilder', + 'com.google.protobuf.UnknownFieldSet$Builder', + 'com.google.protobuf.UnknownFieldSet', + 'com.google.protobuf.WireFormat$FieldType', + 'com.google.protobuf.WrappersProto', + 'com.google.protobuf.util.Timestamps', + 'org.apache.http.ConnectionReuseStrategy', + 'org.apache.http.Header', + 'org.apache.http.HttpEntity', + 'org.apache.http.HttpEntityEnclosingRequest', + 'org.apache.http.HttpHost', + 'org.apache.http.HttpRequest', + 'org.apache.http.HttpResponse', + 'org.apache.http.HttpVersion', + 'org.apache.http.RequestLine', + 'org.apache.http.StatusLine', + 'org.apache.http.client.AuthenticationHandler', + 'org.apache.http.client.HttpClient', + 'org.apache.http.client.HttpRequestRetryHandler', + 'org.apache.http.client.RedirectHandler', + 'org.apache.http.client.RequestDirector', + 'org.apache.http.client.UserTokenHandler', + 'org.apache.http.client.methods.HttpDelete', + 'org.apache.http.client.methods.HttpEntityEnclosingRequestBase', + 'org.apache.http.client.methods.HttpGet', + 'org.apache.http.client.methods.HttpHead', + 'org.apache.http.client.methods.HttpOptions', + 'org.apache.http.client.methods.HttpPost', + 'org.apache.http.client.methods.HttpPut', + 'org.apache.http.client.methods.HttpRequestBase', + 'org.apache.http.client.methods.HttpTrace', + 'org.apache.http.conn.ClientConnectionManager', + 'org.apache.http.conn.ConnectionKeepAliveStrategy', + 'org.apache.http.conn.params.ConnManagerParams', + 'org.apache.http.conn.params.ConnPerRouteBean', + 'org.apache.http.conn.params.ConnRouteParams', + 'org.apache.http.conn.routing.HttpRoutePlanner', + 'org.apache.http.conn.scheme.PlainSocketFactory', + 'org.apache.http.conn.scheme.Scheme', + 'org.apache.http.conn.scheme.SchemeRegistry', + 'org.apache.http.conn.ssl.SSLSocketFactory', + 'org.apache.http.conn.ssl.X509HostnameVerifier', + 'org.apache.http.entity.AbstractHttpEntity', + 'org.apache.http.impl.client.DefaultHttpClient', + 'org.apache.http.impl.client.DefaultHttpRequestRetryHandler', + 'org.apache.http.impl.conn.ProxySelectorRoutePlanner', + 'org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager', + 'org.apache.http.message.BasicHttpResponse', + 'org.apache.http.params.BasicHttpParams', + 'org.apache.http.params.HttpConnectionParams', + 'org.apache.http.params.HttpParams', + 'org.apache.http.params.HttpProtocolParams', + 
'org.apache.http.protocol.HttpContext', + 'org.apache.http.protocol.HttpProcessor', + 'org.apache.http.protocol.HttpRequestExecutor' ] check { // also execute the QA tests when testing the plugin dependsOn 'qa:google-cloud-storage:check' -} \ No newline at end of file +} diff --git a/plugins/repository-gcs/licenses/api-common-1.5.0.jar.sha1 b/plugins/repository-gcs/licenses/api-common-1.5.0.jar.sha1 new file mode 100644 index 0000000000000..64435356e5eaf --- /dev/null +++ b/plugins/repository-gcs/licenses/api-common-1.5.0.jar.sha1 @@ -0,0 +1 @@ +7e537338d40a57ad469239acb6d828fa544fb52b \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/api-common-LICENSE.txt b/plugins/repository-gcs/licenses/api-common-LICENSE.txt new file mode 100644 index 0000000000000..6d16b6578a2f0 --- /dev/null +++ b/plugins/repository-gcs/licenses/api-common-LICENSE.txt @@ -0,0 +1,27 @@ +Copyright 2016, Google Inc. +All rights reserved. +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/plugins/repository-gcs/licenses/api-common-NOTICE.txt b/plugins/repository-gcs/licenses/api-common-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 b/plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 deleted file mode 100644 index 3fe8682a1b0f9..0000000000000 --- a/plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4b95f4897fa13f2cd904aee711aeafc0c5295cd8 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 b/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 deleted file mode 100644 index 5b8f029e58293..0000000000000 --- a/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/gax-1.25.0.jar.sha1 b/plugins/repository-gcs/licenses/gax-1.25.0.jar.sha1 new file mode 100644 index 0000000000000..594177047c140 --- /dev/null +++ b/plugins/repository-gcs/licenses/gax-1.25.0.jar.sha1 @@ -0,0 +1 @@ +36ab73c0b5d4a67447eb89a3174cc76ced150bd1 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/gax-LICENSE.txt b/plugins/repository-gcs/licenses/gax-LICENSE.txt new file mode 100644 index 0000000000000..267561bb386de --- /dev/null +++ b/plugins/repository-gcs/licenses/gax-LICENSE.txt @@ -0,0 +1,27 @@ +Copyright 2016, Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/plugins/repository-gcs/licenses/gax-NOTICE.txt b/plugins/repository-gcs/licenses/gax-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/gax-httpjson-0.40.0.jar.sha1 b/plugins/repository-gcs/licenses/gax-httpjson-0.40.0.jar.sha1 new file mode 100644 index 0000000000000..c251ea1dd956c --- /dev/null +++ b/plugins/repository-gcs/licenses/gax-httpjson-0.40.0.jar.sha1 @@ -0,0 +1 @@ +cb4bafbfd45b9d24efbb6138a31e37918fac015f \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/gax-httpjson-LICENSE.txt b/plugins/repository-gcs/licenses/gax-httpjson-LICENSE.txt new file mode 100644 index 0000000000000..267561bb386de --- /dev/null +++ b/plugins/repository-gcs/licenses/gax-httpjson-LICENSE.txt @@ -0,0 +1,27 @@ +Copyright 2016, Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/plugins/repository-gcs/licenses/gax-httpjson-NOTICE.txt b/plugins/repository-gcs/licenses/gax-httpjson-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/google-api-client-LICENSE.txt b/plugins/repository-gcs/licenses/google-api-client-LICENSE.txt new file mode 100644 index 0000000000000..4eedc0116add7 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-api-client-LICENSE.txt @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/google-api-client-NOTICE.txt b/plugins/repository-gcs/licenses/google-api-client-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-LICENSE.txt b/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-LICENSE.txt new file mode 100644 index 0000000000000..4eedc0116add7 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-LICENSE.txt @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-NOTICE.txt b/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/google-auth-LICENSE.txt b/plugins/repository-gcs/licenses/google-auth-LICENSE.txt new file mode 100644 index 0000000000000..12edf23c6711f --- /dev/null +++ b/plugins/repository-gcs/licenses/google-auth-LICENSE.txt @@ -0,0 +1,28 @@ +Copyright 2014, Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/plugins/repository-gcs/licenses/google-auth-NOTICE.txt b/plugins/repository-gcs/licenses/google-auth-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.1.jar.sha1 b/plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.1.jar.sha1 new file mode 100644 index 0000000000000..0922a53d2e356 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.1.jar.sha1 @@ -0,0 +1 @@ +25e0f45f3b3d1b4fccc8944845e51a7a4f359652 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.1.jar.sha1 b/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.1.jar.sha1 new file mode 100644 index 0000000000000..100a44c187218 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.1.jar.sha1 @@ -0,0 +1 @@ +c0fe3a39b0f28d59de1986b3c50f018cd7cb9ec2 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-LICENSE.txt b/plugins/repository-gcs/licenses/google-cloud-LICENSE.txt new file mode 100644 index 0000000000000..4eedc0116add7 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-LICENSE.txt @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/google-cloud-NOTICE.txt b/plugins/repository-gcs/licenses/google-cloud-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/google-cloud-core-1.28.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-1.28.0.jar.sha1 new file mode 100644 index 0000000000000..071533f227839 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-core-1.28.0.jar.sha1 @@ -0,0 +1 @@ +c0e88c78ce17c92d76bf46345faf3fa68833b216 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-core-http-1.28.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-http-1.28.0.jar.sha1 new file mode 100644 index 0000000000000..fed3fc257c32c --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-core-http-1.28.0.jar.sha1 @@ -0,0 +1 @@ +7b4559a9513abd98da50958c56a10f8ae00cb0f7 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-storage-1.28.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-storage-1.28.0.jar.sha1 new file mode 100644 index 0000000000000..f49152ea05646 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-storage-1.28.0.jar.sha1 @@ -0,0 +1 @@ +226019ae816b42c59f1b06999aeeb73722b87200 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-LICENSE.txt b/plugins/repository-gcs/licenses/google-http-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/google-LICENSE.txt rename to plugins/repository-gcs/licenses/google-http-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/google-http-NOTICE.txt b/plugins/repository-gcs/licenses/google-http-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/google-http-client-appengine-1.23.0.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-appengine-1.23.0.jar.sha1 new file mode 100644 index 0000000000000..823c3a85089a5 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-http-client-appengine-1.23.0.jar.sha1 @@ -0,0 +1 @@ +0eda0d0f758c1cc525866e52e1226c4eb579d130 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-jackson-1.23.0.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-jackson-1.23.0.jar.sha1 new file mode 100644 index 0000000000000..85ba0ab798d05 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-http-client-jackson-1.23.0.jar.sha1 @@ -0,0 +1 @@ +a72ea3a197937ef63a893e73df312dac0d813663 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-oauth-client-LICENSE.txt b/plugins/repository-gcs/licenses/google-oauth-client-LICENSE.txt new file mode 100644 index 0000000000000..12edf23c6711f --- /dev/null +++ b/plugins/repository-gcs/licenses/google-oauth-client-LICENSE.txt @@ -0,0 +1,28 @@ +Copyright 2014, Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + + * Neither the name of Google Inc. 
nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/plugins/repository-gcs/licenses/google-oauth-client-NOTICE.txt b/plugins/repository-gcs/licenses/google-oauth-client-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/grpc-context-1.9.0.jar.sha1 b/plugins/repository-gcs/licenses/grpc-context-1.9.0.jar.sha1 new file mode 100644 index 0000000000000..02bac0e492074 --- /dev/null +++ b/plugins/repository-gcs/licenses/grpc-context-1.9.0.jar.sha1 @@ -0,0 +1 @@ +28b0836f48c9705abf73829bbc536dba29a1329a \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/commons-codec-LICENSE.txt b/plugins/repository-gcs/licenses/grpc-context-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/commons-codec-LICENSE.txt rename to plugins/repository-gcs/licenses/grpc-context-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/grpc-context-NOTICE.txt b/plugins/repository-gcs/licenses/grpc-context-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/guava-20.0.jar.sha1 b/plugins/repository-gcs/licenses/guava-20.0.jar.sha1 new file mode 100644 index 0000000000000..7b6ae09060b29 --- /dev/null +++ b/plugins/repository-gcs/licenses/guava-20.0.jar.sha1 @@ -0,0 +1 @@ +89507701249388e1ed5ddcf8c41f4ce1be7831ef \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/guava-LICENSE.txt b/plugins/repository-gcs/licenses/guava-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/repository-gcs/licenses/guava-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/guava-NOTICE.txt b/plugins/repository-gcs/licenses/guava-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 b/plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 deleted file mode 100644 index 6937112a09fb6..0000000000000 --- a/plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -733db77aa8d9b2d68015189df76ab06304406e50 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 b/plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 deleted file mode 100644 index 581726601745b..0000000000000 --- a/plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e7501a1b34325abb00d17dde96150604a0658b54 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/jackson-core-asl-1.9.13.jar.sha1 b/plugins/repository-gcs/licenses/jackson-core-asl-1.9.13.jar.sha1 new file mode 100644 index 0000000000000..c5016bf828d60 --- /dev/null +++ b/plugins/repository-gcs/licenses/jackson-core-asl-1.9.13.jar.sha1 @@ -0,0 +1 @@ +3c304d70f42f832e0a86d45bd437f692129299a4 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/jackson-core-asl-LICENSE.txt b/plugins/repository-gcs/licenses/jackson-core-asl-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/repository-gcs/licenses/jackson-core-asl-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-gcs/licenses/jackson-core-asl-NOTICE.txt b/plugins/repository-gcs/licenses/jackson-core-asl-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/old/commons-codec-LICENSE.txt b/plugins/repository-gcs/licenses/old/commons-codec-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/repository-gcs/licenses/old/commons-codec-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/commons-codec-NOTICE.txt b/plugins/repository-gcs/licenses/old/commons-codec-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/commons-codec-NOTICE.txt rename to plugins/repository-gcs/licenses/old/commons-codec-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/commons-logging-LICENSE.txt b/plugins/repository-gcs/licenses/old/commons-logging-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/commons-logging-LICENSE.txt rename to plugins/repository-gcs/licenses/old/commons-logging-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/commons-logging-NOTICE.txt b/plugins/repository-gcs/licenses/old/commons-logging-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/commons-logging-NOTICE.txt rename to plugins/repository-gcs/licenses/old/commons-logging-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/old/google-LICENSE.txt b/plugins/repository-gcs/licenses/old/google-LICENSE.txt new file mode 100644 index 0000000000000..980a15ac24eeb --- /dev/null +++ b/plugins/repository-gcs/licenses/old/google-LICENSE.txt @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/google-NOTICE.txt b/plugins/repository-gcs/licenses/old/google-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/google-NOTICE.txt rename to plugins/repository-gcs/licenses/old/google-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/httpclient-LICENSE.txt b/plugins/repository-gcs/licenses/old/httpclient-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/httpclient-LICENSE.txt rename to plugins/repository-gcs/licenses/old/httpclient-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/httpclient-NOTICE.txt b/plugins/repository-gcs/licenses/old/httpclient-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/httpclient-NOTICE.txt rename to plugins/repository-gcs/licenses/old/httpclient-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/httpcore-LICENSE.txt b/plugins/repository-gcs/licenses/old/httpcore-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/httpcore-LICENSE.txt rename to plugins/repository-gcs/licenses/old/httpcore-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/httpcore-NOTICE.txt b/plugins/repository-gcs/licenses/old/httpcore-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/httpcore-NOTICE.txt rename to plugins/repository-gcs/licenses/old/httpcore-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/opencensus-LICENSE.txt b/plugins/repository-gcs/licenses/opencensus-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/repository-gcs/licenses/opencensus-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/opencensus-NOTICE.txt b/plugins/repository-gcs/licenses/opencensus-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/opencensus-api-0.11.1.jar.sha1 b/plugins/repository-gcs/licenses/opencensus-api-0.11.1.jar.sha1 new file mode 100644 index 0000000000000..61d8e3b148144 --- /dev/null +++ b/plugins/repository-gcs/licenses/opencensus-api-0.11.1.jar.sha1 @@ -0,0 +1 @@ +54689fbf750a7f26e34fa1f1f96b883c53f51486 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.11.1.jar.sha1 b/plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.11.1.jar.sha1 new file mode 100644 index 0000000000000..c0b04f0f8ccce --- /dev/null +++ b/plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.11.1.jar.sha1 @@ -0,0 +1 @@ +82e572b41e81ecf58d0d1e9a3953a05aa8f9c84b \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/proto-google-common-protos-1.8.0.jar.sha1 b/plugins/repository-gcs/licenses/proto-google-common-protos-1.8.0.jar.sha1 new file mode 100644 index 0000000000000..0a2dee4447e92 --- /dev/null +++ b/plugins/repository-gcs/licenses/proto-google-common-protos-1.8.0.jar.sha1 @@ -0,0 +1 @@ +b3282312ba82536fc9a7778cabfde149a875e877 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/proto-google-common-protos-LICENSE.txt b/plugins/repository-gcs/licenses/proto-google-common-protos-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/repository-gcs/licenses/proto-google-common-protos-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/repository-gcs/licenses/proto-google-common-protos-NOTICE.txt b/plugins/repository-gcs/licenses/proto-google-common-protos-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/licenses/threetenbp-1.3.6.jar.sha1 b/plugins/repository-gcs/licenses/threetenbp-1.3.6.jar.sha1 new file mode 100644 index 0000000000000..65c16fed4a07b --- /dev/null +++ b/plugins/repository-gcs/licenses/threetenbp-1.3.6.jar.sha1 @@ -0,0 +1 @@ +89dcc04a7e028c3c963413a71f950703cf51f057 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/threetenbp-LICENSE.txt b/plugins/repository-gcs/licenses/threetenbp-LICENSE.txt new file mode 100644 index 0000000000000..fcdfc8f0d0774 --- /dev/null +++ b/plugins/repository-gcs/licenses/threetenbp-LICENSE.txt @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2007-present, Stephen Colebourne & Michael Nascimento Santos + * + * All rights reserved. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * * Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * * Neither the name of JSR-310 nor the names of its contributors + * may be used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR + * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, + * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, + * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ diff --git a/plugins/repository-gcs/licenses/threetenbp-NOTICE.txt b/plugins/repository-gcs/licenses/threetenbp-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/repository-gcs/qa/google-cloud-storage/build.gradle b/plugins/repository-gcs/qa/google-cloud-storage/build.gradle index afd49b9f4dc73..34ec92a354277 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/build.gradle +++ b/plugins/repository-gcs/qa/google-cloud-storage/build.gradle @@ -69,7 +69,6 @@ task googleCloudStorageFixture(type: AntFixture) { /** A service account file that points to the Google Cloud Storage service emulated by the fixture **/ task createServiceAccountFile() { - dependsOn googleCloudStorageFixture doLast { KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA") keyPairGenerator.initialize(1024) @@ -83,11 +82,7 @@ task createServiceAccountFile() { ' "private_key_id": "' + UUID.randomUUID().toString() + '",\n' + ' "private_key": "-----BEGIN PRIVATE KEY-----\\n' + encodedKey + '\\n-----END PRIVATE KEY-----\\n",\n' + ' "client_email": "integration_test@appspot.gserviceaccount.com",\n' + - ' "client_id": "123456789101112130594",\n' + - " \"auth_uri\": \"http://${googleCloudStorageFixture.addressAndPort}/o/oauth2/auth\",\n" + - " \"token_uri\": \"http://${googleCloudStorageFixture.addressAndPort}/o/oauth2/token\",\n" + - ' "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",\n' + - ' "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/integration_test%40appspot.gserviceaccount.com"\n' + + ' "client_id": "123456789101112130594"\n' + '}', 'UTF-8') } } @@ -109,6 +104,7 @@ integTestCluster { dependsOn createServiceAccountFile, googleCloudStorageFixture /* Use a closure on the string to delay evaluation until tests are executed */ setting 'gcs.client.integration_test.endpoint', "http://${ -> googleCloudStorageFixture.addressAndPort }" + setting 'gcs.client.integration_test.token_uri', "http://${ -> 
googleCloudStorageFixture.addressAndPort }/o/oauth2/token" } else { println "Using an external service to test the repository-gcs plugin" } diff --git a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java index 2330e230f4505..a9832ae318de4 100644 --- a/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java +++ b/plugins/repository-gcs/qa/google-cloud-storage/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageTestServer.java @@ -31,13 +31,18 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.zip.GZIPInputStream; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; @@ -52,7 +57,7 @@ */ public class GoogleCloudStorageTestServer { - private static byte[] EMPTY_BYTE = new byte[0]; + private static final byte[] EMPTY_BYTE = new byte[0]; /** List of the buckets stored on this test server **/ private final Map buckets = ConcurrentCollections.newConcurrentMap(); @@ -63,13 +68,6 @@ public class GoogleCloudStorageTestServer { /** Server endpoint **/ private final String endpoint; - /** - * Creates a {@link GoogleCloudStorageTestServer} with the default endpoint - */ - GoogleCloudStorageTestServer() { - this("https://www.googleapis.com"); - } - /** * Creates a {@link GoogleCloudStorageTestServer} with a custom endpoint */ @@ -87,29 +85,6 @@ public String getEndpoint() { return endpoint; } - /** - * Returns a Google Cloud Storage response for the given request - * - * @param method the HTTP method of the request - * @param url the HTTP URL of the request - * @param headers the HTTP headers of the request - * @param body the HTTP request body - * @return a {@link Response} - * - * @throws IOException if something goes wrong - */ - public Response handle(final String method, - final String url, - final Map> headers, - byte[] body) throws IOException { - - final int questionMark = url.indexOf('?'); - if (questionMark == -1) { - return handle(method, url, null, headers, body); - } - return handle(method, url.substring(0, questionMark), url.substring(questionMark + 1), headers, body); - } - /** * Returns a Google Cloud Storage response for the given request * @@ -165,7 +140,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/json_api/v1/buckets/get handlers.insert("GET " + endpoint + "/storage/v1/b/{bucket}", (params, headers, body) -> { - String name = params.get("bucket"); + final String name = params.get("bucket"); if (Strings.hasText(name) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "bucket name is missing"); } @@ -181,7 +156,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/json_api/v1/objects/get handlers.insert("GET " + endpoint + "/storage/v1/b/{bucket}/o/{object}", (params, headers, body) -> { - String objectName = 
params.get("object"); + final String objectName = params.get("object"); if (Strings.hasText(objectName) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); } @@ -191,7 +166,7 @@ private static PathTrie defaultHandlers(final String endpoint, f return newError(RestStatus.NOT_FOUND, "bucket not found"); } - for (Map.Entry object : bucket.objects.entrySet()) { + for (final Map.Entry object : bucket.objects.entrySet()) { if (object.getKey().equals(objectName)) { return newResponse(RestStatus.OK, emptyMap(), buildObjectResource(bucket.name, objectName, object.getValue())); } @@ -203,7 +178,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/json_api/v1/objects/delete handlers.insert("DELETE " + endpoint + "/storage/v1/b/{bucket}/o/{object}", (params, headers, body) -> { - String objectName = params.get("object"); + final String objectName = params.get("object"); if (Strings.hasText(objectName) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); } @@ -224,25 +199,149 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/json_api/v1/objects/insert handlers.insert("POST " + endpoint + "/upload/storage/v1/b/{bucket}/o", (params, headers, body) -> { - if ("resumable".equals(params.get("uploadType")) == false) { - return newError(RestStatus.INTERNAL_SERVER_ERROR, "upload type must be resumable"); - } - - final String objectName = params.get("name"); - if (Strings.hasText(objectName) == false) { - return newError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); - } - - final Bucket bucket = buckets.get(params.get("bucket")); - if (bucket == null) { - return newError(RestStatus.NOT_FOUND, "bucket not found"); - } - - if (bucket.objects.put(objectName, EMPTY_BYTE) == null) { - String location = endpoint + "/upload/storage/v1/b/" + bucket.name + "/o?uploadType=resumable&upload_id=" + objectName; - return new Response(RestStatus.CREATED, singletonMap("Location", location), XContentType.JSON.mediaType(), EMPTY_BYTE); + final String uploadType = params.get("uploadType"); + if ("resumable".equals(uploadType)) { + final String objectName = params.get("name"); + if (Strings.hasText(objectName) == false) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "object name is missing"); + } + final Bucket bucket = buckets.get(params.get("bucket")); + if (bucket == null) { + return newError(RestStatus.NOT_FOUND, "bucket not found"); + } + if (bucket.objects.putIfAbsent(objectName, EMPTY_BYTE) == null) { + final String location = endpoint + "/upload/storage/v1/b/" + bucket.name + "/o?uploadType=resumable&upload_id=" + + objectName; + return new Response(RestStatus.CREATED, singletonMap("Location", location), XContentType.JSON.mediaType(), EMPTY_BYTE); + } else { + return newError(RestStatus.CONFLICT, "object already exist"); + } + } else if ("multipart".equals(uploadType)) { + /* + * A multipart/related request body looks like this (note the binary dump inside a text blob! 
nice!): + * --__END_OF_PART__ + * Content-Length: 135 + * Content-Type: application/json; charset=UTF-8 + * content-transfer-encoding: binary + * + * {"bucket":"bucket_test","crc32c":"7XacHQ==","md5Hash":"fVztGkklMlUamsSmJK7W+w==", + * "name":"tests-KEwE3bU4TuyetBgQIghmUw/master.dat-temp"} + * --__END_OF_PART__ + * content-transfer-encoding: binary + * + * KEwE3bU4TuyetBgQIghmUw + * --__END_OF_PART__-- + */ + String boundary = "__END_OF_PART__"; + // Determine the multipart boundary + final List contentTypes = headers.getOrDefault("Content-Type", headers.get("Content-type")); + if (contentTypes != null) { + final String contentType = contentTypes.get(0); + if ((contentType != null) && contentType.contains("multipart/related; boundary=")) { + boundary = contentType.replace("multipart/related; boundary=", ""); + } + } + InputStream inputStreamBody = new ByteArrayInputStream(body); + final List contentEncodings = headers.getOrDefault("Content-Encoding", headers.get("Content-encoding")); + if (contentEncodings != null) { + if (contentEncodings.stream().anyMatch(x -> "gzip".equalsIgnoreCase(x))) { + inputStreamBody = new GZIPInputStream(inputStreamBody); + } + } + // Read line by line ?both? parts of the multipart. Decoding headers as + // IS_8859_1 is safe. + try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStreamBody, StandardCharsets.ISO_8859_1))) { + String line; + // read first part delimiter + line = reader.readLine(); + if ((line == null) || (line.equals("--" + boundary) == false)) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, + "Error parsing multipart request. Does not start with the part delimiter."); + } + final Map> firstPartHeaders = new HashMap<>(); + // Reads the first part's headers, if any + while ((line = reader.readLine()) != null) { + if (line.equals("\r\n") || (line.length() == 0)) { + // end of headers + break; + } else { + final String[] header = line.split(":", 2); + firstPartHeaders.put(header[0], singletonList(header[1])); + } + } + final List firstPartContentTypes = firstPartHeaders.getOrDefault("Content-Type", + firstPartHeaders.get("Content-type")); + if ((firstPartContentTypes == null) + || (firstPartContentTypes.stream().noneMatch(x -> x.contains("application/json")))) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, + "Error parsing multipart request. Metadata part expected to have the \"application/json\" content type."); + } + // read metadata part, a single line + line = reader.readLine(); + final byte[] metadata = line.getBytes(StandardCharsets.ISO_8859_1); + if ((firstPartContentTypes != null) && (firstPartContentTypes.stream().anyMatch((x -> x.contains("charset=utf-8"))))) { + // decode as utf-8 + line = new String(metadata, StandardCharsets.UTF_8); + } + final Matcher objectNameMatcher = Pattern.compile("\"name\":\"([^\"]*)\"").matcher(line); + objectNameMatcher.find(); + final String objectName = objectNameMatcher.group(1); + final Matcher bucketNameMatcher = Pattern.compile("\"bucket\":\"([^\"]*)\"").matcher(line); + bucketNameMatcher.find(); + final String bucketName = bucketNameMatcher.group(1); + // read second part delimiter + line = reader.readLine(); + if ((line == null) || (line.equals("--" + boundary) == false)) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, + "Error parsing multipart request. Second part does not start with delimiter. 
" + + "Is the metadata multi-line?"); + } + final Map> secondPartHeaders = new HashMap<>(); + // Reads the second part's headers, if any + while ((line = reader.readLine()) != null) { + if (line.equals("\r\n") || (line.length() == 0)) { + // end of headers + break; + } else { + final String[] header = line.split(":", 2); + secondPartHeaders.put(header[0], singletonList(header[1])); + } + } + final List secondPartTransferEncoding = secondPartHeaders.getOrDefault("Content-Transfer-Encoding", + secondPartHeaders.get("content-transfer-encoding")); + if ((secondPartTransferEncoding == null) + || (secondPartTransferEncoding.stream().noneMatch(x -> x.contains("binary")))) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, + "Error parsing multipart request. Data part expected to have the \"binary\" content transfer encoding."); + } + final ByteArrayOutputStream baos = new ByteArrayOutputStream(); + int c; + while ((c = reader.read()) != -1) { + // one char to one byte, because of the ISO_8859_1 encoding + baos.write(c); + } + final byte[] temp = baos.toByteArray(); + final byte[] trailingEnding = ("\r\n--" + boundary + "--\r\n").getBytes(StandardCharsets.ISO_8859_1); + // check trailing + for (int i = trailingEnding.length - 1; i >= 0; i--) { + if (trailingEnding[i] != temp[(temp.length - trailingEnding.length) + i]) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "Error parsing multipart request."); + } + } + final Bucket bucket = buckets.get(bucketName); + if (bucket == null) { + return newError(RestStatus.NOT_FOUND, "bucket not found"); + } + final byte[] objectData = Arrays.copyOf(temp, temp.length - trailingEnding.length); + if ((objectName != null) && (bucketName != null) && (objectData != null)) { + bucket.objects.put(objectName, objectData); + return new Response(RestStatus.OK, emptyMap(), XContentType.JSON.mediaType(), metadata); + } else { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "error parsing multipart request"); + } + } } else { - return newError(RestStatus.CONFLICT, "object already exist"); + return newError(RestStatus.INTERNAL_SERVER_ERROR, "upload type must be resumable or multipart"); } }); @@ -250,7 +349,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload handlers.insert("PUT " + endpoint + "/upload/storage/v1/b/{bucket}/o", (params, headers, body) -> { - String objectId = params.get("upload_id"); + final String objectId = params.get("upload_id"); if (Strings.hasText(objectId) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "upload id is missing"); } @@ -268,38 +367,46 @@ private static PathTrie defaultHandlers(final String endpoint, f return newResponse(RestStatus.OK, emptyMap(), buildObjectResource(bucket.name, objectId, body)); }); - // Copy Object + // Rewrite or Copy Object // + // https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite // https://cloud.google.com/storage/docs/json_api/v1/objects/copy - handlers.insert("POST " + endpoint + "/storage/v1/b/{srcBucket}/o/{src}/copyTo/b/{destBucket}/o/{dest}", (params, headers, body)-> { - String source = params.get("src"); - if (Strings.hasText(source) == false) { - return newError(RestStatus.INTERNAL_SERVER_ERROR, "source object name is missing"); - } - - final Bucket srcBucket = buckets.get(params.get("srcBucket")); - if (srcBucket == null) { - return newError(RestStatus.NOT_FOUND, "source bucket not found"); - } - - String dest = params.get("dest"); - if (Strings.hasText(dest) == 
false) { - return newError(RestStatus.INTERNAL_SERVER_ERROR, "destination object name is missing"); - } - - final Bucket destBucket = buckets.get(params.get("destBucket")); - if (destBucket == null) { - return newError(RestStatus.NOT_FOUND, "destination bucket not found"); - } - - final byte[] sourceBytes = srcBucket.objects.get(source); - if (sourceBytes == null) { - return newError(RestStatus.NOT_FOUND, "source object not found"); - } - - destBucket.objects.put(dest, sourceBytes); - return newResponse(RestStatus.OK, emptyMap(), buildObjectResource(destBucket.name, dest, sourceBytes)); - }); + handlers.insert("POST " + endpoint + "/storage/v1/b/{srcBucket}/o/{src}/{action}/b/{destBucket}/o/{dest}", + (params, headers, body) -> { + final String action = params.get("action"); + if ((action.equals("rewriteTo") == false) && (action.equals("copyTo") == false)) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "Action not implemented. None of \"rewriteTo\" or \"copyTo\"."); + } + final String source = params.get("src"); + if (Strings.hasText(source) == false) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "source object name is missing"); + } + final Bucket srcBucket = buckets.get(params.get("srcBucket")); + if (srcBucket == null) { + return newError(RestStatus.NOT_FOUND, "source bucket not found"); + } + final String dest = params.get("dest"); + if (Strings.hasText(dest) == false) { + return newError(RestStatus.INTERNAL_SERVER_ERROR, "destination object name is missing"); + } + final Bucket destBucket = buckets.get(params.get("destBucket")); + if (destBucket == null) { + return newError(RestStatus.NOT_FOUND, "destination bucket not found"); + } + final byte[] sourceBytes = srcBucket.objects.get(source); + if (sourceBytes == null) { + return newError(RestStatus.NOT_FOUND, "source object not found"); + } + destBucket.objects.put(dest, sourceBytes); + if (action.equals("rewriteTo")) { + final XContentBuilder respBuilder = jsonBuilder(); + buildRewriteResponse(respBuilder, destBucket.name, dest, sourceBytes.length); + return newResponse(RestStatus.OK, emptyMap(), respBuilder); + } else { + assert action.equals("copyTo"); + return newResponse(RestStatus.OK, emptyMap(), buildObjectResource(destBucket.name, dest, sourceBytes)); + } + }); // List Objects // @@ -317,8 +424,8 @@ private static PathTrie defaultHandlers(final String endpoint, f builder.startArray("items"); final String prefixParam = params.get("prefix"); - for (Map.Entry object : bucket.objects.entrySet()) { - if (prefixParam != null && object.getKey().startsWith(prefixParam) == false) { + for (final Map.Entry object : bucket.objects.entrySet()) { + if ((prefixParam != null) && (object.getKey().startsWith(prefixParam) == false)) { continue; } buildObjectResource(builder, bucket.name, object.getKey(), object.getValue()); @@ -333,7 +440,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // // https://cloud.google.com/storage/docs/request-body handlers.insert("GET " + endpoint + "/download/storage/v1/b/{bucket}/o/{object}", (params, headers, body) -> { - String object = params.get("object"); + final String object = params.get("object"); if (Strings.hasText(object) == false) { return newError(RestStatus.INTERNAL_SERVER_ERROR, "object id is missing"); } @@ -353,7 +460,7 @@ private static PathTrie defaultHandlers(final String endpoint, f // Batch // // https://cloud.google.com/storage/docs/json_api/v1/how-tos/batch - handlers.insert("POST " + endpoint + "/batch", (params, headers, body) -> { + handlers.insert("POST 
" + endpoint + "/batch/storage/v1", (params, headers, body) -> { final List batchedResponses = new ArrayList<>(); // A batch request body looks like this: @@ -385,7 +492,7 @@ private static PathTrie defaultHandlers(final String endpoint, f final List contentTypes = headers.getOrDefault("Content-Type", headers.get("Content-type")); if (contentTypes != null) { final String contentType = contentTypes.get(0); - if (contentType != null && contentType.contains("multipart/mixed; boundary=")) { + if ((contentType != null) && contentType.contains("multipart/mixed; boundary=")) { boundary = contentType.replace("multipart/mixed; boundary=", ""); } } @@ -398,25 +505,25 @@ private static PathTrie defaultHandlers(final String endpoint, f while ((line = reader.readLine()) != null) { // Start of a batched request if (line.equals("--" + boundary)) { - Map> batchedHeaders = new HashMap<>(); + final Map> batchedHeaders = new HashMap<>(); // Reads the headers, if any while ((line = reader.readLine()) != null) { - if (line.equals("\r\n") || line.length() == 0) { + if (line.equals("\r\n") || (line.length() == 0)) { // end of headers break; } else { - String[] header = line.split(":", 2); + final String[] header = line.split(":", 2); batchedHeaders.put(header[0], singletonList(header[1])); } } // Reads the method and URL line = reader.readLine(); - String batchedUrl = line.substring(0, line.lastIndexOf(' ')); + final String batchedUrl = line.substring(0, line.lastIndexOf(' ')); final Map batchedParams = new HashMap<>(); - int questionMark = batchedUrl.indexOf('?'); + final int questionMark = batchedUrl.indexOf('?'); if (questionMark != -1) { RestUtils.decodeQueryString(batchedUrl.substring(questionMark + 1), 0, batchedParams); } @@ -424,16 +531,16 @@ private static PathTrie defaultHandlers(final String endpoint, f // Reads the body line = reader.readLine(); byte[] batchedBody = new byte[0]; - if (line != null || line.startsWith("--" + boundary) == false) { + if ((line != null) || (line.startsWith("--" + boundary) == false)) { batchedBody = line.getBytes(StandardCharsets.UTF_8); } // Executes the batched request - RequestHandler handler = handlers.retrieve(batchedUrl, batchedParams); + final RequestHandler handler = handlers.retrieve(batchedUrl, batchedParams); if (handler != null) { try { batchedResponses.add(handler.execute(batchedParams, batchedHeaders, batchedBody)); - } catch (IOException e) { + } catch (final IOException e) { batchedResponses.add(newError(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); } } @@ -442,11 +549,11 @@ private static PathTrie defaultHandlers(final String endpoint, f } // Now we can build the response - String sep = "--"; - String line = "\r\n"; + final String sep = "--"; + final String line = "\r\n"; - StringBuilder builder = new StringBuilder(); - for (Response response : batchedResponses) { + final StringBuilder builder = new StringBuilder(); + for (final Response response : batchedResponses) { builder.append(sep).append(boundary).append(line); builder.append("Content-Type: application/http").append(line); builder.append(line); @@ -465,7 +572,7 @@ private static PathTrie defaultHandlers(final String endpoint, f builder.append(line); builder.append(sep).append(boundary).append(sep); - byte[] content = builder.toString().getBytes(StandardCharsets.UTF_8); + final byte[] content = builder.toString().getBytes(StandardCharsets.UTF_8); return new Response(RestStatus.OK, emptyMap(), "multipart/mixed; boundary=" + boundary, content); }); @@ -525,7 +632,7 @@ private static Response 
newResponse(final RestStatus status, final Map { - try { - Bucket bucket = client.buckets().get(bucketName).execute(); - if (bucket != null) { - return Strings.hasText(bucket.getId()); - } - } catch (GoogleJsonResponseException e) { - GoogleJsonError error = e.getDetails(); - if ((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) { - return false; - } - throw e; - } - return false; - }); - } catch (IOException e) { + final Bucket bucket = SocketAccess.doPrivilegedIOException(() -> storage.get(bucketName)); + return bucket != null; + } catch (final Exception e) { throw new BlobStoreException("Unable to check if bucket [" + bucketName + "] exists", e); } } /** - * List all blobs in the bucket + * List blobs in the bucket under the specified path. The path root is removed. * - * @param path base path of the blobs to list + * @param path + * base path of the blobs to list * @return a map of blob names and their metadata */ Map listBlobs(String path) throws IOException { - return SocketAccess.doPrivilegedIOException(() -> listBlobsByPath(bucket, path, path)); + return listBlobsByPrefix(path, ""); } /** * List all blobs in the bucket which have a prefix * - * @param path base path of the blobs to list - * @param prefix prefix of the blobs to list - * @return a map of blob names and their metadata + * @param path + * base path of the blobs to list. This path is removed from the + * names of the blobs returned. + * @param prefix + * prefix of the blobs to list. + * @return a map of blob names and their metadata. */ Map listBlobsByPrefix(String path, String prefix) throws IOException { - return SocketAccess.doPrivilegedIOException(() -> listBlobsByPath(bucket, buildKey(path, prefix), path)); - } - - /** - * Lists all blobs in a given bucket - * - * @param bucketName name of the bucket - * @param path base path of the blobs to list - * @param pathToRemove if true, this path part is removed from blob name - * @return a map of blob names and their metadata - */ - private Map listBlobsByPath(String bucketName, String path, String pathToRemove) throws IOException { - return blobsStream(client, bucketName, path, MAX_BATCHING_REQUESTS) - .map(new BlobMetaDataConverter(pathToRemove)) - .collect(Collectors.toMap(PlainBlobMetaData::name, Function.identity())); + final String pathPrefix = buildKey(path, prefix); + final MapBuilder mapBuilder = MapBuilder.newMapBuilder(); + SocketAccess.doPrivilegedVoidIOException(() -> { + storage.get(bucket).list(BlobListOption.prefix(pathPrefix)).iterateAll().forEach(blob -> { + assert blob.getName().startsWith(path); + final String suffixName = blob.getName().substring(path.length()); + mapBuilder.put(suffixName, new PlainBlobMetaData(suffixName, blob.getSize())); + }); + }); + return mapBuilder.immutableMap(); } /** @@ -161,19 +143,9 @@ private Map listBlobsByPath(String bucketName, String path * @return true if the blob exists, false otherwise */ boolean blobExists(String blobName) throws IOException { - try { - StorageObject blob = SocketAccess.doPrivilegedIOException(() -> client.objects().get(bucket, blobName).execute()); - if (blob != null) { - return Strings.hasText(blob.getId()); - } - } catch (GoogleJsonResponseException e) { - GoogleJsonError error = e.getDetails(); - if ((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) { - return false; - } - throw e; - } - return false; + final BlobId blobId = BlobId.of(bucket, blobName); + final Blob blob = 
SocketAccess.doPrivilegedIOException(() -> storage.get(blobId)); + return blob != null; } /** @@ -183,18 +155,29 @@ boolean blobExists(String blobName) throws IOException { * @return an InputStream */ InputStream readBlob(String blobName) throws IOException { - try { - return SocketAccess.doPrivilegedIOException(() -> { - Storage.Objects.Get object = client.objects().get(bucket, blobName); - return object.executeMediaAsInputStream(); - }); - } catch (GoogleJsonResponseException e) { - GoogleJsonError error = e.getDetails(); - if ((e.getStatusCode() == HTTP_NOT_FOUND) || ((error != null) && (error.getCode() == HTTP_NOT_FOUND))) { - throw new NoSuchFileException(e.getMessage()); - } - throw e; + final BlobId blobId = BlobId.of(bucket, blobName); + final Blob blob = SocketAccess.doPrivilegedIOException(() -> storage.get(blobId)); + if (blob == null) { + throw new NoSuchFileException("Blob [" + blobName + "] does not exit"); } + final ReadChannel readChannel = SocketAccess.doPrivilegedIOException(blob::reader); + return Channels.newInputStream(new ReadableByteChannel() { + @SuppressForbidden(reason = "Channel is based of a socket not a file") + @Override + public int read(ByteBuffer dst) throws IOException { + return SocketAccess.doPrivilegedIOException(() -> readChannel.read(dst)); + } + + @Override + public boolean isOpen() { + return readChannel.isOpen(); + } + + @Override + public void close() throws IOException { + SocketAccess.doPrivilegedVoidIOException(readChannel::close); + } + }); } /** @@ -204,14 +187,58 @@ InputStream readBlob(String blobName) throws IOException { * @param blobSize expected size of the blob to be written */ void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { - SocketAccess.doPrivilegedVoidIOException(() -> { - InputStreamContent stream = new InputStreamContent(null, inputStream); - stream.setLength(blobSize); + final BlobInfo blobInfo = BlobInfo.newBuilder(bucket, blobName).build(); + if (blobSize > LARGE_BLOB_THRESHOLD_BYTE_SIZE) { + writeBlobResumable(blobInfo, inputStream); + } else { + writeBlobMultipart(blobInfo, inputStream, blobSize); + } + } - Storage.Objects.Insert insert = client.objects().insert(bucket, null, stream); - insert.setName(blobName); - insert.execute(); - }); + /** + * Uploads a blob using the "resumable upload" method (multiple requests, which + * can be independently retried in case of failure, see + * https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload + * + * @param blobInfo the info for the blob to be uploaded + * @param inputStream the stream containing the blob data + */ + private void writeBlobResumable(BlobInfo blobInfo, InputStream inputStream) throws IOException { + final WriteChannel writeChannel = SocketAccess.doPrivilegedIOException(() -> storage.writer(blobInfo)); + Streams.copy(inputStream, Channels.newOutputStream(new WritableByteChannel() { + @Override + public boolean isOpen() { + return writeChannel.isOpen(); + } + + @Override + public void close() throws IOException { + SocketAccess.doPrivilegedVoidIOException(writeChannel::close); + } + + @SuppressForbidden(reason = "Channel is based of a socket not a file") + @Override + public int write(ByteBuffer src) throws IOException { + return SocketAccess.doPrivilegedIOException(() -> writeChannel.write(src)); + } + })); + } + + /** + * Uploads a blob using the "multipart upload" method (a single + * 'multipart/related' request containing both data and metadata. 
/** @@ -220,10 +247,11 @@ void writeBlob(String blobName, InputStream inputStream, long blobSize) throws I * @param blobName name of the blob */ void deleteBlob(String blobName) throws IOException { - if (!blobExists(blobName)) { + final BlobId blobId = BlobId.of(bucket, blobName); + final boolean deleted = SocketAccess.doPrivilegedIOException(() -> storage.delete(blobId)); + if (deleted == false) { throw new NoSuchFileException("Blob [" + blobName + "] does not exist"); } - SocketAccess.doPrivilegedIOException(() -> client.objects().delete(bucket, blobName).execute()); } /** @@ -232,7 +260,7 @@ void deleteBlob(String blobName) throws IOException { * @param prefix prefix of the blobs to delete */ void deleteBlobsByPrefix(String prefix) throws IOException { - deleteBlobs(listBlobsByPath(bucket, prefix, null).keySet()); + deleteBlobs(listBlobsByPrefix("", prefix).keySet()); } /** @@ -241,163 +269,55 @@ void deleteBlobsByPrefix(String prefix) throws IOException { * @param blobNames names of the blobs to delete */ void deleteBlobs(Collection blobNames) throws IOException { - if (blobNames == null || blobNames.isEmpty()) { + if (blobNames.isEmpty()) { return; } - + // for a single op submit a simple delete instead of a batch of size 1 if (blobNames.size() == 1) { deleteBlob(blobNames.iterator().next()); return; } - final List deletions = new ArrayList<>(Math.min(MAX_BATCHING_REQUESTS, blobNames.size())); - final Iterator blobs = blobNames.iterator(); - - SocketAccess.doPrivilegedVoidIOException(() -> { - while (blobs.hasNext()) { - // Create a delete request for each blob to delete - deletions.add(client.objects().delete(bucket, blobs.next())); - - if (blobs.hasNext() == false || deletions.size() == MAX_BATCHING_REQUESTS) { - try { - // Deletions are executed using a batch request - BatchRequest batch = client.batch(); - - // Used to track successful deletions - CountDown countDown = new CountDown(deletions.size()); - - for (Storage.Objects.Delete delete : deletions) { - // Queue the delete request in batch - delete.queue(batch, new JsonBatchCallback() { - @Override - public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) throws IOException { - logger.error("failed to delete blob [{}] in bucket [{}]: {}", delete.getObject(), delete.getBucket(), e - .getMessage()); - } - - @Override - public void onSuccess(Void aVoid, HttpHeaders responseHeaders) throws IOException { - countDown.countDown(); - } - }); - } - - batch.execute(); - - if (countDown.isCountedDown() == false) { - throw new IOException("Failed to delete all [" + deletions.size() + "] blobs"); - } - } finally { - deletions.clear(); - } - } + final List blobIdsToDelete = blobNames.stream().map(blobName -> BlobId.of(bucket, blobName)).collect(Collectors.toList()); + final List deletedStatuses =
SocketAccess.doPrivilegedIOException(() -> storage.delete(blobIdsToDelete)); + assert blobIdsToDelete.size() == deletedStatuses.size(); + boolean failed = false; + for (int i = 0; i < blobIdsToDelete.size(); i++) { + if (deletedStatuses.get(i) == false) { + logger.error("Failed to delete blob [{}] in bucket [{}]", blobIdsToDelete.get(i).getName(), bucket); + failed = true; } - }); + } + if (failed) { + throw new IOException("Failed to delete all [" + blobIdsToDelete.size() + "] blobs"); + } } /** * Moves a blob within the same bucket * * @param sourceBlob name of the blob to move - * @param targetBlob new name of the blob in the target bucket + * @param targetBlob new name of the blob in the same bucket */ - void moveBlob(String sourceBlob, String targetBlob) throws IOException { - SocketAccess.doPrivilegedIOException(() -> { + void moveBlob(String sourceBlobName, String targetBlobName) throws IOException { + final BlobId sourceBlobId = BlobId.of(bucket, sourceBlobName); + final BlobId targetBlobId = BlobId.of(bucket, targetBlobName); + final CopyRequest request = CopyRequest.newBuilder() + .setSource(sourceBlobId) + .setTarget(targetBlobId) + .build(); + SocketAccess.doPrivilegedVoidIOException(() -> { // There's no atomic "move" in GCS so we need to copy and delete - client.objects().copy(bucket, sourceBlob, bucket, targetBlob, null).execute(); - client.objects().delete(bucket, sourceBlob).execute(); - return null; + storage.copy(request).getResult(); + final boolean deleted = storage.delete(sourceBlobId); + if (deleted == false) { + throw new IOException("Failed to move source [" + sourceBlobName + "] to target [" + targetBlobName + "]"); + } }); } - private String buildKey(String keyPath, String s) { + private static String buildKey(String keyPath, String s) { assert s != null; return keyPath + s; } - /** - * Converts a {@link StorageObject} to a {@link PlainBlobMetaData} - */ - class BlobMetaDataConverter implements Function { - - private final String pathToRemove; - - BlobMetaDataConverter(String pathToRemove) { - this.pathToRemove = pathToRemove; - } - - @Override - public PlainBlobMetaData apply(StorageObject storageObject) { - String blobName = storageObject.getName(); - if (Strings.hasLength(pathToRemove)) { - blobName = blobName.substring(pathToRemove.length()); - } - return new PlainBlobMetaData(blobName, storageObject.getSize().longValue()); - } - } - - /** - * Spliterator can be used to list storage objects stored in a bucket. 
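This hand-rolled Spliterator is part of the removed JSON-API listing code; with the new client, the same listing is a single paged call whose pages are followed transparently by iterateAll(), as listBlobsByPrefix() above now does. A rough standalone sketch, with an illustrative bucket name and prefix:

import com.google.cloud.storage.Blob;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.Storage.BlobListOption;
import com.google.cloud.storage.StorageOptions;

import java.util.HashMap;
import java.util.Map;

public class GcsListSketch {
    public static void main(String[] args) {
        // Assumes application default credentials and an existing bucket named "my-bucket".
        final Storage storage = StorageOptions.getDefaultInstance().getService();
        final Map<String, Long> sizesByName = new HashMap<>();
        // iterateAll() follows page tokens internally, replacing the manual pagination shown below.
        for (Blob blob : storage.list("my-bucket", BlobListOption.prefix("snapshots/")).iterateAll()) {
            sizesByName.put(blob.getName(), blob.getSize());
        }
        sizesByName.forEach((name, size) -> System.out.println(name + " -> " + size + " bytes"));
    }
}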
- */ - static class StorageObjectsSpliterator implements Spliterator { - - private final Storage.Objects.List list; - - StorageObjectsSpliterator(Storage client, String bucketName, String prefix, long pageSize) throws IOException { - list = SocketAccess.doPrivilegedIOException(() -> client.objects().list(bucketName)); - list.setMaxResults(pageSize); - if (prefix != null) { - list.setPrefix(prefix); - } - } - - @Override - public boolean tryAdvance(Consumer action) { - try { - // Retrieves the next page of items - Objects objects = SocketAccess.doPrivilegedIOException(list::execute); - - if ((objects == null) || (objects.getItems() == null) || (objects.getItems().isEmpty())) { - return false; - } - - // Consumes all the items - objects.getItems().forEach(action::accept); - - // Sets the page token of the next page, - // null indicates that all items have been consumed - String next = objects.getNextPageToken(); - if (next != null) { - list.setPageToken(next); - return true; - } - - return false; - } catch (Exception e) { - throw new BlobStoreException("Exception while listing objects", e); - } - } - - @Override - public Spliterator trySplit() { - return null; - } - - @Override - public long estimateSize() { - return Long.MAX_VALUE; - } - - @Override - public int characteristics() { - return 0; - } - } - - /** - * Returns a {@link Stream} of {@link StorageObject}s that are stored in a given bucket. - */ - static Stream blobsStream(Storage client, String bucketName, String prefix, long pageSize) throws IOException { - return StreamSupport.stream(new StorageObjectsSpliterator(client, bucketName, prefix, pageSize), false); - } - } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java index 03295c18c8ae6..99df38413326c 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettings.java @@ -18,8 +18,10 @@ */ package org.elasticsearch.repositories.gcs; -import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; import com.google.api.services.storage.StorageScopes; +import com.google.auth.oauth2.ServiceAccountCredentials; + +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -28,10 +30,12 @@ import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; +import java.net.URI; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; -import java.util.Locale; import java.util.Map; +import java.util.function.Function; import static org.elasticsearch.common.settings.Setting.timeSetting; @@ -44,11 +48,19 @@ public class GoogleCloudStorageClientSettings { /** A json Service Account file loaded from secure settings. */ static final Setting.AffixSetting CREDENTIALS_FILE_SETTING = Setting.affixKeySetting(PREFIX, "credentials_file", - key -> SecureSetting.secureFile(key, null)); + key -> SecureSetting.secureFile(key, null)); /** An override for the Storage endpoint to connect to. 
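Each of these settings is an affix setting resolved per client name. A small sketch of how a concrete value is set and read back, written in the style of the unit tests further below (it would have to live in the same package, since the constants are package-private); the client name and endpoint value are illustrative:

import org.elasticsearch.common.settings.Settings;

public class GcsClientSettingsSketch {
    public static void main(String[] args) {
        // Resolve the concrete key for the "default" client and point it at a local test endpoint.
        final String endpointKey = GoogleCloudStorageClientSettings.ENDPOINT_SETTING
                .getConcreteSettingForNamespace("default").getKey();
        final Settings settings = Settings.builder()
                .put(endpointKey, "http://localhost:8443") // e.g. a local test fixture
                .build();
        final GoogleCloudStorageClientSettings clientSettings =
                GoogleCloudStorageClientSettings.getClientSettings(settings, "default");
        assert "http://localhost:8443".equals(clientSettings.getHost());
    }
}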
*/ static final Setting.AffixSetting ENDPOINT_SETTING = Setting.affixKeySetting(PREFIX, "endpoint", - key -> new Setting<>(key, "", s -> s, Setting.Property.NodeScope)); + key -> Setting.simpleString(key, Setting.Property.NodeScope)); + + /** An override for the Google Project ID. */ + static final Setting.AffixSetting PROJECT_ID_SETTING = Setting.affixKeySetting(PREFIX, "project_id", + key -> Setting.simpleString(key, Setting.Property.NodeScope)); + + /** An override for the Token Server URI in the oauth flow. */ + static final Setting.AffixSetting TOKEN_URI_SETTING = Setting.affixKeySetting(PREFIX, "token_uri", + key -> new Setting<>(key, "", URI::create, Setting.Property.NodeScope)); /** * The timeout to establish a connection. A value of {@code -1} corresponds to an infinite timeout. A value of {@code 0} @@ -64,45 +76,59 @@ public class GoogleCloudStorageClientSettings { static final Setting.AffixSetting READ_TIMEOUT_SETTING = Setting.affixKeySetting(PREFIX, "read_timeout", key -> timeSetting(key, TimeValue.ZERO, TimeValue.MINUS_ONE, Setting.Property.NodeScope)); - /** Name used by the client when it uses the Google Cloud JSON API. **/ + /** Name used by the client when it uses the Google Cloud JSON API. */ static final Setting.AffixSetting APPLICATION_NAME_SETTING = Setting.affixKeySetting(PREFIX, "application_name", - key -> new Setting<>(key, "repository-gcs", s -> s, Setting.Property.NodeScope)); + key -> new Setting<>(key, "repository-gcs", Function.identity(), Setting.Property.NodeScope, Setting.Property.Deprecated)); - /** The credentials used by the client to connect to the Storage endpoint **/ - private final GoogleCredential credential; + /** The credentials used by the client to connect to the Storage endpoint. */ + private final ServiceAccountCredentials credential; - /** The Storage root URL the client should talk to, or empty string to use the default. **/ + /** The Storage endpoint URL the client should talk to. Null value sets the default. */ private final String endpoint; - /** The timeout to establish a connection **/ + /** The Google project ID overriding the default way to infer it. Null value sets the default. */ + private final String projectId; + + /** The timeout to establish a connection */ private final TimeValue connectTimeout; - /** The timeout to read data from an established connection **/ + /** The timeout to read data from an established connection */ private final TimeValue readTimeout; - /** The Storage client application name **/ + /** The Storage client application name */ private final String applicationName; - GoogleCloudStorageClientSettings(final GoogleCredential credential, + /** The token server URI. This leases access tokens in the oauth flow. */ + private final URI tokenUri; + + GoogleCloudStorageClientSettings(final ServiceAccountCredentials credential, final String endpoint, + final String projectId, final TimeValue connectTimeout, final TimeValue readTimeout, - final String applicationName) { + final String applicationName, + final URI tokenUri) { this.credential = credential; this.endpoint = endpoint; + this.projectId = projectId; this.connectTimeout = connectTimeout; this.readTimeout = readTimeout; this.applicationName = applicationName; + this.tokenUri = tokenUri; } - public GoogleCredential getCredential() { + public ServiceAccountCredentials getCredential() { return credential; } - public String getEndpoint() { + public String getHost() { return endpoint; } + public String getProjectId() { + return Strings.hasLength(projectId) ? 
projectId : (credential != null ? credential.getProjectId() : null); + } + public TimeValue getConnectTimeout() { return connectTimeout; } @@ -115,9 +141,13 @@ public String getApplicationName() { return applicationName; } + public URI getTokenUri() { + return tokenUri; + } + public static Map load(final Settings settings) { final Map clients = new HashMap<>(); - for (String clientName: settings.getGroups(PREFIX).keySet()) { + for (final String clientName: settings.getGroups(PREFIX).keySet()) { clients.put(clientName, getClientSettings(settings, clientName)); } if (clients.containsKey("default") == false) { @@ -132,22 +162,27 @@ static GoogleCloudStorageClientSettings getClientSettings(final Settings setting return new GoogleCloudStorageClientSettings( loadCredential(settings, clientName), getConfigValue(settings, clientName, ENDPOINT_SETTING), + getConfigValue(settings, clientName, PROJECT_ID_SETTING), getConfigValue(settings, clientName, CONNECT_TIMEOUT_SETTING), getConfigValue(settings, clientName, READ_TIMEOUT_SETTING), - getConfigValue(settings, clientName, APPLICATION_NAME_SETTING) + getConfigValue(settings, clientName, APPLICATION_NAME_SETTING), + getConfigValue(settings, clientName, TOKEN_URI_SETTING) ); } /** - * Loads the service account file corresponding to a given client name. If no file is defined for the client, - * a {@code null} credential is returned. + * Loads the service account file corresponding to a given client name. If no + * file is defined for the client, a {@code null} credential is returned. * - * @param settings the {@link Settings} - * @param clientName the client name + * @param settings + * the {@link Settings} + * @param clientName + * the client name * - * @return the {@link GoogleCredential} to use for the given client, {@code null} if no service account is defined. + * @return the {@link ServiceAccountCredentials} to use for the given client, + * {@code null} if no service account is defined. 
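For context, the credentials returned here are what GoogleCloudStorageService hands to StorageOptions further below when it builds the Storage client. Stripped of the plugin plumbing, the wiring looks roughly like the following sketch; the key file path is a placeholder and the project id is read from the credentials, mirroring getProjectId() above:

import com.google.auth.oauth2.ServiceAccountCredentials;
import com.google.cloud.storage.Storage;
import com.google.cloud.storage.StorageOptions;

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

public class GcsCredentialsSketch {
    public static void main(String[] args) throws IOException {
        final ServiceAccountCredentials credentials;
        // Load a service account key file (placeholder path).
        try (InputStream stream = Files.newInputStream(Paths.get("/path/to/service-account.json"))) {
            credentials = ServiceAccountCredentials.fromStream(stream);
        }
        // Build the client with explicit credentials; the project id defaults to the one in the key file.
        final Storage storage = StorageOptions.newBuilder()
                .setCredentials(credentials)
                .setProjectId(credentials.getProjectId())
                .build()
                .getService();
        System.out.println("client created for project: " + storage.getOptions().getProjectId());
    }
}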
*/ - static GoogleCredential loadCredential(final Settings settings, final String clientName) { + static ServiceAccountCredentials loadCredential(final Settings settings, final String clientName) { try { if (CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(clientName).exists(settings) == false) { // explicitly returning null here so that the default credential @@ -155,19 +190,22 @@ static GoogleCredential loadCredential(final Settings settings, final String cli return null; } try (InputStream credStream = CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(clientName).get(settings)) { - GoogleCredential credential = GoogleCredential.fromStream(credStream); - if (credential.createScopedRequired()) { - credential = credential.createScoped(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL)); - } - return credential; + final Collection scopes = Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL); + return SocketAccess.doPrivilegedIOException(() -> { + final ServiceAccountCredentials credentials = ServiceAccountCredentials.fromStream(credStream); + if (credentials.createScopedRequired()) { + return (ServiceAccountCredentials) credentials.createScoped(scopes); + } + return credentials; + }); } - } catch (IOException e) { + } catch (final IOException e) { throw new UncheckedIOException(e); } } private static T getConfigValue(final Settings settings, final String clientName, final Setting.AffixSetting clientSetting) { - Setting concreteSetting = clientSetting.getConcreteSettingForNamespace(clientName); + final Setting concreteSetting = clientSetting.getConcreteSettingForNamespace(clientName); return concreteSetting.get(settings); } } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java index ef24cd959e55b..1d2d70584adf9 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java @@ -19,21 +19,6 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.client.auth.oauth2.TokenRequest; -import com.google.api.client.auth.oauth2.TokenResponse; -import com.google.api.client.googleapis.json.GoogleJsonError; -import com.google.api.client.http.GenericUrl; -import com.google.api.client.http.HttpHeaders; -import com.google.api.client.json.GenericJson; -import com.google.api.client.json.webtoken.JsonWebSignature; -import com.google.api.client.json.webtoken.JsonWebToken; -import com.google.api.client.util.ClassInfo; -import com.google.api.client.util.Data; -import com.google.api.services.storage.Storage; -import com.google.api.services.storage.model.Bucket; -import com.google.api.services.storage.model.Objects; -import com.google.api.services.storage.model.StorageObject; -import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -42,8 +27,6 @@ import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.Repository; -import java.security.AccessController; -import java.security.PrivilegedAction; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -51,63 +34,6 @@ public class GoogleCloudStoragePlugin extends Plugin implements 
RepositoryPlugin { - static { - /* - * Google HTTP client changes access levels because its silly and we - * can't allow that on any old stack stack so we pull it here, up front, - * so we can cleanly check the permissions for it. Without this changing - * the permission can fail if any part of core is on the stack because - * our plugin permissions don't allow core to "reach through" plugins to - * change the permission. Because that'd be silly. - */ - SpecialPermission.check(); - AccessController.doPrivileged((PrivilegedAction) () -> { - // ClassInfo put in cache all the fields of a given class - // that are annoted with @Key; at the same time it changes - // the field access level using setAccessible(). Calling - // them here put the ClassInfo in cache (they are never evicted) - // before the SecurityManager is installed. - ClassInfo.of(HttpHeaders.class, true); - - ClassInfo.of(JsonWebSignature.Header.class, false); - ClassInfo.of(JsonWebToken.Payload.class, false); - - ClassInfo.of(TokenRequest.class, false); - ClassInfo.of(TokenResponse.class, false); - - ClassInfo.of(GenericJson.class, false); - ClassInfo.of(GenericUrl.class, false); - - Data.nullOf(GoogleJsonError.ErrorInfo.class); - ClassInfo.of(GoogleJsonError.class, false); - - Data.nullOf(Bucket.Cors.class); - ClassInfo.of(Bucket.class, false); - ClassInfo.of(Bucket.Cors.class, false); - ClassInfo.of(Bucket.Lifecycle.class, false); - ClassInfo.of(Bucket.Logging.class, false); - ClassInfo.of(Bucket.Owner.class, false); - ClassInfo.of(Bucket.Versioning.class, false); - ClassInfo.of(Bucket.Website.class, false); - - ClassInfo.of(StorageObject.class, false); - ClassInfo.of(StorageObject.Owner.class, false); - - ClassInfo.of(Objects.class, false); - - ClassInfo.of(Storage.Buckets.Get.class, false); - ClassInfo.of(Storage.Buckets.Insert.class, false); - - ClassInfo.of(Storage.Objects.Get.class, false); - ClassInfo.of(Storage.Objects.Insert.class, false); - ClassInfo.of(Storage.Objects.Delete.class, false); - ClassInfo.of(Storage.Objects.Copy.class, false); - ClassInfo.of(Storage.Objects.List.class, false); - - return null; - }); - } - private final Map clientsSettings; public GoogleCloudStoragePlugin(final Settings settings) { @@ -134,8 +60,10 @@ public List> getSettings() { return Arrays.asList( GoogleCloudStorageClientSettings.CREDENTIALS_FILE_SETTING, GoogleCloudStorageClientSettings.ENDPOINT_SETTING, + GoogleCloudStorageClientSettings.PROJECT_ID_SETTING, GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING, GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING, - GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING); + GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING, + GoogleCloudStorageClientSettings.TOKEN_URI_SETTING); } } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java index e193b8238b8d2..976befae0a269 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java @@ -19,7 +19,6 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.services.storage.Storage; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobPath; @@ -27,7 +26,6 @@ import 
org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; import org.elasticsearch.repositories.RepositoryException; @@ -39,7 +37,8 @@ import static org.elasticsearch.common.settings.Setting.boolSetting; import static org.elasticsearch.common.settings.Setting.byteSizeSetting; import static org.elasticsearch.common.settings.Setting.simpleString; -import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; + +import com.google.cloud.storage.Storage; class GoogleCloudStorageRepository extends BlobStoreRepository { diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index bccc5e0ffdc5c..57bcc4b131356 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -19,23 +19,26 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; -import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; -import com.google.api.client.http.HttpBackOffIOExceptionHandler; -import com.google.api.client.http.HttpBackOffUnsuccessfulResponseHandler; -import com.google.api.client.http.HttpRequest; -import com.google.api.client.http.HttpRequestInitializer; +import com.google.api.client.googleapis.GoogleUtils; import com.google.api.client.http.HttpTransport; -import com.google.api.client.http.HttpUnsuccessfulResponseHandler; -import com.google.api.client.json.jackson2.JacksonFactory; -import com.google.api.client.util.ExponentialBackOff; -import com.google.api.services.storage.Storage; +import com.google.api.client.http.javanet.DefaultConnectionFactory; +import com.google.api.client.http.javanet.NetHttpTransport; +import com.google.auth.oauth2.ServiceAccountCredentials; +import com.google.cloud.http.HttpTransportOptions; +import com.google.cloud.storage.Storage; +import com.google.cloud.storage.StorageOptions; + import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.Environment; import java.io.IOException; +import java.net.HttpURLConnection; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; import java.util.Map; public class GoogleCloudStorageService extends AbstractComponent { @@ -51,42 +54,107 @@ public GoogleCloudStorageService(final Environment environment, final Map httpTransport) + .build(); + final StorageOptions.Builder storageOptionsBuilder = StorageOptions.newBuilder() + .setTransportOptions(httpTransportOptions) + .setHeaderProvider(() -> { + final MapBuilder mapBuilder = MapBuilder.newMapBuilder(); + if (Strings.hasLength(clientSettings.getApplicationName())) { + mapBuilder.put("user-agent", clientSettings.getApplicationName()); + } + return mapBuilder.immutableMap(); + }); + if (Strings.hasLength(clientSettings.getHost())) { + storageOptionsBuilder.setHost(clientSettings.getHost()); } - if (Strings.hasLength(clientSettings.getEndpoint())) { - 
storage.setRootUrl(clientSettings.getEndpoint()); + if (Strings.hasLength(clientSettings.getProjectId())) { + storageOptionsBuilder.setProjectId(clientSettings.getProjectId()); } - return storage.build(); + if (clientSettings.getCredential() == null) { + logger.warn("\"Application Default Credentials\" are not supported out of the box." + + " Additional file system permissions have to be granted to the plugin."); + } else { + ServiceAccountCredentials serviceAccountCredentials = clientSettings.getCredential(); + // override token server URI + final URI tokenServerUri = clientSettings.getTokenUri(); + if (Strings.hasLength(tokenServerUri.toString())) { + // Rebuild the service account credentials in order to use a custom Token url. + // This is mostly used for testing purpose. + serviceAccountCredentials = serviceAccountCredentials.toBuilder().setTokenServerUri(tokenServerUri).build(); + } + storageOptionsBuilder.setCredentials(serviceAccountCredentials); + } + return storageOptionsBuilder.build().getService(); } - static HttpRequestInitializer createRequestInitializer(final GoogleCloudStorageClientSettings settings) throws IOException { - GoogleCredential credential = settings.getCredential(); - if (credential == null) { - credential = GoogleCredential.getApplicationDefault(); + /** + * Pins the TLS trust certificates and, more importantly, overrides connection + * URLs in the case of a custom endpoint setting because some connections don't + * fully honor this setting (bugs in the SDK). The default connection factory + * opens a new connection for each request. This is required for the storage + * instance to be thread-safe. + **/ + private static HttpTransport createHttpTransport(final String endpoint) throws Exception { + final NetHttpTransport.Builder builder = new NetHttpTransport.Builder(); + // requires java.lang.RuntimePermission "setFactory" + builder.trustCertificates(GoogleUtils.getCertificateTrustStore()); + if (Strings.hasLength(endpoint)) { + final URL endpointUrl = URI.create(endpoint).toURL(); + builder.setConnectionFactory(new DefaultConnectionFactory() { + @Override + public HttpURLConnection openConnection(final URL originalUrl) throws IOException { + // test if the URL is built correctly, ie following the `host` setting + if (originalUrl.getHost().equals(endpointUrl.getHost()) && originalUrl.getPort() == endpointUrl.getPort() + && originalUrl.getProtocol().equals(endpointUrl.getProtocol())) { + return super.openConnection(originalUrl); + } + // override connection URLs because some don't follow the config. See + // https://github.com/GoogleCloudPlatform/google-cloud-java/issues/3254 and + // https://github.com/GoogleCloudPlatform/google-cloud-java/issues/3255 + URI originalUri; + try { + originalUri = originalUrl.toURI(); + } catch (final URISyntaxException e) { + throw new RuntimeException(e); + } + String overridePath = "/"; + if (originalUri.getRawPath() != null) { + overridePath = originalUri.getRawPath(); + } + if (originalUri.getRawQuery() != null) { + overridePath += "?" 
+ originalUri.getRawQuery(); + } + return super.openConnection( + new URL(endpointUrl.getProtocol(), endpointUrl.getHost(), endpointUrl.getPort(), overridePath)); + } + }); } - return new DefaultHttpRequestInitializer(credential, toTimeout(settings.getConnectTimeout()), toTimeout(settings.getReadTimeout())); + return builder.build(); } - /** Converts timeout values from the settings to a timeout value for the Google Cloud SDK **/ + /** + * Converts timeout values from the settings to a timeout value for the Google + * Cloud SDK + **/ static Integer toTimeout(final TimeValue timeout) { // Null or zero in settings means the default timeout if (timeout == null || TimeValue.ZERO.equals(timeout)) { - return null; + // negative value means using the default value + return -1; } // -1 means infinite timeout if (TimeValue.MINUS_ONE.equals(timeout)) { @@ -96,51 +164,4 @@ static Integer toTimeout(final TimeValue timeout) { return Math.toIntExact(timeout.getMillis()); } - /** - * HTTP request initializer that set timeouts and backoff handler while deferring authentication to GoogleCredential. - * See https://cloud.google.com/storage/transfer/create-client#retry - */ - static class DefaultHttpRequestInitializer implements HttpRequestInitializer { - - private final Integer connectTimeout; - private final Integer readTimeout; - private final GoogleCredential credential; - - DefaultHttpRequestInitializer(GoogleCredential credential, Integer connectTimeoutMillis, Integer readTimeoutMillis) { - this.credential = credential; - this.connectTimeout = connectTimeoutMillis; - this.readTimeout = readTimeoutMillis; - } - - @Override - public void initialize(HttpRequest request) { - if (connectTimeout != null) { - request.setConnectTimeout(connectTimeout); - } - if (readTimeout != null) { - request.setReadTimeout(readTimeout); - } - - request.setIOExceptionHandler(new HttpBackOffIOExceptionHandler(newBackOff())); - request.setInterceptor(credential); - - final HttpUnsuccessfulResponseHandler handler = new HttpBackOffUnsuccessfulResponseHandler(newBackOff()); - request.setUnsuccessfulResponseHandler((req, resp, supportsRetry) -> { - // Let the credential handle the response. 
If it failed, we rely on our backoff handler - return credential.handleResponse(req, resp, supportsRetry) || handler.handleResponse(req, resp, supportsRetry); - } - ); - } - - private ExponentialBackOff newBackOff() { - return new ExponentialBackOff.Builder() - .setInitialIntervalMillis(100) - .setMaxIntervalMillis(6000) - .setMaxElapsedTimeMillis(900000) - .setMultiplier(1.5) - .setRandomizationFactor(0.5) - .build(); - } - } - } diff --git a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy index ce9b0334638a0..fffe6cbbc0f24 100644 --- a/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-gcs/src/main/plugin-metadata/plugin-security.policy @@ -18,11 +18,12 @@ */ grant { + // required by: com.google.api.client.json.JsonParser#parseValue permission java.lang.RuntimePermission "accessDeclaredMembers"; - permission java.lang.RuntimePermission "setFactory"; + // required by: com.google.api.client.json.GenericJson# permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; - permission java.net.URLPermission "http://www.googleapis.com/*", "*"; - permission java.net.URLPermission "https://www.googleapis.com/*", "*"; + // required to add google certs to the gcs client trustore + permission java.lang.RuntimePermission "setFactory"; // gcs client opens socket connections for to access repository permission java.net.SocketPermission "*", "connect"; diff --git a/plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageRpcOptionUtils.java b/plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageRpcOptionUtils.java new file mode 100644 index 0000000000000..f2b8a0571ad87 --- /dev/null +++ b/plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageRpcOptionUtils.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.google.cloud.storage; + +import com.google.cloud.storage.spi.v1.StorageRpc; + +import static org.mockito.Mockito.mock; + +/** + * Utility class that exposed Google SDK package protected methods to + * create specific StorageRpc objects in unit tests. + */ +public class StorageRpcOptionUtils { + + private StorageRpcOptionUtils(){} + + public static String getPrefix(final Storage.BlobListOption... 
options) { + if (options != null) { + for (final Option option : options) { + final StorageRpc.Option rpcOption = option.getRpcOption(); + if (StorageRpc.Option.PREFIX.equals(rpcOption)) { + return (String) option.getValue(); + } + } + } + return null; + } + + public static CopyWriter createCopyWriter(final Blob result) { + return new CopyWriter(mock(StorageOptions.class), mock(StorageRpc.RewriteResponse.class)) { + @Override + public Blob getResult() { + return result; + } + }; + } +} diff --git a/plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageTestUtils.java b/plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageTestUtils.java new file mode 100644 index 0000000000000..68175d7f1be53 --- /dev/null +++ b/plugins/repository-gcs/src/test/java/com/google/cloud/storage/StorageTestUtils.java @@ -0,0 +1,37 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.google.cloud.storage; + +/** + * Utility class that exposed Google SDK package protected methods to + * create buckets and blobs objects in unit tests. 
+ */ +public class StorageTestUtils { + + private StorageTestUtils(){} + + public static Bucket createBucket(final Storage storage, final String bucketName) { + return new Bucket(storage, (BucketInfo.BuilderImpl) BucketInfo.newBuilder(bucketName)); + } + + public static Blob createBlob(final Storage storage, final String bucketName, final String blobName, final long blobSize) { + return new Blob(storage, (BlobInfo.BuilderImpl) BlobInfo.newBuilder(bucketName, blobName).setSize(blobSize)); + } +} diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index 19551f3b082fa..c4d9b67899672 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.services.storage.Storage; +import com.google.cloud.storage.Storage; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java index badd86cd8a2b3..14cb4fa242e7d 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageClientSettingsTests.java @@ -18,20 +18,25 @@ */ package org.elasticsearch.repositories.gcs; -import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; import com.google.api.services.storage.StorageScopes; +import com.google.auth.oauth2.ServiceAccountCredentials; + import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; +import java.net.URI; import java.nio.charset.StandardCharsets; import java.security.KeyPair; import java.security.KeyPairGenerator; +import java.util.ArrayList; import java.util.Base64; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Locale; import java.util.Map; @@ -39,6 +44,7 @@ import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.CREDENTIALS_FILE_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.ENDPOINT_SETTING; +import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.PROJECT_ID_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.getClientSettings; import static org.elasticsearch.repositories.gcs.GoogleCloudStorageClientSettings.loadCredential; @@ -46,59 +52,78 @@ public class 
GoogleCloudStorageClientSettingsTests extends ESTestCase { public void testLoadWithEmptySettings() { - Map clientsSettings = GoogleCloudStorageClientSettings.load(Settings.EMPTY); + final Map clientsSettings = GoogleCloudStorageClientSettings.load(Settings.EMPTY); assertEquals(1, clientsSettings.size()); assertNotNull(clientsSettings.get("default")); } public void testLoad() throws Exception { final int nbClients = randomIntBetween(1, 5); - final Tuple, Settings> randomClients = randomClients(nbClients); + final List> deprecationWarnings = new ArrayList<>(); + final Tuple, Settings> randomClients = randomClients(nbClients, deprecationWarnings); final Map expectedClientsSettings = randomClients.v1(); - Map actualClientsSettings = GoogleCloudStorageClientSettings.load(randomClients.v2()); + final Map actualClientsSettings = GoogleCloudStorageClientSettings + .load(randomClients.v2()); assertEquals(expectedClientsSettings.size(), actualClientsSettings.size()); - for (String clientName : expectedClientsSettings.keySet()) { - GoogleCloudStorageClientSettings actualClientSettings = actualClientsSettings.get(clientName); + for (final String clientName : expectedClientsSettings.keySet()) { + final GoogleCloudStorageClientSettings actualClientSettings = actualClientsSettings.get(clientName); assertNotNull(actualClientSettings); - GoogleCloudStorageClientSettings expectedClientSettings = expectedClientsSettings.get(clientName); + final GoogleCloudStorageClientSettings expectedClientSettings = expectedClientsSettings.get(clientName); assertNotNull(expectedClientSettings); - assertGoogleCredential(expectedClientSettings.getCredential(), actualClientSettings.getCredential()); - assertEquals(expectedClientSettings.getEndpoint(), actualClientSettings.getEndpoint()); + assertEquals(expectedClientSettings.getHost(), actualClientSettings.getHost()); + assertEquals(expectedClientSettings.getProjectId(), actualClientSettings.getProjectId()); assertEquals(expectedClientSettings.getConnectTimeout(), actualClientSettings.getConnectTimeout()); assertEquals(expectedClientSettings.getReadTimeout(), actualClientSettings.getReadTimeout()); assertEquals(expectedClientSettings.getApplicationName(), actualClientSettings.getApplicationName()); } + + if (deprecationWarnings.isEmpty() == false) { + assertSettingDeprecationsAndWarnings(deprecationWarnings.toArray(new Setting[0])); + } } public void testLoadCredential() throws Exception { - Tuple, Settings> randomClient = randomClients(1); - GoogleCloudStorageClientSettings expectedClientSettings = randomClient.v1().values().iterator().next(); - String clientName = randomClient.v1().keySet().iterator().next(); - + final List> deprecationWarnings = new ArrayList<>(); + final Tuple, Settings> randomClient = randomClients(1, deprecationWarnings); + final GoogleCloudStorageClientSettings expectedClientSettings = randomClient.v1().values().iterator().next(); + final String clientName = randomClient.v1().keySet().iterator().next(); assertGoogleCredential(expectedClientSettings.getCredential(), loadCredential(randomClient.v2(), clientName)); } + public void testProjectIdDefaultsToCredentials() throws Exception { + final String clientName = randomAlphaOfLength(5); + final Tuple credentials = randomCredential(clientName); + final ServiceAccountCredentials credential = credentials.v1(); + final GoogleCloudStorageClientSettings googleCloudStorageClientSettings = new GoogleCloudStorageClientSettings(credential, + ENDPOINT_SETTING.getDefault(Settings.EMPTY), 
PROJECT_ID_SETTING.getDefault(Settings.EMPTY), + CONNECT_TIMEOUT_SETTING.getDefault(Settings.EMPTY), READ_TIMEOUT_SETTING.getDefault(Settings.EMPTY), + APPLICATION_NAME_SETTING.getDefault(Settings.EMPTY), new URI("")); + assertEquals(credential.getProjectId(), googleCloudStorageClientSettings.getProjectId()); + } + /** Generates a given number of GoogleCloudStorageClientSettings along with the Settings to build them from **/ - private Tuple, Settings> randomClients(final int nbClients) throws Exception { + private Tuple, Settings> randomClients(final int nbClients, + final List> deprecationWarnings) + throws Exception { final Map expectedClients = new HashMap<>(); - expectedClients.put("default", getClientSettings(Settings.EMPTY, "default")); final Settings.Builder settings = Settings.builder(); final MockSecureSettings secureSettings = new MockSecureSettings(); for (int i = 0; i < nbClients; i++) { - String clientName = randomAlphaOfLength(5).toLowerCase(Locale.ROOT); - - GoogleCloudStorageClientSettings clientSettings = randomClient(clientName, settings, secureSettings); + final String clientName = randomAlphaOfLength(5).toLowerCase(Locale.ROOT); + final GoogleCloudStorageClientSettings clientSettings = randomClient(clientName, settings, secureSettings, deprecationWarnings); expectedClients.put(clientName, clientSettings); } if (randomBoolean()) { - GoogleCloudStorageClientSettings clientSettings = randomClient("default", settings, secureSettings); + final GoogleCloudStorageClientSettings clientSettings = randomClient("default", settings, secureSettings, deprecationWarnings); expectedClients.put("default", clientSettings); + } else { + expectedClients.put("default", getClientSettings(Settings.EMPTY, "default")); } return Tuple.tuple(expectedClients, settings.setSecureSettings(secureSettings).build()); @@ -107,20 +132,30 @@ private Tuple, Settings> randomCli /** Generates a random GoogleCloudStorageClientSettings along with the Settings to build it **/ private static GoogleCloudStorageClientSettings randomClient(final String clientName, final Settings.Builder settings, - final MockSecureSettings secureSettings) throws Exception { + final MockSecureSettings secureSettings, + final List> deprecationWarnings) throws Exception { - Tuple credentials = randomCredential(clientName); - GoogleCredential credential = credentials.v1(); + final Tuple credentials = randomCredential(clientName); + final ServiceAccountCredentials credential = credentials.v1(); secureSettings.setFile(CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(clientName).getKey(), credentials.v2()); String endpoint; if (randomBoolean()) { - endpoint = randomAlphaOfLength(5); + endpoint = randomFrom("http://www.elastic.co", "http://metadata.google.com:88/oauth", "https://www.googleapis.com", + "https://www.elastic.co:443", "http://localhost:8443", "https://www.googleapis.com/oauth/token"); settings.put(ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), endpoint); } else { endpoint = ENDPOINT_SETTING.getDefault(Settings.EMPTY); } + String projectId; + if (randomBoolean()) { + projectId = randomAlphaOfLength(5); + settings.put(PROJECT_ID_SETTING.getConcreteSettingForNamespace(clientName).getKey(), projectId); + } else { + projectId = PROJECT_ID_SETTING.getDefault(Settings.EMPTY); + } + TimeValue connectTimeout; if (randomBoolean()) { connectTimeout = randomTimeout(); @@ -141,40 +176,35 @@ private static GoogleCloudStorageClientSettings randomClient(final String client if (randomBoolean()) { applicationName = 
randomAlphaOfLength(5); settings.put(APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName).getKey(), applicationName); + deprecationWarnings.add(APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName)); } else { applicationName = APPLICATION_NAME_SETTING.getDefault(Settings.EMPTY); } - return new GoogleCloudStorageClientSettings(credential, endpoint, connectTimeout, readTimeout, applicationName); + return new GoogleCloudStorageClientSettings(credential, endpoint, projectId, connectTimeout, readTimeout, applicationName, + new URI("")); } /** Generates a random GoogleCredential along with its corresponding Service Account file provided as a byte array **/ - private static Tuple randomCredential(final String clientName) throws Exception { - KeyPair keyPair = KeyPairGenerator.getInstance("RSA").generateKeyPair(); - - GoogleCredential.Builder credentialBuilder = new GoogleCredential.Builder(); - credentialBuilder.setServiceAccountId(clientName); - credentialBuilder.setServiceAccountProjectId("project_id_" + clientName); - credentialBuilder.setServiceAccountScopes(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL)); - credentialBuilder.setServiceAccountPrivateKey(keyPair.getPrivate()); - credentialBuilder.setServiceAccountPrivateKeyId("private_key_id_" + clientName); - - String encodedPrivateKey = Base64.getEncoder().encodeToString(keyPair.getPrivate().getEncoded()); - String serviceAccount = "{\"type\":\"service_account\"," + + private static Tuple randomCredential(final String clientName) throws Exception { + final KeyPair keyPair = KeyPairGenerator.getInstance("RSA").generateKeyPair(); + final ServiceAccountCredentials.Builder credentialBuilder = ServiceAccountCredentials.newBuilder(); + credentialBuilder.setClientId("id_" + clientName); + credentialBuilder.setClientEmail(clientName); + credentialBuilder.setProjectId("project_id_" + clientName); + credentialBuilder.setPrivateKey(keyPair.getPrivate()); + credentialBuilder.setPrivateKeyId("private_key_id_" + clientName); + credentialBuilder.setScopes(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL)); + final String encodedPrivateKey = Base64.getEncoder().encodeToString(keyPair.getPrivate().getEncoded()); + final String serviceAccount = "{\"type\":\"service_account\"," + "\"project_id\":\"project_id_" + clientName + "\"," + "\"private_key_id\":\"private_key_id_" + clientName + "\"," + "\"private_key\":\"-----BEGIN PRIVATE KEY-----\\n" + encodedPrivateKey + "\\n-----END PRIVATE KEY-----\\n\"," + "\"client_email\":\"" + clientName + "\"," + - "\"client_id\":\"id_" + clientName + "\"," + - "\"auth_uri\":\"https://accounts.google.com/o/oauth2/auth\"," + - "\"token_uri\":\"https://accounts.google.com/o/oauth2/token\"," + - "\"auth_provider_x509_cert_url\":\"https://www.googleapis.com/oauth2/v1/certs\"," + - "\"client_x509_cert_url\":\"https://www.googleapis.com/robot/v1/metadata/x509/" + - clientName + - "%40appspot.gserviceaccount.com\"}"; - + "\"client_id\":\"id_" + clientName + "\"" + + "}"; return Tuple.tuple(credentialBuilder.build(), serviceAccount.getBytes(StandardCharsets.UTF_8)); } @@ -182,14 +212,16 @@ private static TimeValue randomTimeout() { return randomFrom(TimeValue.MINUS_ONE, TimeValue.ZERO, TimeValue.parseTimeValue(randomPositiveTimeValue(), "test")); } - private static void assertGoogleCredential(final GoogleCredential expected, final GoogleCredential actual) { + private static void assertGoogleCredential(ServiceAccountCredentials expected, ServiceAccountCredentials actual) { if 
(expected != null) { assertEquals(expected.getServiceAccountUser(), actual.getServiceAccountUser()); - assertEquals(expected.getServiceAccountId(), actual.getServiceAccountId()); - assertEquals(expected.getServiceAccountProjectId(), actual.getServiceAccountProjectId()); - assertEquals(expected.getServiceAccountScopesAsString(), actual.getServiceAccountScopesAsString()); - assertEquals(expected.getServiceAccountPrivateKey(), actual.getServiceAccountPrivateKey()); - assertEquals(expected.getServiceAccountPrivateKeyId(), actual.getServiceAccountPrivateKeyId()); + assertEquals(expected.getClientId(), actual.getClientId()); + assertEquals(expected.getClientEmail(), actual.getClientEmail()); + assertEquals(expected.getAccount(), actual.getAccount()); + assertEquals(expected.getProjectId(), actual.getProjectId()); + assertEquals(expected.getScopes(), actual.getScopes()); + assertEquals(expected.getPrivateKey(), actual.getPrivateKey()); + assertEquals(expected.getPrivateKeyId(), actual.getPrivateKeyId()); } else { assertNull(actual); } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java index 44897819fd9e3..a33ae90c549bc 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java @@ -19,79 +19,65 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; -import com.google.api.client.http.GenericUrl; -import com.google.api.client.http.HttpIOExceptionHandler; -import com.google.api.client.http.HttpRequest; -import com.google.api.client.http.HttpRequestFactory; -import com.google.api.client.http.HttpRequestInitializer; -import com.google.api.client.http.HttpResponse; -import com.google.api.client.http.HttpUnsuccessfulResponseHandler; -import com.google.api.client.testing.http.MockHttpTransport; +import com.google.auth.Credentials; +import com.google.cloud.http.HttpTransportOptions; +import com.google.cloud.storage.Storage; + +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; +import java.util.Collections; +import java.util.Locale; -import java.io.IOException; - -import static org.mockito.Matchers.any; -import static org.mockito.Matchers.anyBoolean; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class GoogleCloudStorageServiceTests extends ESTestCase { - /** - * Test that the {@link GoogleCloudStorageService.DefaultHttpRequestInitializer} attaches new instances - * of {@link HttpIOExceptionHandler} and {@link HttpUnsuccessfulResponseHandler} for every HTTP requests. 
- */ - public void testDefaultHttpRequestInitializer() throws IOException { + public void testClientInitializer() throws Exception { + final String clientName = randomAlphaOfLength(4).toLowerCase(Locale.ROOT); final Environment environment = mock(Environment.class); - when(environment.settings()).thenReturn(Settings.EMPTY); - - final GoogleCredential credential = mock(GoogleCredential.class); - when(credential.handleResponse(any(HttpRequest.class), any(HttpResponse.class), anyBoolean())).thenReturn(false); - - final TimeValue readTimeout = TimeValue.timeValueSeconds(randomIntBetween(1, 120)); - final TimeValue connectTimeout = TimeValue.timeValueSeconds(randomIntBetween(1, 120)); - final String endpoint = randomBoolean() ? randomAlphaOfLength(10) : null; - final String applicationName = randomBoolean() ? randomAlphaOfLength(10) : null; - - final GoogleCloudStorageClientSettings clientSettings = - new GoogleCloudStorageClientSettings(credential, endpoint, connectTimeout, readTimeout, applicationName); - - final HttpRequestInitializer initializer = GoogleCloudStorageService.createRequestInitializer(clientSettings); - final HttpRequestFactory requestFactory = new MockHttpTransport().createRequestFactory(initializer); - - final HttpRequest request1 = requestFactory.buildGetRequest(new GenericUrl()); - assertEquals((int) connectTimeout.millis(), request1.getConnectTimeout()); - assertEquals((int) readTimeout.millis(), request1.getReadTimeout()); - assertSame(credential, request1.getInterceptor()); - assertNotNull(request1.getIOExceptionHandler()); - assertNotNull(request1.getUnsuccessfulResponseHandler()); - - final HttpRequest request2 = requestFactory.buildGetRequest(new GenericUrl()); - assertEquals((int) connectTimeout.millis(), request2.getConnectTimeout()); - assertEquals((int) readTimeout.millis(), request2.getReadTimeout()); - assertSame(request1.getInterceptor(), request2.getInterceptor()); - assertNotNull(request2.getIOExceptionHandler()); - assertNotSame(request1.getIOExceptionHandler(), request2.getIOExceptionHandler()); - assertNotNull(request2.getUnsuccessfulResponseHandler()); - assertNotSame(request1.getUnsuccessfulResponseHandler(), request2.getUnsuccessfulResponseHandler()); - - request1.getUnsuccessfulResponseHandler().handleResponse(null, null, false); - verify(credential, times(1)).handleResponse(any(HttpRequest.class), any(HttpResponse.class), anyBoolean()); - - request2.getUnsuccessfulResponseHandler().handleResponse(null, null, false); - verify(credential, times(2)).handleResponse(any(HttpRequest.class), any(HttpResponse.class), anyBoolean()); + final TimeValue connectTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); + final TimeValue readTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); + final String applicationName = randomAlphaOfLength(4); + final String hostName = randomFrom("http://", "https://") + randomAlphaOfLength(4) + ":" + randomIntBetween(1, 65535); + final String projectIdName = randomAlphaOfLength(4); + final Settings settings = Settings.builder() + .put(GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), + connectTimeValue.getStringRep()) + .put(GoogleCloudStorageClientSettings.READ_TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), + readTimeValue.getStringRep()) + .put(GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName).getKey(), + applicationName) + 
.put(GoogleCloudStorageClientSettings.ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), hostName) + .put(GoogleCloudStorageClientSettings.PROJECT_ID_SETTING.getConcreteSettingForNamespace(clientName).getKey(), projectIdName) + .build(); + when(environment.settings()).thenReturn(settings); + final GoogleCloudStorageClientSettings clientSettings = GoogleCloudStorageClientSettings.getClientSettings(settings, clientName); + final GoogleCloudStorageService service = new GoogleCloudStorageService(environment, + Collections.singletonMap(clientName, clientSettings)); + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> service.createClient("another_client")); + assertThat(e.getMessage(), Matchers.startsWith("Unknown client name")); + assertSettingDeprecationsAndWarnings( + new Setting[] { GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName) }); + final Storage storage = service.createClient(clientName); + assertThat(storage.getOptions().getApplicationName(), Matchers.containsString(applicationName)); + assertThat(storage.getOptions().getHost(), Matchers.is(hostName)); + assertThat(storage.getOptions().getProjectId(), Matchers.is(projectIdName)); + assertThat(storage.getOptions().getTransportOptions(), Matchers.instanceOf(HttpTransportOptions.class)); + assertThat(((HttpTransportOptions) storage.getOptions().getTransportOptions()).getConnectTimeout(), + Matchers.is((int) connectTimeValue.millis())); + assertThat(((HttpTransportOptions) storage.getOptions().getTransportOptions()).getReadTimeout(), + Matchers.is((int) readTimeValue.millis())); + assertThat(storage.getOptions().getCredentials(), Matchers.nullValue(Credentials.class)); } public void testToTimeout() { - assertNull(GoogleCloudStorageService.toTimeout(null)); - assertNull(GoogleCloudStorageService.toTimeout(TimeValue.ZERO)); + assertEquals(-1, GoogleCloudStorageService.toTimeout(null).intValue()); + assertEquals(-1, GoogleCloudStorageService.toTimeout(TimeValue.ZERO).intValue()); assertEquals(0, GoogleCloudStorageService.toTimeout(TimeValue.MINUS_ONE).intValue()); } } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java index 325cea132beb6..2b52b7a32a9cc 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/MockStorage.java @@ -19,289 +19,478 @@ package org.elasticsearch.repositories.gcs; -import com.google.api.client.googleapis.json.GoogleJsonError; -import com.google.api.client.googleapis.json.GoogleJsonResponseException; -import com.google.api.client.http.AbstractInputStreamContent; -import com.google.api.client.http.HttpHeaders; -import com.google.api.client.http.HttpMethods; -import com.google.api.client.http.HttpRequest; -import com.google.api.client.http.HttpRequestInitializer; -import com.google.api.client.http.HttpResponseException; -import com.google.api.client.http.LowLevelHttpRequest; -import com.google.api.client.http.LowLevelHttpResponse; -import com.google.api.client.http.MultipartContent; -import com.google.api.client.json.JsonFactory; -import com.google.api.client.testing.http.MockHttpTransport; -import com.google.api.client.testing.http.MockLowLevelHttpRequest; -import com.google.api.client.testing.http.MockLowLevelHttpResponse; -import 
com.google.api.services.storage.Storage; -import com.google.api.services.storage.model.Bucket; -import com.google.api.services.storage.model.StorageObject; -import org.elasticsearch.common.io.Streams; -import org.elasticsearch.rest.RestStatus; +import com.google.api.gax.paging.Page; +import com.google.cloud.Policy; +import com.google.cloud.ReadChannel; +import com.google.cloud.RestorableState; +import com.google.cloud.WriteChannel; +import com.google.cloud.storage.Acl; +import com.google.cloud.storage.Blob; +import com.google.cloud.storage.BlobId; +import com.google.cloud.storage.BlobInfo; +import com.google.cloud.storage.Bucket; +import com.google.cloud.storage.BucketInfo; +import com.google.cloud.storage.CopyWriter; +import com.google.cloud.storage.ServiceAccount; +import com.google.cloud.storage.Storage; +import com.google.cloud.storage.StorageBatch; +import com.google.cloud.storage.StorageException; +import com.google.cloud.storage.StorageOptions; +import com.google.cloud.storage.StorageRpcOptionUtils; +import com.google.cloud.storage.StorageTestUtils; + +import org.elasticsearch.core.internal.io.IOUtils; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; -import java.math.BigInteger; +import java.net.URL; +import java.nio.ByteBuffer; +import java.nio.channels.Channels; +import java.nio.channels.ReadableByteChannel; +import java.nio.channels.WritableByteChannel; import java.util.ArrayList; +import java.util.List; +import java.util.Objects; import java.util.concurrent.ConcurrentMap; - -import static org.mockito.Mockito.mock; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; /** * {@link MockStorage} mocks a {@link Storage} client by storing all the blobs * in a given concurrent map. */ -class MockStorage extends Storage { - - /* A custom HTTP header name used to propagate the name of the blobs to delete in batch requests */ - private static final String DELETION_HEADER = "x-blob-to-delete"; +class MockStorage implements Storage { private final String bucketName; private final ConcurrentMap blobs; MockStorage(final String bucket, final ConcurrentMap blobs) { - super(new MockedHttpTransport(blobs), mock(JsonFactory.class), mock(HttpRequestInitializer.class)); - this.bucketName = bucket; - this.blobs = blobs; + this.bucketName = Objects.requireNonNull(bucket); + this.blobs = Objects.requireNonNull(blobs); } @Override - public Buckets buckets() { - return new MockBuckets(); + public Bucket get(String bucket, BucketGetOption... 
options) { + if (bucketName.equals(bucket)) { + return StorageTestUtils.createBucket(this, bucketName); + } else { + return null; + } } @Override - public Objects objects() { - return new MockObjects(); + public Blob get(BlobId blob) { + if (bucketName.equals(blob.getBucket())) { + final byte[] bytes = blobs.get(blob.getName()); + if (bytes != null) { + return StorageTestUtils.createBlob(this, bucketName, blob.getName(), bytes.length); + } + } + return null; } - class MockBuckets extends Buckets { + @Override + public boolean delete(BlobId blob) { + if (bucketName.equals(blob.getBucket()) && blobs.containsKey(blob.getName())) { + return blobs.remove(blob.getName()) != null; + } + return false; + } - @Override - public Get get(String getBucket) { - return new Get(getBucket) { - @Override - public Bucket execute() { - if (bucketName.equals(getBucket())) { - Bucket bucket = new Bucket(); - bucket.setId(bucketName); - return bucket; - } else { - return null; - } - } - }; + @Override + public List delete(Iterable blobIds) { + final List ans = new ArrayList<>(); + for (final BlobId blobId : blobIds) { + ans.add(delete(blobId)); } + return ans; } - class MockObjects extends Objects { + @Override + public Blob create(BlobInfo blobInfo, byte[] content, BlobTargetOption... options) { + if (bucketName.equals(blobInfo.getBucket()) == false) { + throw new StorageException(404, "Bucket not found"); + } + blobs.put(blobInfo.getName(), content); + return get(BlobId.of(blobInfo.getBucket(), blobInfo.getName())); + } + + @Override + public CopyWriter copy(CopyRequest copyRequest) { + if (bucketName.equals(copyRequest.getSource().getBucket()) == false) { + throw new StorageException(404, "Source bucket not found"); + } + if (bucketName.equals(copyRequest.getTarget().getBucket()) == false) { + throw new StorageException(404, "Target bucket not found"); + } + + final byte[] bytes = blobs.get(copyRequest.getSource().getName()); + if (bytes == null) { + throw new StorageException(404, "Source blob does not exist"); + } + blobs.put(copyRequest.getTarget().getName(), bytes); + return StorageRpcOptionUtils + .createCopyWriter(get(BlobId.of(copyRequest.getTarget().getBucket(), copyRequest.getTarget().getName()))); + } + + @Override + public Page list(String bucket, BlobListOption... options) { + if (bucketName.equals(bucket) == false) { + throw new StorageException(404, "Bucket not found"); + } + final Storage storage = this; + final String prefix = StorageRpcOptionUtils.getPrefix(options); - @Override - public Get get(String getBucket, String getObject) { - return new Get(getBucket, getObject) { + return new Page() { + @Override + public boolean hasNextPage() { + return false; + } + + @Override + public String getNextPageToken() { + return null; + } + + @Override + public Page getNextPage() { + throw new UnsupportedOperationException(); + } + + @Override + public Iterable iterateAll() { + return blobs.entrySet().stream() + .filter(blob -> ((prefix == null) || blob.getKey().startsWith(prefix))) + .map(blob -> StorageTestUtils.createBlob(storage, bucketName, blob.getKey(), blob.getValue().length)) + .collect(Collectors.toList()); + } + + @Override + public Iterable getValues() { + throw new UnsupportedOperationException(); + } + }; + } + + @Override + public ReadChannel reader(BlobId blob, BlobSourceOption... 
options) { + if (bucketName.equals(blob.getBucket())) { + final byte[] bytes = blobs.get(blob.getName()); + final ReadableByteChannel readableByteChannel = Channels.newChannel(new ByteArrayInputStream(bytes)); + return new ReadChannel() { @Override - public StorageObject execute() throws IOException { - if (bucketName.equals(getBucket()) == false) { - throw newBucketNotFoundException(getBucket()); - } - if (blobs.containsKey(getObject()) == false) { - throw newObjectNotFoundException(getObject()); - } - - StorageObject storageObject = new StorageObject(); - storageObject.setId(getObject()); - return storageObject; + public void close() { + IOUtils.closeWhileHandlingException(readableByteChannel); } @Override - public InputStream executeMediaAsInputStream() throws IOException { - if (bucketName.equals(getBucket()) == false) { - throw newBucketNotFoundException(getBucket()); - } - if (blobs.containsKey(getObject()) == false) { - throw newObjectNotFoundException(getObject()); - } - return new ByteArrayInputStream(blobs.get(getObject())); + public void seek(long position) throws IOException { + throw new UnsupportedOperationException(); } - }; - } - @Override - public Insert insert(String insertBucket, StorageObject insertObject, AbstractInputStreamContent insertStream) { - return new Insert(insertBucket, insertObject) { @Override - public StorageObject execute() throws IOException { - if (bucketName.equals(getBucket()) == false) { - throw newBucketNotFoundException(getBucket()); - } - - ByteArrayOutputStream out = new ByteArrayOutputStream(); - Streams.copy(insertStream.getInputStream(), out); - blobs.put(getName(), out.toByteArray()); - return null; + public void setChunkSize(int chunkSize) { + throw new UnsupportedOperationException(); + } + + @Override + public RestorableState capture() { + throw new UnsupportedOperationException(); + } + + @Override + public int read(ByteBuffer dst) throws IOException { + return readableByteChannel.read(dst); } - }; - } - @Override - public List list(String listBucket) { - return new List(listBucket) { @Override - public com.google.api.services.storage.model.Objects execute() throws IOException { - if (bucketName.equals(getBucket()) == false) { - throw newBucketNotFoundException(getBucket()); - } - - final com.google.api.services.storage.model.Objects objects = new com.google.api.services.storage.model.Objects(); - - final java.util.List storageObjects = new ArrayList<>(); - for (Entry blob : blobs.entrySet()) { - if (getPrefix() == null || blob.getKey().startsWith(getPrefix())) { - StorageObject storageObject = new StorageObject(); - storageObject.setId(blob.getKey()); - storageObject.setName(blob.getKey()); - storageObject.setSize(BigInteger.valueOf((long) blob.getValue().length)); - storageObjects.add(storageObject); - } - } - - objects.setItems(storageObjects); - return objects; + public boolean isOpen() { + return readableByteChannel.isOpen(); } }; } + return null; + } + + @Override + public WriteChannel writer(BlobInfo blobInfo, BlobWriteOption... 
options) { + if (bucketName.equals(blobInfo.getBucket())) { + final ByteArrayOutputStream output = new ByteArrayOutputStream(); + return new WriteChannel() { + + final WritableByteChannel writableByteChannel = Channels.newChannel(output); - @Override - public Delete delete(String deleteBucket, String deleteObject) { - return new Delete(deleteBucket, deleteObject) { @Override - public Void execute() throws IOException { - if (bucketName.equals(getBucket()) == false) { - throw newBucketNotFoundException(getBucket()); - } + public void setChunkSize(int chunkSize) { + throw new UnsupportedOperationException(); + } - if (blobs.containsKey(getObject()) == false) { - throw newObjectNotFoundException(getObject()); - } + @Override + public RestorableState capture() { + throw new UnsupportedOperationException(); + } - blobs.remove(getObject()); - return null; + @Override + public int write(ByteBuffer src) throws IOException { + return writableByteChannel.write(src); } @Override - public HttpRequest buildHttpRequest() throws IOException { - HttpRequest httpRequest = super.buildHttpRequest(); - httpRequest.getHeaders().put(DELETION_HEADER, getObject()); - return httpRequest; + public boolean isOpen() { + return writableByteChannel.isOpen(); } - }; - } - @Override - public Copy copy(String srcBucket, String srcObject, String destBucket, String destObject, StorageObject content) { - return new Copy(srcBucket, srcObject, destBucket, destObject, content) { @Override - public StorageObject execute() throws IOException { - if (bucketName.equals(getSourceBucket()) == false) { - throw newBucketNotFoundException(getSourceBucket()); - } - if (bucketName.equals(getDestinationBucket()) == false) { - throw newBucketNotFoundException(getDestinationBucket()); - } - - final byte[] bytes = blobs.get(getSourceObject()); - if (bytes == null) { - throw newObjectNotFoundException(getSourceObject()); - } - blobs.put(getDestinationObject(), bytes); - - StorageObject storageObject = new StorageObject(); - storageObject.setId(getDestinationObject()); - return storageObject; + public void close() throws IOException { + IOUtils.closeWhileHandlingException(writableByteChannel); + blobs.put(blobInfo.getName(), output.toByteArray()); } }; } + return null; } - private static GoogleJsonResponseException newBucketNotFoundException(final String bucket) { - HttpResponseException.Builder builder = new HttpResponseException.Builder(404, "Bucket not found: " + bucket, new HttpHeaders()); - return new GoogleJsonResponseException(builder, new GoogleJsonError()); + // Everything below this line is not implemented. + + @Override + public Bucket create(BucketInfo bucketInfo, BucketTargetOption... options) { + return null; } - private static GoogleJsonResponseException newObjectNotFoundException(final String object) { - HttpResponseException.Builder builder = new HttpResponseException.Builder(404, "Object not found: " + object, new HttpHeaders()); - return new GoogleJsonResponseException(builder, new GoogleJsonError()); + @Override + public Blob create(BlobInfo blobInfo, BlobTargetOption... options) { + return null; } - /** - * {@link MockedHttpTransport} extends the existing testing transport to analyze the content - * of {@link com.google.api.client.googleapis.batch.BatchRequest} and delete the appropriates - * blobs. We use this because {@link Storage#batch()} is final and there is no other way to - * extend batch requests for testing purposes. 
- */ - static class MockedHttpTransport extends MockHttpTransport { + @Override + public Blob create(BlobInfo blobInfo, InputStream content, BlobWriteOption... options) { + return null; + } - private final ConcurrentMap blobs; + @Override + public Blob get(String bucket, String blob, BlobGetOption... options) { + return null; + } - MockedHttpTransport(final ConcurrentMap blobs) { - this.blobs = blobs; - } + @Override + public Blob get(BlobId blob, BlobGetOption... options) { + return null; + } - @Override - public LowLevelHttpRequest buildRequest(final String method, final String url) throws IOException { - // We analyze the content of the Batch request to detect our custom HTTP header, - // and extract from it the name of the blob to delete. Then we reply a simple - // batch response so that the client parser is happy. - // - // See https://cloud.google.com/storage/docs/json_api/v1/how-tos/batch for the - // format of the batch request body. - if (HttpMethods.POST.equals(method) && url.endsWith("/batch")) { - return new MockLowLevelHttpRequest() { - @Override - public LowLevelHttpResponse execute() throws IOException { - final String contentType = new MultipartContent().getType(); - - final StringBuilder builder = new StringBuilder(); - try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { - getStreamingContent().writeTo(out); - - Streams.readAllLines(new ByteArrayInputStream(out.toByteArray()), line -> { - if (line != null && line.startsWith(DELETION_HEADER)) { - builder.append("--__END_OF_PART__\r\n"); - builder.append("Content-Type: application/http").append("\r\n"); - builder.append("\r\n"); - builder.append("HTTP/1.1 "); - - final String blobName = line.substring(line.indexOf(':') + 1).trim(); - if (blobs.containsKey(blobName)) { - builder.append(RestStatus.OK.getStatus()); - blobs.remove(blobName); - } else { - builder.append(RestStatus.NOT_FOUND.getStatus()); - } - builder.append("\r\n"); - builder.append("Content-Type: application/json; charset=UTF-8").append("\r\n"); - builder.append("Content-Length: 0").append("\r\n"); - builder.append("\r\n"); - } - }); - builder.append("\r\n"); - builder.append("--__END_OF_PART__--"); - } - - MockLowLevelHttpResponse response = new MockLowLevelHttpResponse(); - response.setStatusCode(200); - response.setContent(builder.toString()); - response.setContentType(contentType); - return response; - } - }; - } else { - return super.buildRequest(method, url); - } - } + @Override + public Page list(BucketListOption... options) { + return null; + } + + @Override + public Bucket update(BucketInfo bucketInfo, BucketTargetOption... options) { + return null; + } + + @Override + public Blob update(BlobInfo blobInfo, BlobTargetOption... options) { + return null; + } + + @Override + public Blob update(BlobInfo blobInfo) { + return null; + } + + @Override + public boolean delete(String bucket, BucketSourceOption... options) { + return false; + } + + @Override + public boolean delete(String bucket, String blob, BlobSourceOption... options) { + return false; + } + + @Override + public boolean delete(BlobId blob, BlobSourceOption... options) { + return false; + } + + @Override + public Blob compose(ComposeRequest composeRequest) { + return null; + } + + @Override + public byte[] readAllBytes(String bucket, String blob, BlobSourceOption... options) { + return new byte[0]; + } + + @Override + public byte[] readAllBytes(BlobId blob, BlobSourceOption... 
options) { + return new byte[0]; + } + + @Override + public StorageBatch batch() { + return null; + } + + @Override + public ReadChannel reader(String bucket, String blob, BlobSourceOption... options) { + return null; + } + + @Override + public URL signUrl(BlobInfo blobInfo, long duration, TimeUnit unit, SignUrlOption... options) { + return null; + } + + @Override + public List get(BlobId... blobIds) { + return null; + } + + @Override + public List get(Iterable blobIds) { + return null; + } + + @Override + public List update(BlobInfo... blobInfos) { + return null; + } + + @Override + public List update(Iterable blobInfos) { + return null; + } + + @Override + public List delete(BlobId... blobIds) { + return null; + } + + @Override + public Acl getAcl(String bucket, Acl.Entity entity, BucketSourceOption... options) { + return null; + } + + @Override + public Acl getAcl(String bucket, Acl.Entity entity) { + return null; + } + + @Override + public boolean deleteAcl(String bucket, Acl.Entity entity, BucketSourceOption... options) { + return false; + } + + @Override + public boolean deleteAcl(String bucket, Acl.Entity entity) { + return false; + } + + @Override + public Acl createAcl(String bucket, Acl acl, BucketSourceOption... options) { + return null; + } + + @Override + public Acl createAcl(String bucket, Acl acl) { + return null; + } + + @Override + public Acl updateAcl(String bucket, Acl acl, BucketSourceOption... options) { + return null; + } + + @Override + public Acl updateAcl(String bucket, Acl acl) { + return null; + } + + @Override + public List listAcls(String bucket, BucketSourceOption... options) { + return null; + } + + @Override + public List listAcls(String bucket) { + return null; + } + + @Override + public Acl getDefaultAcl(String bucket, Acl.Entity entity) { + return null; + } + + @Override + public boolean deleteDefaultAcl(String bucket, Acl.Entity entity) { + return false; + } + + @Override + public Acl createDefaultAcl(String bucket, Acl acl) { + return null; + } + + @Override + public Acl updateDefaultAcl(String bucket, Acl acl) { + return null; + } + + @Override + public List listDefaultAcls(String bucket) { + return null; + } + + @Override + public Acl getAcl(BlobId blob, Acl.Entity entity) { + return null; + } + + @Override + public boolean deleteAcl(BlobId blob, Acl.Entity entity) { + return false; + } + + @Override + public Acl createAcl(BlobId blob, Acl acl) { + return null; + } + + @Override + public Acl updateAcl(BlobId blob, Acl acl) { + return null; + } + + @Override + public List listAcls(BlobId blob) { + return null; + } + + @Override + public Policy getIamPolicy(String bucket, BucketSourceOption... options) { + return null; + } + + @Override + public Policy setIamPolicy(String bucket, Policy policy, BucketSourceOption... options) { + return null; + } + + @Override + public List testIamPermissions(String bucket, List permissions, BucketSourceOption... 
options) { + return null; + } + + @Override + public ServiceAccount getServiceAccount(String projectId) { + return null; + } + + @Override + public StorageOptions getOptions() { + return null; } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml index 19f0584112a5a..74774f13e212e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/10_basic.yml @@ -34,7 +34,7 @@ setup: "Split index via API": - skip: version: " - 6.9.99" - reason: expects warnings that pre-7.0.0 will not send + reason: pre-7.0.0 will send warnings features: "warnings" # make it read-only @@ -105,12 +105,8 @@ setup: --- "Split from 1 to N": - skip: - # when re-enabling uncomment the below skips - version: "all" - reason: "AwaitsFix'ing, see https://github.com/elastic/elasticsearch/issues/30503" - # version: " - 6.9.99" - # reason: expects warnings that pre-7.0.0 will not send - features: "warnings" + version: " - 6.99.99" + reason: automatic preparation for splitting was added in 7.0.0 - do: indices.create: index: source_one_shard @@ -204,13 +200,11 @@ setup: - match: { _id: "3" } - match: { _source: { foo: "hello world 3" } } - - --- "Create illegal split indices": - skip: - version: " - 6.3.99" - reason: expects warnings that pre-6.4.0 will not send + version: " - 6.9.99" + reason: pre-7.0.0 will send warnings features: "warnings" # try to do an illegal split with number_of_routing_shards set diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml index 68aa4e3253c8a..727e1e374ba65 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.split/20_source_mapping.yml @@ -1,11 +1,8 @@ --- "Split index ignores target template mapping": - skip: - # when re-enabling uncomment the below skips - version: "all" - reason: "AwaitsFix'ing, see https://github.com/elastic/elasticsearch/issues/30503" - # version: " - 6.9.99" - # reason: expects warnings that pre-7.0.0 will not send + version: " - 6.9.99" + reason: pre-7.0.0 will send warnings features: "warnings" # create index diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index b018e24a565b8..5d4e558dbb25b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; @@ -45,6 +46,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import 
org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; import java.io.IOException; @@ -543,9 +545,6 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (customs.isEmpty() == false) { - throw new IllegalArgumentException("Custom data type is no longer supported in index template [" + customs + "]"); - } builder.field("index_patterns", indexPatterns); builder.field("order", order); if (version != null) { @@ -558,8 +557,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject("mappings"); for (Map.Entry entry : mappings.entrySet()) { - Map mapping = XContentHelper.convertToMap(new BytesArray(entry.getValue()), false).v2(); - builder.field(entry.getKey(), mapping); + builder.field(entry.getKey()); + XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, entry.getValue()); + builder.copyCurrentStructure(parser); } builder.endObject(); @@ -568,6 +569,11 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws alias.toXContent(builder, params); } builder.endObject(); + + for (Map.Entry entry : customs.entrySet()) { + builder.field(entry.getKey(), entry.getValue(), params); + } + return builder; } } diff --git a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java index 04bbb9279dab5..f47760491f8d5 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java @@ -158,6 +158,7 @@ private static class Entry { /** The decrypted secret data. See {@link #decrypt(char[])}. */ private final SetOnce> entries = new SetOnce<>(); + private volatile boolean closed; private KeyStoreWrapper(int formatVersion, boolean hasPassword, byte[] dataBytes) { this.formatVersion = formatVersion; @@ -448,8 +449,8 @@ private void decryptLegacyEntries() throws GeneralSecurityException, IOException } /** Write the keystore to the given config directory. */ - public void save(Path configDir, char[] password) throws Exception { - assert isLoaded(); + public synchronized void save(Path configDir, char[] password) throws Exception { + ensureOpen(); SimpleFSDirectory directory = new SimpleFSDirectory(configDir); // write to tmp file first, then overwrite @@ -500,16 +501,22 @@ public void save(Path configDir, char[] password) throws Exception { } } + /** + * It is possible to retrieve the setting names even if the keystore is closed. + * This allows {@link SecureSetting} to correctly determine that a entry exists even though it cannot be read. Thus attempting to + * read a secure setting after the keystore is closed will generate a "keystore is closed" exception rather than using the fallback + * setting. 
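+     * This way a read after close fails loudly instead of silently behaving as if the setting were absent.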
+ */ @Override public Set getSettingNames() { - assert isLoaded(); + assert entries.get() != null : "Keystore is not loaded"; return entries.get().keySet(); } // TODO: make settings accessible only to code that registered the setting @Override - public SecureString getString(String setting) { - assert isLoaded(); + public synchronized SecureString getString(String setting) { + ensureOpen(); Entry entry = entries.get().get(setting); if (entry == null || entry.type != EntryType.STRING) { throw new IllegalArgumentException("Secret setting " + setting + " is not a string"); @@ -520,13 +527,12 @@ public SecureString getString(String setting) { } @Override - public InputStream getFile(String setting) { - assert isLoaded(); + public synchronized InputStream getFile(String setting) { + ensureOpen(); Entry entry = entries.get().get(setting); if (entry == null || entry.type != EntryType.FILE) { throw new IllegalArgumentException("Secret setting " + setting + " is not a file"); } - return new ByteArrayInputStream(entry.bytes); } @@ -543,8 +549,8 @@ public static void validateSettingName(String setting) { } /** Set a string setting. */ - void setString(String setting, char[] value) { - assert isLoaded(); + synchronized void setString(String setting, char[] value) { + ensureOpen(); validateSettingName(setting); ByteBuffer byteBuffer = StandardCharsets.UTF_8.encode(CharBuffer.wrap(value)); @@ -556,8 +562,8 @@ void setString(String setting, char[] value) { } /** Set a file setting. */ - void setFile(String setting, byte[] bytes) { - assert isLoaded(); + synchronized void setFile(String setting, byte[] bytes) { + ensureOpen(); validateSettingName(setting); Entry oldEntry = entries.get().put(setting, new Entry(EntryType.FILE, Arrays.copyOf(bytes, bytes.length))); @@ -568,15 +574,23 @@ void setFile(String setting, byte[] bytes) { /** Remove the given setting from the keystore. 
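+     * The keystore must still be open; the bytes of the removed entry are zeroed out.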
*/ void remove(String setting) { - assert isLoaded(); + ensureOpen(); Entry oldEntry = entries.get().remove(setting); if (oldEntry != null) { Arrays.fill(oldEntry.bytes, (byte)0); } } + private void ensureOpen() { + if (closed) { + throw new IllegalStateException("Keystore is closed"); + } + assert isLoaded() : "Keystore is not loaded"; + } + @Override - public void close() { + public synchronized void close() { + this.closed = true; for (Entry entry : entries.get().values()) { Arrays.fill(entry.bytes, (byte)0); } diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index de29386022cc6..0374d74dcf58b 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -731,13 +731,13 @@ static final class StoreDirectory extends FilterDirectory { private final Logger deletesLogger; - StoreDirectory(Directory delegateDirectory, Logger deletesLogger) throws IOException { + StoreDirectory(Directory delegateDirectory, Logger deletesLogger) { super(delegateDirectory); this.deletesLogger = deletesLogger; } @Override - public void close() throws IOException { + public void close() { assert false : "Nobody should close this directory except of the Store itself"; @@ -759,6 +759,17 @@ private void innerClose() throws IOException { public String toString() { return "store(" + in.toString() + ")"; } + + @Override + public boolean checkPendingDeletions() throws IOException { + if (super.checkPendingDeletions()) { + deletesLogger.warn("directory still has pending deletes"); + } + // we skip this check since our IW usage always goes forward. + // we still might run into situations where we have pending deletes, i.e. 
in the shrink / split case, + and that will cause issues on Windows since we open multiple IW instances one after another during the split/shrink recovery + return false; + } } /** diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java index 8443ac2bf2e3d..e48f151081f62 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/ShrinkIndexIT.java @@ -23,7 +23,6 @@ import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.search.SortedSetSortField; -import org.apache.lucene.util.LuceneTestCase.AwaitsFix; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; @@ -77,7 +76,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30416") public class ShrinkIndexIT extends ESIntegTestCase { @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java index a7f7ed6f52546..fe6e980ab4259 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java @@ -24,7 +24,6 @@ import org.apache.lucene.search.SortedSetSelector; import org.apache.lucene.search.SortedSetSortField; import org.apache.lucene.search.join.ScoreMode; -import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; @@ -81,7 +80,6 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30416") public class SplitIndexIT extends ESIntegTestCase { @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java index 294213452596f..577a8b55e61a3 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java @@ -23,18 +23,18 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.yaml.YamlXContent; -import 
org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.AbstractXContentTestCase; import java.io.IOException; +import java.io.UncheckedIOException; import java.util.Arrays; import java.util.Base64; import java.util.Collections; @@ -45,7 +45,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.Is.is; -public class PutIndexTemplateRequestTests extends ESTestCase { +public class PutIndexTemplateRequestTests extends AbstractXContentTestCase { // bwc for #21009 public void testPutIndexTemplateRequest510() throws IOException { @@ -137,13 +137,14 @@ public void testValidateErrorMessage() throws Exception { assertThat(noError, is(nullValue())); } - private PutIndexTemplateRequest randomPutIndexTemplateRequest() throws IOException { + @Override + protected PutIndexTemplateRequest createTestInstance() { PutIndexTemplateRequest request = new PutIndexTemplateRequest(); request.name("test"); - if (randomBoolean()){ + if (randomBoolean()) { request.version(randomInt()); } - if (randomBoolean()){ + if (randomBoolean()) { request.order(randomInt()); } request.patterns(Arrays.asList(generateRandomStringArray(20, 100, false, false))); @@ -159,30 +160,39 @@ private PutIndexTemplateRequest randomPutIndexTemplateRequest() throws IOExcepti request.alias(alias); } if (randomBoolean()) { - request.mapping("doc", XContentFactory.jsonBuilder().startObject() - .startObject("doc").startObject("properties") - .startObject("field-" + randomInt()).field("type", randomFrom("keyword", "text")).endObject() - .endObject().endObject().endObject()); + try { + request.mapping("doc", XContentFactory.jsonBuilder().startObject() + .startObject("doc").startObject("properties") + .startObject("field-" + randomInt()).field("type", randomFrom("keyword", "text")).endObject() + .endObject().endObject().endObject()); + } catch (IOException ex) { + throw new UncheckedIOException(ex); + } } - if (randomBoolean()){ + if (randomBoolean()) { request.settings(Settings.builder().put("setting1", randomLong()).put("setting2", randomTimeValue()).build()); } return request; } - public void testFromToXContentPutTemplateRequest() throws Exception { - for (int i = 0; i < 10; i++) { - PutIndexTemplateRequest expected = randomPutIndexTemplateRequest(); - XContentType xContentType = randomFrom(XContentType.values()); - BytesReference shuffled = toShuffledXContent(expected, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean()); - PutIndexTemplateRequest parsed = new PutIndexTemplateRequest().source(shuffled, xContentType); - assertNotSame(expected, parsed); - assertThat(parsed.version(), equalTo(expected.version())); - assertThat(parsed.order(), equalTo(expected.order())); - assertThat(parsed.patterns(), equalTo(expected.patterns())); - assertThat(parsed.aliases(), equalTo(expected.aliases())); - assertThat(parsed.mappings(), equalTo(expected.mappings())); - assertThat(parsed.settings(), equalTo(expected.settings())); - } + @Override + protected PutIndexTemplateRequest doParseInstance(XContentParser parser) throws IOException { + return new PutIndexTemplateRequest().source(parser.map()); + } + + @Override + protected void assertEqualInstances(PutIndexTemplateRequest expected, PutIndexTemplateRequest actual) { + assertNotSame(expected, actual); + assertThat(actual.version(), equalTo(expected.version())); + assertThat(actual.order(), equalTo(expected.order())); + assertThat(actual.patterns(), equalTo(expected.patterns())); + assertThat(actual.aliases(), equalTo(expected.aliases())); + assertThat(actual.mappings(), 
equalTo(expected.mappings())); + assertThat(actual.settings(), equalTo(expected.settings())); + } + + @Override + protected boolean supportsUnknownFields() { + return false; } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateResponseTests.java new file mode 100644 index 0000000000000..096d62bf2bb5b --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateResponseTests.java @@ -0,0 +1,45 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.template.put; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; + +public class PutIndexTemplateResponseTests extends AbstractStreamableXContentTestCase { + @Override + protected PutIndexTemplateResponse doParseInstance(XContentParser parser) { + return PutIndexTemplateResponse.fromXContent(parser); + } + + @Override + protected PutIndexTemplateResponse createTestInstance() { + return new PutIndexTemplateResponse(randomBoolean()); + } + + @Override + protected PutIndexTemplateResponse createBlankInstance() { + return new PutIndexTemplateResponse(); + } + + @Override + protected PutIndexTemplateResponse mutateInstance(PutIndexTemplateResponse response) { + return new PutIndexTemplateResponse(response.isAcknowledged() == false); + } +} diff --git a/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java b/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java index e22836087367c..849841943ecc6 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java @@ -48,11 +48,13 @@ import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.Environment; import org.elasticsearch.test.ESTestCase; +import org.hamcrest.Matchers; import org.junit.After; import org.junit.Before; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.instanceOf; public class KeyStoreWrapperTests extends ESTestCase { @@ -97,6 +99,19 @@ public void testCreate() throws Exception { assertTrue(keystore.getSettingNames().contains(KeyStoreWrapper.SEED_SETTING.getKey())); } + public void testCannotReadStringFromClosedKeystore() throws Exception { + KeyStoreWrapper keystore = KeyStoreWrapper.create(); + assertThat(keystore.getSettingNames(), Matchers.hasItem(KeyStoreWrapper.SEED_SETTING.getKey())); + 
assertThat(keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()), notNullValue()); + + keystore.close(); + + assertThat(keystore.getSettingNames(), Matchers.hasItem(KeyStoreWrapper.SEED_SETTING.getKey())); + final IllegalStateException exception = expectThrows(IllegalStateException.class, + () -> keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey())); + assertThat(exception.getMessage(), containsString("closed")); + } + public void testUpgradeNoop() throws Exception { KeyStoreWrapper keystore = KeyStoreWrapper.create(); SecureString seed = keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java index 6178a72c83e3e..eb6a2e40a01b9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java @@ -84,7 +84,7 @@ protected void assertReduced(InternalExtendedStats reduced, List createParser(b parser.declareString(CategoryDefinition::setRegex, REGEX); parser.declareLong(CategoryDefinition::setMaxMatchingLength, MAX_MATCHING_LENGTH); parser.declareStringArray(CategoryDefinition::setExamples, EXAMPLES); + parser.declareString(CategoryDefinition::setGrokPattern, GROK_PATTERN); return parser; } @@ -61,6 +64,7 @@ private static ConstructingObjectParser createParser(b private String regex = ""; private long maxMatchingLength = 0L; private final Set examples; + private String grokPattern; public CategoryDefinition(String jobId) { this.jobId = jobId; @@ -74,6 +78,9 @@ public CategoryDefinition(StreamInput in) throws IOException { regex = in.readString(); maxMatchingLength = in.readLong(); examples = new TreeSet<>(in.readList(StreamInput::readString)); + if (in.getVersion().onOrAfter(Version.V_6_4_0)) { + grokPattern = in.readOptionalString(); + } } @Override @@ -84,6 +91,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(regex); out.writeLong(maxMatchingLength); out.writeStringList(new ArrayList<>(examples)); + if (out.getVersion().onOrAfter(Version.V_6_4_0)) { + out.writeOptionalString(grokPattern); + } } public String getJobId() { @@ -139,6 +149,14 @@ public void addExample(String example) { examples.add(example); } + public String getGrokPattern() { + return grokPattern; + } + + public void setGrokPattern(String grokPattern) { + this.grokPattern = grokPattern; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -148,6 +166,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(REGEX.getPreferredName(), regex); builder.field(MAX_MATCHING_LENGTH.getPreferredName(), maxMatchingLength); builder.field(EXAMPLES.getPreferredName(), examples); + if (grokPattern != null) { + builder.field(GROK_PATTERN.getPreferredName(), grokPattern); + } builder.endObject(); return builder; } @@ -166,11 +187,12 @@ public boolean equals(Object other) { && Objects.equals(this.terms, that.terms) && Objects.equals(this.regex, that.regex) && Objects.equals(this.maxMatchingLength, that.maxMatchingLength) - && Objects.equals(this.examples, that.examples); + && Objects.equals(this.examples, that.examples) + && Objects.equals(this.grokPattern, that.grokPattern); } @Override public int hashCode() { - return 
Objects.hash(jobId, categoryId, terms, regex, maxMatchingLength, examples); + return Objects.hash(jobId, categoryId, terms, regex, maxMatchingLength, examples, grokPattern); } } diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index d9d4882b00e1c..8b991555c0670 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -46,6 +46,7 @@ dependencies { testCompile project(path: xpackModule('security'), configuration: 'testArtifacts') // ml deps + compile project(':libs:grok') compile 'net.sf.supercsv:super-csv:2.4.0' nativeBundle "org.elasticsearch.ml:ml-cpp:${project.version}@zip" testCompile 'org.ini4j:ini4j:0.5.2' diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java index 25d0cc0cdf821..abf3a33052995 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java @@ -41,7 +41,7 @@ protected void doExecute(GetCategoriesAction.Request request, ActionListener listener.onResponse(new GetCategoriesAction.Response(r)), listener::onFailure, client); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreator.java new file mode 100644 index 0000000000000..04280261b2634 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreator.java @@ -0,0 +1,243 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.job.categorization; + +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.grok.Grok; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + + +/** + * Creates Grok patterns that will match all the examples in a given category_definition. + * + * The choice of field names is quite primitive. The intention is that a human will edit these. + */ +public final class GrokPatternCreator { + + private static String PREFACE = "preface"; + private static String EPILOGUE = "epilogue"; + + /** + * The first match in this list will be chosen, so it needs to be ordered + * such that more generic patterns come after more specific patterns. 
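+     * For example, the generic DATE and TIME patterns must come after the composite timestamp patterns that also match a date or time.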
+ */ + private static final List ORDERED_CANDIDATE_GROK_PATTERNS = Arrays.asList( + new GrokPatternCandidate("TIMESTAMP_ISO8601", "timestamp"), + new GrokPatternCandidate("DATESTAMP_RFC822", "timestamp"), + new GrokPatternCandidate("DATESTAMP_RFC2822", "timestamp"), + new GrokPatternCandidate("DATESTAMP_OTHER", "timestamp"), + new GrokPatternCandidate("DATESTAMP_EVENTLOG", "timestamp"), + new GrokPatternCandidate("SYSLOGTIMESTAMP", "timestamp"), + new GrokPatternCandidate("HTTPDATE", "timestamp"), + new GrokPatternCandidate("CATALINA_DATESTAMP", "timestamp"), + new GrokPatternCandidate("TOMCAT_DATESTAMP", "timestamp"), + new GrokPatternCandidate("CISCOTIMESTAMP", "timestamp"), + new GrokPatternCandidate("DATE", "date"), + new GrokPatternCandidate("TIME", "time"), + new GrokPatternCandidate("LOGLEVEL", "loglevel"), + new GrokPatternCandidate("URI", "uri"), + new GrokPatternCandidate("UUID", "uuid"), + new GrokPatternCandidate("MAC", "macaddress"), + // Can't use \b as the breaks, because slashes are not "word" characters + new GrokPatternCandidate("PATH", "path", "(? examples) { + + // The first string in this array will end up being the empty string, and it doesn't correspond + // to an "in between" bit. Although it could be removed for "neatness", it actually makes the + // loops below slightly neater if it's left in. + // + // E.g., ".*?cat.+?sat.+?mat.*" -> [ "", "cat", "sat", "mat" ] + String[] fixedRegexBits = regex.split("\\.[*+]\\??"); + + // Create a pattern that will capture the bits in between the fixed parts of the regex + // + // E.g., ".*?cat.+?sat.+?mat.*" -> Pattern (.*?)cat(.+?)sat(.+?)mat(.*) + Pattern exampleProcessor = Pattern.compile(regex.replaceAll("(\\.[*+]\\??)", "($1)"), Pattern.DOTALL); + + List> groupsMatchesFromExamples = new ArrayList<>(fixedRegexBits.length); + for (int i = 0; i < fixedRegexBits.length; ++i) { + groupsMatchesFromExamples.add(new ArrayList<>(examples.size())); + } + for (String example : examples) { + Matcher matcher = exampleProcessor.matcher(example); + if (matcher.matches()) { + assert matcher.groupCount() == fixedRegexBits.length; + // E.g., if the input regex was ".*?cat.+?sat.+?mat.*" then the example + // "the cat sat on the mat" will result in "the ", " ", " on the ", and "" + // being added to the 4 "in between" collections in that order + for (int groupNum = 1; groupNum <= matcher.groupCount(); ++groupNum) { + groupsMatchesFromExamples.get(groupNum - 1).add(matcher.group(groupNum)); + } + } else { + // We should never get here. If we do it implies a bug in the original categorization, + // as it's produced a regex that doesn't match the examples. 
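+                // outside of tests the mismatch is only logged and the example contributes nothing to the pattern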
+ assert matcher.matches() : exampleProcessor.pattern() + " did not match " + example; + Loggers.getLogger(GrokPatternCreator.class).error("[{}] Pattern [{}] did not match example [{}]", jobId, + exampleProcessor.pattern(), example); + } + } + + Map fieldNameCountStore = new HashMap<>(); + StringBuilder overallGrokPatternBuilder = new StringBuilder(); + // Finally, for each collection of "in between" bits we look for the best Grok pattern and incorporate + // it into the overall Grok pattern that will match the each example in its entirety + for (int inBetweenBitNum = 0; inBetweenBitNum < groupsMatchesFromExamples.size(); ++inBetweenBitNum) { + // Remember (from the first comment in this method) that the first element in this array is + // always the empty string + overallGrokPatternBuilder.append(fixedRegexBits[inBetweenBitNum]); + appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, inBetweenBitNum == 0, + inBetweenBitNum == fixedRegexBits.length - 1, groupsMatchesFromExamples.get(inBetweenBitNum)); + } + return overallGrokPatternBuilder.toString(); + } + + /** + * Given a collection of strings, work out which (if any) of the grok patterns we're allowed + * to use matches it best. Then append the appropriate grok language to represent that finding + * onto the supplied string builder. + */ + static void appendBestGrokMatchForStrings(Map fieldNameCountStore, StringBuilder overallGrokPatternBuilder, + boolean isFirst, boolean isLast, Collection mustMatchStrings) { + + GrokPatternCandidate bestCandidate = null; + if (mustMatchStrings.isEmpty() == false) { + for (GrokPatternCandidate candidate : ORDERED_CANDIDATE_GROK_PATTERNS) { + if (mustMatchStrings.stream().allMatch(candidate.grok::match)) { + bestCandidate = candidate; + break; + } + } + } + + if (bestCandidate == null) { + if (isLast) { + overallGrokPatternBuilder.append(".*"); + } else if (isFirst || mustMatchStrings.stream().anyMatch(String::isEmpty)) { + overallGrokPatternBuilder.append(".*?"); + } else { + overallGrokPatternBuilder.append(".+?"); + } + } else { + Collection prefaces = new ArrayList<>(); + Collection epilogues = new ArrayList<>(); + populatePrefacesAndEpilogues(mustMatchStrings, bestCandidate.grok, prefaces, epilogues); + appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, isFirst, false, prefaces); + overallGrokPatternBuilder.append("%{").append(bestCandidate.grokPatternName).append(':') + .append(buildFieldName(fieldNameCountStore, bestCandidate.fieldName)).append('}'); + appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, false, isLast, epilogues); + } + } + + /** + * Given a collection of strings, and a grok pattern that matches some part of them all, + * return collections of the bits that come before (prefaces) and after (epilogues) the + * bit that matches. + */ + static void populatePrefacesAndEpilogues(Collection matchingStrings, Grok grok, Collection prefaces, + Collection epilogues) { + for (String s : matchingStrings) { + Map captures = grok.captures(s); + // If the pattern doesn't match then captures will be null. But we expect this + // method to only be called after validating that the pattern does match. + assert captures != null; + prefaces.add(captures.getOrDefault(PREFACE, "").toString()); + epilogues.add(captures.getOrDefault(EPILOGUE, "").toString()); + } + } + + /** + * The first time a particular field name is passed, simply return it. + * The second time return it with "2" appended. 
+ * The third time return it with "3" appended. + * Etc. + */ + static String buildFieldName(Map fieldNameCountStore, String fieldName) { + Integer numberSeen = fieldNameCountStore.compute(fieldName, (k, v) -> 1 + ((v == null) ? 0 : v)); + if (numberSeen > 1) { + return fieldName + numberSeen; + } else { + return fieldName; + } + } + + static class GrokPatternCandidate { + + final String grokPatternName; + final String fieldName; + final Grok grok; + + /** + * Pre/post breaks default to \b, but this may not be appropriate for Grok patterns that start or + * end with a non "word" character (i.e. letter, number or underscore). For such patterns use one + * of the other constructors. + * + * In cases where the Grok pattern defined by Logstash already includes conditions on what must + * come before and after the match, use one of the other constructors and specify an empty string + * for the pre and/or post breaks. + * @param grokPatternName Name of the Grok pattern to try to match - must match one defined in Logstash. + * @param fieldName Name of the field to extract from the match. + */ + GrokPatternCandidate(String grokPatternName, String fieldName) { + this(grokPatternName, fieldName, "\\b", "\\b"); + } + + GrokPatternCandidate(String grokPatternName, String fieldName, String preBreak) { + this(grokPatternName, fieldName, preBreak, "\\b"); + } + + /** + * @param grokPatternName Name of the Grok pattern to try to match - must match one defined in Logstash. + * @param fieldName Name of the field to extract from the match. + * @param preBreak Only consider the match if it's broken from the previous text by this. + * @param postBreak Only consider the match if it's broken from the following text by this. + */ + GrokPatternCandidate(String grokPatternName, String fieldName, String preBreak, String postBreak) { + this.grokPatternName = grokPatternName; + this.fieldName = fieldName; + this.grok = new Grok(Grok.getBuiltinPatterns(), "%{DATA:" + PREFACE + "}" + preBreak + "%{" + grokPatternName + ":this}" + + postBreak + "%{GREEDYDATA:" + EPILOGUE + "}"); + } + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java index 4b15ef36e6ac7..d7b10fb622bdf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java @@ -98,6 +98,7 @@ import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.MlIndicesUtils; import org.elasticsearch.xpack.core.security.support.Exceptions; +import org.elasticsearch.xpack.ml.job.categorization.GrokPatternCreator; import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder.InfluencersQuery; import org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams; @@ -486,7 +487,7 @@ private T parseSearchHit(SearchHit hit, BiFunction } } - private T parseGetHit(GetResponse getResponse, BiFunction objectParser, + private T parseGetHit(GetResponse getResponse, BiFunction objectParser, Consumer errorHandler) { BytesReference source = getResponse.getSourceAsBytesRef(); @@ -626,10 +627,11 @@ public void bucketRecords(String jobId, Bucket bucket, int from, int size, boole * Get a page of {@linkplain CategoryDefinition}s for the given jobId. 
* Uses a supplied client, so may run as the currently authenticated user * @param jobId the job id + * @param augment Should the category definition be augmented with a Grok pattern? * @param from Skip the first N categories. This parameter is for paging * @param size Take only this number of categories */ - public void categoryDefinitions(String jobId, Long categoryId, Integer from, Integer size, + public void categoryDefinitions(String jobId, Long categoryId, boolean augment, Integer from, Integer size, Consumer> handler, Consumer errorHandler, Client client) { if (categoryId != null && (from != null || size != null)) { @@ -663,6 +665,9 @@ public void categoryDefinitions(String jobId, Long categoryId, Integer from, Int XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)) .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { CategoryDefinition categoryDefinition = CategoryDefinition.LENIENT_PARSER.apply(parser, null); + if (augment) { + augmentWithGrokPattern(categoryDefinition); + } results.add(categoryDefinition); } catch (IOException e) { throw new ElasticsearchParseException("failed to parse category definition", e); @@ -674,6 +679,17 @@ public void categoryDefinitions(String jobId, Long categoryId, Integer from, Int }, e -> errorHandler.accept(mapAuthFailure(e, jobId, GetCategoriesAction.NAME))), client::search); } + void augmentWithGrokPattern(CategoryDefinition categoryDefinition) { + List examples = categoryDefinition.getExamples(); + String regex = categoryDefinition.getRegex(); + if (examples.isEmpty() || regex.isEmpty()) { + categoryDefinition.setGrokPattern(""); + } else { + categoryDefinition.setGrokPattern(GrokPatternCreator.findBestGrokMatchFromExamples(categoryDefinition.getJobId(), + regex, examples)); + } + } + /** * Search for anomaly records with the parameters in the * {@link RecordsQueryBuilder} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java index 484d1648fbbb2..09bb3f7591677 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectResultProcessorIT.java @@ -461,7 +461,7 @@ private QueryPage getCategoryDefinition(long categoryId) thr AtomicReference errorHolder = new AtomicReference<>(); AtomicReference> resultHolder = new AtomicReference<>(); CountDownLatch latch = new CountDownLatch(1); - jobProvider.categoryDefinitions(JOB_ID, categoryId, null, null, r -> { + jobProvider.categoryDefinitions(JOB_ID, categoryId, false, null, null, r -> { resultHolder.set(r); latch.countDown(); }, e -> { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreatorTests.java new file mode 100644 index 0000000000000..4189dc35f0caa --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/GrokPatternCreatorTests.java @@ -0,0 +1,232 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.ml.job.categorization; + +import org.elasticsearch.grok.Grok; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsInAnyOrder; + +public class GrokPatternCreatorTests extends ESTestCase { + + public void testBuildFieldName() { + Map fieldNameCountStore = new HashMap<>(); + assertEquals("field", GrokPatternCreator.buildFieldName(fieldNameCountStore, "field")); + assertEquals("field2", GrokPatternCreator.buildFieldName(fieldNameCountStore, "field")); + assertEquals("field3", GrokPatternCreator.buildFieldName(fieldNameCountStore, "field")); + assertEquals("timestamp", GrokPatternCreator.buildFieldName(fieldNameCountStore, "timestamp")); + assertEquals("field4", GrokPatternCreator.buildFieldName(fieldNameCountStore, "field")); + assertEquals("uri", GrokPatternCreator.buildFieldName(fieldNameCountStore, "uri")); + assertEquals("timestamp2", GrokPatternCreator.buildFieldName(fieldNameCountStore, "timestamp")); + assertEquals("field5", GrokPatternCreator.buildFieldName(fieldNameCountStore, "field")); + } + + public void testPopulatePrefacesAndEpiloguesGivenTimestamp() { + + Collection matchingStrings = Arrays.asList("[2018-01-25T15:33:23] DEBUG ", + "[2018-01-24T12:33:23] ERROR ", + "junk [2018-01-22T07:33:23] INFO ", + "[2018-01-21T03:33:23] DEBUG "); + Grok grok = new GrokPatternCreator.GrokPatternCandidate("TIMESTAMP_ISO8601", "timestamp").grok; + Collection prefaces = new ArrayList<>(); + Collection epilogues = new ArrayList<>(); + + GrokPatternCreator.populatePrefacesAndEpilogues(matchingStrings, grok, prefaces, epilogues); + + assertThat(prefaces, containsInAnyOrder("[", "[", "junk [", "[")); + assertThat(epilogues, containsInAnyOrder("] DEBUG ", "] ERROR ", "] INFO ", "] DEBUG ")); + } + + public void testPopulatePrefacesAndEpiloguesGivenEmailAddress() { + + Collection matchingStrings = Arrays.asList("before alice@acme.com after", + "abc bob@acme.com xyz", + "carol@acme.com"); + Grok grok = new GrokPatternCreator.GrokPatternCandidate("EMAILADDRESS", "email").grok; + Collection prefaces = new ArrayList<>(); + Collection epilogues = new ArrayList<>(); + + GrokPatternCreator.populatePrefacesAndEpilogues(matchingStrings, grok, prefaces, epilogues); + + assertThat(prefaces, containsInAnyOrder("before ", "abc ", "")); + assertThat(epilogues, containsInAnyOrder(" after", " xyz", "")); + } + + public void testAppendBestGrokMatchForStringsGivenTimestampsAndLogLevels() { + + Collection mustMatchStrings = Arrays.asList("[2018-01-25T15:33:23] DEBUG ", + "[2018-01-24T12:33:23] ERROR ", + "junk [2018-01-22T07:33:23] INFO ", + "[2018-01-21T03:33:23] DEBUG "); + + Map fieldNameCountStore = new HashMap<>(); + StringBuilder overallGrokPatternBuilder = new StringBuilder(); + + GrokPatternCreator.appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, false, false, mustMatchStrings); + + assertEquals(".+?%{TIMESTAMP_ISO8601:timestamp}.+?%{LOGLEVEL:loglevel}.+?", overallGrokPatternBuilder.toString()); + } + + public void testAppendBestGrokMatchForStringsGivenNumbersInBrackets() { + + Collection mustMatchStrings = Arrays.asList("(-2)", + " (-3)", + " (4)", + " (-5) "); + + Map fieldNameCountStore = new HashMap<>(); + StringBuilder overallGrokPatternBuilder = new StringBuilder(); + + GrokPatternCreator.appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, 
false, false, mustMatchStrings); + + assertEquals(".+?%{NUMBER:field}.+?", overallGrokPatternBuilder.toString()); + } + + public void testAppendBestGrokMatchForStringsGivenNegativeNumbersWithoutBreak() { + + Collection<String> mustMatchStrings = Arrays.asList("before-2 ", + "prior to-3", + "-4"); + + Map<String, Integer> fieldNameCountStore = new HashMap<>(); + StringBuilder overallGrokPatternBuilder = new StringBuilder(); + + GrokPatternCreator.appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, false, false, mustMatchStrings); + + // It seems sensible that we don't detect these suffixes as either base 10 or base 16 numbers + assertEquals(".+?", overallGrokPatternBuilder.toString()); + } + + public void testAppendBestGrokMatchForStringsGivenHexNumbers() { + + Collection<String> mustMatchStrings = Arrays.asList(" abc", + " 123", + " -123", + "1f is hex"); + + Map<String, Integer> fieldNameCountStore = new HashMap<>(); + StringBuilder overallGrokPatternBuilder = new StringBuilder(); + + GrokPatternCreator.appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, false, false, mustMatchStrings); + + assertEquals(".*?%{BASE16NUM:field}.*?", overallGrokPatternBuilder.toString()); + } + + public void testAppendBestGrokMatchForStringsGivenHostnamesWithNumbers() { + + Collection<String> mustMatchStrings = Arrays.asList("<host1.1.p2ps:", + "<host2.1.p2ps:"); + + Map<String, Integer> fieldNameCountStore = new HashMap<>(); + StringBuilder overallGrokPatternBuilder = new StringBuilder(); + + GrokPatternCreator.appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, false, false, mustMatchStrings); + + // We don't want the .1. in the middle to get detected as a hex number + assertEquals(".+?", overallGrokPatternBuilder.toString()); + } + + public void testAppendBestGrokMatchForStringsGivenEmailAddresses() { + + Collection<String> mustMatchStrings = Arrays.asList("before alice@acme.com after", + "abc bob@acme.com xyz", + "carol@acme.com"); + + Map<String, Integer> fieldNameCountStore = new HashMap<>(); + StringBuilder overallGrokPatternBuilder = new StringBuilder(); + + GrokPatternCreator.appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, false, false, mustMatchStrings); + + assertEquals(".*?%{EMAILADDRESS:email}.*?", overallGrokPatternBuilder.toString()); + } + + public void testAppendBestGrokMatchForStringsGivenUris() { + + Collection<String> mustMatchStrings = Arrays.asList("main site https://www.elastic.co/ with trailing slash", + "https://www.elastic.co/guide/en/x-pack/current/ml-configuring-categories.html#ml-configuring-categories is a section", + "download today from https://www.elastic.co/downloads"); + + Map<String, Integer> fieldNameCountStore = new HashMap<>(); + StringBuilder overallGrokPatternBuilder = new StringBuilder(); + + GrokPatternCreator.appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, false, false, mustMatchStrings); + + assertEquals(".*?%{URI:uri}.*?", overallGrokPatternBuilder.toString()); + } + + public void testAppendBestGrokMatchForStringsGivenPaths() { + + Collection<String> mustMatchStrings = Arrays.asList("on Mac /Users/dave", + "on Windows C:\\Users\\dave", + "on Linux /home/dave"); + + Map<String, Integer> fieldNameCountStore = new HashMap<>(); + StringBuilder overallGrokPatternBuilder = new StringBuilder(); + + GrokPatternCreator.appendBestGrokMatchForStrings(fieldNameCountStore, overallGrokPatternBuilder, false, false, mustMatchStrings); + + assertEquals(".+?%{PATH:path}.*?", overallGrokPatternBuilder.toString()); + } + + public void testFindBestGrokMatchFromExamplesGivenNamedLogs() { + + String regex =
".*?linux.+?named.+?error.+?unexpected.+?RCODE.+?REFUSED.+?resolving.*"; + Collection examples = Arrays.asList( + "Sep 8 11:55:06 linux named[22529]: error (unexpected RCODE REFUSED) resolving 'elastic.slack.com/A/IN': 95.110.64.205#53", + "Sep 8 11:55:08 linux named[22529]: error (unexpected RCODE REFUSED) resolving 'slack-imgs.com/A/IN': 95.110.64.205#53", + "Sep 8 11:55:35 linux named[22529]: error (unexpected RCODE REFUSED) resolving 'www.elastic.co/A/IN': 95.110.68.206#53", + "Sep 8 11:55:42 linux named[22529]: error (unexpected RCODE REFUSED) resolving 'b.akamaiedge.net/A/IN': 95.110.64.205#53"); + + assertEquals(".*?%{SYSLOGTIMESTAMP:timestamp}.+?linux.+?named.+?%{NUMBER:field}.+?error.+?" + + "unexpected.+?RCODE.+?REFUSED.+?resolving.+?%{QUOTEDSTRING:field2}.+?%{IP:ipaddress}.+?%{NUMBER:field3}.*", + GrokPatternCreator.findBestGrokMatchFromExamples("foo", regex, examples)); + } + + public void testFindBestGrokMatchFromExamplesGivenCatalinaLogs() { + + String regex = ".*?org\\.apache\\.tomcat\\.util\\.http\\.Parameters.+?processParameters.+?WARNING.+?Parameters.+?" + + "Invalid.+?chunk.+?ignored.*"; + // The embedded newline ensures the regular expressions we're using are compiled with Pattern.DOTALL + Collection examples = Arrays.asList( + "Aug 29, 2009 12:03:33 AM org.apache.tomcat.util.http.Parameters processParameters\nWARNING: Parameters: " + + "Invalid chunk ignored.", + "Aug 29, 2009 12:03:40 AM org.apache.tomcat.util.http.Parameters processParameters\nWARNING: Parameters: " + + "Invalid chunk ignored.", + "Aug 29, 2009 12:03:45 AM org.apache.tomcat.util.http.Parameters processParameters\nWARNING: Parameters: " + + "Invalid chunk ignored.", + "Aug 29, 2009 12:03:57 AM org.apache.tomcat.util.http.Parameters processParameters\nWARNING: Parameters: " + + "Invalid chunk ignored."); + + assertEquals(".*?%{CATALINA_DATESTAMP:timestamp}.+?org\\.apache\\.tomcat\\.util\\.http\\.Parameters.+?processParameters.+?" + + "WARNING.+?Parameters.+?Invalid.+?chunk.+?ignored.*", + GrokPatternCreator.findBestGrokMatchFromExamples("foo", regex, examples)); + } + + public void testFindBestGrokMatchFromExamplesGivenMultiTimestampLogs() { + + String regex = ".*?Authpriv.+?Info.+?sshd.+?subsystem.+?request.+?for.+?sftp.*"; + // Two timestamps: one local, one UTC + Collection examples = Arrays.asList( + "559550912540598297\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t38545844\tserv02nw07\t192.168.114.28\tAuthpriv\t" + + "Info\tsshd\tsubsystem request for sftp", + "559550912548986880\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t9049724\tserv02nw03\t10.120.48.147\tAuthpriv\t" + + "Info\tsshd\tsubsystem request for sftp", + "559550912548986887\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t884343\tserv02tw03\t192.168.121.189\tAuthpriv\t" + + "Info\tsshd\tsubsystem request for sftp", + "559550912603512850\t2016-04-20T14:06:53\t2016-04-20T21:06:53Z\t8907014\tserv02nw01\t192.168.118.208\tAuthpriv\t" + + "Info\tsshd\tsubsystem request for sftp"); + + assertEquals(".*?%{NUMBER:field}.+?%{TIMESTAMP_ISO8601:timestamp}.+?%{TIMESTAMP_ISO8601:timestamp2}.+?%{NUMBER:field2}.+?" 
+ + "%{IP:ipaddress}.+?Authpriv.+?Info.+?sshd.+?subsystem.+?request.+?for.+?sftp.*", + GrokPatternCreator.findBestGrokMatchFromExamples("foo", regex, examples)); + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java index 485fe44a95fa9..9fea904a99fa1 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobProviderTests.java @@ -61,7 +61,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; @@ -235,8 +234,7 @@ public void onFailure(Exception e) { }); } - public void testBuckets_OneBucketNoInterim() - throws InterruptedException, ExecutionException, IOException { + public void testBuckets_OneBucketNoInterim() throws IOException { String jobId = "TestJobIdentification"; Date now = new Date(); List> source = new ArrayList<>(); @@ -268,8 +266,7 @@ public void testBuckets_OneBucketNoInterim() ".*")); } - public void testBuckets_OneBucketInterim() - throws InterruptedException, ExecutionException, IOException { + public void testBuckets_OneBucketInterim() throws IOException { String jobId = "TestJobIdentification"; Date now = new Date(); List> source = new ArrayList<>(); @@ -302,8 +299,7 @@ public void testBuckets_OneBucketInterim() assertFalse(queryString.matches("(?s).*is_interim.*")); } - public void testBuckets_UsingBuilder() - throws InterruptedException, ExecutionException, IOException { + public void testBuckets_UsingBuilder() throws IOException { String jobId = "TestJobIdentification"; Date now = new Date(); List> source = new ArrayList<>(); @@ -339,8 +335,7 @@ public void testBuckets_UsingBuilder() assertFalse(queryString.matches("(?s).*is_interim.*")); } - public void testBucket_NoBucketNoExpand() - throws InterruptedException, ExecutionException, IOException { + public void testBucket_NoBucketNoExpand() throws IOException { String jobId = "TestJobIdentification"; Long timestamp = 98765432123456789L; List> source = new ArrayList<>(); @@ -357,8 +352,7 @@ public void testBucket_NoBucketNoExpand() assertEquals(ResourceNotFoundException.class, holder[0].getClass()); } - public void testBucket_OneBucketNoExpand() - throws InterruptedException, ExecutionException, IOException { + public void testBucket_OneBucketNoExpand() throws IOException { String jobId = "TestJobIdentification"; Date now = new Date(); List> source = new ArrayList<>(); @@ -384,7 +378,7 @@ public void testBucket_OneBucketNoExpand() assertEquals(now, b.getTimestamp()); } - public void testRecords() throws InterruptedException, ExecutionException, IOException { + public void testRecords() throws IOException { String jobId = "TestJobIdentification"; Date now = new Date(); List> source = new ArrayList<>(); @@ -431,8 +425,7 @@ public void testRecords() throws InterruptedException, ExecutionException, IOExc assertEquals("irrascible", records.get(1).getFunction()); } - public void testRecords_UsingBuilder() - throws InterruptedException, ExecutionException, IOException { + public void testRecords_UsingBuilder() throws IOException { String jobId = "TestJobIdentification"; Date now = new Date(); List> source = new ArrayList<>(); @@ -485,7 +478,7 @@ public void testRecords_UsingBuilder() 
assertEquals("irrascible", records.get(1).getFunction()); } - public void testBucketRecords() throws InterruptedException, ExecutionException, IOException { + public void testBucketRecords() throws IOException { String jobId = "TestJobIdentification"; Date now = new Date(); Bucket bucket = mock(Bucket.class); @@ -532,7 +525,7 @@ public void testBucketRecords() throws InterruptedException, ExecutionException, assertEquals("irrascible", records.get(1).getFunction()); } - public void testexpandBucket() throws InterruptedException, ExecutionException, IOException { + public void testexpandBucket() throws IOException { String jobId = "TestJobIdentification"; Date now = new Date(); Bucket bucket = new Bucket("foo", now, 22); @@ -559,8 +552,7 @@ public void testexpandBucket() throws InterruptedException, ExecutionException, assertEquals(400L, records); } - public void testCategoryDefinitions() - throws InterruptedException, ExecutionException, IOException { + public void testCategoryDefinitions() throws IOException { String jobId = "TestJobIdentification"; String terms = "the terms and conditions are not valid here"; List> source = new ArrayList<>(); @@ -580,15 +572,14 @@ public void testCategoryDefinitions() JobProvider provider = createProvider(client); @SuppressWarnings({"unchecked", "rawtypes"}) QueryPage[] holder = new QueryPage[1]; - provider.categoryDefinitions(jobId, null, from, size, r -> holder[0] = r, + provider.categoryDefinitions(jobId, null, false, from, size, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); QueryPage categoryDefinitions = holder[0]; assertEquals(1L, categoryDefinitions.count()); assertEquals(terms, categoryDefinitions.results().get(0).getTerms()); } - public void testCategoryDefinition() - throws InterruptedException, ExecutionException, IOException { + public void testCategoryDefinition() throws IOException { String jobId = "TestJobIdentification"; String terms = "the terms and conditions are not valid here"; @@ -603,14 +594,14 @@ public void testCategoryDefinition() JobProvider provider = createProvider(client); @SuppressWarnings({"unchecked", "rawtypes"}) QueryPage[] holder = new QueryPage[1]; - provider.categoryDefinitions(jobId, categoryId, null, null, + provider.categoryDefinitions(jobId, categoryId, false, null, null, r -> holder[0] = r, e -> {throw new RuntimeException(e);}, client); QueryPage categoryDefinitions = holder[0]; assertEquals(1L, categoryDefinitions.count()); assertEquals(terms, categoryDefinitions.results().get(0).getTerms()); } - public void testInfluencers_NoInterim() throws InterruptedException, ExecutionException, IOException { + public void testInfluencers_NoInterim() throws IOException { String jobId = "TestJobIdentificationForInfluencers"; Date now = new Date(); List> source = new ArrayList<>(); @@ -670,7 +661,7 @@ public void testInfluencers_NoInterim() throws InterruptedException, ExecutionEx assertEquals(5.0, records.get(1).getInitialInfluencerScore(), 0.00001); } - public void testInfluencers_WithInterim() throws InterruptedException, ExecutionException, IOException { + public void testInfluencers_WithInterim() throws IOException { String jobId = "TestJobIdentificationForInfluencers"; Date now = new Date(); List> source = new ArrayList<>(); @@ -730,7 +721,7 @@ public void testInfluencers_WithInterim() throws InterruptedException, Execution assertEquals(5.0, records.get(1).getInitialInfluencerScore(), 0.00001); } - public void testModelSnapshots() throws InterruptedException, ExecutionException, IOException { + 
public void testModelSnapshots() throws IOException { String jobId = "TestJobIdentificationForInfluencers"; Date now = new Date(); List> source = new ArrayList<>(); @@ -851,8 +842,7 @@ private static GetResponse createGetResponse(boolean exists, Map return getResponse; } - private static SearchResponse createSearchResponse(List> source) - throws IOException { + private static SearchResponse createSearchResponse(List> source) throws IOException { SearchResponse response = mock(SearchResponse.class); List list = new ArrayList<>(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/CategoryDefinitionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/CategoryDefinitionTests.java index fdaa28508235a..ee7d4ad4b7add 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/CategoryDefinitionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/results/CategoryDefinitionTests.java @@ -25,6 +25,9 @@ public CategoryDefinition createTestInstance(String jobId) { categoryDefinition.setRegex(randomAlphaOfLength(10)); categoryDefinition.setMaxMatchingLength(randomLong()); categoryDefinition.setExamples(Arrays.asList(generateRandomStringArray(10, 10, false))); + if (randomBoolean()) { + categoryDefinition.setGrokPattern(randomAlphaOfLength(50)); + } return categoryDefinition; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java index a44d39a0d7a56..c143978468dfd 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java @@ -113,7 +113,13 @@ public ByteBuffer getNetworkReadBuffer() { } public void read(InboundChannelBuffer buffer) throws SSLException { - currentMode.read(buffer); + Mode modePriorToRead; + do { + modePriorToRead = currentMode; + currentMode.read(buffer); + // If we switched modes we want to read again as there might be unhandled bytes that need to be + // handled by the new mode. 
+ } while (modePriorToRead != currentMode); } public boolean readyForApplicationWrites() { @@ -365,8 +371,9 @@ public void read(InboundChannelBuffer buffer) throws SSLException { try { SSLEngineResult result = unwrap(buffer); handshakeStatus = result.getHandshakeStatus(); - continueUnwrap = result.bytesConsumed() > 0; handshake(); + // If we are done handshaking we should exit the handshake read + continueUnwrap = result.bytesConsumed() > 0 && currentMode.isHandshake(); } catch (SSLException e) { closingInternal(); throw e; diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java index 6040de8f50cda..c9ad4b3053cbe 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java @@ -8,7 +8,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; -import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.TimeUnits; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -30,7 +29,6 @@ import static org.hamcrest.Matchers.is; @TimeoutSuite(millis = 5 * TimeUnits.MINUTE) // to account for slow as hell VMs -@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30456") public class UpgradeClusterClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { /**
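For readers unfamiliar with the categorization change in this patch, here is a minimal standalone sketch (not part of the patch itself) of the candidate-matching idea behind GrokPatternCandidate: a named Logstash pattern is wrapped between %{DATA:...} and %{GREEDYDATA:...} captures so that the text before and after each match can be collected and analysed further. The class name GrokCandidateSketch and the capture names "preface" and "epilogue" are illustrative assumptions; the Grok constructor and Grok.getBuiltinPatterns() are as used in the patch, and captures() is assumed to behave as in the org.elasticsearch.grok.Grok class.

import org.elasticsearch.grok.Grok;

import java.util.Map;

public class GrokCandidateSketch {
    public static void main(String[] args) {
        // Wrap a candidate pattern (TIMESTAMP_ISO8601 here) between DATA/GREEDYDATA captures,
        // mirroring the pattern string built in GrokPatternCandidate's constructor above.
        Grok grok = new Grok(Grok.getBuiltinPatterns(),
                "%{DATA:preface}\\b%{TIMESTAMP_ISO8601:this}\\b%{GREEDYDATA:epilogue}");

        // One of the example strings used in GrokPatternCreatorTests.
        Map<String, Object> captures = grok.captures("[2018-01-25T15:33:23] DEBUG ");
        if (captures != null) {
            System.out.println(captures.get("preface"));   // "["
            System.out.println(captures.get("this"));      // "2018-01-25T15:33:23"
            System.out.println(captures.get("epilogue"));  // "] DEBUG "
        }
    }
}

Collecting these preface and epilogue strings across all of a category's examples is what lets findBestGrokMatchFromExamples recurse on the unmatched text and assemble the overall grok_pattern returned with the category definition.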