Commit 7e68294

Merge e5d3887 into be51eb6
Tpt committed Nov 20, 2019
2 parents be51eb6 + e5d3887
Showing 9 changed files with 180 additions and 33 deletions.
@@ -190,6 +190,18 @@ public static String getJsonString(PropertyDocument propertyDocument) {
return jacksonObjectToString(propertyDocument);
}

/**
* Serializes the given object in JSON and returns the resulting string. In
* case of errors, null is returned.
*
* @param mediaInfoDocument
* object to serialize
* @return JSON serialization or null
*/
public static String getJsonString(MediaInfoDocument mediaInfoDocument) {
return jacksonObjectToString(mediaInfoDocument);
}

/**
* Serializes the given object in JSON and returns the resulting string. In
* case of errors, null is returned.
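For orientation, a minimal usage sketch of the new overload. Two assumptions: the enclosing helper class (not visible in this hunk) is org.wikidata.wdtk.datamodel.helpers.JsonSerializer, and the fetch mirrors the example file added later in this commit.

import org.wikidata.wdtk.datamodel.helpers.JsonSerializer;
import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument;
import org.wikidata.wdtk.wikibaseapi.WikibaseDataFetcher;

public class MediaInfoJsonSketch {
	public static void main(String[] args) throws Exception {
		// Fetch a MediaInfo document from Wikimedia Commons ...
		WikibaseDataFetcher fetcher = WikibaseDataFetcher.getWikimediaCommonsDataFetcher();
		MediaInfoDocument doc = (MediaInfoDocument) fetcher.getEntityDocumentByTitle(
				"commonswiki", "File:Black hole - Messier 87 crop max res.jpg");
		// ... and serialize it; the helper returns null instead of throwing on errors.
		String json = JsonSerializer.getJsonString(doc);
		System.out.println(json);
	}
}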
@@ -97,11 +97,6 @@ public MediaInfoIdValue getEntityId() {
return new MediaInfoIdValueImpl(this.entityId, this.siteIri);
}

@JsonProperty("type")
String getType() {
return EntityDocumentImpl.JSON_TYPE_MEDIA_INFO;
}

@JsonProperty("statements")
@Override
public Map<String, List<Statement>> getJsonClaims() {
@@ -0,0 +1,72 @@
package org.wikidata.wdtk.examples;

/*
* #%L
* Wikidata Toolkit Examples
* %%
* Copyright (C) 2014 - 2015 Wikidata Toolkit Developers
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import org.wikidata.wdtk.datamodel.interfaces.*;
import org.wikidata.wdtk.wikibaseapi.WikibaseDataFetcher;
import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException;

import java.io.IOException;

/**
* This example shows how to retrieve MediaInfo data from the API.
*
* @author Thomas Pellissier Tanon
*
*/
public class OnlineMediaInfoExample {

public static void main(String[] args) throws IOException, MediaWikiApiErrorException {
ExampleHelpers.configureLogging();
printDocumentation();

WikibaseDataFetcher commonsDataFetcher = WikibaseDataFetcher.getWikimediaCommonsDataFetcher();
WikibaseDataFetcher wikidataDataFetcher = WikibaseDataFetcher.getWikidataDataFetcher();

System.out.println("*** Retrieving a media info document ...");
MediaInfoDocument mediaInfoDocument = (MediaInfoDocument) commonsDataFetcher.getEntityDocumentByTitle("commonswiki", "File:Black hole - Messier 87 crop max res.jpg");

// Print the English caption
System.out.println("Caption: " + mediaInfoDocument.getLabels().get("en").getText());

// Print the depicted items (P180), with labels fetched from Wikidata:
StatementGroup depictsGroup = mediaInfoDocument.findStatementGroup("P180");
if (depictsGroup != null) { // guard against files carrying no "depicts" statements
for (Statement statement : depictsGroup.getStatements()) {
Value value = statement.getValue();
if (value instanceof ItemIdValue) {
ItemDocument depicted = (ItemDocument) wikidataDataFetcher.getEntityDocument(((ItemIdValue) value).getId());
System.out.println("Depicts: " + depicted.getLabels().get("en").getText() + " (" + depicted.getEntityId().getIri() + ")");
}
}
}

System.out.println("*** Done.");
}

/**
* Prints some basic documentation about this program.
*/
public static void printDocumentation() {
System.out.println("********************************************************************");
System.out.println("*** Wikidata Toolkit: MediaInfoDataExample");
System.out.println("*** ");
System.out.println("*** It does not download any dump files. See source code for details.");
System.out.println("********************************************************************");
}
}
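A note on the example above: getEntityDocumentByTitle is declared to return EntityDocument, so the cast to MediaInfoDocument is required; for a Commons file page the concrete type is MediaInfoDocument (JSON type "mediainfo", as in the test fixture at the end of this commit). Production code would also check for null before casting, since titles that do not exist yield no document.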
@@ -70,6 +70,10 @@ public class ApiConnection {
* URL of the API of test.wikidata.org.
*/
public final static String URL_TEST_WIKIDATA_API = "https://test.wikidata.org/w/api.php";
/**
* URL of the API of commons.wikimedia.org.
*/
public final static String URL_WIKIMEDIA_COMMONS_API = "https://commons.wikimedia.org/w/api.php";

/**
* Name of the HTTP parameter to submit an action to the API.
@@ -87,6 +87,15 @@ public static BasicApiConnection getWikidataApiConnection() {
public static BasicApiConnection getTestWikidataApiConnection() {
return new BasicApiConnection(ApiConnection.URL_TEST_WIKIDATA_API);
}

/**
* Creates an API connection to commons.wikimedia.org.
*
* @return {@link BasicApiConnection}
*/
public static BasicApiConnection getWikimediaCommonsApiConnection() {
return new BasicApiConnection(ApiConnection.URL_WIKIMEDIA_COMMONS_API);
}

/**
* Logs in using the specified user credentials. After successful login, the
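A hedged sketch of how the new factory combines with the WikibaseDataFetcher constructor referenced later in this commit; Datamodel.SITE_WIKIMEDIA_COMMONS is the site IRI constant that the fetcher's Javadoc names.

import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.wikibaseapi.BasicApiConnection;
import org.wikidata.wdtk.wikibaseapi.WikibaseDataFetcher;

public class CommonsConnectionSketch {
	public static void main(String[] args) {
		// Anonymous (not logged in) connection to the Commons API.
		BasicApiConnection connection = BasicApiConnection.getWikimediaCommonsApiConnection();
		// Equivalent to the WikibaseDataFetcher.getWikimediaCommonsDataFetcher()
		// convenience method added by this commit.
		WikibaseDataFetcher fetcher = new WikibaseDataFetcher(connection, Datamodel.SITE_WIKIMEDIA_COMMONS);
	}
}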
@@ -26,12 +26,12 @@
import java.util.Map;
import java.util.Map.Entry;

import com.fasterxml.jackson.databind.DeserializationFeature;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wikidata.wdtk.datamodel.helpers.DatamodelMapper;
import org.wikidata.wdtk.datamodel.implementation.EntityDocumentImpl;
import org.wikidata.wdtk.datamodel.implementation.ItemDocumentImpl;
import org.wikidata.wdtk.datamodel.interfaces.EntityDocument;
import org.wikidata.wdtk.datamodel.interfaces.*;
import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException;

import com.fasterxml.jackson.core.JsonProcessingException;
@@ -197,21 +197,24 @@ public Map<String, EntityDocument> wbGetEntities(String ids, String sites,
while(entitiesIterator.hasNext()) {
Entry<String,JsonNode> entry = entitiesIterator.next();
JsonNode entityNode = entry.getValue();
if (!entityNode.has("missing")) {
try {
// The Commons API serializes a MediaInfo entity without statements as
// "statements": [] (an empty array), so the reader must accept an empty
// array in place of a null object.
EntityDocument ed = mapper.reader()
.with(DeserializationFeature.ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT)
.treeToValue(entityNode, EntityDocumentImpl.class);

if (titles == null) {
// We use the JSON key rather than the id of the value
// so that retrieving redirected entities works.
result.put(entry.getKey(), ed);
} else {
if (ed instanceof ItemDocument) {
SiteLink siteLink = ((ItemDocument) ed).getSiteLinks().get(sites);
if (siteLink != null) {
result.put(siteLink.getPageTitle(), ed);
}
} else if (ed instanceof MediaInfoDocument) {
// MediaInfo documents carry no sitelinks, so key the result by the
// page title echoed in the API response.
result.put(entityNode.get("title").textValue(), ed);
}
}
} catch (JsonProcessingException e) {
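A note on the keying above: items are keyed by the page title taken from their sitelink for the requested site, while MediaInfo entities, which have no sitelinks, are keyed by the title field of the response. Either way the key matches the title the caller passed, so title-based lookups such as getEntityDocumentByTitle resolve "File:..." requests directly.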
@@ -1,7 +1,5 @@
package org.wikidata.wdtk.wikibaseapi;

import java.io.IOException;

/*
* #%L
* Wikidata Toolkit Wikibase API
@@ -11,9 +9,9 @@
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -22,33 +20,26 @@
* #L%
*/

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wikidata.wdtk.datamodel.helpers.Datamodel;
import org.wikidata.wdtk.datamodel.interfaces.DocumentDataFilter;
import org.wikidata.wdtk.datamodel.interfaces.EntityDocument;
import org.wikidata.wdtk.wikibaseapi.apierrors.MediaWikiApiErrorException;

import com.fasterxml.jackson.databind.ObjectMapper;

/**
* Simple class to fetch data from Wikibase via the online API.
*
* @author Markus Kroetzsch
* @author Michael Guenther
*/
public class WikibaseDataFetcher {

static final Logger logger = LoggerFactory
.getLogger(WikibaseDataFetcher.class);

/**
* API Action to fetch data.
*/
@@ -61,11 +52,6 @@ public class WikibaseDataFetcher {
*/
final String siteIri;

/**
* Mapper object used for deserializing JSON data.
*/
final ObjectMapper mapper = new ObjectMapper();

/**
* Filter that is used to restrict API requests.
*/
@@ -86,10 +72,24 @@
*/
public static WikibaseDataFetcher getWikidataDataFetcher() {
return new WikibaseDataFetcher(
BasicApiConnection.getWikidataApiConnection(),
Datamodel.SITE_WIKIDATA);
}


/**
* Creates an object to fetch data from commons.wikimedia.org. This
* convenience method creates a default {@link ApiConnection} that is not
* logged in. To use an existing connection, call the constructor
* {@link #WikibaseDataFetcher(ApiConnection, String)} with
* {@link Datamodel#SITE_WIKIMEDIA_COMMONS} as the site URI.
*/
public static WikibaseDataFetcher getWikimediaCommonsDataFetcher() {
return new WikibaseDataFetcher(
BasicApiConnection.getWikimediaCommonsApiConnection(),
Datamodel.SITE_WIKIMEDIA_COMMONS);
}

/**
* Creates an object to fetch data from API with the given
* {@link ApiConnection} object. The site URI is necessary since it is not
@@ -194,6 +194,9 @@ public Map<String, EntityDocument> getEntityDocuments(List<String> entityIds)
* Fetches the document for the entity that has a page of the given title on
* the given site. Site keys should be some site identifier known to the
* Wikibase site that is queried, such as "enwiki" for Wikidata.org.
*
* This method can also retrieve Wikimedia Commons MediaInfo entities: use
* "commonswiki" as the site key and the file title (including the "File:"
* prefix) as the title.
* <p>
* Note: This method will not work properly if a filter is set for sites
* that excludes the requested site.
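A hedged sketch of the Commons usage the Javadoc above describes; the file title is the one used in this commit's example, and the id format is illustrated by the M65057 test fixture below.

import org.wikidata.wdtk.datamodel.interfaces.EntityDocument;
import org.wikidata.wdtk.datamodel.interfaces.MediaInfoDocument;
import org.wikidata.wdtk.wikibaseapi.WikibaseDataFetcher;

public class CommonsTitleLookupSketch {
	public static void main(String[] args) throws Exception {
		WikibaseDataFetcher fetcher = WikibaseDataFetcher.getWikimediaCommonsDataFetcher();
		EntityDocument doc = fetcher.getEntityDocumentByTitle(
				"commonswiki", "File:Black hole - Messier 87 crop max res.jpg");
		if (doc instanceof MediaInfoDocument) {
			// MediaInfo ids use the "M" prefix, e.g. M65057 in the fixture below.
			System.out.println(doc.getEntityId().getId());
		}
	}
}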
@@ -145,6 +145,33 @@ public void testWbGetEntitiesTitleEmpty() throws IOException,
assertNull(result);
}

@Test
public void testWbGetMediaInfoEntityFromId() throws IOException, MediaWikiApiErrorException {
Map<String, String> parameters = new HashMap<>();
this.setStandardParameters(parameters);
parameters.put("ids", "M65057");
con.setWebResourceFromPath(parameters, getClass(),
"/wbgetentities-RandomImage.jpg.json", CompressionType.NONE);

EntityDocument result = wdf.getEntityDocument("M65057");

assertEquals("M65057", result.getEntityId().getId());
}

@Test
public void testWbGetMediaInfoEntityFromTitle() throws IOException, MediaWikiApiErrorException {
Map<String, String> parameters = new HashMap<>();
this.setStandardParameters(parameters);
parameters.put("titles", "File:RandomImage 4658098723742867.jpg");
parameters.put("sites", "commonswiki");
con.setWebResourceFromPath(parameters, getClass(),
"/wbgetentities-RandomImage.jpg.json", CompressionType.NONE);

EntityDocument result = wdf.getEntityDocumentByTitle("commonswiki", "File:RandomImage 4658098723742867.jpg");

assertEquals("M65057", result.getEntityId().getId());
}

@Test
public void testWikidataDataFetcher() {
WikibaseDataFetcher wbdf = WikibaseDataFetcher.getWikidataDataFetcher();
@@ -0,0 +1,22 @@
{
"entities": {
"M65057": {
"pageid": 65057,
"ns": 6,
"title": "File:RandomImage 4658098723742867.jpg",
"lastrevid": 146188,
"modified": "2019-11-19T20:36:58Z",
"type": "mediainfo",
"id": "M65057",
"labels": {
"en": {
"language": "en",
"value": "ddddd"
}
},
"descriptions": {},
"statements": []
}
},
"success": 1
}
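Note the "statements": [] entry in this fixture: the Commons API serializes a MediaInfo entity without statements as an empty array rather than an empty object, which is exactly the shape the new ACCEPT_EMPTY_ARRAY_AS_NULL_OBJECT reader setting in wbGetEntities tolerates.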
