Commit 9bb3e5a2 authored by Florian Schäfer's avatar Florian Schäfer

Add API query for sitelinks of a Wikidata item

parent 902f2227
......@@ -3,6 +3,7 @@ package org.wikipedia.api.wikidata_action;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
......@@ -121,18 +122,33 @@ public final class WikidataActionApiQuery<T> extends ApiQuery<T> {
return new WikidataActionApiQuery<>(
FORMAT_PARAMS + "&action=wbgetentities&props=labels&ids=" + qId,
WbgetentitiesResult.SCHEMA,
TimeUnit.MINUTES.toMillis(5),
TimeUnit.MINUTES.toMillis(10),
result -> result.getEntities().values().stream().findFirst().map(WbgetentitiesResult.Entity::getLabels).orElse(new HashMap<>())
);
}
public static WikidataActionApiQuery<WbgetclaimsResult> wbgetclaims(final String qId) {
/**
 * Builds the API query that fetches the sitelinks of one Wikidata item.
 * @param qId the Q-ID of the Wikidata item (e.g. "Q42"), must be valid
 * @return a query whose result is the collection of sitelinks of the first returned entity,
 *     or an empty collection if the response contains no entity
 * @throws IllegalArgumentException if {@code qId} is not a valid Q-ID
 */
public static WikidataActionApiQuery<Collection<WbgetentitiesResult.Entity.Sitelink>> wbgetentitiesSitelinks(final String qId) {
    RegexUtil.requireValidQId(qId);
    final String query = FORMAT_PARAMS + "&action=wbgetentities&props=sitelinks&ids=" + qId;
    return new WikidataActionApiQuery<>(
        query,
        WbgetentitiesResult.SCHEMA,
        TimeUnit.MINUTES.toMillis(10), // cache responses for 10 minutes
        result -> {
            final Collection<WbgetentitiesResult.Entity> entities = result.getEntities().values();
            if (entities.isEmpty()) {
                return Collections.emptyList();
            }
            final Collection<WbgetentitiesResult.Entity.Sitelink> sitelinks = entities.iterator().next().getSitelinks();
            // guard against a null sitelink collection, mirroring Optional.map(...).orElse(emptyList())
            return sitelinks == null ? Collections.<WbgetentitiesResult.Entity.Sitelink>emptyList() : sitelinks;
        }
    );
}
public static WikidataActionApiQuery<Collection<WbgetclaimsResult.Claim>> wbgetclaims(final String qId) {
if (!RegexUtil.isValidQId(qId)) {
throw new IllegalArgumentException("Invalid Q-ID: " + qId);
}
return new WikidataActionApiQuery<>(
FORMAT_PARAMS + "&action=wbgetclaims&props=&entity=" + qId,
WbgetclaimsResult.SCHEMA
WbgetclaimsResult.SCHEMA,
TimeUnit.MINUTES.toMillis(10),
WbgetclaimsResult::getClaims
);
}
......
......@@ -17,6 +17,9 @@ import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Stream;
import org.wikipedia.api.SerializationSchema;
import org.wikipedia.tools.RegexUtil;
......@@ -59,6 +62,12 @@ public final class SitematrixResult {
return Collections.unmodifiableCollection(specialSites);
}
/**
 * Finds the site with the given database name, searching the sites of all
 * languages as well as the special sites.
 * @param dbname the database name to look for (may be null)
 * @return the first {@link Site} whose database name equals {@code dbname},
 *     or an empty {@link Optional} if none matches or {@code dbname} is null
 */
public Optional<Site> getSiteForDbname(final String dbname) {
    if (dbname == null) {
        // a null dbname can never match, no need to scan the sitematrix
        return Optional.empty();
    }
    final Stream<Site> languageSites = languages.stream().flatMap(language -> language.getSites().stream());
    return Stream.concat(languageSites, specialSites.stream())
        .filter(site -> dbname.equals(site.getDbName()))
        .findFirst();
}
public static class Deserializer extends StdDeserializer<Sitematrix> {
private final ObjectMapper mapper;
public Deserializer(final ObjectMapper mapper) {
......@@ -102,6 +111,7 @@ public final class SitematrixResult {
this.name = name;
if (sites != null) {
this.sites.addAll(sites);
this.sites.forEach(it -> it.language = this);
}
}
......@@ -124,18 +134,22 @@ public final class SitematrixResult {
// true iff the API response carried a "closed" attribute (see the constructor)
private final boolean closed;
// the "code" attribute of the site — presumably the site/language code; TODO confirm against the sitematrix API
private final String code;
// the "dbname" attribute of the site, matched by getSiteForDbname()
private final String dbName;
// the "sitename" attribute of the site
private final String siteName;
// the "url" attribute of the site
private final String url;
// back-reference to the owning Language; not final because it is assigned
// after construction in the Language constructor, may stay null for special sites
private Language language;
/**
 * Creates a new site entry of the sitematrix. Invoked by Jackson when
 * deserializing the sitematrix API response.
 * @param url the "url" attribute of the site
 * @param dbName the "dbname" attribute of the site
 * @param code the "code" attribute of the site
 * @param closed the "closed" attribute; the site is considered closed iff the attribute is present (non-null)
 * @param siteName the "sitename" attribute of the site
 */
@JsonCreator
public Site(
    @JsonProperty("url") final String url,
    @JsonProperty("dbname") final String dbName,
    @JsonProperty("code") final String code,
    @JsonProperty("closed") final String closed,
    @JsonProperty("sitename") final String siteName
) {
    // the API marks closed wikis by the mere presence of the attribute
    this.closed = closed != null;
    this.code = code;
    this.dbName = dbName;
    this.siteName = siteName;
    this.url = url;
}
......@@ -154,6 +168,14 @@ public final class SitematrixResult {
return dbName;
}
/**
 * @return the language this site belongs to; assigned by the enclosing
 *     {@code Language} constructor after deserialization, may be null
 */
public Language getLanguage() {
return language;
}
/**
 * @return the "sitename" attribute of the site as delivered by the API, may be null
 */
public String getSiteName() {
return siteName;
}
/**
 * @return the "url" attribute of the site as delivered by the API, may be null
 */
public String getUrl() {
return url;
}
......
......@@ -3,6 +3,8 @@ package org.wikipedia.gui;
import java.awt.BorderLayout;
import java.awt.GridLayout;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import org.openstreetmap.josm.gui.Notification;
......@@ -29,12 +31,12 @@ class WikidataInfoClaimPanel extends ProgressJPanel {
try {
mainPanel.removeAll();
showProgress(I18n.tr("Download statements for {0}…", qId));
final WbgetclaimsResult result = ApiQueryClient.query(WikidataActionApiQuery.wbgetclaims(qId));
final Collection<WbgetclaimsResult.Claim> result = ApiQueryClient.query(WikidataActionApiQuery.wbgetclaims(qId));
if (qIdBeingDownloaded != null && qIdBeingDownloaded.equals(qId)) {
synchronized (mainPanel) {
mainPanel.removeAll();
mainPanel.setLayout(new GridLayout(result.getClaims().size(), 1));
result.getClaims().forEach(claim -> {
mainPanel.setLayout(new GridLayout(result.size(), 1));
result.forEach(claim -> {
final WbgetclaimsResult.Claim.MainSnak.DataValue value = claim.getMainSnak().getDataValue(); // nullable
mainPanel.add(new StatementPanel(claim.getMainSnak().getProperty(), value == null ? I18n.tr("Unknown datatype!") : value.toString()));
});
......
......@@ -23,6 +23,12 @@ public class RegexUtil {
return value != null && Q_ID_PATTERN.matcher(value).matches();
}
/**
 * Ensures that the given string is a valid Q-ID.
 * @param value the string to validate (may be null, which is invalid)
 * @throws IllegalArgumentException if {@code value} is not a valid Q-ID
 */
public static void requireValidQId(final String value) {
    if (!isValidQId(value)) {
        // include the offending value so failures are diagnosable,
        // consistent with the existing "Invalid Q-ID: " message style
        throw new IllegalArgumentException("Invalid Q-ID: " + value);
    }
}
/**
* Validates that a given string matches "[a-z]{2,3}wiki".
* This has to be improved in the future to exactly allow only existing site IDs and allow other wikimedia sites
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment