...
 
Commits (12)
---
# CircleCI 2.0 configuration for the JOSM wikipedia plugin.
# Jobs share the Gradle/git cache via YAML anchors; the workflow at the bottom
# wires them together and restricts deploy jobs to master / release tags.
version: 2
jobs:
  # Build and run the test suite on Java 9.
  test-java-9:
    docker:
      - image: circleci/openjdk:9-jdk-browsers
    steps:
      - restore_cache: &restore_gradleGit_cache
          keys:
            - gradleGit-v1-{{ .Branch }}-{{ .Revision }}
            - gradleGit-v1-{{ .Branch }}-
            - gradleGit-v1-
      - run: sudo apt install -y gettext
      # Temporary workaround for https://discuss.circleci.com/t/22437
      - run: |
          if [ -n "$CIRCLE_TAG" ]
          then
            mkdir -p ~/.ssh
            echo 'github.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==
          ' >> ~/.ssh/known_hosts
            git fetch --force origin "refs/tags/${CIRCLE_TAG}:refs/tags/${CIRCLE_TAG}"
          fi
      - checkout
      - run: ./gradlew assemble test
      - store_test_results:
          path: ./build/test-results/test
      - save_cache: &save_gradleGit_cache
          key: gradleGit-v1-{{ .Branch }}-{{ .Revision }}
          paths:
            - "~/.gradle/caches"
            - "~/.gradle/wrapper"
            - ".git"
            - ".gradle"
  # Build, test and package on Java 8; on master, also publish the
  # plugin update site to the gh-pages branch.
  test-java-8:
    docker: &jdk8_image
      - image: circleci/openjdk:8-jdk-browsers
    steps:
      - restore_cache: *restore_gradleGit_cache
      - run: sudo apt install -y gettext
      # Temporary workaround for https://discuss.circleci.com/t/22437
      - run: |
          if [ -n "$CIRCLE_TAG" ]
          then
            mkdir -p ~/.ssh
            echo 'github.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==
          ' >> ~/.ssh/known_hosts
            git fetch --force origin "refs/tags/${CIRCLE_TAG}:refs/tags/${CIRCLE_TAG}"
          fi
      - checkout
      # Main commands
      - run: ./gradlew assemble test generatePot generateMasterPluginList
      # Storing to cache/workspace
      - persist_to_workspace: &persist_build_artifacts
          root: .
          paths:
            - .tx
            - build
      - save_cache: *save_gradleGit_cache
      - store_artifacts:
          path: ./build/dist
          destination: dist
      - store_artifacts:
          path: ./build/tmp/jar/MANIFEST.MF
          destination: dist/MANIFEST.MF
      - run: |
          if [ "$CIRCLE_BRANCH" == "master" ]; then
            git fetch origin gh-pages
            git checkout gh-pages
            cp ./build/pluginMasterSnapshots .
            git stage pluginMasterSnapshots
            git config user.name "CircleCI $CIRCLE_BUILD_NUM"
            git config user.email "deploy@circleci"
            git commit -m "[ci skip] Update plugin update site for JOSM with latest version from master branch"
            git push -q https://${GITHUB_TOKEN}@github.com/JOSM/wikipedia.git gh-pages > /dev/null 2>&1
          fi
  # Compile against the minimum supported JOSM version.
  min-josm:
    docker: *jdk8_image
    steps:
      - restore_cache: *restore_gradleGit_cache
      - checkout
      # Attach build artifacts
      - attach_workspace:
          at: .
      # Main commands
      - run: ./gradlew compileJava_minJosm
  # Static analysis / verification tasks.
  check:
    docker: *jdk8_image
    steps:
      - restore_cache: *restore_gradleGit_cache
      - run: sudo apt install -y gettext
      - checkout
      # Attach build artifacts
      - attach_workspace:
          at: .
      # Main commands
      - run: ./gradlew check
      # Storing to workspace
      - persist_to_workspace: *persist_build_artifacts
  # Push translation sources to Transifex and upload coverage to Codecov.
  transifex-upload:
    docker:
      - image: circleci/python:3.6-node-browsers
    steps:
      - attach_workspace:
          at: .
      - run: pip install --user git+https://github.com/transifex/transifex-client.git@699dd42e04074be92a07b5b87e8f1ea672a6571f#egg=transifex-client
      - run: export PATH="$PATH:$HOME/.local/bin" && pip install --user codecov && codecov
      - run: |
          export PATH="$PATH:$HOME/.local/bin" && echo "$PATH"
          if [ ! -z "$TRANSIFEX_TOKEN" ]; then
            TX_TOKEN="$TRANSIFEX_TOKEN" tx push -s --no-interactive
          fi
      # Store artifacts
      - store_test_results:
          path: ./build/test-results/test
      - store_artifacts:
          path: ./build/reports/jacoco/test
          destination: jacoco
      - store_artifacts:
          path: ./build/reports/pmd
          destination: pmd
      - store_artifacts:
          path: ./build/reports/tests/test
          destination: junit
  # Create a GitHub release for the current tag and attach the built jar.
  publish-release:
    docker:
      - image: circleci/golang:1.10
    steps:
      - restore_cache: *restore_gradleGit_cache
      - checkout
      - attach_workspace:
          at: .
      - run: go get github.com/aktau/github-release
      - run: |
          export TAG_SUBJECT=`git tag -l --format="%(contents:subject)" $CIRCLE_TAG`
          export TAG_BODY=`git tag -l --format="%(contents:body)" $CIRCLE_TAG`
          export GITHUB_USER="$CIRCLE_PROJECT_USERNAME"
          export GITHUB_REPO="$CIRCLE_PROJECT_REPONAME"
          github-release release --tag "$CIRCLE_TAG" --name "$TAG_SUBJECT" --description "$TAG_BODY"
          github-release upload --tag "$CIRCLE_TAG" --file "./build/dist/wikipedia.jar" --name "wikipedia.jar"
          github-release upload --tag "$CIRCLE_TAG" --file "./build/tmp/jar/MANIFEST.MF" --name "MANIFEST.MF"
workflows:
  version: 2
  build-workflow:
    jobs:
      - test-java-8:
          filters: &always_run
            tags:
              only: /.*/
      - test-java-9:
          filters: *always_run
      - check: &depends_on_test_tasks
          requires:
            - test-java-8
            - test-java-9
          filters: *always_run
      - min-josm: *depends_on_test_tasks
      # Runs only on master branch
      - transifex-upload:
          requires:
            - check
            - min-josm
          filters:
            branches:
              only: master
      # Runs only on tags matching /v[0-9].*/
      - publish-release:
          requires:
            - check
            - min-josm
          filters:
            tags:
              only: /v[0-9].*/
            branches:
              ignore: /.*/
---
# GitLab CI configuration for the JOSM wikipedia plugin.
# Uses the default stages build → test → deploy; deploy jobs are limited to
# the canonical JOSM/wikipedia project (master branch or tags).
image: registry.gitlab.com/josm/wikipedia/java8:latest

before_script:
  # Keep the Gradle home inside the workspace so it can be cached.
  - export GRADLE_USER_HOME=`pwd`/.gradle

cache:
  paths:
    - .gradle/wrapper
    - .gradle/caches

###############
# Build stage #
###############

assemble:
  stage: build
  script:
    - ./gradlew assemble
  artifacts:
    paths:
      - build

##############
# Test stage #
##############

build:
  stage: test
  script:
    - ./gradlew build generatePot generateSnapshotUpdateSite
  artifacts:
    paths:
      - build

compile against min JOSM:
  stage: test
  script:
    - ./gradlew compileJava_minJosm

compile against latest JOSM:
  stage: test
  script:
    - ./gradlew compileJava_latestJosm

################
# Deploy stage #
################

upload to transifex:
  image: python:3.6-stretch
  stage: deploy
  environment:
    name: transifex
    url: https://www.transifex.com/josm/josm/josm-plugin_wikipedia/
  before_script:
    - apt-get update && apt-get install -yq gettext git
    - pip install git+https://github.com/transifex/transifex-client.git
  script:
    - TX_TOKEN="$TRANSIFEX_TOKEN" tx push -s --no-interactive
  dependencies:
    - build
  only:
    - master@JOSM/wikipedia

release:
  stage: deploy
  environment:
    name: pages branch
    url: https://gitlab.com/JOSM/wikipedia/tree/pages
  script:
    # NOTE(review): assumes ~/.ssh already exists in the image — confirm,
    # otherwise a `mkdir -p ~/.ssh` is needed before writing the key.
    - &clone_pages_branch |
      echo "$SSH_PRIVATE_DEPLOY_KEY" > ~/.ssh/id_rsa
      chmod 600 ~/.ssh/id_rsa
      git clone --depth 1 --branch pages git@gitlab.com:JOSM/wikipedia.git pages
    - |
      version=`git describe --always --dirty`
      longVersion=`git describe --always --long --dirty`
      commitMessage="Release version $longVersion"
    - |
      mkdir -pv "pages/dist/$version"
      cp -v build/dist/* build/tmp/jar/MANIFEST.MF "pages/dist/$version"
      rm -fv "pages/dist/latest"
      ln -s "./$version" "pages/dist/latest"
    - &push_pages_branch |
      cd pages/
      git config user.name "Deploy with GitLab CI"
      git config user.email "JOSM/wikipedia@gitlab.com"
      git stage .
      git commit -a -m "$commitMessage"
      git push origin pages
  dependencies:
    - build
  only:
    - tags@JOSM/wikipedia

publish master update site:
  stage: deploy
  environment:
    name: master update site
    url: https://josm.gitlab.io/wikipedia/snapshot/master/update-site
  script:
    - *clone_pages_branch
    - |
      commitHash=`git rev-parse HEAD`
      commitMessage="Make latest commit available via JOSM update site (master@$commitHash)"
    - |
      rm -vrf pages/snapshot/master
      mkdir -pv pages/snapshot/master
      cp -v build/snapshot-update-site pages/snapshot/master/update-site
      cp -v build/dist/* pages/snapshot/master
    - *push_pages_branch
  dependencies:
    - build
  only:
    - master@JOSM/wikipedia
# JOSM Wikipedia Plugin
[![build status](https://img.shields.io/circleci/project/github/JOSM/wikipedia/master.svg?style=flat-square)](https://circleci.com/gh/JOSM/wikipedia/tree/master)
[![code coverage](https://img.shields.io/codecov/c/github/JOSM/wikipedia/master.svg?style=flat-square)](https://codecov.io/gh/JOSM/wikipedia/branch/master)
[![latest release](https://img.shields.io/github/release/JOSM/wikipedia.svg?style=flat-square)](https://github.com/JOSM/wikipedia/releases/latest)
[![license](https://img.shields.io/github/license/JOSM/wikipedia.svg?style=flat-square)](./LICENSE)
[![pipeline status](https://gitlab.com/JOSM/wikipedia/badges/master/pipeline.svg)](https://gitlab.com/JOSM/wikipedia/commits/master)
[![license](https://img.shields.io/badge/license-GPLv2-blue.svg?style=flat-square)](./LICENSE)
This plugin simplifies linking OSM objects to Wikipedia articles and Wikidata items. It also helps to maintain existing links to Wikipedia/Wikidata.
......@@ -15,7 +13,7 @@ To use this plugin, [install JOSM](https://josm.openstreetmap.de/wiki/Download)
## Contributing
- The **source code** is hosted on [GitLab](https://gitlab.com/JOSM/wikipedia) and [GitHub](https://github.com/JOSM/wikipedia).
- **Issues** are managed in [JOSM Trac](https://josm.openstreetmap.de/query?status=assigned&status=needinfo&status=new&status=reopened&component=Plugin+wikipedia&col=id&col=summary&col=status&col=type&col=priority&col=milestone&col=component&col=time&col=changetime&report=1&desc=1&order=changetime)
- **Translations** are managed [at Transifex](https://www.transifex.com/josm/josm/josm-plugin_wikipedia)
[![translation status](https://www.transifex.com/projects/p/josm/resource/josm-plugin_wikipedia/chart/image_png/)](https://www.transifex.com/josm/josm/josm-plugin_wikipedia)
......
......@@ -2,9 +2,6 @@ import java.nio.file.Files
import java.nio.file.Paths
import java.time.Duration
import java.time.Instant
import java.util.jar.JarInputStream
import java.util.regex.Pattern
import java.util.stream.Collectors
import org.openstreetmap.josm.gradle.plugin.task.GeneratePluginList
import com.github.spotbugs.SpotBugsTask
......@@ -47,6 +44,7 @@ classes.dependsOn(copyToLib)
test {
useJUnitPlatform()
testLogging.exceptionFormat = 'full'
}
sourceSets {
......@@ -72,7 +70,7 @@ josm {
oldVersionDownloadLink 12878, "33635", new URL("https://svn.openstreetmap.org/applications/editors/josm/dist/wikipedia.jar?p=33636")
}
i18n {
pathTransformer = getPathTransformer("github.com/JOSM/wikipedia/blob")
pathTransformer = getPathTransformer("gitlab.com/JOSM/wikipedia/blob")
}
}
......@@ -82,6 +80,7 @@ tasks.withType(JavaCompile) {
"-Xep:DefaultCharset:ERROR",
"-Xep:StringEquality:ERROR",
"-Xep:ConstantField:WARN",
"-Xep:FieldCanBeFinal:WARN",
"-Xep:LambdaFunctionalInterface:WARN",
"-Xep:MethodCanBeStatic:WARN",
"-Xep:MultiVariableDeclaration:WARN",
......@@ -129,28 +128,13 @@ tasks.withType(SpotBugsTask) {
}
}
task generateMasterPluginList(type: GeneratePluginList) {
task generateSnapshotUpdateSite(type: GeneratePluginList) {
dependsOn(tasks.processResources)
outputFile = new File(project.buildDir, "pluginMasterSnapshots")
def defaultVersionSuffix = versionSuffix
versionSuffix = {String s ->
if (s.startsWith(archivesBaseName)) {
return ""
}
return defaultVersionSuffix.invoke(s)
}
outputFile = new File(project.buildDir, "snapshot-update-site")
versionSuffix = {a -> ""}
doFirst {
def wikipediaURL = System.getenv("WIKIPEDIA_URL")
def ptAssistantURL = System.getenv("PT_ASSISTANT_URL")
if (wikipediaURL == null || ptAssistantURL == null) {
throw new TaskExecutionException(it, new IllegalStateException("This tasks requires the environment variables `WIKIPEDIA_URL` and `PT_ASSISTANT_URL` to be set!"))
}
def circleBuildNum = System.getenv("CIRCLE_BUILD_NUM")
if (wikipediaURL.count("%s") == 1 && circleBuildNum != null) {
wikipediaURL = String.format(wikipediaURL, circleBuildNum)
}
def pluginDownloadUrl = "https://josm.gitlab.io/$archivesBaseName/snapshot/master/${archivesBaseName}.jar"
it.iconBase64Provider = {
def file = new File(sourceSets.main.resources.srcDirs[0], it)
if (file.exists()) {
......@@ -158,65 +142,8 @@ task generateMasterPluginList(type: GeneratePluginList) {
return "data:image/" + contentType + ";base64," + Base64.getEncoder().encodeToString(Files.readAllBytes(Paths.get(file.toURI())));
}
}
it.addPlugin("$archivesBaseName-dev.jar", project.josm.manifest.createJosmPluginJarManifest(), new URL(wikipediaURL))
def manifestAttributes = new JarInputStream(new URL(ptAssistantURL).openConnection().inputStream).manifest.mainAttributes.entrySet().stream()
.collect(Collectors.toMap({ e -> e.key.toString()}, { e -> e.value.toString()}))
it.addPlugin("pt_assistant-dev.jar", manifestAttributes, new URL(ptAssistantURL))
}
}
/**
* @return the current version of the repo as determined by the first of these commands that returns a valid result:
* <ul>
* <li>`git log` Search for a line with a git-svn-id in the current commit (append "-dirty" if working tree differs)</li>
* <li>`git describe` Let git describe the current commit, should only fail, if this is not a git repo</li>
* <li>`svn info` take the revision number from the SVN info command</li>
* </ul>
*/
def getVersion() {
// First attempt: Check if the commit has a git-svn-id, return SVN revision
def result = getVersion("git-svn-id: .*@([1-9][0-9]*) .*", "git", "log", "-1", "--format=%b")
if (result == null) {
// Second attempt: Check if the commit can be git-described, return the description by git
result = getVersion("(.+)", "git", "describe", "--always", "--dirty")
if (result == null) {
// Third attempt: Check if we are in an SVN repo, return revision number
result = getVersion("Revision: ([1-9][0-9]*)", "svn", "info")
if (result == null) {
result = "UNKNOWN"
} else {
result = "r$result"
}
}
} else {
result = "r$result"
def dirtyProcess = new ProcessBuilder("git", "diff-index", "--quiet", "HEAD").start()
if (dirtyProcess.waitFor() != 0) {
result += "-dirty"
}
}
return result
}
/**
* Runs the specified command, matches the lines of the output with the given linePattern.
* @param linePattern the linePattern to match the lines against
* @param command the command to execute
* @return if a line matches, return the first RegEx group, else return null
*/
def getVersion(String linePattern, String... command) {
def process = new ProcessBuilder(command).directory(project.projectDir).start()
if (process.waitFor() != 0) {
return null
it.addPlugin("$archivesBaseName-dev.jar", project.josm.manifest.createJosmPluginJarManifest(), new URL(pluginDownloadUrl))
}
def pattern = Pattern.compile(linePattern)
return Arrays.stream(process.inputStream.text.split("\n"))
.map { pattern.matcher(it)}
.filter { it.matches() }
.map { it.group(1).trim() }
.findFirst()
.orElse(null)
}
// Show task duration and skipped tasks
......
......@@ -5,7 +5,7 @@ plugin.main.version = 13927
# The special values "latest" and "tested" are also possible here, but not recommended.
plugin.compile.version = 13996
plugin.canloadatruntime = true
plugin.author = floscher <incoming+floscher/JOSM-wikipedia@incoming.gitlab.com>, simon04
plugin.author = floscher <incoming+JOSM/wikipedia@incoming.gitlab.com>, simon04
plugin.class = org.wikipedia.WikipediaPlugin
plugin.icon = images/dialogs/wikipedia.png
plugin.link = https://josm.openstreetmap.de/wiki/Help/Plugin/Wikipedia
......
......@@ -17,10 +17,11 @@ import org.wikipedia.actions.FetchWikidataAction;
import org.wikipedia.actions.WikipediaAddNamesAction;
import org.wikipedia.actions.WikipediaCopyTemplate;
import org.wikipedia.gui.SophoxDownloadReader;
import org.wikipedia.gui.SophoxServerPreference;
import org.wikipedia.gui.WikiPreferences;
import org.wikipedia.gui.WikidataItemSearchDialog;
import org.wikipedia.gui.WikidataTagCellRenderer;
import org.wikipedia.gui.WikipediaToggleDialog;
import org.wikipedia.validator.UnusualWikidataClasses;
import org.wikipedia.validator.WikidataItemExists;
import org.wikipedia.validator.WikipediaAgainstWikidata;
import org.wikipedia.validator.WikipediaRedirect;
......@@ -32,7 +33,7 @@ public final class WikipediaPlugin extends Plugin {
private static String name;
private static String versionInfo;
private PreferenceSetting preferences;
private final PreferenceSetting preferences = new WikiPreferences();
public WikipediaPlugin(PluginInformation info) {
super(info);
......@@ -49,6 +50,7 @@ public final class WikipediaPlugin extends Plugin {
OsmValidator.addTest(WikidataItemExists.class);
OsmValidator.addTest(WikipediaAgainstWikidata.class);
OsmValidator.addTest(WikipediaRedirect.class);
OsmValidator.addTest(UnusualWikidataClasses.class);
}
public static String getVersionInfo() {
......@@ -69,9 +71,6 @@ public final class WikipediaPlugin extends Plugin {
@Override
public PreferenceSetting getPreferenceSetting() {
if (preferences == null) {
preferences = (new SophoxServerPreference.Factory()).createPreferenceSetting();
}
return preferences;
}
}
......@@ -37,6 +37,8 @@ public abstract class ApiQuery<T> {
return url.toString();
}
public abstract String getApiName();
public abstract HttpClient getHttpClient();
/**
......
......@@ -51,7 +51,7 @@ public final class ApiQueryClient {
);
} catch (IOException e) {
if (cachedValue == null) {
throw wrapReadDecodeJsonExceptions(e);
throw wrapReadDecodeJsonExceptions(e, query.getApiName());
}
// If there's an expired cache entry, continue using it
Logging.log(Level.INFO, "Failed to update the cached API response. Falling back to the cached response.", e);
......@@ -66,7 +66,7 @@ public final class ApiQueryClient {
try {
return query.getSchema().getMapper().readValue(stream, query.getSchema().getSchemaClass());
} catch (IOException e) {
throw wrapReadDecodeJsonExceptions(e);
throw wrapReadDecodeJsonExceptions(e, query.getApiName());
}
}
......@@ -76,16 +76,20 @@ public final class ApiQueryClient {
response = query.getHttpClient().connect();
} catch (IOException e) {
throw new IOException(I18n.tr(
// i18n: {0} is the name of the exception, {1} is the message of the exception. Typical values would be: {0}="UnknownHostException" {1}="www.wikidata.org"
"Could not connect to the Wikidata Action API, probably a network issue or the website is currently offline ({0}: {1})",
// i18n: {0} is the API name, {1} is the name of the exception, {2} is the message of the exception.
// i18n: Typical values would be: {0}="Wikidata Action API" {1}="UnknownHostException" {2}="www.wikidata.org"
"Could not connect to the {0}, probably a network issue or the website is currently offline ({1}: {2})",
query.getApiName(),
e.getClass().getSimpleName(),
e.getLocalizedMessage()
), e);
}
if (response.getResponseCode() != 200) {
throw new IOException(I18n.tr(
// i18n: {0} is the response code, {1} is the response message. Typical values would be: {0}=404 {1}="Not Found"
"The Wikidata Action API responded with an unexpected response code: {0} {1}",
// i18n: {0} is the API name, {1} is the response code, {2} is the response message.
// i18n: Typical values would be: {0}="Wikidata Action API" {1}=404 {2}="Not Found"
"The {0} responded with an unexpected response code: {1} {2}",
query.getApiName(),
response.getResponseCode(),
response.getResponseMessage()
));
......@@ -93,8 +97,9 @@ public final class ApiQueryClient {
final String errorHeader = response.getHeaderField("MediaWiki-API-Error");
if (errorHeader != null) {
final IOException wrapperEx = new IOException(I18n.tr(
// I18n: {0} is the query, normally as URL. {1} is the error message returned from the API
"The Wikidata Action API reported an invalid query for {0} ({1}). This is a programming error, please report to the Wikipedia plugin.",
// I18n: {0} is the API name, {1} is the query, normally as URL. {2} is the error message returned from the API
"The {0} reported an invalid query for {1} ({2}). This is a programming error, please report to the Wikipedia plugin.",
query.getApiName(),
query.getCacheKey(),
errorHeader
));
......@@ -110,14 +115,15 @@ public final class ApiQueryClient {
return response.getContent();
}
private static IOException wrapReadDecodeJsonExceptions(final IOException exception) {
private static IOException wrapReadDecodeJsonExceptions(final IOException exception, final String apiName) {
final IOException wrapper;
if (exception instanceof JsonParseException || exception instanceof JsonMappingException) {
wrapper = new IOException(I18n.tr("The JSON response from the Wikidata Action API can't be decoded!"), exception);
} else {
wrapper = new IOException(I18n.tr(
// i18n: {0} is the name of the Exception, {1} is the message that exception provides
"When reading the JSON response from the Wikidata Action API, an error occured! ({0}: {1})",
// i18n: {0} is the name of the API, {1} is the name of the Exception, {2} is the message that exception provides
"When reading the JSON response from the {0}, an error occured! ({1}: {2})",
apiName,
exception.getClass().getSimpleName(),
exception.getLocalizedMessage()
), exception);
......
package org.wikipedia.api.wdq;

import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.Objects;
import org.openstreetmap.josm.tools.HttpClient;
import org.openstreetmap.josm.tools.Utils;
import org.wikipedia.api.ApiQuery;
import org.wikipedia.api.ApiUrl;
import org.wikipedia.api.SerializationSchema;
import org.wikipedia.api.wdq.json.SparqlResult;
import org.wikipedia.tools.RegexUtil;

/**
 * An {@link ApiQuery} against the Wikidata Query Service (SPARQL endpoint).
 * The query string is sent as an URL-encoded POST body and the JSON response
 * is deserialized according to the given {@link SerializationSchema}.
 * @param <T> the type that the JSON response is deserialized to
 */
public class WdqApiQuery<T> extends ApiQuery<T> {
    private static final String[] TICKET_KEYWORDS = {"wikidata", "QueryService"};
    // URL-encoded POST body, e.g. "format=json&query=…"
    private final String queryString;

    /**
     * @param url the URL of the SPARQL endpoint
     * @param queryString the URL-encoded POST body, must not be null
     * @param schema the schema used to deserialize the JSON response
     */
    public WdqApiQuery(final URL url, final String queryString, final SerializationSchema<T> schema) {
        // -1 is forwarded to ApiQuery (presumably "no caching" — confirm against ApiQuery)
        super(url, schema, -1);
        this.queryString = Objects.requireNonNull(queryString);
    }

    @Override
    public String getApiName() {
        return "Wikidata Query Service API";
    }

    @Override
    public HttpClient getHttpClient() {
        return HttpClient.create(getUrl(), "POST")
            .setAccept("application/sparql-results+json")
            .setHeader("Content-Type", "application/x-www-form-urlencoded")
            .setHeader("User-Agent", getUserAgent(TICKET_KEYWORDS))
            // '&' would break the reason-for-request header format, so replace it
            .setReasonForRequest(queryString.replace('&', ' '))
            .setRequestBody(queryString.getBytes(StandardCharsets.UTF_8));
    }

    /**
     * Builds a query that checks which of the given items are instances of {@code x}
     * or instances of any (transitive) subclass of {@code x} (via wdt:P31/wdt:P279*).
     * @param items the items for which we check if they are instances of {@code x}
     *     or instances of any subclass of {@code x}. Must be non-empty, all valid Q-IDs.
     * @param x the Q-ID of an item, for which the query checks if the provided items are instances of it,
     *     or instances of subclasses of it.
     * @return the API query
     * @throws IllegalArgumentException if {@code items} is empty or any argument is not a valid Q-ID
     */
    public static WdqApiQuery<SparqlResult> findInstancesOfXOrOfSubclass(final Collection<String> items, final String x) {
        Objects.requireNonNull(items);
        Objects.requireNonNull(x);
        if (items.isEmpty() || !items.stream().allMatch(RegexUtil::isValidQId) || !RegexUtil.isValidQId(x)) {
            throw new IllegalArgumentException("All arguments for the 'is instance of X or of subclass' check must be valid Q-IDs!");
        }
        return new WdqApiQuery<>(
            ApiUrl.url("https://query.wikidata.org/sparql"),
            "format=json&query=" + Utils.encodeUrl(String.format("SELECT DISTINCT ?item WHERE { VALUES ?item { wd:%s } ?item wdt:P31/wdt:P279* wd:%s. }", String.join(" wd:", items), x)),
            SparqlResult.SCHEMA
        );
    }
}
package org.wikipedia.api.wdq.json;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.DeserializationFeature;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;
import org.wikipedia.api.SerializationSchema;

/**
 * Deserialized form of a SPARQL JSON result: a {@code head} listing the
 * column labels and a {@code results} section holding one binding map per row.
 */
public class SparqlResult {
    /** Schema for deserializing a SPARQL JSON response; unknown properties are ignored. */
    public static final SerializationSchema<SparqlResult> SCHEMA = new SerializationSchema<>(
        SparqlResult.class,
        objectMapper -> objectMapper.disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES)
    );

    private final Head head;
    private final Results results;

    @JsonCreator
    public SparqlResult(@JsonProperty("head") final Head head, @JsonProperty("results") final Results results) {
        this.head = Objects.requireNonNull(head);
        this.results = Objects.requireNonNull(results);
    }

    /** @return the labels of the result columns, unmodifiable */
    public Collection<String> getColumnLabels() {
        return Collections.unmodifiableCollection(head.vars);
    }

    /** @return how many columns each row has */
    public int getNumColumns() {
        return head.vars.size();
    }

    /** @return the number of result rows */
    public int size() {
        return results.bindings.size();
    }

    /**
     * @param rowIndex index of the row
     * @param columnIndex index of the column
     * @return the entry at that position (may be null when the binding has no value for the column)
     */
    public Results.Entry getEntry(final int rowIndex, final int columnIndex) {
        final String column = head.vars.get(columnIndex);
        return results.bindings.get(rowIndex).get(column);
    }

    /** @return all rows as lists of entries, in column order, as an unmodifiable list */
    public List<List<Results.Entry>> getRows() {
        final List<List<Results.Entry>> rows = results.bindings.stream()
            .map(binding -> head.vars.stream().map(binding::get).collect(Collectors.toList()))
            .collect(Collectors.toList());
        return Collections.unmodifiableList(rows);
    }

    /** The {@code head} section: just the list of column labels. */
    private static class Head {
        private final List<String> vars;

        @JsonCreator
        public Head(@JsonProperty("vars") final List<String> vars) {
            this.vars = Objects.requireNonNull(vars);
        }
    }

    /** The {@code results} section: one column-label → entry map per row. */
    public static class Results {
        private final List<Map<String,Entry>> bindings;

        @JsonCreator
        public Results(@JsonProperty("bindings") final List<Map<String, Entry>> bindings) {
            this.bindings = Objects.requireNonNull(bindings);
        }

        /** A single cell of the result table: a type tag and its value. */
        public static class Entry {
            private final String type;
            private final String value;

            @JsonCreator
            public Entry(@JsonProperty("type") final String type, @JsonProperty("value") final String value) {
                this.type = Objects.requireNonNull(type);
                this.value = Objects.requireNonNull(value);
            }

            public String getType() {
                return type;
            }

            public String getValue() {
                return value;
            }
        }
    }
}
......@@ -73,6 +73,11 @@ public final class WikidataActionApiQuery<T> extends ApiQuery<T> {
return getUrl().toString() + '?' + getQuery();
}
@Override
public String getApiName() {
return "Wikidata Action API";
}
@Override
public HttpClient getHttpClient() {
return HttpClient.create(getUrl(), "POST")
......
......@@ -28,6 +28,11 @@ public class WikipediaActionApiQuery<T> extends ApiQuery<T> {
return queryString;
}
@Override
public String getApiName() {
return "Wikipedia Action API";
}
@Override
public HttpClient getHttpClient() {
return HttpClient.create(getUrl(), "POST")
......
// License: GPL. For details, see LICENSE file.
package org.wikipedia.gui;

import static org.openstreetmap.josm.tools.I18n.tr;

import java.awt.GridBagLayout;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.JLabel;
import javax.swing.JPanel;
import org.openstreetmap.josm.gui.preferences.PreferenceSettingFactory;
import org.openstreetmap.josm.gui.preferences.PreferenceTabbedPane;
import org.openstreetmap.josm.gui.preferences.SubPreferenceSetting;
import org.openstreetmap.josm.gui.preferences.TabPreferenceSetting;
import org.openstreetmap.josm.gui.widgets.HistoryComboBox;
import org.openstreetmap.josm.tools.GBC;
import org.wikipedia.io.SophoxDownloadReader;

/**
 * Preferences related to Sophox API servers.
 */
public class SophoxServerPreference implements SubPreferenceSetting {

    // Combo box holding the current Sophox server URL plus its history.
    // Renamed from `SophoxServer` to follow the lowerCamelCase field convention.
    private final HistoryComboBox sophoxServer = new HistoryComboBox();

    /**
     * Factory used to create a new {@link SophoxServerPreference}.
     */
    public static class Factory implements PreferenceSettingFactory {
        @Override
        public SophoxServerPreference createPreferenceSetting() {
            return new SophoxServerPreference();
        }
    }

    @Override
    public TabPreferenceSetting getTabPreferenceSetting(PreferenceTabbedPane gui) {
        // This preference lives as a sub-tab of the "Server" preference tab.
        return gui.getServerPreference();
    }

    @Override
    public void addGui(PreferenceTabbedPane gui) {
        final JPanel panel = new JPanel(new GridBagLayout());
        panel.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
        panel.add(new JLabel(tr("Server: ")), GBC.std().insets(5, 5, 5, 5));
        panel.add(sophoxServer, GBC.eop().fill(GBC.HORIZONTAL));
        // Populate the combo box from the persisted server URL and its history.
        sophoxServer.setPossibleItems(SophoxDownloadReader.SOPHOX_SERVER_HISTORY.get());
        sophoxServer.setText(SophoxDownloadReader.SOPHOX_SERVER.get());
        panel.add(Box.createVerticalGlue(), GBC.eol().fill());
        getTabPreferenceSetting(gui).addSubTab(this, tr("Wikidata+OSM server"), panel);
    }

    @Override
    public boolean ok() {
        // Persist the current selection and its history.
        SophoxDownloadReader.SOPHOX_SERVER.put(sophoxServer.getText());
        SophoxDownloadReader.SOPHOX_SERVER_HISTORY.put(sophoxServer.getHistory());
        // false: presumably "no JOSM restart required" — confirm against SubPreferenceSetting
        return false;
    }

    @Override
    public boolean isExpert() {
        return true;
    }
}
......@@ -4,13 +4,11 @@ package org.wikipedia.gui;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Insets;
import java.awt.Point;
import java.awt.RenderingHints;
import java.awt.geom.Arc2D;
import java.awt.geom.Ellipse2D;
import java.awt.geom.Path2D;
import java.awt.geom.Rectangle2D;
import java.text.MessageFormat;
import java.util.Collection;
import java.util.Collections;
......@@ -122,7 +120,7 @@ public class WikiLayer extends Layer implements ListDataListener {
paintWikiMarkers(g, selectedEntries.stream().map(it -> Collections.singleton(mv.getPoint(it.coordinate))).collect(Collectors.toList()), true);
}
private void paintWikiMarkers(final Graphics2D g, final Collection<Collection<Point>> clusters, final boolean selected) {
private static void paintWikiMarkers(final Graphics2D g, final Collection<Collection<Point>> clusters, final boolean selected) {
g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
g.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
g.setStroke(new BasicStroke(selected ? 3 : 2));
......
// License: GPL. For details, see LICENSE file.
package org.wikipedia.gui;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import javax.swing.Box;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextField;
import org.openstreetmap.josm.actions.ExpertToggleAction;
import org.openstreetmap.josm.gui.preferences.PreferenceTabbedPane;
import org.openstreetmap.josm.gui.preferences.SubPreferenceSetting;
import org.openstreetmap.josm.gui.preferences.TabPreferenceSetting;
import org.openstreetmap.josm.gui.widgets.HistoryComboBox;
import org.openstreetmap.josm.tools.I18n;
import org.wikipedia.io.SophoxDownloadReader;
import org.wikipedia.tools.WikiProperties;
/**
 * Preference panel of the Wikipedia plugin: lets the user set the default
 * Wikipedia language and (in expert mode only) the Sophox server URL.
 * Added as a sub-tab of the plugin preference tab.
 */
public class WikiPreferences implements SubPreferenceSetting {
// Label + text field for the default Wikipedia language.
private final JLabel languageLabel = new JLabel(I18n.tr("Wikipedia language"), JLabel.TRAILING);
private final JTextField languageField = new JTextField();
// Label + combo box (with history) for the Sophox server; only shown in expert mode.
private final JLabel sophoxServerLabel = new JLabel(I18n.tr("Sophox server"), JLabel.TRAILING);
private final HistoryComboBox sophoxServerField = new HistoryComboBox();
public WikiPreferences() {
super();
languageLabel.setToolTipText(I18n.tr("Sets the default language for the Wikipedia articles"));
}
@Override
public void addGui(PreferenceTabbedPane gui) {
final JPanel container = new JPanel(new GridBagLayout());
// Show/hide the Sophox server row whenever expert mode is toggled
// (the `true` argument fires the listener once immediately for the initial state).
ExpertToggleAction.addExpertModeChangeListener(isExpert -> {
sophoxServerLabel.setVisible(isExpert);
sophoxServerField.setVisible(isExpert);
container.revalidate();
container.repaint();
}, true);
container.setAlignmentY(JPanel.TOP_ALIGNMENT);
// GridBag layout: labels in column 0 (low weight), fields in column 1 (full weight).
final GridBagConstraints constraints = new GridBagConstraints();
constraints.gridx = 0;
constraints.gridy = 0;
constraints.weightx = .1;
constraints.weighty = 0;
constraints.insets = new Insets(5, 10, 5, 10);
constraints.anchor = GridBagConstraints.EAST;
constraints.fill = GridBagConstraints.HORIZONTAL;
container.add(languageLabel, constraints);
constraints.gridx++;
constraints.weightx = 1;
container.add(languageField, constraints);
// Second row: Sophox server label + field.
constraints.gridy++;
constraints.gridx = 0;
constraints.weightx = .1;
container.add(sophoxServerLabel, constraints);
constraints.gridx++;
constraints.weightx = 1;
container.add(sophoxServerField, constraints);
// Final row: vertical glue absorbs the remaining height so rows stay at the top.
constraints.gridy++;
constraints.weighty = 1;
container.add(Box.createVerticalGlue(), constraints);
// Initialize the fields from the persisted settings.
languageField.setText(WikiProperties.WIKIPEDIA_LANGUAGE.get());
sophoxServerField.setPossibleItems(SophoxDownloadReader.SOPHOX_SERVER_HISTORY.get());
sophoxServerField.setText(SophoxDownloadReader.SOPHOX_SERVER.get());
getTabPreferenceSetting(gui).addSubTab(this, "Wikipedia", container);
}
@Override
public TabPreferenceSetting getTabPreferenceSetting(PreferenceTabbedPane gui) {
// This preference lives as a sub-tab of the plugin preference tab.
return gui.getPluginPreference();
}
@Override
public boolean isExpert() {
// The sub-tab itself is always visible; only the Sophox row is expert-only.
return false;
}
@Override
public boolean ok() {
// Persist the language and the Sophox server (current value plus history).
WikiProperties.WIKIPEDIA_LANGUAGE.put(languageField.getText());
SophoxDownloadReader.SOPHOX_SERVER_HISTORY.put(sophoxServerField.getHistory());
SophoxDownloadReader.SOPHOX_SERVER.put(sophoxServerField.getText());
// false: presumably "no JOSM restart required" — confirm against SubPreferenceSetting
return false;
}
}
......@@ -31,6 +31,7 @@ import org.openstreetmap.josm.tools.GBC;
import org.openstreetmap.josm.tools.Utils;
import org.wikipedia.WikipediaApp;
import org.wikipedia.data.WikidataEntry;
import org.wikipedia.tools.WikiProperties;
public final class WikidataItemSearchDialog extends ExtendedDialog {
......@@ -128,7 +129,7 @@ public final class WikidataItemSearchDialog extends ExtendedDialog {
debouncer.debounce(getClass(), () -> {
final List<WikidataEntry> entries = query == null || query.isEmpty()
? Collections.emptyList()
: WikipediaApp.getWikidataEntriesForQuery(WikipediaToggleDialog.wikipediaLang.get(), query, Locale.getDefault());
: WikipediaApp.getWikidataEntriesForQuery(WikiProperties.WIKIPEDIA_LANGUAGE.get(), query, Locale.getDefault());
GuiHelper.runInEDT(() -> lsResultModel.setItems(entries));
}, 200, TimeUnit.MILLISECONDS);
}
......
......@@ -11,6 +11,7 @@ import org.openstreetmap.josm.Main;
import org.openstreetmap.josm.gui.ExtendedDialog;
import org.openstreetmap.josm.gui.util.GuiHelper;
import org.wikipedia.WikipediaApp;
import org.wikipedia.tools.WikiProperties;
final class WikipediaCategorySearchDialog extends ExtendedDialog {
......@@ -56,7 +57,7 @@ final class WikipediaCategorySearchDialog extends ExtendedDialog {
debouncer.debounce(getClass(), () -> {
final List<String> entries = query == null || query.isEmpty()
? Collections.emptyList()
: WikipediaApp.forLanguage(WikipediaToggleDialog.wikipediaLang.get()).getCategoriesForPrefix(query);
: WikipediaApp.forLanguage(WikiProperties.WIKIPEDIA_LANGUAGE.get()).getCategoriesForPrefix(query);
GuiHelper.runInEDT(() -> lsResultModel.setItems(entries));
}, 200, TimeUnit.MILLISECONDS);
}
......
......@@ -20,7 +20,6 @@ import javax.swing.DefaultListCellRenderer;
import javax.swing.DefaultListModel;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JOptionPane;
import javax.swing.JPopupMenu;
import javax.swing.SwingWorker;
import org.openstreetmap.josm.Main;
......@@ -36,7 +35,6 @@ import org.openstreetmap.josm.data.osm.event.DatasetEventManager;
import org.openstreetmap.josm.data.osm.event.DatasetEventManager.FireMode;
import org.openstreetmap.josm.data.osm.search.SearchMode;
import org.openstreetmap.josm.data.osm.visitor.BoundingXYVisitor;
import org.openstreetmap.josm.data.preferences.StringProperty;
import org.openstreetmap.josm.gui.MainApplication;
import org.openstreetmap.josm.gui.MapView;
import org.openstreetmap.josm.gui.SideButton;
......@@ -45,7 +43,6 @@ import org.openstreetmap.josm.gui.layer.MainLayerManager.ActiveLayerChangeEvent;
import org.openstreetmap.josm.gui.layer.MainLayerManager.ActiveLayerChangeListener;
import org.openstreetmap.josm.tools.I18n;
import org.openstreetmap.josm.tools.ImageProvider;
import org.openstreetmap.josm.tools.LanguageInfo;
import org.openstreetmap.josm.tools.Logging;
import org.openstreetmap.josm.tools.OpenBrowser;
import org.wikipedia.WikipediaApp;
......@@ -54,11 +51,20 @@ import org.wikipedia.actions.MultiAction;
import org.wikipedia.actions.ToggleWikiLayerAction;
import org.wikipedia.data.WikipediaEntry;
import org.wikipedia.tools.ListUtil;
import org.wikipedia.tools.WikiProperties;
public class WikipediaToggleDialog extends ToggleDialog implements ActiveLayerChangeListener, DataSetListenerAdapter.Listener {
public WikipediaToggleDialog() {
super(tr("Wikipedia"), "wikipedia", tr("Fetch Wikipedia articles with coordinates"), null, 150);
super(
tr("Wikipedia"),
"wikipedia",
tr("Fetch Wikipedia articles with coordinates"),
null,
150,
true,
WikiPreferences.class
);
final Action[] downloadActions = {
new WikipediaLoadCoordinatesAction(false),
......@@ -70,19 +76,17 @@ public class WikipediaToggleDialog extends ToggleDialog implements ActiveLayerCh
MultiAction.createButton(
I18n.tr("Download elements"),
"download",
I18n.tr("Download all elements in the current viewport from one of {0} sources", downloadActions.length),
I18n.tr("Download all elements from one of {0} sources", downloadActions.length),
downloadActions
),
new SideButton(new PasteWikipediaArticlesAction()),
new SideButton(new AddWikipediaTagAction(list)),
new SideButton(new WikipediaSettingsAction(), false)
new SideButton(new AddWikipediaTagAction(list))
));
updateTitle();
}
/** A string describing the context (use-case) for determining the dialog title */
String titleContext = null;
static final StringProperty wikipediaLang = new StringProperty("wikipedia.lang", LanguageInfo.getJOSMLocaleCode().substring(0, 2));
final Set<String> articles = new HashSet<>();
final DefaultListModel<WikipediaEntry> model = new DefaultListModel<>();
final JList<WikipediaEntry> list = new JList<WikipediaEntry>(model) {
......@@ -152,7 +156,7 @@ public class WikipediaToggleDialog extends ToggleDialog implements ActiveLayerCh
try {
return list.getModel().getElementAt(0).lang;
} catch (ArrayIndexOutOfBoundsException ignore) {
return wikipediaLang.get();
return WikiProperties.WIKIPEDIA_LANGUAGE.get();
}
}
......@@ -161,7 +165,11 @@ public class WikipediaToggleDialog extends ToggleDialog implements ActiveLayerCh
private final boolean wikidata;
WikipediaLoadCoordinatesAction(boolean wikidata) {
super(wikidata ? tr("Wikidata") : tr("Coordinates"));
super(
wikidata
? tr("Wikidata items in viewport")
: tr("Wikipedia articles in viewport (for language {0})", WikiProperties.WIKIPEDIA_LANGUAGE.get())
);
this.wikidata = wikidata;
new ImageProvider("dialogs", wikidata ? "wikidata" : "wikipedia").getResource().attachImageIcon(this, true);
putValue(SHORT_DESCRIPTION, wikidata
......@@ -183,7 +191,7 @@ public class WikipediaToggleDialog extends ToggleDialog implements ActiveLayerCh
@Override
List<WikipediaEntry> getEntries() {
return WikipediaApp.forLanguage(wikidata ? "wikidata" : wikipediaLang.get())
return WikipediaApp.forLanguage(wikidata ? "wikidata" : WikiProperties.WIKIPEDIA_LANGUAGE.get())
.getEntriesFromCoordinates(min, max);
}
}.execute();
......@@ -222,7 +230,7 @@ public class WikipediaToggleDialog extends ToggleDialog implements ActiveLayerCh
class WikipediaLoadCategoryAction extends AbstractAction {
WikipediaLoadCategoryAction() {
super(tr("Category"));
super(tr("all Wikipedia articles in Category"));
new ImageProvider("data", "sequence").getResource().attachImageIcon(this, true);
putValue(SHORT_DESCRIPTION, tr("Fetches a list of all Wikipedia articles of a category"));
}
......@@ -245,7 +253,7 @@ public class WikipediaToggleDialog extends ToggleDialog implements ActiveLayerCh
new UpdateWikipediaArticlesSwingWorker() {
@Override
List<WikipediaEntry> getEntries() {
return WikipediaApp.forLanguage(wikipediaLang.get())
return WikipediaApp.forLanguage(WikiProperties.WIKIPEDIA_LANGUAGE.get())
.getEntriesFromCategory(category, Main.pref.getInt("wikipedia.depth", 3));
}
}.execute();
......@@ -268,7 +276,7 @@ public class WikipediaToggleDialog extends ToggleDialog implements ActiveLayerCh
@Override
List<WikipediaEntry> getEntries() {
return WikipediaApp.getEntriesFromClipboard(wikipediaLang.get());
return WikipediaApp.getEntriesFromClipboard(WikiProperties.WIKIPEDIA_LANGUAGE.get());
}
}.execute();
}
......@@ -292,28 +300,6 @@ public class WikipediaToggleDialog extends ToggleDialog implements ActiveLayerCh
}
}
/**
 * Action that prompts the user for the Wikipedia language code, stores it,
 * and refreshes the dialog title and the article list.
 */
class WikipediaSettingsAction extends AbstractAction {

    WikipediaSettingsAction() {
        super(tr("Language"));
        new ImageProvider("dialogs/settings").getResource().attachImageIcon(this, true);
        putValue(SHORT_DESCRIPTION, tr("Sets the default language for the Wikipedia articles"));
    }

    @Override
    public void actionPerformed(ActionEvent e) {
        final String newLanguage = JOptionPane.showInputDialog(
                Main.parent,
                tr("Enter the Wikipedia language"),
                wikipediaLang.get());
        if (newLanguage == null) {
            // Input dialog was cancelled — keep the current language.
            return;
        }
        wikipediaLang.put(newLanguage);
        updateTitle();
        updateWikipediaArticles();
    }
}
static class AddWikipediaTagAction extends AbstractAction {
private final JList<WikipediaEntry> list;
......
......@@ -2,12 +2,34 @@
package org.wikipedia.tools;
import java.util.Arrays;
import org.openstreetmap.josm.data.preferences.DoubleProperty;
import org.openstreetmap.josm.data.preferences.ListProperty;
import org.openstreetmap.josm.data.preferences.StringProperty;
import org.openstreetmap.josm.tools.LanguageInfo;
public final class WikiProperties {
// Height of the markers painted on the wiki layer (unit presumably pixels — confirm against the layer's painter).
public static final DoubleProperty WIKI_LAYER_MARKER_HEIGHT = new DoubleProperty("wikipedia.layer.marker_height", 30.0);

// Full JOSM locale code; may carry a country suffix (stripped below for the wiki language).
private static final String JOSM_LOCALE = LanguageInfo.getJOSMLocaleCode();

// Preferred Wikipedia language. Defaults to the JOSM locale with any country
// suffix stripped; the `>= 1` guard keeps a hypothetical leading '_' from
// producing an empty language code.
public static final StringProperty WIKIPEDIA_LANGUAGE = new StringProperty(
    "wikipedia.lang",
    JOSM_LOCALE.substring(0, JOSM_LOCALE.indexOf('_') >= 1 ? JOSM_LOCALE.indexOf('_') : JOSM_LOCALE.length())
);

// Wikidata classes considered unusual as the (main) wikidata=* tag value on
// OSM objects; consumed by the UnusualWikidataClasses validator test.
public static final ListProperty WIKIDATA_VALIDATOR_UNUSUAL_CLASSES = new ListProperty(
    "wikipedia.validator.wikidata.unusual-classes",
    Arrays.asList(
        "Q36774", /* web page (includes e.g. disambiguation pages) */
        "Q215627", /* person (included by "abstract object") */
        "Q729", /* animal */
        "Q8253", /* fiction */
        "Q7184903" /* abstract object (includes e.g. taxons or brands) */
    )
);

private WikiProperties() {
    // Private constructor to avoid instantiation
}
......
......@@ -16,6 +16,7 @@ class AllValidationTests {
static final ValidationTest<WikipediaAgainstWikidata> WIKIDATA_ITEM_NOT_MATCHING_WIKIPEDIA = new ValidationTest<>(Severity.WARNING, 30_004);
static final ValidationTest<WikipediaRedirect> WIKIPEDIA_ARTICLE_REDIRECTS = new ValidationTest<>(Severity.WARNING, 30_005);
static final ValidationTest<WikipediaRedirect> WIKIPEDIA_TAG_INVALID = new ValidationTest<>(Severity.ERROR, 30_006);
static final ValidationTest<UnusualWikidataClasses> WIKIDATA_TAG_HAS_UNUSUAL_TYPE = new ValidationTest<>(Severity.WARNING, 30_007);
// i18n: Prefix for the validator messages. Note the space at the end!
static final String VALIDATOR_MESSAGE_MARKER = I18n.tr("[Wiki] ");
......@@ -31,8 +32,8 @@ class AllValidationTests {
}
static class ValidationTest<T extends Test> {
private Severity severity;
private int code;
private final Severity severity;
private final int code;
ValidationTest(final Severity severity, final int code) {
this.severity = severity;
this.code = code;
......
......@@ -22,7 +22,7 @@ public abstract class BatchProcessedTagTest<T extends BatchProcessedTagTest.Test
super(name, description);
}
private List<T> primitivesForBatches = new ArrayList<>();
private final List<T> primitivesForBatches = new ArrayList<>();
/**
* Creates a companion object for the given primitive, on which the test can later continue to operate.
......
package org.wikipedia.validator;
import static org.wikipedia.validator.AllValidationTests.SEE_OTHER_CATEGORY_VALIDATOR_ERRORS;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import org.openstreetmap.josm.data.osm.OsmPrimitive;
import org.openstreetmap.josm.gui.Notification;
import org.openstreetmap.josm.tools.I18n;
import org.wikipedia.WikipediaPlugin;
import org.wikipedia.api.ApiQueryClient;
import org.wikipedia.api.wdq.WdqApiQuery;
import org.wikipedia.api.wdq.json.SparqlResult;
import org.wikipedia.tools.ListUtil;
import org.wikipedia.tools.RegexUtil;
import org.wikipedia.tools.WikiProperties;
/**
 * Validator test flagging OSM objects whose wikidata=* tag points to a Wikidata
 * item of a class that normally should not appear as the main wikidata tag in
 * OSM (the class list comes from WikiProperties.WIKIDATA_VALIDATOR_UNUSUAL_CLASSES).
 */
public class UnusualWikidataClasses extends BatchProcessedTagTest<UnusualWikidataClasses.TestCompanion> {
    // NOTE(review): currently unreferenced within this class — presumably intended
    // for reporting network failures; confirm before removing.
    private static final Notification NETWORK_FAILED_NOTIFICATION = new Notification(
        I18n.tr("Could not check for unusual classes in wikidata=* tags.") +
        "\n" + SEE_OTHER_CATEGORY_VALIDATOR_ERRORS
    ).setIcon(WikipediaPlugin.LOGO);

    public UnusualWikidataClasses() {
        super(
            I18n.tr("Find OSM objects linked with wikidata items of a class that is untypical for OSM"),
            I18n.tr("This check queries Wikidata to find those OSM objects that are linked to wikidata items of a type, which should not occur in OSM data (at least not as the main wikidata tag)")
        );
    }

    /**
     * Wraps a primitive whose wikidata=* value is a syntactically valid Q-ID.
     * @param primitive the primitive to inspect
     * @return a companion for the primitive, or {@code null} if the tag is absent or invalid
     */
    @Override
    protected TestCompanion prepareTestCompanion(OsmPrimitive primitive) {
        final String wikidataValue = primitive.get("wikidata");
        return RegexUtil.isValidQId(wikidataValue) ? new TestCompanion(primitive, wikidataValue) : null;
    }

    @Override
    protected void check(List<TestCompanion> allPrimitives) {
        // Query in batches of 50 Q-IDs, issuing one request per configured "unusual" class.
        ListUtil.processInBatches(allPrimitives, 50, batch -> {
            for (final String forbiddenType : WikiProperties.WIKIDATA_VALIDATOR_UNUSUAL_CLASSES.get()) {
                try {
                    checkBatch(batch, forbiddenType);
                } catch (IOException e) {
                    // Attribute the failed request to every primitive of the batch.
                    errors.add(
                        AllValidationTests.API_REQUEST_FAILED.getBuilder(this)
                            .primitives(batch.stream().map(BatchProcessedTagTest.TestCompanion::getPrimitive).collect(Collectors.toList()))
                            .message(AllValidationTests.VALIDATOR_MESSAGE_MARKER + e.getMessage())
                            .build()
                    );
                }
            }
        });
    }

    /**
     * Runs one SPARQL query asking which of the batch's Q-IDs are instances of
     * {@code forbiddenType} (or one of its subclasses) and records a warning for
     * each matching primitive.
     * @param batch the companions whose wikidata values are checked
     * @param forbiddenType the Q-ID of the class considered unusual
     * @throws IOException if the API request fails
     */
    private void checkBatch(final Collection<TestCompanion> batch, final String forbiddenType) throws IOException {
        final SparqlResult result = ApiQueryClient.query(WdqApiQuery.findInstancesOfXOrOfSubclass(batch.stream().map(it -> it.wikidataValue).collect(Collectors.toList()), forbiddenType));
        for (List<SparqlResult.Results.Entry> row : result.getRows()) {
            final String entityURL = row.get(0).getValue();
            // Extract the Q-ID from an entity URL such as "http://www.wikidata.org/entity/Q42"
            // (compute lastIndexOf only once instead of twice as before).
            final int slashIndex = entityURL.lastIndexOf('/');
            final String qID = entityURL.substring(slashIndex >= 0 ? slashIndex + 1 : 0);
            final Collection<OsmPrimitive> primitives = batch.stream()
                .filter(it -> qID.equals(it.wikidataValue))
                .map(BatchProcessedTagTest.TestCompanion::getPrimitive)
                .collect(Collectors.toList());
            if (!primitives.isEmpty()) {
                errors.add(
                    AllValidationTests.WIKIDATA_TAG_HAS_UNUSUAL_TYPE.getBuilder(this)
                        .primitives(primitives)
                        .message(
                            "Wikidata value is of unusual type for the wikidata=* tag on OSM objects",
                            I18n.marktr("{0} is an instance of {1} (or any subclass thereof)"),
                            qID,
                            forbiddenType
                        )
                        .build()
                );
            }
        }
    }

    /**
     * Companion pairing a primitive with its (syntactically validated) wikidata Q-ID value.
     */
    static class TestCompanion extends BatchProcessedTagTest.TestCompanion {
        private final String wikidataValue;

        TestCompanion(final OsmPrimitive primitive, final String wikidataValue) {
            super(primitive);
            this.wikidataValue = wikidataValue;
        }
    }
}
......@@ -104,7 +104,7 @@ public class WikipediaAgainstWikidata extends BatchProcessedTagTest<WikipediaAga
}
static class TestCompanion extends BatchProcessedTagTest.TestCompanion {
final String language;
final String language; // TODO: Use WikipediaSite here to verify a wiki in that language actually exists
final String title;
final String qId;
private TestCompanion(final OsmPrimitive primitive, String language, String title, final String qId) {
......
......@@ -93,11 +93,11 @@ public class WikipediaRedirect extends BatchProcessedTagTest<WikipediaRedirect.T
*/
private void checkBatch(final IWikipediaSite site, final List<Map.Entry<String, List<OsmPrimitive>>> batch) {
try {
final QueryResult queryResult = ApiQueryClient.query(
final QueryResult.Query.Redirects redirects = ApiQueryClient.query(
WikipediaActionApiQuery.query(site, batch.stream().map(Map.Entry::getKey).collect(Collectors.toList()))
);
).getQuery().getRedirects();
for (Map.Entry<String, List<OsmPrimitive>> entry : batch) {
final String redirectedTitle = queryResult.getQuery().getRedirects().resolveRedirect(entry.getKey());
final String redirectedTitle = redirects.resolveRedirect(entry.getKey());
if (redirectedTitle != null && !redirectedTitle.equals(entry.getKey())) {
errors.add(
AllValidationTests.WIKIPEDIA_ARTICLE_REDIRECTS.getBuilder(this)
......
package org.wikipedia.api.wdq;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import org.junit.Rule;
import org.junit.Test;
import org.openstreetmap.josm.testutils.JOSMTestRules;
import org.wikipedia.api.ApiQueryClient;
import org.wikipedia.api.wdq.json.SparqlResult;
/**
 * Integration test for {@code WdqApiQuery.findInstancesOfXOrOfSubclass},
 * querying the live Wikidata endpoint with a mixed list of bridge and
 * building items and checking that each class query returns exactly its
 * own items.
 */
public class WdqApiQueryTest {
    // TODO: Mock API responses with WireMock
    @Rule
    public JOSMTestRules josmRules = new JOSMTestRules().preferences().timeout(30_000);

    private static final List<String> BRIDGES = Arrays.asList(
        "Q99236", /* Millau viaduct */
        "Q44440", /* Golden Gate Bridge */
        "Q83125", /* Tower Bridge */
        "Q54495", /* Sydney Harbour Bridge */
        "Q459086", /* Jungfern Bridge */
        "Q52505", /* Rialto Bridge */
        "Q18109819", /* Duge Bridge */
        "Q805835", /* Baluarte Bridge */
        "Q5459867" /* Floating Bridge */
    );
    private static final List<String> BUILDINGS = Arrays.asList(
        "Q48435", /* Sagrada Família */
        "Q18712428", /* Makkah Clock Royal Tower Hotel */
        "Q494895", /* Lotte World Tower */
        "Q507939", /* World One */
        "Q201013", /* Svalbard Seed Vault */
        "Q379080", /* Fort Jesus */
        "Q3368242" /* Dom Tower of Utrecht */
    );
    // Bridges and buildings interleaved, so each query has to pick out its own items.
    private static final Collection<String> INTERLEAVED_ITEMS = Arrays.asList(
        BRIDGES.get(0), BUILDINGS.get(0), BRIDGES.get(1), BUILDINGS.get(1),
        BUILDINGS.get(2), BRIDGES.get(2), BRIDGES.get(3), BRIDGES.get(4),
        BUILDINGS.get(3), BRIDGES.get(5), BUILDINGS.get(4), BRIDGES.get(6),
        BRIDGES.get(7), BRIDGES.get(8), BUILDINGS.get(5), BUILDINGS.get(6)
    );
    private static final String BRIDGE_CLASS = "Q12280";
    private static final String BUILDING_CLASS = "Q41176";

    @Test
    public void test() throws IOException {
        verifyClassQuery(BRIDGE_CLASS, BRIDGES, "Bridge");
        verifyClassQuery(BUILDING_CLASS, BUILDINGS, "Building");
    }

    /**
     * Queries for instances of {@code classQId} among {@link #INTERLEAVED_ITEMS} and
     * asserts the result contains exactly the {@code expected} items as single-column URI rows.
     */
    private static void verifyClassQuery(final String classQId, final List<String> expected, final String kind) throws IOException {
        final SparqlResult result = ApiQueryClient.query(WdqApiQuery.findInstancesOfXOrOfSubclass(INTERLEAVED_ITEMS, classQId));
        result.getRows().forEach(row -> assertEquals(1, row.size()));
        for (final String qId : expected) {
            assertEquals(
                kind + " " + qId + " not found in the result!",
                1,
                result.getRows().stream().filter(row -> ("http://www.wikidata.org/entity/" + qId).equals(row.get(0).getValue())).count()
            );
        }
        assertEquals(expected.size(), result.size());
        assertTrue(result.getRows().stream().allMatch(row -> "uri".equals(row.get(0).getType())));
    }
}
......@@ -91,7 +91,7 @@ public class WikidataActionApiQueryTest {
aResponse()
.withStatus(200)
.withHeader("Content-Type", "application/json")
.withBody(ResourceFileLoader.getResourceBytes(WikidataActionApiQueryTest.class, "response/wbgetentities/dewiki:Berlin.json"))
.withBody(ResourceFileLoader.getResourceBytes(WikidataActionApiQueryTest.class, "response/wbgetentities/dewiki_Berlin.json"))
)
);
......@@ -120,7 +120,7 @@ public class WikidataActionApiQueryTest {
aResponse()
.withStatus(200)
.withHeader("Content-Type", "application/json")
.withBody(ResourceFileLoader.getResourceBytes(WikidataActionApiQueryTest.class, "response/wbgetentities/enwiki:2entities2missing.json"))
.withBody(ResourceFileLoader.getResourceBytes(WikidataActionApiQueryTest.class, "response/wbgetentities/enwiki_2entities2missing.json"))
)
);
......
......@@ -9,8 +9,8 @@ import static com.github.tomakehurst.wiremock.client.WireMock.stubFor;
import static com.github.tomakehurst.wiremock.client.WireMock.urlEqualTo;
import static com.github.tomakehurst.wiremock.client.WireMock.verify;
import static com.github.tomakehurst.wiremock.core.<