Commit 5ecdf423 authored by Maxime Lefrançois

added htaccess and dataset

parent 0838355f
......@@ -200,6 +200,11 @@
<version>4.1</version>
</dependency>
<dependency>
<groupId>org.apache.jena</groupId>
<artifactId>jena-text</artifactId>
<version>3.13.0</version>
</dependency>
</dependencies>
<build>
......
......@@ -36,11 +36,22 @@ import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import org.apache.commons.io.FileUtils;
import org.apache.jena.query.Dataset;
import org.apache.jena.query.QueryExecutionFactory;
import org.apache.jena.query.ReadWrite;
import org.apache.jena.query.text.TextDatasetFactory;
import org.apache.jena.query.text.TextIndexConfig;
import org.apache.jena.query.text.TextIndexDB;
import org.apache.jena.query.text.TextIndexLucene;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.sparql.expr.nodevalue.NodeValueString;
import org.apache.log4j.Layout;
import org.apache.log4j.PatternLayout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import fr.emse.gitlab.saref.managers.DatasetManager;
import fr.emse.gitlab.saref.managers.OntologyManager;
import fr.emse.gitlab.saref.managers.SiteManager;
......@@ -52,7 +63,7 @@ public class SAREFPipeline {
private static final Logger LOG = LoggerFactory.getLogger(SAREFPipeline.class);
- private static final String NAME_TARGET = "target";
+ public static final String NAME_TARGET = "target";
private static final String NAME_LOG_FILE = "output.log";
public static enum Mode {
......@@ -120,9 +131,6 @@ public class SAREFPipeline {
ontologyManager = new OntologyManager(this, logger);
// datasetManager = new DatasetManager(this, logger);
// datasetManager.emptyDataset();
siteManager = new SiteManager(this, logger);
siteManager.prepareSite();
......@@ -134,7 +142,10 @@ public class SAREFPipeline {
sourcesManager.checkTerms();
sourcesManager.generateSite();
sourcesManager.resetCheckout();
if (mode == Mode.PORTAL) {
datasetManager = new DatasetManager(this, logger);
datasetManager.createDataset();
}
} catch (IOException ex) {
LOG.warn("IOException:", ex);
} catch (SAREFPipelineException ex) {
......
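The body of DatasetManager.createDataset() is not part of this diff. Given the jena-text imports added above (TextDatasetFactory, TextIndexConfig) and the TDB location declared in the Fuseki assembler added by this commit, a minimal sketch of how such a Lucene-indexed dataset is typically assembled could look as follows; the class name, the Lucene directory, and the indexed properties are illustrative assumptions, not the project's actual code.

import java.nio.file.Paths;
import org.apache.jena.query.Dataset;
import org.apache.jena.query.text.EntityDefinition;
import org.apache.jena.query.text.TextDatasetFactory;
import org.apache.jena.query.text.TextIndexConfig;
import org.apache.jena.tdb.TDBFactory;
import org.apache.jena.vocabulary.RDFS;
import org.apache.lucene.store.FSDirectory;

public class TextDatasetSketch {
	public static Dataset create() throws Exception {
		// TDB-backed dataset; "target/tdb" matches tdb:location in the
		// Fuseki assembler configuration below.
		Dataset base = TDBFactory.createDataset("target/tdb");
		// Map rdfs:label and rdfs:comment to the default "text" field,
		// mirroring the text:map of the assembler configuration.
		EntityDefinition entDef = new EntityDefinition("uri", "text");
		entDef.set("text", RDFS.label.asNode());
		entDef.set("text", RDFS.comment.asNode());
		// Wrap the base dataset so that additions of rdfs:label and
		// rdfs:comment triples are indexed in Lucene on commit.
		return TextDatasetFactory.createLucene(base,
				FSDirectory.open(Paths.get("target/lucene")),
				new TextIndexConfig(entDef));
	}
}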
......@@ -27,11 +27,14 @@ package fr.emse.gitlab.saref.managers;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
import org.apache.jena.atlas.io.IndentedWriter;
......@@ -58,6 +61,8 @@ import fr.emse.gitlab.saref.SAREFPipeline;
import fr.emse.gitlab.saref.SAREFPipeline.Mode;
import fr.emse.gitlab.saref.SAREFPipelineException;
import fr.emse.gitlab.saref.entities.SAREFExample;
import fr.emse.gitlab.saref.entities.SAREFProject;
import fr.emse.gitlab.saref.entities.SAREFRepository;
import fr.emse.gitlab.saref.entities.SAREFTerm;
import fr.emse.gitlab.saref.entities.SAREFVersion;
import fr.emse.gitlab.saref.utils.StreamManagerFactory;
......@@ -75,6 +80,13 @@ public class SiteManager extends SAREFErrorLogger {
private static final String NAME_STATIC = "static";
private static final String NAME_REPORT_HTML = "report.html";
private static final String HTACCESS_HTML = "RewriteCond %{HTTP_ACCEPT} !(application/rdf\\+xml|text/turtle|text/n3|application/n\\-triples|application/ld\\+json)";
private static final String HTACCESS_RDFXML = "RewriteCond %{HTTP_ACCEPT} application/rdf\\+xml";
private static final String HTACCESS_TURTLE = "RewriteCond %{HTTP_ACCEPT} text/turtle";
private static final String HTACCESS_N3 = "RewriteCond %{HTTP_ACCEPT} text/n3";
private static final String HTACCESS_NTRIPLE = "RewriteCond %{HTTP_ACCEPT} application/n-triples";
private static final String HTACCESS_JSONLD = "RewriteCond %{HTTP_ACCEPT} application/ld\\+json";
private static final SPARQLExtStreamManager STREAM_MANAGER_BASE = StreamManagerFactory.get();
public final File siteDir;
private final File reportFileHTML;
......@@ -108,9 +120,6 @@ public class SiteManager extends SAREFErrorLogger {
* Prepare the site folder and clone the saref-portal-static project repository.
*/
public void prepareSite() throws SAREFPipelineException {
// if (pipeline.ignoreSite) {
// return;
// }
try (Git git = Git.cloneRepository().setURI(SAREF.SAREF_PORTAL_STATIC_GIT).setDirectory(siteDir).call()) {
} catch (Exception ex) {
try (Git git = Git.open(siteDir)) {
......@@ -124,9 +133,6 @@ public class SiteManager extends SAREFErrorLogger {
}
public void writeReport(StringWriter sw) throws IOException {
// if (pipeline.ignoreSite) {
// return;
// }
try (IndentedWriter writer = new IndentedWriter(new FileOutputStream(reportFileHTML))) {
boolean debugTemplate = pipeline.mode == Mode.DEVELOP;
Context context = ContextUtils.build(writer).setBase(SAREF.BASE).setDebugTemplate(debugTemplate)
......@@ -146,12 +152,14 @@ public class SiteManager extends SAREFErrorLogger {
}
}
- public void generateOntologyDocumentation(SAREFVersion version, IndentedWriter writer, SPARQLExtStreamManager streamManager, Dataset dataset) {
+ public void generateOntologyDocumentation(SAREFVersion version, IndentedWriter writer,
+ 		SPARQLExtStreamManager streamManager, Dataset dataset) {
Resource resource = version.getResource();
generateHTML(planForOntologyVersion, VAR_VERSION_IRI, resource, writer, streamManager, dataset);
}
- public void generateExampleDocumentation(SAREFExample example, IndentedWriter writer, SPARQLExtStreamManager streamManager, Dataset dataset) {
+ public void generateExampleDocumentation(SAREFExample example, IndentedWriter writer,
+ 		SPARQLExtStreamManager streamManager, Dataset dataset) {
Resource resource = example.getResource();
generateHTML(planForExample, VAR_EXAMPLE, resource, writer, streamManager, dataset);
}
......@@ -160,11 +168,11 @@ public class SiteManager extends SAREFErrorLogger {
Resource resource = term.getResource();
generateHTML(planForTerm, VAR_TERM, resource, writer, STREAM_MANAGER_BASE, dataset);
}
- private void generateHTML(RootPlan plan, Var var, Resource resource, IndentedWriter writer, SPARQLExtStreamManager streamManager, Dataset dataset) {
- 	Context context = ContextUtils.build(writer).setBase(SAREF.BASE)
- 			.setDebugTemplate(pipeline.mode == Mode.DEVELOP).setInputDataset(dataset)
- 			.setStreamManager(streamManager).build();
+ private void generateHTML(RootPlan plan, Var var, Resource resource, IndentedWriter writer,
+ 		SPARQLExtStreamManager streamManager, Dataset dataset) {
+ 	Context context = ContextUtils.build(writer).setBase(SAREF.BASE).setDebugTemplate(pipeline.mode == Mode.DEVELOP)
+ 			.setInputDataset(dataset).setStreamManager(streamManager).build();
BindingHashMap binding = new BindingHashMap();
binding.add(var, resource.asNode());
......@@ -172,9 +180,78 @@ public class SiteManager extends SAREFErrorLogger {
bindings.add(binding);
plan.execTemplateStream(bindings, context);
}
public static SPARQLExtStreamManager getStreamManagerBase() {
return STREAM_MANAGER_BASE;
}
public void generateHtaccess() {
File htaccess = new File(siteDir, ".htaccess");
try (FileWriter writer = new FileWriter(htaccess)) {
writer.write("RewriteEngine on\nDirectorySlash Off\n\n");
writer.write(HTACCESS_HTML);
writer.write("\nRewriteRule ^(.*)\\.conneg$ /$1.html\n");
writer.write(HTACCESS_JSONLD);
writer.write("\nRewriteRule ^(.*)\\.conneg$ /$1.jsonld\n");
writer.write(HTACCESS_N3);
writer.write("\nRewriteRule ^(.*)\\.conneg$ /$1.n3\n");
writer.write(HTACCESS_NTRIPLE);
writer.write("\nRewriteRule ^(.*)\\.conneg$ /$1.nt\n");
writer.write(HTACCESS_RDFXML);
writer.write("\nRewriteRule ^(.*)\\.conneg$ /$1.rdf\n");
writer.write(HTACCESS_TURTLE);
writer.write("\nRewriteRule ^(.*)\\.conneg$ /$1.ttl\n");
writer.write("\n");
for (RepositoryManager repositoryManager : pipeline.getSourcesManager().getSourceRepositoryManagers()) {
SAREFRepository repository = repositoryManager.getRepository();
SAREFProject project = repository.getProject();
SAREFVersion lastVersion = repository.getVersions().lastEntry().getValue();
// redirects core to core/v3.1.1
// redirects core/ to core/v3.1.1
writer.write(String.format("RewriteRule ^%s/?$ /%s\n", project.getPath(), lastVersion.getVersionPath()));
// redirects core/Sensor to core/Sensor.conneg
String choiceOfTerms = repository.getTerms().values().stream().map(SAREFTerm::getLocalName)
.collect(Collectors.joining("|", "(", ")"));
writer.write(String.format("RewriteCond %%{REQUEST_URI} ^/%s/%s$\n", project.getPath(), choiceOfTerms));
writer.write(String.format("RewriteRule ^%s/(.*)$ /%s/$1.conneg\n\n", project.getPath(), project.getPath()));
// redirects core.ttl to core/v3.1.1.ttl
writer.write(String.format("RewriteRule ^%s\\.([^\\./]+)$ /%s.$1\n\n",
project.getPath(), lastVersion.getVersionPath()));
for (SAREFVersion version : repository.getVersions().values()) {
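// redirects core/v1.1.1 to core/v1.1.1/saref.conneg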
writer.write(String.format("RewriteRule ^%s/?$ /%s/%s.conneg\n",
version.getVersionPath(), version.getVersionPath(), project.getOntologyFileName()));
// redirects core/v1.1.1.ttl to core/v1.1.1/saref.ttl
writer.write(String.format("RewriteRule ^%s\\.([^\\./]+)$ /%s/%s.$1\n",
version.getVersionPath(), version.getVersionPath(), project.getOntologyFileName()));
// redirects core/v1.1.1/saref to core/v1.1.1/saref.conneg
writer.write(String.format("RewriteRule ^%s/%s$ /%s/%s.conneg\n",
version.getVersionPath(), project.getOntologyFileName(), version.getVersionPath(),
project.getOntologyFileName()));
// redirects core/v1.1.1/tests to core/v1.1.1/tests.conneg
writer.write(String.format("RewriteRule ^%s/tests$ /%s/tests.conneg\n",
version.getVersionPath(), version.getVersionPath()));
// redirects core/v1.1.1/requirements to core/v1.1.1/requirements.conneg
writer.write(String.format("RewriteRule ^%s/requirements$ /%s/requirements.conneg\n",
version.getVersionPath(), version.getVersionPath()));
for (SAREFExample example : version.getExamples().values()) {
writer.write(String.format("RewriteRule ^%s/example/%s$ /%s/example/%s.conneg\n",
version.getVersionPath(), example.getName(), version.getVersionPath(),
example.getName()));
}
writer.write("\n\n");
}
}
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
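For a hypothetical repository published under the path core, whose latest version is v3.1.1 and which defines a single term Sensor (with saref as the ontology file name, as in the comments above), the per-repository rules emitted by generateHtaccess would read roughly as follows; this is illustrative output reconstructed from the format strings, not captured output:

RewriteRule ^core/?$ /core/v3.1.1
RewriteCond %{REQUEST_URI} ^/core/(Sensor)$
RewriteRule ^core/(.*)$ /core/$1.conneg
RewriteRule ^core\.([^\./]+)$ /core/v3.1.1.$1
RewriteRule ^core/v3.1.1/?$ /core/v3.1.1/saref.conneg
RewriteRule ^core/v3.1.1\.([^\./]+)$ /core/v3.1.1/saref.$1
RewriteRule ^core/v3.1.1/saref$ /core/v3.1.1/saref.conneg
RewriteRule ^core/v3.1.1/tests$ /core/v3.1.1/tests.conneg
RewriteRule ^core/v3.1.1/requirements$ /core/v3.1.1/requirements.conneg

This assumes getVersionPath() returns the project-qualified path (core/v3.1.1), which the patterns above require.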
......@@ -340,6 +340,7 @@ public class SourcesManager extends SAREFErrorLogger {
new TermSiteManager(pipeline, repository).generateSite();
}
}
siteManager.generateHtaccess();
}
}
......
......@@ -39,4 +39,5 @@ public class VANN {
public static final Property preferredNamespacePrefix = ResourceFactory.createProperty(NS,
"preferredNamespacePrefix");
public static final Property preferredNamespaceUri = ResourceFactory.createProperty(NS, "preferredNamespaceUri");
public static final Property example = ResourceFactory.createProperty(NS, "example");
}
\ No newline at end of file
......
@prefix fuseki: <http://jena.apache.org/fuseki#> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
@prefix tdb: <http://jena.hpl.hp.com/2008/tdb#> .
@prefix ja: <http://jena.hpl.hp.com/2005/11/Assembler#> .
@prefix text: <http://jena.apache.org/text#> .
@prefix skos: <http://www.w3.org/2004/02/skos/core#> .
@prefix skosxl: <http://www.w3.org/2008/05/skos-xl#> .
@prefix agent: <http://schema.onki.fi/agent-schema#> .
@prefix foaf: <http://xmlns.com/foaf/0.1/> .
@prefix spatial: <http://jena.apache.org/spatial#> .
@prefix schema: <http://schema.org/> .
@prefix ex: <http://example.org/>.
@prefix : <#> .
@base <http://example.org/> .
<#service> rdf:type fuseki:Service ;
fuseki:name "ds" ; # http://host:port/ds
fuseki:serviceQuery "sparql" ; # SPARQL query service
fuseki:serviceUpdate "update" ; # SPARQL update service
fuseki:serviceUpload "upload" ; # Non-SPARQL upload service
fuseki:serviceReadWriteGraphStore "data" ; # SPARQL Graph store protocol
fuseki:serviceReadGraphStore "data" ; # SPARQL Graph store protocol (read only)
fuseki:dataset <#dataset> ;
.
<#dataset> rdf:type text:TextDataset ;
text:dataset <#tdb> ;
text:index <#lucene> ;
.
<#tdb> rdf:type tdb:DatasetTDB ;
tdb:location "target/tdb" ;
tdb:unionDefaultGraph true ;
.
<#lucene> a text:TextIndexLucene ;
text:directory <file:/fuseki-base/databases/lucene> ;
text:storeValues true ;
text:entityMap <#entity-map> ;
text:analyzer [ a text:StandardAnalyzer ] ;
text:queryAnalyzer [ a text:KeywordAnalyzer ] ;
text:queryParser text:AnalyzingQueryParser ;
text:propLists (
[ text:propListProp ex:comments ;
text:props ( rdfs:label
rdfs:comment ) ;
] ) .
<#entity-map> a text:EntityMap ;
text:entityField "uri" ;
text:graphField "graph" ; ## enable graph-specific indexing
text:defaultField "text" ; ## Must be defined in the text:map
text:uidField "uid" ;
text:langField "lang" ;
text:map (
[ text:field "text" ; text:predicate rdfs:label ]
[ text:field "text" ; text:predicate rdfs:comment ]
)
.
\ No newline at end of file
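With this assembler configuration in place, the Lucene index is exposed through Jena's text:query property function. Below is a minimal sketch of a full-text search against the dataset, using QueryExecutionFactory (imported into SAREFPipeline above); the keyword and class name are illustrative.

import org.apache.jena.query.Dataset;
import org.apache.jena.query.QueryExecution;
import org.apache.jena.query.QueryExecutionFactory;
import org.apache.jena.query.ResultSetFormatter;

public class TextSearchSketch {
	// Full-text search over the indexed rdfs:label and rdfs:comment values,
	// both mapped to the default "text" field by the entity map above.
	public static void search(Dataset dataset) {
		String query = "PREFIX text: <http://jena.apache.org/text#>\n"
				+ "PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>\n"
				+ "SELECT ?s ?score ?label WHERE {\n"
				+ "  (?s ?score) text:query ('temperature') .\n"
				+ "  ?s rdfs:label ?label .\n"
				+ "} ORDER BY DESC(?score)";
		try (QueryExecution qexec = QueryExecutionFactory.create(query, dataset)) {
			ResultSetFormatter.out(System.out, qexec.execSelect());
		}
	}
}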