/*
 * Copyright Aduna (http://www.aduna-software.com/) (c) 2007.
 *
 * Licensed under the Aduna BSD-style license.
 */
package org.openrdf.rio.trig;

import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;

import org.openrdf.model.Resource;
import org.openrdf.model.Statement;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFHandlerException;
import org.openrdf.rio.turtle.TurtleWriter;

/**
 * An extension of {@link TurtleWriter} that writes RDF documents in <a
 * href="http://www.wiwiss.fu-berlin.de/suhl/bizer/TriG/Spec/">TriG</a> format
 * by adding graph scopes to the Turtle document.
 *
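 * <p>
 * A minimal usage sketch (illustrative only: the call sequence follows the
 * generic {@link org.openrdf.rio.RDFHandler} contract, and the statement
 * variable {@code st} is assumed to be created elsewhere):
 *
 * <pre>
 * TriGWriter trigWriter = new TriGWriter(System.out);
 * trigWriter.startRDF();
 * // statements that share a context end up in the same graph scope
 * trigWriter.handleStatement(st);
 * trigWriter.endRDF();
 * </pre>
 *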
 * @author Arjohn Kampman
 */
public class TriGWriter extends TurtleWriter {

	/*-----------*
	 * Variables *
	 *-----------*/

	private boolean inActiveContext;

	private Resource currentContext;

	/*--------------*
	 * Constructors *
	 *--------------*/

	/**
	 * Creates a new TriGWriter that will write to the supplied OutputStream.
	 *
	 * @param out
	 *        The OutputStream to write the TriG document to.
	 */
	public TriGWriter(OutputStream out) {
		super(out);
	}

	/**
	 * Creates a new TriGWriter that will write to the supplied Writer.
	 *
	 * @param writer
	 *        The Writer to write the TriG document to.
	 */
	public TriGWriter(Writer writer) {
		super(writer);
	}

	/*---------*
	 * Methods *
	 *---------*/

	@Override
	public RDFFormat getRDFFormat() {
		return RDFFormat.TRIG;
	}

	@Override
	public void startRDF() throws RDFHandlerException {
		super.startRDF();

		inActiveContext = false;
		currentContext = null;
	}

	@Override
	public void endRDF() throws RDFHandlerException {
		super.endRDF();

		try {
			closeActiveContext();
			writer.flush();
		} catch (IOException e) {
			throw new RDFHandlerException(e);
		}
	}

	@Override
	public void handleStatement(Statement st) throws RDFHandlerException {
		if (!writingStarted) {
			throw new RuntimeException("Document writing has not yet been started");
		}

		try {
			Resource context = st.getContext();

			if (inActiveContext && !contextsEquals(context, currentContext)) {
				// The statement has a different context than the open graph
				// scope: finish the pending statement and close the scope
				closePreviousStatement();
				closeActiveContext();
			}

			if (!inActiveContext) {
				// Open a new graph scope; named graphs are prefixed with the
				// context resource, the default graph gets a bare scope
				writer.writeEOL();

				if (context != null) {
					writeResource(context);
					writer.write(" ");
				}

				writer.write("{");
				writer.increaseIndentation();

				currentContext = context;
				inActiveContext = true;
			}
		} catch (IOException e) {
			throw new RDFHandlerException(e);
		}

		super.handleStatement(st);
	}

	@Override
	protected void writeCommentLine(String line) throws IOException {
		// Comments are written at the top level, outside any graph scope
		closeActiveContext();
		super.writeCommentLine(line);
	}

	@Override
	protected void writeNamespace(String prefix, String name) throws IOException {
		// Namespace declarations are written at the top level as well
		closeActiveContext();
		super.writeNamespace(prefix, name);
	}

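	/**
	 * Closes the currently active graph scope, if any, by writing the closing
	 * bracket and resetting the context-tracking state.
	 */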
	protected void closeActiveContext() throws IOException {
		if (inActiveContext) {
			writer.decreaseIndentation();
			writer.write("}");
			writer.writeEOL();

			inActiveContext = false;
			currentContext = null;
		}
	}

	// Null-safe equality: two contexts match when both are null or when they
	// are equal resources
	private static boolean contextsEquals(Resource context1, Resource context2) {
		if (context1 == null) {
			return context2 == null;
		}
		else {
			return context1.equals(context2);
		}
	}
}