Initial commit

That's a lotta stuff for an initial commit, but well...
2024-05-25 19:22:26 +02:00
commit b0f35b03b9
99 changed files with 6476 additions and 0 deletions

HjsonCommentType.java

@@ -0,0 +1,15 @@
package de.siphalor.tweed5.data.hjson;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
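/**
 * The comment styles supported in Hjson: hash ({@code #}) and double-slash line comments,
 * plus block comments, which are the only variant that may span multiple lines.
 */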
@RequiredArgsConstructor
@Getter
public enum HjsonCommentType {
HASH(false),
SLASHES(false),
BLOCK(true),
;
private final boolean block;
}

HjsonLexer.java

@@ -0,0 +1,487 @@
package de.siphalor.tweed5.data.hjson;
import de.siphalor.tweed5.dataapi.api.TweedDataReadException;
import lombok.RequiredArgsConstructor;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.io.Reader;
import java.util.PrimitiveIterator;
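/**
 * Hand-written, streaming lexer that turns a {@link Reader} into {@link HjsonLexerToken}s.
 * It keeps a two-code-point lookahead buffer and tracks the current line/column position.
 */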
@ApiStatus.Internal
@RequiredArgsConstructor
public class HjsonLexer {
private static final int EMPTY_CODEPOINT = -2;
private final Reader reader;
private final HjsonReadPosition currentPos = new HjsonReadPosition();
private int peekedCodePoint = EMPTY_CODEPOINT;
private int peeked2CodePoint = EMPTY_CODEPOINT;
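// Returns the next token in value position: structural punctuation, a quoted or multiline
// string, or a quoteless literal (null/true/false, a number, or a quoteless string).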
public HjsonLexerToken nextGeneralToken() throws TweedDataReadException {
chompInlineWhitespaceAndComments();
int codePoint = eatCodePoint();
HjsonLexerToken.Type terminalTokenType = getTerminalTokenType(codePoint);
if (terminalTokenType != null) {
return createTerminalToken(terminalTokenType);
}
HjsonLexerToken token = tryReadQuotedString(codePoint);
if (token != null) {
return token;
}
return readQuotelessLiteral(codePoint);
}
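// Returns the next token in member-name position inside an object: structural punctuation,
// a quoted name, or a quoteless name, which ends at punctuation or a line break.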
public HjsonLexerToken nextInnerObjectToken() throws TweedDataReadException {
chompInlineWhitespaceAndComments();
int codePoint = eatCodePoint();
HjsonLexerToken.Type terminalTokenType = getTerminalTokenType(codePoint);
if (terminalTokenType != null) {
return createTerminalToken(terminalTokenType);
}
if (codePoint == '"') {
return readJsonQuotedString(codePoint);
} else if (codePoint == '\'') {
return readJsonQuotedString(codePoint);
} else if (codePoint < 0x21) {
throw TweedDataReadException.builder().message("Illegal character \"" + String.copyValueOf(Character.toChars(codePoint)) + "\"").build();
} else {
return readQuotelessMemberName(codePoint);
}
}
@Nullable
private HjsonLexerToken.Type getTerminalTokenType(int codePoint) {
switch (codePoint) {
case -1: return HjsonLexerToken.Type.EOF;
case '[': return HjsonLexerToken.Type.BRACKET_OPEN;
case ']': return HjsonLexerToken.Type.BRACKET_CLOSE;
case '{': return HjsonLexerToken.Type.BRACE_OPEN;
case '}': return HjsonLexerToken.Type.BRACE_CLOSE;
case ':': return HjsonLexerToken.Type.COLON;
case ',': return HjsonLexerToken.Type.COMMA;
case '\n': return HjsonLexerToken.Type.LINE_FEED;
default: return null;
}
}
private HjsonLexerToken createTerminalToken(HjsonLexerToken.Type tokenType) {
HjsonReadPosition position = currentPos.copy();
return new HjsonLexerToken(tokenType, position, position, null);
}
@Nullable
private HjsonLexerToken tryReadQuotedString(int codePoint) throws TweedDataReadException {
if (codePoint == '"') {
return readJsonQuotedString('"');
} else if (codePoint == '\'') {
int peek = peekCodePoint();
if (peek == '\'') {
int peek2 = peek2CodePoint();
if (peek2 == '\'') {
return readMultilineString();
} else {
HjsonReadPosition beginPos = currentPos.copy();
eatCodePoint();
return new HjsonLexerToken(
HjsonLexerToken.Type.JSON_STRING,
beginPos,
currentPos.copy(),
"''"
);
}
} else {
return readJsonQuotedString('\'');
}
} else {
return null;
}
}
private HjsonLexerToken readJsonQuotedString(int quoteCodePoint) throws TweedDataReadException {
HjsonReadPosition beginPos = currentPos.copy();
StringBuilder tokenBuffer = new StringBuilder();
tokenBuffer.appendCodePoint(quoteCodePoint);
while (true) {
int codePoint = eatCodePoint();
if (codePoint == -1) {
throw TweedDataReadException.builder().message("Unterminated quoted string at " + currentPos + ", started at " + beginPos).build();
} else if (codePoint == quoteCodePoint) {
tokenBuffer.appendCodePoint(codePoint);
return new HjsonLexerToken(
HjsonLexerToken.Type.JSON_STRING,
beginPos,
currentPos.copy(),
tokenBuffer
);
} else if (codePoint == '\\') {
tokenBuffer.appendCodePoint(codePoint);
tokenBuffer.appendCodePoint(eatCodePoint());
} else {
tokenBuffer.appendCodePoint(codePoint);
}
}
}
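// Reads a '''-delimited multiline string. Indentation up to the column of the opening quotes
// is stripped from the lines that follow, and a line break directly before the closing quotes
// is dropped.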
private HjsonLexerToken readMultilineString() throws TweedDataReadException {
HjsonReadPosition beginPos = currentPos.copy();
int indentToChomp = beginPos.index() - 1;
eatCodePoint();
eatCodePoint();
StringBuilder tokenBuffer = new StringBuilder();
tokenBuffer.append("'''");
boolean chompIndent = false;
while (true) {
int codePoint = eatCodePoint();
if (codePoint == -1) {
throw TweedDataReadException.builder().message("Unexpected end of multiline string at " + currentPos + ", started at " + beginPos).build();
} else if (isInlineWhitespace(codePoint)) {
tokenBuffer.appendCodePoint(codePoint);
} else {
if (codePoint == '\n') {
chompIndent = true;
tokenBuffer.setLength(3);
}
break;
}
}
while (true) {
if (chompIndent) {
chompMultilineStringIndent(indentToChomp);
} else {
chompIndent = true;
}
int singleQuoteCount = 0;
while (true) {
int codePoint = eatCodePoint();
if (codePoint == -1) {
throw TweedDataReadException.builder().message("Unexpected end of multiline string at " + currentPos + ", started at " + beginPos).build();
}
if (codePoint == '\'') {
singleQuoteCount++;
if (singleQuoteCount == 3) {
char lastActualChar = tokenBuffer.charAt(tokenBuffer.length() - 3);
if (lastActualChar == '\n') {
tokenBuffer.delete(tokenBuffer.length() - 3, tokenBuffer.length() - 2);
}
tokenBuffer.append('\'');
return new HjsonLexerToken(
HjsonLexerToken.Type.MULTILINE_STRING,
beginPos,
currentPos.copy(),
tokenBuffer
);
}
} else {
singleQuoteCount = 0;
}
tokenBuffer.appendCodePoint(codePoint);
if (codePoint == '\n') {
break;
}
}
}
}
private HjsonLexerToken readQuotelessMemberName(int codepoint) throws TweedDataReadException {
HjsonReadPosition beginPos = currentPos.copy();
StringBuilder tokenBuffer = new StringBuilder();
tokenBuffer.appendCodePoint(codepoint);
while (true) {
int peek = peekCodePoint();
if (peek == -1 || peek == '\n' || isPunctuator(peek)) {
break;
}
tokenBuffer.appendCodePoint(eatCodePoint());
}
return new HjsonLexerToken(
HjsonLexerToken.Type.QUOTELESS_STRING,
beginPos,
currentPos.copy(),
tokenBuffer
);
}
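// A quoteless value may still turn out to be a plain string: null/true/false and numbers are
// only accepted when followed by a value terminator; otherwise the rest of the line is
// consumed as a quoteless string.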
private HjsonLexerToken readQuotelessLiteral(int codePoint) throws TweedDataReadException {
if (codePoint == 'n') {
return readConstantOrQuotelessString(codePoint, "null", HjsonLexerToken.Type.NULL);
} else if (codePoint == 't') {
return readConstantOrQuotelessString(codePoint, "true", HjsonLexerToken.Type.TRUE);
} else if (codePoint == 'f') {
return readConstantOrQuotelessString(codePoint, "false", HjsonLexerToken.Type.FALSE);
} else if (codePoint == '-' || isDigit(codePoint)) {
return readNumberLiteralOrQuotelessString(codePoint);
} else {
StringBuilder tokenBuffer = new StringBuilder();
tokenBuffer.appendCodePoint(codePoint);
return readQuotelessStringToEndOfLine(currentPos.copy(), tokenBuffer);
}
}
private HjsonLexerToken readConstantOrQuotelessString(
int firstCodePoint,
String rest,
HjsonLexerToken.Type tokenType
) throws TweedDataReadException {
HjsonReadPosition beginPos = currentPos.copy();
StringBuilder tokenBuffer = new StringBuilder();
tokenBuffer.appendCodePoint(firstCodePoint);
PrimitiveIterator.OfInt restIterator = rest.codePoints().iterator();
restIterator.nextInt(); // skip first, as already checked and consumed
while (restIterator.hasNext()) {
int codePoint = eatCodePoint();
tokenBuffer.appendCodePoint(codePoint);
if (codePoint != restIterator.nextInt()) {
return readQuotelessStringToEndOfLine(beginPos, tokenBuffer);
}
}
return chompAfterLiteralOrReadToQuotelessString(tokenType, beginPos, tokenBuffer);
}
private HjsonLexerToken readNumberLiteralOrQuotelessString(int firstCodePoint) throws TweedDataReadException {
HjsonReadPosition beginPos = currentPos.copy();
StringBuilder tokenBuffer = new StringBuilder();
tokenBuffer.appendCodePoint(firstCodePoint);
int codePoint = firstCodePoint;
if (codePoint == '-') {
codePoint = eatCodePoint();
if (codePoint == -1) {
throw TweedDataReadException.builder().message("Unexpected end of number at " + currentPos).build();
}
tokenBuffer.appendCodePoint(codePoint);
}
if (!isDigit(codePoint)) {
return readQuotelessStringToEndOfLine(beginPos, tokenBuffer);
}
boolean startsWithZero = codePoint == '0';
codePoint = peekCodePoint();
if (startsWithZero && isDigit(codePoint)) {
return readQuotelessStringToEndOfLine(beginPos, tokenBuffer);
}
eatManyDigitsToBuffer(tokenBuffer);
if (peekCodePoint() == '.') {
tokenBuffer.appendCodePoint(eatCodePoint());
codePoint = eatCodePoint();
tokenBuffer.appendCodePoint(codePoint);
if (!isDigit(codePoint)) {
return readQuotelessStringToEndOfLine(beginPos, tokenBuffer);
}
eatManyDigitsToBuffer(tokenBuffer);
}
if (peekCodePoint() == 'e' || peekCodePoint() == 'E') {
tokenBuffer.appendCodePoint(eatCodePoint());
codePoint = eatCodePoint();
tokenBuffer.appendCodePoint(codePoint);
if (codePoint == '+' || codePoint == '-') {
codePoint = eatCodePoint();
tokenBuffer.appendCodePoint(codePoint);
}
if (!isDigit(codePoint)) {
return readQuotelessStringToEndOfLine(beginPos, tokenBuffer);
}
eatManyDigitsToBuffer(tokenBuffer);
}
return chompAfterLiteralOrReadToQuotelessString(HjsonLexerToken.Type.NUMBER, beginPos, tokenBuffer);
}
private void eatManyDigitsToBuffer(StringBuilder buffer) throws TweedDataReadException {
while (true) {
int codePoint = peekCodePoint();
if (!isDigit(codePoint)) {
break;
}
buffer.appendCodePoint(eatCodePoint());
}
}
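// After a complete literal, only inline whitespace followed by EOF, a comma, a line feed,
// a comment, or a closing bracket/brace may appear; anything else downgrades the whole run
// to a quoteless string spanning the rest of the line.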
private HjsonLexerToken chompAfterLiteralOrReadToQuotelessString(
HjsonLexerToken.Type tokenType,
HjsonReadPosition beginPos,
StringBuilder tokenBuffer
) throws TweedDataReadException {
int literalEndLength = tokenBuffer.length();
HjsonReadPosition literalEndPos = currentPos.copy();
while (true) {
int peek = peekCodePoint();
if (peek == -1 || peek == ',' || peek == '\n' || peek == '#' || peek == ']' || peek == '}') {
tokenBuffer.setLength(literalEndLength);
return new HjsonLexerToken(tokenType, beginPos, literalEndPos, tokenBuffer);
} else if (peek == '/') {
int peek2 = peek2CodePoint();
if (peek2 == '/' || peek2 == '*') {
tokenBuffer.setLength(literalEndLength);
return new HjsonLexerToken(tokenType, beginPos, literalEndPos, tokenBuffer);
} else {
return readQuotelessStringToEndOfLine(beginPos, tokenBuffer);
}
} else if (!isInlineWhitespace(peek)) {
return readQuotelessStringToEndOfLine(beginPos, tokenBuffer);
}
tokenBuffer.appendCodePoint(eatCodePoint());
}
}
private HjsonLexerToken readQuotelessStringToEndOfLine(
HjsonReadPosition beginPos,
StringBuilder tokenBuffer
) throws TweedDataReadException {
int lastNonWhitespaceLength = tokenBuffer.length();
while (true) {
int codePoint = peekCodePoint();
if (codePoint == -1 || codePoint == '\n') {
tokenBuffer.setLength(lastNonWhitespaceLength);
return new HjsonLexerToken(
HjsonLexerToken.Type.QUOTELESS_STRING,
beginPos,
currentPos.copy(),
tokenBuffer
);
} else {
tokenBuffer.appendCodePoint(eatCodePoint());
if (!isInlineWhitespace(codePoint)) {
lastNonWhitespaceLength = tokenBuffer.length();
}
}
}
}
private void chompMultilineStringIndent(int count) throws TweedDataReadException {
for (int i = 0; i < count; i++) {
int codePoint = eatCodePoint();
if (codePoint == -1) {
return;
} else if (!isInlineWhitespace(codePoint)) {
throw TweedDataReadException.builder().message("Illegal indent at " + currentPos + ", expected " + count + " whitespace characters").build();
}
}
}
private void chompInlineWhitespaceAndComments() throws TweedDataReadException {
while (true) {
int peek = peekCodePoint();
if (isInlineWhitespace(peek)) {
eatCodePoint();
} else if (peek == '#') {
eatCodePoint();
chompToEndOfLine();
} else if (peek == '/') {
int peek2 = peek2CodePoint();
if (peek2 == '/') {
eatCodePoint();
eatCodePoint();
chompToEndOfLine();
} else if (peek2 == '*') {
eatCodePoint();
eatCodePoint();
chompToEndOfBlockComment();
}
} else {
break;
}
}
}
private void chompToEndOfLine() throws TweedDataReadException {
while (true) {
int codePoint = eatCodePoint();
if (codePoint == -1 || codePoint == '\n') {
break;
}
}
}
private void chompToEndOfBlockComment() throws TweedDataReadException {
boolean lastWasAsterisk = false;
while (true) {
int codePoint = eatCodePoint();
if (codePoint == -1) {
throw TweedDataReadException.builder().message("Unterminated block comment at end of file " + currentPos).build();
} else if (codePoint == '*') {
lastWasAsterisk = true;
} else if (lastWasAsterisk && codePoint == '/') {
break;
}
}
}
private boolean isPunctuator(int codePoint) {
return codePoint == ',' || codePoint == ':' || codePoint == '[' || codePoint == ']' || codePoint == '{' || codePoint == '}';
}
private boolean isDigit(int codePoint) {
return codePoint >= '0' && codePoint <= '9';
}
private boolean isInlineWhitespace(int codePoint) {
return codePoint == ' ' || codePoint == '\t' || codePoint == '\r';
}
private int peek2CodePoint() throws TweedDataReadException {
if (peeked2CodePoint == EMPTY_CODEPOINT) {
if (peekedCodePoint == EMPTY_CODEPOINT) {
peekedCodePoint = readCodePoint();
}
peeked2CodePoint = readCodePoint();
}
return peeked2CodePoint;
}
private int peekCodePoint() throws TweedDataReadException {
if (peekedCodePoint == EMPTY_CODEPOINT) {
peekedCodePoint = readCodePoint();
}
return peekedCodePoint;
}
private int eatCodePoint() throws TweedDataReadException {
int codePoint;
if (peekedCodePoint != EMPTY_CODEPOINT) {
codePoint = peekedCodePoint;
peekedCodePoint = peeked2CodePoint;
peeked2CodePoint = EMPTY_CODEPOINT;
} else {
codePoint = readCodePoint();
}
if (codePoint == '\n') {
currentPos.nextLine();
} else {
currentPos.nextCodepoint();
}
return codePoint;
}
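// Note: Reader.read() yields a single UTF-16 unit, so supplementary-plane characters arrive
// as surrogate pairs rather than as one code point.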
private int readCodePoint() throws TweedDataReadException {
try {
return reader.read();
} catch (IOException e) {
throw TweedDataReadException.builder().message("Failed to read character from input at " + currentPos).cause(e).build();
}
}
}

HjsonLexerToken.java

@@ -0,0 +1,61 @@
package de.siphalor.tweed5.data.hjson;
import lombok.EqualsAndHashCode;
import lombok.Value;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.Nullable;
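/**
 * A single token produced by {@link HjsonLexer}: its type, begin/end positions, and the raw
 * source text (null for structural tokens). Equality compares the content via
 * {@link #contentString()} so that buffer-backed and String-backed contents match.
 */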
@ApiStatus.Internal
@Value
public class HjsonLexerToken {
Type type;
HjsonReadPosition begin;
HjsonReadPosition end;
@EqualsAndHashCode.Exclude
@Nullable
CharSequence content;
@EqualsAndHashCode.Include
@Nullable
public String contentString() {
return content == null ? null : content.toString();
}
public String toString() {
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append(type.toString());
if (content != null) {
stringBuilder.append(" (\"");
stringBuilder.append(content);
stringBuilder.append("\")");
}
stringBuilder.append(" at ");
stringBuilder.append(begin);
if (!begin.equals(end)) {
stringBuilder.append(" to ");
stringBuilder.append(end);
}
return stringBuilder.toString();
}
enum Type {
EOF,
BRACKET_OPEN,
BRACKET_CLOSE,
BRACE_OPEN,
BRACE_CLOSE,
COMMA,
COLON,
LINE_FEED,
NULL,
TRUE,
FALSE,
NUMBER,
QUOTELESS_STRING,
JSON_STRING,
MULTILINE_STRING,
}
}

HjsonReadPosition.java

@@ -0,0 +1,32 @@
package de.siphalor.tweed5.data.hjson;
import lombok.*;
import org.jetbrains.annotations.ApiStatus;
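/**
 * Mutable line/column cursor used by the lexer. Lines start at 1; the codepoint index is
 * reset to 0 on every line feed and advanced for every other consumed code point.
 */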
@ApiStatus.Internal
@NoArgsConstructor
@AllArgsConstructor
@EqualsAndHashCode
@Getter
class HjsonReadPosition {
private int line = 1;
private int index;
public void nextCodepoint() {
index++;
}
public void nextLine() {
line++;
index = 0;
}
public HjsonReadPosition copy() {
return new HjsonReadPosition(line, index);
}
@Override
public String toString() {
return line + ":" + index;
}
}

HjsonReader.java

@@ -0,0 +1,642 @@
package de.siphalor.tweed5.data.hjson;
import de.siphalor.tweed5.dataapi.api.*;
import java.util.*;
import java.util.stream.Collectors;
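/**
 * Pull-based reader that translates {@link HjsonLexer} tokens into {@code TweedDataToken}s,
 * tracking nesting with a context stack and key/value transitions with a small state machine.
 */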
public class HjsonReader implements TweedDataReader {
private final HjsonLexer lexer;
private final Deque<Context> contexts;
private State state = State.BEFORE_VALUE;
private HjsonLexerToken peekedLexerToken;
private TweedDataToken peekedToken;
public HjsonReader(HjsonLexer lexer) {
this.lexer = lexer;
this.contexts = new LinkedList<>();
this.contexts.push(Context.VALUE);
}
@Override
public TweedDataToken peekToken() throws TweedDataReadException {
if (peekedToken == null) {
peekedToken = nextToken();
}
return peekedToken;
}
@Override
public TweedDataToken readToken() throws TweedDataReadException {
if (peekedToken != null) {
TweedDataToken token = peekedToken;
peekedToken = null;
return token;
}
return nextToken();
}
private TweedDataToken nextToken() throws TweedDataReadException {
Context currentContext = currentContext();
switch (currentContext) {
case OBJECT:
return nextObjectToken();
case LIST:
return nextListToken();
case VALUE:
return nextValueToken();
}
// unreachable
throw new IllegalStateException("Illegal context " + currentContext);
}
private TweedDataToken nextObjectToken() throws TweedDataReadException {
if (state == State.AFTER_OBJECT_KEY) {
chompLineFeedTokensInGeneral();
HjsonLexerToken lexerToken = eatGeneralLexerToken();
if (lexerToken.type() == HjsonLexerToken.Type.COLON) {
state = State.BEFORE_VALUE;
} else {
throw createIllegalTokenException(lexerToken, HjsonLexerToken.Type.COLON);
}
}
if (state == State.BEFORE_VALUE) {
return TweedDataTokens.asMapEntryValue(nextValueToken());
}
if (state == State.AFTER_VALUE) {
HjsonLexerToken lexerToken = eatGeneralLexerToken();
if (lexerToken.type() == HjsonLexerToken.Type.BRACE_CLOSE) {
contexts.pop();
state = State.AFTER_VALUE;
return TweedDataTokens.getMapEnd();
} else if (lexerToken.type() == HjsonLexerToken.Type.LINE_FEED || lexerToken.type() == HjsonLexerToken.Type.COMMA) {
state = State.BEFORE_OBJECT_KEY;
} else {
throw createIllegalTokenException(lexerToken, HjsonLexerToken.Type.BRACE_CLOSE, HjsonLexerToken.Type.LINE_FEED, HjsonLexerToken.Type.COMMA);
}
}
if (state == State.BEFORE_OBJECT_KEY) {
chompLineFeedTokensInObject();
HjsonLexerToken lexerToken = eatObjectLexerToken();
if (lexerToken.type() == HjsonLexerToken.Type.BRACE_CLOSE) {
contexts.pop();
state = State.AFTER_VALUE;
return TweedDataTokens.getMapEnd();
} else if (lexerToken.type() == HjsonLexerToken.Type.QUOTELESS_STRING || lexerToken.type() == HjsonLexerToken.Type.JSON_STRING) {
state = State.AFTER_OBJECT_KEY;
return TweedDataTokens.asMapEntryKey(createStringToken(lexerToken));
} else {
throw createIllegalTokenException(lexerToken, HjsonLexerToken.Type.BRACE_CLOSE, HjsonLexerToken.Type.QUOTELESS_STRING, HjsonLexerToken.Type.JSON_STRING);
}
}
throw createIllegalStateException();
}
private TweedDataToken nextListToken() throws TweedDataReadException {
if (state == State.AFTER_VALUE) {
HjsonLexerToken lexerToken = eatGeneralLexerToken();
if (lexerToken.type() == HjsonLexerToken.Type.BRACKET_CLOSE) {
contexts.pop();
state = State.AFTER_VALUE;
return TweedDataTokens.getListEnd();
} else if (lexerToken.type() == HjsonLexerToken.Type.COMMA || lexerToken.type() == HjsonLexerToken.Type.LINE_FEED) {
state = State.BEFORE_VALUE;
} else {
throw createIllegalTokenException(lexerToken, HjsonLexerToken.Type.BRACKET_CLOSE, HjsonLexerToken.Type.COMMA, HjsonLexerToken.Type.LINE_FEED);
}
}
if (state == State.BEFORE_VALUE) {
chompLineFeedTokensInGeneral();
HjsonLexerToken lexerToken = peekGeneralLexerToken();
if (lexerToken.type() == HjsonLexerToken.Type.BRACKET_CLOSE) {
eatGeneralLexerToken();
contexts.pop();
state = State.AFTER_VALUE;
return TweedDataTokens.getListEnd();
}
return TweedDataTokens.asListValue(nextValueToken());
}
throw createIllegalStateException();
}
private TweedDataToken nextValueToken() throws TweedDataReadException {
chompLineFeedTokensInGeneral();
HjsonLexerToken lexerToken = eatGeneralLexerToken();
switch (lexerToken.type()) {
case NULL:
state = State.AFTER_VALUE;
return TweedDataTokens.getNull();
case TRUE:
case FALSE:
state = State.AFTER_VALUE;
return createBooleanToken(lexerToken);
case NUMBER:
state = State.AFTER_VALUE;
return createNumberToken(lexerToken);
case QUOTELESS_STRING:
case JSON_STRING:
case MULTILINE_STRING:
state = State.AFTER_VALUE;
return createStringToken(lexerToken);
case BRACKET_OPEN:
state = State.BEFORE_VALUE;
contexts.push(Context.LIST);
return TweedDataTokens.getListStart();
case BRACE_OPEN:
state = State.BEFORE_OBJECT_KEY;
contexts.push(Context.OBJECT);
return TweedDataTokens.getMapStart();
default:
throw createIllegalTokenException(
lexerToken,
HjsonLexerToken.Type.NULL,
HjsonLexerToken.Type.TRUE,
HjsonLexerToken.Type.FALSE,
HjsonLexerToken.Type.NUMBER,
HjsonLexerToken.Type.QUOTELESS_STRING,
HjsonLexerToken.Type.JSON_STRING,
HjsonLexerToken.Type.MULTILINE_STRING,
HjsonLexerToken.Type.BRACKET_OPEN,
HjsonLexerToken.Type.BRACE_OPEN
);
}
}
private void chompLineFeedTokensInGeneral() throws TweedDataReadException {
while (peekGeneralLexerToken().type() == HjsonLexerToken.Type.LINE_FEED) {
eatGeneralLexerToken();
}
}
private void chompLineFeedTokensInObject() throws TweedDataReadException {
while (peekObjectLexerToken().type() == HjsonLexerToken.Type.LINE_FEED) {
eatObjectLexerToken();
}
}
private TweedDataToken createBooleanToken(HjsonLexerToken lexerToken) {
return new TweedDataToken() {
@Override
public boolean canReadAsBoolean() {
return true;
}
@Override
public boolean readAsBoolean() {
return lexerToken.type() == HjsonLexerToken.Type.TRUE;
}
@Override
public String toString() {
return "HJSON boolean token [" + lexerToken + "]";
}
};
}
private TweedDataToken createNumberToken(HjsonLexerToken lexerToken) {
assert lexerToken.content() != null;
return new TweedDataToken() {
private Long tryLong;
private Double tryDouble;
private boolean fraction;
private boolean mantissaTooLarge;
private boolean exponentTooLarge;
@Override
public boolean canReadAsByte() {
tryReadLong();
return isValidIntegerValue(Byte.MIN_VALUE, Byte.MAX_VALUE);
}
@Override
public byte readAsByte() throws TweedDataReadException {
tryReadLong();
requireValidIntegerValue(Byte.MIN_VALUE, Byte.MAX_VALUE);
return tryLong.byteValue();
}
@Override
public boolean canReadAsShort() {
tryReadLong();
return isValidIntegerValue(Short.MIN_VALUE, Short.MAX_VALUE);
}
@Override
public short readAsShort() throws TweedDataReadException {
tryReadLong();
requireValidIntegerValue(Short.MIN_VALUE, Short.MAX_VALUE);
return tryLong.shortValue();
}
@Override
public boolean canReadAsInt() {
tryReadLong();
return isValidIntegerValue(Integer.MIN_VALUE, Integer.MAX_VALUE);
}
@Override
public int readAsInt() throws TweedDataReadException {
tryReadLong();
requireValidIntegerValue(Integer.MIN_VALUE, Integer.MAX_VALUE);
return tryLong.intValue();
}
@Override
public boolean canReadAsLong() {
tryReadLong();
return !mantissaTooLarge && !exponentTooLarge && !fraction;
}
@Override
public long readAsLong() throws TweedDataReadException {
tryReadLong();
requireValidIntegerValue(Long.MIN_VALUE, Long.MAX_VALUE);
return tryLong;
}
private boolean isValidIntegerValue(long min, long max) {
return !mantissaTooLarge && !exponentTooLarge && !fraction && tryLong != null && tryLong >= min && tryLong <= max;
}
private void requireValidIntegerValue(long min, long max) throws TweedDataReadException {
if (mantissaTooLarge) {
throw TweedDataReadException.builder()
.message("Mantissa of number is too large! (" + lexerToken + ")")
.recoverable(TweedDataReaderRecoverMode.SKIP)
.build();
}
if (exponentTooLarge) {
throw TweedDataReadException.builder()
.message("Exponent of number is too large! (" + lexerToken + ")")
.recoverable(TweedDataReaderRecoverMode.SKIP)
.build();
}
if (fraction) {
throw TweedDataReadException.builder()
.message("Fractional number cannot be read as non-fractional! (" + lexerToken + ")")
.recoverable(TweedDataReaderRecoverMode.SKIP)
.build();
}
if (tryLong < min) {
throw TweedDataReadException.builder()
.message("Number is too low for data type, minimum is " + min + " at " + lexerToken)
.recoverable(TweedDataReaderRecoverMode.SKIP)
.build();
}
if (tryLong > max) {
throw TweedDataReadException.builder()
.message("Number is too large for data type, maximum is " + max + " at " + lexerToken)
.recoverable(TweedDataReaderRecoverMode.SKIP)
.build();
}
}
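// Parses the token as a long, folding fraction digits and the exponent into the value.
// Instead of throwing, overflow and fractional results are recorded in the
// mantissaTooLarge/exponentTooLarge/fraction flags.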
private void tryReadLong() {
if (tryLong != null) {
return;
}
PrimitiveIterator.OfInt iterator = lexerToken.content().codePoints().iterator();
long sign = 1;
int codePoint = iterator.nextInt();
if (codePoint == '-') {
sign = -1;
codePoint = iterator.nextInt();
}
int fractionDigits = 0;
try {
tryLong = 0L;
boolean inFraction = false;
do {
tryLong = Math.addExact(Math.multiplyExact(tryLong, 10L), (long) (codePoint - '0'));
if (inFraction) {
fractionDigits++;
}
if (!iterator.hasNext()) {
tryLong *= sign;
if (fractionDigits > 0) {
fraction = true;
}
return;
}
codePoint = iterator.nextInt();
if (!inFraction && codePoint == '.') {
inFraction = true;
codePoint = iterator.nextInt();
}
} while (isDigit(codePoint));
tryLong *= sign;
} catch (ArithmeticException ignored) {
mantissaTooLarge = true;
return;
}
int exponent = 0;
if (codePoint == 'e' || codePoint == 'E') {
codePoint = iterator.nextInt();
boolean negativeExponent = false;
if (codePoint == '+') {
codePoint = iterator.nextInt();
} else if (codePoint == '-') {
codePoint = iterator.nextInt();
negativeExponent = true;
}
try {
while (true) {
exponent = Math.addExact(Math.multiplyExact(exponent, 10), codePoint - '0');
if (!iterator.hasNext()) {
break;
}
codePoint = iterator.nextInt();
}
if (negativeExponent) {
exponent = -exponent;
}
} catch (ArithmeticException ignored) {
exponentTooLarge = true;
}
}
exponent -= fractionDigits;
applyLongExponent(exponent);
}
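// Scales the parsed long by 10^exponent; a negative exponent that does not divide the value
// evenly marks it as fractional, and overflow while scaling up sets exponentTooLarge.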
private void applyLongExponent(int exponent) {
if (exponent < 0) {
long factor = 1L;
while (exponent < 0) {
factor *= 10L;
exponent++;
}
if (tryLong != tryLong / factor * factor) {
fraction = true;
return;
}
tryLong /= factor;
} else {
try {
while (exponent > 0) {
tryLong = Math.multiplyExact(tryLong, 10L);
exponent--;
}
} catch (ArithmeticException ignored) {
exponentTooLarge = true;
}
}
}
@Override
public boolean canReadAsFloat() {
tryReadDouble();
return Float.isFinite(tryDouble.floatValue());
}
@Override
public float readAsFloat() throws TweedDataReadException {
tryReadDouble();
float value = tryDouble.floatValue();
if (Float.isInfinite(value)) {
throw TweedDataReadException.builder()
.message("Number is out of range from " + (-Float.MAX_VALUE) + " to " + Float.MAX_VALUE + " at " + lexerToken)
.recoverable(TweedDataReaderRecoverMode.SKIP)
.build();
}
return value;
}
@Override
public boolean canReadAsDouble() {
tryReadDouble();
return Double.isFinite(tryDouble);
}
@Override
public double readAsDouble() throws TweedDataReadException {
tryReadDouble();
if (Double.isInfinite(tryDouble)) {
throw TweedDataReadException.builder()
.message("Number is out of range form " + (-Double.MAX_VALUE) + " to " + Double.MAX_VALUE + " at " + lexerToken)
.recoverable(TweedDataReaderRecoverMode.SKIP)
.build();
}
return tryDouble;
}
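// Parses the token as a double by accumulating integer digits, fraction digits, and the
// exponent directly from the code points.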
private void tryReadDouble() {
if (tryDouble != null) {
return;
}
boolean negative = false;
PrimitiveIterator.OfInt iterator = lexerToken.content().codePoints().iterator();
int codePoint = iterator.nextInt();
if (codePoint == '-') {
negative = true;
codePoint = iterator.nextInt();
}
double value = 0;
while (isDigit(codePoint)) {
value = value * 10 + (codePoint - '0');
if (!iterator.hasNext()) {
tryDouble = negative ? -1D * value : value;
return;
}
codePoint = iterator.nextInt();
}
if (codePoint == '.') {
double factor = 0.1;
do {
codePoint = iterator.nextInt();
if (!isDigit(codePoint)) {
break;
}
value += factor * (codePoint - '0');
factor /= 10;
} while (iterator.hasNext());
}
if (codePoint == 'e' || codePoint == 'E') {
codePoint = iterator.nextInt();
double factor = 10D;
if (codePoint == '-') {
factor = 0.1D;
codePoint = iterator.nextInt();
} else if (codePoint == '+') {
codePoint = iterator.nextInt();
}
double exponent = 0D;
while (isDigit(codePoint)) {
exponent = exponent * 10 + (codePoint - '0');
if (!iterator.hasNext()) {
break;
}
codePoint = iterator.nextInt();
}
factor = Math.pow(factor, exponent);
value *= factor;
}
tryDouble = value;
}
private boolean isDigit(int codePoint) {
return codePoint >= '0' && codePoint <= '9';
}
@Override
public String toString() {
return "HJSON numeric token [" + lexerToken + "]";
}
};
}
private TweedDataToken createStringToken(HjsonLexerToken lexerToken) {
assert lexerToken.content() != null;
return new TweedDataToken() {
@Override
public boolean canReadAsString() {
return true;
}
@Override
public String readAsString() throws TweedDataReadException {
if (lexerToken.type() == HjsonLexerToken.Type.QUOTELESS_STRING || lexerToken.type() == HjsonLexerToken.Type.MULTILINE_STRING) {
return lexerToken.contentString();
} else if (lexerToken.type() == HjsonLexerToken.Type.JSON_STRING) {
return readJsonString(lexerToken.content());
}
throw TweedDataReadException.builder().message("Unrecognized string token").recoverable(TweedDataReaderRecoverMode.SKIP).build();
}
private String readJsonString(CharSequence input) throws TweedDataReadException {
PrimitiveIterator.OfInt iterator = input.codePoints().iterator();
int quoteCodePoint = iterator.nextInt();
boolean escaped = false;
StringBuilder stringBuilder = new StringBuilder();
while (true) {
int codePoint = iterator.nextInt();
if (escaped) {
escaped = false;
codePoint = getUnescapedCodePoint(codePoint);
} else if (codePoint == quoteCodePoint) {
break;
} else if (codePoint == '\\') {
escaped = true;
}
stringBuilder.appendCodePoint(codePoint);
}
return stringBuilder.toString();
}
private int getUnescapedCodePoint(int codePoint) throws TweedDataReadException {
switch (codePoint) {
case 'n':
return '\n';
case 'r':
return '\r';
case 't':
return '\t';
case 'f':
return '\f';
case 'b':
return '\b';
case '\\':
case '/':
case '"':
case '\'':
return codePoint;
default:
throw TweedDataReadException.builder()
.message("Illegal escape sequence \"\\" + String.copyValueOf(Character.toChars(codePoint)) + "\" in string " + lexerToken)
.recoverable(TweedDataReaderRecoverMode.SKIP)
.build();
}
}
@Override
public String toString() {
return "HJSON string token [" + lexerToken + "]";
}
};
}
private TweedDataReadException createIllegalTokenException(
HjsonLexerToken actualToken,
HjsonLexerToken.Type... expected
) {
return TweedDataReadException.builder().message(
"Illegal token " + actualToken + ", expected any of " +
Arrays.stream(expected).map(Objects::toString).collect(Collectors.joining(", "))
).build();
}
private TweedDataReadException createIllegalStateException() {
return TweedDataReadException.builder().message(
"Internal Error: Parser is in illegal state " + state + " in context " + currentContext()
).build();
}
private HjsonLexerToken peekGeneralLexerToken() throws TweedDataReadException {
if (peekedLexerToken == null) {
peekedLexerToken = lexer.nextGeneralToken();
}
return peekedLexerToken;
}
private HjsonLexerToken peekObjectLexerToken() throws TweedDataReadException {
if (peekedLexerToken == null) {
peekedLexerToken = lexer.nextInnerObjectToken();
}
return peekedLexerToken;
}
private HjsonLexerToken eatGeneralLexerToken() throws TweedDataReadException {
if (peekedLexerToken != null) {
HjsonLexerToken token = peekedLexerToken;
peekedLexerToken = null;
return token;
}
return lexer.nextGeneralToken();
}
private HjsonLexerToken eatObjectLexerToken() throws TweedDataReadException {
if (peekedLexerToken != null) {
HjsonLexerToken token = peekedLexerToken;
peekedLexerToken = null;
return token;
}
return lexer.nextInnerObjectToken();
}
private Context currentContext() {
return contexts.peek();
}
private enum Context {
VALUE,
LIST,
OBJECT,
}
private enum State {
BEFORE_VALUE,
AFTER_VALUE,
BEFORE_OBJECT_KEY,
AFTER_OBJECT_KEY,
}
}

HjsonSerde.java

@@ -0,0 +1,26 @@
package de.siphalor.tweed5.data.hjson;
import de.siphalor.tweed5.dataapi.api.TweedDataReader;
import de.siphalor.tweed5.dataapi.api.TweedDataVisitor;
import de.siphalor.tweed5.dataapi.api.TweedSerde;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
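/**
 * {@link TweedSerde} glue for Hjson. Still a stub in this initial commit: reader and writer
 * creation are not wired up yet.
 */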
public class HjsonSerde implements TweedSerde {
@Override
public TweedDataReader createReader(InputStream inputStream) {
return null;
}
@Override
public TweedDataVisitor createWriter(OutputStream outputStream) throws IOException {
return null;
}
@Override
public String getPreferredFileExtension() {
return "";
}
}

HjsonStringType.java

@@ -0,0 +1,8 @@
package de.siphalor.tweed5.data.hjson;
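/** The quoting styles the writer can choose for strings. */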
public enum HjsonStringType {
INLINE_QUOTELESS,
INLINE_DOUBLE_QUOTE,
INLINE_SINGLE_QUOTE,
MULTILINE_SINGLE_QUOTE,
}

HjsonWriter.java

@@ -0,0 +1,595 @@
package de.siphalor.tweed5.data.hjson;
import de.siphalor.tweed5.dataapi.api.TweedDataWriteException;
import de.siphalor.tweed5.dataapi.api.TweedDataWriter;
import lombok.Data;
import org.jetbrains.annotations.NotNull;
import java.io.IOException;
import java.io.Writer;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
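/**
 * Streaming Hjson writer behind the {@code TweedDataWriter} visitor interface. Indentation,
 * line feeds, and comment styles are configured through {@link Options}.
 */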
public class HjsonWriter implements TweedDataWriter {
private static final int PREFILL_INDENT = 10;
private static final Pattern LINE_FEED_PATTERN = Pattern.compile("\\n|\\r\\n");
private final Writer writer;
private final Options options;
private final Deque<Context> contexts;
private final StringBuilder indentBuffer;
private int currentIndentLevel;
private int currentIndentLength;
public HjsonWriter(Writer writer, Options options) {
this.writer = writer;
this.options = options;
contexts = new LinkedList<>(Collections.singleton(Context.ROOT));
indentBuffer = new StringBuilder(options.indent.length() * PREFILL_INDENT);
for (int i = 0; i < PREFILL_INDENT; i++) {
indentBuffer.append(options.indent);
}
}
@Override
public void visitNull() {
beforeValueWrite();
write("null");
afterValueWrite();
}
@Override
public void visitBoolean(boolean value) {
beforeValueWrite();
write(value ? "true" : "false");
afterValueWrite();
}
@Override
public void visitByte(byte value) {
beforeValueWrite();
write(Byte.toString(value));
afterValueWrite();
}
@Override
public void visitShort(short value) {
beforeValueWrite();
write(Short.toString(value));
afterValueWrite();
}
@Override
public void visitInt(int value) {
beforeValueWrite();
write(Integer.toString(value));
afterValueWrite();
}
@Override
public void visitLong(long value) {
beforeValueWrite();
write(Long.toString(value));
afterValueWrite();
}
@Override
public void visitFloat(float value) {
beforeValueWrite();
write(Float.toString(value));
afterValueWrite();
}
@Override
public void visitDouble(double value) {
beforeValueWrite();
write(Double.toString(value));
afterValueWrite();
}
@Override
public void visitString(@NotNull String value) {
beforeValueWrite();
writeStringValue(getValueStringStringType(value), value);
afterValueWrite();
}
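// Picks a quoting style for a string value: double quotes whenever the text could be mistaken
// for a keyword or number or has leading/trailing whitespace, quoteless when no punctuation,
// line breaks, tabs, or escapes are involved, ''' for multiline text that contains no '''
// sequence, and otherwise single or double quotes depending on which quote characters occur.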
private HjsonStringType getValueStringStringType(String value) {
if (value.isEmpty() || "true".equals(value) || "false".equals(value) || "null".equals(value)) {
return HjsonStringType.INLINE_DOUBLE_QUOTE;
}
int firstCodePoint = value.codePointAt(0);
if (Character.isDigit(firstCodePoint) || Character.isWhitespace(firstCodePoint)) {
return HjsonStringType.INLINE_DOUBLE_QUOTE;
}
int lastCodePoint = value.codePointBefore(value.length());
if (Character.isWhitespace(lastCodePoint)) {
return HjsonStringType.INLINE_DOUBLE_QUOTE;
}
boolean singleQuoteFound = false;
boolean doubleQuoteFound = false;
int singleQuoteCount = 0;
boolean tripleSingleQuoteFound = false;
boolean punctuatorFound = false;
boolean newLineFound = false;
boolean escapeRequiredFound = false;
boolean tabFound = false;
PrimitiveIterator.OfInt codePointIterator = value.codePoints().iterator();
while (codePointIterator.hasNext()) {
int codePoint = codePointIterator.nextInt();
if (codePoint == '\'') {
singleQuoteFound = true;
if (++singleQuoteCount >= 3) {
tripleSingleQuoteFound = true;
}
} else {
singleQuoteCount = 0;
if (codePoint == '"') {
doubleQuoteFound = true;
} else if (codePoint == '\n' || codePoint == '\r') {
newLineFound = true;
} else if (!punctuatorFound && isPunctuatorCodePoint(codePoint)) {
punctuatorFound = true;
} else if (codePoint == '\t') {
tabFound = true;
} else if (codePoint == '\\' || codePoint < 0x20) {
escapeRequiredFound = true;
}
}
}
if (!punctuatorFound && !newLineFound && !tabFound && !escapeRequiredFound) {
return HjsonStringType.INLINE_QUOTELESS;
}
if (newLineFound && !tripleSingleQuoteFound) {
return HjsonStringType.MULTILINE_SINGLE_QUOTE;
}
if (singleQuoteFound || !doubleQuoteFound) {
return HjsonStringType.INLINE_DOUBLE_QUOTE;
}
return HjsonStringType.INLINE_SINGLE_QUOTE;
}
@Override
public void visitEmptyList() {
beforeValueWrite();
write("[]");
afterValueWrite();
}
@Override
public void visitListStart() {
beforeValueWrite();
write("[");
writeLineFeed();
pushContext(Context.LIST);
}
@Override
public void visitListEnd() {
requireContext(Context.LIST);
popContext();
writeCurrentIndent();
write("]");
afterValueWrite();
}
@Override
public void visitEmptyMap() {
beforeValueWrite();
write("{}");
afterValueWrite();
}
@Override
public void visitMapStart() {
beforeValueWrite();
write("{");
writeLineFeed();
pushContext(Context.MAP);
}
@Override
public void visitMapEntryKey(String key) {
requireContext(Context.MAP);
writeCurrentIndent();
writeStringValue(getMapEntryKeyStringType(key), key);
write(": ");
pushContext(Context.MAP_ENTRY);
}
private HjsonStringType getMapEntryKeyStringType(String key) {
int firstCodePoint = key.codePointAt(0);
if (firstCodePoint == '\'') {
return HjsonStringType.INLINE_DOUBLE_QUOTE;
} else if (firstCodePoint == '"') {
return HjsonStringType.INLINE_SINGLE_QUOTE;
}
if (key.codePoints().allMatch(this::isValidMapEntryKeyCodePoint)) {
return HjsonStringType.INLINE_QUOTELESS;
}
return HjsonStringType.INLINE_DOUBLE_QUOTE;
}
private boolean isValidMapEntryKeyCodePoint(int codePoint) {
if (codePoint < 0x21) {
return false;
}
return !isPunctuatorCodePoint(codePoint);
}
private boolean isPunctuatorCodePoint(int codePoint) {
return codePoint == ',' || codePoint == ':' || codePoint == '[' || codePoint == ']' || codePoint == '{' || codePoint == '}';
}
@Override
public void visitMapEnd() {
requireContext(Context.MAP);
popContext();
writeCurrentIndent();
write("}");
afterValueWrite();
}
@Override
public void visitComment(String comment) {
Matcher lineFeedMatcher = LINE_FEED_PATTERN.matcher(comment);
if (lineFeedMatcher.find()) {
// Multiline
writeMultilineCommentStart(options.multilineCommentType);
int begin = 0;
do {
writeCommentLine(options.multilineCommentType, comment, begin, lineFeedMatcher.start());
begin = lineFeedMatcher.end();
} while (lineFeedMatcher.find(begin));
writeCommentLine(options.multilineCommentType, comment, begin, comment.length());
writeMultilineCommentEnd(options.multilineCommentType);
} else {
// Inline
writeMultilineCommentStart(options.inlineCommentType);
writeCommentLine(options.inlineCommentType, comment, 0, comment.length());
writeMultilineCommentEnd(options.inlineCommentType);
}
}
private void writeMultilineCommentStart(HjsonCommentType commentType) {
if (commentType == HjsonCommentType.BLOCK) {
writeCurrentIndentIfApplicable();
write("/*");
writeLineFeed();
}
}
private void writeMultilineCommentEnd(HjsonCommentType commentType) {
if (commentType == HjsonCommentType.BLOCK) {
writeCurrentIndent();
write(" */");
writeLineFeed();
if (isInInlineContext()) {
writeCurrentIndent();
}
}
}
private void writeCommentLine(HjsonCommentType commentType, CharSequence text, int begin, int end) {
writeCurrentIndentIfApplicable();
write(getCommentLineStart(commentType));
write(text, begin, end);
writeLineFeed();
}
private CharSequence getCommentLineStart(HjsonCommentType commentType) {
switch (commentType) {
case HASH:
return "# ";
case SLASHES:
return "// ";
case BLOCK:
return " * ";
default:
throw new IllegalStateException("Unknown comment type: " + commentType);
}
}
private void beforeValueWrite() {
requireValueContext();
writeCurrentIndentIfApplicable();
}
private void afterValueWrite() {
switch (currentContext()) {
case ROOT:
case LIST:
writeLineFeed();
break;
case MAP_ENTRY:
popContext();
writeLineFeed();
break;
default:
break;
}
}
private void writeStringValue(HjsonStringType stringType, String text) {
switch (stringType) {
case INLINE_QUOTELESS:
write(text);
break;
case INLINE_DOUBLE_QUOTE:
writeJsonString(text, '"');
break;
case INLINE_SINGLE_QUOTE:
writeJsonString(text, '\'');
break;
case MULTILINE_SINGLE_QUOTE:
writeMultilineString(text);
break;
}
}
private void writeJsonString(String text, int quoteCodepoint) {
writeCodepoint(quoteCodepoint);
text.codePoints().forEach(codepoint -> {
if (codepoint == quoteCodepoint) {
write("\\");
writeCodepoint(codepoint);
} else {
writeJsonStringQuotePoint(codepoint);
}
});
writeCodepoint(quoteCodepoint);
}
private void writeJsonStringQuotePoint(int codepoint) {
switch (codepoint) {
case '\\':
write("\\\\");
break;
case '\b':
write("\\b");
break;
case '\f':
write("\\f");
break;
case '\n':
write("\\n");
break;
case '\r':
write("\\r");
break;
case '\t':
write("\\t");
break;
default:
if (isValidJsonStringCodepoint(codepoint)) {
writeCodepoint(codepoint);
} else {
write(codepointToHexEscape(codepoint));
}
break;
}
}
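// Builds the six-character hex escape for a BMP code point by filling in its four hex nibbles
// from least to most significant.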
private String codepointToHexEscape(int codepoint) {
StringBuilder hexEscape = new StringBuilder("\\u0000");
hexEscape.replace(5, 6, nibbleToHex(codepoint & 0xF));
codepoint >>= 4;
hexEscape.replace(4, 5, nibbleToHex(codepoint & 0xF));
codepoint >>= 4;
hexEscape.replace(3, 4, nibbleToHex(codepoint & 0xF));
codepoint >>= 4;
hexEscape.replace(2, 3, nibbleToHex(codepoint & 0xF));
return hexEscape.toString();
}
private String nibbleToHex(int value) {
switch (value) {
case 0x0: return "0";
case 0x1: return "1";
case 0x2: return "2";
case 0x3: return "3";
case 0x4: return "4";
case 0x5: return "5";
case 0x6: return "6";
case 0x7: return "7";
case 0x8: return "8";
case 0x9: return "9";
case 0xA: return "A";
case 0xB: return "B";
case 0xC: return "C";
case 0xD: return "D";
case 0xE: return "E";
case 0xF: return "F";
default:
throw new IllegalArgumentException("Invalid nibble value");
}
}
private boolean isValidJsonStringCodepoint(int codepoint) {
return codepoint >= 0x20 && codepoint <= 0x10FFFF && codepoint != 0x21 && codepoint != 0x5C;
}
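// Writes a ''' multiline string. When the value follows a map key, the block starts on a new,
// further-indented line; every line of the text is emitted at the current indent.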
private void writeMultilineString(String text) {
boolean inInlineContext = isInInlineContext();
if (inInlineContext) {
writeLineFeed();
increaseIndent();
}
write("'''");
writeLineFeed();
Matcher matcher = LINE_FEED_PATTERN.matcher(text);
int begin = 0;
while (matcher.find(begin)) {
writeCurrentIndent();
write(text, begin, matcher.start());
writeLineFeed();
begin = matcher.end();
}
writeCurrentIndent();
write(text, begin, text.length());
writeLineFeed();
writeCurrentIndent();
write("'''");
if (inInlineContext) {
decreaseIndent();
}
}
private boolean isInInlineContext() {
return currentContext() == Context.MAP_ENTRY;
}
private void writeCurrentIndentIfApplicable() {
if (shouldWriteIndentInContext(currentContext())) {
writeCurrentIndent();
}
}
private boolean shouldWriteIndentInContext(Context context) {
return context == Context.ROOT || context == Context.LIST || context == Context.MAP;
}
private void requireValueContext() {
requireContext(Context.ROOT, Context.LIST, Context.MAP_ENTRY);
}
private void requireContext(Context... allowedContexts) {
Context currentContext = currentContext();
for (Context allowedContext : allowedContexts) {
if (currentContext == allowedContext) {
return;
}
}
throw new TweedDataWriteException(
"Writer is not in correct context, expected any of " + Arrays.toString(allowedContexts) +
" but currently in " + currentContext
);
}
private Context currentContext() {
Context currentContext = contexts.peek();
if (currentContext == null) {
throw new IllegalStateException("Writing has terminated");
}
return currentContext;
}
private void pushContext(Context context) {
switch (context) {
case ROOT:
throw new IllegalArgumentException("Root context may not be pushed");
case LIST:
case MAP:
increaseIndent();
break;
default:
break;
}
contexts.push(context);
}
private void popContext() {
switch (currentContext()) {
case LIST:
case MAP:
decreaseIndent();
break;
default:
break;
}
contexts.pop();
}
private void increaseIndent() {
currentIndentLevel++;
currentIndentLength = currentIndentLevel * options.indent.length();
ensureIndentBufferLength();
}
private void ensureIndentBufferLength() {
while (currentIndentLength > indentBuffer.length()) {
indentBuffer.append(options.indent);
}
}
private void decreaseIndent() {
if (currentIndentLevel == 0) {
throw new IllegalStateException("Cannot decrease indent level, already at 0");
}
currentIndentLevel--;
currentIndentLength = currentIndentLevel * options.indent.length();
}
private void writeCurrentIndent() {
write(indentBuffer, 0, currentIndentLength);
}
private void writeLineFeed() {
write(options.lineFeed);
}
private void write(CharSequence text, int begin, int end) {
try {
writer.append(text, begin, end);
} catch (IOException e) {
throw createExceptionForIOException(e);
}
}
private void write(CharSequence text) {
try {
writer.append(text);
} catch (IOException e) {
throw createExceptionForIOException(e);
}
}
private void writeCodepoint(int codepoint) {
try {
writer.write(codepoint);
} catch (IOException e) {
throw createExceptionForIOException(e);
}
}
private TweedDataWriteException createExceptionForIOException(IOException e) {
return new TweedDataWriteException("Writing Hjson failed", e);
}
private enum Context {
ROOT,
LIST,
MAP,
MAP_ENTRY,
}
@Data
public static class Options {
private boolean doubleQuotedInlineStrings = true;
private String indent = "\t";
private String lineFeed = "\n";
private HjsonCommentType inlineCommentType = HjsonCommentType.SLASHES;
private HjsonCommentType multilineCommentType = HjsonCommentType.BLOCK;
private HjsonStringType preferredInlineStringType = HjsonStringType.INLINE_QUOTELESS;
public void inlineCommentType(HjsonCommentType commentType) {
if (commentType.block()) {
throw new IllegalArgumentException("Inline comment type must not be a block comment type: " + commentType);
}
this.inlineCommentType = commentType;
}
}
}

HjsonLexerTest.java

@@ -0,0 +1,141 @@
package de.siphalor.tweed5.data.hjson;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import java.io.StringReader;
import static org.junit.jupiter.api.Assertions.*;
@Timeout(value = 10, threadMode = Timeout.ThreadMode.SEPARATE_THREAD)
class HjsonLexerTest {
@Test
void generalEof() {
HjsonLexer lexer = createLexer("");
assertGeneralEof(lexer, new HjsonReadPosition(1, 1));
}
@Test
void innerObjectEof() {
HjsonLexer lexer = createLexer("");
assertEquals(
new HjsonLexerToken(
HjsonLexerToken.Type.EOF,
new HjsonReadPosition(1, 1),
new HjsonReadPosition(1, 1),
null
),
assertDoesNotThrow(lexer::nextInnerObjectToken)
);
}
@ParameterizedTest
@CsvSource(
delimiter = ';',
value = {
"[;BRACKET_OPEN",
"];BRACKET_CLOSE",
"{;BRACE_OPEN",
"};BRACE_CLOSE",
":;COLON",
",;COMMA",
}
)
void generalTerminalToken(String input, HjsonLexerToken.Type tokenType) {
HjsonLexer lexer = createLexer(input);
assertEquals(new HjsonLexerToken(
tokenType,
new HjsonReadPosition(1, 1),
new HjsonReadPosition(1, 1),
null
), assertDoesNotThrow(lexer::nextGeneralToken));
assertGeneralEof(lexer, new HjsonReadPosition(1, 2));
}
@ParameterizedTest
@CsvSource(
delimiter = ';',
value = {
"[;BRACKET_OPEN",
"];BRACKET_CLOSE",
"{;BRACE_OPEN",
"};BRACE_CLOSE",
":;COLON",
",;COMMA",
}
)
void innerObjectTerminalToken(String input, HjsonLexerToken.Type tokenType) {
HjsonLexer lexer = createLexer(input);
assertEquals(new HjsonLexerToken(
tokenType,
new HjsonReadPosition(1, 1),
new HjsonReadPosition(1, 1),
null
), assertDoesNotThrow(lexer::nextInnerObjectToken));
assertGeneralEof(lexer, new HjsonReadPosition(1, 2));
}
@ParameterizedTest
@CsvSource({
"null,NULL",
"true,TRUE",
"false,FALSE",
})
void generalConstants(String constant, HjsonLexerToken.Type tokenType) {
HjsonLexer lexer = createLexer(constant);
assertEquals(new HjsonLexerToken(
tokenType,
new HjsonReadPosition(1, 1),
new HjsonReadPosition(1, constant.length()),
constant
), assertDoesNotThrow(lexer::nextGeneralToken));
assertGeneralEof(lexer, new HjsonReadPosition(1, constant.length() + 1));
}
@ParameterizedTest
@CsvSource(
value = {
"123,0,3",
" 123 ,2,5",
"123.45,0,6",
"500e8,0,5",
"500E8,0,5",
" 789.45e-9 ,1,10",
"-45e+8,0,6",
" -12.34E-81 ,2,12",
},
ignoreLeadingAndTrailingWhitespace = false
)
void generalNumber(String input, int begin, int end) {
HjsonLexer lexer = createLexer(input);
assertEquals(new HjsonLexerToken(
HjsonLexerToken.Type.NUMBER,
new HjsonReadPosition(1, begin + 1),
new HjsonReadPosition(1, end),
input.substring(begin, end)
), assertDoesNotThrow(lexer::nextGeneralToken));
assertGeneralEof(lexer, new HjsonReadPosition(1, input.length() + 1));
}
private HjsonLexer createLexer(String input) {
return new HjsonLexer(new StringReader(input));
}
private static void assertGeneralEof(HjsonLexer lexer, HjsonReadPosition position) {
assertEquals(
new HjsonLexerToken(HjsonLexerToken.Type.EOF, position, position, null),
assertDoesNotThrow(lexer::nextGeneralToken)
);
}
}

HjsonReaderTest.java

@@ -0,0 +1,175 @@
package de.siphalor.tweed5.data.hjson;
import de.siphalor.tweed5.dataapi.api.TweedDataReadException;
import de.siphalor.tweed5.dataapi.api.TweedDataToken;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.junit.jupiter.params.provider.ValueSource;
import java.io.StringReader;
import static org.junit.jupiter.api.Assertions.*;
@Timeout(value = 10, threadMode = Timeout.ThreadMode.SEPARATE_THREAD)
class HjsonReaderTest {
private static final double DOUBLE_PRECISION = 0.000000001D;
@ParameterizedTest
@CsvSource({
"127,127",
"-128,-128",
"1.23e2,123",
"1230E-1,123",
})
void testByte(String input, byte expected) {
HjsonReader hjsonReader = setupReaderWithLexer(input);
TweedDataToken token = assertDoesNotThrow(hjsonReader::readToken);
assertEquals(expected, assertDoesNotThrow(token::readAsByte));
assertTrue(token.canReadAsByte());
}
@ParameterizedTest
@ValueSource(strings = {
"128",
"1.23",
"-129",
"1.23e3",
"123E-1",
})
void testByteIllegal(String input) {
HjsonReader hjsonReader = setupReaderWithLexer(input);
TweedDataToken token = assertDoesNotThrow(hjsonReader::readToken);
assertThrows(TweedDataReadException.class, token::readAsByte);
assertFalse(token.canReadAsByte());
}
@ParameterizedTest
@CsvSource(value = {
"123,123",
"123e4,1230000",
"9.87e3,9870",
"45670E-1,4567",
"-123.56E+5,-12356000",
})
void testInteger(String input, int expected) {
HjsonReader hjsonReader = setupReaderWithLexer(input);
TweedDataToken token = assertDoesNotThrow(hjsonReader::readToken);
assertEquals(expected, assertDoesNotThrow(token::readAsInt));
assertTrue(token.canReadAsInt());
}
@ParameterizedTest
@CsvSource(
ignoreLeadingAndTrailingWhitespace = false,
value = {
"123,123",
"12.34,12.34",
"123456789.123456789,123456789.123456789",
"1234.057e8,123405700000",
"987.654E-5,0.00987654",
}
)
void testDouble(String input, double expected) {
HjsonReader hjsonReader = setupReaderWithLexer(input);
TweedDataToken token = assertDoesNotThrow(hjsonReader::readToken);
assertEquals(expected, assertDoesNotThrow(token::readAsDouble), DOUBLE_PRECISION);
assertTrue(token.canReadAsDouble());
}
@ParameterizedTest
@ValueSource(strings = {
"{test:abc\ncdef:123\na:true}",
"\n{test: abc \ncdef:123,a\n:\ntrue\n}",
"// \n{\n\ttest:abc\ncdef:123e0,a: true ,}",
})
void testObject(String input) {
HjsonReader hjsonReader = setupReaderWithLexer(input);
TweedDataToken token;
token = assertDoesNotThrow(hjsonReader::readToken);
assertTrue(token.isMapStart());
token = assertDoesNotThrow(hjsonReader::readToken);
assertTrue(token.isMapEntryKey());
assertTrue(token.canReadAsString());
assertEquals("test", assertDoesNotThrow(token::readAsString));
token = assertDoesNotThrow(hjsonReader::readToken);
assertTrue(token.isMapEntryValue());
assertTrue(token.canReadAsString());
assertEquals("abc", assertDoesNotThrow(token::readAsString));
token = assertDoesNotThrow(hjsonReader::readToken);
assertTrue(token.isMapEntryKey());
assertTrue(token.canReadAsString());
assertEquals("cdef", assertDoesNotThrow(token::readAsString));
token = assertDoesNotThrow(hjsonReader::readToken);
assertTrue(token.isMapEntryValue());
assertTrue(token.canReadAsInt());
assertEquals(123, assertDoesNotThrow(token::readAsInt));
token = assertDoesNotThrow(hjsonReader::readToken);
assertTrue(token.isMapEntryKey());
assertTrue(token.canReadAsString());
assertEquals("a", assertDoesNotThrow(token::readAsString));
token = assertDoesNotThrow(hjsonReader::readToken);
assertTrue(token.isMapEntryValue());
assertTrue(token.canReadAsBoolean());
assertEquals(true, assertDoesNotThrow(token::readAsBoolean));
token = assertDoesNotThrow(hjsonReader::readToken);
assertTrue(token.isMapEnd());
}
@ParameterizedTest
@ValueSource(strings = {
"[12,34,56]",
"[12\n34\n\t56]",
"[\n12\n\t\t34\n\t56\n]",
"[\n12,34\n\t56\n]",
})
void testArray(String input) {
HjsonReader hjsonReader = setupReaderWithLexer(input);
TweedDataToken token;
token = assertDoesNotThrow(hjsonReader::readToken);
assertTrue(token.isListStart());
token = assertDoesNotThrow(hjsonReader::readToken);
assertTrue(token.isListValue());
assertTrue(token.canReadAsInt());
assertEquals(12, assertDoesNotThrow(token::readAsInt));
token = assertDoesNotThrow(hjsonReader::readToken);
assertTrue(token.isListValue());
assertTrue(token.canReadAsInt());
assertEquals(34, assertDoesNotThrow(token::readAsInt));
token = assertDoesNotThrow(hjsonReader::readToken);
assertTrue(token.isListValue());
assertTrue(token.canReadAsInt());
assertEquals(56, assertDoesNotThrow(token::readAsInt));
token = assertDoesNotThrow(hjsonReader::readToken);
assertTrue(token.isListEnd());
}
@ParameterizedTest
@ValueSource(strings = {
"[]",
"[\n\n]",
"[ ]",
"[\n\t\t]",
})
void testEmptyArray(String input) {
HjsonReader hjsonReader = setupReaderWithLexer(input);
TweedDataToken token;
token = assertDoesNotThrow(hjsonReader::readToken);
assertTrue(token.isListStart());
token = assertDoesNotThrow(hjsonReader::readToken);
assertTrue(token.isListEnd());
}
private HjsonReader setupReaderWithLexer(String input) {
return new HjsonReader(new HjsonLexer(new StringReader(input)));
}
}

HjsonWriterTest.java

@@ -0,0 +1,57 @@
package de.siphalor.tweed5.data.hjson;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.io.StringWriter;
import static org.junit.jupiter.api.Assertions.assertEquals;
class HjsonWriterTest {
private HjsonWriter writer;
private StringWriter stringWriter;
@BeforeEach
void setUp() {
stringWriter = new StringWriter();
}
@Test
void complex() {
setUpHjsonWriter(new HjsonWriter.Options());
writer.visitMapStart();
writer.visitMapEntryKey("test");
writer.visitBoolean(false);
writer.visitMapEntryKey("null");
writer.visitNull();
writer.visitMapEntryKey("a list");
writer.visitListStart();
writer.visitInt(12);
writer.visitInt(34);
writer.visitString("Testing\n multiline\nstuff");
writer.visitListEnd();
writer.visitMapEnd();
assertEquals(
"{\n" +
"\ttest: false\n" +
"\tnull: null\n" +
"\t\"a list\": [\n" +
"\t\t12\n" +
"\t\t34\n" +
"\t\t'''\n" +
"\t\tTesting\n" +
"\t\t multiline\n" +
"\t\tstuff\n" +
"\t\t'''\n" +
"\t]\n" +
"}\n",
stringWriter.toString()
);
}
void setUpHjsonWriter(HjsonWriter.Options options) {
writer = new HjsonWriter(stringWriter, options);
}
}