Skip to content

Commit

Permalink
transfer zimbra changes. Make sure code compiles
Browse files Browse the repository at this point in the history
  • Loading branch information
grishick committed Feb 14, 2017
1 parent 81da53e commit 659b6a2
Showing 1 changed file with 79 additions and 35 deletions.
114 changes: 79 additions & 35 deletions src/main/java/net/fortuna/ical4j/data/CalendarParserImpl.java
Expand Up @@ -33,6 +33,7 @@

import net.fortuna.ical4j.model.Calendar;
import net.fortuna.ical4j.model.Component;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand Down Expand Up @@ -87,6 +88,44 @@ public final void parse(final InputStream in, final ContentHandler handler)
parse(new InputStreamReader(in), handler);
}

/**
 * Parses a single iCalendar VCALENDAR object from the specified stream tokeniser.
 *
 * NOTE(review): this method starts at the ':' of "BEGIN:VCALENDAR" — the BEGIN
 * word token itself must already have been consumed by the caller (see
 * parseCalendarList, which consumes it via assertToken/absorbWhitespace).
 *
 * @param tokeniser the tokeniser to read tokens from, positioned just after the BEGIN word
 * @param in the reader underlying the tokeniser (used for line-number/error reporting)
 * @param handler the content handler that receives startCalendar/endCalendar and nested events
 * @throws IOException when unable to read from the stream
 * @throws ParseException where an iCalendar data error is encountered
 * @throws URISyntaxException where a URI value in the calendar is invalid
 * @throws ParserException when the stream does not match the expected grammar
 */
private void parseCalendar(final StreamTokenizer tokeniser, Reader in,
    final ContentHandler handler) throws IOException, ParseException,
    URISyntaxException, ParserException {

    // ":VCALENDAR" — completes the BEGIN line consumed by the caller
    assertToken(tokeniser, in, ':');

    // VCALENDAR name matched case-insensitively (ignoreCase=true)
    assertToken(tokeniser, in, Calendar.VCALENDAR, true, false);

    assertToken(tokeniser, in, StreamTokenizer.TT_EOL);

    handler.startCalendar();

    // parse calendar properties..
    propertyListParser.parse(tokeniser, in, handler);

    // parse components..
    componentListParser.parse(tokeniser, in, handler);

    // END:VCALENDAR — the END word is presumably consumed by componentListParser
    // (hence the commented-out assert); only ':' and the VCALENDAR name are checked here.
    // assertToken(tokeniser,Calendar.END);

    assertToken(tokeniser, in, ':');

    assertToken(tokeniser, in, Calendar.VCALENDAR, true, false);

    handler.endCalendar();
}
/**
* {@inheritDoc}
*/
Expand All @@ -107,31 +146,9 @@ public final void parse(final Reader in, final ContentHandler handler)
tokeniser.whitespaceChars(0, 0);
tokeniser.quoteChar('"');

// BEGIN:VCALENDAR
assertToken(tokeniser, in, Calendar.BEGIN, false, true);

assertToken(tokeniser, in, ':');

assertToken(tokeniser, in, Calendar.VCALENDAR, true, false);

assertToken(tokeniser, in, StreamTokenizer.TT_EOL);

handler.startCalendar();

// parse calendar properties..
propertyListParser.parse(tokeniser, in, handler);

// parse components..
componentListParser.parse(tokeniser, in, handler);

// END:VCALENDAR
// assertToken(tokeniser,Calendar.END);
parseCalendarList(tokeniser, in, handler);

assertToken(tokeniser, in, ':');

assertToken(tokeniser, in, Calendar.VCALENDAR, true, false);

handler.endCalendar();
} catch (Exception e) {

if (e instanceof IOException) {
Expand All @@ -145,6 +162,27 @@ public final void parse(final Reader in, final ContentHandler handler)
}
}

/**
 * Parses a list of one or more iCalendar VCALENDAR objects from the specified
 * stream tokeniser, delegating each calendar to {@link #parseCalendar} until
 * end of stream is reached.
 *
 * @param tokeniser the tokeniser to read tokens from
 * @param in the reader underlying the tokeniser
 * @param handler the content handler that receives parse events
 * @throws IOException when unable to read from the stream
 * @throws ParseException where an iCalendar data error is encountered
 * @throws URISyntaxException where a URI value in a calendar is invalid
 * @throws ParserException when the stream does not match the expected grammar
 */
private void parseCalendarList(final StreamTokenizer tokeniser, Reader in,
    final ContentHandler handler) throws IOException, ParseException,
    URISyntaxException, ParserException {

    // BEGIN:VCALENDAR — the first BEGIN word is consumed here; between calendars,
    // absorbWhitespace() consumes trailing EOLs plus the next BEGIN word (or hits EOF).
    for (int token = assertToken(tokeniser, in, Calendar.BEGIN, false, true);
            token != StreamTokenizer.TT_EOF;
            token = absorbWhitespace(tokeniser, in)) {
        parseCalendar(tokeniser, in, handler);
    }
}
/**
* Parses an iCalendar property list from the specified stream tokeniser.
*
Expand Down Expand Up @@ -414,16 +452,18 @@ private void parse(final StreamTokenizer tokeniser, Reader in,
* @throws IOException when unable to read from stream
* @throws ParserException when next token in the stream does not match the expected token
*/
private void assertToken(final StreamTokenizer tokeniser, Reader in, final int token)
private int assertToken(final StreamTokenizer tokeniser, Reader in, final int token)
throws IOException, ParserException {

if (nextToken(tokeniser, in) != token) {
int ntok = nextToken(tokeniser, in);
if (ntok != token) {
throw new ParserException(MessageFormat.format(UNEXPECTED_TOKEN_MESSAGE, token, tokeniser.ttype), getLineNumber(tokeniser, in));
}

if (log.isDebugEnabled()) {
log.debug("[" + token + "]");
}
return ntok;
}

/**
Expand All @@ -434,30 +474,32 @@ private void assertToken(final StreamTokenizer tokeniser, Reader in, final int t
* @throws IOException
* @throws ParserException
*/
private void assertToken(final StreamTokenizer tokeniser, Reader in, final String token)
private int assertToken(final StreamTokenizer tokeniser, Reader in, final String token)
throws IOException, ParserException {
assertToken(tokeniser, in, token, false, false);
return assertToken(tokeniser, in, token, false, false);
}

/**
* Asserts that the next token in the stream matches the specified token.
*
* @param tokeniser stream tokeniser to perform assertion on
* @param token expected token
* @return
* @throws IOException when unable to read from stream
* @throws ParserException when next token in the stream does not match the expected token
*/
private void assertToken(final StreamTokenizer tokeniser, Reader in,
private int assertToken(final StreamTokenizer tokeniser, Reader in,
final String token, final boolean ignoreCase, final boolean isBeginToken) throws IOException,
ParserException {

// ensure next token is a word token..
String sval;
int ntok;
if(isBeginToken) {
skipNewLines(tokeniser, in, token);
ntok = skipNewLines(tokeniser, in, token);
sval = getSvalIgnoringBom(tokeniser, in, token);
} else {
assertToken(tokeniser, in, StreamTokenizer.TT_WORD);
ntok = assertToken(tokeniser, in, StreamTokenizer.TT_WORD);
sval = tokeniser.sval;
}

Expand All @@ -473,6 +515,7 @@ else if (!token.equals(sval)) {
if (log.isDebugEnabled()) {
log.debug("[" + token + "]");
}
return ntok;
}

/**
Expand All @@ -484,11 +527,10 @@ else if (!token.equals(sval)) {
* @throws ParserException
* @throws IOException
*/
private void skipNewLines(StreamTokenizer tokeniser, Reader in, String token) throws ParserException, IOException {
private int skipNewLines(StreamTokenizer tokeniser, Reader in, String token) throws ParserException, IOException {
for (int i = 0;; i++) {
try {
assertToken(tokeniser, in, StreamTokenizer.TT_WORD);
break;
return assertToken(tokeniser, in, StreamTokenizer.TT_WORD);
} catch (ParserException exc) {
//Skip a maximum of 10 newlines, linefeeds etc at the beginning
if (i == IGNORE_BEGINNING_NON_WORD_COUNT) {
Expand Down Expand Up @@ -523,16 +565,18 @@ private String getSvalIgnoringBom(StreamTokenizer tokeniser, Reader in, String t
* @param tokeniser
* @throws IOException
*/
private void absorbWhitespace(final StreamTokenizer tokeniser, Reader in) throws IOException, ParserException {
private int absorbWhitespace(final StreamTokenizer tokeniser, Reader in) throws IOException, ParserException {
// HACK: absorb extraneous whitespace between components (KOrganizer)..
while (nextToken(tokeniser, in) == StreamTokenizer.TT_EOL) {
int ntok;
while ((ntok = nextToken(tokeniser, in)) == StreamTokenizer.TT_EOL) {
if (log.isTraceEnabled()) {
log.trace("Absorbing extra whitespace..");
}
}
if (log.isTraceEnabled()) {
log.trace("Aborting: absorbing extra whitespace complete");
}
return ntok;
}

/**
Expand Down

0 comments on commit 659b6a2

Please sign in to comment.