微信公众号搜"智元新知"关注
微信扫一扫可直接关注哦!

org.yaml.snakeyaml.tokens.StreamStartToken的实例源码

项目:snake-yaml    文件ScannerImplTest.java   
/**
 * Scans "string: abcd" and verifies the exact token sequence produced by
 * {@link ScannerImpl}: STREAM-START, BLOCK-MAPPING-START, KEY, scalar "string",
 * VALUE, scalar "abcd", BLOCK-END, STREAM-END.
 *
 * NOTE(review): the scraped page had dropped constructor arguments
 * (ScalarToken end mark, Mark int fields) and mangled ValueToken's casing;
 * restored to match the upstream snakeyaml test.
 */
public void testGetToken() {
    String data = "string: abcd";
    StreamReader reader = new StreamReader(data);
    Scanner scanner = new ScannerImpl(reader);
    // Token equality ignores marks in this test, so a single dummy mark suffices.
    Mark dummy = new Mark("dummy", 0, 0, 0, "", 0);
    LinkedList<Token> etalonTokens = new LinkedList<Token>();
    etalonTokens.add(new StreamStartToken(dummy, dummy));
    etalonTokens.add(new BlockMappingStartToken(dummy, dummy));
    etalonTokens.add(new KeyToken(dummy, dummy));
    etalonTokens.add(new ScalarToken("string", true, dummy, dummy, (char) 0));
    etalonTokens.add(new ValueToken(dummy, dummy));
    etalonTokens.add(new ScalarToken("abcd", true, dummy, dummy, (char) 0));
    etalonTokens.add(new BlockEndToken(dummy, dummy));
    etalonTokens.add(new StreamEndToken(dummy, dummy));
    // Consume tokens one by one, checking the id before pulling each token.
    while (!etalonTokens.isEmpty() && scanner.checkToken(etalonTokens.get(0).getTokenId())) {
        assertEquals(etalonTokens.removeFirst(), scanner.getToken());
    }
    // checkToken() with no ids asks "is there any token left?" — there must not be.
    assertFalse("Must contain no more tokens: " + scanner.getToken(),
            scanner.checkToken(new Token.ID[0]));
}
项目:snakeyaml    文件ScannerImplTest.java   
/**
 * Scans "string: abcd" and verifies the exact token sequence produced by
 * {@link ScannerImpl}.
 *
 * NOTE(review): the scraped page truncated this snippet mid-method, fusing the
 * Mark construction with the trailing checkToken() call; reconstructed from the
 * identical test in the sibling "snake-yaml" listing above — confirm against
 * the upstream repository.
 */
public void testGetToken() {
    String data = "string: abcd";
    StreamReader reader = new StreamReader(data);
    Scanner scanner = new ScannerImpl(reader);
    Mark dummy = new Mark("dummy", 0, 0, 0, "", 0);
    LinkedList<Token> etalonTokens = new LinkedList<Token>();
    etalonTokens.add(new StreamStartToken(dummy, dummy));
    etalonTokens.add(new BlockMappingStartToken(dummy, dummy));
    etalonTokens.add(new KeyToken(dummy, dummy));
    etalonTokens.add(new ScalarToken("string", true, dummy, dummy, (char) 0));
    etalonTokens.add(new ValueToken(dummy, dummy));
    etalonTokens.add(new ScalarToken("abcd", true, dummy, dummy, (char) 0));
    etalonTokens.add(new BlockEndToken(dummy, dummy));
    etalonTokens.add(new StreamEndToken(dummy, dummy));
    while (!etalonTokens.isEmpty() && scanner.checkToken(etalonTokens.get(0).getTokenId())) {
        assertEquals(etalonTokens.removeFirst(), scanner.getToken());
    }
    assertFalse("Must contain no more tokens: " + scanner.getToken(),
            scanner.checkToken(new Token.ID[0]));
}
项目:AndroidApktool    文件ParserImpl.java   
/**
 * Produces the stream-start event that opens every parse.
 * Consumes the scanner's STREAM-START token and moves the parser to the
 * state that expects an implicit document start.
 */
public Event produce() {
    // The scanner always delivers STREAM-START first, so this cast is safe.
    StreamStartToken start = (StreamStartToken) scanner.getToken();
    Event streamStart = new StreamStartEvent(start.getStartMark(), start.getEndMark());
    // The next thing to parse is an (implicit) document start.
    state = new ParseImplicitDocumentStart();
    return streamStart;
}
项目:AndroidApktool    文件ScannerImpl.java   
/**
 * Queues the synthetic STREAM-START token. It is always the first token
 * emitted, just as STREAM-END is always the last.
 */
private void fetchStreamStart() {
    // Zero-width token: start and end marks coincide at the current position.
    final Mark here = reader.getMark();
    this.tokens.add(new StreamStartToken(here, here));
}
项目:5zig-TIMV-Plugin    文件ParserImpl.java   
/**
 * Emits the event for the beginning of the YAML stream.
 * Pulls the STREAM-START token off the scanner, wraps its marks in a
 * {@link StreamStartEvent}, and arms the implicit-document-start state.
 */
public Event produce() {
    // First token of any stream is STREAM-START by construction.
    StreamStartToken startToken = (StreamStartToken) scanner.getToken();
    Event result = new StreamStartEvent(startToken.getStartMark(), startToken.getEndMark());
    // Hand control to the state that recognizes an implicit document start.
    state = new ParseImplicitDocumentStart();
    return result;
}
项目:5zig-TIMV-Plugin    文件ScannerImpl.java   
/**
 * Adds the STREAM-START token to the queue; it is the mandatory first token
 * (STREAM-END being the mandatory last one).
 */
private void fetchStreamStart() {
    // STREAM-START occupies no characters, so both marks are the current mark.
    final Mark position = reader.getMark();
    Token streamStart = new StreamStartToken(position, position);
    this.tokens.add(streamStart);
}
项目:snake-yaml    文件ParserImpl.java   
/**
 * Translates the scanner's STREAM-START token into a {@link StreamStartEvent}
 * and advances the parser to the implicit-document-start state.
 */
public Event produce() {
    // STREAM-START is guaranteed to be the first token the scanner yields.
    StreamStartToken start = (StreamStartToken) scanner.getToken();
    Event event = new StreamStartEvent(start.getStartMark(), start.getEndMark());
    // Whatever follows is parsed as an implicit document start.
    state = new ParseImplicitDocumentStart();
    return event;
}
项目:snake-yaml    文件ScannerImpl.java   
/**
 * Enqueues STREAM-START, the token that always opens the token stream
 * (its counterpart STREAM-END always closes it).
 */
private void fetchStreamStart() {
    // A synthetic token with identical start/end marks — nothing is consumed.
    final Mark where = reader.getMark();
    this.tokens.add(new StreamStartToken(where, where));
}
项目:SubServers-2    文件ParserImpl.java   
/**
 * Opens the event stream: consumes STREAM-START from the scanner, builds the
 * matching event, and queues up the implicit-document-start parser state.
 */
public Event produce() {
    // The cast cannot fail — the scanner emits STREAM-START before anything else.
    StreamStartToken token = (StreamStartToken) scanner.getToken();
    Event streamStartEvent = new StreamStartEvent(token.getStartMark(), token.getEndMark());
    // Prepare to parse an implicit document start next.
    state = new ParseImplicitDocumentStart();
    return streamStartEvent;
}
项目:SubServers-2    文件ScannerImpl.java   
/**
 * Appends the STREAM-START token. Every token stream begins with
 * STREAM-START and ends with STREAM-END.
 */
private void fetchStreamStart() {
    // Both marks point at the current reader position; the token is zero-width.
    final Mark current = reader.getMark();
    Token start = new StreamStartToken(current, current);
    this.tokens.add(start);
}
项目:snakeyaml    文件ParserImpl.java   
/**
 * Produces the initial {@link StreamStartEvent} from the scanner's
 * STREAM-START token and transitions to the implicit-document-start state.
 */
public Event produce() {
    // First token is always STREAM-START, so the downcast is safe.
    StreamStartToken first = (StreamStartToken) scanner.getToken();
    Event opening = new StreamStartEvent(first.getStartMark(), first.getEndMark());
    // Subsequent input is handled as an implicit document start.
    state = new ParseImplicitDocumentStart();
    return opening;
}
项目:snakeyaml    文件ScannerImpl.java   
/**
 * Pushes the synthetic STREAM-START token onto the queue — the guaranteed
 * first token of every scan (STREAM-END is the guaranteed last).
 */
private void fetchStreamStart() {
    // No characters are consumed; start and end marks are the same.
    final Mark mark = reader.getMark();
    this.tokens.add(new StreamStartToken(mark, mark));
}
项目:TestTheTeacher    文件ParserImpl.java   
/**
 * Consumes the STREAM-START token and returns the corresponding event,
 * leaving the parser ready to recognize an implicit document start.
 */
public Event produce() {
    // The scanner front-loads STREAM-START, so this token id is certain.
    StreamStartToken streamToken = (StreamStartToken) scanner.getToken();
    Event out = new StreamStartEvent(streamToken.getStartMark(), streamToken.getEndMark());
    // Next state: implicit document start.
    state = new ParseImplicitDocumentStart();
    return out;
}
项目:TestTheTeacher    文件ScannerImpl.java   
/**
 * Inserts STREAM-START as the first token of the stream; STREAM-END will
 * likewise terminate it.
 */
private void fetchStreamStart() {
    // Zero-width synthetic token anchored at the reader's current mark.
    final Mark at = reader.getMark();
    Token token = new StreamStartToken(at, at);
    this.tokens.add(token);
}
项目:org.openntf.domino    文件ParserImpl.java   
/**
 * Produces the stream-start event opening the parse and switches the parser
 * into the implicit-document-start state.
 */
@Override
public Event produce() {
    // STREAM-START is always the scanner's first token; cast is guaranteed.
    StreamStartToken start = (StreamStartToken) scanner.getToken();
    Event streamStart = new StreamStartEvent(start.getStartMark(), start.getEndMark());
    // The parser now expects an implicit document start.
    state = new ParseImplicitDocumentStart();
    return streamStart;
}
项目:org.openntf.domino    文件ScannerImpl.java   
/**
 * Queues the STREAM-START token, which always begins the token stream (with
 * STREAM-END always ending it).
 */
private void fetchStreamStart() {
    // The token spans no input: both marks are the current reader mark.
    final Mark here = reader.getMark();
    this.tokens.add(new StreamStartToken(here, here));
}
项目:snake-yaml    文件CanonicalScanner.java   
/**
 * Tokenizes the whole canonical-YAML buffer in one pass, filling
 * {@code this.tokens}. Emits STREAM-START first and STREAM-END (on '\0')
 * last, dispatching on the current character for everything in between.
 *
 * Fixes over the scraped snippet: {@code data.charat} → {@code data.charAt}
 * and {@code Valuetoken} → {@code ValueToken} (Java is case-sensitive;
 * neither of the lowercase forms exists in snakeyaml).
 *
 * @throws CanonicalException if an unexpected character is encountered
 */
private void scan() {
    this.tokens.add(new StreamStartToken(mark, mark));
    boolean stop = false;
    while (!stop) {
        // Skip whitespace/comments up to the next significant character.
        findToken();
        char ch = data.charAt(index);
        switch (ch) {
        case '\0':
            // End of buffer sentinel — close the stream and stop scanning.
            tokens.add(new StreamEndToken(mark, mark));
            stop = true;
            break;

        case '%':
            tokens.add(scanDirective());
            break;

        case '-':
            // Only the full "---" marker starts a document; a lone '-' is left
            // for findToken()/other handlers on the next iteration.
            if ("---".equals(data.substring(index, index + 3))) {
                index += 3;
                tokens.add(new DocumentStartToken(mark, mark));
            }
            break;

        case '[':
            index++;
            tokens.add(new FlowSequenceStartToken(mark, mark));
            break;

        case '{':
            index++;
            tokens.add(new FlowMappingStartToken(mark, mark));
            break;

        case ']':
            index++;
            tokens.add(new FlowSequenceEndToken(mark, mark));
            break;

        case '}':
            index++;
            tokens.add(new FlowMappingEndToken(mark, mark));
            break;

        case '?':
            index++;
            tokens.add(new KeyToken(mark, mark));
            break;

        case ':':
            index++;
            tokens.add(new ValueToken(mark, mark));
            break;

        case ',':
            index++;
            tokens.add(new FlowEntryToken(mark, mark));
            break;

        case '*':
            // Aliases and anchors share one scanning routine.
            tokens.add(scanAlias());
            break;

        case '&':
            tokens.add(scanAlias());
            break;

        case '!':
            tokens.add(scanTag());
            break;

        case '"':
            tokens.add(scanScalar());
            break;

        default:
            throw new CanonicalException("invalid token");
        }
    }
    scanned = true;
}
项目:snakeyaml    文件CanonicalScanner.java   
private void scan() {
    this.tokens.add(new StreamStartToken(mark,mark));
            break;

        case '*':
            tokens.add(scanAlias());
            break;

        case '&':
            tokens.add(scanAlias());
            break;

        case '!':
            tokens.add(scanTag());
            break;

        case '"':
            tokens.add(scanScalar());
            break;

        default:
            throw new CanonicalException("invalid token");
        }
    }
    scanned = true;
}

版权声明:本文内容由互联网用户自发贡献,该文观点与技术仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 dio@foxmail.com 举报,一经查实,本站将立刻删除。