いまだにCSVです。
2010/09/17
csv
java
Javaです。いまだにCSVです。
以前つくりかけていたものが、いろいろと問題があってうまく動かなかったので、少しずつテストしながら作り直しています。
あと、RFCだけに対応させるとCSVのパーサーとしては使いづらいものになってしまうかなと考えてたりしてます。
あと、osx上のエクセルでcsvで保存するとCRLFでは保存されていないことに気がついて....
とりあえず、コミット。
以下、ちょっと工夫してます。
以前から実装したいアイデアがあって、まあ単純にCSVの行ごとに処理をしやすいようにしたいということなのですが、
まあ、なんとなく実装できてるかなと。
手書きのパーサで、依存するものを極力少なくしておいて、ソースをひとつにしておくと、個人で使う場合に便利なことが多いので、ひとつのソースにまとめてます。
package quicklunch.e2.goodies.utils;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PushbackReader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
/**
 * Minimal, dependency-free CSV utilities kept deliberately in a single
 * source file so it is easy to drop into small personal projects.
 *
 * Mostly RFC 4180, with two deliberate extensions: bare CR or LF record
 * terminators are accepted (OS X Excel does not write CRLF), and
 * single-quoted fields are allowed in addition to double-quoted ones.
 */
public abstract class CSVUtils {

    /** Callback invoked around parsing and once per parsed CSV record. */
    public interface IExecutor {
        /** Called once before the first record is parsed. */
        public void pre();

        /** Called once per record with its field values (separators removed). */
        public void exec(List<String> line);

        /** Called once after parsing finishes; always runs, even on error. */
        public void post();
    }

    /** No-op base implementation; override only the callbacks you need. */
    abstract static public class AbstractExecutor implements IExecutor {
        @Override
        public void pre() {
        }

        @Override
        public void exec(List<String> line) {
        }

        @Override
        public void post() {
        }
    }

    // ===================

    /** Token types produced by {@link CSVTokenizer#token()}. */
    public enum TT {
        EOF("EOF"), FIELD("FIELD"), COMMA("COMMA"), CRLF("CRLF"), CR("CR"), LF("LF");

        private final String s;

        TT(String s) {
            this.s = s;
        }

        @Override
        public String toString() {
            return s;
        }
    }

    /** A single lexed token: its type plus the accumulated character value. */
    public static class Token {
        TT type;
        public StringBuilder val = new StringBuilder();

        /** Sets the type and returns this token (builder-style shorthand). */
        public Token build(TT type) {
            this.type = type;
            return this;
        }

        public void append(int ch) {
            this.val.append((char) ch);
        }

        public void append(String s) {
            this.val.append(s);
        }

        @Override
        public String toString() {
            return "T:[" + type + "] V:[" + val + "]";
        }
    }

    /**
     * Hand-written CSV lexer over a {@link PushbackReader} (one character of
     * lookahead is enough for CRLF detection and doubled-quote escapes).
     */
    public static class CSVTokenizer {
        PushbackReader reader;
        static final int DQUOTE = '"';
        static final int QUOTE = '\'';
        static final int COMMA = ',';
        static final int EOF = -1;
        static final int CR = '\r';
        static final int LF = '\n';
        /* lexer states (0 = start of token) */
        static final int ST_nonescaped = 1;
        static final int ST_escaped = 2;
        static final int ST_escaped_single_quote = 3;

        public CSVTokenizer(String s) {
            this.reader = new PushbackReader(new BufferedReader(new StringReader(s)));
        }

        /**
         * NOTE(review): decodes with the platform default charset — kept
         * unchanged for backward compatibility; confirm callers expect that.
         */
        public CSVTokenizer(InputStream inputStream) {
            this.reader = new PushbackReader(new BufferedReader(new InputStreamReader(inputStream)));
        }

        /**
         * Reads and returns the next token; {@code TT.EOF} at end of input.
         *
         * @throws IOException if the underlying reader fails
         */
        public Token token() throws IOException {
            int state = 0;
            Token token = new Token();
            loop: while (true) {
                int ch = read();
                switch (state) {
                case 0:
                    /*
                     * -- START --
                     */
                    if (ch == EOF) {
                        return token.build(TT.EOF);
                    }
                    // double quote opens an escaped (quoted) field
                    if (ch == DQUOTE) {
                        state = ST_escaped;
                        token.type = TT.FIELD;
                        break;
                    }
                    // single quote opens a single-quoted field (non-RFC extension)
                    if (ch == QUOTE) {
                        state = ST_escaped_single_quote;
                        token.type = TT.FIELD;
                        break;
                    }
                    if (ch == COMMA) {
                        // separator; an empty field also lexes as a bare COMMA
                        token.append(ch);
                        return token.build(TT.COMMA);
                    }
                    if (ch == CR) {
                        ch = read();
                        if (ch == LF) {
                            // standard CRLF record terminator
                            return token.build(TT.CRLF);
                        }
                        // support bare CR line endings
                        unread(ch);
                        return token.build(TT.CR);
                    }
                    // support bare LF line endings
                    if (ch == LF) {
                        return token.build(TT.LF);
                    }
                    state = ST_nonescaped;
                    token.type = TT.FIELD;
                    // intentional fall-through: ch is the field's first character
                case ST_nonescaped:
                    /*
                     * -- non-escaped field --
                     */
                    if (ch == EOF || ch == CR || ch == LF || ch == DQUOTE) {
                        unread(ch);
                        return token;
                    }
                    if (!isTextdata(ch)) {
                        unread(ch);
                        return token;
                    }
                    token.append(ch);
                    break;
                case ST_escaped:
                    /*
                     * -- escaped (double-quoted) field --
                     */
                    if (ch == EOF) {
                        // unterminated quote: return what was collected
                        return token.build(TT.FIELD);
                    }
                    if (ch == DQUOTE) {
                        ch = read();
                        // "" inside a quoted field is a literal quote (RFC 4180)
                        if (ch == DQUOTE) {
                            token.append("\"");
                            state = ST_escaped;
                            break;
                        }
                        // closing quote: push back the lookahead, field is done
                        unread(ch);
                        return token;
                    }
                    token.append(ch);
                    break;
                case ST_escaped_single_quote:
                    /*
                     * -- escaped (single-quoted) field --
                     */
                    if (ch == EOF) {
                        return token.build(TT.FIELD);
                    }
                    if (ch == QUOTE) {
                        ch = read();
                        // '' inside a single-quoted field is a literal quote
                        if (ch == QUOTE) {
                            token.append("\'");
                            state = ST_escaped_single_quote;
                            break;
                        }
                        unread(ch);
                        return token;
                    }
                    token.append(ch);
                    break;
                default:
                    break loop;
                }
            }
            return token;
        }

        // true if ch may appear in a non-escaped field (anything but CR, LF, '"', ',')
        boolean isTextdata(int ch) {
            return notEq(ch, '\r') && notEq(ch, '\n') && notEq(ch, '"') && notEq(ch, ',');
        }

        int read() throws IOException {
            if (reader != null)
                return reader.read();
            return -1;
        }

        boolean notEq(int l, int r) {
            return (l != r);
        }

        void unread(int ch) throws IOException {
            // EOF (-1) must never be pushed back into the reader
            if (reader != null && ch != -1) {
                reader.unread(ch);
            }
        }

        /** Best-effort close of the underlying reader. */
        public void close() {
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException ignored) {
                    // deliberately ignored: nothing useful to do on close failure
                }
            }
        }
    }

    /**
     * Creates a tokenizer over the given stream. The caller is responsible
     * for calling {@link CSVTokenizer#close()}.
     *
     * @param inputStream the CSV input (platform default charset)
     * @return a new tokenizer
     */
    public static CSVTokenizer tokenizer(InputStream inputStream) {
        return new CSVTokenizer(inputStream);
    }

    /**
     * Parses the stream record by record, invoking {@code executor.exec}
     * once per record with the list of field values. {@code pre()} and
     * {@code post()} bracket the run; {@code post()} always runs.
     *
     * Fix: the tokenizer (and therefore the underlying stream) is now closed
     * when parsing finishes — it previously leaked.
     *
     * NOTE(review): COMMA tokens are skipped, so consecutive separators
     * ("a,,b") yield no empty-string field in the output row — confirm this
     * is the intended contract before changing it.
     *
     * @param inputStream the CSV input
     * @param executor    callback receiving each record
     * @return the executor that was passed in (for chaining)
     * @throws IOException if reading the stream fails
     */
    public static IExecutor tokenize(InputStream inputStream,
            IExecutor executor) throws IOException {
        executor.pre();
        CSVTokenizer tokenizer = new CSVTokenizer(inputStream);
        try {
            CSVUtils.Token token = null;
            do {
                List<String> line = new ArrayList<String>();
                while ((token = tokenizer.token()) != null
                        && !(token.type == CSVUtils.TT.EOF || token.type == CSVUtils.TT.CRLF
                                || token.type == CSVUtils.TT.CR || token.type == TT.LF)) {
                    if (token.type == TT.COMMA)
                        continue; // separators are consumed, fields collected
                    line.add(token.val.toString());
                }
                executor.exec(line);
            } while (token != null && token.type != TT.EOF);
        } finally {
            tokenizer.close();
            executor.post();
        }
        return executor;
    }
}
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PushbackReader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
/**
 * Minimal, dependency-free CSV utilities kept deliberately in a single
 * source file so it is easy to drop into small personal projects.
 *
 * Mostly RFC 4180, with two deliberate extensions: bare CR or LF record
 * terminators are accepted (OS X Excel does not write CRLF), and
 * single-quoted fields are allowed in addition to double-quoted ones.
 */
public abstract class CSVUtils {

    /** Callback invoked around parsing and once per parsed CSV record. */
    public interface IExecutor {
        /** Called once before the first record is parsed. */
        public void pre();

        /** Called once per record with its field values (separators removed). */
        public void exec(List<String> line);

        /** Called once after parsing finishes; always runs, even on error. */
        public void post();
    }

    /** No-op base implementation; override only the callbacks you need. */
    abstract static public class AbstractExecutor implements IExecutor {
        @Override
        public void pre() {
        }

        @Override
        public void exec(List<String> line) {
        }

        @Override
        public void post() {
        }
    }

    // ===================

    /** Token types produced by {@link CSVTokenizer#token()}. */
    public enum TT {
        EOF("EOF"), FIELD("FIELD"), COMMA("COMMA"), CRLF("CRLF"), CR("CR"), LF("LF");

        private final String s;

        TT(String s) {
            this.s = s;
        }

        @Override
        public String toString() {
            return s;
        }
    }

    /** A single lexed token: its type plus the accumulated character value. */
    public static class Token {
        TT type;
        public StringBuilder val = new StringBuilder();

        /** Sets the type and returns this token (builder-style shorthand). */
        public Token build(TT type) {
            this.type = type;
            return this;
        }

        public void append(int ch) {
            this.val.append((char) ch);
        }

        public void append(String s) {
            this.val.append(s);
        }

        @Override
        public String toString() {
            return "T:[" + type + "] V:[" + val + "]";
        }
    }

    /**
     * Hand-written CSV lexer over a {@link PushbackReader} (one character of
     * lookahead is enough for CRLF detection and doubled-quote escapes).
     */
    public static class CSVTokenizer {
        PushbackReader reader;
        static final int DQUOTE = '"';
        static final int QUOTE = '\'';
        static final int COMMA = ',';
        static final int EOF = -1;
        static final int CR = '\r';
        static final int LF = '\n';
        /* lexer states (0 = start of token) */
        static final int ST_nonescaped = 1;
        static final int ST_escaped = 2;
        static final int ST_escaped_single_quote = 3;

        public CSVTokenizer(String s) {
            this.reader = new PushbackReader(new BufferedReader(new StringReader(s)));
        }

        /**
         * NOTE(review): decodes with the platform default charset — kept
         * unchanged for backward compatibility; confirm callers expect that.
         */
        public CSVTokenizer(InputStream inputStream) {
            this.reader = new PushbackReader(new BufferedReader(new InputStreamReader(inputStream)));
        }

        /**
         * Reads and returns the next token; {@code TT.EOF} at end of input.
         *
         * @throws IOException if the underlying reader fails
         */
        public Token token() throws IOException {
            int state = 0;
            Token token = new Token();
            loop: while (true) {
                int ch = read();
                switch (state) {
                case 0:
                    /*
                     * -- START --
                     */
                    if (ch == EOF) {
                        return token.build(TT.EOF);
                    }
                    // double quote opens an escaped (quoted) field
                    if (ch == DQUOTE) {
                        state = ST_escaped;
                        token.type = TT.FIELD;
                        break;
                    }
                    // single quote opens a single-quoted field (non-RFC extension)
                    if (ch == QUOTE) {
                        state = ST_escaped_single_quote;
                        token.type = TT.FIELD;
                        break;
                    }
                    if (ch == COMMA) {
                        // separator; an empty field also lexes as a bare COMMA
                        token.append(ch);
                        return token.build(TT.COMMA);
                    }
                    if (ch == CR) {
                        ch = read();
                        if (ch == LF) {
                            // standard CRLF record terminator
                            return token.build(TT.CRLF);
                        }
                        // support bare CR line endings
                        unread(ch);
                        return token.build(TT.CR);
                    }
                    // support bare LF line endings
                    if (ch == LF) {
                        return token.build(TT.LF);
                    }
                    state = ST_nonescaped;
                    token.type = TT.FIELD;
                    // intentional fall-through: ch is the field's first character
                case ST_nonescaped:
                    /*
                     * -- non-escaped field --
                     */
                    if (ch == EOF || ch == CR || ch == LF || ch == DQUOTE) {
                        unread(ch);
                        return token;
                    }
                    if (!isTextdata(ch)) {
                        unread(ch);
                        return token;
                    }
                    token.append(ch);
                    break;
                case ST_escaped:
                    /*
                     * -- escaped (double-quoted) field --
                     */
                    if (ch == EOF) {
                        // unterminated quote: return what was collected
                        return token.build(TT.FIELD);
                    }
                    if (ch == DQUOTE) {
                        ch = read();
                        // "" inside a quoted field is a literal quote (RFC 4180)
                        if (ch == DQUOTE) {
                            token.append("\"");
                            state = ST_escaped;
                            break;
                        }
                        // closing quote: push back the lookahead, field is done
                        unread(ch);
                        return token;
                    }
                    token.append(ch);
                    break;
                case ST_escaped_single_quote:
                    /*
                     * -- escaped (single-quoted) field --
                     */
                    if (ch == EOF) {
                        return token.build(TT.FIELD);
                    }
                    if (ch == QUOTE) {
                        ch = read();
                        // '' inside a single-quoted field is a literal quote
                        if (ch == QUOTE) {
                            token.append("\'");
                            state = ST_escaped_single_quote;
                            break;
                        }
                        unread(ch);
                        return token;
                    }
                    token.append(ch);
                    break;
                default:
                    break loop;
                }
            }
            return token;
        }

        // true if ch may appear in a non-escaped field (anything but CR, LF, '"', ',')
        boolean isTextdata(int ch) {
            return notEq(ch, '\r') && notEq(ch, '\n') && notEq(ch, '"') && notEq(ch, ',');
        }

        int read() throws IOException {
            if (reader != null)
                return reader.read();
            return -1;
        }

        boolean notEq(int l, int r) {
            return (l != r);
        }

        void unread(int ch) throws IOException {
            // EOF (-1) must never be pushed back into the reader
            if (reader != null && ch != -1) {
                reader.unread(ch);
            }
        }

        /** Best-effort close of the underlying reader. */
        public void close() {
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException ignored) {
                    // deliberately ignored: nothing useful to do on close failure
                }
            }
        }
    }

    /**
     * Creates a tokenizer over the given stream. The caller is responsible
     * for calling {@link CSVTokenizer#close()}.
     *
     * @param inputStream the CSV input (platform default charset)
     * @return a new tokenizer
     */
    public static CSVTokenizer tokenizer(InputStream inputStream) {
        return new CSVTokenizer(inputStream);
    }

    /**
     * Parses the stream record by record, invoking {@code executor.exec}
     * once per record with the list of field values. {@code pre()} and
     * {@code post()} bracket the run; {@code post()} always runs.
     *
     * Fix: the tokenizer (and therefore the underlying stream) is now closed
     * when parsing finishes — it previously leaked.
     *
     * NOTE(review): COMMA tokens are skipped, so consecutive separators
     * ("a,,b") yield no empty-string field in the output row — confirm this
     * is the intended contract before changing it.
     *
     * @param inputStream the CSV input
     * @param executor    callback receiving each record
     * @return the executor that was passed in (for chaining)
     * @throws IOException if reading the stream fails
     */
    public static IExecutor tokenize(InputStream inputStream,
            IExecutor executor) throws IOException {
        executor.pre();
        CSVTokenizer tokenizer = new CSVTokenizer(inputStream);
        try {
            CSVUtils.Token token = null;
            do {
                List<String> line = new ArrayList<String>();
                while ((token = tokenizer.token()) != null
                        && !(token.type == CSVUtils.TT.EOF || token.type == CSVUtils.TT.CRLF
                                || token.type == CSVUtils.TT.CR || token.type == TT.LF)) {
                    if (token.type == TT.COMMA)
                        continue; // separators are consumed, fields collected
                    line.add(token.val.toString());
                }
                executor.exec(line);
            } while (token != null && token.type != TT.EOF);
        } finally {
            tokenizer.close();
            executor.post();
        }
        return executor;
    }
}
: