/*
 * Copyright 2006-2007 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.batch.item.file.transform;

import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

import org.springframework.batch.item.file.mapping.DefaultFieldSet;
import org.springframework.batch.item.file.mapping.FieldSet;

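/**
 * A {@link LineTokenizer} implementation that chooses between a set of
 * delegate tokenizers based on the prefix of the line being tokenized. The
 * delegates are supplied as a map from line prefix to {@link LineTokenizer};
 * the tokenizer registered under the empty string key (if any) is used as the
 * default when no other prefix matches.
 *
 * <p>
 * A sketch of typical usage (the delegate names below are hypothetical; any
 * {@link LineTokenizer} instances can be used):
 *
 * <pre>
 * Map tokenizers = new LinkedHashMap();
 * tokenizers.put("HEADER", headerTokenizer);
 * tokenizers.put("FOOTER", footerTokenizer);
 * tokenizers.put("", defaultTokenizer); // fallback for all other lines
 *
 * PrefixMatchingCompositeLineTokenizer lineTokenizer = new PrefixMatchingCompositeLineTokenizer();
 * lineTokenizer.setTokenizers(tokenizers);
 * </pre>
 */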
public class PrefixMatchingCompositeLineTokenizer implements LineTokenizer {

    private Map tokenizers = new HashMap();

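    /**
     * Register the delegate tokenizers, keyed by line prefix. The supplied map
     * is copied into a {@link LinkedHashMap} so that prefixes are tried in the
     * order in which they were registered.
     *
     * @param tokenizers a map from line prefix to {@link LineTokenizer}
     */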
    public void setTokenizers(Map tokenizers) {
        this.tokenizers = new LinkedHashMap(tokenizers);
    }

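    /**
     * Delegate to the first tokenizer whose prefix key matches the start of
     * the line. If no prefix matches, the default tokenizer (registered under
     * the empty string key) is used. A null line is tokenized to an empty
     * {@link FieldSet}.
     *
     * @see LineTokenizer#tokenize(String)
     * @throws IllegalStateException if no tokenizer can be matched to the line
     */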
    public FieldSet tokenize(String line) {

        if (line == null) {
            return new DefaultFieldSet(new String[0]);
        }

        LineTokenizer tokenizer = null;
        LineTokenizer defaultTokenizer = null;

        for (Iterator iter = tokenizers.keySet().iterator(); iter.hasNext();) {
            String key = (String) iter.next();
            if ("".equals(key)) {
                defaultTokenizer = (LineTokenizer) tokenizers.get(key);
                // don't break here: a later key might still prefix-match the line
                continue;
            }
            if (line.startsWith(key)) {
                tokenizer = (LineTokenizer) tokenizers.get(key);
                break;
            }
        }

        if (tokenizer == null) {
            tokenizer = defaultTokenizer;
        }

        if (tokenizer == null) {
            throw new IllegalStateException("Could not match record to tokenizer for line=[" + line + "]");
        }

        return tokenizer.tokenize(line);
    }

}