custom token (#1588)

* rest with spark java

* 4

* Update Application.java

* indentation changes

* spring @requestmapping shortcuts

* removing spring requestmapping and pushing spring-mvc-java

* Joining/Splitting Strings with Java and Stream API

* adding more join/split functionality

* changing package name

* testcase change

* adding webutils

* adding testcase for WebUtils and ServletRequestUtils

* adding testcase

* spring-security-stormpath

* adding ratpack module

* adding pom.xml

* adding following modules with updated testcase : DB, Filter, Json

* adding spring-boot custom banner tutorial

* changing banner format in plain text

* Delete banner.txt~

* Delete b.txt~

* CORS in JAX-RS

* ratpack with google guice

* adding factory instance example

* quick-guide-to-the-java-stringtokenizer

* Update Application.java

* Delete MovieCrudService.java~

* token customization

* Update Application.java

* adding csv test

* adding csv test

* Update Application.java

* adding collection module
This commit is contained in:
Abhinab Kanrar
2017-04-07 20:03:10 +05:30
committed by maibin
parent d87e0663fc
commit 7ab1e37356
3 changed files with 64 additions and 13 deletions

View File

@@ -1,6 +1,10 @@
package com.baeldung.stringtokenizer;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.StringTokenizer;
public class Application {

    /**
     * Splits {@code str} on commas using {@link StringTokenizer}.
     *
     * @param str the comma-separated input; an empty string yields an empty list
     * @return the tokens in order of appearance (never {@code null})
     */
    public List<String> getTokens(String str) {
        List<String> tokens = new ArrayList<String>();
        // Tutorial alternatives kept for reference:
        // new StringTokenizer(str)              -> default whitespace delimiters
        // new StringTokenizer(str, ",", true)   -> also returns the delimiters as tokens
        StringTokenizer tokenizer = new StringTokenizer(str, ",");
        while (tokenizer.hasMoreElements()) {
            // nextToken("e") would switch the delimiter set mid-iteration
            tokens.add(tokenizer.nextToken());
        }
        return tokens;
    }

    /**
     * Same comma split as {@link #getTokens(String)}, but drains the tokenizer
     * through {@link Collections#list(java.util.Enumeration)} instead of a loop.
     *
     * @param str the comma-separated input
     * @return the tokens in order of appearance (never {@code null})
     */
    public List<String> getTokensWithCollection(String str) {
        StringTokenizer tokenizer = new StringTokenizer(str, ",");
        List<String> tokens = new ArrayList<String>();
        // StringTokenizer is a raw Enumeration, so each element needs a String cast
        Collections.list(tokenizer).forEach(token -> tokens.add((String) token));
        return tokens;
    }

    /**
     * Reads the classpath resource {@code /path} line by line and tokenizes each
     * line with the given delimiter set.
     *
     * @param path  resource name relative to the classpath root (no leading slash)
     * @param delim delimiter characters passed to {@link StringTokenizer}
     * @return all tokens from all lines; best-effort — on {@link IOException} the
     *         error is printed and the tokens collected so far are returned
     */
    public List<String> getTokensFromFile(String path, String delim) {
        List<String> tokens = new ArrayList<String>();
        String currLine;
        StringTokenizer tokenizer;
        // UTF-8 is specified explicitly: before Java 18 the no-charset constructor
        // falls back to the platform default charset.
        try (BufferedReader br = new BufferedReader(
                new InputStreamReader(Application.class.getResourceAsStream("/" + path), StandardCharsets.UTF_8))) {
            while ((currLine = br.readLine()) != null) {
                tokenizer = new StringTokenizer(currLine, delim);
                while (tokenizer.hasMoreElements()) {
                    tokens.add(tokenizer.nextToken());
                }
            }
        } catch (IOException e) {
            // deliberate best-effort: report and return what was read
            e.printStackTrace();
        }
        return tokens;
    }
}