Showing 15 changed files with 394 additions and 291 deletions
.gitignore
100644 → 100755
File mode changed
1 | <?xml version="1.0" encoding="UTF-8"?> | 1 | <?xml version="1.0" encoding="UTF-8"?> |
2 | -<module type="JAVA_MODULE" version="4" /> | ||
... | \ No newline at end of file | ... | \ No newline at end of file |
2 | +<module org.jetbrains.idea.maven.project.MavenProjectsManager.isMavenModule="true" type="JAVA_MODULE" version="4"> | ||
3 | + <component name="NewModuleRootManager" LANGUAGE_LEVEL="JDK_1_8"> | ||
4 | + <output url="file://$MODULE_DIR$/target/classes" /> | ||
5 | + <output-test url="file://$MODULE_DIR$/target/test-classes" /> | ||
6 | + <content url="file://$MODULE_DIR$"> | ||
7 | + <sourceFolder url="file://$MODULE_DIR$/src/main/resources" type="java-resource" /> | ||
8 | + <sourceFolder url="file://$MODULE_DIR$/src/test/resources" type="java-test-resource" /> | ||
9 | + <sourceFolder url="file://$MODULE_DIR$/src/main/java" isTestSource="false" /> | ||
10 | + <sourceFolder url="file://$MODULE_DIR$/src/test/java" isTestSource="true" /> | ||
11 | + <excludeFolder url="file://$MODULE_DIR$/target" /> | ||
12 | + </content> | ||
13 | + <orderEntry type="inheritedJdk" /> | ||
14 | + <orderEntry type="sourceFolder" forTests="false" /> | ||
15 | + <orderEntry type="library" name="Maven: com.databricks:spark-csv_2.11:1.5.0" level="project" /> | ||
16 | + <orderEntry type="library" name="Maven: org.apache.commons:commons-csv:1.1" level="project" /> | ||
17 | + <orderEntry type="library" name="Maven: org.apache.spark:spark-core_2.11:2.3.0" level="project" /> | ||
18 | + <orderEntry type="library" name="Maven: org.apache.avro:avro:1.7.7" level="project" /> | ||
19 | + <orderEntry type="library" name="Maven: org.codehaus.jackson:jackson-core-asl:1.9.13" level="project" /> | ||
20 | + <orderEntry type="library" name="Maven: org.codehaus.jackson:jackson-mapper-asl:1.9.13" level="project" /> | ||
21 | + <orderEntry type="library" name="Maven: com.thoughtworks.paranamer:paranamer:2.3" level="project" /> | ||
22 | + <orderEntry type="library" name="Maven: org.apache.commons:commons-compress:1.4.1" level="project" /> | ||
23 | + <orderEntry type="library" name="Maven: org.tukaani:xz:1.0" level="project" /> | ||
24 | + <orderEntry type="library" name="Maven: org.apache.avro:avro-mapred:hadoop2:1.7.7" level="project" /> | ||
25 | + <orderEntry type="library" name="Maven: org.apache.avro:avro-ipc:1.7.7" level="project" /> | ||
26 | + <orderEntry type="library" name="Maven: org.apache.avro:avro-ipc:tests:1.7.7" level="project" /> | ||
27 | + <orderEntry type="library" name="Maven: com.twitter:chill_2.11:0.8.4" level="project" /> | ||
28 | + <orderEntry type="library" name="Maven: com.esotericsoftware:kryo-shaded:3.0.3" level="project" /> | ||
29 | + <orderEntry type="library" name="Maven: com.esotericsoftware:minlog:1.3.0" level="project" /> | ||
30 | + <orderEntry type="library" name="Maven: org.objenesis:objenesis:2.1" level="project" /> | ||
31 | + <orderEntry type="library" name="Maven: com.twitter:chill-java:0.8.4" level="project" /> | ||
32 | + <orderEntry type="library" name="Maven: org.apache.xbean:xbean-asm5-shaded:4.4" level="project" /> | ||
33 | + <orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-client:2.6.5" level="project" /> | ||
34 | + <orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-common:2.6.5" level="project" /> | ||
35 | + <orderEntry type="library" name="Maven: commons-cli:commons-cli:1.2" level="project" /> | ||
36 | + <orderEntry type="library" name="Maven: xmlenc:xmlenc:0.52" level="project" /> | ||
37 | + <orderEntry type="library" name="Maven: commons-httpclient:commons-httpclient:3.1" level="project" /> | ||
38 | + <orderEntry type="library" name="Maven: commons-io:commons-io:2.4" level="project" /> | ||
39 | + <orderEntry type="library" name="Maven: commons-collections:commons-collections:3.2.2" level="project" /> | ||
40 | + <orderEntry type="library" name="Maven: commons-configuration:commons-configuration:1.6" level="project" /> | ||
41 | + <orderEntry type="library" name="Maven: commons-digester:commons-digester:1.8" level="project" /> | ||
42 | + <orderEntry type="library" name="Maven: commons-beanutils:commons-beanutils:1.7.0" level="project" /> | ||
43 | + <orderEntry type="library" name="Maven: commons-beanutils:commons-beanutils-core:1.8.0" level="project" /> | ||
44 | + <orderEntry type="library" name="Maven: com.google.code.gson:gson:2.2.4" level="project" /> | ||
45 | + <orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-auth:2.6.5" level="project" /> | ||
46 | + <orderEntry type="library" name="Maven: org.apache.directory.server:apacheds-kerberos-codec:2.0.0-M15" level="project" /> | ||
47 | + <orderEntry type="library" name="Maven: org.apache.directory.server:apacheds-i18n:2.0.0-M15" level="project" /> | ||
48 | + <orderEntry type="library" name="Maven: org.apache.directory.api:api-asn1-api:1.0.0-M20" level="project" /> | ||
49 | + <orderEntry type="library" name="Maven: org.apache.directory.api:api-util:1.0.0-M20" level="project" /> | ||
50 | + <orderEntry type="library" name="Maven: org.apache.curator:curator-client:2.6.0" level="project" /> | ||
51 | + <orderEntry type="library" name="Maven: org.htrace:htrace-core:3.0.4" level="project" /> | ||
52 | + <orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-hdfs:2.6.5" level="project" /> | ||
53 | + <orderEntry type="library" name="Maven: org.mortbay.jetty:jetty-util:6.1.26" level="project" /> | ||
54 | + <orderEntry type="library" name="Maven: xerces:xercesImpl:2.9.1" level="project" /> | ||
55 | + <orderEntry type="library" name="Maven: xml-apis:xml-apis:1.3.04" level="project" /> | ||
56 | + <orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-mapreduce-client-app:2.6.5" level="project" /> | ||
57 | + <orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-mapreduce-client-common:2.6.5" level="project" /> | ||
58 | + <orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-yarn-client:2.6.5" level="project" /> | ||
59 | + <orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-yarn-server-common:2.6.5" level="project" /> | ||
60 | + <orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-mapreduce-client-shuffle:2.6.5" level="project" /> | ||
61 | + <orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-yarn-api:2.6.5" level="project" /> | ||
62 | + <orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-mapreduce-client-core:2.6.5" level="project" /> | ||
63 | + <orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-yarn-common:2.6.5" level="project" /> | ||
64 | + <orderEntry type="library" name="Maven: javax.xml.bind:jaxb-api:2.2.2" level="project" /> | ||
65 | + <orderEntry type="library" name="Maven: javax.xml.stream:stax-api:1.0-2" level="project" /> | ||
66 | + <orderEntry type="library" name="Maven: org.codehaus.jackson:jackson-jaxrs:1.9.13" level="project" /> | ||
67 | + <orderEntry type="library" name="Maven: org.codehaus.jackson:jackson-xc:1.9.13" level="project" /> | ||
68 | + <orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-mapreduce-client-jobclient:2.6.5" level="project" /> | ||
69 | + <orderEntry type="library" name="Maven: org.apache.hadoop:hadoop-annotations:2.6.5" level="project" /> | ||
70 | + <orderEntry type="library" name="Maven: org.apache.spark:spark-launcher_2.11:2.3.0" level="project" /> | ||
71 | + <orderEntry type="library" name="Maven: org.apache.spark:spark-kvstore_2.11:2.3.0" level="project" /> | ||
72 | + <orderEntry type="library" name="Maven: org.fusesource.leveldbjni:leveldbjni-all:1.8" level="project" /> | ||
73 | + <orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-core:2.6.7" level="project" /> | ||
74 | + <orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-annotations:2.6.7" level="project" /> | ||
75 | + <orderEntry type="library" name="Maven: org.apache.spark:spark-network-common_2.11:2.3.0" level="project" /> | ||
76 | + <orderEntry type="library" name="Maven: org.apache.spark:spark-network-shuffle_2.11:2.3.0" level="project" /> | ||
77 | + <orderEntry type="library" name="Maven: org.apache.spark:spark-unsafe_2.11:2.3.0" level="project" /> | ||
78 | + <orderEntry type="library" name="Maven: net.java.dev.jets3t:jets3t:0.9.4" level="project" /> | ||
79 | + <orderEntry type="library" name="Maven: org.apache.httpcomponents:httpcore:4.4.1" level="project" /> | ||
80 | + <orderEntry type="library" name="Maven: org.apache.httpcomponents:httpclient:4.5" level="project" /> | ||
81 | + <orderEntry type="library" name="Maven: commons-codec:commons-codec:2.0-SNAPSHOT" level="project" /> | ||
82 | + <orderEntry type="library" name="Maven: javax.activation:activation:1.1.1" level="project" /> | ||
83 | + <orderEntry type="library" name="Maven: org.bouncycastle:bcprov-jdk15on:1.52" level="project" /> | ||
84 | + <orderEntry type="library" name="Maven: com.jamesmurty.utils:java-xmlbuilder:1.1" level="project" /> | ||
85 | + <orderEntry type="library" name="Maven: net.iharder:base64:2.3.8" level="project" /> | ||
86 | + <orderEntry type="library" name="Maven: org.apache.curator:curator-recipes:2.6.0" level="project" /> | ||
87 | + <orderEntry type="library" name="Maven: org.apache.curator:curator-framework:2.6.0" level="project" /> | ||
88 | + <orderEntry type="library" name="Maven: org.apache.zookeeper:zookeeper:3.4.6" level="project" /> | ||
89 | + <orderEntry type="library" name="Maven: com.google.guava:guava:16.0.1" level="project" /> | ||
90 | + <orderEntry type="library" name="Maven: javax.servlet:javax.servlet-api:3.1.0" level="project" /> | ||
91 | + <orderEntry type="library" name="Maven: org.apache.commons:commons-lang3:3.5" level="project" /> | ||
92 | + <orderEntry type="library" name="Maven: org.apache.commons:commons-math3:3.4.1" level="project" /> | ||
93 | + <orderEntry type="library" name="Maven: com.google.code.findbugs:jsr305:1.3.9" level="project" /> | ||
94 | + <orderEntry type="library" name="Maven: org.slf4j:slf4j-api:1.7.16" level="project" /> | ||
95 | + <orderEntry type="library" name="Maven: org.slf4j:jul-to-slf4j:1.7.16" level="project" /> | ||
96 | + <orderEntry type="library" name="Maven: org.slf4j:jcl-over-slf4j:1.7.16" level="project" /> | ||
97 | + <orderEntry type="library" name="Maven: log4j:log4j:1.2.17" level="project" /> | ||
98 | + <orderEntry type="library" name="Maven: org.slf4j:slf4j-log4j12:1.7.16" level="project" /> | ||
99 | + <orderEntry type="library" name="Maven: com.ning:compress-lzf:1.0.3" level="project" /> | ||
100 | + <orderEntry type="library" name="Maven: org.xerial.snappy:snappy-java:1.1.2.6" level="project" /> | ||
101 | + <orderEntry type="library" name="Maven: org.lz4:lz4-java:1.4.0" level="project" /> | ||
102 | + <orderEntry type="library" name="Maven: com.github.luben:zstd-jni:1.3.2-2" level="project" /> | ||
103 | + <orderEntry type="library" name="Maven: org.roaringbitmap:RoaringBitmap:0.5.11" level="project" /> | ||
104 | + <orderEntry type="library" name="Maven: commons-net:commons-net:2.2" level="project" /> | ||
105 | + <orderEntry type="library" name="Maven: org.scala-lang:scala-library:2.11.8" level="project" /> | ||
106 | + <orderEntry type="library" name="Maven: org.json4s:json4s-jackson_2.11:3.2.11" level="project" /> | ||
107 | + <orderEntry type="library" name="Maven: org.json4s:json4s-core_2.11:3.2.11" level="project" /> | ||
108 | + <orderEntry type="library" name="Maven: org.json4s:json4s-ast_2.11:3.2.11" level="project" /> | ||
109 | + <orderEntry type="library" name="Maven: org.scala-lang:scalap:2.11.0" level="project" /> | ||
110 | + <orderEntry type="library" name="Maven: org.scala-lang:scala-compiler:2.11.0" level="project" /> | ||
111 | + <orderEntry type="library" name="Maven: org.scala-lang.modules:scala-xml_2.11:1.0.1" level="project" /> | ||
112 | + <orderEntry type="library" name="Maven: org.glassfish.jersey.core:jersey-client:2.22.2" level="project" /> | ||
113 | + <orderEntry type="library" name="Maven: javax.ws.rs:javax.ws.rs-api:2.0.1" level="project" /> | ||
114 | + <orderEntry type="library" name="Maven: org.glassfish.hk2:hk2-api:2.4.0-b34" level="project" /> | ||
115 | + <orderEntry type="library" name="Maven: org.glassfish.hk2:hk2-utils:2.4.0-b34" level="project" /> | ||
116 | + <orderEntry type="library" name="Maven: org.glassfish.hk2.external:aopalliance-repackaged:2.4.0-b34" level="project" /> | ||
117 | + <orderEntry type="library" name="Maven: org.glassfish.hk2.external:javax.inject:2.4.0-b34" level="project" /> | ||
118 | + <orderEntry type="library" name="Maven: org.glassfish.hk2:hk2-locator:2.4.0-b34" level="project" /> | ||
119 | + <orderEntry type="library" name="Maven: org.javassist:javassist:3.18.1-GA" level="project" /> | ||
120 | + <orderEntry type="library" name="Maven: org.glassfish.jersey.core:jersey-common:2.22.2" level="project" /> | ||
121 | + <orderEntry type="library" name="Maven: javax.annotation:javax.annotation-api:1.2" level="project" /> | ||
122 | + <orderEntry type="library" name="Maven: org.glassfish.jersey.bundles.repackaged:jersey-guava:2.22.2" level="project" /> | ||
123 | + <orderEntry type="library" name="Maven: org.glassfish.hk2:osgi-resource-locator:1.0.1" level="project" /> | ||
124 | + <orderEntry type="library" name="Maven: org.glassfish.jersey.core:jersey-server:2.22.2" level="project" /> | ||
125 | + <orderEntry type="library" name="Maven: org.glassfish.jersey.media:jersey-media-jaxb:2.22.2" level="project" /> | ||
126 | + <orderEntry type="library" name="Maven: javax.validation:validation-api:1.1.0.Final" level="project" /> | ||
127 | + <orderEntry type="library" name="Maven: org.glassfish.jersey.containers:jersey-container-servlet:2.22.2" level="project" /> | ||
128 | + <orderEntry type="library" name="Maven: org.glassfish.jersey.containers:jersey-container-servlet-core:2.22.2" level="project" /> | ||
129 | + <orderEntry type="library" name="Maven: io.netty:netty-all:4.1.17.Final" level="project" /> | ||
130 | + <orderEntry type="library" name="Maven: io.netty:netty:3.9.9.Final" level="project" /> | ||
131 | + <orderEntry type="library" name="Maven: com.clearspring.analytics:stream:2.7.0" level="project" /> | ||
132 | + <orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-core:3.1.5" level="project" /> | ||
133 | + <orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-jvm:3.1.5" level="project" /> | ||
134 | + <orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-json:3.1.5" level="project" /> | ||
135 | + <orderEntry type="library" name="Maven: io.dropwizard.metrics:metrics-graphite:3.1.5" level="project" /> | ||
136 | + <orderEntry type="library" name="Maven: com.fasterxml.jackson.core:jackson-databind:2.6.7.1" level="project" /> | ||
137 | + <orderEntry type="library" name="Maven: com.fasterxml.jackson.module:jackson-module-scala_2.11:2.6.7.1" level="project" /> | ||
138 | + <orderEntry type="library" name="Maven: org.scala-lang:scala-reflect:2.11.8" level="project" /> | ||
139 | + <orderEntry type="library" name="Maven: com.fasterxml.jackson.module:jackson-module-paranamer:2.7.9" level="project" /> | ||
140 | + <orderEntry type="library" name="Maven: org.apache.ivy:ivy:2.4.0" level="project" /> | ||
141 | + <orderEntry type="library" name="Maven: oro:oro:2.0.8" level="project" /> | ||
142 | + <orderEntry type="library" name="Maven: net.razorvine:pyrolite:4.13" level="project" /> | ||
143 | + <orderEntry type="library" name="Maven: net.sf.py4j:py4j:0.10.6" level="project" /> | ||
144 | + <orderEntry type="library" name="Maven: org.apache.spark:spark-tags_2.11:2.3.0" level="project" /> | ||
145 | + <orderEntry type="library" name="Maven: org.apache.commons:commons-crypto:1.0.0" level="project" /> | ||
146 | + <orderEntry type="library" name="Maven: org.spark-project.spark:unused:1.0.0" level="project" /> | ||
147 | + <orderEntry type="library" name="Maven: org.apache.spark:spark-sql_2.11:2.3.0" level="project" /> | ||
148 | + <orderEntry type="library" name="Maven: com.univocity:univocity-parsers:2.5.9" level="project" /> | ||
149 | + <orderEntry type="library" name="Maven: org.apache.spark:spark-sketch_2.11:2.3.0" level="project" /> | ||
150 | + <orderEntry type="library" name="Maven: org.apache.spark:spark-catalyst_2.11:2.3.0" level="project" /> | ||
151 | + <orderEntry type="library" name="Maven: org.scala-lang.modules:scala-parser-combinators_2.11:1.0.4" level="project" /> | ||
152 | + <orderEntry type="library" name="Maven: org.codehaus.janino:janino:3.0.8" level="project" /> | ||
153 | + <orderEntry type="library" name="Maven: org.codehaus.janino:commons-compiler:3.0.8" level="project" /> | ||
154 | + <orderEntry type="library" name="Maven: org.antlr:antlr4-runtime:4.7" level="project" /> | ||
155 | + <orderEntry type="library" name="Maven: org.apache.orc:orc-core:nohive:1.4.1" level="project" /> | ||
156 | + <orderEntry type="library" name="Maven: com.google.protobuf:protobuf-java:2.5.0" level="project" /> | ||
157 | + <orderEntry type="library" name="Maven: commons-lang:commons-lang:2.6" level="project" /> | ||
158 | + <orderEntry type="library" name="Maven: io.airlift:aircompressor:0.8" level="project" /> | ||
159 | + <orderEntry type="library" name="Maven: org.apache.orc:orc-mapreduce:nohive:1.4.1" level="project" /> | ||
160 | + <orderEntry type="library" name="Maven: org.apache.parquet:parquet-column:1.8.2" level="project" /> | ||
161 | + <orderEntry type="library" name="Maven: org.apache.parquet:parquet-common:1.8.2" level="project" /> | ||
162 | + <orderEntry type="library" name="Maven: org.apache.parquet:parquet-encoding:1.8.2" level="project" /> | ||
163 | + <orderEntry type="library" name="Maven: org.apache.parquet:parquet-hadoop:1.8.2" level="project" /> | ||
164 | + <orderEntry type="library" name="Maven: org.apache.parquet:parquet-format:2.3.1" level="project" /> | ||
165 | + <orderEntry type="library" name="Maven: org.apache.parquet:parquet-jackson:1.8.2" level="project" /> | ||
166 | + <orderEntry type="library" name="Maven: org.apache.arrow:arrow-vector:0.8.0" level="project" /> | ||
167 | + <orderEntry type="library" name="Maven: org.apache.arrow:arrow-format:0.8.0" level="project" /> | ||
168 | + <orderEntry type="library" name="Maven: org.apache.arrow:arrow-memory:0.8.0" level="project" /> | ||
169 | + <orderEntry type="library" name="Maven: joda-time:joda-time:2.9.9" level="project" /> | ||
170 | + <orderEntry type="library" name="Maven: com.carrotsearch:hppc:0.7.2" level="project" /> | ||
171 | + <orderEntry type="library" name="Maven: com.vlkan:flatbuffers:1.2.0-3f79e055" level="project" /> | ||
172 | + <orderEntry type="library" name="Maven: com.databricks:spark-csv_2.11:1.5.0" level="project" /> | ||
173 | + <orderEntry type="library" name="Maven: org.apache.commons:commons-csv:1.1" level="project" /> | ||
174 | + </component> | ||
175 | +</module> | ... | ... |
.idea/markdown-exported-files.xml
0 → 100644
... | @@ -8,7 +8,17 @@ | ... | @@ -8,7 +8,17 @@ |
8 | </list> | 8 | </list> |
9 | </option> | 9 | </option> |
10 | </component> | 10 | </component> |
11 | - <component name="ProjectRootManager" version="2" languageLevel="JDK_1_8" default="true" project-jdk-name="1.8" project-jdk-type="JavaSDK"> | 11 | + <component name="ProjectRootManager" version="2" languageLevel="JDK_1_8" project-jdk-name="1.8" project-jdk-type="JavaSDK"> |
12 | <output url="file://$PROJECT_DIR$/out" /> | 12 | <output url="file://$PROJECT_DIR$/out" /> |
13 | </component> | 13 | </component> |
14 | + <component name="MavenProjectsManager"> | ||
15 | + <option name="originalFiles"> | ||
16 | + <list> | ||
17 | + <option value="$PROJECT_DIR$/pom.xml" /> | ||
18 | + </list> | ||
19 | + </option> | ||
20 | + </component> | ||
21 | + <component name="ProjectRootManager" version="2" languageLevel="JDK_1_8" default="false" project-jdk-name="1.8" project-jdk-type="JavaSDK"> | ||
22 | + <output url="file:///tmp" /> | ||
23 | + </component> | ||
14 | </project> | 24 | </project> |
... | \ No newline at end of file | ... | \ No newline at end of file | ... | ... |
2018-1-java.iml
100644 → 100755
File mode changed
README.md
100644 → 100755
File mode changed
pom.xml
100644 → 100755
... | @@ -31,7 +31,20 @@ | ... | @@ -31,7 +31,20 @@ |
31 | <artifactId>spark-csv_2.11</artifactId> | 31 | <artifactId>spark-csv_2.11</artifactId> |
32 | <version>1.5.0</version> | 32 | <version>1.5.0</version> |
33 | </dependency> | 33 | </dependency> |
34 | - | ||
35 | </dependencies> | 34 | </dependencies> |
36 | 35 | ||
36 | + <build> | ||
37 | + <plugins> | ||
38 | + <plugin> | ||
39 | + <groupId>org.apache.maven.plugins</groupId> | ||
40 | + <artifactId>maven-compiler-plugin</artifactId> | ||
41 | + <version>3.6.1</version> | ||
42 | + <configuration> | ||
43 | + <source>1.8</source> | ||
44 | + <target>1.8</target> | ||
45 | + </configuration> | ||
46 | + </plugin> | ||
47 | + </plugins> | ||
48 | + </build> | ||
49 | + | ||
37 | </project> | 50 | </project> |
... | \ No newline at end of file | ... | \ No newline at end of file | ... | ... |
src/main/java/Aggregation.java
0 → 100644
1 | +import org.apache.spark.sql.Dataset; | ||
2 | +import org.apache.spark.sql.Row; | ||
3 | +import org.apache.spark.sql.SparkSession; | ||
4 | +import org.apache.spark.sql.expressions.Window; | ||
5 | +import org.apache.spark.sql.expressions.WindowSpec; | ||
6 | + | ||
7 | +import static org.apache.spark.sql.functions.*; | ||
8 | +import static org.apache.spark.sql.functions.lit; | ||
9 | +import static org.apache.spark.sql.functions.when; | ||
10 | + | ||
11 | +public class Aggregation { | ||
12 | + | ||
13 | + public static void main(String[] args) throws Exception { | ||
14 | + | ||
15 | + //Create Session | ||
16 | + SparkSession spark = SparkSession | ||
17 | + .builder() | ||
18 | + .appName("Detecting Fraud Clicks") | ||
19 | + .master("local") | ||
20 | + .getOrCreate(); | ||
21 | + | ||
22 | + Aggregation agg = new Aggregation(); | ||
23 | + | ||
24 | + Dataset<Row> dataset = agg.loadCSVDataSet("./train_sample.csv", spark); | ||
25 | + dataset = agg.changeTimestempToLong(dataset); | ||
26 | + dataset = agg.averageValidClickCount(dataset); | ||
27 | + dataset = agg.clickTimeDelta(dataset); | ||
28 | + | ||
29 | + dataset.where("ip == '5348' and app == '19'").show(); | ||
30 | + | ||
31 | + } | ||
32 | + | ||
33 | + | ||
34 | + private Dataset<Row> loadCSVDataSet(String path, SparkSession spark){ | ||
35 | + // Read CSV into a Dataset | ||
36 | + Dataset<Row> dataset = spark.read().format("csv") | ||
37 | + .option("inferSchema", "true") | ||
38 | + .option("header", "true") | ||
39 | + .load("train_sample.csv"); | ||
40 | + return dataset; | ||
41 | + } | ||
42 | + | ||
43 | + private Dataset<Row> changeTimestempToLong(Dataset<Row> dataset){ | ||
44 | + // cast timestamp to long | ||
45 | + Dataset<Row> newDF = dataset.withColumn("utc_click_time", dataset.col("click_time").cast("long")); | ||
46 | + newDF = newDF.withColumn("utc_attributed_time", dataset.col("attributed_time").cast("long")); | ||
47 | + newDF = newDF.drop("click_time").drop("attributed_time"); | ||
48 | + return newDF; | ||
49 | + } | ||
50 | + | ||
51 | + private Dataset<Row> averageValidClickCount(Dataset<Row> dataset){ | ||
52 | + // window partitioned by 'ip' and 'app', ordered by 'utc_click_time', spanning the first row through the current row | ||
53 | + WindowSpec w = Window.partitionBy("ip", "app") | ||
54 | + .orderBy("utc_click_time") | ||
55 | + .rowsBetween(Window.unboundedPreceding(), Window.currentRow()); | ||
56 | + | ||
57 | + // aggregation | ||
58 | + Dataset<Row> newDF = dataset.withColumn("cum_count_click", count("utc_click_time").over(w)); | ||
59 | + newDF = newDF.withColumn("cum_sum_attributed", sum("is_attributed").over(w)); | ||
60 | + newDF = newDF.withColumn("avg_valid_click_count", col("cum_sum_attributed").divide(col("cum_count_click"))); | ||
61 | + newDF = newDF.drop("cum_count_click", "cum_sum_attributed"); | ||
62 | + return newDF; | ||
63 | + } | ||
64 | + | ||
65 | + private Dataset<Row> clickTimeDelta(Dataset<Row> dataset){ | ||
66 | + WindowSpec w = Window.partitionBy ("ip") | ||
67 | + .orderBy("utc_click_time"); | ||
68 | + | ||
69 | + Dataset<Row> newDF = dataset.withColumn("lag(utc_click_time)", lag("utc_click_time",1).over(w)); | ||
70 | + newDF = newDF.withColumn("click_time_delta", when(col("lag(utc_click_time)").isNull(), | ||
71 | + lit(0)).otherwise(col("utc_click_time")).minus(when(col("lag(utc_click_time)").isNull(), | ||
72 | + lit(0)).otherwise(col("lag(utc_click_time)")))); | ||
73 | + newDF = newDF.drop("lag(utc_click_time)"); | ||
74 | + return newDF; | ||
75 | + } | ||
76 | +} |
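Reviewer note (not part of the diff): the null-guarded subtraction in clickTimeDelta can be written more compactly with coalesce from org.apache.spark.sql.functions. A minimal sketch, assuming the utc_click_time column produced above; the class name is illustrative, and the behavior should match (the first click of each ip gets a delta of 0):

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.expressions.Window;
import org.apache.spark.sql.expressions.WindowSpec;

import static org.apache.spark.sql.functions.*;

public class ClickTimeDeltaSketch {
    // Same idea as Aggregation.clickTimeDelta: per-ip gap between consecutive clicks.
    static Dataset<Row> clickTimeDelta(Dataset<Row> dataset) {
        WindowSpec w = Window.partitionBy("ip").orderBy("utc_click_time");
        // coalesce(lag, current) makes the first row of each partition subtract itself, yielding 0.
        return dataset.withColumn("click_time_delta",
                col("utc_click_time").minus(
                        coalesce(lag("utc_click_time", 1).over(w),
                                col("utc_click_time"))));
    }
}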
src/main/java/AvgAdvTime.java
0 → 100644
1 | +import org.apache.spark.sql.Dataset; | ||
2 | +import org.apache.spark.sql.Row; | ||
3 | +import org.apache.spark.sql.SparkSession; | ||
4 | +import org.apache.spark.sql.expressions.Window; | ||
5 | +import org.apache.spark.sql.expressions.WindowSpec; | ||
6 | + | ||
7 | +import static org.apache.spark.sql.functions.col; | ||
8 | +import static org.apache.spark.sql.functions.count; | ||
9 | +import static org.apache.spark.sql.functions.sum; | ||
10 | + | ||
11 | + | ||
12 | +public class AvgAdvTime { | ||
13 | + | ||
14 | + public static void main(String[] args) throws Exception { | ||
15 | + | ||
16 | + // Start Spark Session | ||
17 | + SparkSession spark = SparkSession | ||
18 | + .builder() | ||
19 | + .master("local") | ||
20 | + .appName("Java Spark SQL basic example") | ||
21 | + .getOrCreate(); | ||
22 | + | ||
23 | + // Read CSV into a Dataset | ||
24 | + Dataset<Row> df = spark.read().format("csv") | ||
25 | + .option("inferSchema", "true") | ||
26 | + .option("header", "true") | ||
27 | + .load("train_sample.csv"); | ||
28 | + | ||
29 | + // cast timestamp to long | ||
30 | + Dataset<Row> newdf = df.withColumn("utc_click_time", df.col("click_time").cast("long")); | ||
31 | + newdf = newdf.withColumn("utc_attributed_time", df.col("attributed_time").cast("long")); | ||
32 | + newdf = newdf.drop("click_time").drop("attributed_time"); | ||
33 | + | ||
34 | + // window partitioned by 'ip' and 'app', ordered by 'utc_click_time', spanning the first row through the current row | ||
35 | + WindowSpec w = Window.partitionBy("ip", "app") | ||
36 | + .orderBy("utc_click_time") | ||
37 | + .rowsBetween(Window.unboundedPreceding(), Window.currentRow()); | ||
38 | + | ||
39 | + // aggregation | ||
40 | + newdf = newdf.withColumn("cum_count_click", count("utc_click_time").over(w)); | ||
41 | + newdf = newdf.withColumn("cum_sum_attributed", sum("is_attributed").over(w)); | ||
42 | + newdf = newdf.withColumn("avg_efficient", col("cum_sum_attributed").divide(col("cum_count_click"))); | ||
43 | + | ||
44 | + // print example | ||
45 | + newdf.where("ip == '5348' and app == '19'").show(); | ||
46 | + newdf.printSchema(); | ||
47 | + | ||
48 | + } | ||
49 | +} | ||
... | \ No newline at end of file | ... | \ No newline at end of file |
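Reviewer note (not part of the diff): cum_sum_attributed / cum_count_click is a running share of attributed clicks per (ip, app). As a cross-check, the same window aggregation can be expressed in plain Spark SQL; a sketch assuming the ip, app, click_time, and is_attributed columns used above (the view name logs is illustrative):

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class AvgAdvTimeSqlSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .master("local")
                .appName("AvgAdvTime SQL sketch")
                .getOrCreate();

        Dataset<Row> df = spark.read().format("csv")
                .option("inferSchema", "true")
                .option("header", "true")
                .load("train_sample.csv");

        df.withColumn("utc_click_time", df.col("click_time").cast("long"))
                .createOrReplaceTempView("logs");

        // Running share of attributed clicks per (ip, app); Spark's "/" yields a double.
        spark.sql("SELECT ip, app, utc_click_time, "
                + "sum(is_attributed) OVER (PARTITION BY ip, app ORDER BY utc_click_time "
                + "ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) "
                + "/ count(utc_click_time) OVER (PARTITION BY ip, app ORDER BY utc_click_time "
                + "ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS avg_efficient "
                + "FROM logs WHERE ip = 5348 AND app = 19").show();
    }
}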
src/main/java/DateUtil.java
deleted
100644 → 0
1 | -import java.text.ParseException; | ||
2 | -import java.text.SimpleDateFormat; | ||
3 | -import java.util.Calendar; | ||
4 | - | ||
5 | -/** | ||
6 | - * Utility class collecting Calendar-related helper functions | ||
7 | - * | ||
8 | - * @author croute | ||
9 | - * @since 2011.02.10 | ||
10 | - */ | ||
11 | -public class DateUtil | ||
12 | -{ | ||
13 | - | ||
14 | - /** | ||
15 | - * Converts a Calendar object to a string in yyyy-MM-dd HH:mm:ss format. | ||
16 | - * | ||
17 | - * @param cal the Calendar object | ||
18 | - * @return the formatted string | ||
19 | - */ | ||
20 | - public static String StringFromCalendar(Calendar cal) | ||
21 | - { | ||
22 | - // Convert the date to a string for transmission | ||
23 | - SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); | ||
24 | - return formatter.format(cal.getTime()); | ||
25 | - } | ||
26 | - | ||
27 | - /** | ||
28 | - * Converts a Calendar object to a string in yyyy-MM-dd format. | ||
29 | - * | ||
30 | - * @param cal the Calendar object | ||
31 | - * @return the formatted string | ||
32 | - */ | ||
33 | - public static String StringSimpleFromCalendar(Calendar cal) | ||
34 | - { | ||
35 | - // Convert the date to a string for transmission | ||
36 | - SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); | ||
37 | - return formatter.format(cal.getTime()); | ||
38 | - } | ||
39 | - | ||
40 | - /** | ||
41 | - * Converts a string in yyyy-MM-dd HH:mm:ss format to a Calendar object. | ||
42 | - * If the conversion fails, today's date is returned. | ||
43 | - * | ||
44 | - * @param date the date string | ||
45 | - * @return the parsed Calendar object | ||
46 | - */ | ||
47 | - public static Calendar CalendarFromString(String date) | ||
48 | - { | ||
49 | - if (date.length() == 0) | ||
50 | - return null; | ||
51 | - Calendar cal = Calendar.getInstance(); | ||
52 | - try | ||
53 | - { | ||
54 | - //String oldstring = "2011-01-18 00:00:00.0"; | ||
55 | - // Date date = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.S").parse(oldstring); | ||
56 | - SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); | ||
57 | - cal.setTime(formatter.parse(date)); | ||
58 | - } | ||
59 | - catch(ParseException e) | ||
60 | - { | ||
61 | - e.printStackTrace(); | ||
62 | - } | ||
63 | - return cal; | ||
64 | - } | ||
65 | - | ||
66 | - /** | ||
67 | - * Converts a string in yyyy-MM-dd format to a Calendar object. | ||
68 | - * If the conversion fails, today's date is returned. | ||
69 | - * | ||
70 | - * @param date the date string | ||
71 | - * @return the parsed Calendar object | ||
72 | - */ | ||
73 | - public static Calendar CalendarFromStringSimple(String date) | ||
74 | - { | ||
75 | - Calendar cal = Calendar.getInstance(); | ||
76 | - | ||
77 | - try | ||
78 | - { | ||
79 | - SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); | ||
80 | - cal.setTime(formatter.parse(date)); | ||
81 | - } | ||
82 | - catch(ParseException e) | ||
83 | - { | ||
84 | - e.printStackTrace(); | ||
85 | - } | ||
86 | - return cal; | ||
87 | - } | ||
88 | -} | ||
... | \ No newline at end of file | ... | \ No newline at end of file |
src/main/java/MapExample.java
deleted
100644 → 0
1 | -import org.apache.spark.SparkConf; | ||
2 | -import org.apache.spark.api.java.JavaPairRDD; | ||
3 | -import org.apache.spark.api.java.JavaRDD; | ||
4 | -import org.apache.spark.api.java.JavaSparkContext; | ||
5 | -import org.apache.spark.api.java.function.Function; | ||
6 | -import org.apache.spark.sql.Dataset; | ||
7 | -import org.apache.spark.sql.Row; | ||
8 | -import org.apache.spark.sql.SQLContext; | ||
9 | -import org.apache.spark.sql.SparkSession; | ||
10 | -import org.apache.spark.sql.types.StructType; | ||
11 | -import scala.Serializable; | ||
12 | -import scala.Tuple2; | ||
13 | - | ||
14 | -import java.util.*; | ||
15 | - | ||
16 | -//ip,app,device,os,channel,click_time,attributed_time,is_attributed | ||
17 | -//87540,12,1,13,497,2017-11-07 09:30:38,,0 | ||
18 | -class Record implements Serializable { | ||
19 | - Integer ip; | ||
20 | - Integer app; | ||
21 | - Integer device; | ||
22 | - Integer os; | ||
23 | - Integer channel; | ||
24 | - Calendar clickTime; | ||
25 | - Calendar attributedTime; | ||
26 | - Boolean isAttributed; | ||
27 | - Integer clickInTenMins; | ||
28 | - | ||
29 | - // constructor , getters and setters | ||
30 | - public Record(int pIp, int pApp, int pDevice, int pOs, int pChannel, Calendar pClickTime, Calendar pAttributedTime, boolean pIsAttributed) { | ||
31 | - ip = new Integer(pIp); | ||
32 | - app = new Integer(pApp); | ||
33 | - device = new Integer(pDevice); | ||
34 | - os = new Integer(pOs); | ||
35 | - channel = new Integer(pChannel); | ||
36 | - clickTime = pClickTime; | ||
37 | - attributedTime = pAttributedTime; | ||
38 | - isAttributed = new Boolean(pIsAttributed); | ||
39 | - clickInTenMins = new Integer(0); | ||
40 | - } | ||
41 | - | ||
42 | - public Record(int pIp, int pApp, int pDevice, int pOs, int pChannel, Calendar pClickTime, Calendar pAttributedTime, boolean pIsAttributed, int pClickInTenMins) { | ||
43 | - ip = new Integer(pIp); | ||
44 | - app = new Integer(pApp); | ||
45 | - device = new Integer(pDevice); | ||
46 | - os = new Integer(pOs); | ||
47 | - channel = new Integer(pChannel); | ||
48 | - clickTime = pClickTime; | ||
49 | - attributedTime = pAttributedTime; | ||
50 | - isAttributed = new Boolean(pIsAttributed); | ||
51 | - clickInTenMins = new Integer(pClickInTenMins); | ||
52 | - } | ||
53 | -} | ||
54 | - | ||
55 | -class RecordComparator implements Comparator<Record> { | ||
56 | - @Override | ||
57 | - public int compare(Record v1 , Record v2) { | ||
58 | -// if(a.ano < b.ano) return -1; | ||
59 | -// else if(a.ano == b.ano) return 0; | ||
60 | -// else return 1; | ||
61 | - if (v1.ip.compareTo(v2.ip) == 0) { | ||
62 | - return v1.clickTime.compareTo(v2.clickTime); | ||
63 | - } | ||
64 | - return v1.ip.compareTo(v2.ip); | ||
65 | - } | ||
66 | -} | ||
67 | - | ||
68 | -public class MapExample { | ||
69 | - | ||
70 | - static SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("Cesco"); | ||
71 | - static JavaSparkContext sc = new JavaSparkContext(conf); | ||
72 | - static SQLContext sqlContext = new SQLContext(sc); | ||
73 | - | ||
74 | - public static void main(String[] args) throws Exception { | ||
75 | - JavaRDD<String> file = sc.textFile("/Users/hyeongyunmun/Dropbox/DetectFraudClick/data/train.csv", 1); | ||
76 | - | ||
77 | - final String header = file.first(); | ||
78 | - JavaRDD<String> data = file.filter(line -> !line.equalsIgnoreCase(header)); | ||
79 | - | ||
80 | - JavaRDD<Record> records = data.map(line -> { | ||
81 | - String[] fields = line.split(","); | ||
82 | - Record sd = new Record(Integer.parseInt(fields[0]), Integer.parseInt(fields[1]), Integer.parseInt(fields[2]), Integer.parseInt(fields[3]), Integer.parseInt(fields[4]), DateUtil.CalendarFromString(fields[5]), DateUtil.CalendarFromString(fields[6]), "1".equalsIgnoreCase(fields[7].trim())); | ||
83 | - return sd; | ||
84 | - }); | ||
85 | - | ||
86 | -// JavaRDD<Tuple4<Integer,Double,Long,Integer>> secondSortRDD = firstSortRDD.keyBy(new Function<Tuple4<Integer, Double, Long, Integer>, Tuple2<Double, Long>>(){ | ||
87 | -// @Override | ||
88 | -// public Tuple2<Double, Long> call(Tuple4<Integer, Double, Long, Integer> value) throws Exception { | ||
89 | -// return new Tuple2(value._2(),value._3()); | ||
90 | -// }}).sortByKey(new TupleComparator()).values(); | ||
91 | - | ||
92 | - JavaRDD<Record> firstSorted = records.sortBy(new Function<Record, Calendar>() { | ||
93 | - @Override | ||
94 | - public Calendar call(Record record) throws Exception { | ||
95 | - return record.clickTime; | ||
96 | - } | ||
97 | - }, true, 1); | ||
98 | - | ||
99 | - JavaRDD<Record> sortedRecords = firstSorted.sortBy(new Function<Record, Integer>() { | ||
100 | - @Override | ||
101 | - public Integer call(Record record) throws Exception { | ||
102 | - return record.ip.intValue(); | ||
103 | - } | ||
104 | - }, true, 1); | ||
105 | - | ||
106 | - | ||
107 | - /* | ||
108 | - //Tried to sort by both keys in one pass, but it failed | ||
109 | - JavaRDD<Record> sortedRecords = records.keyBy(new Function<Record, Record>(){ | ||
110 | - @Override | ||
111 | - public Record call(Record record) throws Exception { | ||
112 | - return new Record(record.ip, record.app, record.device, record.os, record.channel, record.clickTime, record.attributedTime, record.isAttributed); | ||
113 | - }}).sortByKey(new RecordComparator()).values(); | ||
114 | - */ | ||
115 | - | ||
116 | -// System.out.println("sortedRecords"); | ||
117 | -// sortedRecords.foreach(record -> {System.out.println(record.ip + " " + record.clickTime.getTime());}); | ||
118 | - | ||
119 | -// System.out.println("make result"); | ||
120 | - /* | ||
121 | - //Tried to fetch the next record inside map, but it failed | ||
122 | - JavaRDD<Record> result = sortedRecords.map(record -> { | ||
123 | - System.out.println("make addTen"); | ||
124 | - Calendar addTen = Calendar.getInstance(); | ||
125 | - addTen.setTime(record.clickTime.getTime()); | ||
126 | - addTen.add(Calendar.MINUTE, 10); | ||
127 | - | ||
128 | - System.out.println("make count"); | ||
129 | - int count = 0; | ||
130 | - for (Record temp: sortedRecords.collect()) { | ||
131 | - if (temp.ip.compareTo(record.ip) == 0 && temp.clickTime.compareTo(record.clickTime) > 0 && temp.clickTime.compareTo(addTen)< 0) | ||
132 | - count++; | ||
133 | - } | ||
134 | - | ||
135 | - return new Record(record.ip, record.app, record.device, record.os, record.channel, record.clickTime, record.attributedTime, record.isAttributed, count); | ||
136 | - }); | ||
137 | - */ | ||
138 | -// System.out.println("result"); | ||
139 | -// result.foreach(record -> {System.out.println(record.ip + " " + record.clickTime.getTime());}); | ||
140 | - | ||
141 | - /* | ||
142 | - | ||
143 | - for (final ListIterator<String> it = list.listIterator(); it.hasNext();) { | ||
144 | - final String s = it.next(); | ||
145 | - System.out.println(it.previousIndex() + ": " + s); | ||
146 | - } | ||
147 | - | ||
148 | - for (ListIterator<Record> it = sortedRecords.collect().listIterator(); it.hasNext(); it = it.nextIndex()) { | ||
149 | - it. | ||
150 | - if (temp.ip.compareTo(record.ip) == 0 && temp.clickTime.compareTo(record.clickTime) > 0 && temp.clickTime.compareTo(addTen)< 0) | ||
151 | - count++; | ||
152 | - } | ||
153 | - */ | ||
154 | - | ||
155 | - | ||
156 | - List<Record> list = sortedRecords.collect(); | ||
157 | - | ||
158 | - List<Record> resultList = new ArrayList<Record>(); | ||
159 | - for (int i = 0; i < list.size(); i++) { | ||
160 | - //System.out.println(list.get(i).ip); | ||
161 | - | ||
162 | - Record record = list.get(i); | ||
163 | - | ||
164 | - Calendar addTen = Calendar.getInstance(); | ||
165 | - addTen.setTime(record.clickTime.getTime()); | ||
166 | - addTen.add(Calendar.MINUTE, 10); | ||
167 | - | ||
168 | - int count = 0; | ||
169 | - | ||
170 | - for (int j = i+1; j < list.size() && list.get(j).ip.compareTo(record.ip) == 0 | ||
171 | - && list.get(j).clickTime.compareTo(record.clickTime) > 0 &&list.get(j).clickTime.compareTo(addTen) < 0; j++) | ||
172 | - count++; | ||
173 | - | ||
174 | - resultList.add(new Record(record.ip, record.app, record.device, record.os, record.channel, record.clickTime, record.attributedTime, record.isAttributed, count)); | ||
175 | - | ||
176 | - } | ||
177 | - | ||
178 | - | ||
179 | - JavaRDD<Record> result = sc.parallelize(resultList); | ||
180 | - result.foreach(record -> {System.out.println(record.ip + " " + record.clickTime.getTime() + " " + record.clickInTenMins);}); | ||
181 | - | ||
182 | - } | ||
183 | -} |
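Reviewer note (not part of the diff): the abandoned one-pass sort ("tried to sort by both keys in one pass, but it failed") most plausibly failed because Spark serializes the comparator passed to sortByKey and ships it to the executors, and RecordComparator does not implement Serializable. A hedged sketch of the fix, reusing the Record class from the deleted file; the comparator name is illustrative:

import java.io.Serializable;
import java.util.Comparator;

// Usable with records.keyBy(r -> r).sortByKey(new SerializableRecordComparator()).values():
// the comparator must be Serializable because Spark sends it to the executors.
class SerializableRecordComparator implements Comparator<Record>, Serializable {
    @Override
    public int compare(Record v1, Record v2) {
        int byIp = v1.ip.compareTo(v2.ip);                               // primary key: ip
        return byIp != 0 ? byIp : v1.clickTime.compareTo(v2.clickTime);  // tiebreak: click time
    }
}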
src/main/java/calForwardTimeDelta.java
0 → 100644
1 | +import org.apache.spark.SparkConf; | ||
2 | +import org.apache.spark.api.java.JavaSparkContext; | ||
3 | +import org.apache.spark.sql.Dataset; | ||
4 | +import org.apache.spark.sql.Row; | ||
5 | +import org.apache.spark.sql.SparkSession; | ||
6 | +import org.apache.spark.sql.expressions.Window; | ||
7 | +import org.apache.spark.sql.expressions.WindowSpec; | ||
8 | + | ||
9 | +import javax.xml.crypto.Data; | ||
10 | + | ||
11 | +import static org.apache.spark.sql.functions.*; | ||
12 | + | ||
13 | +public class calForwardTimeDelta { | ||
14 | + static SparkConf conf = new SparkConf().setMaster("local[*]").setAppName("Cesco"); | ||
15 | + static JavaSparkContext sc = new JavaSparkContext(conf); | ||
16 | + | ||
17 | + public static void main(String[] args) throws Exception{ | ||
18 | + //Create Session | ||
19 | + SparkSession spark = SparkSession | ||
20 | + .builder() | ||
21 | + .appName("Detecting Fraud Clicks") | ||
22 | + .getOrCreate(); | ||
23 | + | ||
24 | + //run methods here | ||
25 | + calcDelta(spark); | ||
26 | + } | ||
27 | + | ||
28 | + private static void calcDelta(SparkSession spark){ | ||
29 | + // path to the input file | ||
30 | + String filepath = "train_sample.csv"; | ||
31 | + | ||
32 | + // create Dataset from files | ||
33 | + Dataset<Row> logDF = spark.read() | ||
34 | + .format("csv") | ||
35 | + .option("inferSchema", "true") | ||
36 | + .option("header","true") | ||
37 | + .load(filepath); | ||
38 | + | ||
39 | + // cast the timestamp columns (click_time, attributed_time) to long | ||
40 | + | ||
41 | + //add column for long(click_time) | ||
42 | + Dataset<Row> newDF = logDF.withColumn("utc_click_time", logDF.col("click_time").cast("long")); | ||
43 | + //add column for long(attributed_time) | ||
44 | + newDF = newDF.withColumn("utc_attributed_time", logDF.col("attributed_time").cast("long")); | ||
45 | + //drop timestamp type columns | ||
46 | + newDF = newDF.drop("click_time").drop("attributed_time"); | ||
47 | + newDF.createOrReplaceTempView("logs"); | ||
48 | + | ||
49 | + WindowSpec w = Window.partitionBy ("ip") | ||
50 | + .orderBy("utc_click_time"); | ||
51 | + | ||
52 | + newDF = newDF.withColumn("lag(utc_click_time)", lag("utc_click_time",1).over(w)); | ||
53 | + newDF.where("ip=10").show(); | ||
54 | + newDF = newDF.withColumn("delta", when(col("lag(utc_click_time)").isNull(),lit(0)).otherwise(col("utc_click_time")).minus(when(col("lag(utc_click_time)").isNull(),lit(0)).otherwise(col("lag(utc_click_time)")))); | ||
55 | + //newDF = newDF.withColumn("delta", datediff()); | ||
56 | + newDF = newDF.drop("lag(utc_click_time)"); | ||
57 | + newDF = newDF.orderBy("ip"); | ||
58 | + | ||
59 | + newDF.show(); | ||
60 | + } | ||
61 | + | ||
62 | +} |
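Reviewer note (not part of the diff): the delta column is meaningful because casting a Spark timestamp to long yields epoch seconds, so the gaps between consecutive clicks of the same ip come out in seconds. A minimal check, with the session timezone pinned so the literal maps to a known epoch value; the class name is illustrative:

import org.apache.spark.sql.SparkSession;

public class EpochCastSketch {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .master("local")
                .appName("cast-to-long sketch")
                .getOrCreate();
        // Timestamp strings are interpreted in the session timezone; pin it to UTC.
        spark.conf().set("spark.sql.session.timeZone", "UTC");
        // cast(timestamp AS long) returns epoch seconds: this prints 10.
        spark.sql("SELECT cast(cast('1970-01-01 00:00:10' AS timestamp) AS long) AS s").show();
    }
}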
src/main/java/valid.java
deleted
100644 → 0
src/test/java/testValid.java
100644 → 100755
File mode changed
train_sample.csv
0 → 100644
This diff could not be displayed because it is too large.