Skip to content

Commit

Permalink
CDAP-21027: address review comments and test on Dataproc
Browse files Browse the repository at this point in the history
  • Loading branch information
sahusanket committed Jul 24, 2024
1 parent dedf0dd commit 005cb34
Show file tree
Hide file tree
Showing 7 changed files with 14 additions and 17 deletions.
1 change: 1 addition & 0 deletions cdap-app-fabric/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -269,6 +269,7 @@
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest-core</artifactId>
<version>2.2</version>
<scope>test</scope>
</dependency>
</dependencies>

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.mapred.FileAlreadyExistsException;
import org.apache.hadoop.mapred.InvalidJobConfException;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobStatus;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
Expand Down Expand Up @@ -74,6 +75,15 @@ public class DynamicPartitioningOutputCommitter extends FileOutputCommitter {
/**
 * Creates a committer rooted at {@code outputPath} for the given task attempt.
 *
 * @param outputPath the job-specific output directory this committer manages
 * @param context the task attempt whose configuration is consulted
 * @throws IOException if the superclass fails to initialize against the output path
 * @throws IllegalArgumentException if the file output committer algorithm version is not 1
 */
public DynamicPartitioningOutputCommitter(Path outputPath, TaskAttemptContext context)
throws IOException {
  super(outputPath, context);

  // This committer only works with `mapreduce.fileoutputcommitter.algorithm.version` = 1.
  // Since Hadoop 3 the default is 2; isCommitJobRepeatable() returns true for version 2,
  // so fail fast instead of producing incorrect partition output later.
  if (isCommitJobRepeatable(context)) {
    // Fix: original concatenation rendered as "...set to 1.But Found 2." (missing space).
    throw new IllegalArgumentException("DynamicPartitioningOutputCommitter requires the Hadoop "
        + "conf `mapreduce.fileoutputcommitter.algorithm.version` to be set to 1, "
        + "but found 2.");
  }

  this.taskContext = context;
  this.jobSpecificOutputPath = outputPath;
}
Expand Down
1 change: 0 additions & 1 deletion cdap-formats/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,6 @@
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.13.4.2</version>
<scope>provided</scope>
</dependency>
</dependencies>
Expand Down
10 changes: 0 additions & 10 deletions cdap-runtime-ext-dataproc/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -135,16 +135,6 @@
<groupId>org.powermock</groupId>
<artifactId>powermock-api-mockito2</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<scope>compile</scope>
</dependency>
</dependencies>

<dependencyManagement>
Expand Down
5 changes: 0 additions & 5 deletions cdap-security/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -89,22 +89,18 @@
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-server</artifactId>
<version>${jetty9.version}</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-security</artifactId>
<version>${jetty9.version}</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
<version>${jetty9.version}</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-jaspi</artifactId>
<version>${jetty9.version}</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
Expand All @@ -114,7 +110,6 @@
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>javax.servlet-api</artifactId>
<version>3.1.0</version>
</dependency>
<dependency>
<groupId>org.apache.geronimo.components</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -124,6 +124,8 @@ public static void init() throws Exception {
hConf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, TEMP_FOLDER.newFolder().getAbsolutePath());
hConf.setBoolean("ipc.client.fallback-to-simple-auth-allowed", true);
hConf.setBoolean("ignore.secure.ports.for.testing", true);
hConf.setInt("dfs.namenode.metrics.logger.period.seconds", -1);
hConf.setInt("dfs.datanode.metrics.logger.period.seconds", -1);

miniDFSCluster = new MiniDFSCluster.Builder(hConf).numDataNodes(1).build();
miniDFSCluster.waitClusterUp();
Expand Down
2 changes: 1 addition & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -158,7 +158,7 @@
<rs-api.version>2.0</rs-api.version>
<!-- Please keep consistent with the one in Spark 3 -->
<scala2.12.version>2.12.15</scala2.12.version>
<servlet.api.version>3.0.1</servlet.api.version>
<servlet.api.version>3.1.0</servlet.api.version>
<slf4j.version>1.7.15</slf4j.version>
<snappy.version>1.1.1.7</snappy.version>
<spark3.artifacts.dir>spark3_2.12</spark3.artifacts.dir>
Expand Down

0 comments on commit 005cb34

Please sign in to comment.