Skip to content

Commit f15b69d

Browse files
author
lijinglun
committed
HADOOP-19236. Incorporate VolcanoEngine Cloud TOS File System Implementation.
1 parent 964e089 commit f15b69d

File tree

156 files changed

+23493
-0
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

156 files changed

+23493
-0
lines changed

hadoop-cloud-storage-project/hadoop-cloud-storage/pom.xml

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -136,6 +136,17 @@
136136
<groupId>org.apache.hadoop</groupId>
137137
<artifactId>hadoop-huaweicloud</artifactId>
138138
<scope>compile</scope>
139+
<exclusions>
140+
<exclusion>
141+
<groupId>com.squareup.okhttp3</groupId>
142+
<artifactId>okhttp</artifactId>
143+
</exclusion>
144+
</exclusions>
145+
</dependency>
146+
<dependency>
147+
<groupId>org.apache.hadoop</groupId>
148+
<artifactId>hadoop-tos</artifactId>
149+
<scope>compile</scope>
139150
</dependency>
140151
</dependencies>
141152
</project>
Lines changed: 167 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,167 @@
1+
<?xml version="1.0" encoding="UTF-8"?>
<!--
  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License. See accompanying LICENSE file.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-project</artifactId>
    <version>3.5.0-SNAPSHOT</version>
    <relativePath>../../../hadoop-project/pom.xml</relativePath>
  </parent>

  <artifactId>hadoop-tos-core</artifactId>
  <version>3.5.0-SNAPSHOT</version>
  <name>Apache Hadoop Volcano Engine Services Core</name>
  <description>
    This module contains code to support integration with Volcano Engine TOS.
    It also declares the dependencies needed to work with Volcano Engine services.
  </description>
  <packaging>jar</packaging>

  <properties>
    <file.encoding>UTF-8</file.encoding>
    <ve-tos-java-sdk.version>2.8.6</ve-tos-java-sdk.version>
    <tos.shading.prefix>org.apache.hadoop.fs.tosfs.shaded</tos.shading.prefix>
  </properties>

  <dependencyManagement>
    <dependencies>
      <!-- The ve-tos-java-sdk 2.8.6 depends on okhttp, but okhttp violates dependency convergence.
           The com.squareup.okhttp3:okhttp:jar:4.10.0 depends on org.jetbrains.kotlin:kotlin-stdlib-common,
           with 2 different versions: 1.5.31 and 1.6.20. Force configure it to 1.6.20. -->
      <dependency>
        <groupId>org.jetbrains.kotlin</groupId>
        <artifactId>kotlin-stdlib-common</artifactId>
        <version>1.6.20</version>
      </dependency>
    </dependencies>
  </dependencyManagement>

  <dependencies>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-client-core</artifactId>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-tos-shade</artifactId>
      <exclusions>
        <!-- The shaded artifact already relocates the SDK; keep the
             unshaded SDK off the classpath to avoid duplicate classes. -->
        <exclusion>
          <groupId>com.volcengine</groupId>
          <artifactId>ve-tos-java-sdk</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

    <!-- Test dependencies -->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <scope>test</scope>
      <type>test-jar</type>
    </dependency>
    <!-- Artifacts needed to bring up a Mini MR Yarn cluster-->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-mapreduce-examples</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-minicluster</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-distcp</artifactId>
      <version>${hadoop.version}</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-distcp</artifactId>
      <version>${hadoop.version}</version>
      <scope>test</scope>
      <type>test-jar</type>
    </dependency>

    <dependency>
      <groupId>org.assertj</groupId>
      <artifactId>assertj-core</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-core</artifactId>
      <version>4.11.0</version>
      <scope>test</scope>
    </dependency>
  </dependencies>

  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-surefire-plugin</artifactId>
        <!-- Use the Hadoop-wide surefire version property instead of a
             hardcoded 2.21.0, consistent with the shade plugin below. -->
        <version>${maven-surefire-plugin.version}</version>
        <configuration>
          <parallel>classes</parallel>
          <threadCount>1</threadCount>
          <perCoreThreadCount>true</perCoreThreadCount>
          <rerunFailingTestsCount>2</rerunFailingTestsCount>
          <forkCount>8</forkCount>
          <argLine>-Xmx2048m</argLine>
        </configuration>
      </plugin>

      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-shade-plugin</artifactId>
        <version>${maven-shade-plugin.version}</version>
        <executions>
          <execution>
            <id>package-dependency</id>
            <phase>package</phase>
            <goals>
              <goal>shade</goal>
            </goals>
            <configuration>
              <shadeSourcesContent>true</shadeSourcesContent>
              <createSourcesJar>true</createSourcesJar>
              <artifactSet>
                <includes>
                  <include>org.apache.hadoop:hadoop-tos-shade</include>
                </includes>
              </artifactSet>
            </configuration>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
</project>
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
19+
package org.apache.hadoop.fs.tosfs;
20+
21+
import org.apache.hadoop.conf.Configuration;
22+
import org.apache.hadoop.fs.DelegateToFileSystem;
23+
24+
import java.io.IOException;
25+
import java.net.URI;
26+
import java.net.URISyntaxException;
27+
28+
/**
29+
* The implementation class of the raw AbstractFileSystem. If you want to use object storage as
30+
* YARN’s resource storage dir via the fs.defaultFS configuration property in Hadoop’s
31+
* core-site.xml, you should add this configuration to Hadoop's core-site.xml.
32+
* <pre>
33+
* fs.AbstractFileSystem.{scheme}.impl=io.proton.fs.RawFS.
34+
* </pre>
35+
*/
36+
public class RawFS extends DelegateToFileSystem {
37+
private static final int TOS_DEFAULT_PORT = -1;
38+
39+
public RawFS(URI uri, Configuration conf) throws IOException, URISyntaxException {
40+
super(uri, new RawFileSystem(), conf, uri.getScheme(), false);
41+
}
42+
43+
@Override
44+
public int getUriDefaultPort() {
45+
return TOS_DEFAULT_PORT;
46+
}
47+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,59 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
19+
package org.apache.hadoop.fs.tosfs;
20+
21+
import org.apache.hadoop.fs.Path;
22+
import org.apache.hadoop.util.Preconditions;
23+
24+
import java.util.Objects;
25+
26+
public class RawFSUtils {
27+
private RawFSUtils() {
28+
}
29+
30+
/**
31+
* @return true means the node is included in the subtree which has the root node.
32+
*/
33+
public static boolean inSubtree(String root, String p) {
34+
return inSubtree(new Path(root), new Path(p));
35+
}
36+
37+
/**
38+
* @return true means the node is included in the subtree which has the root node.
39+
*/
40+
public static boolean inSubtree(Path root, Path node) {
41+
Preconditions.checkNotNull(root, "Root cannot be null");
42+
Preconditions.checkNotNull(node, "Node cannot be null");
43+
if (root.isRoot()) {
44+
return true;
45+
}
46+
47+
if (Objects.equals(root, node)) {
48+
return true;
49+
}
50+
51+
while (!node.isRoot()) {
52+
if (Objects.equals(root, node)) {
53+
return true;
54+
}
55+
node = node.getParent();
56+
}
57+
return false;
58+
}
59+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
19+
package org.apache.hadoop.fs.tosfs;
20+
21+
import org.apache.hadoop.fs.FileStatus;
22+
import org.apache.hadoop.fs.Path;
23+
import org.apache.hadoop.fs.tosfs.object.Constants;
24+
25+
public class RawFileStatus extends FileStatus {
26+
private final byte[] checksum;
27+
28+
/**
29+
* File status of directory
30+
*
31+
* @param path directory path
32+
* @param owner directory owner
33+
*/
34+
public RawFileStatus(Path path, String owner) {
35+
this(0, true, 1, System.currentTimeMillis(), path, owner, Constants.MAGIC_CHECKSUM);
36+
}
37+
38+
public RawFileStatus(
39+
long length, boolean isdir, long blocksize,
40+
long modification_time, Path path, String owner, byte[] checksum) {
41+
super(length, isdir, 1, blocksize, modification_time, path);
42+
setOwner(owner);
43+
setGroup(owner);
44+
this.checksum = checksum;
45+
}
46+
47+
public byte[] checksum() {
48+
return checksum;
49+
}
50+
}

0 commit comments

Comments
 (0)