Remove unused pipeline orttraining-linux-gpu-perf-test-ci-pipeline.yml and unused send_perf_metrics tool. (#10326)

This commit is contained in:
Edward Chen 2022-01-21 14:31:34 -08:00 committed by GitHub
parent 141606534c
commit bfabef081d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 0 additions and 337 deletions

4
.gitignore vendored
View file

@@ -49,10 +49,6 @@ java/gradle
java/.gradle
java/hs_*.log
onnxruntime/python/version_info.py
/tools/perf_util/target/classes/com/msft/send_perf_metrics
/tools/perf_util/send_perf_metrics.iml
/tools/perf_util/target/classes
/tools/perf_util/src/main/resources
/orttraining/orttraining/eager/ort_aten.g.cpp
/orttraining/orttraining/eager/ort_customops.g.cpp
/csharp/**/packages

View file

@@ -1,90 +0,0 @@
trigger: none

jobs:
- job: Onnxruntime_Linux_GPU_Training_Perf_Test
  timeoutInMinutes: 120
  variables:
  - group: 'ortperf'  # variable group

  steps:
  - checkout: self
    clean: true
    submodules: recursive

  # Build the training-enabled perf-test image.
  - template: templates/run-docker-build-steps.yml
    parameters:
      RunDockerBuildArgs: >
        -o ubuntu20.04 -d gpu
        -t onnxruntime_perf_test_image
        -x "
        --config RelWithDebInfo
        --enable_training
        --update --build --cmake_extra_defines CMAKE_CUDA_ARCHITECTURES=70
        "
      DisplayName: 'Build performance tests'

  - bash: tools/ci_build/github/linux/docker/scripts/training/azure_scale_set_vm_mount_test_data.sh -p $(orttrainingtestdatascus-storage-key) -s "//orttrainingtestdatascus.file.core.windows.net/bert-data" -d "/bert_data"
    displayName: 'Mount bert-data'
    condition: succeededOrFailed()  # ensure all tests are run

  - bash: tools/ci_build/github/linux/docker/scripts/training/azure_scale_set_vm_mount_test_data.sh -p $(orttrainingtestdatascus-storage-key) -s "//orttrainingtestdatascus.file.core.windows.net/gpt2-data" -d "/gpt2_data"
    displayName: 'Mount gpt2 test data'
    condition: succeededOrFailed()  # ensure all tests are run

  - script: >
      docker run --gpus all --rm --name onnxruntime-gpu-perf
      --volume $(Build.SourcesDirectory):/onnxruntime_src
      --volume $(Build.BinariesDirectory):/build
      --volume /bert_data/bert_models:/build/bert_models:ro
      --volume /bert_data:/build/bert_data:ro
      -e NIGHTLY_BUILD onnxruntime_perf_test_image
      /usr/bin/python3 /onnxruntime_src/orttraining/tools/ci_test/run_bert_perf_test.py
      --binary_dir /build/RelWithDebInfo
      --training_data_root /build/bert_data
      --model_root /build/bert_models
    displayName: 'Run bert performance tests'
    condition: succeededOrFailed()
    timeoutInMinutes: 120

  - script: >
      docker run --gpus all --rm --name onnxruntime-gpu-perf
      --volume $(Build.SourcesDirectory):/onnxruntime_src
      --volume $(Build.BinariesDirectory):/build
      --volume /gpt2_data/gpt2_models:/build/gpt2_models:ro
      --volume /gpt2_data:/build/gpt2_data:ro
      -e NIGHTLY_BUILD onnxruntime_perf_test_image
      /usr/bin/python3 /onnxruntime_src/orttraining/tools/ci_test/run_gpt2_perf_test.py
      --binary_dir /build/RelWithDebInfo
      --training_data_root /build/gpt2_data
      --model_root /build/gpt2_models
    displayName: 'Run gpt-2 performance tests'
    condition: succeededOrFailed()
    timeoutInMinutes: 120

  # generate jdbc.properties
  - script: >
      mkdir -p $(Build.SourcesDirectory)/tools/perf_util/src/main/resources &&
      printf "url=jdbc:mysql://onnxruntimedashboard.mysql.database.azure.com/onnxruntime?serverTimezone=UTC&useUnicode=true&characterEncoding=UTF-8\nuser=powerbi@onnxruntimedashboard\npassword_env=ORT_PERF_PASSWORD"
      > $(Build.SourcesDirectory)/tools/perf_util/src/main/resources/jdbc.properties
    displayName: 'Create resource file'

  - script: >
      mvn package
    displayName: 'Maven build'
    workingDirectory: $(Build.SourcesDirectory)/tools/perf_util

  # process json files
  - script: >
      java -cp target/send_perf_metrics-0.0.1-SNAPSHOT-jar-with-dependencies.jar com.msft.send_perf_metrics.App "$(Build.SourcesDirectory)/orttraining/tools/ci_test/results"
    env:
      ORT_PERF_PASSWORD: $(ortperf)
    displayName: 'Populate perf metrics'
    workingDirectory: $(Build.SourcesDirectory)/tools/perf_util

  - template: templates/component-governance-component-detection-steps.yml
    parameters:
      condition: 'succeeded'

  - template: templates/clean-agent-build-directory-step.yml

View file

@@ -1,56 +0,0 @@
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>com.msft</groupId>
  <artifactId>send_perf_metrics</artifactId>
  <version>0.0.1-SNAPSHOT</version>
  <packaging>jar</packaging>
  <name>send_perf_metrics</name>
  <url>http://maven.apache.org</url>

  <build>
    <plugins>
      <!-- Produce a single runnable fat jar so the CI step can invoke
           java -cp ...-jar-with-dependencies.jar directly. -->
      <plugin>
        <artifactId>maven-assembly-plugin</artifactId>
        <version>3.1.1</version>
        <configuration>
          <descriptorRefs>
            <descriptorRef>jar-with-dependencies</descriptorRef>
          </descriptorRefs>
        </configuration>
        <executions>
          <execution>
            <id>make-assembly</id> <!-- this is used for inheritance merges -->
            <phase>package</phase> <!-- bind to the packaging phase -->
            <goals>
              <goal>single</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.compiler.source>1.8</maven.compiler.source>
    <maven.compiler.target>1.8</maven.compiler.target>
  </properties>

  <dependencies>
    <!-- https://mvnrepository.com/artifact/com.googlecode.json-simple/json-simple -->
    <dependency>
      <groupId>com.googlecode.json-simple</groupId>
      <artifactId>json-simple</artifactId>
      <version>1.1.1</version>
    </dependency>
    <dependency>
      <groupId>mysql</groupId>
      <artifactId>mysql-connector-java</artifactId>
      <version>8.0.22</version>
    </dependency>
  </dependencies>
</project>

View file

@@ -1,168 +0,0 @@
package com.msft.send_perf_metrics;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import java.io.*;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.text.SimpleDateFormat;
import java.util.*;
/**
 * Walks a directory tree of perf-test result JSON files and uploads the
 * metrics they contain into the onnxruntime MySQL dashboard database.
 *
 * Usage: App &lt;results_dir&gt;
 */
public class App {

    /**
     * Runs {@code commands} in {@code source_dir} with stderr merged into
     * stdout, and returns the first line of output.
     *
     * NOTE(review): output is read only after waitFor(); that is fine for the
     * short git commands used here, but could deadlock if a command produced
     * enough output to fill the pipe buffer before exiting.
     *
     * @throws RuntimeException if the process exits with a non-zero status
     */
    static String exec_command(Path source_dir, String... commands) throws Exception {
        ProcessBuilder sb = new ProcessBuilder(commands).directory(source_dir.toFile()).redirectErrorStream(true);
        Process p = sb.start();
        if (p.waitFor() != 0)
            throw new RuntimeException("execute " + String.join(" ", commands) + " failed");
        try (BufferedReader r = new BufferedReader(new InputStreamReader(p.getInputStream()))) {
            return r.readLine();
        }
    }

    public static void main(String[] args) throws Exception {
        final Path source_dir = Paths.get(args[0]);
        final List<Path> perf_metrics = new ArrayList<Path>();

        // Collect every non-hidden *.json file under source_dir, skipping
        // hidden directories (e.g. .git).
        Files.walkFileTree(source_dir, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
                String dirname = dir.getFileName().toString();
                // FIX: original used reference comparison (dirname != "."), which
                // is always true for a runtime-built String; use equals() so the
                // current directory itself is never skipped.
                if (!dirname.equals(".") && dirname.startsWith("."))
                    return FileVisitResult.SKIP_SUBTREE;
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                String filename = file.getFileName().toString();
                if (!filename.startsWith(".") && filename.endsWith(".json")) {
                    perf_metrics.add(file);
                    System.out.println(filename);
                }
                return FileVisitResult.CONTINUE;
            }
        });

        final Path cwd_dir = Paths.get(System.getProperty("user.dir"));
        // Identify the commit being measured: its SHA, and the commit date
        // (formatted yyyy-MM-dd) used as the batch id.
        // git rev-parse HEAD
        String commit_id = exec_command(cwd_dir, "git", "rev-parse", "HEAD");
        String date = exec_command(cwd_dir, "git", "show", "-s", "--format=%ci", commit_id);
        final SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss Z");
        java.util.Date commitDate = sdf.parse(date);
        final SimpleDateFormat simple_date_format = new SimpleDateFormat("yyyy-MM-dd");
        String batch_id = simple_date_format.format(commitDate);
        System.out.println(String.format("Commit change date: %s", batch_id));

        // collect all json files list
        processPerfMetrics(perf_metrics, commit_id, batch_id);
        // TODO - add e2e tests later, run it w/ process command
    }

    /**
     * Parses each JSON metrics file and inserts its contents into MySQL.
     * The connection is closed when all files have been processed.
     */
    private static void processPerfMetrics(final List<Path> perf_metrics, String commit_id,
            String batch_id) throws Exception {
        // FIX: try-with-resources so the connection is always closed
        // (the original leaked it).
        try (Connection conn = JdbcUtil.GetConn()) {
            System.out.println("MySQL DB connection established.\n");
            // go thru each json file
            JSONParser jsonParser = new JSONParser();
            for (Path metrics_json : perf_metrics) {
                try (FileReader reader = new FileReader(metrics_json.toAbsolutePath().toString())) {
                    // Read JSON file
                    Object obj = jsonParser.parse(reader);
                    loadMetricsIntoMySQL(conn, commit_id, batch_id, (JSONObject) obj);
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        }
    }

    /**
     * Builds and executes an INSERT ... ON DUPLICATE KEY UPDATE statement for
     * one metrics JSON object. Top-level keys map directly to columns;
     * "DerivedProperties" is flattened into its sub-keys.
     */
    static private void loadMetricsIntoMySQL(java.sql.Connection conn, String commit_id, String batch_id,
            JSONObject json_object) throws Exception {
        // field name -> json value (insertion order preserved so placeholder
        // indexes line up with column order)
        Map<String, Object> field_mapping = new LinkedHashMap<>();
        Set<String> update_on_duplicate_fields =
                new LinkedHashSet<>(Arrays.asList("AvgTimePerBatch", "Throughput", "StabilizedThroughput", "EndToEndThroughput", "TotalTime", "AvgCPU", "Memory"));
        field_mapping.put("BatchId", batch_id);
        field_mapping.put("CommitId", commit_id.substring(0, 8));
        json_object.forEach((key, value) -> {
            if (key.equals("DerivedProperties")) {
                JSONObject properties = (JSONObject) json_object.get("DerivedProperties");
                properties.forEach((sub_key, sub_value) -> {
                    field_mapping.put((String) sub_key, sub_value);
                });
            } else {
                field_mapping.put((String) key, value);
            }
        });

        // building sql statement
        // NOTE(review): column names come from the JSON keys and are not
        // parameterizable; this is safe only because the input files are
        // produced by our own CI, not by untrusted users.
        StringBuilder sb = new StringBuilder("INSERT INTO perf_test_training_data (");
        field_mapping.forEach((key, value) -> {
            sb.append(key).append(",");
        });
        sb.append("Time) values (");
        for (int i = 0; i < field_mapping.size(); i++) {
            sb.append("?,");
        }
        sb.append("Now()) ON DUPLICATE KEY UPDATE ");
        update_on_duplicate_fields.forEach((key) -> {
            if (field_mapping.get(key) != null) {
                sb.append(key).append("=?,");
            }
        });
        // The substring(0, length-1) drops the trailing comma.
        // NOTE(review): if none of update_on_duplicate_fields is present this
        // instead truncates "UPDATE " and yields invalid SQL — assumed never to
        // happen with real metrics files; confirm against producers.
        try (java.sql.PreparedStatement st = conn.prepareStatement(sb.substring(0, sb.length() - 1))) {
            int i = 0; // param index
            for (Map.Entry<String, Object> entry : field_mapping.entrySet()) {
                setSqlParam(++i, st, entry.getValue());
            }
            // update section
            for (String key : update_on_duplicate_fields) {
                Object value = field_mapping.get(key);
                if (value != null) {
                    setSqlParam(++i, st, value);
                }
            }
            st.executeUpdate();
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        }
    }

    /**
     * Binds one JSON value to the prepared statement, narrowing JSON's
     * Long/Double to the column types int/float.
     *
     * @throws Exception for any value type other than String/Long/Double/Boolean
     */
    static void setSqlParam(int param_index, PreparedStatement st, Object value) throws Exception {
        if (value instanceof String) {
            st.setString(param_index, (String) value);
        } else if (value instanceof Long) {
            st.setInt(param_index, (int) (long) value);
        } else if (value instanceof Double) {
            st.setFloat(param_index, (float) (double) value);
        } else if (value instanceof Boolean) {
            st.setBoolean(param_index, (Boolean) value);
        } else {
            throw new Exception("Unsupported data type:" + value.getClass().getName());
        }
    }
}

View file

@@ -1,19 +0,0 @@
package com.msft.send_perf_metrics;
import java.sql.DriverManager;
import java.util.Map;
import java.util.Properties;
/**
 * Creates JDBC connections from the bundled /jdbc.properties resource.
 * The properties file supplies the url and user; the password is read at
 * runtime from the environment variable named by "password_env" so it is
 * never stored on disk.
 */
public class JdbcUtil {
    static java.sql.Connection GetConn() throws Exception {
        try (java.io.InputStream stream = App.class.getResourceAsStream("/jdbc.properties")) {
            if (stream == null)
                throw new RuntimeException("Error reading jdbc properties");
            Properties config = new Properties();
            config.load(stream);
            String url = config.getProperty("url");
            String user = config.getProperty("user");
            // loading password via env variable
            String password = System.getenv(config.getProperty("password_env"));
            return DriverManager.getConnection(url, user, password);
        }
    }
}