Explicitly name TF build cache destination file
GitHub's API no longer sends the artifact name as the file name, so we ended up with a file named after the artifact ID instead. Name the full destination file path explicitly so there's no room for such changes to affect us.
parent a5c981bb48
commit 2af6f8da89
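For context, here is a minimal sketch of the new download approach the diff below implements: instead of handing the `download` package a destination directory and letting the server-provided file name decide the result, the asset is streamed straight into an explicitly named file. It assumes the `download` npm package and Node's built-in `stream`/`util` modules as used in the action; the `artifact`, `token`, and `path` parameters are placeholders for the release asset object, the GitHub token, and the action's `path` input.

```js
const fs = require('fs');
const pathname = require('path');
const Download = require('download');
const Util = require('util');
const Stream = require('stream');

// Promisified stream pipeline, as used in the action's main.js.
const Pipeline = Util.promisify(Stream.pipeline);

// Sketch only: download a release asset to an explicit destination file.
async function downloadToExplicitPath(artifact, token, path) {
  // Make sure the containing directory exists.
  const dir = pathname.dirname(path);
  fs.mkdirSync(dir, { recursive: true });

  // Stream the asset into the requested file name, so the file on disk
  // never depends on what name the API reports for the artifact.
  await Pipeline(
    Download(artifact.url, {
      headers: {
        "Accept": "application/octet-stream",
        "Authorization": `token ${token}`,
      },
    }),
    fs.createWriteStream(path)
  );
}
```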
@@ -14,6 +14,10 @@ const fs = __nccwpck_require__(5747);
const { throttling } = __nccwpck_require__(9968);
const { GitHub } = __nccwpck_require__(3030);
const Download = __nccwpck_require__(7490);
const Util = __nccwpck_require__(1669);
const Stream = __nccwpck_require__(2413);

const Pipeline = Util.promisify(Stream.pipeline);

async function getGoodArtifacts(client, owner, repo, releaseId, name) {
console.log(`==> GET /repos/${owner}/${repo}/releases/${releaseId}/assets`);
@@ -101,22 +105,24 @@ async function main() {
console.log("==> # artifacts:", goodArtifacts.length);

const artifact = goodArtifacts[0];

console.log("==> Artifact:", artifact.id)

const size = filesize(artifact.size, { base: 10 })
console.log(`==> Downloading: ${artifact.name} (${size}) to path: ${path}`)

console.log("==> Downloading:", artifact.name, `(${size})`)

const dir = name ? path : pathname.join(path, artifact.name)
const dir = pathname.dirname(path)
console.log(`==> Creating containing dir if needed: ${dir}`)
fs.mkdirSync(dir, { recursive: true })

await Download(artifact.url, dir, {
headers: {
"Accept": "application/octet-stream",
"Authorization": `token ${token}`,
},
});
await Pipeline(
Download(artifact.url, {
headers: {
"Accept": "application/octet-stream",
"Authorization": `token ${token}`,
},
}),
fs.createWriteStream(path)
)
}

if (artifactStatus === "missing" && download == "true") {
@@ -30667,7 +30673,7 @@ module.exports = eval("require")("original-fs");
/***/ ((module) => {

"use strict";
module.exports = JSON.parse("{\"_from\":\"got@^8.3.1\",\"_id\":\"got@8.3.2\",\"_inBundle\":false,\"_integrity\":\"sha512-qjUJ5U/hawxosMryILofZCkm3C84PLJS/0grRIpjAwu+Lkxxj5cxeCU25BG0/3mDSpXKTyZr8oh8wIgLaH0QCw==\",\"_location\":\"/got\",\"_phantomChildren\":{},\"_requested\":{\"type\":\"range\",\"registry\":true,\"raw\":\"got@^8.3.1\",\"name\":\"got\",\"escapedName\":\"got\",\"rawSpec\":\"^8.3.1\",\"saveSpec\":null,\"fetchSpec\":\"^8.3.1\"},\"_requiredBy\":[\"/download\"],\"_resolved\":\"https://registry.npmjs.org/got/-/got-8.3.2.tgz\",\"_shasum\":\"1d23f64390e97f776cac52e5b936e5f514d2e937\",\"_spec\":\"got@^8.3.1\",\"_where\":\"/Users/reubenmorais/Development/STT/.github/actions/check_artifact_exists/node_modules/download\",\"ava\":{\"concurrency\":4},\"browser\":{\"decompress-response\":false,\"electron\":false},\"bugs\":{\"url\":\"https://github.com/sindresorhus/got/issues\"},\"bundleDependencies\":false,\"dependencies\":{\"@sindresorhus/is\":\"^0.7.0\",\"cacheable-request\":\"^2.1.1\",\"decompress-response\":\"^3.3.0\",\"duplexer3\":\"^0.1.4\",\"get-stream\":\"^3.0.0\",\"into-stream\":\"^3.1.0\",\"is-retry-allowed\":\"^1.1.0\",\"isurl\":\"^1.0.0-alpha5\",\"lowercase-keys\":\"^1.0.0\",\"mimic-response\":\"^1.0.0\",\"p-cancelable\":\"^0.4.0\",\"p-timeout\":\"^2.0.1\",\"pify\":\"^3.0.0\",\"safe-buffer\":\"^5.1.1\",\"timed-out\":\"^4.0.1\",\"url-parse-lax\":\"^3.0.0\",\"url-to-options\":\"^1.0.1\"},\"deprecated\":false,\"description\":\"Simplified HTTP requests\",\"devDependencies\":{\"ava\":\"^0.25.0\",\"coveralls\":\"^3.0.0\",\"form-data\":\"^2.1.1\",\"get-port\":\"^3.0.0\",\"nyc\":\"^11.0.2\",\"p-event\":\"^1.3.0\",\"pem\":\"^1.4.4\",\"proxyquire\":\"^1.8.0\",\"sinon\":\"^4.0.0\",\"slow-stream\":\"0.0.4\",\"tempfile\":\"^2.0.0\",\"tempy\":\"^0.2.1\",\"universal-url\":\"1.0.0-alpha\",\"xo\":\"^0.20.0\"},\"engines\":{\"node\":\">=4\"},\"files\":[\"index.js\",\"errors.js\"],\"homepage\":\"https://github.com/sindresorhus/got#readme\",\"keywords\":[\"http\",\"https\",\"get\",\"got\",\"url\",\"uri\",\"request\",\"util\",\"utility\",\"simple\",\"curl\",\"wget\",\"fetch\",\"net\",\"network\",\"electron\"],\"license\":\"MIT\",\"maintainers\":[{\"name\":\"Sindre Sorhus\",\"email\":\"sindresorhus@gmail.com\",\"url\":\"sindresorhus.com\"},{\"name\":\"Vsevolod Strukchinsky\",\"email\":\"floatdrop@gmail.com\",\"url\":\"github.com/floatdrop\"},{\"name\":\"Alexander Tesfamichael\",\"email\":\"alex.tesfamichael@gmail.com\",\"url\":\"alextes.me\"}],\"name\":\"got\",\"repository\":{\"type\":\"git\",\"url\":\"git+https://github.com/sindresorhus/got.git\"},\"scripts\":{\"coveralls\":\"nyc report --reporter=text-lcov | coveralls\",\"test\":\"xo && nyc ava\"},\"version\":\"8.3.2\"}");
module.exports = JSON.parse("{\"_args\":[[\"got@8.3.2\",\"/Users/reubenmorais/Development/STT/.github/actions/check_artifact_exists\"]],\"_development\":true,\"_from\":\"got@8.3.2\",\"_id\":\"got@8.3.2\",\"_inBundle\":false,\"_integrity\":\"sha512-qjUJ5U/hawxosMryILofZCkm3C84PLJS/0grRIpjAwu+Lkxxj5cxeCU25BG0/3mDSpXKTyZr8oh8wIgLaH0QCw==\",\"_location\":\"/got\",\"_phantomChildren\":{},\"_requested\":{\"type\":\"version\",\"registry\":true,\"raw\":\"got@8.3.2\",\"name\":\"got\",\"escapedName\":\"got\",\"rawSpec\":\"8.3.2\",\"saveSpec\":null,\"fetchSpec\":\"8.3.2\"},\"_requiredBy\":[\"/download\"],\"_resolved\":\"https://registry.npmjs.org/got/-/got-8.3.2.tgz\",\"_spec\":\"8.3.2\",\"_where\":\"/Users/reubenmorais/Development/STT/.github/actions/check_artifact_exists\",\"ava\":{\"concurrency\":4},\"browser\":{\"decompress-response\":false,\"electron\":false},\"bugs\":{\"url\":\"https://github.com/sindresorhus/got/issues\"},\"dependencies\":{\"@sindresorhus/is\":\"^0.7.0\",\"cacheable-request\":\"^2.1.1\",\"decompress-response\":\"^3.3.0\",\"duplexer3\":\"^0.1.4\",\"get-stream\":\"^3.0.0\",\"into-stream\":\"^3.1.0\",\"is-retry-allowed\":\"^1.1.0\",\"isurl\":\"^1.0.0-alpha5\",\"lowercase-keys\":\"^1.0.0\",\"mimic-response\":\"^1.0.0\",\"p-cancelable\":\"^0.4.0\",\"p-timeout\":\"^2.0.1\",\"pify\":\"^3.0.0\",\"safe-buffer\":\"^5.1.1\",\"timed-out\":\"^4.0.1\",\"url-parse-lax\":\"^3.0.0\",\"url-to-options\":\"^1.0.1\"},\"description\":\"Simplified HTTP requests\",\"devDependencies\":{\"ava\":\"^0.25.0\",\"coveralls\":\"^3.0.0\",\"form-data\":\"^2.1.1\",\"get-port\":\"^3.0.0\",\"nyc\":\"^11.0.2\",\"p-event\":\"^1.3.0\",\"pem\":\"^1.4.4\",\"proxyquire\":\"^1.8.0\",\"sinon\":\"^4.0.0\",\"slow-stream\":\"0.0.4\",\"tempfile\":\"^2.0.0\",\"tempy\":\"^0.2.1\",\"universal-url\":\"1.0.0-alpha\",\"xo\":\"^0.20.0\"},\"engines\":{\"node\":\">=4\"},\"files\":[\"index.js\",\"errors.js\"],\"homepage\":\"https://github.com/sindresorhus/got#readme\",\"keywords\":[\"http\",\"https\",\"get\",\"got\",\"url\",\"uri\",\"request\",\"util\",\"utility\",\"simple\",\"curl\",\"wget\",\"fetch\",\"net\",\"network\",\"electron\"],\"license\":\"MIT\",\"maintainers\":[{\"name\":\"Sindre Sorhus\",\"email\":\"sindresorhus@gmail.com\",\"url\":\"sindresorhus.com\"},{\"name\":\"Vsevolod Strukchinsky\",\"email\":\"floatdrop@gmail.com\",\"url\":\"github.com/floatdrop\"},{\"name\":\"Alexander Tesfamichael\",\"email\":\"alex.tesfamichael@gmail.com\",\"url\":\"alextes.me\"}],\"name\":\"got\",\"repository\":{\"type\":\"git\",\"url\":\"git+https://github.com/sindresorhus/got.git\"},\"scripts\":{\"coveralls\":\"nyc report --reporter=text-lcov | coveralls\",\"test\":\"xo && nyc ava\"},\"version\":\"8.3.2\"}");

/***/ }),

@@ -30683,7 +30689,7 @@ module.exports = JSON.parse("{\"application/1d-interleaved-parityfec\":{\"source
/***/ ((module) => {

"use strict";
module.exports = JSON.parse("{\"_from\":\"seek-bzip@^1.0.5\",\"_id\":\"seek-bzip@1.0.6\",\"_inBundle\":false,\"_integrity\":\"sha512-e1QtP3YL5tWww8uKaOCQ18UxIT2laNBXHjV/S2WYCiK4udiv8lkG89KRIoCjUagnAmCBurjF4zEVX2ByBbnCjQ==\",\"_location\":\"/seek-bzip\",\"_phantomChildren\":{},\"_requested\":{\"type\":\"range\",\"registry\":true,\"raw\":\"seek-bzip@^1.0.5\",\"name\":\"seek-bzip\",\"escapedName\":\"seek-bzip\",\"rawSpec\":\"^1.0.5\",\"saveSpec\":null,\"fetchSpec\":\"^1.0.5\"},\"_requiredBy\":[\"/decompress-tarbz2\"],\"_resolved\":\"https://registry.npmjs.org/seek-bzip/-/seek-bzip-1.0.6.tgz\",\"_shasum\":\"35c4171f55a680916b52a07859ecf3b5857f21c4\",\"_spec\":\"seek-bzip@^1.0.5\",\"_where\":\"/Users/reubenmorais/Development/STT/.github/actions/check_artifact_exists/node_modules/decompress-tarbz2\",\"bin\":{\"seek-bunzip\":\"bin/seek-bunzip\",\"seek-table\":\"bin/seek-bzip-table\"},\"bugs\":{\"url\":\"https://github.com/cscott/seek-bzip/issues\"},\"bundleDependencies\":false,\"contributors\":[{\"name\":\"C. Scott Ananian\",\"url\":\"http://cscott.net\"},{\"name\":\"Eli Skeggs\"},{\"name\":\"Kevin Kwok\"},{\"name\":\"Rob Landley\",\"url\":\"http://landley.net\"}],\"dependencies\":{\"commander\":\"^2.8.1\"},\"deprecated\":false,\"description\":\"a pure-JavaScript Node.JS module for random-access decoding bzip2 data\",\"devDependencies\":{\"fibers\":\"~1.0.6\",\"mocha\":\"~2.2.5\"},\"directories\":{\"test\":\"test\"},\"homepage\":\"https://github.com/cscott/seek-bzip#readme\",\"license\":\"MIT\",\"main\":\"./lib/index.js\",\"name\":\"seek-bzip\",\"repository\":{\"type\":\"git\",\"url\":\"git+https://github.com/cscott/seek-bzip.git\"},\"scripts\":{\"test\":\"mocha\"},\"version\":\"1.0.6\"}");
module.exports = JSON.parse("{\"_args\":[[\"seek-bzip@1.0.6\",\"/Users/reubenmorais/Development/STT/.github/actions/check_artifact_exists\"]],\"_development\":true,\"_from\":\"seek-bzip@1.0.6\",\"_id\":\"seek-bzip@1.0.6\",\"_inBundle\":false,\"_integrity\":\"sha512-e1QtP3YL5tWww8uKaOCQ18UxIT2laNBXHjV/S2WYCiK4udiv8lkG89KRIoCjUagnAmCBurjF4zEVX2ByBbnCjQ==\",\"_location\":\"/seek-bzip\",\"_phantomChildren\":{},\"_requested\":{\"type\":\"version\",\"registry\":true,\"raw\":\"seek-bzip@1.0.6\",\"name\":\"seek-bzip\",\"escapedName\":\"seek-bzip\",\"rawSpec\":\"1.0.6\",\"saveSpec\":null,\"fetchSpec\":\"1.0.6\"},\"_requiredBy\":[\"/decompress-tarbz2\"],\"_resolved\":\"https://registry.npmjs.org/seek-bzip/-/seek-bzip-1.0.6.tgz\",\"_spec\":\"1.0.6\",\"_where\":\"/Users/reubenmorais/Development/STT/.github/actions/check_artifact_exists\",\"bin\":{\"seek-bunzip\":\"bin/seek-bunzip\",\"seek-table\":\"bin/seek-bzip-table\"},\"bugs\":{\"url\":\"https://github.com/cscott/seek-bzip/issues\"},\"contributors\":[{\"name\":\"C. Scott Ananian\",\"url\":\"http://cscott.net\"},{\"name\":\"Eli Skeggs\"},{\"name\":\"Kevin Kwok\"},{\"name\":\"Rob Landley\",\"url\":\"http://landley.net\"}],\"dependencies\":{\"commander\":\"^2.8.1\"},\"description\":\"a pure-JavaScript Node.JS module for random-access decoding bzip2 data\",\"devDependencies\":{\"fibers\":\"~1.0.6\",\"mocha\":\"~2.2.5\"},\"directories\":{\"test\":\"test\"},\"homepage\":\"https://github.com/cscott/seek-bzip#readme\",\"license\":\"MIT\",\"main\":\"./lib/index.js\",\"name\":\"seek-bzip\",\"repository\":{\"type\":\"git\",\"url\":\"git+https://github.com/cscott/seek-bzip.git\"},\"scripts\":{\"test\":\"mocha\"},\"version\":\"1.0.6\"}");

/***/ }),

.github/actions/check_artifact_exists/main.js (26 changes, vendored)
@@ -7,6 +7,10 @@ const fs = require('fs');
const { throttling } = require('@octokit/plugin-throttling');
const { GitHub } = require('@actions/github/lib/utils');
const Download = require('download');
const Util = require('util');
const Stream = require('stream');

const Pipeline = Util.promisify(Stream.pipeline);

async function getGoodArtifacts(client, owner, repo, releaseId, name) {
console.log(`==> GET /repos/${owner}/${repo}/releases/${releaseId}/assets`);
@@ -94,22 +98,24 @@ async function main() {
console.log("==> # artifacts:", goodArtifacts.length);

const artifact = goodArtifacts[0];

console.log("==> Artifact:", artifact.id)

const size = filesize(artifact.size, { base: 10 })
console.log(`==> Downloading: ${artifact.name} (${size}) to path: ${path}`)

console.log("==> Downloading:", artifact.name, `(${size})`)

const dir = name ? path : pathname.join(path, artifact.name)
const dir = pathname.dirname(path)
console.log(`==> Creating containing dir if needed: ${dir}`)
fs.mkdirSync(dir, { recursive: true })

await Download(artifact.url, dir, {
headers: {
"Accept": "application/octet-stream",
"Authorization": `token ${token}`,
},
});
await Pipeline(
Download(artifact.url, {
headers: {
"Accept": "application/octet-stream",
"Authorization": `token ${token}`,
},
}),
fs.createWriteStream(path)
)
}

if (artifactStatus === "missing" && download == "true") {
.github/workflows/build-and-test.yml (72 changes, vendored)
@@ -394,15 +394,15 @@ jobs:
- uses: ./.github/actions/check_artifact_exists
with:
name: ${{ needs.tensorflow_opt-Linux.outputs.cache_key }}.tar.xz
path: ${{ github.workspace }}/
path: ${{ github.workspace }}/tf-cache.tar.xz
download: true
- name: Install dependencies
run: |
apt-get update
apt-get install -y --no-install-recommends xz-utils zip
- run: |
tar --skip-old-files -xf ${{ needs.tensorflow_opt-linux.outputs.cache_key }}.tar.xz
rm ${{ needs.tensorflow_opt-Linux.outputs.cache_key }}.tar.xz
tar --skip-old-files -xf tf-cache.tar.xz
rm tf-cache.tar.xz
- name: Setup venv
run: |
/opt/python/cp37-cp37m/bin/python -m venv /tmp/venv
@@ -1353,11 +1353,11 @@ jobs:
- uses: ./.github/actions/check_artifact_exists
with:
name: ${{ needs.tensorflow_opt-macOS.outputs.cache_key }}.tar.xz
path: ${{ github.workspace }}/
path: ${{ github.workspace }}/tf-cache.tar.xz
download: true
- run: |
tar xkf ${{ needs.tensorflow_opt-macOS.outputs.cache_key }}.tar.xz
rm ${{ needs.tensorflow_opt-macOS.outputs.cache_key }}.tar.xz
tar xkf tf-cache.tar.xz
rm tf-cache.tar.xz
- run: |
git status
- uses: ./.github/actions/select-xcode
@@ -1818,11 +1818,11 @@ jobs:
- uses: ./.github/actions/check_artifact_exists
with:
name: ${{ needs.tensorflow_opt-Windows.outputs.cache_key }}.tar.xz
path: ${{ github.workspace }}/
path: ${{ github.workspace }}/tf-cache.tar.xz
download: true
- run: |
"C:/Program Files/7-Zip/7z.exe" x ${{ needs.tensorflow_opt-Windows.outputs.cache_key }}.tar.xz -so | "C:/Program Files/7-Zip/7z.exe" x -aos -si -ttar -o`pwd`
rm ${{ needs.tensorflow_opt-Windows.outputs.cache_key }}.tar.xz
"C:/Program Files/7-Zip/7z.exe" x tf-cache.tar.xz -so | "C:/Program Files/7-Zip/7z.exe" x -aos -si -ttar -o`pwd`
rm tf-cache.tar.xz
- name: Workaround bazel bug when LLVM is installed https://github.com/bazelbuild/bazel/issues/12144
run: |
rm -f /c/msys64/mingw64/clang-cl*
@@ -2696,11 +2696,11 @@ jobs:
- uses: ./.github/actions/check_artifact_exists
with:
name: ${{ needs.tensorflow_opt-LinuxArmv7.outputs.cache_key }}.tar.xz
path: ${{ github.workspace }}/
path: ${{ github.workspace }}/tf-cache.tar.xz
download: true
- run: |
tar --skip-old-files -xf ${{ needs.tensorflow_opt-linuxarmv7.outputs.cache_key }}.tar.xz
rm ${{ needs.tensorflow_opt-LinuxArmv7.outputs.cache_key }}.tar.xz
tar --skip-old-files -xf tf-cache.tar.xz
rm tf-cache.tar.xz
- run: |
git status
- name: "Install chroot"
@@ -2733,11 +2733,11 @@ jobs:
- uses: ./.github/actions/check_artifact_exists
with:
name: ${{ needs.tensorflow_opt-LinuxAarch64.outputs.cache_key }}.tar.xz
path: ${{ github.workspace }}/
path: ${{ github.workspace }}/tf-cache.tar.xz
download: true
- run: |
tar --skip-old-files -xf ${{ needs.tensorflow_opt-linuxaarch64.outputs.cache_key }}.tar.xz
rm ${{ needs.tensorflow_opt-LinuxAarch64.outputs.cache_key }}.tar.xz
tar --skip-old-files -xf tf-cache.tar.xz
rm tf-cache.tar.xz
- run: |
git status
- name: "Install chroot"
@@ -2794,11 +2794,11 @@ jobs:
- uses: ./.github/actions/check_artifact_exists
with:
name: ${{ needs.tensorflow_opt-LinuxArmv7.outputs.cache_key }}.tar.xz
path: ${{ github.workspace }}/
path: ${{ github.workspace }}/tf-cache.tar.xz
download: true
- run: |
tar --skip-old-files -xf ${{ needs.tensorflow_opt-linuxarmv7.outputs.cache_key }}.tar.xz
rm ${{ needs.tensorflow_opt-LinuxArmv7.outputs.cache_key }}.tar.xz
tar --skip-old-files -xf tf-cache.tar.xz
rm tf-cache.tar.xz
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
@@ -2860,11 +2860,11 @@ jobs:
- uses: ./.github/actions/check_artifact_exists
with:
name: ${{ needs.tensorflow_opt-LinuxArmv7.outputs.cache_key }}.tar.xz
path: ${{ github.workspace }}/
path: ${{ github.workspace }}/tf-cache.tar.xz
download: true
- run: |
tar --skip-old-files -xf ${{ needs.tensorflow_opt-linuxarmv7.outputs.cache_key }}.tar.xz
rm ${{ needs.tensorflow_opt-LinuxArmv7.outputs.cache_key }}.tar.xz
tar --skip-old-files -xf tf-cache.tar.xz
rm tf-cache.tar.xz
- uses: ./.github/actions/install-xldd
with:
target: ${{ env.SYSTEM_TARGET }}
@@ -2939,11 +2939,11 @@ jobs:
- uses: ./.github/actions/check_artifact_exists
with:
name: ${{ needs.tensorflow_opt-LinuxAarch64.outputs.cache_key }}.tar.xz
path: ${{ github.workspace }}/
path: ${{ github.workspace }}/tf-cache.tar.xz
download: true
- run: |
tar --skip-old-files -xf ${{ needs.tensorflow_opt-linuxaarch64.outputs.cache_key }}.tar.xz
rm ${{ needs.tensorflow_opt-LinuxAarch64.outputs.cache_key }}.tar.xz
tar --skip-old-files -xf tf-cache.tar.xz
rm tf-cache.tar.xz
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
@@ -3005,11 +3005,11 @@ jobs:
- uses: ./.github/actions/check_artifact_exists
with:
name: ${{ needs.tensorflow_opt-LinuxAarch64.outputs.cache_key }}.tar.xz
path: ${{ github.workspace }}/
path: ${{ github.workspace }}/tf-cache.tar.xz
download: true
- run: |
tar --skip-old-files -xf ${{ needs.tensorflow_opt-linuxaarch64.outputs.cache_key }}.tar.xz
rm ${{ needs.tensorflow_opt-LinuxAarch64.outputs.cache_key }}.tar.xz
tar --skip-old-files -xf tf-cache.tar.xz
rm tf-cache.tar.xz
- uses: ./.github/actions/install-xldd
with:
target: ${{ env.SYSTEM_TARGET }}
@@ -3397,11 +3397,11 @@ jobs:
- uses: ./.github/actions/check_artifact_exists
with:
name: ${{ needs.tensorflow_opt-AndroidArmv7.outputs.cache_key }}.tar.xz
path: ${{ github.workspace }}/
path: ${{ github.workspace }}/tf-cache.tar.xz
download: true
- run: |
tar --skip-old-files -xf ${{ needs.tensorflow_opt-AndroidArmv7.outputs.cache_key }}.tar.xz
rm ${{ needs.tensorflow_opt-AndroidArmv7.outputs.cache_key }}.tar.xz
tar --skip-old-files -xf tf-cache.tar.xz
rm tf-cache.tar.xz
- uses: ./.github/actions/libstt-build
with:
arch: android-armv7
@@ -3469,11 +3469,11 @@ jobs:
- uses: ./.github/actions/check_artifact_exists
with:
name: ${{ needs.tensorflow_opt-AndroidArm64.outputs.cache_key }}.tar.xz
path: ${{ github.workspace }}/
path: ${{ github.workspace }}/tf-cache.tar.xz
download: true
- run: |
tar --skip-old-files -xf ${{ needs.tensorflow_opt-AndroidArm64.outputs.cache_key }}.tar.xz
rm ${{ needs.tensorflow_opt-AndroidArm64.outputs.cache_key }}.tar.xz
tar --skip-old-files -xf tf-cache.tar.xz
rm tf-cache.tar.xz
- uses: ./.github/actions/libstt-build
with:
arch: android-arm64
@@ -3493,11 +3493,11 @@ jobs:
- uses: ./.github/actions/check_artifact_exists
with:
name: ${{ needs.tensorflow_opt-AndroidArm64.outputs.cache_key }}.tar.xz
path: ${{ github.workspace }}/
path: ${{ github.workspace }}/tf-cache.tar.xz
download: true
- run: |
tar --skip-old-files -xf ${{ needs.tensorflow_opt-AndroidArm64.outputs.cache_key }}.tar.xz
rm ${{ needs.tensorflow_opt-AndroidArm64.outputs.cache_key }}.tar.xz
tar --skip-old-files -xf tf-cache.tar.xz
rm tf-cache.tar.xz
- uses: ./.github/actions/libstt-build
with:
arch: android-x86_64
@@ -64,7 +64,7 @@ def get_importers_parser(description):
parser = argparse.ArgumentParser(description=description)
parser.add_argument(
"--validate_label_locale",
help="Path to a Python file defining a |validate_label| function for your locale. WARNING: THIS WILL ADD THIS FILE's DIRECTORY INTO PYTHONPATH.",
help="Path to a Python file defining a |validate_label| function for your locale.",
)
return parser

@@ -81,15 +81,15 @@ def get_validate_label(args):
:return: The user-supplied validate_label function
:type: function
"""
# Python 3.5 does not support passing a pathlib.Path to os.path.* methods
if "validate_label_locale" not in args or (args.validate_label_locale is None):
print(
"WARNING: No --validate_label_locale specified, your might end with inconsistent dataset."
"WARNING: No --validate_label_locale specified, you might end with inconsistent dataset."
)
return validate_label_eng
# Python 3.5 does not support passing a pathlib.Path to os.path.* methods
validate_label_locale = str(args.validate_label_locale)
if not os.path.exists(os.path.abspath(validate_label_locale)):
print("ERROR: Inexistent --validate_label_locale specified. Please check.")
print("ERROR: Path specified in --validate_label_locale is not a file.")
return None
module_dir = os.path.abspath(os.path.dirname(validate_label_locale))
sys.path.insert(1, module_dir)