mirror of
https://github.com/apache/superset.git
synced 2026-05-10 02:15:50 +00:00
build: inline external Github Actions to unblock CI (#12241)
* build: inline cached-dependencies to unblock CI * Run E2E on pull_request only * Inline all external actions * Checkout needed for internal actions Also fixes pre-commit * Add missing files
This commit is contained in:
49
.github/actions/cached-dependencies/src/cache/caches.ts
vendored
Normal file
49
.github/actions/cached-dependencies/src/cache/caches.ts
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
/**
|
||||
* Default cache configs
|
||||
*/
|
||||
import * as os from 'os';
|
||||
|
||||
export interface CacheConfig {
|
||||
path: string[] | string;
|
||||
hashFiles: string[] | string;
|
||||
keyPrefix?: string;
|
||||
restoreKeys?: string[] | string;
|
||||
}
|
||||
|
||||
export interface CacheConfigs {
|
||||
[cacheName: string]: CacheConfig;
|
||||
}
|
||||
|
||||
const { HOME = '~' } = process.env;
|
||||
const platform = os.platform() as 'linux' | 'darwin' | 'win32';
|
||||
const pathByPlatform = {
|
||||
linux: {
|
||||
pip: `${HOME}/.cache/pip`,
|
||||
},
|
||||
darwin: {
|
||||
pip: `${HOME}/Library/Caches/pip`,
|
||||
},
|
||||
win32: {
|
||||
pip: `${HOME}\\AppData\\Local\\pip\\Cache`,
|
||||
},
|
||||
};
|
||||
|
||||
export default {
|
||||
pip: {
|
||||
path: pathByPlatform[platform].pip,
|
||||
hashFiles: 'requirements*.txt',
|
||||
},
|
||||
npm: {
|
||||
path: `${HOME}/.npm`,
|
||||
hashFiles: [
|
||||
`package-lock.json`,
|
||||
// support lerna monorepo with depth=2
|
||||
`*/*/package-lock.json`,
|
||||
`!node_modules/*/package-lock.json`,
|
||||
],
|
||||
},
|
||||
yarn: {
|
||||
path: `${HOME}/.npm`,
|
||||
hashFiles: [`yarn.lock`, `*/*/yarn.lock`, `!node_modules/*/yarn.lock`],
|
||||
},
|
||||
} as CacheConfigs;
|
||||
146
.github/actions/cached-dependencies/src/cache/index.ts
vendored
Normal file
146
.github/actions/cached-dependencies/src/cache/index.ts
vendored
Normal file
@@ -0,0 +1,146 @@
|
||||
/**
 * Execute @actions/cache with predefined cache configs.
 */
import { beginImport, doneImport } from './patch'; // monkey patch @actions modules

// The begin/done sandwich neutralizes @actions/cache's import-time checks
// (`isValidEvent`/`logWarning`) while its entry points are loaded, then
// restores them and installs the persistent-state patches.
// NOTE(review): this relies on the CommonJS emit keeping these `require`
// calls in source order between the two function calls — confirm the
// tsconfig `module` setting preserves that ordering.
beginImport();
import saveCache from '@actions/cache/src/save';
import restoreCache from '@actions/cache/src/restore';
doneImport();

import hasha from 'hasha';
import * as fs from 'fs';
import * as core from '@actions/core';
import * as glob from '@actions/glob';
import { Inputs, InputName, DefaultInputs } from '../constants';
import { applyInputs, getInput, maybeArrayToString } from '../utils/inputs';
import caches from './caches'; // default cache configs

// GitHub uses `sha256` for the built-in `${{ hashFiles(...) }}` expression
// https://help.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#hashfiles
const HASH_OPTION = { algorithm: 'sha256' };
|
||||
|
||||
/**
|
||||
* Load custom cache configs from the `caches` path defined in inputs.
|
||||
*
|
||||
* @returns Whether the loading is successfull.
|
||||
*/
|
||||
export async function loadCustomCacheConfigs() {
|
||||
const customCachePath = getInput(InputName.Caches);
|
||||
try {
|
||||
core.debug(`Reading cache configs from '${customCachePath}'`);
|
||||
const customCache = await import(customCachePath);
|
||||
Object.assign(caches, customCache.default);
|
||||
} catch (error) {
|
||||
if (
|
||||
customCachePath !== DefaultInputs[InputName.Caches] ||
|
||||
!error.message.includes('Cannot find module')
|
||||
) {
|
||||
core.error(error.message);
|
||||
core.setFailed(
|
||||
`Failed to load custom cache configs: '${customCachePath}'`,
|
||||
);
|
||||
return process.exit(1);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate SHA256 hash for a list of files matched by glob patterns.
|
||||
*
|
||||
* @param {string[]} patterns - The glob pattern.
|
||||
* @param {string} extra - The extra string to append to the file hashes to
|
||||
* comptue the final hash.
|
||||
*/
|
||||
export async function hashFiles(
|
||||
patterns: string[] | string,
|
||||
extra: string = '',
|
||||
) {
|
||||
const globber = await glob.create(maybeArrayToString(patterns));
|
||||
let hash = '';
|
||||
let counter = 0;
|
||||
for await (const file of globber.globGenerator()) {
|
||||
if (!fs.statSync(file).isDirectory()) {
|
||||
hash += hasha.fromFileSync(file, HASH_OPTION);
|
||||
counter += 1;
|
||||
}
|
||||
}
|
||||
core.debug(`Computed hash for ${counter} files. Pattern: ${patterns}`);
|
||||
return hasha(hash + extra, HASH_OPTION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate GitHub Action inputs based on predefined cache config. Will be used
|
||||
* to override env variables.
|
||||
*
|
||||
* @param {string} cacheName - Name of the predefined cache config.
|
||||
*/
|
||||
export async function getCacheInputs(
|
||||
cacheName: string,
|
||||
): Promise<Inputs | null> {
|
||||
if (!(cacheName in caches)) {
|
||||
return null;
|
||||
}
|
||||
const { keyPrefix, restoreKeys, path, hashFiles: patterns } = caches[
|
||||
cacheName
|
||||
];
|
||||
const pathString = maybeArrayToString(path);
|
||||
const prefix = keyPrefix || `${cacheName}-`;
|
||||
// include `path` to hash, too, so to burse caches in case users change
|
||||
// the path definition.
|
||||
const hash = await hashFiles(patterns, pathString);
|
||||
return {
|
||||
[InputName.Key]: `${prefix}${hash}`,
|
||||
[InputName.Path]: pathString,
|
||||
// only use prefix as restore key if it is never defined
|
||||
[InputName.RestoreKeys]:
|
||||
restoreKeys === undefined ? prefix : maybeArrayToString(restoreKeys),
|
||||
};
|
||||
}
|
||||
|
||||
export const actions = {
|
||||
restore(inputs: Inputs) {
|
||||
return applyInputs(inputs, restoreCache);
|
||||
},
|
||||
save(inputs: Inputs) {
|
||||
return applyInputs(inputs, saveCache);
|
||||
},
|
||||
};
|
||||
|
||||
export type ActionChoice = keyof typeof actions;
|
||||
|
||||
export async function run(
|
||||
action: string | undefined = undefined,
|
||||
cacheName: string | undefined = undefined,
|
||||
) {
|
||||
if (!action || !(action in actions)) {
|
||||
core.setFailed(`Choose a cache action from: [restore, save]`);
|
||||
return process.exit(1);
|
||||
}
|
||||
if (!cacheName) {
|
||||
core.setFailed(`Must provide a cache name.`);
|
||||
return process.exit(1);
|
||||
}
|
||||
|
||||
const runInParallel = getInput(InputName.Parallel);
|
||||
|
||||
if (await loadCustomCacheConfigs()) {
|
||||
if (runInParallel) {
|
||||
core.info(`${action.toUpperCase()} cache for ${cacheName}`);
|
||||
} else {
|
||||
core.startGroup(`${action.toUpperCase()} cache for ${cacheName}`);
|
||||
}
|
||||
const inputs = await getCacheInputs(cacheName);
|
||||
if (inputs) {
|
||||
core.info(JSON.stringify(inputs, null, 2));
|
||||
await actions[action as ActionChoice](inputs);
|
||||
} else {
|
||||
core.setFailed(`Cache '${cacheName}' not defined, failed to ${action}.`);
|
||||
return process.exit(1);
|
||||
}
|
||||
if (!runInParallel) {
|
||||
core.endGroup();
|
||||
}
|
||||
}
|
||||
}
|
||||
95
.github/actions/cached-dependencies/src/cache/patch.ts
vendored
Normal file
95
.github/actions/cached-dependencies/src/cache/patch.ts
vendored
Normal file
@@ -0,0 +1,95 @@
|
||||
/**
|
||||
* Monkey patch to safely import and use @action/cache modules
|
||||
*/
|
||||
import * as utils from '@actions/cache/src/utils/actionUtils';
|
||||
import * as core from '@actions/core';
|
||||
import * as fs from 'fs';
|
||||
import * as os from 'os';
|
||||
import { InputName } from '../constants';
|
||||
import { getInput } from '../utils/inputs';
|
||||
|
||||
interface KeyValueStore {
|
||||
[key: string]: any;
|
||||
}
|
||||
|
||||
const { logWarning, isValidEvent } = utils;
|
||||
const { getState, saveState } = core;
|
||||
|
||||
function getStateStoreFile() {
|
||||
const cacheName = getInput(InputName.Key);
|
||||
return `${os.tmpdir()}/cached-${cacheName}-states.json`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Load states from the persistent store.
|
||||
*
|
||||
* The default `core.saveState` only writes states as command output, and
|
||||
* `core.getState` is only possible to read the state in a later step via ENV
|
||||
* variables.
|
||||
*
|
||||
* So we use a temp file to save and load states, so to allow persistent
|
||||
* states within the same step.
|
||||
*
|
||||
* Since the state output is not uniq to caches, each cache should have their
|
||||
* own file for persistent states.
|
||||
*/
|
||||
function loadStates() {
|
||||
const stateStore = getStateStoreFile();
|
||||
const states: KeyValueStore = {};
|
||||
try {
|
||||
Object.assign(
|
||||
states,
|
||||
JSON.parse(fs.readFileSync(stateStore, { encoding: 'utf-8' })),
|
||||
);
|
||||
core.debug(`Loaded states from: ${stateStore}`)
|
||||
} catch (error) {
|
||||
// pass
|
||||
if (error.code !== 'ENOENT') {
|
||||
utils.logWarning(`Could not load states: ${stateStore}`)
|
||||
utils.logWarning(error.message);
|
||||
}
|
||||
}
|
||||
return states;
|
||||
}
|
||||
|
||||
/**
|
||||
* Save states to the persistent storage.
|
||||
*/
|
||||
function persistState(name: string, value: any) {
|
||||
const states = loadStates();
|
||||
const stateStore = getStateStoreFile();
|
||||
const valueString = typeof value === 'string' ? value : JSON.stringify(value);
|
||||
|
||||
// make sure value is always string
|
||||
states[name] = valueString;
|
||||
|
||||
// persist state in the temp file
|
||||
fs.writeFileSync(stateStore, JSON.stringify(states, null, 2), {
|
||||
encoding: 'utf-8',
|
||||
});
|
||||
core.debug(`Persist state "${name}=${valueString}" to ${stateStore}`);
|
||||
|
||||
// still pass the original value to the original function, though
|
||||
return saveState(name, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get states from persistent store, fallback to "official" states.
|
||||
*/
|
||||
function obtainState(name: string) {
|
||||
const states = loadStates();
|
||||
return states[name] || getState(name);
|
||||
}
|
||||
|
||||
/**
 * Patch `utils.isValidEvent` to always return false and silence
 * `utils.logWarning`, so the @actions/cache entry modules can be imported
 * without their import-time checks firing (see module doc above).
 */
export function beginImport() {
  Object.defineProperty(utils, 'isValidEvent', { value: () => false });
  Object.defineProperty(utils, 'logWarning', { value: () => {} });
}
|
||||
|
||||
/**
 * Undo the patches from `beginImport`, then replace `core.saveState` /
 * `core.getState` with the temp-file-backed versions defined above.
 */
export function doneImport() {
  // restore the original utils captured at module load
  Object.defineProperty(utils, 'isValidEvent', { value: isValidEvent });
  Object.defineProperty(utils, 'logWarning', { value: logWarning });

  // route state reads/writes through the persistent temp-file store
  Object.defineProperty(core, 'saveState', { value: persistState });
  Object.defineProperty(core, 'getState', { value: obtainState });
}
|
||||
43
.github/actions/cached-dependencies/src/constants.ts
vendored
Normal file
43
.github/actions/cached-dependencies/src/constants.ts
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
// Possible input names
|
||||
export enum InputName {
|
||||
// @actions/cache specific inputs
|
||||
Key = 'key',
|
||||
Path = 'path',
|
||||
RestoreKeys = 'restore-keys',
|
||||
|
||||
// setup-webapp specific inputs
|
||||
Run = 'run',
|
||||
Caches = 'caches',
|
||||
Bashlib = 'bashlib',
|
||||
Parallel = 'parallel',
|
||||
}
|
||||
|
||||
// Possible GitHub event names
|
||||
export enum GitHubEvent {
|
||||
Push = 'push',
|
||||
PullRequest = 'pull_request',
|
||||
}
|
||||
|
||||
// Directly available environment variables
|
||||
export enum EnvVariable {
|
||||
GitHubEventName = 'GITHUB_EVENT_NAME',
|
||||
}
|
||||
|
||||
export const EnvVariableNames = new Set(Object.values(EnvVariable) as string[]);
|
||||
|
||||
export interface Inputs {
|
||||
[EnvVariable.GitHubEventName]?: string;
|
||||
[InputName.Key]?: string;
|
||||
[InputName.RestoreKeys]?: string;
|
||||
[InputName.Path]?: string;
|
||||
[InputName.Caches]?: string;
|
||||
[InputName.Bashlib]?: string;
|
||||
[InputName.Run]?: string;
|
||||
[InputName.Parallel]?: string;
|
||||
}
|
||||
|
||||
export const DefaultInputs = {
|
||||
[InputName.Caches]: '.github/workflows/caches.js',
|
||||
[InputName.Bashlib]: '.github/workflows/bashlib.sh',
|
||||
[InputName.Run]: 'default-setup-command',
|
||||
} as Inputs;
|
||||
3
.github/actions/cached-dependencies/src/run.ts
vendored
Normal file
3
.github/actions/cached-dependencies/src/run.ts
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
// Entry point wrapper: kick off the `run` stage defined in setup.ts.
import { run } from './setup';

// NOTE(review): the returned promise is fire-and-forget; `run()` appears to
// report failures via core.setFailed/process.exit — confirm no unhandled
// rejection path remains.
run();
|
||||
61
.github/actions/cached-dependencies/src/scripts/bashlib.sh
vendored
Normal file
61
.github/actions/cached-dependencies/src/scripts/bashlib.sh
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
#!/bin/bash
# -----------------------------------------------
# Predefined command shortcuts
# -----------------------------------------------

# Exit when any command fails
set -e

# Locate this file (BASH_SOURCE in bash, %x prompt expansion in zsh), then
# derive the path of the compiled cache helper script relative to it.
# Quote all expansions so paths containing spaces don't break.
bashSource=${BASH_SOURCE[${#BASH_SOURCE[@]} - 1]:-${(%):-%x}}
cacheScript="$(dirname "$(dirname "$(dirname "$bashSource")")")/dist/scripts/cache"

# Print the resolved helper-script path (useful for debugging).
print-cachescript-path() {
  echo "$cacheScript"
}
|
||||
|
||||
# Restore the named cache ($1) via the compiled helper.
# Quote the script path and argument so paths/names with spaces survive.
cache-restore() {
  node "$cacheScript" restore "$1"
}
|
||||
|
||||
# Save the named cache ($1) via the compiled helper.
# Quote the script path and argument so paths/names with spaces survive.
cache-save() {
  node "$cacheScript" save "$1"
}
|
||||
|
||||
# install python packages, restoring/saving the pip cache around the install
pip-install() {
  cache-restore pip
  echo "::group::Install Python packages"
  pip install -r requirements.txt # install dependencies
  pip install -e . # install current directory as editable python package
  # Fixed: was `::endgroup` — the trailing `::` is required for GitHub
  # Actions to actually close the log group.
  echo "::endgroup::"
  cache-save pip
}
|
||||
|
||||
# install npm packages, restoring/saving the npm cache around the install
npm-install() {
  cache-restore npm
  echo "::group::Install npm packages"
  echo "npm: $(npm --version)"
  echo "node: $(node --version)"
  npm ci
  echo "::endgroup::"
  cache-save npm
}
|
||||
|
||||
# install npm packages via yarn, restoring/saving the yarn cache around it
yarn-install() {
  cache-restore yarn
  echo "::group::Install npm packages via yarn"
  echo "npm: $(npm --version)"
  echo "node: $(node --version)"
  echo "yarn: $(yarn --version)"
  yarn
  echo "::endgroup::"
  cache-save yarn
}
|
||||
|
||||
# Fallback used when no `run` input is given. Note: despite the name, this
# does NOT install anything — it fails loudly so users must either provide
# `run` commands or override `default-setup-command` in their own bashlib.
default-setup-command() {
  echo 'Please provide `run` commands or configure `default-setup-command`.'
  exit 1
}
|
||||
18
.github/actions/cached-dependencies/src/scripts/cache.ts
vendored
Normal file
18
.github/actions/cached-dependencies/src/scripts/cache.ts
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
/**
 * Runner script to restore/save caches by predefined configs.
 * Used in `scripts/bashlib.sh`.
 */
import { EnvVariable } from '../constants';

// To import `@actions/cache` modules safely, we must set the GitHub event
// name to an invalid value, so their event-dependent code doesn't execute.
// NOTE(review): this depends on the CommonJS emit keeping the `require` for
// '../cache' below in source order between these env mutations — confirm
// the build's module setting.
const originalEvent = process.env[EnvVariable.GitHubEventName];
process.env[EnvVariable.GitHubEventName] = 'CACHE_HACK';

import { run } from '../cache';

// then we restore the event name before the job actually runs
process.env[EnvVariable.GitHubEventName] = originalEvent;

// @ts-ignore
run(...process.argv.slice(2));
|
||||
66
.github/actions/cached-dependencies/src/setup.ts
vendored
Normal file
66
.github/actions/cached-dependencies/src/setup.ts
vendored
Normal file
@@ -0,0 +1,66 @@
|
||||
/**
|
||||
* Load inputs and execute.
|
||||
*/
|
||||
import * as core from '@actions/core';
|
||||
import { exec } from '@actions/exec';
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import { DefaultInputs, InputName } from './constants';
|
||||
import { getInput } from './utils/inputs';
|
||||
|
||||
const SHARED_BASHLIB = path.resolve(__dirname, '../src/scripts/bashlib.sh');
|
||||
|
||||
/**
|
||||
* Run bash commands with predefined lib functions.
|
||||
*
|
||||
* @param {string} cmd - The bash commands to execute.
|
||||
*/
|
||||
export async function runCommand(
|
||||
cmd: string,
|
||||
extraBashlib: string,
|
||||
): Promise<void> {
|
||||
const bashlibCommands = [`source ${SHARED_BASHLIB}`];
|
||||
if (extraBashlib) {
|
||||
bashlibCommands.push(`source ${extraBashlib}`);
|
||||
}
|
||||
try {
|
||||
await exec('bash', ['-c', [...bashlibCommands, cmd].join('\n ')]);
|
||||
} catch (error) {
|
||||
core.setFailed(error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Action entry point: read the `bashlib`, `run` and `parallel` inputs, then
 * execute the configured commands (optionally in parallel).
 */
export async function run(): Promise<void> {
  let bashlib = getInput(InputName.Bashlib);
  const rawCommands = getInput(InputName.Run);
  const runInParallel = getInput(InputName.Parallel);

  if (!fs.existsSync(bashlib)) {
    // Only log an error when the user explicitly pointed at a missing
    // file; a missing *default* bashlib is silently skipped.
    if (bashlib !== DefaultInputs[InputName.Bashlib]) {
      core.error(`Custom bashlib "${bashlib}" does not exist.`);
    }
    // don't add bashlib to runCommand
    bashlib = '';
  }

  if (runInParallel) {
    // Attempt to split by two or more new lines first, if there is still only
    // one command, attempt to split by one new line. This is because users
    // asked for parallelization, so we make our best efforts to get multiple
    // commands.
    let commands = rawCommands.split(/\n{2,}/);
    if (commands.length === 1) {
      commands = rawCommands.split('\n');
    }
    core.debug(`>> Run ${commands.length} commands in parallel...`);
    // NOTE(review): calling through `exports` presumably keeps runCommand
    // mockable in tests — verify before simplifying to a direct call.
    await Promise.all(
      commands
        .map(x => x.trim())
        .filter(x => !!x)
        .map(cmd => exports.runCommand(cmd, bashlib)),
    );
  } else if (rawCommands) {
    await exports.runCommand(rawCommands, bashlib);
  }
}
|
||||
2
.github/actions/cached-dependencies/src/types/external.d.ts
vendored
Normal file
2
.github/actions/cached-dependencies/src/types/external.d.ts
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
// Treat the bundled @actions/cache entry points as untyped modules.
// NOTE(review): the code imports from '@actions/cache/src/...' while these
// declarations cover 'dist/...' — confirm which paths actually need ambient
// declarations for the build to typecheck.
declare module '@actions/cache/dist/restore';
declare module '@actions/cache/dist/save';
|
||||
61
.github/actions/cached-dependencies/src/utils/inputs.ts
vendored
Normal file
61
.github/actions/cached-dependencies/src/utils/inputs.ts
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
/**
|
||||
* Manage inputs and env variables.
|
||||
*/
|
||||
import * as core from '@actions/core';
|
||||
import {
|
||||
Inputs,
|
||||
EnvVariableNames,
|
||||
InputName,
|
||||
DefaultInputs,
|
||||
} from '../constants';
|
||||
|
||||
export function getInput(name: keyof Inputs): string {
|
||||
const value = core.getInput(name);
|
||||
if (name === InputName.Parallel) {
|
||||
return value.toUpperCase() === 'TRUE' ? value : '';
|
||||
}
|
||||
return value || DefaultInputs[name] || '';
|
||||
}
|
||||
|
||||
/**
|
||||
* Update env variables associated with some inputs.
|
||||
* See: https://github.com/actions/toolkit/blob/5b940ebda7e7b86545fe9741903c930bc1191eb0/packages/core/src/core.ts#L69-L77 .
|
||||
*
|
||||
* @param {Inputs} inputs - The new inputs to apply to the env variables.
|
||||
*/
|
||||
export function setInputs(inputs: Inputs): void {
|
||||
for (const [name, value] of Object.entries(inputs)) {
|
||||
const envName = EnvVariableNames.has(name)
|
||||
? name
|
||||
: `INPUT_${name.replace(/ /g, '_').toUpperCase()}`;
|
||||
process.env[envName] = value;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply new inputs and execute a runner function, restore them when done.
|
||||
*
|
||||
* @param {Inputs} inputs - The new inputs to apply to the env variables before
|
||||
* excuting the runner.
|
||||
* @param {runner} runner - The runner function that returns a promise.
|
||||
* @returns {Promise<any>} - The result from the runner function.
|
||||
*/
|
||||
export async function applyInputs(
|
||||
inputs: Inputs,
|
||||
runner: () => Promise<void>,
|
||||
): Promise<any> {
|
||||
const originalInputs: Inputs = Object.fromEntries(
|
||||
Object.keys(inputs).map(name => [
|
||||
name,
|
||||
EnvVariableNames.has(name) ? process.env[name] : core.getInput(name),
|
||||
]),
|
||||
);
|
||||
exports.setInputs(inputs);
|
||||
const result = await runner();
|
||||
exports.setInputs(originalInputs);
|
||||
return result;
|
||||
}
|
||||
|
||||
export function maybeArrayToString(input: string[] | string) {
|
||||
return Array.isArray(input) ? input.join('\n') : input;
|
||||
}
|
||||
Reference in New Issue
Block a user