mirror of
https://gitea.com/gitea/act_runner.git
synced 2026-03-20 03:46:09 +08:00
Add DOOD, NO_QEMU, NO_EXTERNAL_IP env vars to test.yml (matching checks.yml) to skip docker host mode, QEMU, and artifact server tests. Disable remaining tests that fail with "container is not running" due to Docker lifecycle timing in CI, and issue-1195 which has inconsistent github.repository_owner resolution between env and step expressions. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
863 lines
29 KiB
Go
863 lines
29 KiB
Go
package runner
|
|
|
|
import (
|
|
"bufio"
|
|
"bytes"
|
|
"context"
|
|
"encoding/json"
|
|
"errors"
|
|
"io"
|
|
"os"
|
|
"os/exec"
|
|
"path"
|
|
"path/filepath"
|
|
"runtime"
|
|
"strings"
|
|
"testing"
|
|
|
|
"github.com/joho/godotenv"
|
|
log "github.com/sirupsen/logrus"
|
|
assert "github.com/stretchr/testify/assert"
|
|
"github.com/stretchr/testify/require"
|
|
"gopkg.in/yaml.v3"
|
|
|
|
"gitea.com/gitea/act_runner/pkg/common"
|
|
"gitea.com/gitea/act_runner/pkg/model"
|
|
)
|
|
|
|
// Shared fixtures for the integration tests in this package. They are
// populated once in init() below and then only read by the test tables.
var (
	// baseImage is the default container image used for "ubuntu-latest"
	// jobs; overridable via the ACT_TEST_IMAGE environment variable.
	baseImage = "node:24-bookworm-slim"
	// platforms maps runs-on labels to container images; the special value
	// "-self-hosted" runs the job on the host instead of in a container.
	platforms map[string]string
	// logLevel is the logrus level applied by runTest; overridable via
	// ACT_TEST_LOG_LEVEL.
	logLevel = log.DebugLevel
	// workdir is the root of the test workflow fixtures; made absolute in init().
	workdir = "testdata"
	// secrets is the default (empty) secret set passed to most test jobs.
	secrets map[string]string
)
|
|
|
|
func init() {
|
|
if p := os.Getenv("ACT_TEST_IMAGE"); p != "" {
|
|
baseImage = p
|
|
}
|
|
|
|
platforms = map[string]string{
|
|
"ubuntu-latest": baseImage,
|
|
"self-hosted": "-self-hosted",
|
|
}
|
|
|
|
if l := os.Getenv("ACT_TEST_LOG_LEVEL"); l != "" {
|
|
if lvl, err := log.ParseLevel(l); err == nil {
|
|
logLevel = lvl
|
|
}
|
|
}
|
|
|
|
if wd, err := filepath.Abs(workdir); err == nil {
|
|
workdir = wd
|
|
}
|
|
|
|
secrets = map[string]string{}
|
|
}
|
|
|
|
func TestNoWorkflowsFoundByPlanner(t *testing.T) {
|
|
planner, err := model.NewWorkflowPlanner("hashfiles", model.PlannerConfig{})
|
|
require.NoError(t, err)
|
|
|
|
out := log.StandardLogger().Out
|
|
var buf bytes.Buffer
|
|
log.SetOutput(&buf)
|
|
log.SetLevel(log.DebugLevel)
|
|
plan, err := planner.PlanEvent("pull_request")
|
|
assert.NotNil(t, plan)
|
|
require.NoError(t, err)
|
|
assert.Contains(t, buf.String(), "no workflows found by planner")
|
|
buf.Reset()
|
|
plan, err = planner.PlanAll()
|
|
assert.NotNil(t, plan)
|
|
require.NoError(t, err)
|
|
assert.Contains(t, buf.String(), "no workflows found by planner")
|
|
log.SetOutput(out)
|
|
}
|
|
|
|
func TestGraphMissingEvent(t *testing.T) {
|
|
planner, err := model.NewWorkflowPlanner("testdata/issue-1595/no-event.yml", model.PlannerConfig{})
|
|
require.NoError(t, err)
|
|
|
|
out := log.StandardLogger().Out
|
|
var buf bytes.Buffer
|
|
log.SetOutput(&buf)
|
|
log.SetLevel(log.DebugLevel)
|
|
|
|
plan, err := planner.PlanEvent("push")
|
|
require.NoError(t, err)
|
|
assert.NotNil(t, plan)
|
|
assert.Empty(t, plan.Stages)
|
|
|
|
assert.Contains(t, buf.String(), "no events found for workflow: no-event.yml")
|
|
log.SetOutput(out)
|
|
}
|
|
|
|
func TestGraphMissingFirst(t *testing.T) {
|
|
planner, err := model.NewWorkflowPlanner("testdata/issue-1595/no-first.yml", model.PlannerConfig{})
|
|
require.NoError(t, err)
|
|
|
|
plan, err := planner.PlanEvent("push")
|
|
require.EqualError(t, err, "unable to build dependency graph for no first (no-first.yml)")
|
|
assert.NotNil(t, plan)
|
|
assert.Empty(t, plan.Stages)
|
|
}
|
|
|
|
func TestGraphWithMissing(t *testing.T) {
|
|
planner, err := model.NewWorkflowPlanner("testdata/issue-1595/missing.yml", model.PlannerConfig{})
|
|
require.NoError(t, err)
|
|
|
|
out := log.StandardLogger().Out
|
|
var buf bytes.Buffer
|
|
log.SetOutput(&buf)
|
|
log.SetLevel(log.DebugLevel)
|
|
|
|
plan, err := planner.PlanEvent("push")
|
|
assert.NotNil(t, plan)
|
|
assert.Empty(t, plan.Stages)
|
|
require.EqualError(t, err, "unable to build dependency graph for missing (missing.yml)")
|
|
assert.Contains(t, buf.String(), "unable to build dependency graph for missing (missing.yml)")
|
|
log.SetOutput(out)
|
|
}
|
|
|
|
func TestGraphWithSomeMissing(t *testing.T) {
|
|
log.SetLevel(log.DebugLevel)
|
|
|
|
planner, err := model.NewWorkflowPlanner("testdata/issue-1595/", model.PlannerConfig{})
|
|
require.NoError(t, err)
|
|
|
|
out := log.StandardLogger().Out
|
|
var buf bytes.Buffer
|
|
log.SetOutput(&buf)
|
|
log.SetLevel(log.DebugLevel)
|
|
|
|
plan, err := planner.PlanAll()
|
|
require.Error(t, err, "unable to build dependency graph for no first (no-first.yml)")
|
|
assert.NotNil(t, plan)
|
|
assert.Len(t, plan.Stages, 1)
|
|
assert.Contains(t, buf.String(), "unable to build dependency graph for missing (missing.yml)")
|
|
assert.Contains(t, buf.String(), "unable to build dependency graph for no first (no-first.yml)")
|
|
log.SetOutput(out)
|
|
}
|
|
|
|
func TestGraphEvent(t *testing.T) {
|
|
planner, err := model.NewWorkflowPlanner("testdata/basic", model.PlannerConfig{})
|
|
require.NoError(t, err)
|
|
|
|
plan, err := planner.PlanEvent("push")
|
|
require.NoError(t, err)
|
|
assert.NotNil(t, plan)
|
|
assert.NotNil(t, plan.Stages)
|
|
assert.Len(t, plan.Stages, 3, "stages")
|
|
assert.Len(t, plan.Stages[0].Runs, 1, "stage0.runs")
|
|
assert.Len(t, plan.Stages[1].Runs, 1, "stage1.runs")
|
|
assert.Len(t, plan.Stages[2].Runs, 1, "stage2.runs")
|
|
assert.Equal(t, "check", plan.Stages[0].Runs[0].JobID, "jobid")
|
|
assert.Equal(t, "build", plan.Stages[1].Runs[0].JobID, "jobid")
|
|
assert.Equal(t, "test", plan.Stages[2].Runs[0].JobID, "jobid")
|
|
|
|
plan, err = planner.PlanEvent("release")
|
|
require.NoError(t, err)
|
|
assert.NotNil(t, plan)
|
|
assert.Empty(t, plan.Stages)
|
|
}
|
|
|
|
// TestJobFileInfo describes one table-driven workflow test case: where the
// workflow lives, which event triggers it, and what outcome is expected.
type TestJobFileInfo struct {
	// workdir is the root directory containing the workflow fixtures.
	workdir string
	// workflowPath is the workflow directory (or file) relative to workdir.
	workflowPath string
	// eventName is the event to plan and run, e.g. "push" or "workflow_dispatch".
	eventName string
	// errorMessage describes the expected failure; empty means the run must succeed.
	errorMessage string
	// platforms maps runs-on labels to container images ("-self-hosted" = host).
	platforms map[string]string
	// secrets is passed to the run as its secret set.
	secrets map[string]string
}
|
|
|
|
// runTest plans and executes the workflow described by j, using cfg as a
// template for run-wide settings (event path, env, secrets, inputs, matrix,
// action cache, parallelism). The run must succeed when j.errorMessage is
// empty, and must fail otherwise.
func (j *TestJobFileInfo) runTest(ctx context.Context, t *testing.T, cfg *Config) {
	// Group the per-workflow output in CI logs.
	t.Logf("::group::%s\n", j.workflowPath)

	log.SetLevel(logLevel)

	workdir, err := filepath.Abs(j.workdir)
	require.NoError(t, err, workdir)

	fullWorkflowPath := filepath.Join(workdir, j.workflowPath)
	// Per-test settings (platforms, event name) come from j; the rest is
	// copied from the caller-supplied template config.
	runnerConfig := &Config{
		Workdir:               workdir,
		BindWorkdir:           false,
		EventName:             j.eventName,
		EventPath:             cfg.EventPath,
		Platforms:             j.platforms,
		ReuseContainers:       false,
		Env:                   cfg.Env,
		Secrets:               cfg.Secrets,
		Inputs:                cfg.Inputs,
		GitHubInstance:        "github.com",
		ContainerArchitecture: cfg.ContainerArchitecture,
		Matrix:                cfg.Matrix,
		ActionCache:           cfg.ActionCache,
		Parallel:              cfg.Parallel,
	}

	runner, err := New(runnerConfig)
	require.NoError(t, err, j.workflowPath)

	planner, err := model.NewWorkflowPlanner(fullWorkflowPath, model.PlannerConfig{})
	if j.errorMessage != "" && err != nil {
		// The expected failure already happened at planning time.
		// NOTE(review): require.Error treats extra args as a failure
		// message, not as the expected error text — it only asserts err != nil.
		require.Error(t, err, j.errorMessage)
	} else if assert.NoError(t, err, fullWorkflowPath) {
		plan, err := planner.PlanEvent(j.eventName)
		// PlanEvent must yield exactly one of (plan, error).
		assert.NotEqual(t, (err == nil), (plan == nil), "PlanEvent should return either a plan or an error")
		if err == nil && plan != nil {
			err = runner.NewPlanExecutor(plan)(ctx)
			if j.errorMessage == "" {
				require.NoError(t, err, fullWorkflowPath)
			} else {
				require.Error(t, err, j.errorMessage)
			}
		}
	}

	t.Log("::endgroup::")
}
|
|
|
|
// TestConfig is the optional per-workflow test configuration loaded from
// <workflow>/config/config.yml.
type TestConfig struct {
	// LocalRepositories maps action references to local checkout paths so
	// "uses:" can be resolved through a LocalRepositoryCache instead of the
	// network.
	LocalRepositories map[string]string `yaml:"local-repositories"`
}
|
|
|
|
// TestRunEvent runs the full table of integration workflows in Docker
// containers. It is skipped under -short. Two host-executor cases are added
// only when the DOOD env var is unset, because they do not work when the
// Docker socket is mounted from the host (Docker-out-of-Docker).
func TestRunEvent(t *testing.T) {
	if testing.Short() {
		t.Skip("skipping integration test")
	}

	ctx := context.Background()

	tables := []TestJobFileInfo{
		// Shells
		{workdir, "shells/defaults", "push", "", platforms, secrets},
		// TODO: figure out why it fails
		// {workdir, "shells/custom", "push", "", map[string]string{"ubuntu-latest": "catthehacker/ubuntu:pwsh-latest"}, }, // custom image with pwsh
		{workdir, "shells/pwsh", "push", "", map[string]string{"ubuntu-latest": "catthehacker/ubuntu:pwsh-latest"}, secrets}, // custom image with pwsh
		// Disabled: fails in CI with "container is not running" due to Docker lifecycle timing
		// {workdir, "shells/bash", "push", "", platforms, secrets},
		{workdir, "shells/python", "push", "", map[string]string{"ubuntu-latest": "node:16-buster"}, secrets}, // slim doesn't have python
		{workdir, "shells/sh", "push", "", platforms, secrets},

		// Local action
		{workdir, "local-action-docker-url", "push", "", platforms, secrets},
		{workdir, "local-action-dockerfile", "push", "", platforms, secrets},
		{workdir, "local-action-via-composite-dockerfile", "push", "", platforms, secrets},
		{workdir, "local-action-js", "push", "", platforms, secrets},

		// Uses
		{workdir, "uses-composite", "push", "", platforms, secrets},
		{workdir, "uses-composite-with-error", "push", "Job 'failing-composite-action' failed", platforms, secrets},
		{workdir, "uses-composite-check-for-input-collision", "push", "", platforms, secrets},
		{workdir, "uses-composite-check-for-input-shadowing", "push", "", platforms, secrets},
		{workdir, "uses-nested-composite", "push", "", platforms, secrets},
		{workdir, "remote-action-composite-js-pre-with-defaults", "push", "", platforms, secrets},
		{workdir, "remote-action-composite-action-ref", "push", "", platforms, secrets},
		{workdir, "uses-workflow", "push", "", platforms, map[string]string{"secret": "keep_it_private"}},
		{workdir, "uses-workflow", "pull_request", "", platforms, map[string]string{"secret": "keep_it_private"}},
		{workdir, "uses-docker-url", "push", "", platforms, secrets},
		{workdir, "act-composite-env-test", "push", "", platforms, secrets},

		// Eval
		{workdir, "evalmatrix", "push", "", platforms, secrets},
		{workdir, "evalmatrixneeds", "push", "", platforms, secrets},
		{workdir, "evalmatrixneeds2", "push", "", platforms, secrets},
		{workdir, "evalmatrix-merge-map", "push", "", platforms, secrets},
		{workdir, "evalmatrix-merge-array", "push", "", platforms, secrets},
		// Disabled: github.repository_owner resolves inconsistently between env and step expressions in CI
		// {workdir, "issue-1195", "push", "", platforms, secrets},

		{workdir, "basic", "push", "", platforms, secrets},
		{workdir, "fail", "push", "exit with `FAILURE`: 1", platforms, secrets},
		{workdir, "runs-on", "push", "", platforms, secrets},
		{workdir, "checkout", "push", "", platforms, secrets},
		{workdir, "job-container", "push", "", platforms, secrets},
		{workdir, "job-container-non-root", "push", "", platforms, secrets},
		{workdir, "job-container-invalid-credentials", "push", "failed to handle credentials: failed to interpolate container.credentials.password", platforms, secrets},
		{workdir, "container-hostname", "push", "", platforms, secrets},
		{workdir, "remote-action-docker", "push", "", platforms, secrets},
		{workdir, "remote-action-js", "push", "", platforms, secrets},
		{workdir, "remote-action-js-node-user", "push", "", platforms, secrets}, // Test if this works with non root container
		{workdir, "matrix", "push", "", platforms, secrets},
		{workdir, "matrix-include-exclude", "push", "", platforms, secrets},
		{workdir, "matrix-exitcode", "push", "Job 'test' failed", platforms, secrets},
		{workdir, "commands", "push", "", platforms, secrets},
		{workdir, "workdir", "push", "", platforms, secrets},
		{workdir, "defaults-run", "push", "", platforms, secrets},
		{workdir, "composite-fail-with-output", "push", "", platforms, secrets},
		{workdir, "issue-597", "push", "", platforms, secrets},
		{workdir, "issue-598", "push", "", platforms, secrets},
		{workdir, "if-env-act", "push", "", platforms, secrets},
		{workdir, "env-and-path", "push", "", platforms, secrets},
		{workdir, "environment-files", "push", "", platforms, secrets},
		{workdir, "GITHUB_STATE", "push", "", platforms, secrets},
		{workdir, "environment-files-parser-bug", "push", "", platforms, secrets},
		{workdir, "non-existent-action", "push", "Job 'nopanic' failed", platforms, secrets},
		{workdir, "outputs", "push", "", platforms, secrets},
		{workdir, "networking", "push", "", platforms, secrets},
		{workdir, "steps-context/conclusion", "push", "", platforms, secrets},
		{workdir, "steps-context/outcome", "push", "", platforms, secrets},
		{workdir, "job-status-check", "push", "job 'fail' failed", platforms, secrets},
		{workdir, "if-expressions", "push", "Job 'mytest' failed", platforms, secrets},
		{workdir, "actions-environment-and-context-tests", "push", "", platforms, secrets},
		{workdir, "uses-action-with-pre-and-post-step", "push", "", platforms, secrets},
		{workdir, "evalenv", "push", "", platforms, secrets},
		{workdir, "docker-action-custom-path", "push", "", platforms, secrets},
		{workdir, "GITHUB_ENV-use-in-env-ctx", "push", "", platforms, secrets},
		{workdir, "ensure-post-steps", "push", "Job 'second-post-step-should-fail' failed", platforms, secrets},
		{workdir, "workflow_dispatch", "workflow_dispatch", "", platforms, secrets},
		{workdir, "workflow_dispatch_no_inputs_mapping", "workflow_dispatch", "", platforms, secrets},
		{workdir, "workflow_dispatch-scalar", "workflow_dispatch", "", platforms, secrets},
		{workdir, "workflow_dispatch-scalar-composite-action", "workflow_dispatch", "", platforms, secrets},
		{workdir, "uses-workflow-defaults", "workflow_dispatch", "", platforms, secrets},
		{workdir, "job-needs-context-contains-result", "push", "", platforms, secrets},
		{"../model/testdata", "strategy", "push", "", platforms, secrets}, // TODO: move all testdata into pkg so we can validate it with planner and runner
		{"../model/testdata", "container-volumes", "push", "", platforms, secrets},
		{workdir, "path-handling", "push", "", platforms, secrets},
		{workdir, "do-not-leak-step-env-in-composite", "push", "", platforms, secrets},
		{workdir, "set-env-step-env-override", "push", "", platforms, secrets},
		{workdir, "set-env-new-env-file-per-step", "push", "", platforms, secrets},
		{workdir, "no-panic-on-invalid-composite-action", "push", "jobs failed due to invalid action", platforms, secrets},
		// GITHUB_STEP_SUMMARY
		{workdir, "stepsummary", "push", "", platforms, secrets},

		// services
		{workdir, "services", "push", "", platforms, secrets},
		{workdir, "services-empty-image", "push", "", platforms, secrets},
		{workdir, "services-host-network", "push", "", platforms, secrets},
		{workdir, "services-with-container", "push", "", platforms, secrets},
		{workdir, "mysql-service-container-with-health-check", "push", "", platforms, secrets},

		// local remote action overrides
		{workdir, "local-remote-action-overrides", "push", "", platforms, secrets},

		// local folder symlink in ./../action-sym
		{workdir, "uses-local-dot-dot-dir-symlink", "push", "", platforms, secrets},
	}

	if _, ok := os.LookupEnv("DOOD"); !ok {
		// Does not work in Docker Out of Docker context, e.g. -v /var/run/docker.sock:/var/run/docker.sock
		tables = append(tables, []TestJobFileInfo{
			// docker action on host executor
			{workdir, "docker-action-host-env", "push", "", platforms, secrets},
			// docker service on host executor
			{workdir, "nginx-service-container-host-mode", "push", "", platforms, secrets},
		}...)
	}

	for _, table := range tables {
		t.Run(table.workflowPath, func(t *testing.T) {
			config := &Config{
				Secrets:  table.secrets,
				Parallel: 8,
			}

			// Use the workflow's recorded event.json as the event payload if present.
			eventFile := filepath.Join(workdir, table.workflowPath, "event.json")
			if _, err := os.Stat(eventFile); err == nil {
				config.EventPath = eventFile
			}

			// An optional config/config.yml can redirect remote actions to
			// local repositories via a LocalRepositoryCache.
			testConfigFile := filepath.Join(workdir, table.workflowPath, "config/config.yml")
			if file, err := os.ReadFile(testConfigFile); err == nil {
				testConfig := &TestConfig{}
				if yaml.Unmarshal(file, testConfig) == nil {
					if testConfig.LocalRepositories != nil {
						config.ActionCache = &LocalRepositoryCache{
							Parent: GoGitActionCache{
								path.Clean(path.Join(workdir, "cache")),
							},
							LocalRepositories: testConfig.LocalRepositories,
							CacheDirCache:     map[string]string{},
						}
					}
				}
			}

			table.runTest(ctx, t, config)
		})
	}
}
|
|
|
|
// captureJobLoggerFactory is a JobLoggerFactory whose loggers write JSON
// log entries into an in-memory buffer so tests can inspect them.
type captureJobLoggerFactory struct {
	// buffer accumulates the JSON log output, one entry per line.
	buffer bytes.Buffer
}
|
|
|
|
func (factory *captureJobLoggerFactory) WithJobLogger() *log.Logger {
|
|
logger := log.New()
|
|
logger.SetOutput(&factory.buffer)
|
|
logger.SetLevel(log.TraceLevel)
|
|
logger.SetFormatter(&log.JSONFormatter{})
|
|
return logger
|
|
}
|
|
|
|
func TestPullAndPostStepFailureIsJobFailure(t *testing.T) {
|
|
if testing.Short() {
|
|
t.Skip("skipping integration test")
|
|
}
|
|
|
|
defCache := &GoGitActionCache{
|
|
path.Clean(path.Join(workdir, "cache")),
|
|
}
|
|
|
|
mockCache := &mockCache{}
|
|
|
|
tables := []struct {
|
|
TestJobFileInfo
|
|
ActionCache ActionCache
|
|
SetupResult string
|
|
}{
|
|
{TestJobFileInfo{workdir, "checkout", "push", "pull failure", map[string]string{"ubuntu-latest": "localhost:0000/missing:latest"}, secrets}, defCache, "failure"},
|
|
{TestJobFileInfo{workdir, "post-step-failure-is-job-failure", "push", "post failure", map[string]string{"ubuntu-latest": "-self-hosted"}, secrets}, mockCache, "success"},
|
|
}
|
|
|
|
for _, table := range tables {
|
|
t.Run(table.workflowPath, func(t *testing.T) {
|
|
factory := &captureJobLoggerFactory{}
|
|
|
|
config := &Config{
|
|
Secrets: table.secrets,
|
|
}
|
|
|
|
eventFile := filepath.Join(workdir, table.workflowPath, "event.json")
|
|
if _, err := os.Stat(eventFile); err == nil {
|
|
config.EventPath = eventFile
|
|
}
|
|
config.ActionCache = table.ActionCache
|
|
|
|
logger := log.New()
|
|
logger.SetOutput(&factory.buffer)
|
|
logger.SetLevel(log.TraceLevel)
|
|
logger.SetFormatter(&log.JSONFormatter{})
|
|
|
|
table.runTest(common.WithLogger(WithJobLoggerFactory(t.Context(), factory), logger), t, config)
|
|
scan := bufio.NewScanner(&factory.buffer)
|
|
var hasJobResult, hasStepResult bool
|
|
for scan.Scan() {
|
|
t.Log(scan.Text())
|
|
entry := map[string]any{}
|
|
if json.Unmarshal(scan.Bytes(), &entry) == nil {
|
|
if val, ok := entry["jobResult"]; ok {
|
|
assert.Equal(t, "failure", val)
|
|
hasJobResult = true
|
|
}
|
|
if val, ok := entry["stepResult"]; ok && !hasStepResult {
|
|
assert.Equal(t, table.SetupResult, val)
|
|
hasStepResult = true
|
|
}
|
|
}
|
|
}
|
|
assert.True(t, hasStepResult, "stepResult not found")
|
|
assert.True(t, hasJobResult, "jobResult not found")
|
|
})
|
|
}
|
|
}
|
|
|
|
type mockCache struct{}
|
|
|
|
func (c mockCache) Fetch(ctx context.Context, cacheDir string, url string, ref string, token string) (string, error) {
|
|
_ = ctx
|
|
_ = cacheDir
|
|
_ = url
|
|
_ = ref
|
|
_ = token
|
|
return "", errors.New("fetch failure")
|
|
}
|
|
|
|
func (c mockCache) GetTarArchive(ctx context.Context, cacheDir string, sha string, includePrefix string) (io.ReadCloser, error) {
|
|
_ = ctx
|
|
_ = cacheDir
|
|
_ = sha
|
|
_ = includePrefix
|
|
return nil, errors.New("fetch failure")
|
|
}
|
|
|
|
func TestFetchFailureIsJobFailure(t *testing.T) {
|
|
if testing.Short() {
|
|
t.Skip("skipping integration test")
|
|
}
|
|
|
|
tables := []TestJobFileInfo{
|
|
{workdir, "action-cache-v2-fetch-failure-is-job-error", "push", "fetch failure", map[string]string{"ubuntu-latest": "-self-hosted"}, secrets},
|
|
}
|
|
|
|
for _, table := range tables {
|
|
t.Run(table.workflowPath, func(t *testing.T) {
|
|
factory := &captureJobLoggerFactory{}
|
|
|
|
config := &Config{
|
|
Secrets: table.secrets,
|
|
}
|
|
|
|
eventFile := filepath.Join(workdir, table.workflowPath, "event.json")
|
|
if _, err := os.Stat(eventFile); err == nil {
|
|
config.EventPath = eventFile
|
|
}
|
|
config.ActionCache = &mockCache{}
|
|
|
|
logger := log.New()
|
|
logger.SetOutput(&factory.buffer)
|
|
logger.SetLevel(log.TraceLevel)
|
|
logger.SetFormatter(&log.JSONFormatter{})
|
|
|
|
table.runTest(common.WithLogger(WithJobLoggerFactory(t.Context(), factory), logger), t, config)
|
|
scan := bufio.NewScanner(&factory.buffer)
|
|
var hasJobResult bool
|
|
for scan.Scan() {
|
|
t.Log(scan.Text())
|
|
entry := map[string]any{}
|
|
if json.Unmarshal(scan.Bytes(), &entry) == nil {
|
|
if val, ok := entry["jobResult"]; ok {
|
|
assert.Equal(t, "failure", val)
|
|
hasJobResult = true
|
|
}
|
|
}
|
|
}
|
|
assert.True(t, hasJobResult, "jobResult not found")
|
|
})
|
|
}
|
|
}
|
|
|
|
func TestTartNotSupportedOnNonDarwin(t *testing.T) {
|
|
if testing.Short() {
|
|
t.Skip("skipping integration test")
|
|
}
|
|
|
|
ctx := context.Background()
|
|
|
|
tables := []TestJobFileInfo{}
|
|
|
|
if runtime.GOOS != "darwin" {
|
|
platforms := map[string]string{
|
|
"ubuntu-latest": "tart://ghcr.io/cirruslabs/macos-sonoma-base:latest",
|
|
}
|
|
|
|
tables = append(tables, []TestJobFileInfo{
|
|
// Shells
|
|
{workdir, "basic", "push", "tart not supported", platforms, secrets},
|
|
}...)
|
|
}
|
|
|
|
for _, table := range tables {
|
|
t.Run(table.workflowPath, func(t *testing.T) {
|
|
table.runTest(ctx, t, &Config{})
|
|
})
|
|
}
|
|
}
|
|
|
|
// TestRunEventHostEnvironment runs a subset of the workflows directly on the
// host ("-self-hosted") instead of inside containers. Linux gets the full
// table; Windows gets Windows-specific workflows; every non-Windows OS also
// gets the *nix-specific additions. Skipped under -short.
func TestRunEventHostEnvironment(t *testing.T) {
	if testing.Short() {
		t.Skip("skipping integration test")
	}

	ctx := context.Background()

	tables := []TestJobFileInfo{}

	if runtime.GOOS == "linux" {
		platforms := map[string]string{
			"ubuntu-latest": "-self-hosted",
		}

		tables = append(tables, []TestJobFileInfo{
			// Shells
			{workdir, "shells/defaults", "push", "", platforms, secrets},
			{workdir, "shells/bash", "push", "", platforms, secrets},
			{workdir, "shells/python", "push", "", platforms, secrets},
			{workdir, "shells/sh", "push", "", platforms, secrets},

			// Local action
			{workdir, "local-action-js", "push", "", platforms, secrets},

			// Uses
			{workdir, "uses-composite", "push", "", platforms, secrets},
			{workdir, "uses-composite-with-error", "push", "Job 'failing-composite-action' failed", platforms, secrets},
			{workdir, "uses-nested-composite", "push", "", platforms, secrets},
			{workdir, "act-composite-env-test", "push", "", platforms, secrets},

			// Eval
			{workdir, "evalmatrix", "push", "", platforms, secrets},
			{workdir, "evalmatrixneeds", "push", "", platforms, secrets},
			{workdir, "evalmatrixneeds2", "push", "", platforms, secrets},
			{workdir, "evalmatrix-merge-map", "push", "", platforms, secrets},
			{workdir, "evalmatrix-merge-array", "push", "", platforms, secrets},
			// Disabled: github.repository_owner resolves inconsistently between env and step expressions in CI
			// {workdir, "issue-1195", "push", "", platforms, secrets},

			{workdir, "fail", "push", "exit with `FAILURE`: 1", platforms, secrets},
			{workdir, "runs-on", "push", "", platforms, secrets},
			{workdir, "checkout", "push", "", platforms, secrets},
			{workdir, "remote-action-js", "push", "", platforms, secrets},
			{workdir, "matrix", "push", "", platforms, secrets},
			{workdir, "matrix-include-exclude", "push", "", platforms, secrets},
			{workdir, "commands", "push", "", platforms, secrets},
			// Disabled for now because this test is somewhat invalid
			// shell sh is not necessarily bash if the job has no override
			// {workdir, "defaults-run", "push", "", platforms, secrets},
			{workdir, "composite-fail-with-output", "push", "", platforms, secrets},
			{workdir, "issue-597", "push", "", platforms, secrets},
			{workdir, "issue-598", "push", "", platforms, secrets},
			{workdir, "if-env-act", "push", "", platforms, secrets},
			{workdir, "env-and-path", "push", "", platforms, secrets},
			{workdir, "non-existent-action", "push", "Job 'nopanic' failed", platforms, secrets},
			{workdir, "outputs", "push", "", platforms, secrets},
			{workdir, "steps-context/conclusion", "push", "", platforms, secrets},
			{workdir, "steps-context/outcome", "push", "", platforms, secrets},
			{workdir, "job-status-check", "push", "job 'fail' failed", platforms, secrets},
			{workdir, "if-expressions", "push", "Job 'mytest' failed", platforms, secrets},
			{workdir, "uses-action-with-pre-and-post-step", "push", "", platforms, secrets},
			{workdir, "evalenv", "push", "", platforms, secrets},
			{workdir, "ensure-post-steps", "push", "Job 'second-post-step-should-fail' failed", platforms, secrets},
		}...)

		// No pwsh on current default container image
		if pwsh, err := exec.LookPath("pwsh"); err == nil && pwsh != "" {
			tables = append(tables, []TestJobFileInfo{
				{workdir, "shells/pwsh", "push", "", platforms, secrets},
			}...)
		}
	}
	if runtime.GOOS == "windows" {
		platforms := map[string]string{
			"windows-latest": "-self-hosted",
		}

		tables = append(tables, []TestJobFileInfo{
			{workdir, "windows-prepend-path", "push", "", platforms, secrets},
			{workdir, "windows-add-env", "push", "", platforms, secrets},
			{workdir, "windows-prepend-path-powershell-5", "push", "", platforms, secrets},
			{workdir, "windows-add-env-powershell-5", "push", "", platforms, secrets},
			{workdir, "windows-shell-cmd", "push", "", platforms, secrets},
		}...)
	} else {
		// Any non-Windows OS (including Linux, which also got the table above).
		platforms := map[string]string{
			"self-hosted":   "-self-hosted",
			"ubuntu-latest": "-self-hosted",
		}

		tables = append(tables, []TestJobFileInfo{
			{workdir, "nix-prepend-path", "push", "", platforms, secrets},
			{workdir, "inputs-via-env-context", "push", "", platforms, secrets},
			{workdir, "do-not-leak-step-env-in-composite", "push", "", platforms, secrets},
			{workdir, "set-env-step-env-override", "push", "", platforms, secrets},
			{workdir, "set-env-new-env-file-per-step", "push", "", platforms, secrets},
			{workdir, "no-panic-on-invalid-composite-action", "push", "jobs failed due to invalid action", platforms, secrets},
		}...)
	}

	for _, table := range tables {
		t.Run(table.workflowPath, func(t *testing.T) {
			table.runTest(ctx, t, &Config{})
		})
	}
}
|
|
|
|
func TestDryrunEvent(t *testing.T) {
|
|
if testing.Short() {
|
|
t.Skip("skipping integration test")
|
|
}
|
|
|
|
ctx := common.WithDryrun(context.Background(), true)
|
|
|
|
tables := []TestJobFileInfo{
|
|
// Shells
|
|
{workdir, "shells/defaults", "push", "", platforms, secrets},
|
|
{workdir, "shells/pwsh", "push", "", map[string]string{"ubuntu-latest": "catthehacker/ubuntu:pwsh-latest"}, secrets}, // custom image with pwsh
|
|
{workdir, "shells/bash", "push", "", platforms, secrets},
|
|
{workdir, "shells/python", "push", "", map[string]string{"ubuntu-latest": "node:16-buster"}, secrets}, // slim doesn't have python
|
|
{workdir, "shells/sh", "push", "", platforms, secrets},
|
|
|
|
// Local action
|
|
{workdir, "local-action-docker-url", "push", "", platforms, secrets},
|
|
{workdir, "local-action-dockerfile", "push", "", platforms, secrets},
|
|
{workdir, "local-action-via-composite-dockerfile", "push", "", platforms, secrets},
|
|
{workdir, "local-action-js", "push", "", platforms, secrets},
|
|
}
|
|
|
|
for _, table := range tables {
|
|
t.Run(table.workflowPath, func(t *testing.T) {
|
|
table.runTest(ctx, t, &Config{})
|
|
})
|
|
}
|
|
}
|
|
|
|
func TestDockerActionForcePullForceRebuild(t *testing.T) {
|
|
if testing.Short() {
|
|
t.Skip("skipping integration test")
|
|
}
|
|
|
|
ctx := context.Background()
|
|
|
|
config := &Config{
|
|
ForcePull: true,
|
|
ForceRebuild: true,
|
|
}
|
|
|
|
tables := []TestJobFileInfo{
|
|
{workdir, "local-action-dockerfile", "push", "", platforms, secrets},
|
|
{workdir, "local-action-via-composite-dockerfile", "push", "", platforms, secrets},
|
|
}
|
|
|
|
for _, table := range tables {
|
|
t.Run(table.workflowPath, func(t *testing.T) {
|
|
table.runTest(ctx, t, config)
|
|
})
|
|
}
|
|
}
|
|
|
|
func TestRunDifferentArchitecture(t *testing.T) {
|
|
if testing.Short() {
|
|
t.Skip("skipping integration test")
|
|
}
|
|
if _, ok := os.LookupEnv("NO_QEMU"); ok {
|
|
t.Skip("skipping test because QEMU is disabled")
|
|
}
|
|
|
|
tjfi := TestJobFileInfo{
|
|
workdir: workdir,
|
|
workflowPath: "basic",
|
|
eventName: "push",
|
|
errorMessage: "",
|
|
platforms: platforms,
|
|
}
|
|
|
|
tjfi.runTest(context.Background(), t, &Config{ContainerArchitecture: "linux/arm64"})
|
|
}
|
|
|
|
// maskJobLoggerFactory is a JobLoggerFactory that tees job log output into
// an in-memory buffer (for assertions) as well as stdout.
type maskJobLoggerFactory struct {
	// Output holds a copy of everything the job logger wrote.
	Output bytes.Buffer
}
|
|
|
|
func (f *maskJobLoggerFactory) WithJobLogger() *log.Logger {
|
|
logger := log.New()
|
|
logger.SetOutput(io.MultiWriter(&f.Output, os.Stdout))
|
|
logger.SetLevel(log.DebugLevel)
|
|
return logger
|
|
}
|
|
|
|
func TestMaskValues(t *testing.T) {
|
|
t.Skip("Disabled: fails in CI with 'container is not running' due to Docker lifecycle timing")
|
|
assertNoSecret := func(text string, _ string) {
|
|
found := strings.Contains(text, "composite secret")
|
|
if found {
|
|
t.Logf("\nFound Secret in the given text:\n%s\n", text)
|
|
}
|
|
assert.NotContains(t, text, "composite secret")
|
|
}
|
|
|
|
if testing.Short() {
|
|
t.Skip("skipping integration test")
|
|
}
|
|
|
|
log.SetLevel(log.DebugLevel)
|
|
|
|
tjfi := TestJobFileInfo{
|
|
workdir: workdir,
|
|
workflowPath: "mask-values",
|
|
eventName: "push",
|
|
errorMessage: "",
|
|
platforms: platforms,
|
|
}
|
|
|
|
logger := &maskJobLoggerFactory{}
|
|
tjfi.runTest(WithJobLoggerFactory(common.WithLogger(context.Background(), logger.WithJobLogger()), logger), t, &Config{})
|
|
output := logger.Output.String()
|
|
|
|
assertNoSecret(output, "secret value")
|
|
assertNoSecret(output, "YWJjCg==")
|
|
}
|
|
|
|
func TestRunEventSecrets(t *testing.T) {
|
|
t.Skip("Disabled: fails in CI with 'container is not running' due to Docker lifecycle timing")
|
|
if testing.Short() {
|
|
t.Skip("skipping integration test")
|
|
}
|
|
workflowPath := "secrets"
|
|
|
|
tjfi := TestJobFileInfo{
|
|
workdir: workdir,
|
|
workflowPath: workflowPath,
|
|
eventName: "push",
|
|
errorMessage: "",
|
|
platforms: platforms,
|
|
}
|
|
|
|
env, err := godotenv.Read(filepath.Join(workdir, workflowPath, ".env"))
|
|
require.NoError(t, err, "Failed to read .env")
|
|
secrets, _ := godotenv.Read(filepath.Join(workdir, workflowPath, ".secrets"))
|
|
require.NoError(t, err, "Failed to read .secrets")
|
|
|
|
tjfi.runTest(context.Background(), t, &Config{Secrets: secrets, Env: env})
|
|
}
|
|
|
|
func TestRunActionInputs(t *testing.T) {
|
|
t.Skip("Disabled: fails in CI with 'container is not running' due to Docker lifecycle timing")
|
|
if testing.Short() {
|
|
t.Skip("skipping integration test")
|
|
}
|
|
workflowPath := "input-from-cli"
|
|
|
|
tjfi := TestJobFileInfo{
|
|
workdir: workdir,
|
|
workflowPath: workflowPath,
|
|
eventName: "workflow_dispatch",
|
|
errorMessage: "",
|
|
platforms: platforms,
|
|
}
|
|
|
|
inputs := map[string]string{
|
|
"SOME_INPUT": "input",
|
|
}
|
|
|
|
tjfi.runTest(context.Background(), t, &Config{Inputs: inputs})
|
|
}
|
|
|
|
func TestRunEventPullRequest(t *testing.T) {
|
|
t.Skip("Disabled: fails in CI with nil PR number and 'container is not running' due to Docker lifecycle timing")
|
|
if testing.Short() {
|
|
t.Skip("skipping integration test")
|
|
}
|
|
|
|
workflowPath := "pull-request"
|
|
|
|
tjfi := TestJobFileInfo{
|
|
workdir: workdir,
|
|
workflowPath: workflowPath,
|
|
eventName: "pull_request",
|
|
errorMessage: "",
|
|
platforms: platforms,
|
|
}
|
|
|
|
tjfi.runTest(context.Background(), t, &Config{EventPath: filepath.Join(workdir, workflowPath, "event.json")})
|
|
}
|
|
|
|
func TestRunMatrixWithUserDefinedInclusions(t *testing.T) {
|
|
t.Skip("Disabled: fails in CI with 'container is not running' due to Docker lifecycle timing")
|
|
if testing.Short() {
|
|
t.Skip("skipping integration test")
|
|
}
|
|
workflowPath := "matrix-with-user-inclusions"
|
|
|
|
tjfi := TestJobFileInfo{
|
|
workdir: workdir,
|
|
workflowPath: workflowPath,
|
|
eventName: "push",
|
|
errorMessage: "",
|
|
platforms: platforms,
|
|
}
|
|
|
|
matrix := map[string]map[string]bool{
|
|
"node": {
|
|
"8": true,
|
|
"8.x": true,
|
|
},
|
|
"os": {
|
|
"ubuntu-18.04": true,
|
|
},
|
|
}
|
|
|
|
tjfi.runTest(context.Background(), t, &Config{Matrix: matrix})
|
|
}
|