feature: first commit

This commit is contained in:
charlesxie
2022-07-06 18:32:25 +08:00
parent b5dd3df988
commit 05843e1aeb
620 changed files with 68 additions and 62231 deletions

View File

@@ -1,50 +0,0 @@
# 任务调度平台
![image](https://user-images.githubusercontent.com/29135056/155830656-968f5881-5729-4347-94fc-b5a657ea9725.png)
## todo list
- [ ] 变量管理-模型设计
- [ ] 变量管理-crud接口
- [ ] 变量管理-前端页面
- [ ] 变量管理-接口对接
- [ ] 变量管理-变量集成到任务中,调整引擎中节点变量传递
-
- [ ] 任务管理-新增删除查看
- [ ] 任务管理-考虑运行中的任务,可不可修改
- [ ] 首页-聚合数据接口,高纬度展示图表
- [ ] 节点管理- 区分标准节点-和节点模版
- [ ] 节点管理- 编辑时的实时预览功能
- [ ] 节点管理- 自定节点组建后端代码逻辑的上传,和持久化
- [ ] 节点管理- 返回值规范未定义
- [ ] 节点管理- 克隆/导入/导出 优先级降低
- [ ] 节点管理- Table字段梳理前端冗余代码删减
- [ ] 节点管理- 搜索过滤功能
- [ ] 节点管理- 新建作业/导入作业 统一移动到作业列表页
- [ ] 作业流管理- 新建作业流/导入作业流 统一移动到作业流列表页
- [ ] 作业流管理- 分类接口,作业流关联
- [ ] 作业流管理- 克隆/导入/导出 优先级降低
- [ ] 作业流管理- 跳转到执行历史
- [ ] 作业流管理- 删除调度方式,移至任务里
- [ ] 作业流管理- 大流程的作业流创建失败 bug
- [ ] 任务管理- 新建任务
- [ ] 任务管理- 执行任务
- [ ] 任务管理- 定时任务和周期任务
- [ ] 变量管理- 模型设计
- [ ] 变量管理- 全局变量,局部变量,可变变量,常量
- [ ] 变量管理- 集成进任务里
- [ ] 作业监视- 暂停,停止,跳过,忽略,等人工干预操作
- [ ] 作业监视- 节点重试功能
- [ ] 作业监视- Table字段梳理前端冗余代码删减
- [ ] 作业监视- 搜索过滤功能
- [ ] 作业监视- 失败状态保存,失败状态判断
- [ ] 告警管理- 规划中
- [ ] 审计管理- 规划中
## install tips
sudo apt-get install libmysqlclient-dev
python3-dev

View File

@@ -1,3 +0,0 @@
from django.contrib import admin
# Register your models here.

View File

@@ -1,6 +0,0 @@
from django.apps import AppConfig
class FlowConfig(AppConfig):
    """Django application configuration for ``applications.flow``."""

    # Dotted path of the app package.
    name = 'applications.flow'
    # Use 64-bit auto primary keys for models that don't declare one.
    default_auto_field = 'django.db.models.BigAutoField'

View File

@@ -1,70 +0,0 @@
# Units accepted for the fail-retry interval; display labels are
# intentionally blank in this first iteration.
_RETRY_UNITS = ("seconds", "hours", "minutes")
FAIL_OFFSET_UNIT_CHOICE = tuple((unit, "") for unit in _RETRY_UNITS)

# Node type choices: 0 start, 1 end, 2 job, 3 sub-process,
# 4 condition branch, 5 converge gateway.
_NODE_TYPE_LABELS = ("开始节点", "结束节点", "作业节点", "子流程", "条件分支", "汇聚网关")
node_type = tuple(enumerate(_NODE_TYPE_LABELS))
class StateType(object):
    """Canonical bamboo-engine pipeline state names.

    Every attribute equals its own name, so the attributes can be
    compared directly against raw state strings from the engine.
    """

    CREATED, READY, RUNNING, SUSPENDED = "CREATED", "READY", "RUNNING", "SUSPENDED"
    BLOCKED, FINISHED, FAILED, REVOKED = "BLOCKED", "FINISHED", "FAILED", "REVOKED"
# Translate bamboo-engine pipeline states into the short tokens the
# front-end flow UI renders.
PIPELINE_STATE_TO_FLOW_STATE = dict((
    (StateType.READY, "wait"),
    (StateType.RUNNING, "run"),
    (StateType.FAILED, "fail"),
    (StateType.FINISHED, "success"),
    (StateType.SUSPENDED, "pause"),
    (StateType.REVOKED, "cancel"),
    (StateType.BLOCKED, "stop"),
    (StateType.CREATED, "positive"),
))
class NodeTemplateType:
    """Discriminator values stored in ``NodeTemplate.template_type``."""

    # Template with no pre-filled content.
    EmptyTemplate = "0"
    # Template that ships with predefined content.
    ContentTemplate = "2"
# Form-widget descriptors for the built-in HTTP-request node; the
# front-end renders one input widget per entry when editing node inputs.
a = [
    {"key": "url", "type": "textarea", "label": "请求地址:"},
    {
        "key": "method",
        "type": "select",
        "label": "请求类型:",
        "choices": [{"label": "GET", "value": "get"}],
    },
    {"key": "header", "type": "dict_map", "label": "Header"},
    {"key": "body", "type": "textarea", "label": "Body"},
    {"key": "timeout", "type": "number", "label": "超时时间:"},
]

# Default input payload matching the descriptors above.
i = {
    "url": "",
    "method": "get",
    "header": [{"key": "", "value": ""}],
    "body": "{}",
    "timeout": 60,
    "check_point": {"key": "", "condition": "", "values": ""},
}

View File

@@ -1,5 +0,0 @@
import django_filters as filters
class NodeTemplateFilter(filters.FilterSet):
    """FilterSet for NodeTemplate list endpoints.

    Supports ``?template_type=<value>`` query filtering, matched
    case-insensitively (``iexact``).
    """

    template_type = filters.CharFilter(lookup_expr="iexact")

View File

@@ -1,112 +0,0 @@
# Generated by Django 2.2.6 on 2022-02-09 03:29
import datetime
from django.db import migrations, models
import django.db.models.deletion
import django_mysql.models
class Migration(migrations.Migration):
    """Initial flow-app schema (auto-generated by ``makemigrations``).

    Creates Category, Process, ProcessRun, NodeRun and Node. NodeRun and
    Node share the same field list — both mirror an abstract node base.
    Do not edit by hand.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, verbose_name='分类名称')),
            ],
        ),
        migrations.CreateModel(
            name='Process',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, verbose_name='作业名称')),
                ('description', models.CharField(blank=True, max_length=255, null=True, verbose_name='作业描述')),
                ('run_type', models.CharField(max_length=32, verbose_name='调度类型')),
                ('total_run_count', models.PositiveIntegerField(default=0, verbose_name='执行次数')),
                ('gateways', django_mysql.models.JSONField(default=dict, verbose_name='网关信息')),
                ('constants', django_mysql.models.JSONField(default=dict, verbose_name='内部变量信息')),
                ('dag', django_mysql.models.JSONField(default=dict, verbose_name='DAG')),
                ('create_by', models.CharField(max_length=64, null=True, verbose_name='创建者')),
                ('create_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='创建时间')),
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='修改时间')),
                ('update_by', models.CharField(max_length=64, null=True, verbose_name='修改人')),
                ('category', models.ManyToManyField(to='flow.Category')),
            ],
        ),
        migrations.CreateModel(
            name='ProcessRun',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, verbose_name='作业名称')),
                ('description', models.CharField(blank=True, max_length=255, null=True, verbose_name='作业描述')),
                ('run_type', models.CharField(max_length=32, verbose_name='调度类型')),
                ('gateways', django_mysql.models.JSONField(default=dict, verbose_name='网关信息')),
                ('constants', django_mysql.models.JSONField(default=dict, verbose_name='内部变量信息')),
                ('dag', django_mysql.models.JSONField(default=dict, verbose_name='DAG')),
                ('create_by', models.CharField(max_length=64, null=True, verbose_name='创建者')),
                ('create_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='创建时间')),
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='修改时间')),
                ('update_by', models.CharField(max_length=64, null=True, verbose_name='修改人')),
                ('root_id', models.CharField(max_length=255, verbose_name='根节点uuid')),
                ('process', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='run', to='flow.Process')),
            ],
        ),
        migrations.CreateModel(
            name='NodeRun',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, verbose_name='节点名称')),
                ('uuid', models.CharField(max_length=255, unique=True, verbose_name='UUID')),
                ('description', models.CharField(blank=True, max_length=255, null=True, verbose_name='节点描述')),
                ('show', models.BooleanField(default=True, verbose_name='是否显示')),
                ('top', models.IntegerField(default=300)),
                ('left', models.IntegerField(default=300)),
                ('ico', models.CharField(blank=True, max_length=64, null=True, verbose_name='icon')),
                ('fail_retry_count', models.IntegerField(default=0, verbose_name='失败重试次数')),
                ('fail_offset', models.IntegerField(default=0, verbose_name='失败重试间隔')),
                ('fail_offset_unit', models.CharField(choices=[('seconds', ''), ('hours', ''), ('minutes', '')], max_length=32, verbose_name='重试间隔单位')),
                ('node_type', models.IntegerField(default=2)),
                ('component_code', models.CharField(max_length=255, verbose_name='插件名称')),
                ('is_skip_fail', models.BooleanField(default=False, verbose_name='忽略失败')),
                ('is_timeout_alarm', models.BooleanField(default=False, verbose_name='超时告警')),
                ('inputs', django_mysql.models.JSONField(default=dict, verbose_name='输入参数')),
                ('outputs', django_mysql.models.JSONField(default=dict, verbose_name='输出参数')),
                ('process_run', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='nodes_run', to='flow.ProcessRun')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Node',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, verbose_name='节点名称')),
                ('uuid', models.CharField(max_length=255, unique=True, verbose_name='UUID')),
                ('description', models.CharField(blank=True, max_length=255, null=True, verbose_name='节点描述')),
                ('show', models.BooleanField(default=True, verbose_name='是否显示')),
                ('top', models.IntegerField(default=300)),
                ('left', models.IntegerField(default=300)),
                ('ico', models.CharField(blank=True, max_length=64, null=True, verbose_name='icon')),
                ('fail_retry_count', models.IntegerField(default=0, verbose_name='失败重试次数')),
                ('fail_offset', models.IntegerField(default=0, verbose_name='失败重试间隔')),
                ('fail_offset_unit', models.CharField(choices=[('seconds', ''), ('hours', ''), ('minutes', '')], max_length=32, verbose_name='重试间隔单位')),
                ('node_type', models.IntegerField(default=2)),
                ('component_code', models.CharField(max_length=255, verbose_name='插件名称')),
                ('is_skip_fail', models.BooleanField(default=False, verbose_name='忽略失败')),
                ('is_timeout_alarm', models.BooleanField(default=False, verbose_name='超时告警')),
                ('inputs', django_mysql.models.JSONField(default=dict, verbose_name='输入参数')),
                ('outputs', django_mysql.models.JSONField(default=dict, verbose_name='输出参数')),
                ('process', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='nodes', to='flow.Process')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]

View File

@@ -1,40 +0,0 @@
# Generated by Django 2.2.6 on 2022-02-10 14:21
from django.db import migrations, models
import django_mysql.models
class Migration(migrations.Migration):
    """0002: add the NodeTemplate table (auto-generated; do not edit)."""

    dependencies = [
        ('flow', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='NodeTemplate',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, verbose_name='节点名称')),
                ('uuid', models.CharField(max_length=255, unique=True, verbose_name='UUID')),
                ('description', models.CharField(blank=True, max_length=255, null=True, verbose_name='节点描述')),
                ('show', models.BooleanField(default=True, verbose_name='是否显示')),
                ('top', models.IntegerField(default=300)),
                ('left', models.IntegerField(default=300)),
                ('ico', models.CharField(blank=True, max_length=64, null=True, verbose_name='icon')),
                ('fail_retry_count', models.IntegerField(default=0, verbose_name='失败重试次数')),
                ('fail_offset', models.IntegerField(default=0, verbose_name='失败重试间隔')),
                ('fail_offset_unit', models.CharField(choices=[('seconds', ''), ('hours', ''), ('minutes', '')], max_length=32, verbose_name='重试间隔单位')),
                ('node_type', models.IntegerField(default=2)),
                ('component_code', models.CharField(max_length=255, verbose_name='插件名称')),
                ('is_skip_fail', models.BooleanField(default=False, verbose_name='忽略失败')),
                ('is_timeout_alarm', models.BooleanField(default=False, verbose_name='超时告警')),
                ('inputs', django_mysql.models.JSONField(default=dict, verbose_name='输入参数')),
                ('outputs', django_mysql.models.JSONField(default=dict, verbose_name='输出参数')),
                ('template_type', models.CharField(default='2', max_length=1, verbose_name='节点模板类型')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]

View File

@@ -1,28 +0,0 @@
# Generated by Django 2.2.6 on 2022-02-10 17:37
from django.db import migrations, models
class Migration(migrations.Migration):
    """0003: add the ``content`` (template id) column to all node tables
    (auto-generated; do not edit)."""

    dependencies = [
        ('flow', '0002_nodetemplate'),
    ]

    operations = [
        migrations.AddField(
            model_name='node',
            name='content',
            field=models.IntegerField(default=0, verbose_name='模板id'),
        ),
        migrations.AddField(
            model_name='noderun',
            name='content',
            field=models.IntegerField(default=0, verbose_name='模板id'),
        ),
        migrations.AddField(
            model_name='nodetemplate',
            name='content',
            field=models.IntegerField(default=0, verbose_name='模板id'),
        ),
    ]

View File

@@ -1,24 +0,0 @@
# Generated by Django 2.2.6 on 2022-02-26 12:02
from django.db import migrations
import django_mysql.models
class Migration(migrations.Migration):
    """0004: add front-end widget-descriptor JSON columns to NodeTemplate
    (auto-generated; do not edit)."""

    dependencies = [
        ('flow', '0003_auto_20220210_1737'),
    ]

    operations = [
        migrations.AddField(
            model_name='nodetemplate',
            name='inputs_component',
            field=django_mysql.models.JSONField(default=list, verbose_name='前端参数组件'),
        ),
        migrations.AddField(
            model_name='nodetemplate',
            name='outputs_component',
            field=django_mysql.models.JSONField(default=list, verbose_name='前端参数组件'),
        ),
    ]

View File

@@ -1,62 +0,0 @@
# Generated by Django 2.2.6 on 2022-06-16 16:14
import datetime
from django.db import migrations, models
import django.db.models.deletion
import django_mysql.models
class Migration(migrations.Migration):
    """0005: add SubProcessRun and SubNodeRun tables for sub-process
    execution snapshots (auto-generated; do not edit)."""

    dependencies = [
        ('flow', '0004_auto_20220226_1202'),
    ]

    operations = [
        migrations.CreateModel(
            name='SubProcessRun',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, verbose_name='作业名称')),
                ('description', models.CharField(blank=True, max_length=255, null=True, verbose_name='作业描述')),
                ('run_type', models.CharField(max_length=32, verbose_name='调度类型')),
                ('gateways', django_mysql.models.JSONField(default=dict, verbose_name='网关信息')),
                ('constants', django_mysql.models.JSONField(default=dict, verbose_name='内部变量信息')),
                ('dag', django_mysql.models.JSONField(default=dict, verbose_name='DAG')),
                ('create_by', models.CharField(max_length=64, null=True, verbose_name='创建者')),
                ('create_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='创建时间')),
                ('update_time', models.DateTimeField(auto_now=True, verbose_name='修改时间')),
                ('update_by', models.CharField(max_length=64, null=True, verbose_name='修改人')),
                ('root_id', models.CharField(max_length=255, verbose_name='根节点uuid')),
                ('process', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='sub_run', to='flow.Process')),
                ('process_run', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='sub', to='flow.Process')),
            ],
        ),
        migrations.CreateModel(
            name='SubNodeRun',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, verbose_name='节点名称')),
                ('uuid', models.CharField(max_length=255, unique=True, verbose_name='UUID')),
                ('description', models.CharField(blank=True, max_length=255, null=True, verbose_name='节点描述')),
                ('show', models.BooleanField(default=True, verbose_name='是否显示')),
                ('top', models.IntegerField(default=300)),
                ('left', models.IntegerField(default=300)),
                ('ico', models.CharField(blank=True, max_length=64, null=True, verbose_name='icon')),
                ('fail_retry_count', models.IntegerField(default=0, verbose_name='失败重试次数')),
                ('fail_offset', models.IntegerField(default=0, verbose_name='失败重试间隔')),
                ('fail_offset_unit', models.CharField(choices=[('seconds', ''), ('hours', ''), ('minutes', '')], max_length=32, verbose_name='重试间隔单位')),
                ('node_type', models.IntegerField(default=2)),
                ('component_code', models.CharField(max_length=255, verbose_name='插件名称')),
                ('is_skip_fail', models.BooleanField(default=False, verbose_name='忽略失败')),
                ('is_timeout_alarm', models.BooleanField(default=False, verbose_name='超时告警')),
                ('inputs', django_mysql.models.JSONField(default=dict, verbose_name='输入参数')),
                ('outputs', django_mysql.models.JSONField(default=dict, verbose_name='输出参数')),
                ('content', models.IntegerField(default=0, verbose_name='模板id')),
                ('subprocess_run', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='sub_nodes_run', to='flow.SubProcessRun')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]

View File

@@ -1,24 +0,0 @@
# Generated by Django 2.2.6 on 2022-06-16 16:16
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """0006: switch run-node FKs from SET_NULL to CASCADE so node rows
    are removed with their run (auto-generated; do not edit)."""

    dependencies = [
        ('flow', '0005_subnoderun_subprocessrun'),
    ]

    operations = [
        migrations.AlterField(
            model_name='noderun',
            name='process_run',
            field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='nodes_run', to='flow.ProcessRun'),
        ),
        migrations.AlterField(
            model_name='subnoderun',
            name='subprocess_run',
            field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='sub_nodes_run', to='flow.SubProcessRun'),
        ),
    ]

View File

@@ -1,142 +0,0 @@
from datetime import datetime
from django.db import models
from django_mysql.models import JSONField
from applications.flow.constants import FAIL_OFFSET_UNIT_CHOICE, NodeTemplateType
class Category(models.Model):
    """Process category; attached to Process through a many-to-many."""

    name = models.CharField("分类名称", max_length=255, blank=False, null=False)
class Process(models.Model):
    """A designed job flow (the editable template, not a run)."""

    name = models.CharField("作业名称", max_length=255, blank=False, null=False)
    description = models.CharField("作业描述", max_length=255, blank=True, null=True)
    category = models.ManyToManyField(Category)
    run_type = models.CharField("调度类型", max_length=32)
    # Incremented per execution (counter of runs).
    total_run_count = models.PositiveIntegerField("执行次数", default=0)
    gateways = JSONField("网关信息", default=dict)
    constants = JSONField("内部变量信息", default=dict)
    # Adjacency lists keyed by node uuid: {from_uuid: [to_uuid, ...]}.
    dag = JSONField("DAG", default=dict)
    create_by = models.CharField("创建者", max_length=64, null=True)
    create_time = models.DateTimeField("创建时间", default=datetime.now)
    update_time = models.DateTimeField("修改时间", auto_now=True)
    update_by = models.CharField("修改人", max_length=64, null=True)

    @property
    def clone_data(self):
        """Snapshot of the fields copied into a ProcessRun/SubProcessRun."""
        return {
            "name": self.name,
            "description": self.description,
            "run_type": self.run_type,
            "gateways": self.gateways,
            "constants": self.constants,
            "dag": self.dag,
        }
class BaseNode(models.Model):
    """Abstract base shared by designed nodes, run snapshots and templates."""

    # Values stored in ``node_type`` below.
    START_NODE = 0
    END_NODE = 1
    JOB_NODE = 2
    SUB_PROCESS_NODE = 3
    CONDITION_NODE = 4
    CONVERGE_NODE = 5
    PARALLEL_NODE = 6
    CONDITION_PARALLEL_NODE = 7

    name = models.CharField("节点名称", max_length=255, blank=False, null=False)
    uuid = models.CharField("UUID", max_length=255, unique=True)
    description = models.CharField("节点描述", max_length=255, blank=True, null=True)
    show = models.BooleanField("是否显示", default=True)
    # Canvas coordinates used by the front-end editor.
    top = models.IntegerField(default=300)
    left = models.IntegerField(default=300)
    ico = models.CharField("icon", max_length=64, blank=True, null=True)
    fail_retry_count = models.IntegerField("失败重试次数", default=0)
    fail_offset = models.IntegerField("失败重试间隔", default=0)
    fail_offset_unit = models.CharField("重试间隔单位", choices=FAIL_OFFSET_UNIT_CHOICE, max_length=32)
    # 0 start, 1 end, 2 job, 3 sub-process, 4 branch, 5 converge, 6 parallel.
    node_type = models.IntegerField(default=2)
    component_code = models.CharField("插件名称", max_length=255, blank=False, null=False)
    is_skip_fail = models.BooleanField("忽略失败", default=False)
    is_timeout_alarm = models.BooleanField("超时告警", default=False)
    inputs = JSONField("输入参数", default=dict)
    outputs = JSONField("输出参数", default=dict)
    # For a sub-process node this is the Process id; for a template-based
    # node it is the NodeTemplate id. 0 means "no binding".
    content = models.IntegerField("模板id", default=0)

    class Meta:
        abstract = True
class Node(BaseNode):
    """A node of a designed Process (the editable template side)."""

    # SET_NULL so nodes are not deleted together with their process row here.
    process = models.ForeignKey(Process, on_delete=models.SET_NULL, null=True, db_constraint=False,
                                related_name="nodes")
class ProcessRun(models.Model):
    """Immutable snapshot of a Process captured for one execution."""

    # Source design; SET_NULL keeps run history if the design is deleted.
    process = models.ForeignKey(Process, on_delete=models.SET_NULL, null=True, db_constraint=False,
                                related_name="run")
    name = models.CharField("作业名称", max_length=255, blank=False, null=False)
    description = models.CharField("作业描述", max_length=255, blank=True, null=True)
    run_type = models.CharField("调度类型", max_length=32)
    gateways = JSONField("网关信息", default=dict)
    constants = JSONField("内部变量信息", default=dict)
    dag = JSONField("DAG", default=dict)
    create_by = models.CharField("创建者", max_length=64, null=True)
    create_time = models.DateTimeField("创建时间", default=datetime.now)
    update_time = models.DateTimeField("修改时间", auto_now=True)
    update_by = models.CharField("修改人", max_length=64, null=True)
    # bamboo-engine root pipeline id used to query run state.
    root_id = models.CharField("根节点uuid", max_length=255)
class SubProcessRun(models.Model):
    """Execution snapshot of a sub-process embedded in a parent run."""

    # NOTE(review): this FK is named process_run but targets Process
    # (not ProcessRun) — confirm whether that is intentional.
    process_run = models.ForeignKey(Process, on_delete=models.CASCADE, null=True, db_constraint=False,
                                    related_name="sub")
    process = models.ForeignKey(Process, on_delete=models.SET_NULL, null=True, db_constraint=False,
                                related_name="sub_run")
    name = models.CharField("作业名称", max_length=255, blank=False, null=False)
    description = models.CharField("作业描述", max_length=255, blank=True, null=True)
    run_type = models.CharField("调度类型", max_length=32)
    gateways = JSONField("网关信息", default=dict)
    constants = JSONField("内部变量信息", default=dict)
    dag = JSONField("DAG", default=dict)
    create_by = models.CharField("创建者", max_length=64, null=True)
    create_time = models.DateTimeField("创建时间", default=datetime.now)
    update_time = models.DateTimeField("修改时间", auto_now=True)
    update_by = models.CharField("修改人", max_length=64, null=True)
    # bamboo-engine root pipeline id of the sub-process.
    root_id = models.CharField("根节点uuid", max_length=255)
class SubNodeRun(BaseNode):
    """Runtime copy of a node inside a sub-process run."""

    subprocess_run = models.ForeignKey(SubProcessRun, on_delete=models.CASCADE, null=True, db_constraint=False,
                                       related_name="sub_nodes_run")

    @staticmethod
    def field_names():
        """Return names of all fields except the auto primary key.

        Bug fix: the original introspected ``NodeRun._meta`` here, which
        would report NodeRun's ``process_run`` FK instead of this model's
        ``subprocess_run`` FK.
        """
        return [field.name for field in SubNodeRun._meta.get_fields() if field.name not in ["id"]]
class NodeRun(BaseNode):
    """Runtime copy of a node inside a top-level process run."""

    process_run = models.ForeignKey(ProcessRun, on_delete=models.CASCADE, null=True, db_constraint=False,
                                    related_name="nodes_run")

    @staticmethod
    def field_names():
        # All field names except the auto primary key; used when cloning
        # designed Node rows into NodeRun rows.
        return [field.name for field in NodeRun._meta.get_fields() if field.name not in ["id"]]
class NodeTemplate(BaseNode):
    """Reusable node definition; see NodeTemplateType for template_type values."""

    template_type = models.CharField("节点模板类型", max_length=1, default=NodeTemplateType.ContentTemplate)
    # Front-end widget descriptors rendered when editing inputs/outputs.
    inputs_component = JSONField("前端参数组件", default=list)
    outputs_component = JSONField("前端参数组件", default=list)

View File

@@ -1,362 +0,0 @@
import json
from bamboo_engine import api
from django.db import transaction
from pipeline.eri.models import State
from pipeline.eri.runtime import BambooDjangoRuntime
from rest_framework import serializers
from applications.flow.constants import PIPELINE_STATE_TO_FLOW_STATE
from applications.flow.models import Process, Node, ProcessRun, NodeRun, NodeTemplate, SubProcessRun, SubNodeRun
from applications.utils.uuid_helper import get_uuid
class ProcessViewSetsSerializer(serializers.Serializer):
    """Create or update a Process plus its Node set from the front-end
    ``pipeline_tree`` payload ({"nodes": [...], "lines": [...]}).

    Fixes over the original:
    * create() copied ``is_skip_fail`` into ``is_timeout_alarm``;
    * bulk_update omitted the ``content`` field, silently dropping
      template bindings on save;
    * create()/update() disagreed on the default node_type (2 vs 3) —
      unified to 2 (job node), matching the model default.
    """

    name = serializers.CharField(required=True)
    description = serializers.CharField(required=False, allow_blank=True)
    category = serializers.ListField(default="null")
    run_type = serializers.CharField(default="null")
    pipeline_tree = serializers.JSONField(required=True)

    # Every field _assign_node_fields() sets must be listed here so
    # bulk_update persists it ("content" was missing in the original).
    _NODE_UPDATE_FIELDS = ["name", "description", "fail_retry_count", "fail_offset",
                           "fail_offset_unit", "node_type", "is_skip_fail",
                           "is_timeout_alarm", "inputs", "show", "top", "left", "ico",
                           "outputs", "component_code", "content"]

    def save(self, **kwargs):
        """Dispatch to update() when bound to an instance, else create()."""
        if self.instance is not None:
            self.update(instance=self.instance, validated_data=self.validated_data)
        else:
            self.create(validated_data=self.validated_data)

    @staticmethod
    def _parse_tree(pipeline_tree):
        """Return (node_map keyed by uuid, dag adjacency dict from lines)."""
        node_map = {node["uuid"]: node for node in pipeline_tree["nodes"]}
        dag = {uuid: [] for uuid in node_map}
        for line in pipeline_tree["lines"]:
            dag[line["from"]].append(line["to"])
        return node_map, dag

    @staticmethod
    def _assign_node_fields(node_obj, node):
        """Copy one pipeline_tree node payload onto a Node instance.

        Does not touch ``uuid``/``process`` — callers set those on the
        create path only.
        """
        node_data = node["node_data"]
        # inputs may arrive as a dict or a JSON-encoded string.
        node_inputs = node_data.get("inputs", {})
        if not isinstance(node_inputs, dict):
            node_inputs = json.loads(node_data["inputs"])
        node_obj.content = node.get("content", 0) or 0
        node_obj.name = node_data["node_name"]
        node_obj.description = node_data["description"]
        node_obj.fail_retry_count = node_data.get("fail_retry_count", 0) or 0
        node_obj.fail_offset = node_data.get("fail_offset", 0) or 0
        node_obj.fail_offset_unit = node_data.get("fail_offset_unit", "seconds")
        # Default to a job node (2), matching the model default.
        node_obj.node_type = node.get("type", 2)
        node_obj.is_skip_fail = node_data["is_skip_fail"]
        # Bug fix: was copied from node_data["is_skip_fail"] on create.
        node_obj.is_timeout_alarm = node_data["is_timeout_alarm"]
        node_obj.inputs = node_inputs
        node_obj.show = node["show"]
        node_obj.top = node["top"]
        node_obj.left = node["left"]
        node_obj.ico = node["ico"]
        node_obj.outputs = {}
        node_obj.component_code = "http_request"
        return node_obj

    def create(self, validated_data):
        """Persist a new Process and bulk-create its nodes atomically."""
        node_map, dag = self._parse_tree(validated_data["pipeline_tree"])
        with transaction.atomic():
            process = Process.objects.create(name=validated_data["name"],
                                             description=validated_data["description"],
                                             run_type=validated_data["run_type"],
                                             dag=dag)
            bulk_nodes = []
            for node in node_map.values():
                node_obj = self._assign_node_fields(Node(), node)
                node_obj.process = process
                node_obj.uuid = node["uuid"]
                bulk_nodes.append(node_obj)
            Node.objects.bulk_create(bulk_nodes, batch_size=500)
        self._data = {}

    def update(self, instance, validated_data):
        """Update the Process row; upsert its nodes by uuid atomically."""
        node_map, dag = self._parse_tree(validated_data["pipeline_tree"])
        with transaction.atomic():
            instance.name = validated_data["name"]
            instance.description = validated_data["description"]
            instance.run_type = validated_data["run_type"]
            instance.dag = dag
            instance.save()
            bulk_update_nodes = []
            bulk_create_nodes = []
            node_dict = Node.objects.filter(process_id=instance.id).in_bulk(field_name="uuid")
            for node in node_map.values():
                node_obj = node_dict.get(node["uuid"], None)
                if node_obj is not None:
                    self._assign_node_fields(node_obj, node)
                    bulk_update_nodes.append(node_obj)
                else:
                    node_obj = self._assign_node_fields(Node(), node)
                    node_obj.uuid = node["uuid"]
                    node_obj.process_id = instance.id
                    bulk_create_nodes.append(node_obj)
            Node.objects.bulk_update(bulk_update_nodes,
                                     fields=self._NODE_UPDATE_FIELDS, batch_size=500)
            Node.objects.bulk_create(bulk_create_nodes, batch_size=500)
        self._data = {}
class ListProcessViewSetsSerializer(serializers.ModelSerializer):
    """List serializer for Process; omits the potentially large dag blob."""

    class Meta:
        model = Process
        # fields = "__all__"
        exclude = ("dag",)
class ListProcessRunViewSetsSerializer(serializers.ModelSerializer):
    """List serializer for ProcessRun rows, annotated with live engine state."""

    state = serializers.SerializerMethodField()

    class Meta:
        model = ProcessRun
        fields = "__all__"

    def get_state(self, obj):
        """Map the run's bamboo-engine state to a flow-UI token.

        A missing/unreadable state entry degrades to "error" rather
        than failing the whole listing.
        """
        runtime = BambooDjangoRuntime()
        states = api.get_pipeline_states(runtime, root_id=obj.root_id)
        try:
            return PIPELINE_STATE_TO_FLOW_STATE.get(states.data[obj.root_id]["state"])
        except Exception:
            return "error"
class ListSubProcessRunViewSetsSerializer(serializers.ModelSerializer):
    """List serializer for SubProcessRun rows, annotated with live engine state."""

    state = serializers.SerializerMethodField()

    class Meta:
        model = SubProcessRun
        fields = "__all__"

    def get_state(self, obj):
        """Map the sub-run's bamboo-engine state to a flow-UI token.

        A missing/unreadable state entry degrades to "error" rather
        than failing the whole listing.
        """
        runtime = BambooDjangoRuntime()
        states = api.get_pipeline_states(runtime, root_id=obj.root_id)
        try:
            return PIPELINE_STATE_TO_FLOW_STATE.get(states.data[obj.root_id]["state"])
        except Exception:
            return "error"
class RetrieveProcessViewSetsSerializer(serializers.ModelSerializer):
    """Detail serializer for Process: rebuilds the front-end pipeline_tree."""

    pipeline_tree = serializers.SerializerMethodField()

    # category = serializers.SerializerMethodField()
    #
    # def get_category(self, obj):
    #     return obj.category.all()

    def get_pipeline_tree(self, obj):
        """Reconstruct {"lines": [...], "nodes": [...]} from dag + Node rows."""
        lines = []
        nodes = []
        # dag is stored as adjacency lists: {from_uuid: [to_uuid, ...]}.
        for _from, to_list in obj.dag.items():
            for _to in to_list:
                lines.append({
                    "from": _from,
                    "to": _to
                })
        node_list = Node.objects.filter(process_id=obj.id).values()
        # Batch-fetch referenced node templates to avoid per-node queries.
        node_content_id = [node["content"] for node in node_list if node.get("content", 0)]
        content_map = NodeTemplate.objects.filter(id__in=node_content_id).in_bulk()
        for node in node_list:
            node_template = content_map.get(node.get("content", 0), "")
            inputs_component = ""
            if node_template:
                inputs_component = json.dumps(node_template.inputs_component)
            nodes.append({"show": node["show"],
                          "top": node["top"],
                          "left": node["left"],
                          "ico": node["ico"],
                          "type": node["node_type"],
                          "name": node["name"],
                          "content": node["content"],
                          "node_data": {
                              "inputs": json.dumps(node["inputs"]),
                              "inputs_component": inputs_component,
                              "run_mark": 0,
                              "node_name": node["name"],
                              "description": node["description"],
                              "fail_retry_count": node["fail_retry_count"],
                              "fail_offset": node["fail_offset"],
                              "fail_offset_unit": node["fail_offset_unit"],
                              "is_skip_fail": node["is_skip_fail"],
                              "is_timeout_alarm": node["is_timeout_alarm"]},
                          "uuid": node["uuid"]})
        return {"lines": lines, "nodes": nodes}

    class Meta:
        model = Process
        fields = ("id", "name", "description", "category", "run_type", "pipeline_tree")
class RetrieveProcessRunViewSetsSerializer(serializers.ModelSerializer):
    """Detail serializer for ProcessRun: pipeline_tree with live node states."""

    pipeline_tree = serializers.SerializerMethodField()

    def get_pipeline_tree(self, obj):
        """Rebuild the front-end tree, annotating every node with its state."""
        lines = []
        nodes = []
        # dag is adjacency lists: {from_uuid: [to_uuid, ...]}.
        for _from, to_list in obj.dag.items():
            for _to in to_list:
                lines.append({
                    "from": _from,
                    "to": _to
                })
        runtime = BambooDjangoRuntime()
        process_info = api.get_pipeline_states(runtime, root_id=obj.root_id)
        process_state = PIPELINE_STATE_TO_FLOW_STATE.get(process_info.data[obj.root_id]["state"])
        state_map = process_info.data[obj.root_id]["children"]
        node_list = NodeRun.objects.filter(process_run_id=obj.id).values()
        for node in node_list:
            # Nodes the engine has not reached yet default to READY ("wait").
            pipeline_state = state_map.get(node["uuid"], {}).get("state", "READY")
            flow_state = PIPELINE_STATE_TO_FLOW_STATE[pipeline_state]
            outputs = ""
            # Start/end nodes (0/1) carry no outputs; unstarted nodes have none yet.
            if node["node_type"] not in [0, 1] and flow_state not in ["wait"]:
                output_data = api.get_execution_data_outputs(runtime, node_id=node["uuid"])
                outputs = output_data.data.get("outputs", "")
            if node["node_type"] == 3:
                # TODO: naive rule — a sub-process node counts as failed if
                # any of its child State rows is FAILED.
                if State.objects.filter(parent_id=node["uuid"], name="FAILED").exists():
                    flow_state = "fail"
            # TODO: naive rule — any failed node fails the whole process.
            if flow_state == "fail":
                process_state = "fail"
            nodes.append({"show": node["show"],
                          "top": node["top"],
                          "left": node["left"],
                          "ico": node["ico"],
                          "type": node["node_type"],
                          "name": node["name"],
                          "state": flow_state,
                          "content": node["content"],
                          "node_data": {
                              "inputs": node["inputs"],
                              "outputs": outputs,
                              "run_mark": 0,
                              "node_name": node["name"],
                              "description": node["description"],
                              "fail_retry_count": node["fail_retry_count"],
                              "fail_offset": node["fail_offset"],
                              "fail_offset_unit": node["fail_offset_unit"],
                              "is_skip_fail": node["is_skip_fail"],
                              "is_timeout_alarm": node["is_timeout_alarm"]},
                          "uuid": node["uuid"]})
        return {"lines": lines, "nodes": nodes, "process_state": process_state}

    class Meta:
        model = ProcessRun
        fields = ("id", "name", "description", "run_type", "pipeline_tree")
class RetrieveSubProcessRunViewSetsSerializer(serializers.ModelSerializer):
    """Detail serializer for SubProcessRun: pipeline_tree with live node states."""

    pipeline_tree = serializers.SerializerMethodField()

    def get_pipeline_tree(self, obj):
        """Rebuild the front-end tree, annotating every node with its state."""
        lines = []
        nodes = []
        # dag is adjacency lists: {from_uuid: [to_uuid, ...]}.
        for _from, to_list in obj.dag.items():
            for _to in to_list:
                lines.append({
                    "from": _from,
                    "to": _to
                })
        runtime = BambooDjangoRuntime()
        process_info = api.get_pipeline_states(runtime, root_id=obj.root_id)
        process_state = PIPELINE_STATE_TO_FLOW_STATE.get(process_info.data[obj.root_id]["state"])
        state_map = process_info.data[obj.root_id]["children"]
        node_list = SubNodeRun.objects.filter(subprocess_run_id=obj.id).values()
        for node in node_list:
            # Nodes the engine has not reached yet default to READY ("wait").
            pipeline_state = state_map.get(node["uuid"], {}).get("state", "READY")
            flow_state = PIPELINE_STATE_TO_FLOW_STATE[pipeline_state]
            outputs = ""
            # Start/end nodes (0/1) carry no outputs; unstarted nodes have none yet.
            if node["node_type"] not in [0, 1] and flow_state not in ["wait"]:
                output_data = api.get_execution_data_outputs(runtime, node_id=node["uuid"])
                outputs = output_data.data.get("outputs", "")
            if node["node_type"] == 3:
                # TODO: naive rule — a sub-process node counts as failed if
                # any of its child State rows is FAILED.
                if State.objects.filter(parent_id=node["uuid"], name="FAILED").exists():
                    flow_state = "fail"
            # TODO: naive rule — any failed node fails the whole process.
            if flow_state == "fail":
                process_state = "fail"
            nodes.append({"show": node["show"],
                          "top": node["top"],
                          "left": node["left"],
                          "ico": node["ico"],
                          "type": node["node_type"],
                          "name": node["name"],
                          "state": flow_state,
                          "content": node["content"],
                          "node_data": {
                              "inputs": node["inputs"],
                              "outputs": outputs,
                              "run_mark": 0,
                              "node_name": node["name"],
                              "description": node["description"],
                              "fail_retry_count": node["fail_retry_count"],
                              "fail_offset": node["fail_offset"],
                              "fail_offset_unit": node["fail_offset_unit"],
                              "is_skip_fail": node["is_skip_fail"],
                              "is_timeout_alarm": node["is_timeout_alarm"]},
                          "uuid": node["uuid"]})
        return {"lines": lines, "nodes": nodes, "process_state": process_state}

    class Meta:
        model = SubProcessRun
        fields = ("id", "name", "description", "run_type", "pipeline_tree")
class ExecuteProcessSerializer(serializers.Serializer):
    """Input serializer for the `execute` action: identifies the Process to run."""
    # id of the Process template to build and execute
    process_id = serializers.IntegerField(required=True)
class NodeTemplateSerializer(serializers.ModelSerializer):
    """CRUD serializer for NodeTemplate with server-generated uuids."""

    def validate(self, attrs):
        # A fresh uuid is injected on every validated write.
        # NOTE(review): this regenerates the uuid on updates as well as creates —
        # confirm that is intended and not create-only behaviour.
        attrs["uuid"] = get_uuid()
        return attrs

    class Meta:
        model = NodeTemplate
        # uuid is server-generated, never accepted from or exposed to clients.
        exclude = ("uuid",)

View File

@@ -1,3 +0,0 @@
from django.test import TestCase
# Create your tests here.

View File

@@ -1,12 +0,0 @@
from rest_framework.routers import DefaultRouter
from . import views
# Routers exposing the flow app's viewsets.
# NOTE(review): `base_name` was renamed to `basename` in DRF 3.9 and removed in
# DRF 3.11 — confirm the pinned djangorestframework version still accepts it.
flow_router = DefaultRouter()
flow_router.register(r"flow", viewset=views.ProcessViewSets, base_name="flow")
flow_router.register(r"run", viewset=views.ProcessRunViewSets, base_name="run")
flow_router.register(r"sub_run", viewset=views.SubProcessRunViewSets, base_name="sub_run")
flow_router.register(r"test", viewset=views.TestViewSets, base_name="test")
# Separate router for node-template endpoints.
node_router = DefaultRouter()
node_router.register(r"template", viewset=views.NodeTemplateViewSet, base_name="template")

View File

@@ -1,43 +0,0 @@
from applications.flow.models import ProcessRun, NodeRun, Process, Node, SubProcessRun, SubNodeRun
from applications.utils.dag_helper import PipelineBuilder, instance_dag
def build_and_create_process(process_id):
    """Build a bamboo pipeline for a Process and persist its run-time snapshot.

    Creates a ProcessRun plus its NodeRun rows (and SubProcessRun trees for
    any sub-process nodes), then returns the built pipeline dict ready for
    ``api.run_pipeline``.
    """
    p_builder = PipelineBuilder(process_id)
    pipeline = p_builder.build()
    process = p_builder.process
    node_map = p_builder.node_map
    # Maps template node uuid -> bamboo builder instance (instance.id is the run uuid).
    process_run_uuid = p_builder.instance
    # Snapshot of the template, with the DAG re-keyed to run-instance uuids.
    process_run_data = process.clone_data
    process_run_data["dag"] = instance_dag(process_run_data["dag"], process_run_uuid)
    process_run = ProcessRun.objects.create(process_id=process.id, root_id=pipeline["id"], **process_run_data)
    node_run_bulk = []
    for pipeline_id, node in node_map.items():
        # Copy only the fields NodeRun shares with Node.
        _node = {k: v for k, v in node.__dict__.items() if k in NodeRun.field_names()}
        _node["uuid"] = process_run_uuid[pipeline_id].id
        node_run_bulk.append(NodeRun(process_run=process_run, **_node))
        if node.node_type == Node.SUB_PROCESS_NODE:
            # Sub-process nodes store the child Process id in `content`.
            create_subprocess(node.content, process_run.id, process_run_uuid, pipeline["id"])
    NodeRun.objects.bulk_create(node_run_bulk, batch_size=500)
    return pipeline
def create_subprocess(process_id, process_run_id, process_run_uuid, root_id):
    """Recursively persist the run-time snapshot of a nested Process.

    Mirrors build_and_create_process one level down: creates a SubProcessRun
    plus SubNodeRun rows, recursing into deeper sub-processes. All levels
    share the top-level pipeline ``root_id``.
    """
    process = Process.objects.filter(id=process_id).first()
    process_run_data = process.clone_data
    process_run_data["dag"] = instance_dag(process_run_data["dag"], process_run_uuid)
    process_run = SubProcessRun.objects.create(process_id=process_id, process_run_id=process_run_id, root_id=root_id,
                                               **process_run_data)
    subprocess_node_map = Node.objects.filter(process_id=process_id).in_bulk(field_name="uuid")
    node_run_bulk = []
    for pipeline_id, node in subprocess_node_map.items():
        # Copy only the fields shared with Node (NodeRun/SubNodeRun share them).
        _node = {k: v for k, v in node.__dict__.items() if k in NodeRun.field_names()}
        _node["uuid"] = process_run_uuid[pipeline_id].id
        node_run_bulk.append(SubNodeRun(subprocess_run=process_run, **_node))
        if node.node_type == Node.SUB_PROCESS_NODE:
            create_subprocess(node.content, process_run_id, process_run_uuid, root_id)
    SubNodeRun.objects.bulk_create(node_run_bulk, batch_size=500)

View File

@@ -1,144 +0,0 @@
from datetime import datetime
import random
from django.db.models import F
from applications.flow.utils import build_and_create_process
from bamboo_engine import api
from bamboo_engine.builder import *
from django.http import JsonResponse
from pipeline.eri.runtime import BambooDjangoRuntime
from rest_framework import mixins
from rest_framework.decorators import action
from rest_framework.response import Response
from applications.flow.filters import NodeTemplateFilter
from applications.flow.models import Process, Node, ProcessRun, NodeRun, NodeTemplate, SubProcessRun
from applications.flow.serializers import ProcessViewSetsSerializer, ListProcessViewSetsSerializer, \
RetrieveProcessViewSetsSerializer, ExecuteProcessSerializer, ListProcessRunViewSetsSerializer, \
RetrieveProcessRunViewSetsSerializer, NodeTemplateSerializer, ListSubProcessRunViewSetsSerializer, \
RetrieveSubProcessRunViewSetsSerializer
from applications.utils.dag_helper import DAG, instance_dag, PipelineBuilder
from component.drf.viewsets import GenericViewSet
class ProcessViewSets(mixins.ListModelMixin,
                      mixins.CreateModelMixin,
                      mixins.RetrieveModelMixin,
                      mixins.DestroyModelMixin,
                      mixins.UpdateModelMixin,
                      GenericViewSet):
    """CRUD endpoints for Process templates plus an `execute` action."""

    queryset = Process.objects.order_by("-update_time")

    # Per-action serializer lookup; anything else falls back to the CRUD serializer.
    _action_serializers = {
        "list": ListProcessViewSetsSerializer,
        "retrieve": RetrieveProcessViewSetsSerializer,
        "execute": ExecuteProcessSerializer,
    }

    def get_serializer_class(self):
        return self._action_serializers.get(self.action, ProcessViewSetsSerializer)

    @action(methods=["POST"], detail=False)
    def execute(self, request, *args, **kwargs):
        """Build a pipeline from the given process and run it via bamboo-engine."""
        params = self.is_validated_data(request.data)
        pid = params["process_id"]
        pipeline = build_and_create_process(pid)
        # Kick off the freshly built pipeline.
        api.run_pipeline(runtime=BambooDjangoRuntime(), pipeline=pipeline)
        Process.objects.filter(id=pid).update(total_run_count=F("total_run_count") + 1)
        return Response({})
class ProcessRunViewSets(mixins.ListModelMixin,
                         mixins.RetrieveModelMixin,
                         GenericViewSet):
    """Read-only endpoints for ProcessRun records (newest first)."""

    queryset = ProcessRun.objects.order_by("-update_time")

    def get_serializer_class(self):
        """Pick the serializer for the current action.

        Fix: the original implicitly returned None for any action outside
        list/retrieve/execute (e.g. OPTIONS metadata), which crashes DRF's
        get_serializer(); default to the list serializer instead.
        """
        if self.action == "list":
            return ListProcessRunViewSetsSerializer
        elif self.action == "retrieve":
            return RetrieveProcessRunViewSetsSerializer
        elif self.action == "execute":
            return ExecuteProcessSerializer
        return ListProcessRunViewSetsSerializer
class SubProcessRunViewSets(mixins.ListModelMixin,
                            mixins.RetrieveModelMixin,
                            GenericViewSet):
    """Read-only endpoints for SubProcessRun records (newest first)."""

    queryset = SubProcessRun.objects.order_by("-update_time")

    def get_serializer_class(self):
        """Pick the serializer for the current action.

        Fix: the original implicitly returned None for actions other than
        list/retrieve (e.g. OPTIONS metadata), which crashes DRF's
        get_serializer(); default to the list serializer instead.
        """
        if self.action == "retrieve":
            return RetrieveSubProcessRunViewSetsSerializer
        return ListSubProcessRunViewSetsSerializer
class TestViewSets(GenericViewSet):
    """Debug endpoint that succeeds ~90% of the time, simulating a flaky service."""

    def list(self, request, *args, **kwargs):
        # 1-in-10 chance of failure (same distribution as the original list).
        succeeded = random.choice([1, 1, 1, 1, 1, 1, 1, 1, 1, 0])
        if not succeeded:
            raise Exception("随机抛出异常")
        now_text = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        return Response({"now": now_text, "data": request.query_params})
class NodeTemplateViewSet(mixins.ListModelMixin,
                          mixins.CreateModelMixin,
                          mixins.UpdateModelMixin,
                          mixins.DestroyModelMixin,
                          mixins.RetrieveModelMixin,
                          GenericViewSet):
    """Full CRUD endpoints for reusable node templates."""
    queryset = NodeTemplate.objects.order_by("-id")
    serializer_class = NodeTemplateSerializer
    # Enables query-param filtering on list requests (see NodeTemplateFilter).
    filterset_class = NodeTemplateFilter
# Create your views here.
def flow(request):
    """Demo view: hand-builds a bamboo-engine pipeline and runs it.

    Not wired to any model — kept as a reference for the builder API.
    """
    # Describe the flow topology with the builder DSL.
    start = EmptyStartEvent()
    act = ServiceActivity(component_code="http_request")
    act2 = ServiceActivity(component_code="http_request")
    act2.component.inputs.n = Var(type=Var.PLAIN, value=50)
    act3 = ServiceActivity(component_code="http_request")
    act3.component.inputs.n = Var(type=Var.PLAIN, value=5)
    act4 = ServiceActivity(component_code="http_request")
    act5 = ServiceActivity(component_code="http_request")
    eg = ExclusiveGateway(
        conditions={
            0: '${exe_res} >= 0',
            1: '${exe_res} < 0'
        },
        name='act_2 or act_3'
    )
    # NOTE(review): pg/cg are created but never connected below — confirm leftovers.
    pg = ParallelGateway()
    cg = ConvergeGateway()
    end = EmptyEndEvent()
    start.extend(act).extend(eg).connect(act2, act3).to(act2).extend(act4).extend(act5).to(eg).converge(end)
    # Global variable: expose act's `exe_res` output to the gateway conditions.
    pipeline_data = Data()
    pipeline_data.inputs['${exe_res}'] = NodeOutput(type=Var.PLAIN, source_act=act.id, source_key='exe_res')
    pipeline = builder.build_tree(start, data=pipeline_data)
    print(pipeline)
    # Execute the pipeline object and read back its state and outputs.
    runtime = BambooDjangoRuntime()
    api.run_pipeline(runtime=runtime, pipeline=pipeline)
    result = api.get_pipeline_states(runtime=runtime, root_id=pipeline["id"])
    result_output = api.get_execution_data_outputs(runtime, act.id).data
    # api.pause_pipeline(runtime=runtime, pipeline_id=pipeline["id"])
    return JsonResponse({})

View File

@@ -1,5 +1,5 @@
from django.apps import AppConfig
class TaskConfig(AppConfig):
class ProjectConfig(AppConfig):
name = 'task'

View File

@@ -1,30 +0,0 @@
# Generated by Django 2.2.6 on 2022-06-17 15:02
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration of the task app: creates the Task table."""

    initial = True

    dependencies = [
        ('flow', '0006_auto_20220616_1616'),
    ]

    operations = [
        migrations.CreateModel(
            name='Task',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, verbose_name='任务名称')),
                ('run_type', models.CharField(choices=[('hand', '手动'), ('now', '立即'), ('time', '定时'), ('cycle', '周期'), ('cron', 'cron表达式')], max_length=64, verbose_name='执行方式')),
                ('when_start', models.CharField(max_length=100, verbose_name='执行时间')),
                ('cycle_time', models.CharField(max_length=20, null=True, verbose_name='周期时间')),
                ('cycle_type', models.CharField(choices=[('min', '分钟'), ('hour', '小时'), ('day', '')], max_length=20, null=True, verbose_name='周期间隔(min,hour,day)')),
                ('cron_time', models.TextField(default='', verbose_name='cron表达式')),
                ('celery_task_id', models.CharField(max_length=64, null=True, verbose_name='celery的任务ID')),
                ('process_run', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='tasks', to='flow.ProcessRun')),
            ],
        ),
    ]

View File

@@ -1,29 +1,3 @@
from django.db import models
from applications.flow.models import ProcessRun
class Task(models.Model):
    """A scheduling entry that triggers a ProcessRun (manual/once/timed/cycle/cron)."""

    # How the task is triggered.
    TypeChoices = (
        ("hand", "手动"),
        ("now", "立即"),
        ("time", "定时"),
        ("cycle", "周期"),
        ("cron", "cron表达式"),
    )
    # Unit of cycle_time when run_type == "cycle".
    CycleChoices = (
        ("min", "分钟"),
        ("hour", "小时"),
        ("day", ""),
    )
    name = models.CharField("任务名称", max_length=255, blank=False, null=False)
    process_run = models.ForeignKey(ProcessRun, on_delete=models.CASCADE, null=True, db_constraint=False,
                                    related_name="tasks")
    run_type = models.CharField("执行方式", choices=TypeChoices,max_length=64)
    when_start = models.CharField(max_length=100, verbose_name="执行时间")
    cycle_time = models.CharField(max_length=20, null=True, verbose_name="周期时间")
    cycle_type = models.CharField(max_length=20, null=True, verbose_name="周期间隔(min,hour,day)", choices=CycleChoices)
    cron_time = models.TextField(default="", verbose_name="cron表达式")
    # Celery task id of the scheduled job, kept so it can be tracked/revoked.
    celery_task_id = models.CharField(max_length=64, null=True, verbose_name="celery的任务ID")
# Create your models here.

View File

@@ -1,9 +0,0 @@
from rest_framework import serializers
from applications.task.models import Task
class TaskSerializer(serializers.ModelSerializer):
    """Straight-through serializer exposing every Task field."""
    class Meta:
        model = Task
        fields = "__all__"

View File

@@ -1,6 +0,0 @@
from rest_framework.routers import DefaultRouter
from . import views
# Router exposing the task app's viewsets.
# NOTE(review): `base_name` was renamed to `basename` in DRF 3.9 and removed in
# DRF 3.11 — confirm the pinned djangorestframework version still accepts it.
task_router = DefaultRouter()
task_router.register(r"task", viewset=views.TaskViewSets, base_name="task")

View File

@@ -1,12 +1,3 @@
from django.shortcuts import render
from applications.task.models import Task
from applications.task.serializers import TaskSerializer
from component.drf.viewsets import GenericViewSet
from rest_framework import mixins
class TaskViewSets(mixins.ListModelMixin,
                   GenericViewSet):
    """Read-only list endpoint for Task entries (newest first)."""
    queryset = Task.objects.order_by("-id")
    serializer_class = TaskSerializer
# Create your views here.

View File

@@ -1,291 +0,0 @@
from collections import OrderedDict, defaultdict
from copy import copy, deepcopy
from applications.flow.models import Process, Node
from bamboo_engine.builder import EmptyStartEvent, EmptyEndEvent, ExclusiveGateway, ServiceActivity, Var, builder, Data, \
ParallelGateway, ConvergeGateway, ConditionalParallelGateway, SubProcess
class DAG(object):
    """Directed acyclic graph implementation.

    The graph is an adjacency mapping ``{node_name: set(successors)}`` stored
    in an OrderedDict, so node insertion order is preserved. Every method
    accepts an optional ``graph`` argument and falls back to ``self.graph``
    when it is omitted.
    """

    def __init__(self):
        """Construct a new DAG with no nodes or edges."""
        self.reset_graph()

    def add_node(self, node_name, graph=None):
        """Add a node if it does not exist yet, or raise KeyError."""
        if not graph:
            graph = self.graph
        if node_name in graph:
            raise KeyError('node %s already exists' % node_name)
        graph[node_name] = set()

    def add_node_if_not_exists(self, node_name, graph=None):
        """Add a node, silently ignoring duplicates."""
        try:
            self.add_node(node_name, graph=graph)
        except KeyError:
            pass

    def delete_node(self, node_name, graph=None):
        """Delete this node and all edges referencing it."""
        if not graph:
            graph = self.graph
        if node_name not in graph:
            raise KeyError('node %s does not exist' % node_name)
        graph.pop(node_name)
        # Removing from the edge sets does not resize the dict, so plain
        # iteration is safe here.
        for node, edges in graph.items():
            if node_name in edges:
                edges.remove(node_name)

    def delete_node_if_exists(self, node_name, graph=None):
        """Delete a node, silently ignoring a missing one."""
        try:
            self.delete_node(node_name, graph=graph)
        except KeyError:
            pass

    def add_edge(self, ind_node, dep_node, graph=None):
        """Add an edge (dependency) between the specified nodes.

        Raises KeyError when either endpoint is missing and Exception when
        the edge would make the graph cyclic.
        """
        if not graph:
            graph = self.graph
        if ind_node not in graph or dep_node not in graph:
            raise KeyError('one or more nodes do not exist in graph')
        # Trial-insert on a deep copy so a rejected edge leaves the graph intact.
        test_graph = deepcopy(graph)
        test_graph[ind_node].add(dep_node)
        is_valid, message = self.validate(test_graph)
        if is_valid:
            graph[ind_node].add(dep_node)
        else:
            # Fix: the original raised a bare Exception() with no message,
            # making cycle errors impossible to diagnose.
            raise Exception('edge (%s -> %s) rejected: %s' % (ind_node, dep_node, message))

    def delete_edge(self, ind_node, dep_node, graph=None):
        """Delete an edge from the graph."""
        if not graph:
            graph = self.graph
        if dep_node not in graph.get(ind_node, []):
            raise KeyError('this edge does not exist in graph')
        graph[ind_node].remove(dep_node)

    def rename_edges(self, old_task_name, new_task_name, graph=None):
        """Change references to a task in existing edges.

        Fix: iterate over a snapshot of the items — the original deleted a
        key from the dict while iterating it, which raises RuntimeError on
        Python 3.
        """
        if not graph:
            graph = self.graph
        for node, edges in list(graph.items()):
            if node == old_task_name:
                graph[new_task_name] = copy(edges)
                del graph[old_task_name]
            else:
                if old_task_name in edges:
                    edges.remove(old_task_name)
                    edges.add(new_task_name)

    def predecessors(self, node, graph=None):
        """Return a list of all immediate predecessors of the given node."""
        if graph is None:
            graph = self.graph
        return [key for key in graph if node in graph[key]]

    def downstream(self, node, graph=None):
        """Return a list of all nodes this node has edges towards."""
        if graph is None:
            graph = self.graph
        if node not in graph:
            raise KeyError('node %s is not in graph' % node)
        return list(graph[node])

    def all_downstreams(self, node, graph=None):
        """Return all nodes ultimately downstream of the given node in the
        dependency graph, in topological order."""
        if graph is None:
            graph = self.graph
        nodes = [node]
        nodes_seen = set()
        i = 0
        # Breadth-first collection of every reachable node.
        while i < len(nodes):
            downstreams = self.downstream(nodes[i], graph)
            for downstream_node in downstreams:
                if downstream_node not in nodes_seen:
                    nodes_seen.add(downstream_node)
                    nodes.append(downstream_node)
            i += 1
        # Re-order the reachable set topologically.
        return list(
            filter(
                lambda node: node in nodes_seen,
                self.topological_sort(graph=graph)
            )
        )

    def all_leaves(self, graph=None):
        """Return a list of all leaves (nodes with no downstreams)."""
        if graph is None:
            graph = self.graph
        return [key for key in graph if not graph[key]]

    def from_dict(self, graph_dict):
        """Reset the graph and build it from the passed dictionary.

        The dictionary takes the form of {node_name: [directed edges]}.
        """
        self.reset_graph()
        for new_node in graph_dict.keys():
            self.add_node(new_node)
        for ind_node, dep_nodes in graph_dict.items():
            if not isinstance(dep_nodes, list):
                raise TypeError('dict values must be lists')
            for dep_node in dep_nodes:
                self.add_edge(ind_node, dep_node)

    def reset_graph(self):
        """Restore the graph to an empty state."""
        self.graph = OrderedDict()

    def ind_nodes(self, graph=None):
        """Return a list of all nodes in the graph with no dependencies."""
        if graph is None:
            graph = self.graph
        dependent_nodes = set(
            node for dependents in graph.values() for node in dependents
        )
        return [node for node in graph.keys() if node not in dependent_nodes]

    def validate(self, graph=None):
        """Return (bool, message) of whether the graph is a valid DAG.

        Note: an empty graph is reported invalid ('no independent nodes') —
        preserved from the original behaviour.
        """
        graph = graph if graph is not None else self.graph
        if len(self.ind_nodes(graph)) == 0:
            return False, 'no independent nodes detected'
        try:
            self.topological_sort(graph)
        except ValueError:
            return False, 'failed topological sort'
        return True, 'valid'

    def topological_sort(self, graph=None):
        """Return a topological ordering of the DAG (Kahn's algorithm).

        Raises ValueError if this is not possible (graph contains a cycle).
        """
        if graph is None:
            graph = self.graph
        result = []
        in_degree = defaultdict(lambda: 0)
        for u in graph:
            for v in graph[u]:
                in_degree[v] += 1
        ready = [node for node in graph if not in_degree[node]]
        while ready:
            u = ready.pop()
            result.append(u)
            for v in graph[u]:
                in_degree[v] -= 1
                if in_degree[v] == 0:
                    ready.append(v)
        if len(result) == len(graph):
            return result
        else:
            raise ValueError('graph is not acyclic')

    def size(self):
        """Return the number of nodes in the graph."""
        return len(self.graph)
def instance_dag(dag_dict, process_run_uuid):
    """Translate a template DAG into a run-instance DAG.

    Source and target uuids are mapped through ``process_run_uuid`` to the
    ids of the created run instances; entries whose successor list is empty
    are dropped, matching the original accumulation behaviour.
    """
    return {
        process_run_uuid[src].id: [process_run_uuid[dst].id for dst in dst_list]
        for src, dst_list in dag_dict.items()
        if dst_list
    }
class PipelineBuilder:
    """Translates a stored Process (DAG + Node rows) into a bamboo pipeline.

    On construction it loads the Process and its nodes, validates the DAG and
    creates one bamboo builder object per node; ``build()`` then wires the
    instances together following the DAG edges.
    """

    def __init__(self, process_id):
        self.process_id = process_id
        self.process = Process.objects.filter(id=process_id).first()
        # Template node uuid -> Node row.
        self.node_map = Node.objects.filter(process_id=process_id).in_bulk(field_name="uuid")
        self.dag_obj = self.setup_dag()
        # Template node uuid -> bamboo builder instance.
        self.instance = self.setup_instance()

    def setup_instance(self):
        """Create a bamboo builder object for every node, keyed by template uuid."""
        pipeline_instance = {}
        for p_id, node in self.node_map.items():
            if node.node_type == Node.START_NODE:
                pipeline_instance[p_id] = EmptyStartEvent()
            elif node.node_type == Node.END_NODE:
                pipeline_instance[p_id] = EmptyEndEvent()
            elif node.node_type == Node.CONDITION_NODE:
                # NOTE(review): conditions are hard-coded placeholders (branch 1
                # always true) — real condition wiring appears to be TODO.
                pipeline_instance[p_id] = ExclusiveGateway(
                    conditions={
                        0: '1==0',
                        1: '0==0'
                    },
                    name='act_2 or act_3'
                )
            elif node.node_type == Node.PARALLEL_NODE:
                pipeline_instance[p_id] = ParallelGateway()
            elif node.node_type == Node.CONVERGE_NODE:
                pipeline_instance[p_id] = ConvergeGateway()
            elif node.node_type == Node.CONDITION_PARALLEL_NODE:
                # NOTE(review): placeholder conditions, as above.
                pipeline_instance[p_id] = ConditionalParallelGateway(
                    conditions={
                        0: '1==0',
                        1: '1==1',
                        2: '2==2'
                    },
                    name='[act_2] or [act_3 and act_4]'
                )
            elif node.node_type == Node.SUB_PROCESS_NODE:
                # `content` holds the child Process id; build it recursively.
                process_id = node.content
                p_builder = PipelineBuilder(process_id)
                pipeline = p_builder.build(is_subprocess=True)
                pipeline_instance[p_id] = pipeline
                # Merge the child's instances so callers can resolve any uuid.
                pipeline_instance.update(p_builder.instance)
            else:
                # Default: an ordinary job node backed by the http_request component.
                act = ServiceActivity(component_code="http_request")
                act.component.inputs.inputs = Var(type=Var.PLAIN, value=node.inputs)
                pipeline_instance[p_id] = act
        return pipeline_instance

    def setup_dag(self):
        """Load the stored adjacency dict into a validated DAG object."""
        dag_obj = DAG()
        dag_obj.from_dict(self.process.dag)
        return dag_obj

    def get_inst(self, p_id):
        # Resolve a template uuid to its bamboo builder instance.
        return self.instance.get(p_id)

    def get_inst_list(self, p_ids):
        # Bulk variant of get_inst.
        return [self.instance.get(p_id) for p_id in p_ids]

    def build(self, is_subprocess=False):
        """Wire the node instances along the DAG edges and build the tree.

        Returns a SubProcess wrapper when building a nested flow, otherwise
        the full pipeline dict from builder.build_tree.
        """
        # The DAG is acyclic, so the first independent node is the start event.
        start = self.dag_obj.ind_nodes()[0]
        for _in, out_list in self.dag_obj.graph.items():
            for _out in out_list:
                self.get_inst(_in).extend(self.get_inst(_out))
        pipeline_data = Data()
        if is_subprocess:
            pipeline = SubProcess(self.get_inst(start), data=pipeline_data)
        else:
            pipeline = builder.build_tree(self.get_inst(start), data=pipeline_data)
        return pipeline
if __name__ == '__main__':
    # Manual smoke test for the DAG helper.
    dag = DAG()
    dag.add_node("a")
    dag.add_node("b")
    dag.add_node("c")
    dag.add_node("d")
    dag.add_edge("a", "b")
    dag.add_edge("a", "d")
    dag.add_edge("b", "c")
    print(dag.topological_sort())
    print(dag.graph)
    print(dag.all_downstreams("b"))

View File

@@ -1,5 +0,0 @@
import uuid
def get_uuid():
    """Return a random 32-character lowercase hex string (a dashless UUID4).

    ``uuid4().hex`` is the canonical spelling of
    ``str(uuid4()).replace("-", "")`` and avoids the intermediate string.
    """
    return uuid.uuid4().hex

View File

@@ -1,16 +1,3 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import json
import logging
import traceback

View File

@@ -1,130 +0,0 @@
# -*- coding: utf-8 -*-
# JSON-style aliases so the mock payloads below can be pasted verbatim from
# captured API responses.
true = True
false = False
null = ""
a = {"result": true, "code": "OK", "message": "success", "data": {"menu": [
{"name": "home", "cnName": "首页", "to": "/home", "icon": "iconfont icon-mianxingtubiao-shouye", "hasChild": false,
"children": []},
{"name": "AgentList", "cnName": "Agent管理", "to": "/agentlist", "icon": "iconfont icon-mianxingtubiao-Agentguanli",
"hasChild": true, "children": [{"name": "AgentList", "cnName": "Agent列表", "to": "/agentlist", "hasChild": false},
{"name": "AgentMonitor", "cnName": "Agent监视", "to": "/agentmonitor",
"hasChild": false}]},
{"name": "NewJob", "cnName": "作业管理", "to": "/newjob", "icon": "iconfont icon-mianxingtubiao-zuoyeguanli",
"hasChild": true, "children": [{"name": "NewJob", "cnName": "新建作业", "to": "/newjob", "hasChild": false},
{"name": "JobList", "cnName": "作业列表", "to": "/joblist", "hasChild": false}]},
{"name": "NewJobFlow", "cnName": "作业流管理", "to": "/newjobflow",
"icon": "iconfont icon-mianxingtubiao-zuoyeliuguanli", "hasChild": true,
"children": [{"name": "NewJobFlow", "cnName": "新建作业流", "to": "/newjobflow", "hasChild": false},
{"name": "JobFlowList", "cnName": "作业流列表", "to": "/jobflowlist", "hasChild": false},
{"name": "CalendarMgmt", "cnName": "日历管理", "to": "/calendarmgmt", "hasChild": false},
{"name": "VariableMgmt", "cnName": "变量管理", "to": "/variablemgmt", "hasChild": false}]},
{"name": "JobMonitor", "cnName": "作业监视", "to": "/jobmonitor", "icon": "iconfont icon-mianxingtubiao-zuoyejiankong",
"hasChild": false}, {"name": "LargeScreen", "cnName": "作业监视大屏", "to": "/largescreen",
"icon": "iconfont icon-mianxingtubiao-zuoyejiankongdaping", "hasChild": false},
{"name": "Report", "cnName": "报表分析", "to": "/report", "icon": "iconfont icon-xianxingtubiao-shengchengbaobiao",
"hasChild": false},
{"name": "JobHistory", "cnName": "作业历史", "to": "/jobhistory", "icon": "iconfont icon-mianxingtubiao-zuoyelishi",
"hasChild": false},
{"name": "AlarmList", "cnName": "告警中心", "to": "/alarmlist", "icon": "iconfont icon-mianxingtubiao-gaojingzhongxin",
"hasChild": false, "children": []},
{"name": "SysSetup", "cnName": "系统管理", "to": "/syssetup", "icon": "iconfont icon-mianxingtubiao-shezhi",
"hasChild": true, "children": [{"name": "SysSetup", "cnName": "系统设置", "to": "/syssetup", "hasChild": false},
{"name": "UserAndPermissions", "cnName": "用户与权限", "to": "/userandpermissions",
"hasChild": false},
{"name": "SystemClassManage", "cnName": "系统类别管理", "to": "/systemclassmanage",
"hasChild": false},
{"name": "Log", "cnName": "操作审计", "to": "/log", "hasChild": false}]}], "router": [
{"path": "/", "name": "home", "component": "Home", "meta": {"title": "首页"}},
{"path": "/log", "name": "Log", "component": "Log", "meta": {"title": "操作审计"}},
{"path": "/addcalendarmgmt", "name": "AddCalendarMgmt", "component": "AddCalendarMgmt",
"meta": {"title": "操作日历", "back": "true", "fatherName": "CalendarMgmt"}},
{"path": "/variablechange", "name": "variableChange", "component": "variableChange",
"meta": {"title": "变量表", "back": "true", "fatherName": "VariableMgmt"}},
{"path": "/singlejob", "name": "SingleJob", "component": "SingleJob",
"meta": {"title": "单个作业", "back": "true", "fatherName": "NewJob"}},
{"path": "/singleJobdetail", "name": "singleJobDetail", "component": "SingleJob",
"meta": {"title": "作业管理 > 修改作业 > 单个作业"}}, {"path": "/viewdetail", "name": "ViewDetail", "component": "ViewDetail",
"meta": {"title": "作业流视图详情", "back": "true",
"fatherName": "JobMonitor"}},
{"path": "/jobflowdetail", "name": "JobFlowDetail", "component": "JobFlowDetail",
"meta": {"title": "作业流视图历史详情", "back": "true", "fatherName": "JobHistory"}},
{"path": "/jobviewdetail", "name": "JobViewDetail", "component": "JobViewDetail",
"meta": {"title": "作业视图历史详情", "back": "true", "fatherName": "JobHistory"}},
{"path": "/multiplejob", "name": "MultipleJob", "component": "MultipleJob",
"meta": {"title": "批量作业导入", "back": "true", "fatherName": "NewJob"}},
{"path": "/scanfile", "name": "ScanFile", "component": "ScanFile",
"meta": {"title": "导入详情", "back": "true", "fatherName": "NewJob"}},
{"path": "/home", "name": "home", "component": "Home", "meta": {"title": "首页"}},
{"path": "/agentlist", "name": "AgentList", "component": "AgentList", "meta": {"title": "Agent列表"}},
{"path": "/agentmonitor", "name": "AgentMonitor", "component": "AgentMonitor", "meta": {"title": "Agent监视"}},
{"path": "/calendarmgmt", "name": "CalendarMgmt", "component": "CalendarMgmt", "meta": {"title": "日历管理"}},
{"path": "/jobflowlist", "name": "JobFlowList", "component": "JobFlowList", "meta": {"title": "作业流列表"}},
{"path": "/newjobflow", "name": "NewJobFlow", "component": "NewJobFlow", "meta": {"title": "新建作业流"}},
{"path": "/singlejobflow", "name": "SingleJobFlow", "component": "SingleJobFlow",
"meta": {"title": "单个作业流", "back": "true", "fatherName": "NewJobFlow"}},
{"path": "/multiplejobflow", "name": "MultipleJobFlow", "component": "MultipleJobFlow",
"meta": {"title": "批量导入", "back": "true", "fatherName": "NewJobFlow"}},
{"path": "/importfile", "name": "importFile", "component": "ImportFile",
"meta": {"title": "导入详情", "back": "true", "fatherName": "NewJobFlow"}},
{"path": "/variablemgmt", "name": "VariableMgmt", "component": "VariableMgmt", "meta": {"title": "变量管理"}},
{"path": "/joblist", "name": "JobList", "component": "JobList", "meta": {"title": "作业列表"}},
{"path": "/newjob", "name": "NewJob", "component": "NewJob", "meta": {"title": "新建作业"}},
{"path": "/jobhistory", "name": "JobHistory", "component": "JobHistory", "meta": {"title": "作业历史"}, "children": [
{"path": "/jobflowviewhistory", "name": "JobFlowViewHistory", "component": "JobFlowViewHistory",
"meta": {"title": "作业历史", "fatherName": "JobHistory"}},
{"path": "/jobviewhistory", "name": "JobViewHistory", "component": "JobViewHistory",
"meta": {"title": "作业历史", "fatherName": "JobHistory"}}]},
{"path": "/report", "name": "Report", "component": "Report", "meta": {"title": "报表分析"}},
{"path": "/largescreen", "name": "LargeScreen", "component": "LargeScreen", "meta": {"title": "作业监视大屏"}},
{"path": "/jobmonitor", "name": "JobMonitor", "component": "JobMonitor", "meta": {"title": "作业监视"}, "children": [
{"path": "/jobview", "name": "JobView", "component": "JobView",
"meta": {"title": "作业监视", "fatherName": "JobMonitor"}},
{"path": "/jobflowview", "name": "JobFlowView", "component": "JobFlowView",
"meta": {"title": "作业监视", "fatherName": "JobMonitor"}}]},
{"path": "/jobdetail", "name": "jobDetail", "component": "JobDetail",
"meta": {"title": "作业视图详情", "back": "true", "fatherName": "JobMonitor"}},
{"path": "/syssetup", "name": "SysSetup", "component": "SysSetup", "meta": {"title": "系统设置"}},
{"path": "/userandpermissions", "name": "UserAndPermissions", "component": "UserAndPermissions",
"meta": {"title": "用户与权限"}},
{"path": "/systemclassmanage", "name": "SystemClassManage", "component": "SystemClassManage",
"meta": {"title": "系统类别管理"}},
{"path": "/logmange", "name": "LogMange", "component": "LogMange", "meta": {"title": "日志管理"}},
{"path": "/alarmlist", "name": "AlarmList", "component": "AlarmList", "meta": {"title": "告警中心"}}], "permission": [
{"url": "/agentlist", "auth": {"search": true, "create": true, "modify": true, "del": true}},
{"url": "/agentmonitor", "auth": {"search": true}}, {"url": "/newjob", "auth": {"create": true}},
{"url": "/joblist", "auth": {"search": true, "operate": true, "modify": true, "del": true}},
{"url": "/newjobflow", "auth": {"create": true}},
{"url": "/jobflowlist", "auth": {"search": true, "operate": true, "modify": true, "del": true}},
{"url": "/calendarmgmt", "auth": {"search": true, "create": true, "modify": true, "del": true}},
{"url": "/variablemgmt", "auth": {"search": true, "create": true, "modify": true, "del": true}},
{"url": "/jobflowview", "auth": {"search": true, "operate": true}},
{"url": "/jobflowviewhistory", "auth": {"search": true}},
{"url": "/jobview", "auth": {"search": true, "operate": true}},
{"url": "/jobviewhistory", "auth": {"search": true}}, {"url": "/alarmlist", "auth": {"search": true}},
{"url": "/syssetup", "auth": {"operate": true, "modify": true}},
{"url": "/userandpermissions", "auth": {"search": true, "create": true}},
{"url": "/systemclassmanage", "auth": {"search": true, "create": true, "modify": true, "del": true}},
{"url": "/log", "auth": {"search": true}}, {"url": "/viewdetail", "auth": {"search": true, "operate": true}}]}}
component = {"result": true, "code": "OK", "message": "success",
"data": {"page": 1, "total_page": 1, "count": 2,
"items":
[
{"id": 1, "name": "HTTP请求", "description": "", "show": true, "top": 300, "left": 300,
"ico": null,
"fail_retry_count": 0, "fail_offset": 10, "fail_offset_unit": "seconds", "node_type": 2,
"component_code": "http_request", "is_skip_fail": false, "is_timeout_alarm": false,
"inputs": {"url": "", "body": "{}", "header": [{"key": "", "value": ""}],
"method": "get",
"timeout": 60,
"check_point": {"key": "", "values": "", "condition": ""}}, "outputs": {},
"content": 0,
"template_type": "0",
"inputs_component": [{"key": "url", "type": "textarea", "label": "请求地址:"},
{"key": "method", "type": "select", "label": "请求类型:",
"choices": [{"label": "GET", "value": "get"}]},
{"key": "header", "type": "dict_map",
"label": "Header"},
{"key": "body", "type": "textarea", "label": "Body"},
{"key": "timeout", "type": "number",
"label": "超时时间:"}],
"outputs_component": []}]}}

View File

@@ -1,7 +0,0 @@
# -*- coding: utf-8 -*-
from django.apps import AppConfig
class CustomPluginsConfig(AppConfig):
    """Django app config for the custom_plugins app."""
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'custom_plugins'

View File

@@ -1,10 +0,0 @@
# -*- coding: utf-8 -*-
import logging
from pipeline.conf import settings
from pipeline.core.flow.activity import Service
from pipeline.component_framework.component import Component
logger = logging.getLogger('celery')

View File

@@ -1,10 +0,0 @@
# -*- coding: utf-8 -*-
import logging
from pipeline.conf import settings
from pipeline.core.flow.activity import Service
from pipeline.component_framework.component import Component
logger = logging.getLogger('celery')

View File

@@ -1,58 +0,0 @@
# -*- coding: utf-8 -*-
import math
from pipeline.core.flow.activity import Service, StaticIntervalGenerator
from pipeline.component_framework.component import Component
import json
import time
import requests
# to adapter window pc
# import eventlet
# requests = eventlet.import_patched('requests')
class HttpRequestService(Service):
    """Pipeline service performing the HTTP request described by a node's inputs."""

    __need_schedule__ = False

    def execute(self, data, parent_data):
        """Perform the request and store the response in ``data.outputs.outputs``.

        Returns True only when the response body is a JSON object with a
        truthy "result" key; any exception stores its message in the outputs
        and returns False.
        """
        try:
            inputs = data.get_one_of_inputs("inputs")
            headers = self.parse_headers(inputs["header"])
            inputs["body"] = json.loads(inputs["body"])
            # GET sends the body as query params, everything else as a JSON body.
            req_data = [{"params": inputs["body"]}, {"json": inputs["body"]}][inputs["method"] != "get"]
            res = requests.request(inputs["method"], url=inputs["url"], headers=headers,
                                   timeout=inputs["timeout"], **req_data).content
            try:
                res = json.loads(res)
            except ValueError:
                # Non-JSON response: keep the raw bytes as the node output.
                # (Fix: the original had a no-op `res = res` here, plus a debug
                # print of the response that has been removed.)
                pass
            data.outputs.outputs = res
            time.sleep(5)  # NOTE(review): looks like debug/pacing leftover — confirm before removing
            # Fix: guard with isinstance — the original called res.get() on raw
            # bytes responses, which raised AttributeError and clobbered the
            # stored output with the error text. Both paths still return False
            # for non-dict responses.
            return bool(isinstance(res, dict) and res.get("result"))
        except Exception as e:
            data.outputs.outputs = str(e)
            return False

    def parse_headers(self, headers):
        """Turn [{"key": ..., "value": ...}, ...] rows into a header dict,
        skipping rows whose key is empty."""
        return {header["key"]: header["value"] for header in headers if header["key"]}

    def inputs_format(self):
        # Single dict input carrying url/method/header/body/timeout.
        return [
            Service.InputItem(name="输入参数", key="inputs", type="dict", required=True)
        ]

    def outputs_format(self):
        # Parsed response (or raw bytes / error text) of the request.
        return [
            Service.OutputItem(name="输出参数", key="outputs", type="dict", required=True)
        ]
class HttpRequestComponent(Component):
    """Registers HttpRequestService under component_code `http_request`."""
    name = "HttpRequestComponent"
    code = "http_request"
    bound_service = HttpRequestService

View File

@@ -1 +0,0 @@
# -*- coding: utf-8 -*-

View File

@@ -1,12 +0,0 @@
/**
 * Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
* Edition) available.
* Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/

View File

@@ -1,21 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import logging
from pipeline.conf import settings
from pipeline.core.flow.activity import Service
from pipeline.component_framework.component import Component
logger = logging.getLogger('celery')

View File

@@ -1,21 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import logging
from pipeline.conf import settings
from pipeline.core.flow.activity import Service
from pipeline.component_framework.component import Component
logger = logging.getLogger('celery')

View File

@@ -1,21 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import logging
from pipeline.conf import settings
from pipeline.core.flow.activity import Service
from pipeline.component_framework.component import Component
logger = logging.getLogger('celery')

View File

@@ -1,21 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import logging
from pipeline.conf import settings
from pipeline.core.flow.activity import Service
from pipeline.component_framework.component import Component
logger = logging.getLogger('celery')

View File

@@ -1,41 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from __future__ import absolute_import, unicode_literals
import os
import time
from celery import Celery, platforms
from django.conf import settings
# Allow the celery worker to run as the root user (celery refuses by default).
platforms.C_FORCE_ROOT = True
# set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dj_flow.settings")
app = Celery("dj_flow")
# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object("django.conf:settings")
# Load task modules from all registered Django app configs.
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
    # Smoke-test task: print the task request context and simulate some work.
    print("Request: {!r}".format(self.request))
    time.sleep(2)

View File

@@ -1,30 +0,0 @@
"""URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from applications.flow.urls import flow_router, node_router
from applications.flow.views import flow
from applications.task.urls import task_router
from dj_flow.views import index
# URL routing: admin site, index page, and the routers of each app.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', index),
    path("process/", include(flow_router.urls)),
    path("node/", include(node_router.urls)),
    path("task/", include(task_router.urls)),
    path("tt/", flow),  # NOTE(review): "tt/" looks like a temporary debug route — confirm
]

View File

@@ -0,0 +1,30 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
import time
from celery import Celery, platforms
from django.conf import settings
# Allow the celery worker to run as the root user (celery refuses by default).
platforms.C_FORCE_ROOT = True
# set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_vue_cli.settings")
app = Celery("django_vue_cli")
# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object("django.conf:settings")
# Load task modules from all registered Django app configs.
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
    # Smoke-test task: print the task request context and simulate some work.
    print("Request: {!r}".format(self.request))
    time.sleep(2)

View File

@@ -1,7 +1,7 @@
from pathlib import Path
import sys
import os
# lib文件夹中手动导入的第三方库
BASE_DIR = Path(__file__).resolve().parent.parent
sys.path.insert(1, os.path.join(os.getcwd(), 'lib'))
@@ -13,7 +13,7 @@ DEBUG = False
ALLOWED_HOSTS = ["*"]
CORS_ALLOW_CREDENTIALS = True
CSRF_COOKIE_NAME = "dj-flow_csrftoken"
CSRF_COOKIE_NAME = "django_vue_cli_csrftoken"
CORS_ORIGIN_WHITELIST = [
"http://127.0.0.1:8080"
]
@@ -22,20 +22,14 @@ CORS_ORIGIN_WHITELIST = [
INSTALLED_APPS = [
"corsheaders",
"pipeline",
"pipeline.engine",
"pipeline.component_framework",
"pipeline.eri",
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
"custom_plugins",
"rest_framework",
"applications.flow",
"applications.task"
"applications.task",
]
@@ -51,7 +45,7 @@ MIDDLEWARE = [
"component.drf.middleware.AppExceptionMiddleware"
]
ROOT_URLCONF = 'dj_flow.urls'
ROOT_URLCONF = 'django_vue_cli.urls'
TEMPLATES = [
{
@@ -69,7 +63,7 @@ TEMPLATES = [
},
]
WSGI_APPLICATION = 'dj_flow.wsgi.application'
WSGI_APPLICATION = 'django_vue_cli.wsgi.application'
TIME_ZONE = "Asia/Shanghai"
LANGUAGE_CODE = "zh-hans"
# Database

8
django_vue_cli/urls.py Normal file
View File

@@ -0,0 +1,8 @@
from django.contrib import admin
from django.urls import path, include
from django_vue_cli.views import index
# URL routing: Django admin plus the index page.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', index),
]

View File

@@ -1,5 +1,5 @@
"""
WSGI config for project.
WSGI config for task.
It exposes the WSGI callable as a module-level variable named ``application``.
@@ -11,6 +11,6 @@ import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dj_flow.settings')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django_vue_cli.settings')
application = get_wsgi_application()

View File

@@ -1,14 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from .engine import Engine # noqa

View File

@@ -1,14 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
__version__ = "1.6.4"

View File

@@ -1,647 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
# The API module exposes the public interface; users of bamboo-engine should always interact with the engine only through this module
import logging
import functools
import traceback
from typing import Optional, Any, List
from .utils.object import Representable
from .eri import EngineRuntimeInterface, ContextValue
from .engine import Engine
from .template import Template
from .context import Context
from .utils.constants import VAR_CONTEXT_MAPPING
logger = logging.getLogger("bamboo_engine")
class EngineAPIResult(Representable):
    """
    Unified return value for all engine API calls.
    """

    def __init__(
        self,
        result: bool,
        message: str,
        exc: Optional[Exception] = None,
        data: Optional[Any] = None,
        exc_trace: Optional[str] = None,
    ):
        """
        :param result: whether the call succeeded
        :type result: bool
        :param message: extra message, relevant when result is False
        :type message: str
        :param exc: exception raised by the call, if any
        :type exc: Exception
        :param data: payload of the call
        :type data: Any
        :param exc_trace: formatted traceback of ``exc``, if any
        :type exc_trace: str
        """
        self.result = result
        self.message = message
        self.exc = exc
        self.data = data
        self.exc_trace = exc_trace
def _ensure_return_api_result(func):
    """Decorator that normalizes API return values into EngineAPIResult.

    A raised exception becomes a failed result carrying the exception and
    its traceback; a plain return value is wrapped into a successful result;
    an EngineAPIResult is passed through untouched.
    """

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            outcome = func(*args, **kwargs)
        except Exception as err:
            logger.exception("{} raise error.".format(func.__name__))
            return EngineAPIResult(
                result=False,
                message="fail",
                exc=err,
                data=None,
                exc_trace=traceback.format_exc(),
            )
        if isinstance(outcome, EngineAPIResult):
            return outcome
        return EngineAPIResult(result=True, message="success", exc=None, data=outcome, exc_trace=None)

    return wrapper
@_ensure_return_api_result
def run_pipeline(runtime: EngineRuntimeInterface, pipeline: dict, **options) -> EngineAPIResult:
    """
    Run a pipeline.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param pipeline: pipeline description dict
    :type pipeline: dict
    :return: api call result
    :rtype: EngineAPIResult
    """
    Engine(runtime).run_pipeline(pipeline, **options)
@_ensure_return_api_result
def pause_pipeline(runtime: EngineRuntimeInterface, pipeline_id: str) -> EngineAPIResult:
    """
    Pause the execution of a pipeline.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param pipeline_id: pipeline id
    :type pipeline_id: str
    :return: api call result
    :rtype: EngineAPIResult
    """
    Engine(runtime).pause_pipeline(pipeline_id)
@_ensure_return_api_result
def revoke_pipeline(runtime: EngineRuntimeInterface, pipeline_id: str) -> EngineAPIResult:
    """
    Revoke a pipeline so it can no longer continue executing.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param pipeline_id: pipeline id
    :type pipeline_id: str
    :return: api call result
    :rtype: EngineAPIResult
    """
    Engine(runtime).revoke_pipeline(pipeline_id)
@_ensure_return_api_result
def resume_pipeline(runtime: EngineRuntimeInterface, pipeline_id: str) -> EngineAPIResult:
    """
    Resume a pipeline previously paused via pause_pipeline.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param pipeline_id: pipeline id
    :type pipeline_id: str
    :return: api call result
    :rtype: EngineAPIResult
    """
    Engine(runtime).resume_pipeline(pipeline_id)
@_ensure_return_api_result
def pause_node_appoint(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult:
    """
    Schedule a pause at a specific node.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param node_id: node id
    :type node_id: str
    :return: api call result
    :rtype: EngineAPIResult
    """
    Engine(runtime).pause_node_appoint(node_id)
@_ensure_return_api_result
def resume_node_appoint(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult:
    """
    Resume a pipeline that was paused at a specific node.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param node_id: node id
    :type node_id: str
    :return: api call result
    :rtype: EngineAPIResult
    """
    Engine(runtime).resume_node_appoint(node_id)
@_ensure_return_api_result
def retry_node(runtime: EngineRuntimeInterface, node_id: str, data: Optional[dict] = None) -> EngineAPIResult:
    """
    Retry a node that failed to execute.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param node_id: id of the failed node
    :type node_id: str
    :param data: execution inputs to use for the retry
    :type data: dict
    :return: api call result
    :rtype: EngineAPIResult
    """
    Engine(runtime).retry_node(node_id, data)
@_ensure_return_api_result
def retry_subprocess(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult:
    """
    Retry a subprocess node that failed on entry.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param node_id: subprocess node id
    :type node_id: str
    :return: api call result
    :rtype: EngineAPIResult
    """
    Engine(runtime).retry_subprocess(node_id)
@_ensure_return_api_result
def skip_node(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult:
    """
    Skip a node that failed to execute (event and activity nodes only).

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param node_id: id of the failed node
    :type node_id: str
    :return: api call result
    :rtype: EngineAPIResult
    """
    Engine(runtime).skip_node(node_id)
@_ensure_return_api_result
def skip_exclusive_gateway(runtime: EngineRuntimeInterface, node_id: str, flow_id: str) -> EngineAPIResult:
    """
    Skip a failed exclusive gateway, forcing execution down one flow.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param node_id: id of the failed exclusive gateway
    :type node_id: str
    :param flow_id: id of the flow to continue with
    :type flow_id: str
    :return: api call result
    :rtype: EngineAPIResult
    """
    Engine(runtime).skip_exclusive_gateway(node_id, flow_id)
@_ensure_return_api_result
def skip_conditional_parallel_gateway(
    runtime: EngineRuntimeInterface,
    node_id: str,
    flow_ids: list,
    converge_gateway_id: str,
) -> EngineAPIResult:
    """
    Skip a failed conditional parallel gateway, forcing execution down the given flows.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param node_id: id of the failed gateway
    :type node_id: str
    :param flow_ids: ids of the flows to continue with
    :type flow_ids: list
    :param converge_gateway_id: id of the target converge gateway
    :type converge_gateway_id: str
    :return: api call result
    :rtype: EngineAPIResult
    """
    Engine(runtime).skip_conditional_parallel_gateway(node_id, flow_ids, converge_gateway_id)
@_ensure_return_api_result
def forced_fail_activity(runtime: EngineRuntimeInterface, node_id: str, ex_data: str) -> EngineAPIResult:
    """
    Force an activity node into the failed state.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param node_id: node ID
    :type node_id: str
    :param ex_data: failure reason attached to the node
    :type ex_data: str
    :return: api call result
    :rtype: EngineAPIResult
    """
    Engine(runtime).forced_fail_activity(node_id, ex_data)
@_ensure_return_api_result
def callback(runtime: EngineRuntimeInterface, node_id: str, version: str, data: dict) -> EngineAPIResult:
    """
    Send a callback to a node.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param node_id: node ID
    :type node_id: str
    :param version: node execution version
    :type version: str
    :param data: callback data
    :type data: dict
    :return: api call result
    :rtype: EngineAPIResult
    """
    Engine(runtime).callback(node_id, version, data)
@_ensure_return_api_result
def get_pipeline_states(runtime: EngineRuntimeInterface, root_id: str, flat_children=True) -> EngineAPIResult:
    """
    Return the state tree of a task.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param root_id: root node ID
    :type root_id: str
    :param flat_children: keep every descendant in one flat mapping (True) or
        nest direct children only (False)
    :type flat_children: bool
    :return: api call result
    :rtype: EngineAPIResult
    """
    states = runtime.get_state_by_root(root_id)
    if not states:
        return {}
    root_state = None
    children = {}
    for s in states:
        if s.node_id != root_id:
            children[s.node_id] = {
                "id": s.node_id,
                "state": s.name,
                # NOTE(review): key literally ends with a colon ("root_id:");
                # kept unchanged for backward compatibility with consumers.
                "root_id:": s.root_id,
                "parent_id": s.parent_id,
                "version": s.version,
                "loop": s.loop,
                "retry": s.retry,
                "skip": s.skip,
                "error_ignorable": s.error_ignored,
                "error_ignored": s.error_ignored,
                "created_time": s.created_time,
                "started_time": s.started_time,
                "archived_time": s.archived_time,
                "children": {},
            }
        else:
            root_state = s
    # Fix: previously an AttributeError was raised here when none of the
    # returned states belonged to root_id itself; treat it like "no states".
    if root_state is None:
        return {}
    if not flat_children:
        # set node children
        for node_id, state in children.items():
            if state["parent_id"] in children:
                children[state["parent_id"]]["children"][node_id] = state
        # pop sub child
        for node_id in list(children.keys()):
            if children[node_id]["parent_id"] != root_state.node_id:
                children.pop(node_id)
    state_tree = {}
    state_tree[root_state.node_id] = {
        "id": root_state.node_id,
        "state": root_state.name,
        "root_id:": root_state.root_id,
        "parent_id": root_state.root_id,
        "version": root_state.version,
        "loop": root_state.loop,
        "retry": root_state.retry,
        "skip": root_state.skip,
        # Fix: these two fields previously read from the stale loop variable
        # `s` (the last iterated state) instead of the root state itself.
        "error_ignorable": root_state.error_ignored,
        "error_ignored": root_state.error_ignored,
        "created_time": root_state.created_time,
        "started_time": root_state.started_time,
        "archived_time": root_state.archived_time,
        "children": children,
    }
    return state_tree
@_ensure_return_api_result
def get_children_states(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult:
    """
    Return the state of a node and all of its children.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param node_id: parent node ID
    :type node_id: str
    :return: api call result
    :rtype: EngineAPIResult
    """
    parent_state = runtime.get_state_or_none(node_id)
    if not parent_state:
        return {}
    states = runtime.get_state_by_parent(node_id)
    children = {}
    for s in states:
        children[s.node_id] = {
            "id": s.node_id,
            "state": s.name,
            # NOTE(review): key literally ends with a colon ("root_id:");
            # kept unchanged for backward compatibility with consumers.
            "root_id:": s.root_id,
            "parent_id": s.parent_id,
            "version": s.version,
            "loop": s.loop,
            "retry": s.retry,
            "skip": s.skip,
            "error_ignorable": s.error_ignored,
            "error_ignored": s.error_ignored,
            "created_time": s.created_time,
            "started_time": s.started_time,
            "archived_time": s.archived_time,
            "children": {},
        }
    state_tree = {}
    state_tree[parent_state.node_id] = {
        "id": parent_state.node_id,
        "state": parent_state.name,
        "root_id:": parent_state.root_id,
        "parent_id": parent_state.root_id,
        "version": parent_state.version,
        "loop": parent_state.loop,
        "retry": parent_state.retry,
        "skip": parent_state.skip,
        "error_ignorable": parent_state.error_ignored,
        "error_ignored": parent_state.error_ignored,
        "created_time": parent_state.created_time,
        "started_time": parent_state.started_time,
        "archived_time": parent_state.archived_time,
        "children": children,
    }
    return state_tree
@_ensure_return_api_result
def get_execution_data_inputs(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult:
    """
    Get the input part of a node's execution data.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param node_id: node ID
    :type node_id: str
    :return: api call result
    :rtype: EngineAPIResult
    """
    return runtime.get_execution_data_inputs(node_id)
@_ensure_return_api_result
def get_execution_data_outputs(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult:
    """
    Get the output part of a node's execution data.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param node_id: node ID
    :type node_id: str
    :return: api call result
    :rtype: EngineAPIResult
    """
    return runtime.get_execution_data_outputs(node_id)
@_ensure_return_api_result
def get_execution_data(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult:
    """
    Get a node's execution data (inputs and outputs).

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param node_id: node ID
    :type node_id: str
    :return: api call result
    :rtype: EngineAPIResult
    """
    data = runtime.get_execution_data(node_id)
    return {"inputs": data.inputs, "outputs": data.outputs}
@_ensure_return_api_result
def get_data(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult:
    """
    Get a node's raw (pre-render) input data.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param node_id: node ID
    :type node_id: str
    :return: api call result
    :rtype: EngineAPIResult
    """
    data = runtime.get_data(node_id)
    return {
        "inputs": {k: {"need_render": v.need_render, "value": v.value} for k, v in data.inputs.items()},
        "outputs": data.outputs,
    }
@_ensure_return_api_result
def get_node_histories(runtime: EngineRuntimeInterface, node_id: str, loop: int = -1) -> EngineAPIResult:
    """
    Get the full history records of a node.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param node_id: node ID
    :type node_id: str
    :param loop: loop count to filter by; -1 means no filtering
    :type loop: int, optional
    :return: api call result
    :rtype: EngineAPIResult
    """
    return [
        {
            "id": h.id,
            "node_id": h.node_id,
            "started_time": h.started_time,
            "archived_time": h.archived_time,
            "loop": h.loop,
            "skip": h.skip,
            "version": h.version,
            "inputs": h.inputs,
            "outputs": h.outputs,
        }
        for h in runtime.get_histories(node_id, loop)
    ]
@_ensure_return_api_result
def get_node_short_histories(runtime: EngineRuntimeInterface, node_id: str, loop: int = -1) -> EngineAPIResult:
    """
    Get the brief history records of a node (no inputs/outputs).

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param node_id: node ID
    :type node_id: str
    :param loop: loop count to filter by; -1 means no filtering
    :type loop: int, optional
    :return: api call result
    :rtype: EngineAPIResult
    """
    return [
        {
            "id": h.id,
            "node_id": h.node_id,
            "started_time": h.started_time,
            "archived_time": h.archived_time,
            "loop": h.loop,
            "skip": h.skip,
            "version": h.version,
        }
        for h in runtime.get_short_histories(node_id, loop)
    ]
@_ensure_return_api_result
def get_pipeline_debug_info(runtime: EngineRuntimeInterface, pipeline_id: str):
    """
    Get debug information for a pipeline: its context and process info.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param pipeline_id: pipeline ID
    :type pipeline_id: str
    :return: api call result
    :rtype: EngineAPIResult
    """
    return {
        # NOTE(review): key is misspelled ("contex_values"); kept unchanged
        # for backward compatibility with existing consumers.
        "contex_values": runtime.get_context(pipeline_id),
        "processes": runtime.get_process_info_with_root_pipeline(pipeline_id),
    }
@_ensure_return_api_result
def get_node_debug_info(runtime: EngineRuntimeInterface, node_id: str):
    """
    Get debug information for a node: definition, data and state.

    Lookup failures for data/state are collected into the "err" list
    instead of aborting, so partial information is still returned.

    :param runtime: engine runtime instance
    :type runtime: EngineRuntimeInterface
    :param node_id: node ID
    :type node_id: str
    :return: api call result
    :rtype: EngineAPIResult
    """
    data = None
    state = None
    err = []
    try:
        data = runtime.get_data(node_id)
    except Exception as e:
        err.append(str(e))
    try:
        state = runtime.get_state(node_id)
    except Exception as e:
        err.append(str(e))
    return {
        "node": runtime.get_node(node_id),
        "data": data,
        "state": state,
        "err": err,
    }
@_ensure_return_api_result
def preview_node_inputs(
    runtime: EngineRuntimeInterface,
    pipeline: dict,
    node_id: str,
    subprocess_stack: Optional[List[str]] = None,
    root_pipeline_data: Optional[dict] = None,
    parent_params: Optional[dict] = None,
):
    """
    Preview the rendered inputs of a node.

    :param pipeline: preprocessed pipeline tree
    :type pipeline: dict
    :param node_id: node ID
    :type node_id: str
    :param subprocess_stack: subprocess ids, outermost first
    :type subprocess_stack: List[str]
    :param root_pipeline_data: root pipeline data
    :param parent_params: parameters passed down from the parent pipeline
    :return: api call result
    :rtype: EngineAPIResult
    """
    # Fix: replaced mutable default arguments ([] / {}) with None sentinels.
    subprocess_stack = subprocess_stack or []
    root_pipeline_data = root_pipeline_data or {}
    parent_params = parent_params or {}
    context_values = [
        ContextValue(key=key, type=VAR_CONTEXT_MAPPING[info["type"]], value=info["value"], code=info.get("custom_type"))
        for key, info in list(pipeline["data"].get("inputs", {}).items()) + list(parent_params.items())
    ]
    context = Context(runtime, context_values, root_pipeline_data)
    if subprocess_stack:
        # Descend one level: render the subprocess params in the current
        # context and recurse into the child pipeline.
        subprocess = subprocess_stack[0]
        child_pipeline = pipeline["activities"][subprocess]["pipeline"]
        param_data = {key: info["value"] for key, info in pipeline["activities"][subprocess]["params"].items()}
        hydrated_context = context.hydrate(deformat=True)
        hydrated_param_data = Template(param_data).render(hydrated_context)
        formatted_param_data = {key: {"value": value, "type": "plain"} for key, value in hydrated_param_data.items()}
        return preview_node_inputs(
            runtime=runtime,
            pipeline=child_pipeline,
            node_id=node_id,
            subprocess_stack=subprocess_stack[1:],
            root_pipeline_data=root_pipeline_data,
            parent_params=formatted_param_data,
        )
    raw_inputs = pipeline["activities"][node_id]["component"]["inputs"]
    raw_inputs = {key: info["value"] for key, info in raw_inputs.items()}
    hydrated_context = context.hydrate(deformat=True)
    inputs = Template(raw_inputs).render(hydrated_context)
    return inputs

View File

@@ -1,15 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from .builder import * # noqa
from .flow import * # noqa

View File

@@ -1,224 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import copy
import queue
from bamboo_engine.utils.string import unique_id
from .flow.data import Data, Params
from .flow.event import ExecutableEndEvent
__all__ = ["build_tree"]
# Skeleton of an empty pipeline tree; deep-copied at the start of build_tree.
__skeleton = {
    "id": None,
    "start_event": None,
    "end_event": None,
    "activities": {},
    "gateways": {},
    "flows": {},
    "data": {"inputs": {}, "outputs": []},
}
# Maps an element type name to the tree section that stores it.
__node_type = {
    "ServiceActivity": "activities",
    "SubProcess": "activities",
    "EmptyEndEvent": "end_event",
    "EmptyStartEvent": "start_event",
    "ParallelGateway": "gateways",
    "ConditionalParallelGateway": "gateways",
    "ExclusiveGateway": "gateways",
    "ConvergeGateway": "gateways",
}
# Element types treated as start / end events during the build.
__start_elem = {"EmptyStartEvent"}
__end_elem = {"EmptyEndEvent"}
# Node types whose "incoming" field is a list (may receive several flows).
__multiple_incoming_type = {
    "ServiceActivity",
    "ConvergeGateway",
    "EmptyEndEvent",
    "ParallelGateway",
    "ConditionalParallelGateway",
    "ExclusiveGateway",
    "SubProcess",
}
# Temporary tree key accumulating incoming flows; removed before returning.
__incoming = "__incoming"
def build_tree(start_elem, id=None, data=None):
    """Walk the builder elements breadth-first and produce a pipeline tree dict.

    :param start_elem: start event element of the flow
    :param id: pipeline id; a fresh one is generated when omitted
    :param data: pipeline data (Data instance or plain dict)
    """
    tree = copy.deepcopy(__skeleton)
    tree[__incoming] = {}
    visited = set()
    pending = queue.Queue()
    pending.put(start_elem)
    while not pending.empty():
        node = pending.get()
        # A node reached a second time only needs its incoming flows refreshed.
        if node.id in visited:
            __update(tree, node)
            continue
        for successor in node.outgoing:
            pending.put(successor)
        visited.add(node.id)
        __grow(tree, node)
    # Drop the temporary incoming-accumulator before returning the tree.
    del tree[__incoming]
    tree["id"] = id or unique_id("p")
    if isinstance(data, Data):
        data = data.to_dict()
    tree["data"] = data or tree["data"]
    return tree
def __update(tree, elem):
    # Refresh the "incoming" field of an already-grown node with the flows
    # accumulated so far (called when the BFS meets a node a second time).
    node_type = __node_type[elem.type()]
    # end_event is stored directly; all other node kinds are keyed by element id.
    node = tree[node_type] if node_type == "end_event" else tree[node_type][elem.id]
    node["incoming"] = tree[__incoming][elem.id]
def __grow(tree, elem):
    """Add one builder element to the tree, dispatching on its type.

    Each branch records the element in the matching tree section and, except
    for end events, allocates outgoing flow id(s) and registers them via
    __grow_flow.
    """
    if elem.type() in __start_elem:
        outgoing = unique_id("f")
        tree["start_event"] = {
            "incoming": "",
            "outgoing": outgoing,
            "type": elem.type(),
            "id": elem.id,
            "name": elem.name,
        }
        next_elem = elem.outgoing[0]
        __grow_flow(tree, outgoing, elem, next_elem)
    elif elem.type() in __end_elem or isinstance(elem, ExecutableEndEvent):
        # End events terminate the walk: no outgoing flow is created.
        tree["end_event"] = {
            "incoming": tree[__incoming][elem.id],
            "outgoing": "",
            "type": elem.type(),
            "id": elem.id,
            "name": elem.name,
        }
    elif elem.type() == "ServiceActivity":
        outgoing = unique_id("f")
        tree["activities"][elem.id] = {
            "incoming": tree[__incoming][elem.id],
            "outgoing": outgoing,
            "type": elem.type(),
            "id": elem.id,
            "name": elem.name,
            "error_ignorable": elem.error_ignorable,
            "timeout": elem.timeout,
            "skippable": elem.skippable,
            "retryable": elem.retryable,
            "component": elem.component_dict(),
            "optional": False,
        }
        next_elem = elem.outgoing[0]
        __grow_flow(tree, outgoing, elem, next_elem)
    elif elem.type() == "SubProcess":
        outgoing = unique_id("f")
        subprocess_param = elem.params.to_dict() if isinstance(elem.params, Params) else elem.params
        subprocess = {
            "id": elem.id,
            "incoming": tree[__incoming][elem.id],
            "name": elem.name,
            "outgoing": outgoing,
            "type": elem.type(),
            "params": subprocess_param,
        }
        # Recursively build the child pipeline rooted at the subprocess start.
        subprocess["pipeline"] = build_tree(start_elem=elem.start, id=elem.id, data=elem.data)
        tree["activities"][elem.id] = subprocess
        next_elem = elem.outgoing[0]
        __grow_flow(tree, outgoing, elem, next_elem)
    elif elem.type() == "ParallelGateway":
        # One flow id per outgoing branch.
        outgoing = [unique_id("f") for _ in range(len(elem.outgoing))]
        tree["gateways"][elem.id] = {
            "id": elem.id,
            "incoming": tree[__incoming][elem.id],
            "outgoing": outgoing,
            "type": elem.type(),
            "name": elem.name,
        }
        for i, next_elem in enumerate(elem.outgoing):
            __grow_flow(tree, outgoing[i], elem, next_elem)
    elif elem.type() in {"ExclusiveGateway", "ConditionalParallelGateway"}:
        outgoing = [unique_id("f") for _ in range(len(elem.outgoing))]
        tree["gateways"][elem.id] = {
            "id": elem.id,
            "incoming": tree[__incoming][elem.id],
            "outgoing": outgoing,
            "type": elem.type(),
            "name": elem.name,
            # Branch conditions keyed by the freshly allocated flow ids.
            "conditions": elem.link_conditions_with(outgoing),
        }
        for i, next_elem in enumerate(elem.outgoing):
            __grow_flow(tree, outgoing[i], elem, next_elem)
    elif elem.type() == "ConvergeGateway":
        outgoing = unique_id("f")
        tree["gateways"][elem.id] = {
            "id": elem.id,
            "incoming": tree[__incoming][elem.id],
            "outgoing": outgoing,
            "type": elem.type(),
            "name": elem.name,
        }
        next_elem = elem.outgoing[0]
        __grow_flow(tree, outgoing, elem, next_elem)
    else:
        # Fix: previously raised a bare Exception() with no message.
        raise Exception("unknown element type: {}".format(elem.type()))
def __grow_flow(tree, outgoing, elem, next_element):
    """Record the flow edge from elem to next_element and register it on the target's incoming map."""
    flow = {
        "is_default": False,
        "source": elem.id,
        "target": next_element.id,
        "id": outgoing,
    }
    tree["flows"][outgoing] = flow
    incoming_map = tree[__incoming]
    # Nodes that may receive several incoming flows accumulate a list;
    # all other nodes keep a single flow id.
    if next_element.type() in __multiple_incoming_type:
        incoming_map.setdefault(next_element.id, []).append(outgoing)
    else:
        incoming_map[next_element.id] = outgoing

View File

@@ -1,17 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from .activity import * # noqa
from .event import * # noqa
from .gateway import * # noqa
from .data import * # noqa

View File

@@ -1,51 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from .base import Element
from bamboo_engine.eri import NodeType
from bamboo_engine.utils.collections import FancyDict
__all__ = ["ServiceActivity", "SubProcess"]
class ServiceActivity(Element):
    """Flow element that executes a registered component service."""

    def __init__(
        self, component_code=None, error_ignorable=False, timeout=None, skippable=True, retryable=True, *args, **kwargs
    ):
        # component holds the service code plus the inputs that will be fed to it
        self.component = FancyDict({"code": component_code, "inputs": FancyDict({})})
        self.error_ignorable = error_ignorable
        self.timeout = timeout
        self.skippable = skippable
        self.retryable = retryable
        super(ServiceActivity, self).__init__(*args, **kwargs)

    def type(self):
        return NodeType.ServiceActivity.value

    def component_dict(self):
        """Serialize the component code and its inputs into plain dicts."""
        serialized_inputs = {key: var.to_dict() for key, var in self.component.inputs.items()}
        return {"code": self.component.code, "inputs": serialized_inputs}
class SubProcess(Element):
    """Flow element that embeds another pipeline as a child process."""

    def __init__(self, start=None, data=None, params=None, global_outputs=None, *args, **kwargs):
        self.start = start  # start event of the embedded pipeline
        self.data = data
        self.params = params or {}
        self.global_outputs = FancyDict(global_outputs or {})
        super(SubProcess, self).__init__(*args, **kwargs)

    def type(self):
        return NodeType.SubProcess.value

View File

@@ -1,78 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from bamboo_engine.utils.string import unique_id
__all__ = ["Element"]
class Element(object):
    """Base building block of a flow graph: an id, a display name and outgoing connections."""

    def __init__(self, id=None, name=None, outgoing=None):
        # a falsy id (None/"") triggers generation of a fresh unique id
        self.id = id or unique_id("e")
        self.name = name
        self.outgoing = outgoing or []

    def extend(self, element):
        """Connect self to element and return element (handy for chaining).

        :param element: target
        :rtype: Element
        """
        self.outgoing.append(element)
        return element

    def connect(self, *args):
        """Connect self to every element in args and return self.

        :param args: target elements
        :rtype: Element
        """
        self.outgoing.extend(args)
        return self

    def converge(self, element):
        """Connect the tail of every branch diverging from self to element; return element.

        :param element: target
        :rtype: Element
        """
        for branch in self.outgoing:
            branch.tail().connect(element)
        return element

    def to(self, element):
        """Fluent no-op hop: simply return element."""
        return element

    def tail(self):
        """Follow first outgoing links until an element with no outgoing remains.

        :rtype: Element
        """
        node = self
        while node.outgoing:
            node = node.outgoing[0]
        return node

    def type(self):
        raise NotImplementedError()

    def __eq__(self, other):
        # elements compare equal by id only
        return self.id == other.id

    def __repr__(self):
        return "<{cls} {name}:{id}>".format(cls=type(self).__name__, name=self.name, id=self.id)

View File

@@ -1,96 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from bamboo_engine.utils.collections import FancyDict
class Data(object):
    """Pipeline data declaration: inputs, exported output keys, and keys rendered ahead of execution."""

    def __init__(self, inputs=None, outputs=None, pre_render_keys=None):
        self.inputs = FancyDict(inputs or {})
        self.outputs = outputs or []
        self.pre_render_keys = pre_render_keys or []

    def to_dict(self):
        """Serialize to a plain dict, expanding Var inputs via their to_dict."""
        serialized_inputs = {}
        for key, value in self.inputs.items():
            serialized_inputs[key] = value.to_dict() if isinstance(value, Var) else value
        return {
            "inputs": serialized_inputs,
            "outputs": self.outputs,
            "pre_render_keys": self.pre_render_keys,
        }
class Params(object):
    """Parameter mapping passed from a parent pipeline into a subprocess."""

    def __init__(self, params=None):
        self.params = FancyDict(params or {})

    def to_dict(self):
        """Serialize params, expanding Var values via their to_dict."""
        return {key: value.to_dict() if isinstance(value, Var) else value for key, value in self.params.items()}
class Var(object):
    """Variable declaration used in pipeline data; ``type`` decides how the value is resolved."""

    PLAIN = "plain"    # literal value
    SPLICE = "splice"  # template resolved against the context
    LAZY = "lazy"      # resolved by a custom variable implementation

    def __init__(self, type, value, custom_type=None):
        self.type = type
        self.value = value
        self.custom_type = custom_type

    def to_dict(self):
        """Serialize; custom_type is only emitted for LAZY variables."""
        serialized = {"type": self.type, "value": self.value}
        if self.type == self.LAZY:
            serialized["custom_type"] = self.custom_type
        return serialized
class DataInput(Var):
    """Var that is exposed as a subprocess parameter (serialized with ``is_param``).

    NOTE: the original defined an ``__init__`` that only forwarded to
    ``Var.__init__``; it was a no-op override and has been removed —
    construction behavior is unchanged.
    """

    def to_dict(self):
        """Serialize like Var, additionally marking the value as a parameter."""
        base = super(DataInput, self).to_dict()
        base["is_param"] = True
        return base
class NodeOutput(Var):
    """Var referencing an output key of another node; its own value is always None."""

    def __init__(self, source_act, source_key, *args, **kwargs):
        self.source_act = source_act  # id of the node that produces the value
        self.source_key = source_key  # output key on that node
        kwargs["value"] = None  # the real value is supplied by the source node at runtime
        super(NodeOutput, self).__init__(*args, **kwargs)

    def to_dict(self):
        serialized = super(NodeOutput, self).to_dict()
        serialized["source_act"] = self.source_act
        serialized["source_key"] = self.source_key
        return serialized
class RewritableNodeOutput(Var):
    """Var that tracks the output of one of several source nodes; its own value is always None."""

    def __init__(self, source_act, *args, **kwargs):
        self.source_act = source_act  # source node description (list-like per callers)
        kwargs["value"] = None  # resolved at runtime from the source nodes
        super(RewritableNodeOutput, self).__init__(*args, **kwargs)

    def to_dict(self):
        serialized = super(RewritableNodeOutput, self).to_dict()
        serialized["source_act"] = self.source_act
        return serialized

View File

@@ -1,35 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from bamboo_engine.eri import NodeType
from .base import Element
__all__ = ["EmptyEndEvent", "EmptyStartEvent", "ExecutableEndEvent"]
class EmptyStartEvent(Element):
    """Start event with no behavior of its own; marks the entry of a pipeline."""
    def type(self):
        return NodeType.EmptyStartEvent.value
class EmptyEndEvent(Element):
    """End event with no behavior of its own; marks the exit of a pipeline."""
    def type(self):
        return NodeType.EmptyEndEvent.value
class ExecutableEndEvent(Element):
    """End event whose concrete node type string is supplied by the caller."""
    def __init__(self, type, **kwargs):
        # the caller-provided type string is returned verbatim by type()
        self._type = type
        super(ExecutableEndEvent, self).__init__(**kwargs)
    def type(self):
        return self._type

View File

@@ -1,59 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from bamboo_engine.eri import NodeType
from .base import Element
__all__ = [
"ParallelGateway",
"ExclusiveGateway",
"ConvergeGateway",
"ConditionalParallelGateway",
]
class ParallelGateway(Element):
    """Gateway that forks execution into all of its outgoing branches."""
    def type(self):
        return NodeType.ParallelGateway.value
class ConditionGateway(Element):
    """Base for gateways whose outgoing branches are guarded by boolean expressions."""

    def __init__(self, conditions=None, *args, **kwargs):
        self.conditions = conditions or {}  # branch index -> evaluate expression
        super(ConditionGateway, self).__init__(*args, **kwargs)

    def add_condition(self, index, evaluate):
        """Attach the guard expression for the branch at the given index."""
        self.conditions[index] = evaluate

    def link_conditions_with(self, outgoing):
        """Map each generated flow id to its guard expression, matching branch order."""
        return {flow_id: {"evaluate": self.conditions[i]} for i, flow_id in enumerate(outgoing)}
class ConditionalParallelGateway(ConditionGateway):
    """Parallel gateway whose branches are guarded by conditions."""
    def type(self):
        return NodeType.ConditionalParallelGateway.value
class ExclusiveGateway(ConditionGateway):
    """Gateway that follows a single branch selected by its condition."""
    def type(self):
        return NodeType.ExclusiveGateway.value
class ConvergeGateway(Element):
    """Gateway that joins previously diverged branches back into a single flow."""
    def type(self):
        return NodeType.ConvergeGateway.value

View File

@@ -1,66 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
# Engine internal configuration module
class Settings:
    """
    Engine-wide configuration object.
    """
    # Builtin names shielded inside the Mako template sandbox to limit
    # arbitrary code execution from user-provided templates.
    MAKO_SANDBOX_SHIELD_WORDS = [
        "ascii",
        "bytearray",
        "bytes",
        "callable",
        "chr",
        "classmethod",
        "compile",
        "delattr",
        "dir",
        "divmod",
        "exec",
        "eval",
        "filter",
        "frozenset",
        "getattr",
        "globals",
        "hasattr",
        "hash",
        "help",
        "id",
        "input",
        "isinstance",
        "issubclass",
        "iter",
        "locals",
        "map",
        "memoryview",
        "next",
        "object",
        "open",
        "print",
        "property",
        "repr",
        "setattr",
        "staticmethod",
        "super",
        "type",
        "vars",
        "__import__",
    ]
    # Modules made importable inside the Mako sandbox (empty by default).
    MAKO_SANDBOX_IMPORT_MODULES = {}
    # Offset applied to rerun indexes — presumably added when computing a
    # pipeline's rerun/loop counter; confirm against call sites.
    RERUN_INDEX_OFFSET = 0

View File

@@ -1,169 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
# Pipeline-context logic module
import logging
from weakref import WeakValueDictionary
from typing import List, Dict, Any
from bamboo_engine.eri import (
ContextValue,
EngineRuntimeInterface,
Variable,
ContextValueType,
)
from .template.template import Template
from .utils.string import deformat_var_key
logger = logging.getLogger("bamboo_engine")
class PlainVariable(Variable):
    """
    Plain variable: wraps a literal value.
    """
    def __init__(self, key: str, value: Any):
        self.key = key
        self.value = value
    def get(self):
        # literal value is returned as-is, no resolution needed
        return self.value
class SpliceVariable(Variable):
    """
    Template variable: tries to resolve the templates it references against
    the pipeline context when read.
    """
    def __init__(self, key: str, value: Any, pool: WeakValueDictionary):
        self.key = key
        self.value = value
        self.pool = pool  # shared (weak) mapping of context key -> Variable
        # keys of other context values referenced inside this template
        self.refs = [k for k in Template(value).get_reference()]
    def get(self):
        # Build a rendering context from the references that can currently be
        # resolved, then render this variable's template against it.
        context = {}
        for r in self.refs:
            if r not in self.pool:
                # unresolvable references are simply omitted from the context
                continue
            var = self.pool[r]
            if issubclass(var.__class__, Variable):
                # referenced Variables are resolved recursively via get()
                var = var.get()
            context[deformat_var_key(r)] = var
        return Template(self.value).render(context=context)
def _raw_key(key: str) -> str:
return key
class Context:
    """
    Pipeline execution context: wraps the operations and logic the engine
    applies to context data while a pipeline is running.
    """
    def __init__(
        self,
        runtime: EngineRuntimeInterface,
        values: List[ContextValue],
        additional_data: dict,
    ):
        """
        :param runtime: engine runtime instance
        :type runtime: EngineRuntimeInterface
        :param values: list of context values
        :type values: List[ContextValue]
        :param additional_data: extra data dict
        :type additional_data: dict
        """
        self.values = values
        self.runtime = runtime
        self.pool = WeakValueDictionary()
        self.variables = {}
        self.additional_data = additional_data
        # Convert context values into Variable objects; each variable
        # encapsulates its own resolution logic behind the Variable interface.
        for v in self.values:
            if v.type is ContextValueType.PLAIN:
                self.variables[v.key] = PlainVariable(key=v.key, value=v.value)
            elif v.type is ContextValueType.SPLICE:
                self.variables[v.key] = SpliceVariable(key=v.key, value=v.value, pool=self.pool)
            elif v.type is ContextValueType.COMPUTE:
                self.variables[v.key] = self.runtime.get_compute_variable(
                    code=v.code,
                    key=v.key,
                    value=SpliceVariable(key=v.key, value=v.value, pool=self.pool),
                    additional_data=self.additional_data,
                )
        # NOTE: self.variables holds the strong references; self.pool is weak,
        # so pool entries stay alive exactly as long as this Context does.
        for k, var in self.variables.items():
            self.pool[k] = var
    def hydrate(self, deformat=False, mute_error=False) -> Dict[str, Any]:
        """
        Flatten the current context into plain Dict[str, Any] data, resolving
        and substituting variable references along the way.
        :param deformat: whether to turn returned keys from ${%s} into %s
        :type deformat: bool, optional
        :param mute_error: when True, resolution errors are logged and stored
            as str(e) under the key instead of being raised
        :type mute_error: bool, optional
        :return: plain context data dict
        :rtype: Dict[str, Any]
        """
        key_formatter = deformat_var_key if deformat else _raw_key
        hydrated = {}
        for key, var in self.pool.items():
            try:
                hydrated[key_formatter(key)] = var.get()
            except Exception as e:
                if not mute_error:
                    raise e
                logger.exception("%s get error." % key)
                hydrated[key_formatter(key)] = str(e)
        return hydrated
    def extract_outputs(
        self,
        pipeline_id: str,
        data_outputs: Dict[str, str],
        execution_data_outputs: Dict[str, Any],
    ):
        """
        Extract a node's outputs into the pipeline context.
        :param pipeline_id: id of the pipeline/subprocess the context belongs to
        :type pipeline_id: str
        :param data_outputs: mapping of node output key -> context key
        :type data_outputs: Dict[str, str]
        :param execution_data_outputs: the node's execution data outputs
        :type execution_data_outputs: Dict[str, Any]
        """
        update = {}
        for origin_key, target_key in data_outputs.items():
            # outputs the node did not actually produce are skipped
            if origin_key not in execution_data_outputs:
                continue
            update[target_key] = ContextValue(
                key=target_key,
                type=ContextValueType.PLAIN,
                value=execution_data_outputs[origin_key],
            )
        self.runtime.upsert_plain_context_values(pipeline_id=pipeline_id, update=update)

File diff suppressed because it is too large Load Diff

View File

@@ -1,19 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
"""
引擎运行时接口定义模块
"""
from .interfaces import * # noqa
from .models import * # noqa

File diff suppressed because it is too large Load Diff

View File

@@ -1,646 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
# Model objects used by the ERI (Engine Runtime Interface)
from enum import Enum
from datetime import datetime
from typing import List, Dict, Any, Optional
from bamboo_engine.utils.object import Representable
from bamboo_engine.utils.collections import FancyDict
from bamboo_engine.exceptions import ValueError
# node-related models
class NodeType(Enum):
    """
    Enumeration of pipeline node types.
    """
    ServiceActivity = "ServiceActivity"
    SubProcess = "SubProcess"
    ExclusiveGateway = "ExclusiveGateway"
    ParallelGateway = "ParallelGateway"
    ConditionalParallelGateway = "ConditionalParallelGateway"
    ConvergeGateway = "ConvergeGateway"
    EmptyStartEvent = "EmptyStartEvent"
    EmptyEndEvent = "EmptyEndEvent"
    ExecutableEndEvent = "ExecutableEndEvent"
class Node(Representable):
    """
    Node description model.
    """
    def __init__(
        self,
        id: str,
        type: NodeType,
        target_flows: List[str],
        target_nodes: List[str],
        targets: Dict[str, str],
        root_pipeline_id: str,
        parent_pipeline_id: str,
        can_skip: bool = True,
        can_retry: bool = True,
    ):
        """
        :param id: node id
        :type id: str
        :param type: node type
        :type type: NodeType
        :param target_flows: outgoing flow ids
        :type target_flows: List[str]
        :param target_nodes: target node ids
        :type target_nodes: List[str]
        :param targets: mapping of outgoing flow id -> target node id
        :type targets: Dict[str, str]
        :param root_pipeline_id: root pipeline id
        :type root_pipeline_id: str
        :param parent_pipeline_id: parent pipeline id
        :type parent_pipeline_id: str
        :param can_skip: whether the node can be skipped
        :type can_skip: bool
        :param can_retry: whether the node can be retried
        :type can_retry: bool
        """
        self.id = id
        self.type = type
        self.targets = targets
        self.target_flows = target_flows
        self.target_nodes = target_nodes
        self.root_pipeline_id = root_pipeline_id
        self.parent_pipeline_id = parent_pipeline_id
        self.can_skip = can_skip
        self.can_retry = can_retry
class EmptyStartEvent(Node):
    """Start event node; no fields beyond Node."""
    pass
class ConvergeGateway(Node):
    """Converge gateway node; no fields beyond Node."""
    pass
class EmptyEndEvent(Node):
    """End event node; no fields beyond Node."""
    pass
class Condition(Representable):
    """
    Branch condition.
    """
    def __init__(self, name: str, evaluation: str, target_id: str, flow_id: str):
        """
        :param name: condition name
        :type name: str
        :param evaluation: condition expression
        :type evaluation: str
        :param target_id: target node id
        :type target_id: str
        :param flow_id: target flow id
        :type flow_id: str
        """
        self.name = name
        self.evaluation = evaluation
        self.target_id = target_id
        self.flow_id = flow_id
class ParallelGateway(Node):
    """
    Parallel gateway.
    """
    def __init__(self, converge_gateway_id: str, *args, **kwargs):
        """
        :param converge_gateway_id: id of the matching converge gateway
        :type converge_gateway_id: str
        """
        super().__init__(*args, **kwargs)
        self.converge_gateway_id = converge_gateway_id
class ConditionalParallelGateway(Node):
    """
    Conditional parallel gateway.
    """
    def __init__(self, conditions: List[Condition], converge_gateway_id: str, *args, **kwargs):
        """
        :param conditions: branch conditions
        :type conditions: List[Condition]
        :param converge_gateway_id: id of the matching converge gateway
        :type converge_gateway_id: str
        """
        super().__init__(*args, **kwargs)
        self.conditions = conditions
        self.converge_gateway_id = converge_gateway_id
class ExclusiveGateway(Node):
    """
    Exclusive (branch) gateway.
    """
    def __init__(self, conditions: List[Condition], *args, **kwargs):
        """
        :param conditions: branch conditions
        :type conditions: List[Condition]
        """
        super().__init__(*args, **kwargs)
        self.conditions = conditions
class ServiceActivity(Node):
    """
    Service activity node.
    """
    def __init__(self, code: str, version: str, timeout: Optional[int], error_ignorable: bool, *args, **kwargs):
        """
        :param code: service code
        :type code: str
        :param version: service version
        :type version: str
        :param timeout: timeout limit
        :type timeout: Optional[int]
        :param error_ignorable: whether errors are ignored
        :type error_ignorable: bool
        """
        super().__init__(*args, **kwargs)
        self.code = code
        self.version = version
        self.timeout = timeout
        self.error_ignorable = error_ignorable
class SubProcess(Node):
    """
    Subprocess node.
    """
    def __init__(self, start_event_id: str, *args, **kwargs):
        """
        :param start_event_id: id of the subprocess start event
        :type start_event_id: str
        """
        super().__init__(*args, **kwargs)
        self.start_event_id = start_event_id
class ExecutableEndEvent(Node):
    """
    Executable end event node.
    """
    def __init__(self, code: str, *args, **kwargs):
        """
        :param code: code of the executable end event implementation
        :type code: str
        """
        super().__init__(*args, **kwargs)
        self.code = code
# runtime-related models
class ScheduleType(Enum):
    """
    Schedule types: single callback, multiple callbacks, or polling.
    """
    CALLBACK = 1
    MULTIPLE_CALLBACK = 2
    POLL = 3
class Schedule(Representable):
    """
    Schedule object.
    """
    def __init__(
        self,
        id: int,
        type: ScheduleType,
        process_id: int,
        node_id: str,
        finished: bool,
        expired: bool,
        version: str,
        times: int,
    ):
        """
        :param id: schedule id
        :type id: int
        :param type: schedule type
        :type type: ScheduleType
        :param process_id: process id
        :type process_id: int
        :param node_id: node id
        :type node_id: str
        :param finished: whether scheduling has finished
        :type finished: bool
        :param expired: whether the schedule has expired
        :type expired: bool
        :param version: bound state version
        :type version: str
        :param times: number of times scheduled
        :type times: int
        """
        self.id = id
        self.type = type
        self.process_id = process_id
        self.node_id = node_id
        self.finished = finished
        self.expired = expired
        self.version = version
        self.times = times
class State(Representable):
    """
    Node state object.
    """
    def __init__(
        self,
        node_id: str,
        root_id: str,
        parent_id: str,
        name: str,
        version: str,
        loop: int,
        inner_loop: int,
        retry: int,
        skip: bool,
        error_ignored: bool,
        created_time: datetime,
        started_time: datetime,
        archived_time: datetime,
    ):
        """
        :param node_id: node id
        :type node_id: str
        :param root_id: root pipeline id
        :type root_id: str
        :param parent_id: parent pipeline id
        :type parent_id: str
        :param name: state name
        :type name: str
        :param version: state version
        :type version: str
        :param loop: re-entry (loop) count
        :type loop: int
        :param inner_loop: subprocess re-entry count
        :type inner_loop: int
        :param retry: retry count
        :type retry: int
        :param skip: whether the node was skipped
        :type skip: bool
        :param error_ignored: whether an error was auto-ignored
        :type error_ignored: bool
        :param created_time: creation time
        :type created_time: datetime
        :param started_time: start time
        :type started_time: datetime
        :param archived_time: archive time
        :type archived_time: datetime
        """
        self.node_id = node_id
        self.root_id = root_id
        self.parent_id = parent_id
        self.name = name
        self.version = version
        self.loop = loop
        self.inner_loop = inner_loop
        self.retry = retry
        self.skip = skip
        self.error_ignored = error_ignored
        self.created_time = created_time
        self.started_time = started_time
        self.archived_time = archived_time
class DataInput(Representable):
    """
    Node data input item.
    """
    def __init__(self, need_render: bool, value: Any):
        """
        :param need_render: whether the value needs template rendering
        :type need_render: bool
        :param value: the input value
        :type value: Any
        """
        self.need_render = need_render
        self.value = value
class Data(Representable):
    """
    Node data object.
    """
    def __init__(self, inputs: Dict[str, DataInput], outputs: Dict[str, str]):
        """
        :param inputs: input data
        :type inputs: Dict[str, DataInput]
        :param outputs: node output configuration
        :type outputs: Dict[str, str]
        """
        self.inputs = inputs
        self.outputs = outputs
    def plain_inputs(self) -> Dict[str, Any]:
        """
        Return the inputs stripped of their DataInput wrappers.
        """
        return {key: di.value for key, di in self.inputs.items()}
    def need_render_inputs(self) -> Dict[str, Any]:
        """
        Return only the inputs that require template rendering.
        """
        return {key: di.value for key, di in self.inputs.items() if di.need_render}
    def render_escape_inputs(self) -> Dict[str, Any]:
        """
        Return only the inputs that must NOT be template rendered.
        """
        return {key: di.value for key, di in self.inputs.items() if not di.need_render}
class ExecutionData(Representable):
    """
    Node execution data (inputs/outputs snapshot).
    """
    def __init__(self, inputs: Optional[dict], outputs: Optional[dict]):
        """
        :param inputs: input data, may be None
        :type inputs: Optional[dict]
        :param outputs: output data, may be None
        :type outputs: Optional[dict]
        """
        # Guard against None: the signature explicitly allows Optional[dict],
        # but a dict-based FancyDict(None) would raise TypeError.
        self.inputs = FancyDict(inputs or {})
        self.outputs = FancyDict(outputs or {})
class ExecutionHistory(Representable):
    """
    Node execution history entry.
    """
    def __init__(
        self,
        id: str,
        node_id: str,
        started_time: datetime,
        archived_time: datetime,
        loop: int,
        skip: bool,
        retry: int,
        version: str,
        inputs: dict,
        outputs: dict,
    ):
        """
        :param id: history id
        :type id: str
        :param node_id: node id
        :type node_id: str
        :param started_time: start time
        :type started_time: datetime
        :param archived_time: archive time
        :type archived_time: datetime
        :param loop: re-entry (loop) count
        :type loop: int
        :param skip: whether the node was skipped
        :type skip: bool
        :param retry: retry count
        :type retry: int
        :param version: state version
        :type version: str
        :param inputs: input data
        :type inputs: dict
        :param outputs: output data
        :type outputs: dict
        """
        self.id = id
        self.node_id = node_id
        self.started_time = started_time
        self.archived_time = archived_time
        self.loop = loop
        self.skip = skip
        self.retry = retry
        self.version = version
        self.inputs = inputs
        self.outputs = outputs
class ExecutionShortHistory(Representable):
    """
    Abbreviated node execution history entry (no input/output payloads).
    """
    def __init__(
        self,
        id: str,
        node_id: str,
        started_time: datetime,
        archived_time: datetime,
        loop: int,
        skip: bool,
        retry: int,
        version: str,
    ):
        """
        :param id: history id
        :type id: str
        :param node_id: node id
        :type node_id: str
        :param started_time: start time
        :type started_time: datetime
        :param archived_time: archive time
        :type archived_time: datetime
        :param loop: re-entry (loop) count
        :type loop: int
        :param skip: whether the node was skipped
        :type skip: bool
        :param retry: retry count
        :type retry: int
        :param version: state version
        :type version: str
        """
        self.id = id
        self.node_id = node_id
        self.started_time = started_time
        self.archived_time = archived_time
        self.loop = loop
        self.skip = skip
        self.retry = retry
        self.version = version
class CallbackData(Representable):
    """
    Node callback data.
    """
    def __init__(self, id: int, node_id: str, version: str, data: dict):
        """
        :param id: data id
        :type id: int
        :param node_id: node id
        :type node_id: str
        :param version: state version
        :type version: str
        :param data: callback payload
        :type data: dict
        """
        self.id = id
        self.node_id = node_id
        self.version = version
        self.data = data
class SuspendedProcessInfo(Representable):
    """
    Suspended process information.
    """
    def __init__(
        self,
        process_id: int,
        current_node: str,
        root_pipeline_id: str,
        pipeline_stack: List[str],
    ):
        """
        :param process_id: process id
        :type process_id: int
        :param current_node: current node id
        :type current_node: str
        :param root_pipeline_id: root pipeline id
        :type root_pipeline_id: str
        :param pipeline_stack: pipeline stack
        :type pipeline_stack: List[str]
        """
        self.process_id = process_id
        self.current_node = current_node
        self.root_pipeline_id = root_pipeline_id
        self.pipeline_stack = pipeline_stack
    @property
    def top_pipeline_id(self):
        # top of the pipeline stack: the innermost (currently executing) pipeline
        return self.pipeline_stack[-1]
class ProcessInfo(Representable):
    """
    Process information.
    """
    def __init__(
        self,
        process_id: int,
        destination_id: str,
        root_pipeline_id: str,
        pipeline_stack: List[str],
        parent_id: int,
    ):
        """
        :param process_id: process id
        :type process_id: int
        :param destination_id: id of the process destination node
        :type destination_id: str
        :param root_pipeline_id: root pipeline id
        :type root_pipeline_id: str
        :param pipeline_stack: pipeline stack
        :type pipeline_stack: List[str]
        :param parent_id: parent process id
        :type parent_id: int
        """
        self.process_id = process_id
        self.destination_id = destination_id
        self.parent_id = parent_id
        self.root_pipeline_id = root_pipeline_id
        self.pipeline_stack = pipeline_stack
    @property
    def top_pipeline_id(self):
        # top of the pipeline stack: the innermost (currently executing) pipeline
        return self.pipeline_stack[-1]
class DispatchProcess(Representable):
    """
    Information about a process waiting to be dispatched.
    """
    def __init__(self, process_id: int, node_id: str):
        """
        :param process_id: process id
        :type process_id: int
        :param node_id: id of the node where scheduling starts
        :type node_id: str
        """
        self.process_id = process_id
        self.node_id = node_id
class ContextValueType(Enum):
    """
    Context value types: PLAIN literals, SPLICE templates, and COMPUTE
    values backed by a compute-variable implementation.
    """
    PLAIN = 1
    SPLICE = 2
    COMPUTE = 3
class ContextValue(Representable):
    """
    A single context entry; COMPUTE-typed values must carry the code of
    their compute-variable implementation.
    """
    def __init__(self, key: str, type: ContextValueType, value: Any, code: Optional[str] = None):
        # COMPUTE values are materialized via the runtime, which requires `code`
        if type is ContextValueType.COMPUTE and code is None:
            raise ValueError("code can't be none when type is COMPUTE")
        self.key = key
        self.type = type
        self.value = value
        self.code = code

View File

@@ -1,61 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
# Exception definitions module
class EngineException(Exception):
    """Root of the engine exception hierarchy."""
    pass
class InvalidOperationError(EngineException):
    """Raised when an operation is not allowed in the current state."""
    pass
class NotFoundError(EngineException):
    """Raised when a requested entity cannot be found."""
    pass
class ValueError(EngineException):
    """Engine-domain value error. NOTE: shadows the builtin ValueError within this package."""
    pass
class StateVersionNotMatchError(EngineException):
    """Raised when a state operation targets a stale state version."""
    pass
class TreeInvalidException(EngineException):
    """Base for pipeline-tree validation failures."""
    pass
class ConnectionValidateError(TreeInvalidException):
    """Node-connection validation failure; carries the offending nodes and detail."""
    def __init__(self, failed_nodes, detail, *args):
        self.failed_nodes = failed_nodes
        self.detail = detail
        super(ConnectionValidateError, self).__init__(*args)
class ConvergeMatchError(TreeInvalidException):
    """Gateway/converge matching failure; carries the gateway id."""
    def __init__(self, gateway_id, *args):
        self.gateway_id = gateway_id
        super(ConvergeMatchError, self).__init__(*args)
class StreamValidateError(TreeInvalidException):
    """Stream validation failure; carries the node id."""
    def __init__(self, node_id, *args):
        self.node_id = node_id
        super(StreamValidateError, self).__init__(*args)
class IsolateNodeError(TreeInvalidException):
    """Raised when the tree contains nodes unreachable from the start event."""
    pass

View File

@@ -1,243 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
# Node-handler logic module
from typing import Optional, List
from abc import ABCMeta, abstractmethod
from bamboo_engine import states
from .eri import (
EngineRuntimeInterface,
Node,
Schedule,
CallbackData,
ScheduleType,
DispatchProcess,
ProcessInfo,
NodeType,
)
from .exceptions import NotFoundError, InvalidOperationError
def register_handler(type: NodeType):
"""
节点 Handler 注册函数
:param type: 节点类型
:type type: NodeType
"""
def register(cls):
HandlerFactory.add_handler(type, cls)
return cls
return register
class ExecuteResult:
    """
    Result returned by a handler's execute method.
    """

    def __init__(
        self,
        should_sleep: bool,
        schedule_ready: bool,
        schedule_type: Optional[ScheduleType],
        schedule_after: int,
        dispatch_processes: List[DispatchProcess],
        next_node_id: Optional[str],
        should_die: bool = False,
    ):
        """
        :param should_sleep: whether the current process should go to sleep
        :type should_sleep: bool
        :param schedule_ready: whether the handled node is ready to be scheduled
        :type schedule_ready: bool
        :param schedule_type: schedule type of the handled node
        :type schedule_type: Optional[ScheduleType]
        :param schedule_after: delay in seconds before scheduling starts
        :type schedule_after: int
        :param dispatch_processes: child processes to dispatch
        :type dispatch_processes: List[DispatchProcess]
        :param next_node_id: id of the next node to handle in the advance loop
        :type next_node_id: Optional[str]
        :param should_die: whether the current process should die, defaults to False
        :type should_die: bool, optional
        """
        # assignments ordered back-to-front; all fields are independent
        self.should_die = should_die
        self.next_node_id = next_node_id
        self.dispatch_processes = dispatch_processes
        self.schedule_after = schedule_after
        self.schedule_type = schedule_type
        self.schedule_ready = schedule_ready
        self.should_sleep = should_sleep
class ScheduleResult:
    """
    Result object returned by a handler's ``schedule`` method.
    """

    def __init__(
        self,
        has_next_schedule: bool,
        schedule_after: int,
        schedule_done: bool,
        next_node_id: Optional[str],
    ):
        """
        :param has_next_schedule: whether another schedule round is needed
        :type has_next_schedule: bool
        :param schedule_after: start the next schedule after this many seconds
        :type schedule_after: int
        :param schedule_done: whether scheduling has finished
        :type schedule_done: bool
        :param next_node_id: ID of the node to execute once scheduling is done
        :type next_node_id: Optional[str]
        """
        # Assignments are independent; order does not matter.
        self.next_node_id = next_node_id
        self.schedule_done = schedule_done
        self.schedule_after = schedule_after
        self.has_next_schedule = has_next_schedule
class NodeHandler(metaclass=ABCMeta):
    """
    Base class for node handlers, which encapsulate the execute and schedule
    logic for the different node types.
    """

    # output keys used to expose loop counters in node outputs
    LOOP_KEY = "_loop"
    INNER_LOOP_KEY = "_inner_loop"

    def __init__(self, node: Node, runtime: EngineRuntimeInterface):
        """
        :param node: node instance handled by this handler
        :type node: Node
        :param runtime: engine runtime instance
        :type runtime: EngineRuntimeInterface
        """
        self.node = node
        self.runtime = runtime

    @abstractmethod
    def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult:
        """
        Execute logic of the node.

        :param process_info: process information
        :type process_info: ProcessInfo
        :param loop: node re-entry count
        :type loop: int
        :param inner_loop: current pipeline re-entry count
        :type inner_loop: int
        :param version: execution version
        :type version: str
        :return: execution result
        :rtype: ExecuteResult
        """

    def schedule(
        self,
        process_info: ProcessInfo,
        loop: int,
        inner_loop: int,
        schedule: Schedule,
        callback_data: Optional[CallbackData] = None,
    ) -> ScheduleResult:
        """
        Schedule logic of the node; node types that do not support scheduling
        may leave this method unimplemented.

        :param process_info: process information
        :type process_info: ProcessInfo
        :param loop: node re-entry count
        :type loop: int
        :param inner_loop: current pipeline re-entry count
        :type inner_loop: int
        :param schedule: Schedule instance
        :type schedule: Schedule
        :param callback_data: callback data, defaults to None
        :type callback_data: Optional[CallbackData], optional
        :return: schedule result
        :rtype: ScheduleResult
        """
        raise NotImplementedError()

    def _execute_fail(self, ex_data: str) -> ExecuteResult:
        # Persist the failure: fetch current outputs, flip the node state to
        # FAILED, then attach the error message to the node's outputs.
        outputs = self.runtime.get_execution_data_outputs(self.node.id)
        self.runtime.set_state(node_id=self.node.id, to_state=states.FAILED, set_archive_time=True)
        outputs["ex_data"] = ex_data
        self.runtime.set_execution_data_outputs(self.node.id, outputs)
        return ExecuteResult(
            should_sleep=True,
            schedule_ready=False,
            schedule_type=None,
            schedule_after=-1,
            dispatch_processes=[],
            next_node_id=None,
        )

    def _get_plain_inputs(self, node_id: str):
        # Return only the plain ``value`` of each data input for the node.
        data_inputs = self.runtime.get_data_inputs(node_id)
        return {key: data_input.value for key, data_input in data_inputs.items()}
class HandlerFactory:
    """
    Factory that maps node types to their registered handler classes.
    """

    _handlers = {}

    @classmethod
    def add_handler(cls, type: NodeType, handler_cls):
        """
        Register a handler class for a node type.

        :param type: node type to register the handler for
        :type type: NodeType
        :param handler_cls: handler class; must subclass ``NodeHandler``
        :type handler_cls: type
        :raises InvalidOperationError: if ``handler_cls`` is not a ``NodeHandler`` subclass
        """
        if not issubclass(handler_cls, NodeHandler):
            raise InvalidOperationError(
                "register handler err: {} is not subclass of {}".format(handler_cls, "NodeHandler")
            )
        cls._handlers[type.value] = handler_cls

    @classmethod
    def get_handler(cls, node: Node, runtime: EngineRuntimeInterface) -> NodeHandler:
        """
        Instantiate the handler registered for the given node's type.

        :param node: node instance
        :type node: Node
        :param runtime: engine runtime instance
        :type runtime: EngineRuntimeInterface
        :raises NotFoundError: if no handler is registered for the node's type
        :return: handler instance bound to the node and runtime
        :rtype: NodeHandler
        """
        handler_cls = cls._handlers.get(node.type.value)
        if handler_cls is None:
            raise NotFoundError("can not find handler for {} type node".format(node.type.value))
        return handler_cls(node, runtime)

View File

@@ -1,28 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
"""
节点处理逻辑存放模块
"""
def register():
    """
    Import every built-in handler module so that each module's
    ``@register_handler`` decorator runs and registers its handler class.
    """
    from .conditional_parallel_gateway import ConditionalParallelGatewayHandler  # noqa
    from .converge_gateway import ConvergeGatewayHandler  # noqa
    from .empty_end_event import EmptyEndEventHandler  # noqa
    from .empty_start_event import EmptyStartEventHandler  # noqa
    from .exclusive_gateway import ExclusiveGatewayHandler  # noqa
    from .executable_end_event import ExecutableEndEventHandler  # noqa
    from .parallel_gateway import ParallelGatewayHandler  # noqa
    from .service_activity import ServiceActivityHandler  # noqa
    from .subprocess import SubProcessHandler  # noqa

View File

@@ -1,137 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import json
import logging
from bamboo_engine.utils.boolrule import BoolRule
from bamboo_engine.template.template import Template
from bamboo_engine import states
from bamboo_engine.eri import NodeType, ProcessInfo
from bamboo_engine.context import Context
from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult
from bamboo_engine.utils.string import transform_escape_char
logger = logging.getLogger("bamboo_engine")
@register_handler(NodeType.ConditionalParallelGateway)
class ConditionalParallelGatewayHandler(NodeHandler):
    """
    Handler for conditional parallel gateway nodes.

    Evaluates every branch condition against the hydrated pipeline context and
    forks one child process for each branch whose condition holds; fails the
    node when no condition holds.
    """

    def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult:
        """
        Execute logic of the node.

        :param process_info: process information
        :type process_info: ProcessInfo
        :param loop: node re-entry count
        :type loop: int
        :param inner_loop: current pipeline re-entry count
        :type inner_loop: int
        :param version: execution version
        :type version: str
        :return: execution result
        :rtype: ExecuteResult
        """
        evaluations = [c.evaluation for c in self.node.conditions]
        top_pipeline_id = process_info.top_pipeline_id
        root_pipeline_id = process_info.root_pipeline_id
        root_pipeline_inputs = self._get_plain_inputs(root_pipeline_id)

        # resolve conditions references
        evaluation_refs = set()
        for e in evaluations:
            refs = Template(e).get_reference()
            evaluation_refs = evaluation_refs.union(refs)

        logger.info(
            "root_pipeline[%s] node(%s) evaluation original refs: %s",
            root_pipeline_id,
            self.node.id,
            evaluation_refs,
        )
        additional_refs = self.runtime.get_context_key_references(pipeline_id=top_pipeline_id, keys=evaluation_refs)
        evaluation_refs = evaluation_refs.union(additional_refs)

        logger.info(
            "root_pipeline[%s] node(%s) evaluation final refs: %s",
            root_pipeline_id,
            self.node.id,
            evaluation_refs,
        )
        context_values = self.runtime.get_context_values(pipeline_id=top_pipeline_id, keys=evaluation_refs)
        context = Context(self.runtime, context_values, root_pipeline_inputs)
        try:
            hydrated_context = {k: transform_escape_char(v) for k, v in context.hydrate(deformat=True).items()}
        except Exception as e:
            logger.exception(
                "root_pipeline[%s] node(%s) context hydrate error",
                root_pipeline_id,
                self.node.id,
            )
            return self._execute_fail("evaluation context hydrate failed(%s), check node log for details." % e)

        # check conditions
        fork_targets = []
        for c in self.node.conditions:
            resolved_evaluate = Template(c.evaluation).render(hydrated_context)
            logger.info(
                "root_pipeline[%s] node(%s) render evaluation %s: %s with %s",
                root_pipeline_id,
                self.node.id,
                c.evaluation,
                resolved_evaluate,
                hydrated_context,
            )
            try:
                result = BoolRule(resolved_evaluate).test()
                logger.info(
                    "root_pipeline[%s] node(%s) %s test result: %s",
                    root_pipeline_id,
                    self.node.id,
                    resolved_evaluate,
                    result,
                )
            except Exception as e:
                # test failed
                # BUGFIX: use the local rendered expression `resolved_evaluate`.
                # The condition object has no `resolved_evaluate` attribute, so
                # the previous `c.resolved_evaluate` raised AttributeError here
                # and masked the real evaluation error. This now matches
                # ExclusiveGatewayHandler's failure message.
                return self._execute_fail(
                    "evaluate[{}] fail with data[{}] message: {}".format(
                        resolved_evaluate, json.dumps(hydrated_context), e
                    )
                )
            else:
                if result:
                    fork_targets.append(c.target_id)

        # all miss
        if not fork_targets:
            return self._execute_fail("all conditions of branches are not meet")

        # fork: each hit branch runs in its own child process and converges at
        # this gateway's converge gateway
        from_to = {}
        for target in fork_targets:
            from_to[target] = self.node.converge_gateway_id

        dispatch_processes = self.runtime.fork(
            parent_id=process_info.process_id,
            root_pipeline_id=process_info.root_pipeline_id,
            pipeline_stack=process_info.pipeline_stack,
            from_to=from_to,
        )

        self.runtime.set_state(node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True)

        return ExecuteResult(
            should_sleep=True,
            schedule_ready=False,
            schedule_type=None,
            schedule_after=-1,
            dispatch_processes=dispatch_processes,
            next_node_id=None,
        )

View File

@@ -1,42 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from bamboo_engine import states
from bamboo_engine.eri import ProcessInfo, NodeType
from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult
@register_handler(NodeType.ConvergeGateway)
class ConvergeGatewayHandler(NodeHandler):
    """
    Handler for converge gateway nodes: marks the gateway as finished and
    advances to its single outgoing node.
    """

    def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult:
        """
        Execute logic of the node.

        :param process_info: process information
        :type process_info: ProcessInfo
        :param loop: node re-entry count
        :type loop: int
        :param inner_loop: current pipeline re-entry count
        :type inner_loop: int
        :param version: execution version
        :type version: str
        :return: execution result
        :rtype: ExecuteResult
        """
        # A converge gateway performs no work of its own: finish immediately
        # and continue with its outgoing node.
        self.runtime.set_state(node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True)

        next_node = self.node.target_nodes[0]
        return ExecuteResult(
            should_sleep=False,
            schedule_ready=False,
            schedule_type=None,
            schedule_after=-1,
            dispatch_processes=[],
            next_node_id=next_node,
        )

View File

@@ -1,135 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import logging
from bamboo_engine import states
from bamboo_engine.config import Settings
from bamboo_engine.eri import ProcessInfo, NodeType
from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult
from bamboo_engine.context import Context
from bamboo_engine.template.template import Template
logger = logging.getLogger("bamboo_engine")
@register_handler(NodeType.EmptyEndEvent)
class EmptyEndEventHandler(NodeHandler):
    # Handles empty end event nodes: writes the finishing pipeline's outputs
    # back into its context, then either terminates the current process (root
    # pipeline end) or returns control to the parent pipeline (subprocess end).
    def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult:
        """
        Execute logic of the node.

        :param process_info: process information
        :type process_info: ProcessInfo
        :param loop: node re-entry count
        :type loop: int
        :param inner_loop: current pipeline re-entry count
        :type inner_loop: int
        :param version: execution version
        :type version: str
        :return: execution result
        :rtype: ExecuteResult
        """
        root_pipeline_id = process_info.root_pipeline_id
        # pop the pipeline that is ending; an empty stack afterwards means the
        # root pipeline itself has finished
        pipeline_id = process_info.pipeline_stack.pop()
        root_pipeline_finished = len(process_info.pipeline_stack) == 0
        root_pipeline_inputs = self._get_plain_inputs(process_info.root_pipeline_id)
        if not root_pipeline_finished:
            # subprocess end: its state is needed below for loop counters
            subproc_state = self.runtime.get_state(pipeline_id)
        # write pipeline data
        context_outputs = self.runtime.get_context_outputs(pipeline_id)
        logger.info(
            "root_pipeline[%s] pipeline(%s) context outputs: %s",
            root_pipeline_id,
            pipeline_id,
            context_outputs,
        )
        context_values = self.runtime.get_context_values(pipeline_id=pipeline_id, keys=context_outputs)
        logger.info(
            "root_pipeline[%s] pipeline(%s) context values: %s",
            root_pipeline_id,
            pipeline_id,
            context_values,
        )
        # calculate the references of output values
        output_value_refs = set(Template([cv.value for cv in context_values]).get_reference())
        logger.info(
            "root_pipeline[%s] node(%s) outputs values refs: %s",
            root_pipeline_id,
            self.node.id,
            output_value_refs,
        )
        additional_refs = self.runtime.get_context_key_references(pipeline_id=pipeline_id, keys=output_value_refs)
        output_value_refs = output_value_refs.union(additional_refs)
        logger.info(
            "root_pipeline[%s] pipeline(%s) outputs values final refs: %s",
            root_pipeline_id,
            pipeline_id,
            output_value_refs,
        )
        context_values.extend(self.runtime.get_context_values(pipeline_id=pipeline_id, keys=output_value_refs))
        context = Context(self.runtime, context_values, root_pipeline_inputs)
        hydrated_context = context.hydrate(deformat=False)
        logger.info(
            "root_pipeline[%s] pipeline(%s) hydrated context: %s",
            root_pipeline_id,
            pipeline_id,
            hydrated_context,
        )
        outputs = {}
        for key in context_outputs:
            # fall back to the key itself when no hydrated value exists
            outputs[key] = hydrated_context.get(key, key)
        if not root_pipeline_finished:
            # expose the subprocess loop counters in its outputs
            outputs[self.LOOP_KEY] = subproc_state.loop + Settings.RERUN_INDEX_OFFSET
            outputs[self.INNER_LOOP_KEY] = subproc_state.inner_loop + Settings.RERUN_INDEX_OFFSET
        self.runtime.set_execution_data_outputs(node_id=pipeline_id, outputs=outputs)
        # finish both the end event node and the pipeline it terminates
        self.runtime.set_state(node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True)
        self.runtime.set_state(node_id=pipeline_id, to_state=states.FINISHED, set_archive_time=True)
        # root pipeline finish
        if root_pipeline_finished:
            return ExecuteResult(
                should_sleep=False,
                schedule_ready=False,
                schedule_type=None,
                schedule_after=-1,
                dispatch_processes=[],
                next_node_id=None,
                should_die=True,
            )
        # subprocess finish
        subprocess = self.runtime.get_node(pipeline_id)
        self.runtime.set_pipeline_stack(process_info.process_id, process_info.pipeline_stack)
        # extract subprocess outputs to parent context
        subprocess_outputs = self.runtime.get_data_outputs(pipeline_id)
        context.extract_outputs(
            pipeline_id=process_info.pipeline_stack[-1],
            data_outputs=subprocess_outputs,
            execution_data_outputs=outputs,
        )
        return ExecuteResult(
            should_sleep=False,
            schedule_ready=False,
            schedule_type=None,
            schedule_after=-1,
            dispatch_processes=[],
            next_node_id=subprocess.target_nodes[0],
        )

View File

@@ -1,81 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import logging
from bamboo_engine import states
from bamboo_engine.context import Context
from bamboo_engine.eri import ProcessInfo, NodeType, ContextValue, ContextValueType
from bamboo_engine.exceptions import NotFoundError
from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult
logger = logging.getLogger("bamboo_engine")
@register_handler(NodeType.EmptyStartEvent)
class EmptyStartEventHandler(NodeHandler):
    """
    Handler for empty start event nodes: optionally pre-renders configured
    context keys, then finishes and advances to the next node.
    """

    def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult:
        """
        Execute logic of the node.

        :param process_info: process information
        :type process_info: ProcessInfo
        :param loop: node re-entry count
        :type loop: int
        :param inner_loop: current pipeline re-entry count
        :type inner_loop: int
        :param version: execution version
        :type version: str
        :return: execution result
        :rtype: ExecuteResult
        """
        # Only start events that carry node data need the pre-render step.
        need_pre_render = True
        try:
            data = self.runtime.get_data(self.node.id)
        except NotFoundError:
            need_pre_render = False

        if need_pre_render:
            top_pipeline_id = process_info.top_pipeline_id
            root_pipeline_inputs = self._get_plain_inputs(process_info.root_pipeline_id)
            pre_render_keys = data.inputs["pre_render_keys"].value
            logger.info("top_pipeline({}) pre_render_keys are: {}".format(top_pipeline_id, ",".join(pre_render_keys)))

            refs = self.runtime.get_context_key_references(pipeline_id=top_pipeline_id, keys=set(pre_render_keys))
            context_values = self.runtime.get_context_values(
                pipeline_id=top_pipeline_id, keys=set(pre_render_keys).union(refs)
            )
            context = Context(self.runtime, context_values, root_pipeline_inputs)
            hydrated_context = context.hydrate(deformat=False)

            # Replace each pre-render key with its fully resolved plain value.
            upsert_context_dict = {
                context_value.key: ContextValue(
                    key=context_value.key,
                    type=ContextValueType.PLAIN,
                    value=hydrated_context[context_value.key],
                )
                for context_value in context_values
                if context_value.key in pre_render_keys
            }
            logger.info(f"top_pipeline({top_pipeline_id}) pre_render_keys results are: {upsert_context_dict}")
            self.runtime.upsert_plain_context_values(top_pipeline_id, upsert_context_dict)

        self.runtime.set_state(node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True)

        return ExecuteResult(
            should_sleep=False,
            schedule_ready=False,
            schedule_type=None,
            schedule_after=-1,
            dispatch_processes=[],
            next_node_id=self.node.target_nodes[0],
        )

View File

@@ -1,138 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import json
import logging
from bamboo_engine import states
from bamboo_engine.context import Context
from bamboo_engine.template import Template
from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult
from bamboo_engine.utils.boolrule import BoolRule
from bamboo_engine.eri import NodeType, ProcessInfo
from bamboo_engine.utils.string import transform_escape_char
logger = logging.getLogger("bamboo_engine")
@register_handler(NodeType.ExclusiveGateway)
class ExclusiveGatewayHandler(NodeHandler):
    # Evaluates every branch condition and advances to the single branch whose
    # condition holds; exactly one condition must be met, otherwise it fails.
    def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult:
        """
        Execute logic of the node.

        :param process_info: process information
        :type process_info: ProcessInfo
        :param loop: node re-entry count
        :type loop: int
        :param inner_loop: current pipeline re-entry count
        :type inner_loop: int
        :param version: execution version
        :type version: str
        :return: execution result
        :rtype: ExecuteResult
        """
        evaluations = [c.evaluation for c in self.node.conditions]
        top_pipeline_id = process_info.top_pipeline_id
        root_pipeline_id = process_info.root_pipeline_id
        root_pipeline_inputs = self._get_plain_inputs(process_info.root_pipeline_id)
        # resolve conditions references
        evaluation_refs = set()
        for e in evaluations:
            refs = Template(e).get_reference()
            evaluation_refs = evaluation_refs.union(refs)
        logger.info(
            "root_pipeline[%s] node(%s) evaluation original refs: %s",
            root_pipeline_id,
            self.node.id,
            evaluation_refs,
        )
        additional_refs = self.runtime.get_context_key_references(pipeline_id=top_pipeline_id, keys=evaluation_refs)
        evaluation_refs = evaluation_refs.union(additional_refs)
        logger.info(
            "root_pipeline[%s] node(%s) evaluation final refs: %s",
            root_pipeline_id,
            self.node.id,
            evaluation_refs,
        )
        context_values = self.runtime.get_context_values(pipeline_id=top_pipeline_id, keys=evaluation_refs)
        logger.info(
            "root_pipeline[%s] node(%s) evaluation context values: %s",
            root_pipeline_id,
            self.node.id,
            context_values,
        )
        context = Context(self.runtime, context_values, root_pipeline_inputs)
        try:
            hydrated_context = {k: transform_escape_char(v) for k, v in context.hydrate(deformat=True).items()}
        except Exception as e:
            logger.exception(
                "root_pipeline[%s] node(%s) context hydrate error",
                root_pipeline_id,
                self.node.id,
            )
            return self._execute_fail("evaluation context hydrate failed(%s), check node log for details." % e)
        # check conditions
        meet_targets = []
        meet_conditions = []
        for c in self.node.conditions:
            resolved_evaluate = Template(c.evaluation).render(hydrated_context)
            logger.info(
                "root_pipeline[%s] node(%s) render evaluation %s: %s with %s",
                root_pipeline_id,
                self.node.id,
                c.evaluation,
                resolved_evaluate,
                hydrated_context,
            )
            try:
                result = BoolRule(resolved_evaluate).test()
                logger.info(
                    "root_pipeline[%s] node(%s) %s test result: %s",
                    root_pipeline_id,
                    self.node.id,
                    resolved_evaluate,
                    result,
                )
            except Exception as e:
                # test failed
                return self._execute_fail(
                    "evaluate[{}] fail with data[{}] message: {}".format(
                        resolved_evaluate, json.dumps(hydrated_context), e
                    )
                )
            else:
                if result:
                    meet_conditions.append(c.name)
                    meet_targets.append(c.target_id)
        # all miss
        if not meet_targets:
            return self._execute_fail("all conditions of branches are not meet")
        # multiple branch hit
        if len(meet_targets) != 1:
            return self._execute_fail("multiple conditions meet: {}".format(meet_conditions))
        self.runtime.set_state(node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True)
        return ExecuteResult(
            should_sleep=False,
            schedule_ready=False,
            schedule_type=None,
            schedule_after=-1,
            dispatch_processes=[],
            next_node_id=meet_targets[0],
        )

View File

@@ -1,75 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import copy
import logging
import traceback
from bamboo_engine import states
from bamboo_engine.eri import ProcessInfo, NodeType
from bamboo_engine.handler import register_handler, ExecuteResult
from .empty_end_event import EmptyEndEventHandler
logger = logging.getLogger("bamboo_engine")
@register_handler(NodeType.ExecutableEndEvent)
class ExecutableEndEventHandler(EmptyEndEventHandler):
    # Like an empty end event, but first runs a user-registered executable end
    # event hook; if the hook raises, the node is failed instead of finishing.
    def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult:
        """
        Execute logic of the node.

        :param process_info: process information
        :type process_info: ProcessInfo
        :param loop: node re-entry count
        :type loop: int
        :param inner_loop: current pipeline re-entry count
        :type inner_loop: int
        :param version: execution version
        :type version: str
        :return: execution result
        :rtype: ExecuteResult
        """
        logger.info(
            "root_pipeline[%s] node(%s) executable end event: %s",
            process_info.root_pipeline_id,
            self.node.id,
            self.node,
        )
        event = self.runtime.get_executable_end_event(code=self.node.code)
        try:
            # hand the hook a shallow copy of the pipeline stack
            event.execute(
                pipeline_stack=copy.copy(process_info.pipeline_stack),
                root_pipeline_id=process_info.root_pipeline_id,
            )
        except Exception:
            # hook raised: record the traceback in outputs, fail the node and
            # put the process to sleep
            ex_data = traceback.format_exc()
            logger.warning(
                "root_pipeline[%s] node(%s) executable end event execute raise: %s",
                process_info.root_pipeline_id,
                self.node.id,
                ex_data,
            )
            self.runtime.set_execution_data_outputs(node_id=self.node.id, outputs={"ex_data": ex_data})
            self.runtime.set_state(node_id=self.node.id, to_state=states.FAILED, set_archive_time=True)
            return ExecuteResult(
                should_sleep=True,
                schedule_ready=False,
                schedule_type=None,
                schedule_after=-1,
                dispatch_processes=[],
                next_node_id=None,
            )
        # hook succeeded: fall back to the empty end event behavior
        return super().execute(process_info=process_info, loop=loop, inner_loop=inner_loop, version=version)

View File

@@ -1,53 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from bamboo_engine import states
from bamboo_engine.eri import ProcessInfo, NodeType
from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult
@register_handler(NodeType.ParallelGateway)
class ParallelGatewayHandler(NodeHandler):
    """
    Handler for parallel gateway nodes: forks one child process per outgoing
    branch and puts the current process to sleep.
    """

    def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult:
        """
        Execute logic of the node.

        :param process_info: process information
        :type process_info: ProcessInfo
        :param loop: node re-entry count
        :type loop: int
        :param inner_loop: current pipeline re-entry count
        :type inner_loop: int
        :param version: execution version
        :type version: str
        :return: execution result
        :rtype: ExecuteResult
        """
        # Every outgoing branch converges at this gateway's converge gateway.
        from_to = {target: self.node.converge_gateway_id for target in self.node.target_nodes}

        dispatch_processes = self.runtime.fork(
            parent_id=process_info.process_id,
            root_pipeline_id=process_info.root_pipeline_id,
            pipeline_stack=process_info.pipeline_stack,
            from_to=from_to,
        )

        self.runtime.set_state(node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True)

        return ExecuteResult(
            should_sleep=True,
            schedule_ready=False,
            schedule_type=None,
            schedule_after=-1,
            dispatch_processes=dispatch_processes,
            next_node_id=None,
        )

View File

@@ -1,508 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import logging
import traceback
from typing import Optional
from bamboo_engine import states
from bamboo_engine.config import Settings
from bamboo_engine.context import Context
from bamboo_engine.template import Template
from bamboo_engine.eri import (
ProcessInfo,
ContextValue,
ContextValueType,
ExecutionData,
CallbackData,
ScheduleType,
NodeType,
Schedule,
)
from bamboo_engine.handler import (
register_handler,
NodeHandler,
ExecuteResult,
ScheduleResult,
)
logger = logging.getLogger("bamboo_engine")
@register_handler(NodeType.ServiceActivity)
class ServiceActivityHandler(NodeHandler):
    """
    Handler for service activity nodes.

    Every set_state call here passes the state version so that work queued
    before a user force-fails the node is abandoned instead of applied.
    """

    def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult:
        """
        Execute logic of a service activity node: render its inputs against
        the pipeline context, run the service's pre_execute/execute, and
        report whether the node needs scheduling afterwards.

        :param process_info: process information
        :type process_info: ProcessInfo
        :param loop: node re-entry count
        :param inner_loop: re-entry count within the current subprocess
        :param version: state version this execution belongs to
        :return: execution result
        :rtype: ExecuteResult
        """
        top_pipeline_id = process_info.top_pipeline_id
        root_pipeline_id = process_info.root_pipeline_id
        data = self.runtime.get_data(self.node.id)
        root_pipeline_inputs = self._get_plain_inputs(process_info.root_pipeline_id)
        need_render_inputs = data.need_render_inputs()
        render_escape_inputs = data.render_escape_inputs()
        logger.info(
            "root_pipeline[%s] node(%s) activity execute data: %s, root inputs: %s",
            root_pipeline_id,
            self.node.id,
            data,
            root_pipeline_inputs,
        )

        # resolve inputs context references
        inputs_refs = set(Template(need_render_inputs).get_reference())
        logger.info(
            "root_pipeline[%s] node(%s) activity original refs: %s",
            root_pipeline_id,
            self.node.id,
            inputs_refs,
        )
        additional_refs = self.runtime.get_context_key_references(pipeline_id=top_pipeline_id, keys=inputs_refs)
        inputs_refs = inputs_refs.union(additional_refs)
        logger.info(
            "root_pipeline[%s] node(%s) activity final refs: %s",
            root_pipeline_id,
            self.node.id,
            inputs_refs,
        )

        # prepare context
        context_values = self.runtime.get_context_values(pipeline_id=top_pipeline_id, keys=inputs_refs)

        # pre extract loop outputs
        loop_value = loop + Settings.RERUN_INDEX_OFFSET
        need_render_inputs[self.LOOP_KEY] = loop_value
        if self.LOOP_KEY in data.outputs:
            loop_output_key = data.outputs[self.LOOP_KEY]
            context_values.append(ContextValue(key=loop_output_key, type=ContextValueType.PLAIN, value=loop_value))

        # pre extract inner_loop outputs
        inner_loop_value = inner_loop + Settings.RERUN_INDEX_OFFSET
        need_render_inputs[self.INNER_LOOP_KEY] = inner_loop_value
        if self.INNER_LOOP_KEY in data.outputs:
            inner_loop_output_key = data.outputs[self.INNER_LOOP_KEY]
            context_values.append(
                ContextValue(
                    key=inner_loop_output_key,
                    type=ContextValueType.PLAIN,
                    value=inner_loop_value,
                )
            )
        logger.info(
            "root_pipeline[%s] node(%s) activity context values: %s",
            root_pipeline_id,
            self.node.id,
            context_values,
        )
        context = Context(self.runtime, context_values, root_pipeline_inputs)

        # hydrate will call user code, use try to catch unexpected error
        try:
            hydrated_context = context.hydrate(deformat=True)
        except Exception as e:
            logger.exception(
                "root_pipeline[%s] node(%s) activity context hydrate error",
                root_pipeline_id,
                self.node.id,
            )
            service_data = ExecutionData(inputs=data.plain_inputs(), outputs={})
            service_data.outputs.ex_data = "inputs hydrate failed(%s), check node log for details" % e
            service_data.outputs._result = False
            service_data.outputs._loop = loop
            service_data.outputs._inner_loop = inner_loop
            self.runtime.set_execution_data(node_id=self.node.id, data=service_data)
            self.runtime.set_state(
                node_id=self.node.id,
                version=version,
                to_state=states.FAILED,
                set_archive_time=True,
            )
            return ExecuteResult(
                should_sleep=True,
                schedule_ready=False,
                schedule_type=None,
                schedule_after=-1,
                dispatch_processes=[],
                next_node_id=None,
            )

        logger.info(
            "root_pipeline[%s] node(%s) actvity hydrated context: %s",
            root_pipeline_id,
            self.node.id,
            hydrated_context,
        )

        # resolve inputs
        execute_inputs = Template(need_render_inputs).render(hydrated_context)
        execute_inputs.update(render_escape_inputs)

        # data prepare
        service_data = ExecutionData(inputs=execute_inputs, outputs={})
        root_pipeline_data = ExecutionData(inputs=root_pipeline_inputs, outputs={})

        # execute
        service = self.runtime.get_service(code=self.node.code, version=self.node.version)
        service.setup_runtime_attributes(
            id=self.node.id,
            version=version,
            top_pipeline_id=top_pipeline_id,
            root_pipeline_id=root_pipeline_id,
            loop=loop,
            inner_loop=inner_loop,
        )

        # start monitor
        monitoring = False
        if self.node.timeout is not None:
            monitoring = True
            self.runtime.start_timeout_monitor(
                process_id=process_info.process_id,
                node_id=self.node.id,
                version=version,
                timeout=self.node.timeout,
            )

        # pre_execute and execute
        logger.debug(
            "root_pipeline[%s] node(%s) service data before execute: %s",
            root_pipeline_id,  # fixed: pipeline id and node id arguments were swapped
            self.node.id,
            service_data,
        )
        logger.debug(
            "root_pipeline[%s] node(%s) root pipeline data before execute: %s",
            root_pipeline_id,  # fixed: pipeline id and node id arguments were swapped
            self.node.id,
            root_pipeline_data,
        )
        execute_success = False
        try:
            service.pre_execute(data=service_data, root_pipeline_data=root_pipeline_data)
            execute_success = service.execute(data=service_data, root_pipeline_data=root_pipeline_data)
        except Exception:
            ex_data = traceback.format_exc()
            service_data.outputs.ex_data = ex_data
            logger.warning("root_pipeline[%s]service execute fail: %s", process_info.root_pipeline_id, ex_data)
        logger.debug("root_pipeline[%s] service data after execute: %s", root_pipeline_id, service_data)
        service_data.outputs._result = execute_success
        service_data.outputs._loop = loop
        service_data.outputs._inner_loop = inner_loop

        # execute success
        if execute_success:
            need_schedule = service.need_schedule()
            next_node_id = None
            # the node is only finished here when no further scheduling is needed
            if not need_schedule:
                if monitoring:
                    self.runtime.stop_timeout_monitor(
                        process_id=process_info.process_id,
                        node_id=self.node.id,
                        version=version,
                        timeout=self.node.timeout,
                    )
                self.runtime.set_state(
                    node_id=self.node.id,
                    version=version,
                    to_state=states.FINISHED,
                    set_archive_time=True,
                )
                context.extract_outputs(
                    pipeline_id=top_pipeline_id,
                    data_outputs=data.outputs,
                    execution_data_outputs=service_data.outputs,
                )
                next_node_id = self.node.target_nodes[0]
            self.runtime.set_execution_data(node_id=self.node.id, data=service_data)
            return ExecuteResult(
                should_sleep=need_schedule,
                schedule_ready=need_schedule,
                schedule_type=service.schedule_type(),
                schedule_after=service.schedule_after(
                    schedule=None,
                    data=service_data,
                    root_pipeline_data=root_pipeline_data,
                ),
                dispatch_processes=[],
                next_node_id=next_node_id,
            )

        # pre_execute failed or execute failed
        if monitoring:
            self.runtime.stop_timeout_monitor(
                process_id=process_info.process_id,
                node_id=self.node.id,
                version=version,
                timeout=self.node.timeout,
            )
        if not self.node.error_ignorable:
            self.runtime.set_state(
                node_id=self.node.id,
                version=version,
                to_state=states.FAILED,
                set_archive_time=True,
            )
            self.runtime.set_execution_data(node_id=self.node.id, data=service_data)
            context.extract_outputs(
                pipeline_id=top_pipeline_id,
                data_outputs=data.outputs,
                execution_data_outputs=service_data.outputs,
            )
            return ExecuteResult(
                should_sleep=True,
                schedule_ready=False,
                schedule_type=None,
                schedule_after=-1,
                dispatch_processes=[],
                next_node_id=None,
            )

        # pre_execute failed or execute failed and error ignore
        self.runtime.set_state(
            node_id=self.node.id,
            version=version,
            to_state=states.FINISHED,
            set_archive_time=True,
            error_ignored=True,
        )
        self.runtime.set_execution_data(node_id=self.node.id, data=service_data)
        context.extract_outputs(
            pipeline_id=top_pipeline_id,
            data_outputs=data.outputs,
            execution_data_outputs=service_data.outputs,
        )
        return ExecuteResult(
            should_sleep=False,
            schedule_ready=False,
            schedule_type=None,
            schedule_after=-1,
            dispatch_processes=[],
            next_node_id=self.node.target_nodes[0],
        )

    def _finish_schedule(
        self,
        process_info: ProcessInfo,
        schedule: Schedule,
        data_outputs: dict,
        execution_data: ExecutionData,
        error_ignored: bool,
        root_pipeline_inputs: dict,
    ) -> ScheduleResult:
        """
        Common tail of a completed schedule: stop the timeout monitor (if
        any), mark the node FINISHED and extract outputs into the context.
        """
        if self.node.timeout is not None:
            self.runtime.stop_timeout_monitor(
                process_id=process_info.process_id,
                node_id=self.node.id,
                version=schedule.version,
                timeout=self.node.timeout,
            )
        self.runtime.set_state(
            node_id=self.node.id,
            version=schedule.version,
            to_state=states.FINISHED,
            set_archive_time=True,
            error_ignored=error_ignored,
        )
        context = Context(self.runtime, [], root_pipeline_inputs)
        context.extract_outputs(
            pipeline_id=process_info.top_pipeline_id,
            data_outputs=data_outputs,
            execution_data_outputs=execution_data.outputs,
        )
        return ScheduleResult(
            has_next_schedule=False,
            schedule_after=-1,
            schedule_done=True,
            next_node_id=self.node.target_nodes[0],
        )

    def schedule(
        self,
        process_info: ProcessInfo,
        loop: int,
        inner_loop: int,
        schedule: Schedule,
        callback_data: Optional[CallbackData] = None,
    ) -> ScheduleResult:
        """
        Schedule logic of a service activity node: run one service.schedule
        round and translate its outcome into a ScheduleResult.

        :param process_info: process information
        :type process_info: ProcessInfo
        :param loop: node re-entry count
        :param inner_loop: re-entry count within the current subprocess
        :param schedule: Schedule instance
        :type schedule: Schedule
        :param callback_data: callback data, defaults to None
        :type callback_data: Optional[CallbackData], optional
        :return: schedule result
        :rtype: ScheduleResult
        """
        # data prepare
        top_pipeline_id = process_info.top_pipeline_id
        root_pipeline_id = process_info.root_pipeline_id
        data_outputs = self.runtime.get_data_outputs(self.node.id)
        service_data = self.runtime.get_execution_data(self.node.id)
        root_pipeline_inputs = self._get_plain_inputs(root_pipeline_id)
        root_pipeline_data = ExecutionData(inputs=root_pipeline_inputs, outputs={})
        logger.info(
            "root_pipeline[%s] node(%s) activity schedule data: %s, root inputs: %s",
            root_pipeline_id,
            self.node.id,
            service_data,
            root_pipeline_inputs,
        )

        # schedule
        service = self.runtime.get_service(code=self.node.code, version=self.node.version)
        service.setup_runtime_attributes(
            id=self.node.id,
            version=schedule.version,
            top_pipeline_id=top_pipeline_id,
            root_pipeline_id=root_pipeline_id,
            loop=loop,
            inner_loop=inner_loop,
        )
        schedule_success = False
        schedule.times += 1
        try:
            schedule_success = service.schedule(
                schedule=schedule,
                data=service_data,
                root_pipeline_data=root_pipeline_data,
                callback_data=callback_data,
            )
        except Exception:
            # user code raised: record traceback, treat as schedule failure
            service_data.outputs.ex_data = traceback.format_exc()
        service_data.outputs._result = schedule_success
        service_data.outputs._loop = loop
        service_data.outputs._inner_loop = inner_loop
        self.runtime.add_schedule_times(schedule.id)
        self.runtime.set_execution_data(node_id=self.node.id, data=service_data)
        monitoring = self.node.timeout is not None
        schedule_type = service.schedule_type()

        # schedule success
        if schedule_success:
            if schedule_type == ScheduleType.CALLBACK:
                return self._finish_schedule(
                    process_info=process_info,
                    schedule=schedule,
                    data_outputs=data_outputs,
                    execution_data=service_data,
                    error_ignored=False,
                    root_pipeline_inputs=root_pipeline_inputs,
                )
            else:
                is_schedule_done = service.is_schedule_done()
                # poll or multi-callback finished
                if is_schedule_done:
                    return self._finish_schedule(
                        process_info=process_info,
                        schedule=schedule,
                        data_outputs=data_outputs,
                        execution_data=service_data,
                        error_ignored=False,
                        root_pipeline_inputs=root_pipeline_inputs,
                    )
                # not done yet: keep the timeout monitor running and poll again
                has_next_schedule = schedule_type == ScheduleType.POLL
                return ScheduleResult(
                    has_next_schedule=has_next_schedule,
                    schedule_after=service.schedule_after(
                        schedule=schedule,
                        data=service_data,
                        root_pipeline_data=root_pipeline_data,
                    ),
                    schedule_done=False,
                    next_node_id=None,
                )

        if monitoring:
            self.runtime.stop_timeout_monitor(
                process_id=process_info.process_id,
                node_id=self.node.id,
                version=schedule.version,
                timeout=self.node.timeout,
            )

        # schedule fail
        if not self.node.error_ignorable:
            self.runtime.set_state(
                node_id=self.node.id,
                version=schedule.version,
                to_state=states.FAILED,
                set_archive_time=True,
            )
            context = Context(self.runtime, [], root_pipeline_inputs)
            context.extract_outputs(
                pipeline_id=process_info.top_pipeline_id,
                data_outputs=data_outputs,
                execution_data_outputs=service_data.outputs,
            )
            return ScheduleResult(
                has_next_schedule=False,
                schedule_after=-1,
                schedule_done=False,
                next_node_id=None,
            )

        # schedule fail and error ignore
        return self._finish_schedule(
            process_info=process_info,
            schedule=schedule,
            data_outputs=data_outputs,
            execution_data=service_data,
            error_ignored=True,
            root_pipeline_inputs=root_pipeline_inputs,
        )

View File

@@ -1,129 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import logging
from bamboo_engine.context import Context
from bamboo_engine.config import Settings
from bamboo_engine.template import Template
from bamboo_engine.eri import ProcessInfo, ContextValue, ContextValueType, NodeType
from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult
logger = logging.getLogger("bamboo_engine")
@register_handler(NodeType.SubProcess)
class SubProcessHandler(NodeHandler):
    """Handler for subprocess nodes."""

    def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult:
        """
        Execute logic of a subprocess node: render the subprocess inputs
        against the parent pipeline context, inject them into the subprocess
        context, push the subprocess onto the pipeline stack and descend
        into its start event.

        :param process_info: process information
        :type process_info: ProcessInfo
        :param loop: node re-entry count
        :param inner_loop: re-entry count within the current subprocess
        :param version: state version of this execution
        :return: execution result
        :rtype: ExecuteResult
        """
        data = self.runtime.get_data(self.node.id)
        root_pipeline_inputs = self._get_plain_inputs(process_info.root_pipeline_id)
        need_render_inputs = data.need_render_inputs()
        render_escape_inputs = data.render_escape_inputs()
        top_pipeline_id = process_info.top_pipeline_id
        root_pipeline_id = process_info.root_pipeline_id
        logger.info(
            "root_pipeline[%s] node(%s) subprocess data: %s",
            root_pipeline_id,
            self.node.id,
            data,
        )
        # reset inner_loop of nodes in subprocess
        self.runtime.reset_children_state_inner_loop(self.node.id)
        # resolve inputs context references
        inputs_refs = Template(need_render_inputs).get_reference()
        logger.info(
            "root_pipeline[%s] node(%s) subprocess original refs: %s",
            root_pipeline_id,
            self.node.id,
            inputs_refs,
        )
        additional_refs = self.runtime.get_context_key_references(pipeline_id=top_pipeline_id, keys=inputs_refs)
        inputs_refs = inputs_refs.union(additional_refs)
        logger.info(
            "root_pipeline[%s] node(%s) subprocess final refs: %s",
            root_pipeline_id,
            self.node.id,
            inputs_refs,
        )
        # prepare context
        context_values = self.runtime.get_context_values(pipeline_id=top_pipeline_id, keys=inputs_refs)
        # pre extract loop outputs
        loop_value = loop + Settings.RERUN_INDEX_OFFSET
        if self.LOOP_KEY in data.outputs:
            loop_output_key = data.outputs[self.LOOP_KEY]
            context_values.append(
                ContextValue(
                    key=loop_output_key,
                    type=ContextValueType.PLAIN,
                    value=loop_value,
                )
            )
        logger.info(
            "root_pipeline[%s] node(%s) subprocess parent context values: %s",
            root_pipeline_id,
            self.node.id,
            context_values,
        )
        context = Context(self.runtime, context_values, root_pipeline_inputs)
        hydrated_context = context.hydrate(deformat=True)
        logger.info(
            "root_pipeline[%s] node(%s) subprocess parent hydrated context: %s",
            root_pipeline_id,
            self.node.id,
            hydrated_context,
        )
        # resolve inputs
        subprocess_inputs = Template(need_render_inputs).render(hydrated_context)
        subprocess_inputs.update(render_escape_inputs)
        # rendered inputs become plain context values inside the subprocess
        sub_context_values = {
            key: ContextValue(key=key, type=ContextValueType.PLAIN, value=value)
            for key, value in subprocess_inputs.items()
        }
        logger.info(
            "root_pipeline[%s] node(%s) subprocess inject context: %s",
            root_pipeline_id,
            self.node.id,
            sub_context_values,
        )
        # update subprocess context, inject subprocess data
        self.runtime.upsert_plain_context_values(self.node.id, sub_context_values)
        process_info.pipeline_stack.append(self.node.id)
        self.runtime.set_pipeline_stack(process_info.process_id, process_info.pipeline_stack)
        return ExecuteResult(
            should_sleep=False,
            schedule_ready=False,
            schedule_type=None,
            schedule_after=-1,
            dispatch_processes=[],
            next_node_id=self.node.start_event_id,
        )

View File

@@ -1,62 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
# 引擎执行 local
from typing import Optional
from werkzeug.local import Local
from .utils.object import Representable
# Per-process/thread/coroutine storage for the currently handled node info.
_local = Local()
class CurrentNodeInfo(Representable):
    """Snapshot of the node a process/thread/coroutine is currently handling."""

    def __init__(self, node_id: str, version: str, loop: int):
        self.node_id = node_id  # node ID
        self.version = version  # state version of the node
        self.loop = loop  # re-entry (loop) count
def set_node_info(node_info: CurrentNodeInfo):
    """
    Store the current node information into this process/thread/coroutine's
    Local.

    :param node_info: node ID, state version and loop count of the node
        currently being handled
    :type node_info: CurrentNodeInfo
    """
    _local.current_node_info = node_info
def get_node_info() -> Optional[CurrentNodeInfo]:
    """
    Fetch the node info currently being handled by this
    process/thread/coroutine, or None if nothing was set.

    :return: current node info, if any
    :rtype: Optional[CurrentNodeInfo]
    """
    return getattr(_local, "current_node_info", None)
def clear_node_info():
    """
    Reset this process/thread/coroutine's Local current-node info to None.
    """
    _local.current_node_info = None

View File

@@ -1,194 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import os
import time
from functools import wraps
from prometheus_client import Gauge, Histogram
from .utils.host import get_hostname
# Hostname resolved once at import time; used as the metric "hostname" label.
HOST_NAME = get_hostname()
def decode_buckets(buckets_list):
    """Parse a comma-separated string of numbers into a list of floats."""
    return list(map(float, buckets_list.split(",")))
def get_histogram_buckets_from_evn(env_name):
    """
    Resolve histogram buckets from the environment variable ``env_name``,
    falling back to the prometheus-client defaults when it is unset.
    """
    raw = os.environ.get(env_name)
    if raw is not None:
        return decode_buckets(raw)
    if hasattr(Histogram, "DEFAULT_BUCKETS"):  # pragma: no cover
        return Histogram.DEFAULT_BUCKETS
    # pragma: no cover
    # prometheus-client < 0.3.0 does not expose its default buckets,
    # so replicate them here.
    return (
        0.005,
        0.01,
        0.025,
        0.05,
        0.075,
        0.1,
        0.25,
        0.5,
        0.75,
        1.0,
        2.5,
        5.0,
        7.5,
        10.0,
        float("inf"),
    )
def setup_gauge(*gauges):
    """
    Decorator factory: while the wrapped function runs, each gauge's
    hostname-labelled series is incremented, and decremented on exit
    (normal return or exception).
    """

    def decorator(func):
        @wraps(func)
        def inner(*args, **kwargs):
            for gauge in gauges:
                gauge.labels(hostname=HOST_NAME).inc(1)
            try:
                return func(*args, **kwargs)
            finally:
                for gauge in gauges:
                    gauge.labels(hostname=HOST_NAME).dec(1)

        return inner

    return decorator
def setup_histogram(*histograms):
    """
    Decorator factory: observe the wrapped function's wall-clock duration
    into each histogram's hostname-labelled series, including on exception.
    """

    def decorator(func):
        @wraps(func)
        def inner(*args, **kwargs):
            started_at = time.time()
            try:
                return func(*args, **kwargs)
            finally:
                for hist in histograms:
                    hist.labels(hostname=HOST_NAME).observe(time.time() - started_at)

        return inner

    return decorator
# engine metrics
# Gauges track in-flight work; histograms track durations (bucket boundaries
# are overridable via the *_BUCKETS environment variables).
ENGINE_RUNNING_PROCESSES = Gauge("engine_running_processes", "count running state processes", labelnames=["hostname"])
ENGINE_RUNNING_SCHEDULES = Gauge("engine_running_schedules", "count running state schedules", labelnames=["hostname"])
ENGINE_PROCESS_RUNNING_TIME = Histogram(
    "engine_process_running_time",
    "time spent running process",
    buckets=get_histogram_buckets_from_evn("ENGINE_PROCESS_RUNNING_TIME_BUCKETS"),
    labelnames=["hostname"],
)
ENGINE_SCHEDULE_RUNNING_TIME = Histogram(
    "engine_schedule_running_time",
    "time spent running schedule",
    buckets=get_histogram_buckets_from_evn("ENGINE_SCHEDULE_RUNNING_TIME_BUCKETS"),
    labelnames=["hostname"],
)
# per-node-type timings
ENGINE_NODE_EXECUTE_TIME = Histogram(
    "engine_node_execute_time",
    "time spent executing node",
    buckets=get_histogram_buckets_from_evn("ENGINE_NODE_EXECUTE_TIME_BUCKETS"),
    labelnames=["type", "hostname"],
)
ENGINE_NODE_SCHEDULE_TIME = Histogram(
    "engine_node_schedule_time",
    "time spent scheduling node",
    buckets=get_histogram_buckets_from_evn("ENGINE_NODE_SCHEDULE_TIME_BUCKETS"),
    labelnames=["type", "hostname"],
)
# runtime metrics
# Timings of the engine-runtime interface (ERI) data-access operations.
ENGINE_RUNTIME_CONTEXT_VALUE_READ_TIME = Histogram(
    "engine_runtime_context_value_read_time", "time spent reading context value", labelnames=["hostname"]
)
ENGINE_RUNTIME_CONTEXT_REF_READ_TIME = Histogram(
    "engine_runtime_context_ref_read_time", "time spent reading context value reference", labelnames=["hostname"]
)
ENGINE_RUNTIME_CONTEXT_VALUE_UPSERT_TIME = Histogram(
    "engine_runtime_context_value_upsert_time", "time spent upserting context value", labelnames=["hostname"]
)
ENGINE_RUNTIME_DATA_INPUTS_READ_TIME = Histogram(
    "engine_runtime_data_inputs_read_time", "time spent reading node data inputs", labelnames=["hostname"]
)
ENGINE_RUNTIME_DATA_OUTPUTS_READ_TIME = Histogram(
    "engine_runtime_data_outputs_read_time", "time spent reading node data outputs", labelnames=["hostname"]
)
ENGINE_RUNTIME_DATA_READ_TIME = Histogram(
    "engine_runtime_data_read_time", "time spent reading node data inputs and outputs", labelnames=["hostname"]
)
ENGINE_RUNTIME_EXEC_DATA_INPUTS_READ_TIME = Histogram(
    "engine_runtime_exec_data_inputs_read_time",
    "time spent reading node execution data inputs",
    labelnames=["hostname"],
)
ENGINE_RUNTIME_EXEC_DATA_OUTPUTS_READ_TIME = Histogram(
    "engine_runtime_exec_data_outputs_read_time",
    "time spent reading node execution data outputs",
    labelnames=["hostname"],
)
ENGINE_RUNTIME_EXEC_DATA_READ_TIME = Histogram(
    "engine_runtime_exec_data_read_time",
    "time spent reading node execution data inputs and outputs",
    labelnames=["hostname"],
)
ENGINE_RUNTIME_EXEC_DATA_INPUTS_WRITE_TIME = Histogram(
    "engine_runtime_exec_data_inputs_write_time",
    "time spent writing node execution data inputs",
    labelnames=["hostname"],
)
ENGINE_RUNTIME_EXEC_DATA_OUTPUTS_WRITE_TIME = Histogram(
    "engine_runtime_exec_data_outputs_write_time",
    "time spent writing node execution data outputs",
    labelnames=["hostname"],
)
ENGINE_RUNTIME_EXEC_DATA_WRITE_TIME = Histogram(
    "engine_runtime_exec_data_write_time",
    "time spent writing node execution data inputs and outputs",
    labelnames=["hostname"],
)
ENGINE_RUNTIME_CALLBACK_DATA_READ_TIME = Histogram(
    "engine_runtime_callback_data_read_time", "time spent reading node callback data", labelnames=["hostname"]
)
ENGINE_RUNTIME_SCHEDULE_READ_TIME = Histogram(
    "engine_runtime_schedule_read_time", "time spent reading schedule", labelnames=["hostname"]
)
ENGINE_RUNTIME_SCHEDULE_WRITE_TIME = Histogram(
    "engine_runtime_schedule_write_time", "time spent writing schedule", labelnames=["hostname"]
)
ENGINE_RUNTIME_STATE_READ_TIME = Histogram(
    "engine_runtime_state_read_time", "time spent reading state", labelnames=["hostname"]
)
ENGINE_RUNTIME_STATE_WRITE_TIME = Histogram(
    "engine_runtime_state_write_time", "time spent writing state", labelnames=["hostname"]
)
ENGINE_RUNTIME_NODE_READ_TIME = Histogram(
    "engine_runtime_node_read_time", "time spent reading node", labelnames=["hostname"]
)
ENGINE_RUNTIME_PROCESS_READ_TIME = Histogram(
    "engine_runtime_process_read_time", "time spent reading process", labelnames=["hostname"]
)

View File

@@ -1,69 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
# 引擎内部状态及状态相关数据定义模块
from enum import Enum
from .utils.collections import ConstantDict
class StateType(Enum):
    """All states a node or pipeline can be in inside the engine."""

    CREATED = "CREATED"
    READY = "READY"
    RUNNING = "RUNNING"
    SUSPENDED = "SUSPENDED"
    BLOCKED = "BLOCKED"
    FINISHED = "FINISHED"
    FAILED = "FAILED"
    REVOKED = "REVOKED"
# Module-level string aliases so callers can write e.g. states.RUNNING.
CREATED = StateType.CREATED.value
READY = StateType.READY.value
RUNNING = StateType.RUNNING.value
SUSPENDED = StateType.SUSPENDED.value
BLOCKED = StateType.BLOCKED.value
FINISHED = StateType.FINISHED.value
FAILED = StateType.FAILED.value
REVOKED = StateType.REVOKED.value

# NOTE(review): ALL_STATES omits CREATED — looks intentional, but confirm.
ALL_STATES = frozenset([READY, RUNNING, SUSPENDED, BLOCKED, FINISHED, FAILED, REVOKED])
ARCHIVED_STATES = frozenset([FINISHED, FAILED, REVOKED])
SLEEP_STATES = frozenset([SUSPENDED, REVOKED])
CHILDREN_IGNORE_STATES = frozenset([BLOCKED])

# NOTE(review): semantics inferred from the name — verify against callers.
INVERTED_TRANSITION = ConstantDict({RUNNING: frozenset([READY, FINISHED])})
# Allowed transitions: from_state -> set of states reachable from it
# (consulted by can_transit below).
TRANSITION = ConstantDict(
    {
        READY: frozenset([RUNNING, SUSPENDED]),
        RUNNING: frozenset([FINISHED, FAILED, REVOKED, SUSPENDED]),
        SUSPENDED: frozenset([READY, REVOKED, RUNNING]),
        BLOCKED: frozenset([]),
        FINISHED: frozenset([RUNNING, FAILED]),
        FAILED: frozenset([READY, FINISHED]),
        REVOKED: frozenset([]),
    }
)
def can_transit(from_state, to_state):
    """Return True if TRANSITION allows moving from from_state to to_state."""
    if from_state not in TRANSITION:
        return False
    return to_state in TRANSITION[from_state]

View File

@@ -1,18 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
"""
模板相关逻辑存放模块
"""
from .template import Template # noqa

View File

@@ -1,54 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
# 模板渲染沙箱
from typing import List, Dict
import importlib
from bamboo_engine.config import Settings
def _shield_words(sandbox: dict, words: List[str]):
for shield_word in words:
sandbox[shield_word] = None
class ModuleObject:
    """
    Attribute-chain wrapper: ModuleObject(["a", "b"], mod) yields an object
    whose .a.b is mod, so a dotted alias can be exposed in a flat namespace.
    """

    def __init__(self, sub_paths, module):
        first = sub_paths[0]
        remainder = sub_paths[1:]
        if remainder:
            setattr(self, first, ModuleObject(remainder, module))
        else:
            setattr(self, first, module)
def _import_modules(sandbox: dict, modules: Dict[str, str]):
for mod_path, alias in modules.items():
mod = importlib.import_module(mod_path)
sub_paths = alias.split(".")
if len(sub_paths) == 1:
sandbox[alias] = mod
else:
sandbox[sub_paths[0]] = ModuleObject(sub_paths[1:], mod)
def get() -> dict:
    """
    Build a fresh mako rendering sandbox: shield forbidden names first,
    then expose the configured modules (configured imports win over shields).
    """
    namespace = {}
    _shield_words(namespace, Settings.MAKO_SANDBOX_SHIELD_WORDS)
    _import_modules(namespace, Settings.MAKO_SANDBOX_IMPORT_MODULES)
    return namespace

View File

@@ -1,195 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
# 封装模板处理,渲染逻辑的相关模块
import copy
import re
import logging
from typing import Any, List, Set
from mako.template import Template as MakoTemplate
from mako import lexer, codegen
from mako.exceptions import MakoException
from bamboo_engine.utils.mako_utils.checker import check_mako_template_safety
from bamboo_engine.utils.mako_utils.exceptions import ForbiddenMakoTemplateException
from bamboo_engine.utils import mako_safety
from bamboo_engine.utils.string import deformat_var_key
from . import sandbox
logger = logging.getLogger("root")
# find mako template fragments of the form ${xxx}; "$", "{", "}" and "#" are
# excluded inside xxx ("#" could otherwise trigger a pathological match)
TEMPLATE_PATTERN = re.compile(r"\${[^${}#]+}")
class Template:
def __init__(self, data: Any):
self.data = data
def get_reference(self, deformat=False) -> Set[str]:
"""
获取当前数据中模板所引用的所有标志符
:return: 标志符列表
:rtype: List[str]
"""
reference = []
templates = self.get_templates()
for tpl in templates:
reference += self._get_template_reference(tpl)
reference = set(reference)
if not deformat:
reference = {"${%s}" % r for r in reference}
return reference
def get_templates(self) -> List[str]:
"""
获取当前数据中所有的模板片段
:return: 模板片段列表
:rtype: List[str]
"""
templates = []
data = self.data
if isinstance(data, str):
templates += self._get_string_templates(data)
if isinstance(data, (list, tuple)):
for item in data:
templates += Template(item).get_templates()
if isinstance(data, dict):
for value in list(data.values()):
templates += Template(value).get_templates()
return list(set(templates))
def render(self, context: dict) -> Any:
"""
渲染当前模板
:param context: 模板渲染上下文
:type context: dict
:return: 模板渲染后的数据
:rtype: Any
"""
data = self.data
if isinstance(data, str):
return self._render_string(data, context)
if isinstance(data, list):
ldata = [""] * len(data)
for index, item in enumerate(data):
ldata[index] = Template(copy.deepcopy(item)).render(context)
return ldata
if isinstance(data, tuple):
ldata = [""] * len(data)
for index, item in enumerate(data):
ldata[index] = Template(copy.deepcopy(item)).render(context)
return tuple(ldata)
if isinstance(data, dict):
for key, value in list(data.items()):
data[key] = Template(copy.deepcopy(value)).render(context)
return data
return data
def _get_string_templates(self, string) -> List[str]:
return list(set(TEMPLATE_PATTERN.findall(string)))
def _get_template_reference(self, template: str) -> List[str]:
lex = lexer.Lexer(template)
try:
node = lex.parse()
except MakoException as e:
logger.warning("pipeline get template[{}] reference error[{}]".format(template, e))
return []
# Dummy compiler. _Identifiers class requires one
# but only interested in the reserved_names field
def compiler():
return None
compiler.reserved_names = set()
identifiers = codegen._Identifiers(compiler, node)
return list(identifiers.undeclared)
def _render_string(self, string: str, context: dict) -> str:
"""
使用特定上下文渲染指定模板
:param string: 模板
:type string: str
:param context: 上下文
:type context: dict
:return: 渲染后的模板
:rtype: str
"""
if not isinstance(string, str):
return string
templates = self._get_string_templates(string)
# TODO keep render return object, here only process simple situation
if len(templates) == 1 and templates[0] == string and deformat_var_key(string) in context:
return context[deformat_var_key(string)]
for tpl in templates:
try:
check_mako_template_safety(
tpl,
mako_safety.SingleLineNodeVisitor(),
mako_safety.SingleLinCodeExtractor(),
)
except ForbiddenMakoTemplateException as e:
logger.warning("forbidden template: {}, exception: {}".format(tpl, e))
continue
except Exception:
logger.exception("{} safety check error.".format(tpl))
continue
resolved = Template._render_template(tpl, context)
string = string.replace(tpl, resolved)
return string
@staticmethod
def _render_template(template: str, context: dict) -> Any:
    """
    Render a single mako template with the given context.

    :param template: template text
    :type template: str
    :param context: rendering context, merged on top of the sandbox globals
    :type context: dict
    :raises TypeError: when *template* is not a string
    :return: the rendered value, or *template* unchanged when parsing or
        rendering fails
    :rtype: Any
    """
    data = {}
    # Sandbox values first so the caller-supplied context can override them.
    data.update(sandbox.get())
    data.update(context)
    if not isinstance(template, str):
        raise TypeError("constant resolve error, template[%s] is not a string" % template)
    try:
        tm = MakoTemplate(template)
    except (MakoException, SyntaxError) as e:
        # Malformed template: log and fall back to the raw text.
        logger.error("pipeline resolve template[{}] error[{}]".format(template, e))
        return template
    try:
        resolved = tm.render_unicode(**data)
    except Exception as e:
        # Rendering failures (e.g. missing variables) also fall back to raw text.
        logger.warning("constant content({}) is invalid, data({}), error: {}".format(template, data, e))
        return template
    else:
        return resolved

View File

@@ -1,16 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
"""
引擎内部使用的各类工具存放模块
"""

View File

@@ -1,22 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
# bool 表达式解析工具模块
from .boolrule import ( # noqa
BoolRule,
MissingVariableException,
UnknownOperatorException,
)

View File

@@ -1,291 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from pyparsing import (
CaselessLiteral,
Combine,
Forward,
Group,
Keyword,
Optional,
ParseException,
ParseResults,
QuotedString,
Suppress,
Word,
ZeroOrMore,
alphanums,
alphas,
delimitedList,
nums,
oneOf,
)
PATH_DELIMITER = "."
class SubstituteVal(object):
    """A placeholder token that is resolved against a context at evaluation
    time (built from a pyparsing token list; the first token is the dotted
    property path).
    """

    def __init__(self, t):
        self._path = t[0]

    def get_val(self, context):
        if not context:
            # No (or empty) context: fall back to the raw path text.
            # raise MissingVariableException(
            #     'context missing or empty'
            # )
            return self._path
        current = context
        try:
            for segment in self._path.split(PATH_DELIMITER):
                if hasattr(current, segment):
                    current = getattr(current, segment)
                else:
                    current = current[segment]
        except KeyError:
            raise MissingVariableException("no value supplied for {}".format(self._path))
        return current

    def __repr__(self):
        return "SubstituteVal(%s)" % self._path
def get_bool_expression():
    """Build and return the pyparsing grammar for boolean rule expressions.

    The grammar accepts comparisons (==, !=, <, >, >=, <=, in, notin,
    issuperset, notissuperset) between numbers, quoted strings, booleans,
    dotted property paths (including gcloud-style ``${var}`` references)
    and parenthesised value lists, combined with ``and`` / ``or`` and
    parentheses.
    """
    # Grammar definition
    # match gcloud's variable
    identifier = Combine(Optional("${") + Optional("_") + Word(alphas, alphanums + "_") + Optional("}"))
    # identifier = Word(alphas, alphanums + "_")
    propertyPath = delimitedList(identifier, PATH_DELIMITER, combine=True)
    and_ = Keyword("and", caseless=True)
    or_ = Keyword("or", caseless=True)
    lparen = Suppress("(")
    rparen = Suppress(")")
    # comparison operators; word-style ones are matched case-insensitively
    binaryOp = oneOf("== != < > >= <= in notin issuperset notissuperset", caseless=True)("operator")
    E = CaselessLiteral("E")
    numberSign = Word("+-", exact=1)
    # floats with optional sign and scientific notation
    realNumber = Combine(
        Optional(numberSign)
        + (Word(nums) + "." + Optional(Word(nums)) | ("." + Word(nums)))
        + Optional(E + Optional(numberSign) + Word(nums))
    )
    integer = Combine(Optional(numberSign) + Word(nums) + Optional(E + Optional("+") + Word(nums)))
    # str_ = quotedString.addParseAction(removeQuotes)
    str_ = QuotedString('"') | QuotedString("'")
    bool_ = oneOf("true false", caseless=True)
    # Parse actions convert literal tokens to python values; property paths
    # become SubstituteVal placeholders resolved later against the context.
    simpleVals = (
        realNumber.setParseAction(lambda toks: float(toks[0]))
        | integer.setParseAction(lambda toks: int(toks[0]))
        | str_
        | bool_.setParseAction(lambda toks: toks[0] == "true")
        | propertyPath.setParseAction(lambda toks: SubstituteVal(toks))
    )  # need to add support for alg expressions
    propertyVal = simpleVals | (lparen + Group(delimitedList(simpleVals)) + rparen)
    boolExpression = Forward()
    boolCondition = Group(
        (Group(propertyVal)("lval") + binaryOp + Group(propertyVal)("rval")) | (lparen + boolExpression + rparen)
    )
    boolExpression << boolCondition + ZeroOrMore((and_ | or_) + boolExpression)
    return boolExpression
def double_equals_trans(lval, rval, operator):
    """
    Best-effort type coercion so comparisons behave like a "double equals"
    (e.g. ``1 == "1"``), tailored to the operator being applied.

    For ``in``/``notin`` the right-hand list is widened with copies of its
    items converted to the left-hand value's type; for ``issuperset``/
    ``notissuperset`` bare strings are wrapped in lists so they are treated
    as single elements rather than character sets; for every other operator
    the two operands are coerced toward a common int/str type when possible.

    :param lval: left operand
    :param rval: right operand
    :param operator: operator name the caller is about to apply
    :return: (lval, rval) after coercion; the inputs are never mutated
    """
    # double equals
    if operator in ["in", "notin"]:
        if isinstance(rval, list) and len(rval):
            transed_rval = []
            if isinstance(lval, int):
                for item in rval:
                    try:
                        transed_rval.append(int(item))
                    except Exception:
                        # unconvertible items are simply skipped (best effort)
                        pass
            elif isinstance(lval, str):
                for item in rval:
                    try:
                        transed_rval.append(str(item))
                    except Exception:
                        pass
            # Build a new list instead of `rval += transed_rval`: the old
            # in-place extend mutated the caller's list on every call.
            rval = rval + transed_rval
    elif operator in ["issuperset", "notissuperset"]:
        # avoid convert set('abc') to {a, b, c}, but keep {'abc'}
        if isinstance(lval, str):
            lval = [lval]
        if isinstance(rval, str):
            rval = [rval]
    else:
        try:
            if isinstance(lval, int):
                rval = int(rval)
            elif isinstance(rval, int):
                lval = int(lval)
            if isinstance(lval, str):
                rval = str(rval)
            elif isinstance(rval, str):
                lval = str(lval)
        except Exception:
            # incompatible operands: leave them untouched (best effort)
            pass
    return lval, rval
class BoolRule(object):
    """
    Represents a boolean expression and provides a `test` method to evaluate
    the expression and determine its truthiness.
    :param query: A string containing the query to be evaluated
    :param lazy: If ``True``, parse the query the first time it's tested rather
                 than immediately. This can help with performance if you
                 instantiate a lot of rules and only end up evaluating a
                 small handful.
    :param strict: If ``True``, require the parser to consume the whole query.
    """

    # class-level defaults; instances overwrite these during __init__/_compile
    _compiled = False
    _tokens = None
    _query = None

    def __init__(self, query, lazy=False, strict=True):
        self._query = query
        self.strict = strict
        if not lazy:
            self._compile()

    def test(self, context=None):
        """
        Test the expression against the given context and return the result.
        :param context: A dict context to evaluate the expression against.
        :return: True if the expression succesfully evaluated against the
                 context, or False otherwise.
        """
        if self._is_match_all():
            return True
        self._compile()
        return self._test_tokens(self._tokens, context)

    def _is_match_all(self):
        # "*" is a special query that matches everything without parsing
        return True if self._query == "*" else False

    def _compile(self):
        """Parse the query into tokens once; later calls are no-ops."""
        if not self._compiled:
            # special case match-all query
            if self._is_match_all():
                return
            try:
                self._tokens = get_bool_expression().parseString(self._query, parseAll=self.strict)
            except ParseException:
                raise
            self._compiled = True

    def _expand_val(self, val, context):
        """Recursively resolve SubstituteVal placeholders against *context*."""
        if type(val) == list:
            val = [self._expand_val(v, context) for v in val]
        if isinstance(val, SubstituteVal):
            ret = val.get_val(context)
            return ret
        if isinstance(val, ParseResults):
            return [self._expand_val(x, context) for x in val.asList()]
        return val

    def _test_tokens(self, tokens, context):
        """Evaluate a parsed token sequence with and/or short-circuiting.

        *tokens* alternates conditions and "and"/"or" connectives; ``passed``
        always holds the truth value of the most recently evaluated condition.
        """
        passed = False
        for token in tokens:
            if not isinstance(token, ParseResults):
                # token is a literal "and"/"or" connective: short-circuit
                if token == "or" and passed:
                    return True
                elif token == "and" and not passed:
                    return False
                continue
            if not token.getName():
                # unnamed group: a parenthesised sub-expression
                passed = self._test_tokens(token, context)
                continue
            items = token.asDict()
            operator = items["operator"]
            lval = self._expand_val(items["lval"][0], context)
            rval = self._expand_val(items["rval"][0], context)
            # coerce operand types so e.g. 1 == "1" compares equal
            lval, rval = double_equals_trans(lval, rval, operator)
            if operator in ("=", "==", "eq"):
                passed = lval == rval
            elif operator in ("!=", "ne"):
                passed = lval != rval
            elif operator in (">", "gt"):
                passed = lval > rval
            elif operator in (">=", "ge"):
                passed = lval >= rval
            elif operator in ("<", "lt"):
                passed = lval < rval
            elif operator in ("<=", "le"):
                passed = lval <= rval
            elif operator == "in":
                passed = lval in rval
            elif operator == "notin":
                passed = lval not in rval
            elif operator == "issuperset":
                passed = set(lval).issuperset(set(rval))
            elif operator == "notissuperset":
                passed = not set(lval).issuperset(set(rval))
            else:
                raise UnknownOperatorException("Unknown operator '{}'".format(operator))
        return passed
class MissingVariableException(Exception):
    """Raised when an expression references a property path for which the
    evaluation context supplies no value.
    """

    pass
class UnknownOperatorException(Exception):
    """Raised when an expression uses an operator the evaluator does not know.

    In practice the parser rejects unknown operators before evaluation, so
    this should never fire — it exists as a safety net for when additional
    operators are being added.
    """

    pass

View File

@@ -1,53 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
# 集合类工具
from typing import Any
class FancyDict(dict):
    """A dict whose items are also reachable as attributes (d.key <-> d["key"])."""

    def __getattr__(self, key: str) -> Any:
        try:
            return self[key]
        except KeyError as k:
            # Missing key surfaces as the attribute error callers expect.
            raise AttributeError(k)

    def __setattr__(self, key: str, value: Any):
        is_dunder = key.startswith("__") and key.endswith("__")
        if not is_dunder:
            self[key] = value
        else:
            # Dunder names stay real attributes instead of dict entries.
            super().__setattr__(key, value)

    def __delattr__(self, key: str):
        try:
            del self[key]
        except KeyError as k:
            raise AttributeError(k)
class ConstantDict(dict):
    """A :class:`dict` whose items cannot be reassigned after construction::
        >>> d = ConstantDict({'key': 'value'})
        >>> d['key'] = 'value'
        Traceback (most recent call last):
        ...
        TypeError: 'ConstantDict' object does not support item assignment
    """

    def __setitem__(self, key: str, value: Any):
        message = "'%s' object does not support item assignment" % self.__class__.__name__
        raise TypeError(message)

View File

@@ -1,22 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from bamboo_engine.eri import ContextValueType
# Maps the string variable type used in pipeline data ("plain"/"splice"/"lazy")
# to the corresponding bamboo-engine ContextValueType member.
VAR_CONTEXT_MAPPING = {
    "plain": ContextValueType.PLAIN,
    "splice": ContextValueType.SPLICE,
    "lazy": ContextValueType.COMPUTE,  # "lazy" maps to COMPUTE, not a LAZY member
}

View File

@@ -1,261 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
class Graph(object):
    """Directed graph over explicit node/flow lists, with DFS cycle detection.

    :param nodes: hashable node identifiers
    :param flows: directed edges as [source, target] pairs
    """

    def __init__(self, nodes, flows):
        self.nodes = nodes
        self.flows = flows
        # DFS trail of the most recent cycle search; when a cycle is found it
        # ends with the node that closed the cycle (appended a second time).
        self.path = []
        self.last_visited_node = ""
        # adjacency list keyed by source node
        self.graph = {node: [] for node in self.nodes}
        for flow in self.flows:
            self.graph[flow[0]].append(flow[1])

    def has_cycle(self):
        """Return True if the graph contains at least one directed cycle."""
        self.path = []
        visited = {node: False for node in self.nodes}
        visit_stack = {node: False for node in self.nodes}
        for node in self.nodes:
            # Only start a DFS from nodes no earlier DFS has reached;
            # re-entering visited nodes could never uncover a new cycle.
            if not visited[node] and self._has_cycle(node, visited, visit_stack):
                return True
        return False

    def _has_cycle(self, node, visited, visit_stack):
        """DFS helper: True if a cycle is reachable from *node*."""
        self.last_visited_node = node
        self.path.append(node)
        visited[node] = True
        visit_stack[node] = True
        for neighbor in self.graph[node]:
            if not visited[neighbor]:
                if self._has_cycle(neighbor, visited, visit_stack):
                    return True
            elif visit_stack[neighbor]:
                # back edge into the current DFS stack: record the closing node
                self.path.append(neighbor)
                return True
        # Backtrack: `node` is always the last path entry here, so pop() is
        # O(1) and equivalent to the original O(n) list.remove(node).
        self.path.pop()
        visit_stack[node] = False
        return False

    def get_cycle(self):
        """Return one cycle as a node list (first node repeated at the end
        when the closing node lies inside the recorded path), or [] when the
        graph is acyclic."""
        if self.has_cycle():
            cross_node = self.path[-1]
            if self.path.count(cross_node) > 1:
                return self.path[self.path.index(cross_node) :]
            else:
                return self.path
        return []
if __name__ == "__main__":
    # Smoke tests: an acyclic chain, a full-loop cycle, and a partial cycle.
    graph1 = Graph([1, 2, 3, 4], [[1, 2], [2, 3], [3, 4]])
    assert not graph1.has_cycle()
    assert graph1.get_cycle() == []
    graph2 = Graph([1, 2, 3, 4], [[1, 2], [2, 3], [3, 4], [4, 1]])
    assert graph2.has_cycle()
    assert graph2.get_cycle() == [1, 2, 3, 4, 1]
    graph3 = Graph([1, 2, 3, 4], [[1, 2], [2, 3], [3, 4], [4, 2]])
    assert graph3.has_cycle()
    assert graph3.get_cycle() == [2, 3, 4, 2]
graph4 = Graph(
[
"n20c4a0601193f268bfa168f1192eacd",
"nef42d10350b3961b53df7af67e16d9b",
"n0ada7b4abe63771a43052eaf188dc4b",
"n0cd3b95c714388bacdf1a486ab432fc",
"n1430047af8537f88710c4bbf3cbfb0f",
"n383748fe27434d582f0ca17af9d968a",
"n51426abd4be3a4691c80a73c3f93b3c",
"n854753a77933562ae72ec87c365f23d",
"n89f083892a731d7b9d7edb0f372006d",
"n8d4568db0ad364692b0387e86a2f1e0",
"n8daedbb02273a0fbc94cc118c90649f",
"n90b7ef55fe839b181879e036b4f8ffe",
"n99817348b4a36a6931854c93eed8c5f",
"na02956eba6f3a36ab9b0af2f2350213",
"nc3d0d49adf530bbaffe53630c184c0a",
"nca50848d1aa340f8c2b4776ce81868d",
"ncab9a48e79d357195dcee68dad3a31f",
"ncb4e013a6a8348bab087cc8500a3876",
"ne1f86f902a23e7fa4a67192e8b38a05",
"ne26def77df1385caa206c64e7e3ea53",
"nf3ebee137c53da28091ad7d140ce00c",
"nfc1dcdd7476393b9a81a988c113e1cf",
"n0197f8f210b3a1b8a7fc2f90e94744e",
"n01fb40259ad3cf285bb11a8bbbe59f2",
"n03f39191e8a32629145ba6a677ed040",
"n03ffc3b9e12316d8be63261cb9dec71",
"n07982b8985139249bca3a046f3a4379",
"n0b9e36e6b633ddb906d2044f658f110",
"n136c4fedebe3eb0ba932495aff6a945",
"n17cdc62c5d43976a413bda8f35634eb",
"n1d48483d8023439ad98d61d156c85fb",
"n26725bdcc0931fab0bc73e7244545ca",
"n2890db24f6c3cd1bbcd6b7d8cf2c045",
"n2ad9caac5b737bd897d4c8844c85f12",
"n2c88d1c1d8b35aebf883cbf259fb6bc",
"n302d25dfc9c369ab13104d5208e7119",
"n31688b7ab44338e9e6cb8dcaf259eef",
"n374443fbdc1313d98ebbe19d535fec2",
"n38c3dd0344a3f86bc7511c454bcdf4c",
"n3934eef90463940a6a9cf4ba2e63b1c",
"n40d5f0ca4bc3dd99c0b264cb186f00f",
"n476ddcb6dd33e2abac43596b08c2bc1",
"n4790f8aa48e335aa712e2af757e180b",
"n48bbfdc912334fc89c4f48c05e8969e",
"n5bef4f4532a382eaf79a0af70b2396b",
"n5ced56bcc863060ac4977755f35a5f5",
"n66a0562670e37648a3e05c243335bff",
"n6dc118cd3f7341d9ef8c97c63e2e9d9",
"n6e9d52e1ea53958a93e5b34022e7037",
"n786694b5ed33295a885b5bcd8c7c1ce",
"n7dccd56c80233469a4609f684ebe457",
"n8492d92ab6a3da48c2b49d6fcb8a479",
"n86a8b1a56f9399f90c4c227594a9d03",
"n8a805c0cd02307bad9f7828880b53dc",
"n8c7e35b0457300d9d6a96a6b1d18329",
"n91fdaed36403d06a07f4afe85e2892c",
"n9335d0718a937f9a39ec5b36d5637fe",
"n9372fb07ad936cba31f3d4e440f395a",
"n9ab96f926d83a93a5d3ebe2888fd343",
"na2a8a54e68033d0a276eb88dbff91c3",
"na493a7b5d5b3cc29f4070a6c4589cb7",
"nadfa68cb2503a39aac6626d6c72484a",
"nae1218ddd2e3448b562bc79dc084401",
"nc012287be793377b975b0230b35d713",
"ncb2e01f0c5336fe82b0e0e496f2612b",
"ncb5843900903b4c8a0a8302474d8c51",
"ncbf4db2c48f3348b2c7081f9e3b363a",
"nd4ee6c3248935ce9239e4bb20a81ab8",
"ndb1cf7af0e2319c9868530d0df8fd93",
"ne36a6858a733430bffa4fec053dc1ab",
"ne7af4a7c3613b3d81fe9e6046425a36",
"ne8035dd8de732758c1cc623f80f2fc8",
"ned91fdb914c35f3a21f320f62d72ffd",
"nf5448b3c66430f4a299d08208d313a6",
"nfaa0756a06f300495fb2e2e45e05ed3",
],
[
["n8d4568db0ad364692b0387e86a2f1e0", "n5bef4f4532a382eaf79a0af70b2396b"],
["n8daedbb02273a0fbc94cc118c90649f", "nf5448b3c66430f4a299d08208d313a6"],
["n01fb40259ad3cf285bb11a8bbbe59f2", "ne1f86f902a23e7fa4a67192e8b38a05"],
["ncab9a48e79d357195dcee68dad3a31f", "n0197f8f210b3a1b8a7fc2f90e94744e"],
["na493a7b5d5b3cc29f4070a6c4589cb7", "ne1f86f902a23e7fa4a67192e8b38a05"],
["n89f083892a731d7b9d7edb0f372006d", "n136c4fedebe3eb0ba932495aff6a945"],
["n51426abd4be3a4691c80a73c3f93b3c", "n9ab96f926d83a93a5d3ebe2888fd343"],
["n89f083892a731d7b9d7edb0f372006d", "n8492d92ab6a3da48c2b49d6fcb8a479"],
["n17cdc62c5d43976a413bda8f35634eb", "n6e9d52e1ea53958a93e5b34022e7037"],
["n476ddcb6dd33e2abac43596b08c2bc1", "ne1f86f902a23e7fa4a67192e8b38a05"],
["n6dc118cd3f7341d9ef8c97c63e2e9d9", "nfc1dcdd7476393b9a81a988c113e1cf"],
["n91fdaed36403d06a07f4afe85e2892c", "ncb4e013a6a8348bab087cc8500a3876"],
["n8a805c0cd02307bad9f7828880b53dc", "n3934eef90463940a6a9cf4ba2e63b1c"],
["n2890db24f6c3cd1bbcd6b7d8cf2c045", "n0ada7b4abe63771a43052eaf188dc4b"],
["ned91fdb914c35f3a21f320f62d72ffd", "n383748fe27434d582f0ca17af9d968a"],
["n89f083892a731d7b9d7edb0f372006d", "n0b9e36e6b633ddb906d2044f658f110"],
["nc3d0d49adf530bbaffe53630c184c0a", "na493a7b5d5b3cc29f4070a6c4589cb7"],
["ncb2e01f0c5336fe82b0e0e496f2612b", "nc012287be793377b975b0230b35d713"],
["n86a8b1a56f9399f90c4c227594a9d03", "nf3ebee137c53da28091ad7d140ce00c"],
["nc3d0d49adf530bbaffe53630c184c0a", "nadfa68cb2503a39aac6626d6c72484a"],
["na02956eba6f3a36ab9b0af2f2350213", "na2a8a54e68033d0a276eb88dbff91c3"],
["n8daedbb02273a0fbc94cc118c90649f", "n07982b8985139249bca3a046f3a4379"],
["n136c4fedebe3eb0ba932495aff6a945", "nfc1dcdd7476393b9a81a988c113e1cf"],
["n9372fb07ad936cba31f3d4e440f395a", "n1430047af8537f88710c4bbf3cbfb0f"],
["n8d4568db0ad364692b0387e86a2f1e0", "n91fdaed36403d06a07f4afe85e2892c"],
["n854753a77933562ae72ec87c365f23d", "n40d5f0ca4bc3dd99c0b264cb186f00f"],
["n854753a77933562ae72ec87c365f23d", "n1d48483d8023439ad98d61d156c85fb"],
["n9ab96f926d83a93a5d3ebe2888fd343", "n383748fe27434d582f0ca17af9d968a"],
["ne36a6858a733430bffa4fec053dc1ab", "n0cd3b95c714388bacdf1a486ab432fc"],
["n03ffc3b9e12316d8be63261cb9dec71", "nca50848d1aa340f8c2b4776ce81868d"],
["ne8035dd8de732758c1cc623f80f2fc8", "n0ada7b4abe63771a43052eaf188dc4b"],
["n51426abd4be3a4691c80a73c3f93b3c", "ned91fdb914c35f3a21f320f62d72ffd"],
["nd4ee6c3248935ce9239e4bb20a81ab8", "nfaa0756a06f300495fb2e2e45e05ed3"],
["n5bef4f4532a382eaf79a0af70b2396b", "ncb4e013a6a8348bab087cc8500a3876"],
["ne26def77df1385caa206c64e7e3ea53", "n786694b5ed33295a885b5bcd8c7c1ce"],
["n854753a77933562ae72ec87c365f23d", "ne8035dd8de732758c1cc623f80f2fc8"],
["n374443fbdc1313d98ebbe19d535fec2", "ndb1cf7af0e2319c9868530d0df8fd93"],
["nfaa0756a06f300495fb2e2e45e05ed3", "n8c7e35b0457300d9d6a96a6b1d18329"],
["n90b7ef55fe839b181879e036b4f8ffe", "n26725bdcc0931fab0bc73e7244545ca"],
["n8d4568db0ad364692b0387e86a2f1e0", "ncb2e01f0c5336fe82b0e0e496f2612b"],
["ncb5843900903b4c8a0a8302474d8c51", "ncb4e013a6a8348bab087cc8500a3876"],
["nf5448b3c66430f4a299d08208d313a6", "nf3ebee137c53da28091ad7d140ce00c"],
["n20c4a0601193f268bfa168f1192eacd", "nd4ee6c3248935ce9239e4bb20a81ab8"],
["nca50848d1aa340f8c2b4776ce81868d", "nc3d0d49adf530bbaffe53630c184c0a"],
["na02956eba6f3a36ab9b0af2f2350213", "n03ffc3b9e12316d8be63261cb9dec71"],
["n7dccd56c80233469a4609f684ebe457", "n8daedbb02273a0fbc94cc118c90649f"],
["n0ada7b4abe63771a43052eaf188dc4b", "na02956eba6f3a36ab9b0af2f2350213"],
["n9335d0718a937f9a39ec5b36d5637fe", "n99817348b4a36a6931854c93eed8c5f"],
["n90b7ef55fe839b181879e036b4f8ffe", "n5ced56bcc863060ac4977755f35a5f5"],
["ncb4e013a6a8348bab087cc8500a3876", "ne26def77df1385caa206c64e7e3ea53"],
["na02956eba6f3a36ab9b0af2f2350213", "n4790f8aa48e335aa712e2af757e180b"],
["nc012287be793377b975b0230b35d713", "ncb4e013a6a8348bab087cc8500a3876"],
["n8d4568db0ad364692b0387e86a2f1e0", "ncb5843900903b4c8a0a8302474d8c51"],
["n40d5f0ca4bc3dd99c0b264cb186f00f", "n0ada7b4abe63771a43052eaf188dc4b"],
["n38c3dd0344a3f86bc7511c454bcdf4c", "n17cdc62c5d43976a413bda8f35634eb"],
["n6e9d52e1ea53958a93e5b34022e7037", "n90b7ef55fe839b181879e036b4f8ffe"],
["nf3ebee137c53da28091ad7d140ce00c", "n51426abd4be3a4691c80a73c3f93b3c"],
["n99817348b4a36a6931854c93eed8c5f", "n89f083892a731d7b9d7edb0f372006d"],
["n89f083892a731d7b9d7edb0f372006d", "n6dc118cd3f7341d9ef8c97c63e2e9d9"],
["n8daedbb02273a0fbc94cc118c90649f", "n66a0562670e37648a3e05c243335bff"],
["nadfa68cb2503a39aac6626d6c72484a", "ne1f86f902a23e7fa4a67192e8b38a05"],
["n383748fe27434d582f0ca17af9d968a", "nef42d10350b3961b53df7af67e16d9b"],
["na02956eba6f3a36ab9b0af2f2350213", "n03f39191e8a32629145ba6a677ed040"],
["nae1218ddd2e3448b562bc79dc084401", "n383748fe27434d582f0ca17af9d968a"],
["n26725bdcc0931fab0bc73e7244545ca", "n1430047af8537f88710c4bbf3cbfb0f"],
["n48bbfdc912334fc89c4f48c05e8969e", "n8a805c0cd02307bad9f7828880b53dc"],
["ne7af4a7c3613b3d81fe9e6046425a36", "ncb4e013a6a8348bab087cc8500a3876"],
["nfc1dcdd7476393b9a81a988c113e1cf", "n8d4568db0ad364692b0387e86a2f1e0"],
["n0197f8f210b3a1b8a7fc2f90e94744e", "n99817348b4a36a6931854c93eed8c5f"],
["n90b7ef55fe839b181879e036b4f8ffe", "n302d25dfc9c369ab13104d5208e7119"],
["n1d48483d8023439ad98d61d156c85fb", "n0ada7b4abe63771a43052eaf188dc4b"],
["na2a8a54e68033d0a276eb88dbff91c3", "nca50848d1aa340f8c2b4776ce81868d"],
["n90b7ef55fe839b181879e036b4f8ffe", "n9372fb07ad936cba31f3d4e440f395a"],
["ndb1cf7af0e2319c9868530d0df8fd93", "n2ad9caac5b737bd897d4c8844c85f12"],
["n8492d92ab6a3da48c2b49d6fcb8a479", "nfc1dcdd7476393b9a81a988c113e1cf"],
["n8d4568db0ad364692b0387e86a2f1e0", "ne7af4a7c3613b3d81fe9e6046425a36"],
["n302d25dfc9c369ab13104d5208e7119", "n1430047af8537f88710c4bbf3cbfb0f"],
["n51426abd4be3a4691c80a73c3f93b3c", "n2c88d1c1d8b35aebf883cbf259fb6bc"],
["n786694b5ed33295a885b5bcd8c7c1ce", "n0cd3b95c714388bacdf1a486ab432fc"],
["n854753a77933562ae72ec87c365f23d", "n2890db24f6c3cd1bbcd6b7d8cf2c045"],
["nc3d0d49adf530bbaffe53630c184c0a", "n476ddcb6dd33e2abac43596b08c2bc1"],
["n2c88d1c1d8b35aebf883cbf259fb6bc", "n383748fe27434d582f0ca17af9d968a"],
["n0cd3b95c714388bacdf1a486ab432fc", "n854753a77933562ae72ec87c365f23d"],
["n51426abd4be3a4691c80a73c3f93b3c", "nae1218ddd2e3448b562bc79dc084401"],
["nc3d0d49adf530bbaffe53630c184c0a", "n01fb40259ad3cf285bb11a8bbbe59f2"],
["ne1f86f902a23e7fa4a67192e8b38a05", "n374443fbdc1313d98ebbe19d535fec2"],
["n0b9e36e6b633ddb906d2044f658f110", "nfc1dcdd7476393b9a81a988c113e1cf"],
["ncab9a48e79d357195dcee68dad3a31f", "ncbf4db2c48f3348b2c7081f9e3b363a"],
["n8daedbb02273a0fbc94cc118c90649f", "n86a8b1a56f9399f90c4c227594a9d03"],
["ncbf4db2c48f3348b2c7081f9e3b363a", "n99817348b4a36a6931854c93eed8c5f"],
["n1430047af8537f88710c4bbf3cbfb0f", "ncab9a48e79d357195dcee68dad3a31f"],
["n4790f8aa48e335aa712e2af757e180b", "nca50848d1aa340f8c2b4776ce81868d"],
["ne26def77df1385caa206c64e7e3ea53", "ne36a6858a733430bffa4fec053dc1ab"],
["ncab9a48e79d357195dcee68dad3a31f", "n31688b7ab44338e9e6cb8dcaf259eef"],
["n07982b8985139249bca3a046f3a4379", "nf3ebee137c53da28091ad7d140ce00c"],
["n66a0562670e37648a3e05c243335bff", "nf3ebee137c53da28091ad7d140ce00c"],
["n03f39191e8a32629145ba6a677ed040", "nca50848d1aa340f8c2b4776ce81868d"],
["n8c7e35b0457300d9d6a96a6b1d18329", "n38c3dd0344a3f86bc7511c454bcdf4c"],
["n5ced56bcc863060ac4977755f35a5f5", "n1430047af8537f88710c4bbf3cbfb0f"],
["n2ad9caac5b737bd897d4c8844c85f12", "n48bbfdc912334fc89c4f48c05e8969e"],
["n31688b7ab44338e9e6cb8dcaf259eef", "n99817348b4a36a6931854c93eed8c5f"],
["n3934eef90463940a6a9cf4ba2e63b1c", "n7dccd56c80233469a4609f684ebe457"],
["ncab9a48e79d357195dcee68dad3a31f", "n9335d0718a937f9a39ec5b36d5637fe"],
],
)
assert not graph4.has_cycle()
assert graph4.get_cycle() == []
graph5 = Graph([1, 2, 3, 4, 5], [[1, 2], [2, 3], [2, 4], [4, 5], [5, 2]])
assert graph5.has_cycle()
assert graph5.get_cycle() == [2, 4, 5, 2]

View File

@@ -1,21 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import socket
def get_hostname():
"""
获取当前主机名
"""
return socket.gethostname()

View File

@@ -1,55 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
# Mako 安全工具
from ast import NodeVisitor
from mako import parsetree
from .mako_utils.code_extract import MakoNodeCodeExtractor
from .mako_utils.exceptions import ForbiddenMakoTemplateException
class SingleLineNodeVisitor(NodeVisitor):
    """AST visitor that walks a parsed code snippet and raises
    ForbiddenMakoTemplateException on dunder (magic) name/attribute access
    or on any import statement.
    """

    def __init__(self, *args, **kwargs):
        super(SingleLineNodeVisitor, self).__init__(*args, **kwargs)

    def visit_Attribute(self, node):
        is_private = node.attr.startswith("__")
        if is_private:
            raise ForbiddenMakoTemplateException("can not access private attribute")

    def visit_Name(self, node):
        is_private = node.id.startswith("__")
        if is_private:
            raise ForbiddenMakoTemplateException("can not access private method")

    def visit_Import(self, node):
        raise ForbiddenMakoTemplateException("can not use import statement")

    def visit_ImportFrom(self, node):
        # `from x import y` is just as forbidden as `import x`.
        self.visit_Import(node)
class SingleLinCodeExtractor(MakoNodeCodeExtractor):
    """Extractor that accepts only Code/Expression/Text Mako nodes."""

    def extract(self, node):
        if isinstance(node, (parsetree.Code, parsetree.Expression)):
            return node.text
        if isinstance(node, parsetree.Text):
            # Plain text carries no python code to check.
            return None
        raise ForbiddenMakoTemplateException("Unsupported node: [{}]".format(node.__class__.__name__))

View File

@@ -1,16 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
"""
Mako 相关工具模块
"""

View File

@@ -1,59 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import ast
from typing import List
from mako import parsetree
from mako.exceptions import MakoException
from mako.lexer import Lexer
from .code_extract import MakoNodeCodeExtractor
from .exceptions import ForbiddenMakoTemplateException
def parse_template_nodes(
    nodes: List["parsetree.Node"],
    node_visitor: ast.NodeVisitor,
    code_extractor: "MakoNodeCodeExtractor",
):
    """
    Parse mako template nodes, checking each node's abstract syntax tree
    for safety, and recursing into child nodes.

    :param nodes: mako template node list
    :param node_visitor: AST visitor used to walk (and veto) parsed code
    :param code_extractor: Mako lexer-node handler that extracts python code
    """
    for node in nodes:
        code = code_extractor.extract(node)
        if code is None:
            # Node carries no python code (e.g. plain text): nothing to check.
            continue
        ast_node = ast.parse(code, "<unknown>", "exec")
        node_visitor.visit(ast_node)
        if hasattr(node, "nodes"):
            # BUGFIX: the recursive call previously omitted `code_extractor`,
            # raising TypeError for any node that has children.
            parse_template_nodes(node.nodes, node_visitor, code_extractor)
def check_mako_template_safety(text: str, node_visitor: ast.NodeVisitor, code_extractor: MakoNodeCodeExtractor) -> bool:
    """
    Check whether a mako template is safe: raise if unsafe, return True if safe.

    :param text: mako template content
    :param node_visitor: AST visitor used to walk the parsed code nodes
    :param code_extractor: Mako lexer-node handler that extracts python code
    :raises ForbiddenMakoTemplateException: when the text cannot be parsed as
        a mako template, or when it contains forbidden constructs
    :return: True when every node passed the safety checks
    """
    try:
        lexer_template = Lexer(text).parse()
    except MakoException as mako_error:
        # Parse failure is treated as forbidden; the message text is
        # user-facing and intentionally left unchanged.
        raise ForbiddenMakoTemplateException("非mako模板解析失败, {err_msg}".format(err_msg=mako_error.__class__.__name__))
    parse_template_nodes(lexer_template.nodes, node_visitor, code_extractor)
    return True

View File

@@ -1,45 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import abc
from mako import parsetree
from mako.ast import PythonFragment
from .exceptions import ForbiddenMakoTemplateException
class MakoNodeCodeExtractor:
    """Interface for extracting checkable python code from Mako lexer nodes."""

    # NOTE(review): @abc.abstractmethod has no effect without abc.ABC /
    # ABCMeta as the metaclass, so instantiation is not actually blocked;
    # the NotImplementedError below is the effective guard.
    @abc.abstractmethod
    def extract(self, node):
        """
        Process a code object split out by the Mako lexer and return the
        python code that needs checking; return None when the node needs
        no processing.

        :param node: mako parsetree node
        :return: code to check, or None
        """
        raise NotImplementedError()
class StrictMakoNodeCodeExtractor(MakoNodeCodeExtractor):
    """Extractor that also understands Mako control lines (% if / % for ...)."""

    def extract(self, node):
        if isinstance(node, (parsetree.Code, parsetree.Expression)):
            return node.text
        if isinstance(node, parsetree.ControlLine):
            # Closing control lines (e.g. "% endif") carry no code of their own.
            if node.isend:
                return None
            return PythonFragment(node.text).code
        if isinstance(node, parsetree.Text):
            return None
        raise ForbiddenMakoTemplateException("不支持[{}]节点".format(node.__class__.__name__))

View File

@@ -1,16 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
class ForbiddenMakoTemplateException(Exception):
    """Raised when a Mako template contains forbidden or unparseable constructs."""

View File

@@ -1,115 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import _ast
import ast
from werkzeug.utils import import_string
from .exceptions import ForbiddenMakoTemplateException
class StrictNodeVisitor(ast.NodeVisitor):
    """
    Walk the AST of template-embedded Python and raise
    ForbiddenMakoTemplateException whenever a blacklisted identifier, an
    underscore-prefixed (private/magic) name or attribute, or a
    non-whitelisted import is encountered.
    """

    # Dangerous modules: the module name itself plus every attribute it
    # exports become forbidden identifiers.
    BLACK_LIST_MODULE_METHODS = {
        "os": dir(__import__("os")),
        "subprocess": dir(__import__("subprocess")),
        "shutil": dir(__import__("shutil")),
        "ctypes": dir(__import__("ctypes")),
        "codecs": dir(__import__("codecs")),
        "sys": dir(__import__("sys")),
        "socket": dir(__import__("socket")),
        "webbrowser": dir(__import__("webbrowser")),
        "threading": dir(__import__("threading")),
        # BUGFIX: was dir(__import__("threading")) — a copy/paste error that
        # left the whole sqlite3 API (connect, Connection, ...) unblocked.
        "sqlite3": dir(__import__("sqlite3")),
        "signal": dir(__import__("signal")),
        "imaplib": dir(__import__("imaplib")),
        "fcntl": dir(__import__("fcntl")),
        "pdb": dir(__import__("pdb")),
        "pty": dir(__import__("pty")),
        "glob": dir(__import__("glob")),
        "tempfile": dir(__import__("tempfile")),
        # Code/frame object internals enable sandbox escapes (f_globals etc.);
        # direct stdlib access replaces the previous werkzeug.import_string use.
        "types": dir(__import__("types").CodeType) + dir(__import__("types").FrameType),
        "builtins": [
            "getattr",
            "hasattr",
            "breakpoint",
            "compile",
            "delattr",
            "open",
            "eval",
            "exec",
            "execfile",
            "exit",
            "dir",
            "globals",
            "locals",
            "input",
            "iter",
            "next",
            "quit",
            "setattr",
            "vars",
            "memoryview",
            "super",
            "print",
        ],
    }
    # Flatten module names plus their attributes into one O(1)-lookup set.
    BLACK_LIST_METHODS = []
    for module_name, methods in BLACK_LIST_MODULE_METHODS.items():
        BLACK_LIST_METHODS.append(module_name)
        BLACK_LIST_METHODS.extend(methods)
    BLACK_LIST_METHODS = set(BLACK_LIST_METHODS)
    # Modules that template code is allowed to import.
    WHITE_LIST_MODULES = ["datetime", "re", "random", "json", "math"]

    def __init__(self, black_list_methods=None, white_list_modules=None):
        # Callers may override the class-level defaults per instance.
        self.black_list_methods = black_list_methods or self.BLACK_LIST_METHODS
        self.white_list_modules = white_list_modules or self.WHITE_LIST_MODULES

    @staticmethod
    def is_white_list_ast_obj(ast_obj: _ast.AST) -> bool:
        """
        Return True for nodes that are specially exempted from the blacklist.

        :param ast_obj: AST node to inspect
        :return: bool
        """
        # re.compile is allowed even though "compile" is a blacklisted builtin.
        if isinstance(ast_obj, _ast.Attribute) and isinstance(ast_obj.value, _ast.Name):
            if ast_obj.value.id == "re" and ast_obj.attr in ["compile"]:
                return True
        return False

    def visit_Attribute(self, node):
        if self.is_white_list_ast_obj(node):
            return
        if node.attr in self.black_list_methods or node.attr.startswith("_"):
            raise ForbiddenMakoTemplateException("Mako template forbidden.")
        # BUGFIX: keep walking into node.value — without this, inner parts of a
        # chained access (e.g. the "_secret" in obj._secret.name) were never
        # inspected and could bypass the blacklist entirely.
        self.generic_visit(node)

    def visit_Name(self, node):
        if node.id in self.black_list_methods or node.id.startswith("_"):
            raise ForbiddenMakoTemplateException("Mako template forbidden.")

    def visit_Import(self, node):
        for name in node.names:
            if name.name not in self.white_list_modules:
                raise ForbiddenMakoTemplateException("Mako template forbidden.")

    def visit_ImportFrom(self, node):
        # NOTE(review): for "from X import Y" this checks the imported names Y
        # against the module whitelist, not the module X — presumably
        # intentional extra strictness; confirm before changing.
        self.visit_Import(node)

View File

@@ -1,19 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
class Representable:
    """Mixin giving instances a debug-friendly "<ClassName: {attributes}>" string form."""

    def __str__(self):
        class_name = self.__class__.__name__
        return "<%s: %s>" % (class_name, self.__dict__)

    # Bind repr to the very same function so both forms always agree.
    __repr__ = __str__

View File

@@ -1,75 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
# 字符串处理类工具
import uuid
# Raw characters mapped to their escaped (backslash) textual form.
ESCAPED_CHARS = {"\n": r"\n", "\r": r"\r", "\t": r"\t"}


def transform_escape_char(string: str) -> str:
    """Escape raw \\n, \\r and \\t characters in *string*.

    Non-string input is returned untouched.  A string that already contains
    any escaped sequence (a literal backslash-n/r/t) is assumed to be escaped
    and is returned as-is.
    """
    if not isinstance(string, str):
        return string
    # Already escaped: at least one literal escape sequence is present.
    if any(escaped in string for escaped in ESCAPED_CHARS.values()):
        return string
    for raw_char, escaped in ESCAPED_CHARS.items():
        if raw_char in string:
            string = string.replace(raw_char, escaped)
    return string
def format_var_key(key: str) -> str:
    """Wrap a bare variable name in mako-style markers: key -> ${key}.

    :param key: bare variable name
    :return: the key wrapped as "${key}"
    """
    return "${%s}" % key
def deformat_var_key(key: str) -> str:
    """Strip mako-style markers from a key: ${key} -> key.

    :param key: key in "${...}" form
    :return: the bare key name
    """
    # Drop the leading "${" and the trailing "}".
    return key[2:-1]
def unique_id(prefix: str) -> str:
    """Return a unique id: a one-character prefix followed by a uuid4 hex string.

    :param prefix: exactly one character, tagging the id's origin
    :raises ValueError: when *prefix* is not exactly one character long
    """
    if len(prefix) != 1:
        raise ValueError("prefix length must be 1")
    suffix = uuid.uuid4().hex
    return "{}{}".format(prefix, suffix)
def get_lower_case_name(text: str) -> str:
    """Convert a CamelCase identifier to snake_case.

    An underscore is inserted before every uppercase letter except a leading
    one, then the whole result is lower-cased.  Runs of capitals split
    letter-by-letter ("HTTPServer" -> "h_t_t_p_server").
    """
    return "".join(
        "_" + char if index != 0 and char.isupper() else char
        for index, char in enumerate(text)
    ).lower()

View File

@@ -1,14 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from .api import validate_and_process_pipeline # noqa

View File

@@ -1,49 +0,0 @@
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from bamboo_engine.eri import NodeType
from bamboo_engine import exceptions
from . import rules
from .connection import (
validate_graph_connection,
validate_graph_without_circle,
)
from .gateway import validate_gateways, validate_stream
from .utils import format_pipeline_tree_io_to_list
def validate_and_process_pipeline(pipeline: dict, cycle_tolerate=False):
    """Recursively validate a pipeline tree and normalise its io fields to list form.

    Subprocess pipelines are validated first (depth-first), then the tree's io
    format is converted and the graph itself is checked: connections,
    (optionally) absence of cycles, gateways, and stream.

    :param pipeline: pipeline tree dict
    :param cycle_tolerate: when False, a cycle in the flow raises
    :raises bamboo_engine.exceptions.TreeInvalidException: when a cycle is found
    """
    subprocesses = (
        act for act in pipeline["activities"].values() if act["type"] == NodeType.SubProcess.value
    )
    for subproc in subprocesses:
        validate_and_process_pipeline(subproc["pipeline"], cycle_tolerate)

    format_pipeline_tree_io_to_list(pipeline)

    # 1. connection validation
    validate_graph_connection(pipeline)

    # do not tolerate circle in flow
    if not cycle_tolerate:
        cycle_check = validate_graph_without_circle(pipeline)
        if not cycle_check["result"]:
            raise exceptions.TreeInvalidException(cycle_check["message"])

    # 2. gateway validation
    validate_gateways(pipeline)
    # 3. stream validation
    validate_stream(pipeline)
def add_sink_type(node_type: str):
    """Register *node_type* as a sink node: it may end a flow without a start
    event and is validated with the SINK_RULE."""
    rules.NODE_RULES[node_type] = rules.SINK_RULE
    rules.FLOW_NODES_WITHOUT_STARTEVENT.append(node_type)

Some files were not shown because too many files have changed in this diff Show More