diff --git a/README.md b/README.md index 3cafa18..e69de29 100644 --- a/README.md +++ b/README.md @@ -1,50 +0,0 @@ -# 任务调度平台 -![image](https://user-images.githubusercontent.com/29135056/155830656-968f5881-5729-4347-94fc-b5a657ea9725.png) - -## todo list - - [ ] 变量管理-模型设计 - - [ ] 变量管理-crud接口 - - [ ] 变量管理-前端页面 - - [ ] 变量管理-接口对接 - - [ ] 变量管理-变量集成到任务中,调整引擎中节点变量传递 - - - - [ ] 任务管理-新增删除查看 - - [ ] 任务管理-考虑运行中的任务,可不可修改 - - - [ ] 首页-聚合数据接口,高纬度展示图表 - - - [ ] 节点管理- 区分标准节点-和节点模版 - - [ ] 节点管理- 编辑时的实时预览功能 - - [ ] 节点管理- 自定节点组建后端代码逻辑的上传,和持久化 - - [ ] 节点管理- 返回值规范未定义 - - [ ] 节点管理- 克隆/导入/导出 优先级降低 - - [ ] 节点管理- Table字段梳理,前端冗余代码删减 - - [ ] 节点管理- 搜索过滤功能 - - [ ] 节点管理- 新建作业/导入作业 统一移动到作业列表页 - - - [ ] 作业流管理- 新建作业流/导入作业流业 统一移动到作业流列表页 - - [ ] 作业流管理- 分类接口,作业流关联 - - [ ] 作业流管理- 克隆/导入/导出 优先级降低 - - [ ] 作业流管理- 跳转到执行历史 - - [ ] 作业流管理- 删除调度方式,移至任务里 - - [ ] 作业流管理- 大流程的作业流创建失败 bug - - - [ ] 任务管理- 新建任务 - - [ ] 任务管理- 执行任务 - - [ ] 任务管理- 定时任务和周期任务 - - - [ ] 变量管理- 模型设计 - - [ ] 变量管理- 全局变量,局部变量,可变变量,常量 - - [ ] 变量管理- 集成进任务里 - - - [ ] 作业监视- 暂停,停止,跳过,忽略,等人工干预操作 - - [ ] 作业监视- 节点重试功能 - - [ ] 作业监视- Table字段梳理,前端冗余代码删减 - - [ ] 作业监视- 搜索过滤功能 - - [ ] 作业监视- 失败状态保存,失败状态判断 - - - [ ] 告警管理- 规划中 - - [ ] 审计管理- 规划中 -## install tips -sudo apt-get install libmysqlclient-dev -python3-dev \ No newline at end of file diff --git a/applications/flow/__init__.py b/applications/flow/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/applications/flow/admin.py b/applications/flow/admin.py deleted file mode 100644 index 8c38f3f..0000000 --- a/applications/flow/admin.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.contrib import admin - -# Register your models here. 
diff --git a/applications/flow/apps.py b/applications/flow/apps.py deleted file mode 100644 index a702df1..0000000 --- a/applications/flow/apps.py +++ /dev/null @@ -1,6 +0,0 @@ -from django.apps import AppConfig - - -class FlowConfig(AppConfig): - default_auto_field = 'django.db.models.BigAutoField' - name = 'applications.flow' diff --git a/applications/flow/constants.py b/applications/flow/constants.py deleted file mode 100644 index 19720a4..0000000 --- a/applications/flow/constants.py +++ /dev/null @@ -1,70 +0,0 @@ -FAIL_OFFSET_UNIT_CHOICE = ( - ("seconds", "秒"), - ("hours", "时"), - ("minutes", "分"), - -) -node_type = ( - (0, "开始节点"), - (1, "结束节点"), - (2, "作业节点"), - (3, "子流程"), - (4, "条件分支"), - (5, "汇聚网关"), -) - - -class StateType(object): - CREATED = "CREATED" - READY = "READY" - RUNNING = "RUNNING" - SUSPENDED = "SUSPENDED" - BLOCKED = "BLOCKED" - FINISHED = "FINISHED" - FAILED = "FAILED" - REVOKED = "REVOKED" - - -PIPELINE_STATE_TO_FLOW_STATE = { - StateType.READY: "wait", - StateType.RUNNING: "run", - StateType.FAILED: "fail", - StateType.FINISHED: "success", - StateType.SUSPENDED: "pause", - StateType.REVOKED: "cancel", - StateType.BLOCKED: "stop", - StateType.CREATED: "positive", - -} - - -class NodeTemplateType: - # 空节点模板 - EmptyTemplate = "0" - # 带内容的节点模板 - ContentTemplate = "2" - - -a = [ - {"key": "url", "type": "textarea", "label": "请求地址:"}, - {"key": "method", "type": "select", "label": "请求类型:", "choices": [{"label": "GET", "value": "get"}]}, - {"key": "header", "type": "dict_map", "label": "Header"}, - {"key": "body", "type": "textarea", "label": "Body:"}, - {"key": "timeout", "type": "number", "label": "超时时间:"} -] -i = { - "url": "", - "method": "get", - "header": [ - { - "key": "", - "value": "" - }], - "body": "{}", - "timeout": 60, - "check_point": { - "key": "", - "condition": "", - "values": "" - } -} diff --git a/applications/flow/filters.py b/applications/flow/filters.py deleted file mode 100644 index 5fac4dc..0000000 --- 
a/applications/flow/filters.py +++ /dev/null @@ -1,5 +0,0 @@ -import django_filters as filters - - -class NodeTemplateFilter(filters.FilterSet): - template_type = filters.CharFilter(lookup_expr="iexact") diff --git a/applications/flow/migrations/0001_initial.py b/applications/flow/migrations/0001_initial.py deleted file mode 100644 index 4a34c1e..0000000 --- a/applications/flow/migrations/0001_initial.py +++ /dev/null @@ -1,112 +0,0 @@ -# Generated by Django 2.2.6 on 2022-02-09 03:29 - -import datetime -from django.db import migrations, models -import django.db.models.deletion -import django_mysql.models - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ] - - operations = [ - migrations.CreateModel( - name='Category', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=255, verbose_name='分类名称')), - ], - ), - migrations.CreateModel( - name='Process', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=255, verbose_name='作业名称')), - ('description', models.CharField(blank=True, max_length=255, null=True, verbose_name='作业描述')), - ('run_type', models.CharField(max_length=32, verbose_name='调度类型')), - ('total_run_count', models.PositiveIntegerField(default=0, verbose_name='执行次数')), - ('gateways', django_mysql.models.JSONField(default=dict, verbose_name='网关信息')), - ('constants', django_mysql.models.JSONField(default=dict, verbose_name='内部变量信息')), - ('dag', django_mysql.models.JSONField(default=dict, verbose_name='DAG')), - ('create_by', models.CharField(max_length=64, null=True, verbose_name='创建者')), - ('create_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='创建时间')), - ('update_time', models.DateTimeField(auto_now=True, verbose_name='修改时间')), - ('update_by', models.CharField(max_length=64, null=True, 
verbose_name='修改人')), - ('category', models.ManyToManyField(to='flow.Category')), - ], - ), - migrations.CreateModel( - name='ProcessRun', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=255, verbose_name='作业名称')), - ('description', models.CharField(blank=True, max_length=255, null=True, verbose_name='作业描述')), - ('run_type', models.CharField(max_length=32, verbose_name='调度类型')), - ('gateways', django_mysql.models.JSONField(default=dict, verbose_name='网关信息')), - ('constants', django_mysql.models.JSONField(default=dict, verbose_name='内部变量信息')), - ('dag', django_mysql.models.JSONField(default=dict, verbose_name='DAG')), - ('create_by', models.CharField(max_length=64, null=True, verbose_name='创建者')), - ('create_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='创建时间')), - ('update_time', models.DateTimeField(auto_now=True, verbose_name='修改时间')), - ('update_by', models.CharField(max_length=64, null=True, verbose_name='修改人')), - ('root_id', models.CharField(max_length=255, verbose_name='根节点uuid')), - ('process', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='run', to='flow.Process')), - ], - ), - migrations.CreateModel( - name='NodeRun', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=255, verbose_name='节点名称')), - ('uuid', models.CharField(max_length=255, unique=True, verbose_name='UUID')), - ('description', models.CharField(blank=True, max_length=255, null=True, verbose_name='节点描述')), - ('show', models.BooleanField(default=True, verbose_name='是否显示')), - ('top', models.IntegerField(default=300)), - ('left', models.IntegerField(default=300)), - ('ico', models.CharField(blank=True, max_length=64, null=True, verbose_name='icon')), - ('fail_retry_count', models.IntegerField(default=0, 
verbose_name='失败重试次数')), - ('fail_offset', models.IntegerField(default=0, verbose_name='失败重试间隔')), - ('fail_offset_unit', models.CharField(choices=[('seconds', '秒'), ('hours', '时'), ('minutes', '分')], max_length=32, verbose_name='重试间隔单位')), - ('node_type', models.IntegerField(default=2)), - ('component_code', models.CharField(max_length=255, verbose_name='插件名称')), - ('is_skip_fail', models.BooleanField(default=False, verbose_name='忽略失败')), - ('is_timeout_alarm', models.BooleanField(default=False, verbose_name='超时告警')), - ('inputs', django_mysql.models.JSONField(default=dict, verbose_name='输入参数')), - ('outputs', django_mysql.models.JSONField(default=dict, verbose_name='输出参数')), - ('process_run', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='nodes_run', to='flow.ProcessRun')), - ], - options={ - 'abstract': False, - }, - ), - migrations.CreateModel( - name='Node', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=255, verbose_name='节点名称')), - ('uuid', models.CharField(max_length=255, unique=True, verbose_name='UUID')), - ('description', models.CharField(blank=True, max_length=255, null=True, verbose_name='节点描述')), - ('show', models.BooleanField(default=True, verbose_name='是否显示')), - ('top', models.IntegerField(default=300)), - ('left', models.IntegerField(default=300)), - ('ico', models.CharField(blank=True, max_length=64, null=True, verbose_name='icon')), - ('fail_retry_count', models.IntegerField(default=0, verbose_name='失败重试次数')), - ('fail_offset', models.IntegerField(default=0, verbose_name='失败重试间隔')), - ('fail_offset_unit', models.CharField(choices=[('seconds', '秒'), ('hours', '时'), ('minutes', '分')], max_length=32, verbose_name='重试间隔单位')), - ('node_type', models.IntegerField(default=2)), - ('component_code', models.CharField(max_length=255, verbose_name='插件名称')), - ('is_skip_fail', 
models.BooleanField(default=False, verbose_name='忽略失败')), - ('is_timeout_alarm', models.BooleanField(default=False, verbose_name='超时告警')), - ('inputs', django_mysql.models.JSONField(default=dict, verbose_name='输入参数')), - ('outputs', django_mysql.models.JSONField(default=dict, verbose_name='输出参数')), - ('process', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='nodes', to='flow.Process')), - ], - options={ - 'abstract': False, - }, - ), - ] diff --git a/applications/flow/migrations/0002_nodetemplate.py b/applications/flow/migrations/0002_nodetemplate.py deleted file mode 100644 index 84eca0e..0000000 --- a/applications/flow/migrations/0002_nodetemplate.py +++ /dev/null @@ -1,40 +0,0 @@ -# Generated by Django 2.2.6 on 2022-02-10 14:21 - -from django.db import migrations, models -import django_mysql.models - - -class Migration(migrations.Migration): - - dependencies = [ - ('flow', '0001_initial'), - ] - - operations = [ - migrations.CreateModel( - name='NodeTemplate', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=255, verbose_name='节点名称')), - ('uuid', models.CharField(max_length=255, unique=True, verbose_name='UUID')), - ('description', models.CharField(blank=True, max_length=255, null=True, verbose_name='节点描述')), - ('show', models.BooleanField(default=True, verbose_name='是否显示')), - ('top', models.IntegerField(default=300)), - ('left', models.IntegerField(default=300)), - ('ico', models.CharField(blank=True, max_length=64, null=True, verbose_name='icon')), - ('fail_retry_count', models.IntegerField(default=0, verbose_name='失败重试次数')), - ('fail_offset', models.IntegerField(default=0, verbose_name='失败重试间隔')), - ('fail_offset_unit', models.CharField(choices=[('seconds', '秒'), ('hours', '时'), ('minutes', '分')], max_length=32, verbose_name='重试间隔单位')), - ('node_type', models.IntegerField(default=2)), - 
('component_code', models.CharField(max_length=255, verbose_name='插件名称')), - ('is_skip_fail', models.BooleanField(default=False, verbose_name='忽略失败')), - ('is_timeout_alarm', models.BooleanField(default=False, verbose_name='超时告警')), - ('inputs', django_mysql.models.JSONField(default=dict, verbose_name='输入参数')), - ('outputs', django_mysql.models.JSONField(default=dict, verbose_name='输出参数')), - ('template_type', models.CharField(default='2', max_length=1, verbose_name='节点模板类型')), - ], - options={ - 'abstract': False, - }, - ), - ] diff --git a/applications/flow/migrations/0003_auto_20220210_1737.py b/applications/flow/migrations/0003_auto_20220210_1737.py deleted file mode 100644 index 64229b0..0000000 --- a/applications/flow/migrations/0003_auto_20220210_1737.py +++ /dev/null @@ -1,28 +0,0 @@ -# Generated by Django 2.2.6 on 2022-02-10 17:37 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('flow', '0002_nodetemplate'), - ] - - operations = [ - migrations.AddField( - model_name='node', - name='content', - field=models.IntegerField(default=0, verbose_name='模板id'), - ), - migrations.AddField( - model_name='noderun', - name='content', - field=models.IntegerField(default=0, verbose_name='模板id'), - ), - migrations.AddField( - model_name='nodetemplate', - name='content', - field=models.IntegerField(default=0, verbose_name='模板id'), - ), - ] diff --git a/applications/flow/migrations/0004_auto_20220226_1202.py b/applications/flow/migrations/0004_auto_20220226_1202.py deleted file mode 100644 index ba15168..0000000 --- a/applications/flow/migrations/0004_auto_20220226_1202.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by Django 2.2.6 on 2022-02-26 12:02 - -from django.db import migrations -import django_mysql.models - - -class Migration(migrations.Migration): - - dependencies = [ - ('flow', '0003_auto_20220210_1737'), - ] - - operations = [ - migrations.AddField( - model_name='nodetemplate', - 
name='inputs_component', - field=django_mysql.models.JSONField(default=list, verbose_name='前端参数组件'), - ), - migrations.AddField( - model_name='nodetemplate', - name='outputs_component', - field=django_mysql.models.JSONField(default=list, verbose_name='前端参数组件'), - ), - ] diff --git a/applications/flow/migrations/0005_subnoderun_subprocessrun.py b/applications/flow/migrations/0005_subnoderun_subprocessrun.py deleted file mode 100644 index c2b3ae3..0000000 --- a/applications/flow/migrations/0005_subnoderun_subprocessrun.py +++ /dev/null @@ -1,62 +0,0 @@ -# Generated by Django 2.2.6 on 2022-06-16 16:14 - -import datetime -from django.db import migrations, models -import django.db.models.deletion -import django_mysql.models - - -class Migration(migrations.Migration): - - dependencies = [ - ('flow', '0004_auto_20220226_1202'), - ] - - operations = [ - migrations.CreateModel( - name='SubProcessRun', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=255, verbose_name='作业名称')), - ('description', models.CharField(blank=True, max_length=255, null=True, verbose_name='作业描述')), - ('run_type', models.CharField(max_length=32, verbose_name='调度类型')), - ('gateways', django_mysql.models.JSONField(default=dict, verbose_name='网关信息')), - ('constants', django_mysql.models.JSONField(default=dict, verbose_name='内部变量信息')), - ('dag', django_mysql.models.JSONField(default=dict, verbose_name='DAG')), - ('create_by', models.CharField(max_length=64, null=True, verbose_name='创建者')), - ('create_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='创建时间')), - ('update_time', models.DateTimeField(auto_now=True, verbose_name='修改时间')), - ('update_by', models.CharField(max_length=64, null=True, verbose_name='修改人')), - ('root_id', models.CharField(max_length=255, verbose_name='根节点uuid')), - ('process', models.ForeignKey(db_constraint=False, null=True, 
on_delete=django.db.models.deletion.SET_NULL, related_name='sub_run', to='flow.Process')), - ('process_run', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='sub', to='flow.Process')), - ], - ), - migrations.CreateModel( - name='SubNodeRun', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=255, verbose_name='节点名称')), - ('uuid', models.CharField(max_length=255, unique=True, verbose_name='UUID')), - ('description', models.CharField(blank=True, max_length=255, null=True, verbose_name='节点描述')), - ('show', models.BooleanField(default=True, verbose_name='是否显示')), - ('top', models.IntegerField(default=300)), - ('left', models.IntegerField(default=300)), - ('ico', models.CharField(blank=True, max_length=64, null=True, verbose_name='icon')), - ('fail_retry_count', models.IntegerField(default=0, verbose_name='失败重试次数')), - ('fail_offset', models.IntegerField(default=0, verbose_name='失败重试间隔')), - ('fail_offset_unit', models.CharField(choices=[('seconds', '秒'), ('hours', '时'), ('minutes', '分')], max_length=32, verbose_name='重试间隔单位')), - ('node_type', models.IntegerField(default=2)), - ('component_code', models.CharField(max_length=255, verbose_name='插件名称')), - ('is_skip_fail', models.BooleanField(default=False, verbose_name='忽略失败')), - ('is_timeout_alarm', models.BooleanField(default=False, verbose_name='超时告警')), - ('inputs', django_mysql.models.JSONField(default=dict, verbose_name='输入参数')), - ('outputs', django_mysql.models.JSONField(default=dict, verbose_name='输出参数')), - ('content', models.IntegerField(default=0, verbose_name='模板id')), - ('subprocess_run', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='sub_nodes_run', to='flow.SubProcessRun')), - ], - options={ - 'abstract': False, - }, - ), - ] diff --git 
a/applications/flow/migrations/0006_auto_20220616_1616.py b/applications/flow/migrations/0006_auto_20220616_1616.py deleted file mode 100644 index cc85945..0000000 --- a/applications/flow/migrations/0006_auto_20220616_1616.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by Django 2.2.6 on 2022-06-16 16:16 - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ('flow', '0005_subnoderun_subprocessrun'), - ] - - operations = [ - migrations.AlterField( - model_name='noderun', - name='process_run', - field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='nodes_run', to='flow.ProcessRun'), - ), - migrations.AlterField( - model_name='subnoderun', - name='subprocess_run', - field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='sub_nodes_run', to='flow.SubProcessRun'), - ), - ] diff --git a/applications/flow/migrations/__init__.py b/applications/flow/migrations/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/applications/flow/models.py b/applications/flow/models.py deleted file mode 100644 index cdfabd6..0000000 --- a/applications/flow/models.py +++ /dev/null @@ -1,142 +0,0 @@ -from datetime import datetime - -from django.db import models -from django_mysql.models import JSONField - -from applications.flow.constants import FAIL_OFFSET_UNIT_CHOICE, NodeTemplateType - - -class Category(models.Model): - name = models.CharField("分类名称", max_length=255, blank=False, null=False) - - -class Process(models.Model): - name = models.CharField("作业名称", max_length=255, blank=False, null=False) - description = models.CharField("作业描述", max_length=255, blank=True, null=True) - category = models.ManyToManyField(Category) - run_type = models.CharField("调度类型", max_length=32) - total_run_count = models.PositiveIntegerField("执行次数", default=0) - gateways = 
JSONField("网关信息", default=dict) - constants = JSONField("内部变量信息", default=dict) - dag = JSONField("DAG", default=dict) - - create_by = models.CharField("创建者", max_length=64, null=True) - create_time = models.DateTimeField("创建时间", default=datetime.now) - update_time = models.DateTimeField("修改时间", auto_now=True) - update_by = models.CharField("修改人", max_length=64, null=True) - - @property - def clone_data(self): - return { - "name": self.name, - "description": self.description, - "run_type": self.run_type, - "gateways": self.gateways, - "constants": self.constants, - "dag": self.dag, - } - - -class BaseNode(models.Model): - START_NODE = 0 - END_NODE = 1 - JOB_NODE = 2 - SUB_PROCESS_NODE = 3 - CONDITION_NODE = 4 - CONVERGE_NODE = 5 - PARALLEL_NODE = 6 - CONDITION_PARALLEL_NODE = 7 - name = models.CharField("节点名称", max_length=255, blank=False, null=False) - uuid = models.CharField("UUID", max_length=255, unique=True) - description = models.CharField("节点描述", max_length=255, blank=True, null=True) - - show = models.BooleanField("是否显示", default=True) - top = models.IntegerField(default=300) - left = models.IntegerField(default=300) - ico = models.CharField("icon", max_length=64, blank=True, null=True) - - fail_retry_count = models.IntegerField("失败重试次数", default=0) - fail_offset = models.IntegerField("失败重试间隔", default=0) - fail_offset_unit = models.CharField("重试间隔单位", choices=FAIL_OFFSET_UNIT_CHOICE, max_length=32) - # 0:开始节点,1:结束节点,2:作业节点,3:其他作业流 4:分支,5:汇聚.6:并行 - node_type = models.IntegerField(default=2) - component_code = models.CharField("插件名称", max_length=255, blank=False, null=False) - is_skip_fail = models.BooleanField("忽略失败", default=False) - is_timeout_alarm = models.BooleanField("超时告警", default=False) - - inputs = JSONField("输入参数", default=dict) - outputs = JSONField("输出参数", default=dict) - # 如为子流程content为process id, 如为 节点模板为node id - content = models.IntegerField("模板id", default=0) - - class Meta: - abstract = True - - -class Node(BaseNode): - process = 
models.ForeignKey(Process, on_delete=models.SET_NULL, null=True, db_constraint=False, - related_name="nodes") - - -class ProcessRun(models.Model): - # new - process = models.ForeignKey(Process, on_delete=models.SET_NULL, null=True, db_constraint=False, - related_name="run") - - name = models.CharField("作业名称", max_length=255, blank=False, null=False) - description = models.CharField("作业描述", max_length=255, blank=True, null=True) - run_type = models.CharField("调度类型", max_length=32) - gateways = JSONField("网关信息", default=dict) - constants = JSONField("内部变量信息", default=dict) - dag = JSONField("DAG", default=dict) - - create_by = models.CharField("创建者", max_length=64, null=True) - create_time = models.DateTimeField("创建时间", default=datetime.now) - update_time = models.DateTimeField("修改时间", auto_now=True) - update_by = models.CharField("修改人", max_length=64, null=True) - - root_id = models.CharField("根节点uuid", max_length=255) - - -class SubProcessRun(models.Model): - process_run = models.ForeignKey(Process, on_delete=models.CASCADE, null=True, db_constraint=False, - related_name="sub") - process = models.ForeignKey(Process, on_delete=models.SET_NULL, null=True, db_constraint=False, - related_name="sub_run") - name = models.CharField("作业名称", max_length=255, blank=False, null=False) - description = models.CharField("作业描述", max_length=255, blank=True, null=True) - run_type = models.CharField("调度类型", max_length=32) - gateways = JSONField("网关信息", default=dict) - constants = JSONField("内部变量信息", default=dict) - dag = JSONField("DAG", default=dict) - - create_by = models.CharField("创建者", max_length=64, null=True) - create_time = models.DateTimeField("创建时间", default=datetime.now) - update_time = models.DateTimeField("修改时间", auto_now=True) - update_by = models.CharField("修改人", max_length=64, null=True) - - root_id = models.CharField("根节点uuid", max_length=255) - - -class SubNodeRun(BaseNode): - subprocess_run = models.ForeignKey(SubProcessRun, on_delete=models.CASCADE, null=True, 
db_constraint=False, - related_name="sub_nodes_run") - - @staticmethod - def field_names(): - return [field.name for field in NodeRun._meta.get_fields() if field.name not in ["id"]] - - -class NodeRun(BaseNode): - process_run = models.ForeignKey(ProcessRun, on_delete=models.CASCADE, null=True, db_constraint=False, - related_name="nodes_run") - - @staticmethod - def field_names(): - return [field.name for field in NodeRun._meta.get_fields() if field.name not in ["id"]] - - -class NodeTemplate(BaseNode): - template_type = models.CharField("节点模板类型", max_length=1, default=NodeTemplateType.ContentTemplate) - inputs_component = JSONField("前端参数组件", default=list) - outputs_component = JSONField("前端参数组件", default=list) diff --git a/applications/flow/serializers.py b/applications/flow/serializers.py deleted file mode 100644 index 8db7e9b..0000000 --- a/applications/flow/serializers.py +++ /dev/null @@ -1,362 +0,0 @@ -import json - -from bamboo_engine import api -from django.db import transaction - -from pipeline.eri.models import State -from pipeline.eri.runtime import BambooDjangoRuntime -from rest_framework import serializers - -from applications.flow.constants import PIPELINE_STATE_TO_FLOW_STATE -from applications.flow.models import Process, Node, ProcessRun, NodeRun, NodeTemplate, SubProcessRun, SubNodeRun -from applications.utils.uuid_helper import get_uuid - - -class ProcessViewSetsSerializer(serializers.Serializer): - name = serializers.CharField(required=True) - description = serializers.CharField(required=False, allow_blank=True) - category = serializers.ListField(default="null") - run_type = serializers.CharField(default="null") - pipeline_tree = serializers.JSONField(required=True) - - def save(self, **kwargs): - if self.instance is not None: - self.update(instance=self.instance, validated_data=self.validated_data) - else: - self.create(validated_data=self.validated_data) - - def create(self, validated_data): - node_map = {} - for node in 
validated_data["pipeline_tree"]["nodes"]: - node_map[node["uuid"]] = node - dag = {k: [] for k in node_map.keys()} - for line in self.validated_data["pipeline_tree"]["lines"]: - dag[line["from"]].append(line["to"]) - with transaction.atomic(): - process = Process.objects.create(name=validated_data["name"], - description=validated_data["description"], - run_type=validated_data["run_type"], - dag=dag) - bulk_nodes = [] - for node in node_map.values(): - node_data = node["node_data"] - if isinstance(node_data.get("inputs", {}), dict): - node_inputs = node_data.get("inputs", {}) - else: - node_inputs = json.loads(node_data["inputs"]) - bulk_nodes.append(Node(process=process, - name=node_data["node_name"], - uuid=node["uuid"], - description=node_data["description"], - fail_retry_count=node_data.get("fail_retry_count", 0) or 0, - fail_offset=node_data.get("fail_offset", 0) or 0, - fail_offset_unit=node_data.get("fail_offset_unit", "seconds"), - node_type=node.get("type", 2), - is_skip_fail=node_data["is_skip_fail"], - is_timeout_alarm=node_data["is_skip_fail"], - inputs=node_inputs, - show=node["show"], - top=node["top"], - left=node["left"], - ico=node["ico"], - outputs={}, - component_code="http_request", - content=node.get("content", 0) or 0 - )) - Node.objects.bulk_create(bulk_nodes, batch_size=500) - self._data = {} - - def update(self, instance, validated_data): - node_map = {} - for node in validated_data["pipeline_tree"]["nodes"]: - node_map[node["uuid"]] = node - dag = {k: [] for k in node_map.keys()} - for line in self.validated_data["pipeline_tree"]["lines"]: - dag[line["from"]].append(line["to"]) - with transaction.atomic(): - instance.name = validated_data["name"] - instance.description = validated_data["description"] - instance.run_type = validated_data["run_type"] - instance.dag = dag - instance.save() - bulk_update_nodes = [] - bulk_create_nodes = [] - node_dict = Node.objects.filter(process_id=instance.id).in_bulk(field_name="uuid") - for node in 
node_map.values(): - node_data = node["node_data"] - node_obj = node_dict.get(node["uuid"], None) - if isinstance(node_data.get("inputs", {}), dict): - node_inputs = node_data.get("inputs", {}) - else: - node_inputs = json.loads(node_data["inputs"]) - if node_obj: - node_obj.content = node.get("content", 0) or 0 - node_obj.name = node_data["node_name"] - node_obj.description = node_data["description"] - node_obj.fail_retry_count = node_data.get("fail_retry_count", 0) or 0 - node_obj.fail_offset = node_data.get("fail_offset", 0) or 0 - node_obj.fail_offset_unit = node_data.get("fail_offset_unit", "seconds") - node_obj.node_type = node.get("type", 3) - node_obj.is_skip_fail = node_data["is_skip_fail"] - node_obj.is_timeout_alarm = node_data["is_timeout_alarm"] - node_obj.inputs = node_inputs - node_obj.show = node["show"] - node_obj.top = node["top"] - node_obj.left = node["left"] - node_obj.ico = node["ico"] - node_obj.outputs = {} - node_obj.component_code = "http_request" - bulk_update_nodes.append(node_obj) - else: - node_obj = Node() - node_obj.content = node.get("content", 0) or 0 - node_obj.name = node_data["node_name"] - node_obj.description = node_data["description"] - node_obj.fail_retry_count = node_data.get("fail_retry_count", 0) or 0 - node_obj.fail_offset = node_data.get("fail_offset", 0) or 0 - node_obj.fail_offset_unit = node_data.get("fail_offset_unit", "seconds") - node_obj.node_type = node.get("type", 3) - node_obj.is_skip_fail = node_data["is_skip_fail"] - node_obj.is_timeout_alarm = node_data["is_timeout_alarm"] - node_obj.inputs = node_inputs - node_obj.show = node["show"] - node_obj.top = node["top"] - node_obj.left = node["left"] - node_obj.ico = node["ico"] - node_obj.outputs = {} - node_obj.component_code = "http_request" - node_obj.uuid = node["uuid"] - node_obj.process_id = instance.id - bulk_create_nodes.append(node_obj) - Node.objects.bulk_update(bulk_update_nodes, - fields=["name", "description", "fail_retry_count", "fail_offset", - 
"fail_offset_unit", "node_type", "is_skip_fail", - "is_timeout_alarm", "inputs", "show", "top", "left", "ico", - "outputs", "component_code"], batch_size=500) - Node.objects.bulk_create(bulk_create_nodes, batch_size=500) - self._data = {} - - -class ListProcessViewSetsSerializer(serializers.ModelSerializer): - class Meta: - model = Process - # fields = "__all__" - exclude = ("dag",) - - -class ListProcessRunViewSetsSerializer(serializers.ModelSerializer): - state = serializers.SerializerMethodField() - - class Meta: - model = ProcessRun - fields = "__all__" - - def get_state(self, obj): - runtime = BambooDjangoRuntime() - process_info = api.get_pipeline_states(runtime, root_id=obj.root_id) - try: - process_state = PIPELINE_STATE_TO_FLOW_STATE.get(process_info.data[obj.root_id]["state"]) - except Exception: - process_state = "error" - return process_state - - -class ListSubProcessRunViewSetsSerializer(serializers.ModelSerializer): - state = serializers.SerializerMethodField() - - class Meta: - model = SubProcessRun - fields = "__all__" - - def get_state(self, obj): - runtime = BambooDjangoRuntime() - process_info = api.get_pipeline_states(runtime, root_id=obj.root_id) - try: - process_state = PIPELINE_STATE_TO_FLOW_STATE.get(process_info.data[obj.root_id]["state"]) - except Exception: - process_state = "error" - return process_state - - -class RetrieveProcessViewSetsSerializer(serializers.ModelSerializer): - pipeline_tree = serializers.SerializerMethodField() - - # category = serializers.SerializerMethodField() - # - # def get_category(self, obj): - # return obj.category.all() - - def get_pipeline_tree(self, obj): - lines = [] - nodes = [] - for _from, to_list in obj.dag.items(): - for _to in to_list: - lines.append({ - "from": _from, - "to": _to - }) - node_list = Node.objects.filter(process_id=obj.id).values() - node_content_id = [node["content"] for node in node_list if node.get("content", 0)] - content_map = 
NodeTemplate.objects.filter(id__in=node_content_id).in_bulk() - for node in node_list: - node_template = content_map.get(node.get("content", 0), "") - inputs_component = "" - if node_template: - inputs_component = json.dumps(node_template.inputs_component) - nodes.append({"show": node["show"], - "top": node["top"], - "left": node["left"], - "ico": node["ico"], - "type": node["node_type"], - "name": node["name"], - "content": node["content"], - "node_data": { - "inputs": json.dumps(node["inputs"]), - "inputs_component": inputs_component, - "run_mark": 0, - "node_name": node["name"], - "description": node["description"], - "fail_retry_count": node["fail_retry_count"], - "fail_offset": node["fail_offset"], - "fail_offset_unit": node["fail_offset_unit"], - "is_skip_fail": node["is_skip_fail"], - "is_timeout_alarm": node["is_timeout_alarm"]}, - "uuid": node["uuid"]}) - return {"lines": lines, "nodes": nodes} - - class Meta: - model = Process - fields = ("id", "name", "description", "category", "run_type", "pipeline_tree") - - -class RetrieveProcessRunViewSetsSerializer(serializers.ModelSerializer): - pipeline_tree = serializers.SerializerMethodField() - - def get_pipeline_tree(self, obj): - lines = [] - nodes = [] - for _from, to_list in obj.dag.items(): - for _to in to_list: - lines.append({ - "from": _from, - "to": _to - }) - runtime = BambooDjangoRuntime() - process_info = api.get_pipeline_states(runtime, root_id=obj.root_id) - process_state = PIPELINE_STATE_TO_FLOW_STATE.get(process_info.data[obj.root_id]["state"]) - state_map = process_info.data[obj.root_id]["children"] - node_list = NodeRun.objects.filter(process_run_id=obj.id).values() - for node in node_list: - pipeline_state = state_map.get(node["uuid"], {}).get("state", "READY") - flow_state = PIPELINE_STATE_TO_FLOW_STATE[pipeline_state] - outputs = "" - # print(flow_state) - if node["node_type"] not in [0, 1] and flow_state not in ["wait"]: - output_data = api.get_execution_data_outputs(runtime, 
node_id=node["uuid"]) - outputs = output_data.data.get("outputs", "") - if node["node_type"] == 3: - # todo先简单判断node有fail,process就为fail - if State.objects.filter(parent_id=node["uuid"], name="FAILED").exists(): - flow_state = "fail" - # todo先简单判断node有fail,process就为fail - if flow_state == "fail": - process_state = "fail" - nodes.append({"show": node["show"], - "top": node["top"], - "left": node["left"], - "ico": node["ico"], - "type": node["node_type"], - "name": node["name"], - "state": flow_state, - "content": node["content"], - "node_data": { - "inputs": node["inputs"], - "outputs": outputs, - "run_mark": 0, - "node_name": node["name"], - "description": node["description"], - "fail_retry_count": node["fail_retry_count"], - "fail_offset": node["fail_offset"], - "fail_offset_unit": node["fail_offset_unit"], - "is_skip_fail": node["is_skip_fail"], - "is_timeout_alarm": node["is_timeout_alarm"]}, - "uuid": node["uuid"]}) - return {"lines": lines, "nodes": nodes, "process_state": process_state} - - class Meta: - model = ProcessRun - fields = ("id", "name", "description", "run_type", "pipeline_tree") - - -class RetrieveSubProcessRunViewSetsSerializer(serializers.ModelSerializer): - pipeline_tree = serializers.SerializerMethodField() - - def get_pipeline_tree(self, obj): - lines = [] - nodes = [] - for _from, to_list in obj.dag.items(): - for _to in to_list: - lines.append({ - "from": _from, - "to": _to - }) - runtime = BambooDjangoRuntime() - process_info = api.get_pipeline_states(runtime, root_id=obj.root_id) - process_state = PIPELINE_STATE_TO_FLOW_STATE.get(process_info.data[obj.root_id]["state"]) - state_map = process_info.data[obj.root_id]["children"] - node_list = SubNodeRun.objects.filter(subprocess_run_id=obj.id).values() - for node in node_list: - pipeline_state = state_map.get(node["uuid"], {}).get("state", "READY") - flow_state = PIPELINE_STATE_TO_FLOW_STATE[pipeline_state] - outputs = "" - # print(flow_state) - if node["node_type"] not in [0, 1] and 
flow_state not in ["wait"]: - output_data = api.get_execution_data_outputs(runtime, node_id=node["uuid"]) - outputs = output_data.data.get("outputs", "") - if node["node_type"] == 3: - # todo先简单判断node有fail,process就为fail - if State.objects.filter(parent_id=node["uuid"], name="FAILED").exists(): - flow_state = "fail" - # todo先简单判断node有fail,process就为fail - if flow_state == "fail": - process_state = "fail" - nodes.append({"show": node["show"], - "top": node["top"], - "left": node["left"], - "ico": node["ico"], - "type": node["node_type"], - "name": node["name"], - "state": flow_state, - "content": node["content"], - "node_data": { - "inputs": node["inputs"], - "outputs": outputs, - "run_mark": 0, - "node_name": node["name"], - "description": node["description"], - "fail_retry_count": node["fail_retry_count"], - "fail_offset": node["fail_offset"], - "fail_offset_unit": node["fail_offset_unit"], - "is_skip_fail": node["is_skip_fail"], - "is_timeout_alarm": node["is_timeout_alarm"]}, - "uuid": node["uuid"]}) - return {"lines": lines, "nodes": nodes, "process_state": process_state} - - class Meta: - model = SubProcessRun - fields = ("id", "name", "description", "run_type", "pipeline_tree") - - -class ExecuteProcessSerializer(serializers.Serializer): - process_id = serializers.IntegerField(required=True) - - -class NodeTemplateSerializer(serializers.ModelSerializer): - - def validate(self, attrs): - attrs["uuid"] = get_uuid() - return attrs - - class Meta: - model = NodeTemplate - exclude = ("uuid",) diff --git a/applications/flow/tests.py b/applications/flow/tests.py deleted file mode 100644 index 7ce503c..0000000 --- a/applications/flow/tests.py +++ /dev/null @@ -1,3 +0,0 @@ -from django.test import TestCase - -# Create your tests here. diff --git a/applications/flow/urls.py b/applications/flow/urls.py deleted file mode 100644 index 0198a56..0000000 --- a/applications/flow/urls.py +++ /dev/null @@ -1,12 +0,0 @@ -from rest_framework.routers import DefaultRouter - -from . 
import views - -flow_router = DefaultRouter() -flow_router.register(r"flow", viewset=views.ProcessViewSets, base_name="flow") -flow_router.register(r"run", viewset=views.ProcessRunViewSets, base_name="run") -flow_router.register(r"sub_run", viewset=views.SubProcessRunViewSets, base_name="sub_run") -flow_router.register(r"test", viewset=views.TestViewSets, base_name="test") - -node_router = DefaultRouter() -node_router.register(r"template", viewset=views.NodeTemplateViewSet, base_name="template") diff --git a/applications/flow/utils.py b/applications/flow/utils.py deleted file mode 100644 index 67c1ca0..0000000 --- a/applications/flow/utils.py +++ /dev/null @@ -1,43 +0,0 @@ -from applications.flow.models import ProcessRun, NodeRun, Process, Node, SubProcessRun, SubNodeRun -from applications.utils.dag_helper import PipelineBuilder, instance_dag - - -def build_and_create_process(process_id): - """构建pipeline和创建运行时数据""" - p_builder = PipelineBuilder(process_id) - pipeline = p_builder.build() - - process = p_builder.process - node_map = p_builder.node_map - process_run_uuid = p_builder.instance - - # 保存的实例数据 - process_run_data = process.clone_data - process_run_data["dag"] = instance_dag(process_run_data["dag"], process_run_uuid) - process_run = ProcessRun.objects.create(process_id=process.id, root_id=pipeline["id"], **process_run_data) - node_run_bulk = [] - for pipeline_id, node in node_map.items(): - _node = {k: v for k, v in node.__dict__.items() if k in NodeRun.field_names()} - _node["uuid"] = process_run_uuid[pipeline_id].id - node_run_bulk.append(NodeRun(process_run=process_run, **_node)) - if node.node_type == Node.SUB_PROCESS_NODE: - create_subprocess(node.content, process_run.id, process_run_uuid, pipeline["id"]) - NodeRun.objects.bulk_create(node_run_bulk, batch_size=500) - return pipeline - - -def create_subprocess(process_id, process_run_id, process_run_uuid, root_id): - process = Process.objects.filter(id=process_id).first() - process_run_data = 
process.clone_data - process_run_data["dag"] = instance_dag(process_run_data["dag"], process_run_uuid) - process_run = SubProcessRun.objects.create(process_id=process_id, process_run_id=process_run_id, root_id=root_id, - **process_run_data) - subprocess_node_map = Node.objects.filter(process_id=process_id).in_bulk(field_name="uuid") - node_run_bulk = [] - for pipeline_id, node in subprocess_node_map.items(): - _node = {k: v for k, v in node.__dict__.items() if k in NodeRun.field_names()} - _node["uuid"] = process_run_uuid[pipeline_id].id - node_run_bulk.append(SubNodeRun(subprocess_run=process_run, **_node)) - if node.node_type == Node.SUB_PROCESS_NODE: - create_subprocess(node.content, process_run_id, process_run_uuid, root_id) - SubNodeRun.objects.bulk_create(node_run_bulk, batch_size=500) diff --git a/applications/flow/views.py b/applications/flow/views.py deleted file mode 100644 index 29c0cc6..0000000 --- a/applications/flow/views.py +++ /dev/null @@ -1,144 +0,0 @@ -from datetime import datetime -import random -from django.db.models import F - -from applications.flow.utils import build_and_create_process -from bamboo_engine import api -from bamboo_engine.builder import * -from django.http import JsonResponse -from pipeline.eri.runtime import BambooDjangoRuntime -from rest_framework import mixins -from rest_framework.decorators import action -from rest_framework.response import Response - -from applications.flow.filters import NodeTemplateFilter -from applications.flow.models import Process, Node, ProcessRun, NodeRun, NodeTemplate, SubProcessRun -from applications.flow.serializers import ProcessViewSetsSerializer, ListProcessViewSetsSerializer, \ - RetrieveProcessViewSetsSerializer, ExecuteProcessSerializer, ListProcessRunViewSetsSerializer, \ - RetrieveProcessRunViewSetsSerializer, NodeTemplateSerializer, ListSubProcessRunViewSetsSerializer, \ - RetrieveSubProcessRunViewSetsSerializer -from applications.utils.dag_helper import DAG, instance_dag, 
PipelineBuilder -from component.drf.viewsets import GenericViewSet - - -class ProcessViewSets(mixins.ListModelMixin, - mixins.CreateModelMixin, - mixins.RetrieveModelMixin, - mixins.DestroyModelMixin, - mixins.UpdateModelMixin, - GenericViewSet): - queryset = Process.objects.order_by("-update_time") - - def get_serializer_class(self): - if self.action == "list": - return ListProcessViewSetsSerializer - elif self.action == "retrieve": - return RetrieveProcessViewSetsSerializer - elif self.action == "execute": - return ExecuteProcessSerializer - return ProcessViewSetsSerializer - - @action(methods=["POST"], detail=False) - def execute(self, request, *args, **kwargs): - validated_data = self.is_validated_data(request.data) - process_id = validated_data["process_id"] - pipeline = build_and_create_process(process_id) - # 执行 - runtime = BambooDjangoRuntime() - api.run_pipeline(runtime=runtime, pipeline=pipeline) - - Process.objects.filter(id=process_id).update(total_run_count=F("total_run_count") + 1) - - return Response({}) - - -class ProcessRunViewSets(mixins.ListModelMixin, - mixins.RetrieveModelMixin, - GenericViewSet): - queryset = ProcessRun.objects.order_by("-update_time") - - def get_serializer_class(self): - if self.action == "list": - return ListProcessRunViewSetsSerializer - elif self.action == "retrieve": - return RetrieveProcessRunViewSetsSerializer - elif self.action == "execute": - return ExecuteProcessSerializer - - -class SubProcessRunViewSets(mixins.ListModelMixin, - mixins.RetrieveModelMixin, - GenericViewSet): - queryset = SubProcessRun.objects.order_by("-update_time") - - def get_serializer_class(self): - if self.action == "list": - return ListSubProcessRunViewSetsSerializer - elif self.action == "retrieve": - return RetrieveSubProcessRunViewSetsSerializer - - -class TestViewSets(GenericViewSet): - def list(self, request, *args, **kwargs): - random_list = [1, 1, 1, 1, 1, 1, 1, 1, 1, 0] - sign = random.choice(random_list) - if sign: - return 
Response({"now": datetime.now().strftime("%Y-%m-%d %H:%M:%S"), "data": request.query_params}) - else: - raise Exception("随机抛出异常") - - -class NodeTemplateViewSet(mixins.ListModelMixin, - mixins.CreateModelMixin, - mixins.UpdateModelMixin, - mixins.DestroyModelMixin, - mixins.RetrieveModelMixin, - GenericViewSet): - queryset = NodeTemplate.objects.order_by("-id") - serializer_class = NodeTemplateSerializer - filterset_class = NodeTemplateFilter - - -# Create your views here. -def flow(request): - # 使用 builder 构造出流程描述结构 - start = EmptyStartEvent() - act = ServiceActivity(component_code="http_request") - - act2 = ServiceActivity(component_code="http_request") - act2.component.inputs.n = Var(type=Var.PLAIN, value=50) - - act3 = ServiceActivity(component_code="http_request") - act3.component.inputs.n = Var(type=Var.PLAIN, value=5) - - act4 = ServiceActivity(component_code="http_request") - act5 = ServiceActivity(component_code="http_request") - eg = ExclusiveGateway( - conditions={ - 0: '${exe_res} >= 0', - 1: '${exe_res} < 0' - }, - name='act_2 or act_3' - ) - pg = ParallelGateway() - cg = ConvergeGateway() - - end = EmptyEndEvent() - - start.extend(act).extend(eg).connect(act2, act3).to(act2).extend(act4).extend(act5).to(eg).converge(end) - # 全局变量 - pipeline_data = Data() - pipeline_data.inputs['${exe_res}'] = NodeOutput(type=Var.PLAIN, source_act=act.id, source_key='exe_res') - - pipeline = builder.build_tree(start, data=pipeline_data) - print(pipeline) - # 执行流程对象 - runtime = BambooDjangoRuntime() - - api.run_pipeline(runtime=runtime, pipeline=pipeline) - - result = api.get_pipeline_states(runtime=runtime, root_id=pipeline["id"]) - - result_output = api.get_execution_data_outputs(runtime, act.id).data - # api.pause_pipeline(runtime=runtime, pipeline_id=pipeline["id"]) - return JsonResponse({}) diff --git a/applications/task/apps.py b/applications/task/apps.py index 3c5f70a..fba3e67 100644 --- a/applications/task/apps.py +++ b/applications/task/apps.py @@ -1,5 +1,5 @@ 
from django.apps import AppConfig -class TaskConfig(AppConfig): +class ProjectConfig(AppConfig): name = 'task' diff --git a/applications/task/migrations/0001_initial.py b/applications/task/migrations/0001_initial.py deleted file mode 100644 index 4510d5a..0000000 --- a/applications/task/migrations/0001_initial.py +++ /dev/null @@ -1,30 +0,0 @@ -# Generated by Django 2.2.6 on 2022-06-17 15:02 - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ('flow', '0006_auto_20220616_1616'), - ] - - operations = [ - migrations.CreateModel( - name='Task', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('name', models.CharField(max_length=255, verbose_name='任务名称')), - ('run_type', models.CharField(choices=[('hand', '手动'), ('now', '立即'), ('time', '定时'), ('cycle', '周期'), ('cron', 'cron表达式')], max_length=64, verbose_name='执行方式')), - ('when_start', models.CharField(max_length=100, verbose_name='执行时间')), - ('cycle_time', models.CharField(max_length=20, null=True, verbose_name='周期时间')), - ('cycle_type', models.CharField(choices=[('min', '分钟'), ('hour', '小时'), ('day', '天')], max_length=20, null=True, verbose_name='周期间隔(min,hour,day)')), - ('cron_time', models.TextField(default='', verbose_name='cron表达式')), - ('celery_task_id', models.CharField(max_length=64, null=True, verbose_name='celery的任务ID')), - ('process_run', models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='tasks', to='flow.ProcessRun')), - ], - ), - ] diff --git a/applications/task/models.py b/applications/task/models.py index 051d5b6..71a8362 100644 --- a/applications/task/models.py +++ b/applications/task/models.py @@ -1,29 +1,3 @@ from django.db import models -from applications.flow.models import ProcessRun - - -class Task(models.Model): - TypeChoices = ( - ("hand", "手动"), - ("now", "立即"), - 
("time", "定时"), - ("cycle", "周期"), - ("cron", "cron表达式"), - ) - CycleChoices = ( - ("min", "分钟"), - ("hour", "小时"), - ("day", "天"), - ) - name = models.CharField("任务名称", max_length=255, blank=False, null=False) - - process_run = models.ForeignKey(ProcessRun, on_delete=models.CASCADE, null=True, db_constraint=False, - related_name="tasks") - run_type = models.CharField("执行方式", choices=TypeChoices,max_length=64) - when_start = models.CharField(max_length=100, verbose_name="执行时间") - cycle_time = models.CharField(max_length=20, null=True, verbose_name="周期时间") - cycle_type = models.CharField(max_length=20, null=True, verbose_name="周期间隔(min,hour,day)", choices=CycleChoices) - cron_time = models.TextField(default="", verbose_name="cron表达式") - - celery_task_id = models.CharField(max_length=64, null=True, verbose_name="celery的任务ID") \ No newline at end of file +# Create your models here. diff --git a/applications/task/serializers.py b/applications/task/serializers.py deleted file mode 100644 index 7ebb292..0000000 --- a/applications/task/serializers.py +++ /dev/null @@ -1,9 +0,0 @@ -from rest_framework import serializers - -from applications.task.models import Task - - -class TaskSerializer(serializers.ModelSerializer): - class Meta: - model = Task - fields = "__all__" diff --git a/applications/task/urls.py b/applications/task/urls.py deleted file mode 100644 index c0faa3f..0000000 --- a/applications/task/urls.py +++ /dev/null @@ -1,6 +0,0 @@ -from rest_framework.routers import DefaultRouter - -from . 
import views - -task_router = DefaultRouter() -task_router.register(r"task", viewset=views.TaskViewSets, base_name="task") diff --git a/applications/task/views.py b/applications/task/views.py index 5d2ad35..91ea44a 100644 --- a/applications/task/views.py +++ b/applications/task/views.py @@ -1,12 +1,3 @@ from django.shortcuts import render -from applications.task.models import Task -from applications.task.serializers import TaskSerializer -from component.drf.viewsets import GenericViewSet -from rest_framework import mixins - - -class TaskViewSets(mixins.ListModelMixin, - GenericViewSet): - queryset = Task.objects.order_by("-id") - serializer_class = TaskSerializer +# Create your views here. diff --git a/applications/utils/__init__.py b/applications/utils/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/applications/utils/dag_helper.py b/applications/utils/dag_helper.py deleted file mode 100644 index 0ac3efb..0000000 --- a/applications/utils/dag_helper.py +++ /dev/null @@ -1,291 +0,0 @@ -from collections import OrderedDict, defaultdict -from copy import copy, deepcopy - -from applications.flow.models import Process, Node -from bamboo_engine.builder import EmptyStartEvent, EmptyEndEvent, ExclusiveGateway, ServiceActivity, Var, builder, Data, \ - ParallelGateway, ConvergeGateway, ConditionalParallelGateway, SubProcess - - -class DAG(object): - """ Directed acyclic graph implementation. """ - - def __init__(self): - """ Construct a new DAG with no nodes or edges. """ - self.reset_graph() - - def add_node(self, node_name, graph=None): - """ Add a node if it does not exist yet, or error out. 
""" - if not graph: - graph = self.graph - if node_name in graph: - raise KeyError('node %s already exists' % node_name) - graph[node_name] = set() - - def add_node_if_not_exists(self, node_name, graph=None): - try: - self.add_node(node_name, graph=graph) - except KeyError: - pass - - def delete_node(self, node_name, graph=None): - """ Deletes this node and all edges referencing it. """ - if not graph: - graph = self.graph - if node_name not in graph: - raise KeyError('node %s does not exist' % node_name) - graph.pop(node_name) - - for node, edges in graph.items(): - if node_name in edges: - edges.remove(node_name) - - def delete_node_if_exists(self, node_name, graph=None): - try: - self.delete_node(node_name, graph=graph) - except KeyError: - pass - - def add_edge(self, ind_node, dep_node, graph=None): - """ Add an edge (dependency) between the specified nodes. """ - if not graph: - graph = self.graph - if ind_node not in graph or dep_node not in graph: - raise KeyError('one or more nodes do not exist in graph') - test_graph = deepcopy(graph) - test_graph[ind_node].add(dep_node) - is_valid, message = self.validate(test_graph) - if is_valid: - graph[ind_node].add(dep_node) - else: - raise Exception() - - def delete_edge(self, ind_node, dep_node, graph=None): - """ Delete an edge from the graph. """ - if not graph: - graph = self.graph - if dep_node not in graph.get(ind_node, []): - raise KeyError('this edge does not exist in graph') - graph[ind_node].remove(dep_node) - - def rename_edges(self, old_task_name, new_task_name, graph=None): - """ Change references to a task in existing edges. 
""" - if not graph: - graph = self.graph - for node, edges in graph.items(): - - if node == old_task_name: - graph[new_task_name] = copy(edges) - del graph[old_task_name] - - else: - if old_task_name in edges: - edges.remove(old_task_name) - edges.add(new_task_name) - - def predecessors(self, node, graph=None): - """ Returns a list of all predecessors of the given node """ - if graph is None: - graph = self.graph - return [key for key in graph if node in graph[key]] - - def downstream(self, node, graph=None): - """ Returns a list of all nodes this node has edges towards. """ - if graph is None: - graph = self.graph - if node not in graph: - raise KeyError('node %s is not in graph' % node) - return list(graph[node]) - - def all_downstreams(self, node, graph=None): - """Returns a list of all nodes ultimately downstream - of the given node in the dependency graph, in - topological order.""" - if graph is None: - graph = self.graph - nodes = [node] - nodes_seen = set() - i = 0 - while i < len(nodes): - downstreams = self.downstream(nodes[i], graph) - for downstream_node in downstreams: - if downstream_node not in nodes_seen: - nodes_seen.add(downstream_node) - nodes.append(downstream_node) - i += 1 - return list( - filter( - lambda node: node in nodes_seen, - self.topological_sort(graph=graph) - ) - ) - - def all_leaves(self, graph=None): - """ Return a list of all leaves (nodes with no downstreams) """ - if graph is None: - graph = self.graph - return [key for key in graph if not graph[key]] - - def from_dict(self, graph_dict): - """ Reset the graph and build it from the passed dictionary. 
- The dictionary takes the form of {node_name: [directed edges]} - """ - - self.reset_graph() - for new_node in graph_dict.keys(): - self.add_node(new_node) - for ind_node, dep_nodes in graph_dict.items(): - if not isinstance(dep_nodes, list): - raise TypeError('dict values must be lists') - for dep_node in dep_nodes: - self.add_edge(ind_node, dep_node) - - def reset_graph(self): - """ Restore the graph to an empty state. """ - self.graph = OrderedDict() - - def ind_nodes(self, graph=None): - """ Returns a list of all nodes in the graph with no dependencies. """ - if graph is None: - graph = self.graph - - dependent_nodes = set( - node for dependents in graph.values() for node in dependents - ) - return [node for node in graph.keys() if node not in dependent_nodes] - - def validate(self, graph=None): - """ Returns (Boolean, message) of whether DAG is valid. """ - graph = graph if graph is not None else self.graph - if len(self.ind_nodes(graph)) == 0: - return False, 'no independent nodes detected' - try: - self.topological_sort(graph) - except ValueError: - return False, 'failed topological sort' - return True, 'valid' - - def topological_sort(self, graph=None): - """ Returns a topological ordering of the DAG. - Raises an error if this is not possible (graph is not valid). 
- """ - if graph is None: - graph = self.graph - result = [] - in_degree = defaultdict(lambda: 0) - - for u in graph: - for v in graph[u]: - in_degree[v] += 1 - ready = [node for node in graph if not in_degree[node]] - - while ready: - u = ready.pop() - result.append(u) - for v in graph[u]: - in_degree[v] -= 1 - if in_degree[v] == 0: - ready.append(v) - - if len(result) == len(graph): - return result - else: - raise ValueError('graph is not acyclic') - - def size(self): - return len(self.graph) - - -def instance_dag(dag_dict, process_run_uuid): - new_dag_dict = defaultdict(list) - for k, v_list in dag_dict.items(): - for v in v_list: - new_dag_dict[process_run_uuid[k].id].append(process_run_uuid[v].id) - return dict(new_dag_dict) - - -class PipelineBuilder: - def __init__(self, process_id): - self.process_id = process_id - self.process = Process.objects.filter(id=process_id).first() - self.node_map = Node.objects.filter(process_id=process_id).in_bulk(field_name="uuid") - self.dag_obj = self.setup_dag() - self.instance = self.setup_instance() - - def setup_instance(self): - """将节点转换成bamboo实例""" - pipeline_instance = {} - for p_id, node in self.node_map.items(): - if node.node_type == Node.START_NODE: - pipeline_instance[p_id] = EmptyStartEvent() - elif node.node_type == Node.END_NODE: - pipeline_instance[p_id] = EmptyEndEvent() - elif node.node_type == Node.CONDITION_NODE: - pipeline_instance[p_id] = ExclusiveGateway( - conditions={ - 0: '1==0', - 1: '0==0' - }, - name='act_2 or act_3' - ) - elif node.node_type == Node.PARALLEL_NODE: - pipeline_instance[p_id] = ParallelGateway() - elif node.node_type == Node.CONVERGE_NODE: - pipeline_instance[p_id] = ConvergeGateway() - elif node.node_type == Node.CONDITION_PARALLEL_NODE: - pipeline_instance[p_id] = ConditionalParallelGateway( - conditions={ - 0: '1==0', - 1: '1==1', - 2: '2==2' - }, - name='[act_2] or [act_3 and act_4]' - ) - elif node.node_type == Node.SUB_PROCESS_NODE: - process_id = node.content - p_builder = 
PipelineBuilder(process_id) - pipeline = p_builder.build(is_subprocess=True) - pipeline_instance[p_id] = pipeline - # 子流程的pid一并加入pipeline_instance - pipeline_instance.update(p_builder.instance) - else: - act = ServiceActivity(component_code="http_request") - act.component.inputs.inputs = Var(type=Var.PLAIN, value=node.inputs) - pipeline_instance[p_id] = act - return pipeline_instance - - def setup_dag(self): - dag_obj = DAG() - dag_obj.from_dict(self.process.dag) - return dag_obj - - def get_inst(self, p_id): - return self.instance.get(p_id) - - def get_inst_list(self, p_ids): - return [self.instance.get(p_id) for p_id in p_ids] - - def build(self, is_subprocess=False): - start = self.dag_obj.ind_nodes()[0] - for _in, out_list in self.dag_obj.graph.items(): - for _out in out_list: - self.get_inst(_in).extend(self.get_inst(_out)) - pipeline_data = Data() - if is_subprocess: - pipeline = SubProcess(self.get_inst(start), data=pipeline_data) - else: - pipeline = builder.build_tree(self.get_inst(start), data=pipeline_data) - return pipeline - - -if __name__ == '__main__': - dag = DAG() - dag.add_node("a") - dag.add_node("b") - dag.add_node("c") - dag.add_node("d") - dag.add_edge("a", "b") - dag.add_edge("a", "d") - dag.add_edge("b", "c") - print(dag.topological_sort()) - print(dag.graph) - print(dag.all_downstreams("b")) diff --git a/applications/utils/uuid_helper.py b/applications/utils/uuid_helper.py deleted file mode 100644 index d2fef51..0000000 --- a/applications/utils/uuid_helper.py +++ /dev/null @@ -1,5 +0,0 @@ -import uuid - - -def get_uuid(): - return str(uuid.uuid4()).replace("-", "") \ No newline at end of file diff --git a/component/drf/middleware.py b/component/drf/middleware.py index faf420d..a818a13 100644 --- a/component/drf/middleware.py +++ b/component/drf/middleware.py @@ -1,16 +1,3 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. 
-Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - import json import logging import traceback diff --git a/custom_plugins/__init__.py b/custom_plugins/__init__.py deleted file mode 100644 index 4cb971a..0000000 --- a/custom_plugins/__init__.py +++ /dev/null @@ -1,130 +0,0 @@ -# -*- coding: utf-8 -*- -true = True -false = False -null = "" -a = {"result": true, "code": "OK", "message": "success", "data": {"menu": [ - {"name": "home", "cnName": "首页", "to": "/home", "icon": "iconfont icon-mianxingtubiao-shouye", "hasChild": false, - "children": []}, - {"name": "AgentList", "cnName": "Agent管理", "to": "/agentlist", "icon": "iconfont icon-mianxingtubiao-Agentguanli", - "hasChild": true, "children": [{"name": "AgentList", "cnName": "Agent列表", "to": "/agentlist", "hasChild": false}, - {"name": "AgentMonitor", "cnName": "Agent监视", "to": "/agentmonitor", - "hasChild": false}]}, - {"name": "NewJob", "cnName": "作业管理", "to": "/newjob", "icon": "iconfont icon-mianxingtubiao-zuoyeguanli", - "hasChild": true, "children": [{"name": "NewJob", "cnName": "新建作业", "to": "/newjob", "hasChild": false}, - {"name": "JobList", "cnName": "作业列表", "to": "/joblist", "hasChild": false}]}, - {"name": "NewJobFlow", "cnName": "作业流管理", "to": "/newjobflow", - "icon": "iconfont icon-mianxingtubiao-zuoyeliuguanli", "hasChild": true, - "children": [{"name": "NewJobFlow", "cnName": "新建作业流", "to": "/newjobflow", "hasChild": false}, - {"name": "JobFlowList", "cnName": "作业流列表", 
"to": "/jobflowlist", "hasChild": false}, - {"name": "CalendarMgmt", "cnName": "日历管理", "to": "/calendarmgmt", "hasChild": false}, - {"name": "VariableMgmt", "cnName": "变量管理", "to": "/variablemgmt", "hasChild": false}]}, - {"name": "JobMonitor", "cnName": "作业监视", "to": "/jobmonitor", "icon": "iconfont icon-mianxingtubiao-zuoyejiankong", - "hasChild": false}, {"name": "LargeScreen", "cnName": "作业监视大屏", "to": "/largescreen", - "icon": "iconfont icon-mianxingtubiao-zuoyejiankongdaping", "hasChild": false}, - {"name": "Report", "cnName": "报表分析", "to": "/report", "icon": "iconfont icon-xianxingtubiao-shengchengbaobiao", - "hasChild": false}, - {"name": "JobHistory", "cnName": "作业历史", "to": "/jobhistory", "icon": "iconfont icon-mianxingtubiao-zuoyelishi", - "hasChild": false}, - {"name": "AlarmList", "cnName": "告警中心", "to": "/alarmlist", "icon": "iconfont icon-mianxingtubiao-gaojingzhongxin", - "hasChild": false, "children": []}, - {"name": "SysSetup", "cnName": "系统管理", "to": "/syssetup", "icon": "iconfont icon-mianxingtubiao-shezhi", - "hasChild": true, "children": [{"name": "SysSetup", "cnName": "系统设置", "to": "/syssetup", "hasChild": false}, - {"name": "UserAndPermissions", "cnName": "用户与权限", "to": "/userandpermissions", - "hasChild": false}, - {"name": "SystemClassManage", "cnName": "系统类别管理", "to": "/systemclassmanage", - "hasChild": false}, - {"name": "Log", "cnName": "操作审计", "to": "/log", "hasChild": false}]}], "router": [ - {"path": "/", "name": "home", "component": "Home", "meta": {"title": "首页"}}, - {"path": "/log", "name": "Log", "component": "Log", "meta": {"title": "操作审计"}}, - {"path": "/addcalendarmgmt", "name": "AddCalendarMgmt", "component": "AddCalendarMgmt", - "meta": {"title": "操作日历", "back": "true", "fatherName": "CalendarMgmt"}}, - {"path": "/variablechange", "name": "variableChange", "component": "variableChange", - "meta": {"title": "变量表", "back": "true", "fatherName": "VariableMgmt"}}, - {"path": "/singlejob", "name": "SingleJob", "component": 
"SingleJob", - "meta": {"title": "单个作业", "back": "true", "fatherName": "NewJob"}}, - {"path": "/singleJobdetail", "name": "singleJobDetail", "component": "SingleJob", - "meta": {"title": "作业管理 > 修改作业 > 单个作业"}}, {"path": "/viewdetail", "name": "ViewDetail", "component": "ViewDetail", - "meta": {"title": "作业流视图详情", "back": "true", - "fatherName": "JobMonitor"}}, - {"path": "/jobflowdetail", "name": "JobFlowDetail", "component": "JobFlowDetail", - "meta": {"title": "作业流视图历史详情", "back": "true", "fatherName": "JobHistory"}}, - {"path": "/jobviewdetail", "name": "JobViewDetail", "component": "JobViewDetail", - "meta": {"title": "作业视图历史详情", "back": "true", "fatherName": "JobHistory"}}, - {"path": "/multiplejob", "name": "MultipleJob", "component": "MultipleJob", - "meta": {"title": "批量作业导入", "back": "true", "fatherName": "NewJob"}}, - {"path": "/scanfile", "name": "ScanFile", "component": "ScanFile", - "meta": {"title": "导入详情", "back": "true", "fatherName": "NewJob"}}, - {"path": "/home", "name": "home", "component": "Home", "meta": {"title": "首页"}}, - {"path": "/agentlist", "name": "AgentList", "component": "AgentList", "meta": {"title": "Agent列表"}}, - {"path": "/agentmonitor", "name": "AgentMonitor", "component": "AgentMonitor", "meta": {"title": "Agent监视"}}, - {"path": "/calendarmgmt", "name": "CalendarMgmt", "component": "CalendarMgmt", "meta": {"title": "日历管理"}}, - {"path": "/jobflowlist", "name": "JobFlowList", "component": "JobFlowList", "meta": {"title": "作业流列表"}}, - {"path": "/newjobflow", "name": "NewJobFlow", "component": "NewJobFlow", "meta": {"title": "新建作业流"}}, - {"path": "/singlejobflow", "name": "SingleJobFlow", "component": "SingleJobFlow", - "meta": {"title": "单个作业流", "back": "true", "fatherName": "NewJobFlow"}}, - {"path": "/multiplejobflow", "name": "MultipleJobFlow", "component": "MultipleJobFlow", - "meta": {"title": "批量导入", "back": "true", "fatherName": "NewJobFlow"}}, - {"path": "/importfile", "name": "importFile", "component": "ImportFile", - 
"meta": {"title": "导入详情", "back": "true", "fatherName": "NewJobFlow"}}, - {"path": "/variablemgmt", "name": "VariableMgmt", "component": "VariableMgmt", "meta": {"title": "变量管理"}}, - {"path": "/joblist", "name": "JobList", "component": "JobList", "meta": {"title": "作业列表"}}, - {"path": "/newjob", "name": "NewJob", "component": "NewJob", "meta": {"title": "新建作业"}}, - {"path": "/jobhistory", "name": "JobHistory", "component": "JobHistory", "meta": {"title": "作业历史"}, "children": [ - {"path": "/jobflowviewhistory", "name": "JobFlowViewHistory", "component": "JobFlowViewHistory", - "meta": {"title": "作业历史", "fatherName": "JobHistory"}}, - {"path": "/jobviewhistory", "name": "JobViewHistory", "component": "JobViewHistory", - "meta": {"title": "作业历史", "fatherName": "JobHistory"}}]}, - {"path": "/report", "name": "Report", "component": "Report", "meta": {"title": "报表分析"}}, - {"path": "/largescreen", "name": "LargeScreen", "component": "LargeScreen", "meta": {"title": "作业监视大屏"}}, - {"path": "/jobmonitor", "name": "JobMonitor", "component": "JobMonitor", "meta": {"title": "作业监视"}, "children": [ - {"path": "/jobview", "name": "JobView", "component": "JobView", - "meta": {"title": "作业监视", "fatherName": "JobMonitor"}}, - {"path": "/jobflowview", "name": "JobFlowView", "component": "JobFlowView", - "meta": {"title": "作业监视", "fatherName": "JobMonitor"}}]}, - {"path": "/jobdetail", "name": "jobDetail", "component": "JobDetail", - "meta": {"title": "作业视图详情", "back": "true", "fatherName": "JobMonitor"}}, - {"path": "/syssetup", "name": "SysSetup", "component": "SysSetup", "meta": {"title": "系统设置"}}, - {"path": "/userandpermissions", "name": "UserAndPermissions", "component": "UserAndPermissions", - "meta": {"title": "用户与权限"}}, - {"path": "/systemclassmanage", "name": "SystemClassManage", "component": "SystemClassManage", - "meta": {"title": "系统类别管理"}}, - {"path": "/logmange", "name": "LogMange", "component": "LogMange", "meta": {"title": "日志管理"}}, - {"path": "/alarmlist", "name": 
"AlarmList", "component": "AlarmList", "meta": {"title": "告警中心"}}], "permission": [ - {"url": "/agentlist", "auth": {"search": true, "create": true, "modify": true, "del": true}}, - {"url": "/agentmonitor", "auth": {"search": true}}, {"url": "/newjob", "auth": {"create": true}}, - {"url": "/joblist", "auth": {"search": true, "operate": true, "modify": true, "del": true}}, - {"url": "/newjobflow", "auth": {"create": true}}, - {"url": "/jobflowlist", "auth": {"search": true, "operate": true, "modify": true, "del": true}}, - {"url": "/calendarmgmt", "auth": {"search": true, "create": true, "modify": true, "del": true}}, - {"url": "/variablemgmt", "auth": {"search": true, "create": true, "modify": true, "del": true}}, - {"url": "/jobflowview", "auth": {"search": true, "operate": true}}, - {"url": "/jobflowviewhistory", "auth": {"search": true}}, - {"url": "/jobview", "auth": {"search": true, "operate": true}}, - {"url": "/jobviewhistory", "auth": {"search": true}}, {"url": "/alarmlist", "auth": {"search": true}}, - {"url": "/syssetup", "auth": {"operate": true, "modify": true}}, - {"url": "/userandpermissions", "auth": {"search": true, "create": true}}, - {"url": "/systemclassmanage", "auth": {"search": true, "create": true, "modify": true, "del": true}}, - {"url": "/log", "auth": {"search": true}}, {"url": "/viewdetail", "auth": {"search": true, "operate": true}}]}} -component = {"result": true, "code": "OK", "message": "success", - "data": {"page": 1, "total_page": 1, "count": 2, - "items": - [ - {"id": 1, "name": "HTTP请求", "description": "", "show": true, "top": 300, "left": 300, - "ico": null, - "fail_retry_count": 0, "fail_offset": 10, "fail_offset_unit": "seconds", "node_type": 2, - "component_code": "http_request", "is_skip_fail": false, "is_timeout_alarm": false, - "inputs": {"url": "", "body": "{}", "header": [{"key": "", "value": ""}], - "method": "get", - "timeout": 60, - "check_point": {"key": "", "values": "", "condition": ""}}, "outputs": {}, - "content": 
0, - "template_type": "0", - "inputs_component": [{"key": "url", "type": "textarea", "label": "请求地址:"}, - {"key": "method", "type": "select", "label": "请求类型:", - "choices": [{"label": "GET", "value": "get"}]}, - {"key": "header", "type": "dict_map", - "label": "Header"}, - {"key": "body", "type": "textarea", "label": "Body:"}, - {"key": "timeout", "type": "number", - "label": "超时时间:"}], - "outputs_component": []}]}} diff --git a/custom_plugins/apps.py b/custom_plugins/apps.py deleted file mode 100644 index eddd725..0000000 --- a/custom_plugins/apps.py +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -from django.apps import AppConfig - - -class CustomPluginsConfig(AppConfig): - default_auto_field = 'django.db.models.BigAutoField' - name = 'custom_plugins' diff --git a/custom_plugins/components/__init__.py b/custom_plugins/components/__init__.py deleted file mode 100644 index ecef9ad..0000000 --- a/custom_plugins/components/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# -*- coding: utf-8 -*- - - -import logging - -from pipeline.conf import settings -from pipeline.core.flow.activity import Service -from pipeline.component_framework.component import Component - -logger = logging.getLogger('celery') diff --git a/custom_plugins/components/collections/__init__.py b/custom_plugins/components/collections/__init__.py deleted file mode 100644 index ecef9ad..0000000 --- a/custom_plugins/components/collections/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -# -*- coding: utf-8 -*- - - -import logging - -from pipeline.conf import settings -from pipeline.core.flow.activity import Service -from pipeline.component_framework.component import Component - -logger = logging.getLogger('celery') diff --git a/custom_plugins/components/collections/plugins.py b/custom_plugins/components/collections/plugins.py deleted file mode 100644 index 7972283..0000000 --- a/custom_plugins/components/collections/plugins.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -import math -from 
pipeline.core.flow.activity import Service, StaticIntervalGenerator -from pipeline.component_framework.component import Component -import json -import time -import requests - -# to adapter window pc -# import eventlet -# requests = eventlet.import_patched('requests') - - -class HttpRequestService(Service): - __need_schedule__ = False - - def execute(self, data, parent_data): - try: - inputs = data.get_one_of_inputs("inputs") - headers = self.parse_headers(inputs["header"]) - inputs["body"] = json.loads(inputs["body"]) - req_data = [{"params": inputs["body"]}, {"json": inputs["body"]}][inputs["method"] != "get"] - res = requests.request(inputs["method"], url=inputs["url"], headers=headers, timeout=inputs["timeout"], - **req_data).content - print("执行了", res) - try: - res = json.loads(res) - except Exception: - res = res - data.outputs.outputs = res - time.sleep(5) - if res.get("result"): - return True - else: - return False - - except Exception as e: - data.outputs.outputs = str(e) - return False - - def parse_headers(self, headers): - return {header["key"]: header["value"] for header in headers if header["key"]} - - def inputs_format(self): - return [ - Service.InputItem(name="输入参数", key="inputs", type="dict", required=True) - ] - - def outputs_format(self): - return [ - Service.OutputItem(name="输出参数", key="outputs", type="dict", required=True) - ] - - -class HttpRequestComponent(Component): - name = "HttpRequestComponent" - code = "http_request" - bound_service = HttpRequestService diff --git a/custom_plugins/migrations/__init__.py b/custom_plugins/migrations/__init__.py deleted file mode 100644 index 40a96af..0000000 --- a/custom_plugins/migrations/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/custom_plugins/static/custom_plugins/plugins.js b/custom_plugins/static/custom_plugins/plugins.js deleted file mode 100644 index 1a35a45..0000000 --- a/custom_plugins/static/custom_plugins/plugins.js +++ /dev/null @@ -1,12 +0,0 @@ - -/** -* 
Tencent is pleased to support the open source community by making PaaSƽ̨ (BlueKing PaaS Community -* Edition) available. -* Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* http://opensource.org/licenses/MIT -* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -* specific language governing permissions and limitations under the License. -*/ diff --git a/custom_plugins/tests/__init__.py b/custom_plugins/tests/__init__.py deleted file mode 100644 index 948ef15..0000000 --- a/custom_plugins/tests/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ - -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making PaaSƽ̨ (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging - -from pipeline.conf import settings -from pipeline.core.flow.activity import Service -from pipeline.component_framework.component import Component - -logger = logging.getLogger('celery') diff --git a/custom_plugins/tests/components/__init__.py b/custom_plugins/tests/components/__init__.py deleted file mode 100644 index 948ef15..0000000 --- a/custom_plugins/tests/components/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ - -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making PaaSƽ̨ (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging - -from pipeline.conf import settings -from pipeline.core.flow.activity import Service -from pipeline.component_framework.component import Component - -logger = logging.getLogger('celery') diff --git a/custom_plugins/tests/components/collections/__init__.py b/custom_plugins/tests/components/collections/__init__.py deleted file mode 100644 index 948ef15..0000000 --- a/custom_plugins/tests/components/collections/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ - -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making PaaSƽ̨ (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging - -from pipeline.conf import settings -from pipeline.core.flow.activity import Service -from pipeline.component_framework.component import Component - -logger = logging.getLogger('celery') diff --git a/custom_plugins/tests/components/collections/plugins_test/__init__.py b/custom_plugins/tests/components/collections/plugins_test/__init__.py deleted file mode 100644 index 948ef15..0000000 --- a/custom_plugins/tests/components/collections/plugins_test/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ - -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making PaaSƽ̨ (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging - -from pipeline.conf import settings -from pipeline.core.flow.activity import Service -from pipeline.component_framework.component import Component - -logger = logging.getLogger('celery') diff --git a/dj_flow/celery_app.py b/dj_flow/celery_app.py deleted file mode 100644 index de86087..0000000 --- a/dj_flow/celery_app.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from __future__ import absolute_import, unicode_literals - -import os -import time -from celery import Celery, platforms -from django.conf import settings - -platforms.C_FORCE_ROOT = True - -# set the default Django settings module for the 'celery' program. -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dj_flow.settings") - -app = Celery("dj_flow") - -# Using a string here means the worker don't have to serialize -# the configuration object to child processes. -# - namespace='CELERY' means all celery-related configuration keys -# should have a `CELERY_` prefix. -app.config_from_object("django.conf:settings") - -# Load task modules from all registered Django app configs. 
-app.autodiscover_tasks(lambda: settings.INSTALLED_APPS) - - -@app.task(bind=True) -def debug_task(self): - print("Request: {!r}".format(self.request)) - time.sleep(2) diff --git a/dj_flow/urls.py b/dj_flow/urls.py deleted file mode 100644 index 57cdeab..0000000 --- a/dj_flow/urls.py +++ /dev/null @@ -1,30 +0,0 @@ -"""URL Configuration - -The `urlpatterns` list routes URLs to views. For more information please see: - https://docs.djangoproject.com/en/3.2/topics/http/urls/ -Examples: -Function views - 1. Add an import: from my_app import views - 2. Add a URL to urlpatterns: path('', views.home, name='home') -Class-based views - 1. Add an import: from other_app.views import Home - 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') -Including another URLconf - 1. Import the include() function: from django.urls import include, path - 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) -""" -from django.contrib import admin -from django.urls import path, include -from applications.flow.urls import flow_router, node_router -from applications.flow.views import flow -from applications.task.urls import task_router -from dj_flow.views import index - -urlpatterns = [ - path('admin/', admin.site.urls), - path('', index), - path("process/", include(flow_router.urls)), - path("node/", include(node_router.urls)), - path("task/", include(task_router.urls)), - path("tt/", flow), -] diff --git a/dj_flow/__init__.py b/django_vue_cli/__init__.py similarity index 100% rename from dj_flow/__init__.py rename to django_vue_cli/__init__.py diff --git a/django_vue_cli/celery_app.py b/django_vue_cli/celery_app.py new file mode 100644 index 0000000..4a80630 --- /dev/null +++ b/django_vue_cli/celery_app.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- + +from __future__ import absolute_import, unicode_literals + +import os +import time +from celery import Celery, platforms +from django.conf import settings + +platforms.C_FORCE_ROOT = True + +# set the default Django 
settings module for the 'celery' program. +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_vue_cli.settings") + +app = Celery("django_vue_cli") + +# Using a string here means the worker don't have to serialize +# the configuration object to child processes. +# - namespace='CELERY' means all celery-related configuration keys +# should have a `CELERY_` prefix. +app.config_from_object("django.conf:settings") + +# Load task modules from all registered Django app configs. +app.autodiscover_tasks(lambda: settings.INSTALLED_APPS) + + +@app.task(bind=True) +def debug_task(self): + print("Request: {!r}".format(self.request)) + time.sleep(2) diff --git a/dj_flow/settings.py b/django_vue_cli/settings.py similarity index 93% rename from dj_flow/settings.py rename to django_vue_cli/settings.py index aade659..1acdc80 100644 --- a/dj_flow/settings.py +++ b/django_vue_cli/settings.py @@ -1,7 +1,7 @@ from pathlib import Path import sys import os - +# lib文件夹中手动导入的第三方库 BASE_DIR = Path(__file__).resolve().parent.parent sys.path.insert(1, os.path.join(os.getcwd(), 'lib')) @@ -13,7 +13,7 @@ DEBUG = False ALLOWED_HOSTS = ["*"] CORS_ALLOW_CREDENTIALS = True -CSRF_COOKIE_NAME = "dj-flow_csrftoken" +CSRF_COOKIE_NAME = "django_vue_cli_csrftoken" CORS_ORIGIN_WHITELIST = [ "http://127.0.0.1:8080" ] @@ -22,20 +22,14 @@ CORS_ORIGIN_WHITELIST = [ INSTALLED_APPS = [ "corsheaders", - "pipeline", - "pipeline.engine", - "pipeline.component_framework", - "pipeline.eri", 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', - "custom_plugins", "rest_framework", - "applications.flow", - "applications.task" + "applications.task", ] @@ -51,7 +45,7 @@ MIDDLEWARE = [ "component.drf.middleware.AppExceptionMiddleware" ] -ROOT_URLCONF = 'dj_flow.urls' +ROOT_URLCONF = 'django_vue_cli.urls' TEMPLATES = [ { @@ -69,7 +63,7 @@ TEMPLATES = [ }, ] -WSGI_APPLICATION = 'dj_flow.wsgi.application' 
+WSGI_APPLICATION = 'django_vue_cli.wsgi.application' TIME_ZONE = "Asia/Shanghai" LANGUAGE_CODE = "zh-hans" # Database diff --git a/django_vue_cli/urls.py b/django_vue_cli/urls.py new file mode 100644 index 0000000..dc5573f --- /dev/null +++ b/django_vue_cli/urls.py @@ -0,0 +1,8 @@ +from django.contrib import admin +from django.urls import path, include +from django_vue_cli.views import index + +urlpatterns = [ + path('admin/', admin.site.urls), + path('', index), +] diff --git a/dj_flow/views.py b/django_vue_cli/views.py similarity index 100% rename from dj_flow/views.py rename to django_vue_cli/views.py diff --git a/dj_flow/wsgi.py b/django_vue_cli/wsgi.py similarity index 74% rename from dj_flow/wsgi.py rename to django_vue_cli/wsgi.py index f03105e..bc38fb5 100644 --- a/dj_flow/wsgi.py +++ b/django_vue_cli/wsgi.py @@ -1,5 +1,5 @@ """ -WSGI config for project. +WSGI config for task. It exposes the WSGI callable as a module-level variable named ``application``. @@ -11,6 +11,6 @@ import os from django.core.wsgi import get_wsgi_application -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dj_flow.settings') +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django_vue_cli.settings') application = get_wsgi_application() diff --git a/lib/bamboo_engine/__init__.py b/lib/bamboo_engine/__init__.py deleted file mode 100644 index 1435565..0000000 --- a/lib/bamboo_engine/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from .engine import Engine # noqa diff --git a/lib/bamboo_engine/__version__.py b/lib/bamboo_engine/__version__.py deleted file mode 100644 index f74cac2..0000000 --- a/lib/bamboo_engine/__version__.py +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -__version__ = "1.6.4" diff --git a/lib/bamboo_engine/api.py b/lib/bamboo_engine/api.py deleted file mode 100644 index 3e77626..0000000 --- a/lib/bamboo_engine/api.py +++ /dev/null @@ -1,647 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -# API 模块用于向外暴露接口,bamboo-engine 的使用者应该永远只用这个模块与 bamboo-engien 进行交互 - - -import logging -import functools -import traceback -from typing import Optional, Any, List - -from .utils.object import Representable -from .eri import EngineRuntimeInterface, ContextValue -from .engine import Engine -from .template import Template -from .context import Context -from .utils.constants import VAR_CONTEXT_MAPPING - -logger = logging.getLogger("bamboo_engine") - - -class EngineAPIResult(Representable): - """ - api 统一返回结果 - """ - - def __init__( - self, - result: bool, - message: str, - exc: Optional[Exception] = None, - data: Optional[Any] = None, - exc_trace: Optional[str] = None, - ): - """ - :param result: 是否执行成功 - :type result: bool - :param message: 附加消息,result 为 False 时关注 - :type message: str - :param exc: 异常对象 - :type exc: Exception - :param data: 数据 - :type data: Any - """ - self.result = result - self.message = message - self.exc = exc - self.data = data - self.exc_trace = exc_trace - - -def _ensure_return_api_result(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - try: - data = func(*args, **kwargs) - except Exception as e: - logger.exception("{} raise error.".format(func.__name__)) - trace = traceback.format_exc() - return EngineAPIResult(result=False, message="fail", exc=e, data=None, exc_trace=trace) - - if isinstance(data, EngineAPIResult): - return data - return EngineAPIResult(result=True, message="success", exc=None, data=data, exc_trace=None) - - return wrapper - - -@_ensure_return_api_result -def run_pipeline(runtime: EngineRuntimeInterface, pipeline: dict, 
**options) -> EngineAPIResult: - """ - 执行 pipeline - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param pipeline: pipeline 描述对象 - :type pipeline: dict - :return: 执行结果 - :rtype: EngineAPIResult - """ - - Engine(runtime).run_pipeline(pipeline, **options) - - -@_ensure_return_api_result -def pause_pipeline(runtime: EngineRuntimeInterface, pipeline_id: str) -> EngineAPIResult: - """ - 暂停 pipeline 的执行 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param pipeline_id: piipeline id - :type pipeline_id: str - :return: 执行结果 - :rtype: EngineAPIResult - """ - - Engine(runtime).pause_pipeline(pipeline_id) - - -@_ensure_return_api_result -def revoke_pipeline(runtime: EngineRuntimeInterface, pipeline_id: str) -> EngineAPIResult: - """ - 撤销 pipeline,使其无法继续执行 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param pipeline_id: pipeline id - :type pipeline_id: str - :return: 执行结果 - :rtype: EngineAPIResult - """ - Engine(runtime).revoke_pipeline(pipeline_id) - - -@_ensure_return_api_result -def resume_pipeline(runtime: EngineRuntimeInterface, pipeline_id: str) -> EngineAPIResult: - """ - 继续被 pause_pipeline 接口暂停的 pipeline 的执行 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param pipeline_id: pipeline id - :type pipeline_id: str - :return: 执行结果 - :rtype: EngineAPIResult - """ - Engine(runtime).resume_pipeline(pipeline_id) - - -@_ensure_return_api_result -def pause_node_appoint(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult: - """ - 预约暂停某个节点的执行 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param node_id: 节点 id - :type node_id: str - :return: 执行结果 - :rtype: EngineAPIResult - """ - Engine(runtime).pause_node_appoint(node_id) - - -@_ensure_return_api_result -def resume_node_appoint(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult: - """ - 继续由于某个节点而暂停的 pipeline 的执行 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param 
node_id: 节点 id - :type node_id: str - :return: 执行结果 - :rtype: EngineAPIResult - """ - Engine(runtime).resume_node_appoint(node_id) - - -@_ensure_return_api_result -def retry_node(runtime: EngineRuntimeInterface, node_id: str, data: Optional[dict] = None) -> EngineAPIResult: - """ - 重试某个执行失败的节点 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param node_id: 失败的节点 id - :type node_id: str - :param data: 重试时使用的节点执行输入 - :type data: dict - :return: 执行结果 - :rtype: EngineAPIResult - """ - Engine(runtime).retry_node(node_id, data) - - -@_ensure_return_api_result -def retry_subprocess(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult: - """ - 重试进入失败的子流程节点 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param node_id: 子流程节点 id - :type node_id: str - :return: [description] - :rtype: EngineAPIResult - """ - Engine(runtime).retry_subprocess(node_id) - - -@_ensure_return_api_result -def skip_node(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult: - """ - 跳过某个执行失败的节点(仅限 event,activity) - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param node_id: 失败的节点 id - :type node_id: str - :return: 执行结果 - :rtype: EngineAPIResult - """ - Engine(runtime).skip_node(node_id) - - -@_ensure_return_api_result -def skip_exclusive_gateway(runtime: EngineRuntimeInterface, node_id: str, flow_id: str) -> EngineAPIResult: - """ - 跳过某个执行失败的分支网关 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param node_id: 失败的分支网关 id - :type node_id: str - :param flow_id: 需要往下执行的 flow id - :type flow_id: str - :return: 执行结果 - :rtype: EngineAPIResult - """ - Engine(runtime).skip_exclusive_gateway(node_id, flow_id) - - -@_ensure_return_api_result -def skip_conditional_parallel_gateway( - runtime: EngineRuntimeInterface, - node_id: str, - flow_ids: list, - converge_gateway_id: str, -) -> EngineAPIResult: - """ - 跳过某个执行失败的条件并行网关 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param 
node_id: 失败的分支网关 id - :type node_id: str - :param flow_ids: 需要往下执行的 flow id 列表 - :type flow_ids: list - :param converge_gateway_id: 目标汇聚网关 id - :type converge_gateway_id: str - :return: 执行结果 - :rtype: EngineAPIResult - """ - Engine(runtime).skip_conditional_parallel_gateway(node_id, flow_ids, converge_gateway_id) - - -@_ensure_return_api_result -def forced_fail_activity(runtime: EngineRuntimeInterface, node_id: str, ex_data: str) -> EngineAPIResult: - """ - 强制失败某个 activity 节点 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param node_id: 节点 ID - :type node_id: str - :param message: 异常信息 - :type message: str - :return: 执行结果 - :rtype: EngineAPIResult - """ - Engine(runtime).forced_fail_activity(node_id, ex_data) - - -@_ensure_return_api_result -def callback(runtime: EngineRuntimeInterface, node_id: str, version: str, data: dict) -> EngineAPIResult: - """ - 回调某个节点 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param version: 节点执行版本 - :param version: str - :param data: 节点 ID - :type data: dict - :return: 执行结果 - :rtype: EngineAPIResult - """ - Engine(runtime).callback(node_id, version, data) - - -@_ensure_return_api_result -def get_pipeline_states(runtime: EngineRuntimeInterface, root_id: str, flat_children=True) -> EngineAPIResult: - """ - 返回某个任务的状态树 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param root_id: 根节点 ID - :type root_id: str - :param flat_children: 是否将所有子节点展开 - :type flat_children: bool - :return: 执行结果 - :rtype: EngineAPIResult - """ - states = runtime.get_state_by_root(root_id) - if not states: - return {} - - root_state = None - children = {} - for s in states: - if s.node_id != root_id: - children[s.node_id] = { - "id": s.node_id, - "state": s.name, - "root_id:": s.root_id, - "parent_id": s.parent_id, - "version": s.version, - "loop": s.loop, - "retry": s.retry, - "skip": s.skip, - "error_ignorable": s.error_ignored, - "error_ignored": s.error_ignored, - "created_time": 
s.created_time, - "started_time": s.started_time, - "archived_time": s.archived_time, - "children": {}, - } - else: - root_state = s - - if not flat_children: - # set node children - for node_id, state in children.items(): - if state["parent_id"] in children: - children[state["parent_id"]]["children"][node_id] = state - - # pop sub child - for node_id in list(children.keys()): - if children[node_id]["parent_id"] != root_state.node_id: - children.pop(node_id) - - state_tree = {} - state_tree[root_state.node_id] = { - "id": root_state.node_id, - "state": root_state.name, - "root_id:": root_state.root_id, - "parent_id": root_state.root_id, - "version": root_state.version, - "loop": root_state.loop, - "retry": root_state.retry, - "skip": root_state.skip, - "error_ignorable": s.error_ignored, - "error_ignored": s.error_ignored, - "created_time": root_state.created_time, - "started_time": root_state.started_time, - "archived_time": root_state.archived_time, - "children": children, - } - return state_tree - - -@_ensure_return_api_result -def get_children_states(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult: - """ - 返回某个节点及其所有子节点的状态 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param node_id: 父流程 ID - :type node_id: str - :return: 执行结果 - :rtype: EngineAPIResult - """ - parent_state = runtime.get_state_or_none(node_id) - if not parent_state: - return {} - - states = runtime.get_state_by_parent(node_id) - children = {} - for s in states: - children[s.node_id] = { - "id": s.node_id, - "state": s.name, - "root_id:": s.root_id, - "parent_id": s.parent_id, - "version": s.version, - "loop": s.loop, - "retry": s.retry, - "skip": s.skip, - "error_ignorable": s.error_ignored, - "error_ignored": s.error_ignored, - "created_time": s.created_time, - "started_time": s.started_time, - "archived_time": s.archived_time, - "children": {}, - } - - state_tree = {} - state_tree[parent_state.node_id] = { - "id": parent_state.node_id, - "state": 
parent_state.name, - "root_id:": parent_state.root_id, - "parent_id": parent_state.root_id, - "version": parent_state.version, - "loop": parent_state.loop, - "retry": parent_state.retry, - "skip": parent_state.skip, - "error_ignorable": parent_state.error_ignored, - "error_ignored": parent_state.error_ignored, - "created_time": parent_state.created_time, - "started_time": parent_state.started_time, - "archived_time": parent_state.archived_time, - "children": children, - } - return state_tree - - -@_ensure_return_api_result -def get_execution_data_inputs(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult: - """ - 获取某个节点执行数据的输入数据 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param node_id: 节点 ID - :type node_id: str - :return: 执行结果 - :rtype: EngineAPIResult - """ - return runtime.get_execution_data_inputs(node_id) - - -@_ensure_return_api_result -def get_execution_data_outputs(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult: - """ - 获取某个节点的执行数据输出 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param node_id: 节点 ID - :type node_id: str - :return: 执行结果 - :rtype: EngineAPIResult - """ - return runtime.get_execution_data_outputs(node_id) - - -@_ensure_return_api_result -def get_execution_data(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult: - """ - 获取某个节点的执行数据 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param node_id: 节点 ID - :type node_id: str - :return: 执行结果 - :rtype: EngineAPIResult - """ - data = runtime.get_execution_data(node_id) - return {"inputs": data.inputs, "outputs": data.outputs} - - -@_ensure_return_api_result -def get_data(runtime: EngineRuntimeInterface, node_id: str) -> EngineAPIResult: - """ - 获取某个节点的原始输入数据 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param node_id: 节点 ID - :type node_id: str - :return: 执行结果 - :rtype: EngineAPIResult - """ - data = runtime.get_data(node_id) - return { - "inputs": {k: 
{"need_render": v.need_render, "value": v.value} for k, v in data.inputs.items()}, - "outputs": data.outputs, - } - - -@_ensure_return_api_result -def get_node_histories(runtime: EngineRuntimeInterface, node_id: str, loop: int = -1) -> EngineAPIResult: - """ - 获取某个节点的历史记录概览 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param node_id: 节点 ID - :type node_id: str - :param loop: 重入次数, -1 表示不过滤重入次数 - :type loop: int, optional - :return: 执行结果 - :rtype: EngineAPIResult - """ - return [ - { - "id": h.id, - "node_id": h.node_id, - "started_time": h.started_time, - "archived_time": h.archived_time, - "loop": h.loop, - "skip": h.skip, - "version": h.version, - "inputs": h.inputs, - "outputs": h.outputs, - } - for h in runtime.get_histories(node_id, loop) - ] - - -@_ensure_return_api_result -def get_node_short_histories(runtime: EngineRuntimeInterface, node_id: str, loop: int = -1) -> EngineAPIResult: - """ - 获取某个节点的简要历史记录 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param node_id: 节点 ID - :type node_id: str - :param loop: 重入次数, -1 表示不过滤重入次数 - :type loop: int, optional - :return: 执行结果 - :rtype: EngineAPIResult - """ - return [ - { - "id": h.id, - "node_id": h.node_id, - "started_time": h.started_time, - "archived_time": h.archived_time, - "loop": h.loop, - "skip": h.skip, - "version": h.version, - } - for h in runtime.get_short_histories(node_id, loop) - ] - - -@_ensure_return_api_result -def get_pipeline_debug_info(runtime: EngineRuntimeInterface, pipeline_id: str): - """ - 获取某个流程的调试信息 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param pipeline_id: 流程 ID - :type pipeline_id: str - :return: 执行结果 - :rtype: EngineAPIResult - """ - - return { - "contex_values": runtime.get_context(pipeline_id), - "processes": runtime.get_process_info_with_root_pipeline(pipeline_id), - } - - -@_ensure_return_api_result -def get_node_debug_info(runtime: EngineRuntimeInterface, node_id: str): - """ - 获取某个节点的调试信息 - - :param 
runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param node_id: 节点 ID - :type node_id: str - :return: 执行结果 - :rtype: EngineAPIResult - """ - - data = None - state = None - err = [] - - try: - data = runtime.get_data(node_id) - except Exception as e: - err.append(str(e)) - - try: - state = runtime.get_state(node_id) - except Exception as e: - err.append(str(e)) - - return { - "node": runtime.get_node(node_id), - "data": data, - "state": state, - "err": err, - } - - -@_ensure_return_api_result -def preview_node_inputs( - runtime: EngineRuntimeInterface, - pipeline: dict, - node_id: str, - subprocess_stack: List[str] = [], - root_pipeline_data: dict = {}, - parent_params: dict = {}, -): - """ - 预览某个节点的输入结果 - - :param pipeline: 预处理后的流程树数据 - :type pipeline: dict - :param node_id: 节点 ID - :type node_id: str - :param subprocess_stack: 子流程,需保证顺序 - :type subprocess_stack: List[str] - :param root_pipeline_data: root流程数据 - :param parent_params: 父流程传入参数 - :return: 执行结果 - :rtype: EngineAPIResult - """ - context_values = [ - ContextValue(key=key, type=VAR_CONTEXT_MAPPING[info["type"]], value=info["value"], code=info.get("custom_type")) - for key, info in list(pipeline["data"].get("inputs", {}).items()) + list(parent_params.items()) - ] - context = Context(runtime, context_values, root_pipeline_data) - - if subprocess_stack: - subprocess = subprocess_stack[0] - child_pipeline = pipeline["activities"][subprocess]["pipeline"] - param_data = {key: info["value"] for key, info in pipeline["activities"][subprocess]["params"].items()} - hydrated_context = context.hydrate(deformat=True) - hydrated_param_data = Template(param_data).render(hydrated_context) - formatted_param_data = {key: {"value": value, "type": "plain"} for key, value in hydrated_param_data.items()} - return preview_node_inputs( - runtime=runtime, - pipeline=child_pipeline, - node_id=node_id, - subprocess_stack=subprocess_stack[1:], - root_pipeline_data=root_pipeline_data, - parent_params=formatted_param_data, 
- ) - raw_inputs = pipeline["activities"][node_id]["component"]["inputs"] - raw_inputs = {key: info["value"] for key, info in raw_inputs.items()} - hydrated_context = context.hydrate(deformat=True) - inputs = Template(raw_inputs).render(hydrated_context) - return inputs diff --git a/lib/bamboo_engine/builder/__init__.py b/lib/bamboo_engine/builder/__init__.py deleted file mode 100644 index 01852d1..0000000 --- a/lib/bamboo_engine/builder/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from .builder import * # noqa -from .flow import * # noqa diff --git a/lib/bamboo_engine/builder/builder.py b/lib/bamboo_engine/builder/builder.py deleted file mode 100644 index 4b25d5c..0000000 --- a/lib/bamboo_engine/builder/builder.py +++ /dev/null @@ -1,224 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import copy -import queue - -from bamboo_engine.utils.string import unique_id - -from .flow.data import Data, Params -from .flow.event import ExecutableEndEvent - - -__all__ = ["build_tree"] - -__skeleton = { - "id": None, - "start_event": None, - "end_event": None, - "activities": {}, - "gateways": {}, - "flows": {}, - "data": {"inputs": {}, "outputs": []}, -} - -__node_type = { - "ServiceActivity": "activities", - "SubProcess": "activities", - "EmptyEndEvent": "end_event", - "EmptyStartEvent": "start_event", - "ParallelGateway": "gateways", - "ConditionalParallelGateway": "gateways", - "ExclusiveGateway": "gateways", - "ConvergeGateway": "gateways", -} - -__start_elem = {"EmptyStartEvent"} - -__end_elem = {"EmptyEndEvent"} - -__multiple_incoming_type = { - "ServiceActivity", - "ConvergeGateway", - "EmptyEndEvent", - "ParallelGateway", - "ConditionalParallelGateway", - "ExclusiveGateway", - "SubProcess", -} - -__incoming = "__incoming" - - -def build_tree(start_elem, id=None, data=None): - tree = copy.deepcopy(__skeleton) - elem_queue = queue.Queue() - processed_elem = set() - - tree[__incoming] = {} - elem_queue.put(start_elem) - - while not elem_queue.empty(): - # get elem - elem = elem_queue.get() - - # update node when we meet again - if elem.id in processed_elem: - __update(tree, elem) - continue - - # add to queue - for e in elem.outgoing: - elem_queue.put(e) - - # mark as processed - processed_elem.add(elem.id) - - # tree grow - __grow(tree, elem) - - del tree[__incoming] - tree["id"] = id or unique_id("p") - user_data = data.to_dict() if isinstance(data, Data) else data 
- tree["data"] = user_data or tree["data"] - return tree - - -def __update(tree, elem): - node_type = __node_type[elem.type()] - node = tree[node_type] if node_type == "end_event" else tree[node_type][elem.id] - node["incoming"] = tree[__incoming][elem.id] - - -def __grow(tree, elem): - if elem.type() in __start_elem: - outgoing = unique_id("f") - tree["start_event"] = { - "incoming": "", - "outgoing": outgoing, - "type": elem.type(), - "id": elem.id, - "name": elem.name, - } - - next_elem = elem.outgoing[0] - __grow_flow(tree, outgoing, elem, next_elem) - - elif elem.type() in __end_elem or isinstance(elem, ExecutableEndEvent): - tree["end_event"] = { - "incoming": tree[__incoming][elem.id], - "outgoing": "", - "type": elem.type(), - "id": elem.id, - "name": elem.name, - } - - elif elem.type() == "ServiceActivity": - outgoing = unique_id("f") - - tree["activities"][elem.id] = { - "incoming": tree[__incoming][elem.id], - "outgoing": outgoing, - "type": elem.type(), - "id": elem.id, - "name": elem.name, - "error_ignorable": elem.error_ignorable, - "timeout": elem.timeout, - "skippable": elem.skippable, - "retryable": elem.retryable, - "component": elem.component_dict(), - "optional": False, - } - - next_elem = elem.outgoing[0] - __grow_flow(tree, outgoing, elem, next_elem) - - elif elem.type() == "SubProcess": - outgoing = unique_id("f") - - subprocess_param = elem.params.to_dict() if isinstance(elem.params, Params) else elem.params - - subprocess = { - "id": elem.id, - "incoming": tree[__incoming][elem.id], - "name": elem.name, - "outgoing": outgoing, - "type": elem.type(), - "params": subprocess_param, - } - - subprocess["pipeline"] = build_tree(start_elem=elem.start, id=elem.id, data=elem.data) - - tree["activities"][elem.id] = subprocess - - next_elem = elem.outgoing[0] - __grow_flow(tree, outgoing, elem, next_elem) - - elif elem.type() == "ParallelGateway": - outgoing = [unique_id("f") for _ in range(len(elem.outgoing))] - - tree["gateways"][elem.id] = { - 
"id": elem.id, - "incoming": tree[__incoming][elem.id], - "outgoing": outgoing, - "type": elem.type(), - "name": elem.name, - } - - for i, next_elem in enumerate(elem.outgoing): - __grow_flow(tree, outgoing[i], elem, next_elem) - - elif elem.type() in {"ExclusiveGateway", "ConditionalParallelGateway"}: - outgoing = [unique_id("f") for _ in range(len(elem.outgoing))] - - tree["gateways"][elem.id] = { - "id": elem.id, - "incoming": tree[__incoming][elem.id], - "outgoing": outgoing, - "type": elem.type(), - "name": elem.name, - "conditions": elem.link_conditions_with(outgoing), - } - - for i, next_elem in enumerate(elem.outgoing): - __grow_flow(tree, outgoing[i], elem, next_elem) - - elif elem.type() == "ConvergeGateway": - outgoing = unique_id("f") - - tree["gateways"][elem.id] = { - "id": elem.id, - "incoming": tree[__incoming][elem.id], - "outgoing": outgoing, - "type": elem.type(), - "name": elem.name, - } - - next_elem = elem.outgoing[0] - __grow_flow(tree, outgoing, elem, next_elem) - - else: - raise Exception() - - -def __grow_flow(tree, outgoing, elem, next_element): - tree["flows"][outgoing] = { - "is_default": False, - "source": elem.id, - "target": next_element.id, - "id": outgoing, - } - if next_element.type() in __multiple_incoming_type: - tree[__incoming].setdefault(next_element.id, []).append(outgoing) - else: - tree[__incoming][next_element.id] = outgoing diff --git a/lib/bamboo_engine/builder/flow/__init__.py b/lib/bamboo_engine/builder/flow/__init__.py deleted file mode 100644 index 3d63692..0000000 --- a/lib/bamboo_engine/builder/flow/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from .activity import * # noqa -from .event import * # noqa -from .gateway import * # noqa -from .data import * # noqa diff --git a/lib/bamboo_engine/builder/flow/activity.py b/lib/bamboo_engine/builder/flow/activity.py deleted file mode 100644 index 92e4fdf..0000000 --- a/lib/bamboo_engine/builder/flow/activity.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from .base import Element -from bamboo_engine.eri import NodeType -from bamboo_engine.utils.collections import FancyDict - -__all__ = ["ServiceActivity", "SubProcess"] - - -class ServiceActivity(Element): - def __init__( - self, component_code=None, error_ignorable=False, timeout=None, skippable=True, retryable=True, *args, **kwargs - ): - self.component = FancyDict({"code": component_code, "inputs": FancyDict({})}) - self.error_ignorable = error_ignorable - self.timeout = timeout - self.skippable = skippable - self.retryable = retryable - super(ServiceActivity, self).__init__(*args, **kwargs) - - def type(self): - return NodeType.ServiceActivity.value - - def component_dict(self): - return { - "code": self.component.code, - "inputs": {key: var.to_dict() for key, var in list(self.component.inputs.items())}, - } - - -class SubProcess(Element): - def __init__(self, start=None, data=None, params=None, global_outputs=None, *args, **kwargs): - self.start = start - self.data = data - self.params = params or {} - self.global_outputs = FancyDict(global_outputs or {}) - super(SubProcess, self).__init__(*args, **kwargs) - - def type(self): - return NodeType.SubProcess.value diff --git a/lib/bamboo_engine/builder/flow/base.py b/lib/bamboo_engine/builder/flow/base.py deleted file mode 100644 index cb1ea8e..0000000 --- a/lib/bamboo_engine/builder/flow/base.py +++ /dev/null @@ -1,78 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from bamboo_engine.utils.string import unique_id - -__all__ = ["Element"] - - -class Element(object): - def __init__(self, id=None, name=None, outgoing=None): - self.id = id or unique_id("e") - self.name = name - self.outgoing = outgoing or [] - - def extend(self, element): - """ - build a connection from self to element and return element - :param element: target - :rtype: Element - """ - self.outgoing.append(element) - return element - - def connect(self, *args): - """ - build connections from self to elements in args and return self - :param args: target elements - :rtype: Element - """ - for e in args: - self.outgoing.append(e) - return self - - def converge(self, element): - """ - converge all connection those diverge from self to element and return element - :param element: target - :rtype: Element - """ - for e in self.outgoing: - e.tail().connect(element) - return element - - def to(self, element): - return element - - def tail(self): - """ - get tail element for self - :rtype: Element - """ - is_tail = len(self.outgoing) == 0 - e = self - - while not is_tail: - e = e.outgoing[0] - is_tail = len(e.outgoing) == 0 - - return e - - def type(self): - raise NotImplementedError() - - def __eq__(self, other): - return self.id == other.id - - def __repr__(self): - return "<{cls} {name}:{id}>".format(cls=type(self).__name__, name=self.name, id=self.id) diff --git a/lib/bamboo_engine/builder/flow/data.py b/lib/bamboo_engine/builder/flow/data.py deleted file mode 100644 index b5a9e79..0000000 --- a/lib/bamboo_engine/builder/flow/data.py +++ /dev/null @@ -1,96 +0,0 @@ -# -*- coding: 
utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from bamboo_engine.utils.collections import FancyDict - - -class Data(object): - def __init__(self, inputs=None, outputs=None, pre_render_keys=None): - self.inputs = FancyDict(inputs or {}) - self.outputs = outputs or [] - self.pre_render_keys = pre_render_keys or [] - - def to_dict(self): - base = {"inputs": {}, "outputs": self.outputs, "pre_render_keys": self.pre_render_keys} - - for key, value in list(self.inputs.items()): - base["inputs"][key] = value.to_dict() if isinstance(value, Var) else value - - return base - - -class Params(object): - def __init__(self, params=None): - self.params = FancyDict(params or {}) - - def to_dict(self): - base = {} - - for key, value in list(self.params.items()): - base[key] = value.to_dict() if isinstance(value, Var) else value - - return base - - -class Var(object): - PLAIN = "plain" - SPLICE = "splice" - LAZY = "lazy" - - def __init__(self, type, value, custom_type=None): - self.type = type - self.value = value - self.custom_type = custom_type - - def to_dict(self): - base = {"type": self.type, "value": self.value} - if self.type == self.LAZY: - base["custom_type"] = self.custom_type - - return base - - -class DataInput(Var): - def __init__(self, *args, **kwargs): - super(DataInput, self).__init__(*args, 
**kwargs) - - def to_dict(self): - base = super(DataInput, self).to_dict() - base["is_param"] = True - return base - - -class NodeOutput(Var): - def __init__(self, source_act, source_key, *args, **kwargs): - self.source_act = source_act - self.source_key = source_key - kwargs["value"] = None - super(NodeOutput, self).__init__(*args, **kwargs) - - def to_dict(self): - base = super(NodeOutput, self).to_dict() - base["source_act"] = self.source_act - base["source_key"] = self.source_key - return base - - -class RewritableNodeOutput(Var): - def __init__(self, source_act, *args, **kwargs): - self.source_act = source_act - kwargs["value"] = None - super(RewritableNodeOutput, self).__init__(*args, **kwargs) - - def to_dict(self): - base = super(RewritableNodeOutput, self).to_dict() - base["source_act"] = self.source_act - return base diff --git a/lib/bamboo_engine/builder/flow/event.py b/lib/bamboo_engine/builder/flow/event.py deleted file mode 100644 index ce4b793..0000000 --- a/lib/bamboo_engine/builder/flow/event.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" -from bamboo_engine.eri import NodeType -from .base import Element - -__all__ = ["EmptyEndEvent", "EmptyStartEvent", "ExecutableEndEvent"] - - -class EmptyStartEvent(Element): - def type(self): - return NodeType.EmptyStartEvent.value - - -class EmptyEndEvent(Element): - def type(self): - return NodeType.EmptyEndEvent.value - - -class ExecutableEndEvent(Element): - def __init__(self, type, **kwargs): - self._type = type - super(ExecutableEndEvent, self).__init__(**kwargs) - - def type(self): - return self._type diff --git a/lib/bamboo_engine/builder/flow/gateway.py b/lib/bamboo_engine/builder/flow/gateway.py deleted file mode 100644 index 49dd11b..0000000 --- a/lib/bamboo_engine/builder/flow/gateway.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from bamboo_engine.eri import NodeType - -from .base import Element - -__all__ = [ - "ParallelGateway", - "ExclusiveGateway", - "ConvergeGateway", - "ConditionalParallelGateway", -] - - -class ParallelGateway(Element): - def type(self): - return NodeType.ParallelGateway.value - - -class ConditionGateway(Element): - def __init__(self, conditions=None, *args, **kwargs): - self.conditions = conditions or {} - super(ConditionGateway, self).__init__(*args, **kwargs) - - def add_condition(self, index, evaluate): - self.conditions[index] = evaluate - - def link_conditions_with(self, outgoing): - conditions = {} - for i, out in enumerate(outgoing): - conditions[out] = {"evaluate": self.conditions[i]} - - return conditions - - -class ConditionalParallelGateway(ConditionGateway): - def type(self): - return NodeType.ConditionalParallelGateway.value - - -class ExclusiveGateway(ConditionGateway): - def type(self): - return NodeType.ExclusiveGateway.value - - -class ConvergeGateway(Element): - def type(self): - return NodeType.ConvergeGateway.value diff --git a/lib/bamboo_engine/config.py b/lib/bamboo_engine/config.py deleted file mode 100644 index 698af01..0000000 --- a/lib/bamboo_engine/config.py +++ /dev/null @@ -1,66 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -# 引擎内部配置模块 - - -class Settings: - """ - 引擎全局配置对象 - """ - - MAKO_SANDBOX_SHIELD_WORDS = [ - "ascii", - "bytearray", - "bytes", - "callable", - "chr", - "classmethod", - "compile", - "delattr", - "dir", - "divmod", - "exec", - "eval", - "filter", - "frozenset", - "getattr", - "globals", - "hasattr", - "hash", - "help", - "id", - "input", - "isinstance", - "issubclass", - "iter", - "locals", - "map", - "memoryview", - "next", - "object", - "open", - "print", - "property", - "repr", - "setattr", - "staticmethod", - "super", - "type", - "vars", - "__import__", - ] - - MAKO_SANDBOX_IMPORT_MODULES = {} - - RERUN_INDEX_OFFSET = 0 diff --git a/lib/bamboo_engine/context.py b/lib/bamboo_engine/context.py deleted file mode 100644 index 6ed2032..0000000 --- a/lib/bamboo_engine/context.py +++ /dev/null @@ -1,169 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -# 流程上下文相关逻辑封装模块 - - -import logging -from weakref import WeakValueDictionary -from typing import List, Dict, Any - -from bamboo_engine.eri import ( - ContextValue, - EngineRuntimeInterface, - Variable, - ContextValueType, -) -from .template.template import Template -from .utils.string import deformat_var_key - -logger = logging.getLogger("bamboo_engine") - - -class PlainVariable(Variable): - """ - 普通变量 - """ - - def __init__(self, key: str, value: Any): - self.key = key - self.value = value - - def get(self): - return self.value - - -class SpliceVariable(Variable): - """ - 模板类型变量,会尝试在流程上下文中解析变量中定义的模板 - """ - - def __init__(self, key: str, value: Any, pool: WeakValueDictionary): - self.key = key - self.value = value - self.pool = pool - self.refs = [k for k in Template(value).get_reference()] - - def get(self): - context = {} - for r in self.refs: - if r not in self.pool: - continue - - var = self.pool[r] - if issubclass(var.__class__, Variable): - var = var.get() - context[deformat_var_key(r)] = var - - return Template(self.value).render(context=context) - - -def _raw_key(key: str) -> str: - return key - - -class Context: - """ - 流程执行上下文,封装引擎在执行流程的过程中对上下文进行的操作和逻辑 - """ - - def __init__( - self, - runtime: EngineRuntimeInterface, - values: List[ContextValue], - additional_data: dict, - ): - """ - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param values: 上下文数据列表 - :type values: List[ContextValue] - :param additional_data: 额外数据字典 - :type additional_data: dict - """ - self.values = values - self.runtime = runtime - self.pool = WeakValueDictionary() - self.variables = {} - self.additional_data = additional_data - - # 将上下文数据转换成变量,变量内封装了自身解析的逻辑,且实现了 Variable 接口 - for v in self.values: - if v.type is ContextValueType.PLAIN: - self.variables[v.key] = PlainVariable(key=v.key, value=v.value) - elif v.type is ContextValueType.SPLICE: - self.variables[v.key] = SpliceVariable(key=v.key, value=v.value, pool=self.pool) - elif v.type is 
ContextValueType.COMPUTE: - self.variables[v.key] = self.runtime.get_compute_variable( - code=v.code, - key=v.key, - value=SpliceVariable(key=v.key, value=v.value, pool=self.pool), - additional_data=self.additional_data, - ) - - for k, var in self.variables.items(): - self.pool[k] = var - - def hydrate(self, deformat=False, mute_error=False) -> Dict[str, Any]: - """ - 将当前上下文中的数据清洗成 Dict[str, Any] 类型的朴素数据,过程中会进行变量引用的分析和替换 - - :param deformat: 是否将返回字典中的 key 值从 ${%s} 替换为 %s - :type deformat: bool, optional - :return: 上下文数据朴素值字典 - :rtype: Dict[str, Any] - """ - key_formatter = deformat_var_key if deformat else _raw_key - hydrated = {} - - for key, var in self.pool.items(): - try: - hydrated[key_formatter(key)] = var.get() - except Exception as e: - if not mute_error: - raise e - logger.exception("%s get error." % key) - hydrated[key_formatter(key)] = str(e) - - return hydrated - - def extract_outputs( - self, - pipeline_id: str, - data_outputs: Dict[str, str], - execution_data_outputs: Dict[str, Any], - ): - """ - 将某个节点的输出提取到流程上下文中 - - :param pipeline_id: 上下文对应的流程/子流程 ID - :type pipeline_id: str - :param data_outputs: 节点输出键映射 - :type data_outputs: Dict[str, str] - :param execution_data_outputs: 节点执行数据输出 - :type execution_data_outputs: Dict[str, Any] - """ - update = {} - for origin_key, target_key in data_outputs.items(): - if origin_key not in execution_data_outputs: - continue - - update[target_key] = ContextValue( - key=target_key, - type=ContextValueType.PLAIN, - value=execution_data_outputs[origin_key], - ) - - self.runtime.upsert_plain_context_values(pipeline_id=pipeline_id, update=update) diff --git a/lib/bamboo_engine/engine.py b/lib/bamboo_engine/engine.py deleted file mode 100644 index 6a85e62..0000000 --- a/lib/bamboo_engine/engine.py +++ /dev/null @@ -1,1069 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. 
-Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -# 引擎核心模块 - -import time -import random -import logging -import traceback -from typing import Optional -from contextlib import contextmanager - - -from . import states -from . import validator -from .local import set_node_info, clear_node_info, CurrentNodeInfo -from .exceptions import InvalidOperationError, NotFoundError, StateVersionNotMatchError -from .handler import HandlerFactory -from .metrics import ( - ENGINE_RUNNING_PROCESSES, - ENGINE_RUNNING_SCHEDULES, - ENGINE_PROCESS_RUNNING_TIME, - ENGINE_SCHEDULE_RUNNING_TIME, - ENGINE_NODE_EXECUTE_TIME, - ENGINE_NODE_SCHEDULE_TIME, - setup_gauge, - setup_histogram, -) -from .eri import ( - EngineRuntimeInterface, - ScheduleType, - NodeType, - State, - ExecutionData, - DataInput, - Node, -) -from .utils.string import get_lower_case_name -from .utils.host import get_hostname - -logger = logging.getLogger("bamboo_engine") - - -class Engine: - """ - 流程引擎,封装流程核心调度逻辑 - """ - - PURE_SKIP_ENABLE_NODE_TYPE = {NodeType.ServiceActivity, NodeType.EmptyStartEvent} - - def __init__(self, runtime: EngineRuntimeInterface): - self.runtime = runtime - self._hostname = get_hostname() - - # api - def run_pipeline( - self, - pipeline: dict, - root_pipeline_data: Optional[dict] = None, - root_pipeline_context: Optional[dict] = None, - subprocess_context: Optional[dict] = None, - **options - ): - """ - 运行流程 - - :param pipeline: 流程数据 - :type pipeline: dict - 
:param root_pipeline_data 根流程数据 - :type root_pipeline_data: dict - :param root_pipeline_context 根流程上下文 - :type root_pipeline_context: dict - :param subprocess_context 子流程预置流程上下文 - :type subprocess_context: dict - """ - - root_pipeline_data = {} if root_pipeline_data is None else root_pipeline_data - root_pipeline_context = {} if root_pipeline_context is None else root_pipeline_context - subprocess_context = {} if subprocess_context is None else subprocess_context - cycle_tolerate = options.get("cycle_tolerate", False) - validator.validate_and_process_pipeline(pipeline, cycle_tolerate) - - self.runtime.pre_prepare_run_pipeline( - pipeline, root_pipeline_data, root_pipeline_context, subprocess_context, **options - ) - - process_id = self.runtime.prepare_run_pipeline( - pipeline, root_pipeline_data, root_pipeline_context, subprocess_context, **options - ) - # execute from start event - self.runtime.execute( - process_id=process_id, - node_id=pipeline["start_event"]["id"], - root_pipeline_id=pipeline["id"], - parent_pipeline_id=pipeline["id"], - ) - - self.runtime.post_prepare_run_pipeline( - pipeline, root_pipeline_data, root_pipeline_context, subprocess_context, **options - ) - - def pause_pipeline(self, pipeline_id: str): - """ - 暂停流程 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - """ - if not self.runtime.has_state(pipeline_id): - raise NotFoundError("node({}) does not exist".format(pipeline_id)) - - self.runtime.pre_pause_pipeline(pipeline_id) - - self.runtime.set_state(node_id=pipeline_id, to_state=states.SUSPENDED) - - self.runtime.post_pause_pipeline(pipeline_id) - - def revoke_pipeline(self, pipeline_id: str): - """ - 撤销流程 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - """ - if not self.runtime.has_state(pipeline_id): - raise NotFoundError("node({}) does not exist".format(pipeline_id)) - - self.runtime.pre_revoke_pipeline(pipeline_id) - - self.runtime.set_state(node_id=pipeline_id, to_state=states.REVOKED) - - 
self.runtime.post_revoke_pipeline(pipeline_id) - - def resume_pipeline(self, pipeline_id: str): - """ - 继续流程 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - """ - state = self.runtime.get_state(pipeline_id) - - if state.name != states.SUSPENDED: - raise InvalidOperationError("pipeline({}) state is: {}".format(pipeline_id, state.name)) - - info_list = self.runtime.get_suspended_process_info(pipeline_id) - - self.runtime.pre_resume_pipeline(pipeline_id) - - self.runtime.set_state(node_id=pipeline_id, to_state=states.RUNNING) - - if info_list: - self.runtime.batch_resume(process_id_list=[i.process_id for i in info_list]) - for info in info_list: - self.runtime.execute( - process_id=info.process_id, - node_id=info.current_node, - root_pipeline_id=info.root_pipeline_id, - parent_pipeline_id=info.top_pipeline_id, - ) - - self.runtime.post_resume_pipeline(pipeline_id) - - def pause_node_appoint(self, node_id: str): - """ - 预约暂停节点 - - :param node_id: 节点 ID - :type node_id: str - """ - node = self.runtime.get_node(node_id) - - if node.type == NodeType.SubProcess: - raise InvalidOperationError("can not use pause_node_appoint api for {}".format(node.type)) - - self.runtime.pre_pause_node(node_id) - - self.runtime.set_state(node_id=node_id, to_state=states.SUSPENDED) - - self.runtime.post_pause_node(node_id) - - def resume_node_appoint(self, node_id: str): - """ - 继续由于节点暂停被阻塞的流程的执行 - - :param node_id: 节点 ID - :type node_id: str - """ - node = self.runtime.get_node(node_id) - - if node.type == NodeType.SubProcess: - raise InvalidOperationError("can not use pause_node_appoint api for {}".format(node.type)) - - self.runtime.pre_resume_node(node_id) - - info_list = self.runtime.get_suspended_process_info(node_id) - - self.runtime.set_state(node_id=node_id, to_state=states.READY) - - # found process suspended by node suspend - for info in info_list: - self.runtime.resume(process_id=info.process_id) - self.runtime.execute( - process_id=info.process_id, - 
node_id=info.current_node, - root_pipeline_id=info.root_pipeline_id, - parent_pipeline_id=info.top_pipeline_id, - ) - - self.runtime.post_resume_node(node_id) - - def retry_node(self, node_id: str, data: Optional[dict] = None): - """ - 重试节点 - - :param node_id: 节点 ID - :type node_id: str - :param data: 重试时使用的输入数据, defaults to None - :type data: Optional[dict], optional - """ - node = self.runtime.get_node(node_id) - - if not node.can_retry: - raise InvalidOperationError("can not retry node({}) with can_retry({})".format(node_id, node.can)) - - state = self.runtime.get_state(node_id) - - process_info = self._ensure_state_is_fail_and_return_process_info(state) - - self.runtime.pre_retry_node(node_id, data) - - if data is not None: - self.runtime.set_data_inputs( - node_id, - {k: DataInput(need_render=True, value=v) for k, v in data.items()}, - ) - - self._add_history(node_id, state) - - self.runtime.set_state( - node_id=node_id, - to_state=states.READY, - is_retry=True, - refresh_version=True, - clear_started_time=True, - clear_archived_time=True, - ) - - self.runtime.execute( - process_id=process_info.process_id, - node_id=node_id, - root_pipeline_id=process_info.root_pipeline_id, - parent_pipeline_id=process_info.top_pipeline_id, - ) - - self.runtime.post_retry_node(node_id, data) - - def retry_subprocess(self, node_id: str): - """ - 重试进入失败的子流程 - - :param node_id: 子流程 ID - :type node_id: str - :raises InvalidOperationError: [description] - """ - node = self.runtime.get_node(node_id) - - if node.type is not NodeType.SubProcess: - raise InvalidOperationError("node({}) type is not SubProcess".format(node_id)) - - state = self.runtime.get_state(node_id) - - process_info = self._ensure_state_is_fail_and_return_process_info(state) - - self.runtime.pre_retry_subprocess(node_id) - - # reset pipeline stack - if process_info.pipeline_stack[-1] == node_id: - self.runtime.set_pipeline_stack(process_info.process_id, process_info.pipeline_stack[:-1]) - - 
self._add_history(node_id, state) - - self.runtime.set_state( - node_id=node_id, - to_state=states.READY, - is_retry=True, - refresh_version=True, - clear_started_time=True, - clear_archived_time=True, - ) - - self.runtime.execute( - process_id=process_info.process_id, - node_id=node_id, - root_pipeline_id=process_info.root_pipeline_id, - parent_pipeline_id=process_info.top_pipeline_id, - ) - - self.runtime.post_retry_subprocess(node_id) - - def skip_node(self, node_id: str): - """ - 跳过失败的节点继续执行 - - :param node_id: 节点 ID - :type node_id: str - :raises InvalidOperationError: [description] - :raises InvalidOperationError: [description] - """ - node = self.runtime.get_node(node_id) - - if not node.can_skip: - raise InvalidOperationError("can not skip this node") - - if node.type not in self.PURE_SKIP_ENABLE_NODE_TYPE: - raise InvalidOperationError("can not use skip_node api for {}".format(node.type)) - - state = self.runtime.get_state(node_id) - - process_info = self._ensure_state_is_fail_and_return_process_info(state) - - self.runtime.pre_skip_node(node_id) - - # pure skip node type only has 1 next node - next_node_id = node.target_nodes[0] - - self._add_history(node_id, state) - - self.runtime.set_state( - node_id=node_id, - to_state=states.FINISHED, - is_skip=True, - refresh_version=True, - set_archive_time=True, - ) - - # 跳过节点时不再做节点输出提取到上下文的操作 - # 因为节点失败的位置未知,可能提取出来的变量是无法预知的,会导致不可预知的行为 - self.runtime.execute( - process_id=process_info.process_id, - node_id=next_node_id, - root_pipeline_id=process_info.root_pipeline_id, - parent_pipeline_id=process_info.top_pipeline_id, - ) - - self.runtime.post_skip_node(node_id) - - def skip_exclusive_gateway(self, node_id: str, flow_id: str): - """ - 跳过执行失败的分支网关继续执行 - - :param node_id: 节点 ID - :type node_id: str - :param flow_id: 需要继续执行的流 ID - :type flow_id: str - :raises InvalidOperationError: [description] - """ - node = self.runtime.get_node(node_id) - - if node.type != NodeType.ExclusiveGateway: - raise 
InvalidOperationError("{} is not exclusive gateway, actual: {}".format(node_id, node.type.value)) - - next_node_id = node.targets[flow_id] - - state = self.runtime.get_state(node_id) - - process_info = self._ensure_state_is_fail_and_return_process_info(state) - - self.runtime.pre_skip_exclusive_gateway(node_id, flow_id) - - self._add_history(node_id, state) - - self.runtime.set_state( - node_id=node_id, - to_state=states.FINISHED, - is_skip=True, - refresh_version=True, - set_archive_time=True, - ) - - self.runtime.execute( - process_id=process_info.process_id, - node_id=next_node_id, - root_pipeline_id=process_info.root_pipeline_id, - parent_pipeline_id=process_info.top_pipeline_id, - ) - - self.runtime.post_skip_exclusive_gateway(node_id, flow_id) - - def skip_conditional_parallel_gateway(self, node_id: str, flow_ids: list, converge_gateway_id: str): - """ - 跳过执行失败的条件并行网关继续执行 - - :param node_id: 节点 ID - :type node_id: str - :param flow_ids: 需要继续执行的流 ID 列表 - :type flow_ids: list - :param converge_gateway_id: 目标汇聚网关 ID - :type converge_gateway_id: str - :raises InvalidOperationError: [description] - """ - node = self.runtime.get_node(node_id) - - if node.type != NodeType.ConditionalParallelGateway: - raise InvalidOperationError( - "{} is not conditional parallel gateway, actual: {}".format(node_id, node.type.value) - ) - - state = self.runtime.get_state(node_id) - process_info = self._ensure_state_is_fail_and_return_process_info(state) - process_id = process_info.process_id - - self.runtime.pre_skip_conditional_parallel_gateway(node_id, flow_ids, converge_gateway_id) - - self.runtime.sleep(process_id) - fork_targets = [node.targets[flow_id] for flow_id in flow_ids] - from_to = {target: converge_gateway_id for target in fork_targets} - dispatch_processes = self.runtime.fork( - parent_id=process_info.process_id, - root_pipeline_id=process_info.root_pipeline_id, - pipeline_stack=process_info.pipeline_stack, - from_to=from_to, - ) - children = [d.process_id for d in 
dispatch_processes] - self.runtime.join(process_id, children) - for d in dispatch_processes: - self.runtime.execute( - process_id=d.process_id, - node_id=d.node_id, - root_pipeline_id=process_info.root_pipeline_id, - parent_pipeline_id=process_info.top_pipeline_id, - ) - - self._add_history(node_id, state) - - self.runtime.set_state( - node_id=node_id, - to_state=states.FINISHED, - is_skip=True, - refresh_version=True, - set_archive_time=True, - ) - - self.runtime.post_skip_conditional_parallel_gateway(node_id, flow_ids, converge_gateway_id) - - def forced_fail_activity(self, node_id: str, ex_data: str): - """ - 强制失败某个 Activity - - :param node_id: 节点 ID - :type node_id: str - :param ex_data: 强制失败时写入节点有慈航数据的信息 - :type ex_data: str - :raises InvalidOperationError: [description] - :raises InvalidOperationError: [description] - """ - node = self.runtime.get_node(node_id) - - if node.type != NodeType.ServiceActivity: - raise InvalidOperationError("{} is not activity, actual: {}".format(node_id, node.type.value)) - - state = self.runtime.get_state(node_id) - - if state.name != states.RUNNING: - raise InvalidOperationError("{} state is not RUNNING, actual: {}".format(node_id, state.name)) - - process_id = self.runtime.get_process_id_with_current_node_id(node_id) - - if not process_id: - raise InvalidOperationError("can not find process with current node id: {}".format(node_id)) - - self.runtime.pre_forced_fail_activity(node_id, ex_data) - - outputs = self.runtime.get_execution_data_outputs(node_id) - - outputs["ex_data"] = ex_data - outputs["_forced_failed"] = True - - old_ver = state.version - new_ver = self.runtime.set_state( - node_id=node_id, - to_state=states.FAILED, - refresh_version=True, - set_archive_time=True, - ) - - self.runtime.set_execution_data_outputs(node_id, outputs) - - self.runtime.kill(process_id) - - self.runtime.post_forced_fail_activity(node_id, ex_data, old_ver, new_ver) - - def callback(self, node_id: str, version: str, data: dict): - """ - 
回调某个节点 - - :param node_id: 节点 ID - :type node_id: str - :param version: 回调执行版本 - :type version: str - :param data: 回调数据 - :type data: dict - :raises InvalidOperationError: [description] - :raises InvalidOperationError: [description] - :raises InvalidOperationError: [description] - :raises InvalidOperationError: [description] - """ - - process_info = self.runtime.get_sleep_process_info_with_current_node_id(node_id) - - if not process_info: - raise InvalidOperationError("can not find process with current node id: {}".format(node_id)) - - state = self.runtime.get_state(node_id) - - schedule = self.runtime.get_schedule_with_node_and_version(node_id, version) - - if state.version != version: - self.runtime.expire_schedule(schedule.id) - raise InvalidOperationError("node version {} not exist".format(version)) - - if schedule.finished: - raise InvalidOperationError("scheudle is already finished") - - if schedule.expired: - raise InvalidOperationError("scheudle is already expired") - - self.runtime.pre_callback(node_id, version, data) - - data_id = self.runtime.set_callback_data(node_id, state.version, data) - - self.runtime.schedule(process_info.process_id, node_id, schedule.id, data_id) - - self.runtime.post_callback(node_id, version, data) - - # engine event - @setup_gauge(ENGINE_RUNNING_PROCESSES) - @setup_histogram(ENGINE_PROCESS_RUNNING_TIME) - def execute(self, process_id: int, node_id: str, root_pipeline_id: str, parent_pipeline_id: str): - """ - 在某个进程上从某个节点开始进入推进循环 - - :param process_id: 进程 ID - :type process_id: int - :param node_id: 节点 ID - :type node_id: str - :param root_pipeline_id: 根流程 ID - :type root_pipeline_id: str - :param parent_pipeline_id: 父流程 ID - :type parent_pipeline_id: str - """ - current_node_id = node_id - - # 推进前准备 - try: - process_info = self.runtime.get_process_info(process_id) - self.runtime.wake_up(process_id) - except Exception: - ex_data = traceback.format_exc() - logger.exception( - "[%s]execute node(%s) prepare fail", - 
root_pipeline_id, - current_node_id, - ) - - self.runtime.sleep(process_id) - - outputs = self.runtime.get_execution_data_outputs(current_node_id) - outputs["ex_data"] = ex_data - self.runtime.set_execution_data_outputs(current_node_id, outputs) - - self.runtime.set_state( - node_id=current_node_id, - to_state=states.FAILED, - root_id=root_pipeline_id, - parent_id=parent_pipeline_id, - set_started_time=True, - set_archive_time=True, - ) - - return - - # 推进循环 - while True: - # 进程心跳 - try: - self.runtime.beat(process_id) - except Exception: - # do not fail the flow when beat failed - logger.exception("process(%s) beat error" % process_id) - - try: - # 遇到推进终点后需要尝试唤醒父进程 - if current_node_id == process_info.destination_id: - self.runtime.die(process_id) - wake_up_seccess = self.runtime.child_process_finish(process_info.parent_id, process_id) - - if wake_up_seccess: - - self.runtime.execute( - process_id=process_info.parent_id, - node_id=process_info.destination_id, - root_pipeline_id=process_info.root_pipeline_id, - parent_pipeline_id=process_info.top_pipeline_id, - ) - - return - - logger.info("[pipeline-trace](root_pipeline: %s) execute node %s" % (root_pipeline_id, current_node_id)) - self.runtime.set_current_node(process_id, current_node_id) - - # 冻结检测 - if self.runtime.is_frozen(process_id): - logger.info( - "root pipeline[%s] freeze at node %s", - process_info.root_pipeline_id, - current_node_id, - ) - self.runtime.freeze(process_id) - return - - node_state_map = self.runtime.batch_get_state_name(process_info.pipeline_stack) - - # 检测根流程是否被撤销 - if node_state_map[process_info.root_pipeline_id] == states.REVOKED: - self.runtime.die(process_id) - logger.info( - "root pipeline[%s] revoked checked at node %s", - process_info.root_pipeline_id, - current_node_id, - ) - return - - # 检测流程栈中是否有被暂停的流程 - for pid in process_info.pipeline_stack: - if node_state_map[pid] == states.SUSPENDED: - logger.info( - "root pipeline[%s] process %s suspended by subprocess %s", - 
process_info.root_pipeline_id, - process_id, - pid, - ) - self.runtime.suspend(process_id, pid) - return - - node = self.runtime.get_node(current_node_id) - node_state = self.runtime.get_state_or_none(current_node_id) - loop = 1 - inner_loop = 1 - reset_mark_bit = False - - if node_state: - rerun_limit = self.runtime.node_rerun_limit(process_info.root_pipeline_id, current_node_id) - # 重入次数超过限制 - if ( - node_state.name == states.FINISHED - and node.type != NodeType.SubProcess - and node_state.loop > rerun_limit - ): - exec_outputs = self.runtime.get_execution_data_outputs(current_node_id) - exec_outputs["ex_data"] = "node execution exceed rerun limit {}".format(rerun_limit) - - self.runtime.set_execution_data_outputs(current_node_id, exec_outputs) - self.runtime.set_state( - node_id=current_node_id, - to_state=states.FAILED, - set_archive_time=True, - ) - self.runtime.sleep(process_id) - - return - - # 检测节点是否被预约暂停 - if node_state.name == states.SUSPENDED: - # 预约暂停的节点在预约时获取不到 root_id 和 parent_id,故在此进行设置 - self.runtime.set_state_root_and_parent( - node_id=current_node_id, - root_id=process_info.root_pipeline_id, - parent_id=process_info.top_pipeline_id, - ) - self.runtime.suspend(process_id, current_node_id) - logger.info( - "root_pipeline[%s] process %s suspended by node %s", - process_info.root_pipeline_id, - process_id, - current_node_id, - ) - return - - # 设置状态前检测 - if node_state.name not in states.INVERTED_TRANSITION[states.RUNNING]: - self.runtime.sleep(process_id) - return - - if node_state.name == states.FINISHED: - loop = node_state.loop + 1 - inner_loop = node_state.inner_loop + 1 - reset_mark_bit = True - - # 重入前记录历史 - if node_state.name == states.FINISHED: - self._add_history(node_id=current_node_id, state=node_state) - - version = self.runtime.set_state( - node_id=current_node_id, - to_state=states.RUNNING, - loop=loop, - inner_loop=inner_loop, - root_id=process_info.root_pipeline_id, - parent_id=process_info.top_pipeline_id, - set_started_time=True, - 
reset_skip=reset_mark_bit, - reset_retry=reset_mark_bit, - reset_error_ignored=reset_mark_bit, - refresh_version=reset_mark_bit, - ) - set_node_info(CurrentNodeInfo(node_id=current_node_id, version=version, loop=loop)) - - logger.info( - "root pipeline[%s] before execute %s(%s) state: %s", - process_info.root_pipeline_id, - node.__class__.__name__, - current_node_id, - node_state, - ) - handler = HandlerFactory.get_handler(node, self.runtime) - type_label = self._get_metrics_node_type(node) - execute_start = time.time() - execute_result = handler.execute(process_info, loop, inner_loop, version) - logger.info( - "root pipeline[%s] node(%s) execute result: %s", - process_info.root_pipeline_id, - node.id, - execute_result.__dict__, - ) - - ENGINE_NODE_EXECUTE_TIME.labels(type=type_label, hostname=self._hostname).observe( - time.time() - execute_start - ) - - # 进程是否要进入睡眠 - if execute_result.should_sleep: - self.runtime.sleep(process_id) - - # 节点是否准备好进入调度 - if execute_result.schedule_ready: - schedule = self.runtime.set_schedule( - process_id=process_id, - node_id=current_node_id, - version=version, - schedule_type=execute_result.schedule_type, - ) - if execute_result.schedule_type == ScheduleType.POLL: - self.runtime.schedule(process_id, current_node_id, schedule.id) - # 是否有待调度的子进程 - elif execute_result.dispatch_processes: - children = [d.process_id for d in execute_result.dispatch_processes] - logger.info( - "root pipeline[%s] with top pipeline[%s] dispatch %s children: %s", - process_info.root_pipeline_id, - process_info.top_pipeline_id, - len(execute_result.dispatch_processes), - execute_result.dispatch_processes, - ) - self.runtime.join(process_id, children) - for d in execute_result.dispatch_processes: - self.runtime.execute( - process_id=d.process_id, - node_id=d.node_id, - root_pipeline_id=process_info.root_pipeline_id, - parent_pipeline_id=process_info.top_pipeline_id, - ) - - if execute_result.should_die: - self.runtime.die(process_id) - - if 
execute_result.should_sleep or execute_result.should_die: - return - - current_node_id = execute_result.next_node_id - except Exception as e: - ex_data = traceback.format_exc() - logger.warning( - "[%s]execute exception catch at node(%s): %s", - process_info.root_pipeline_id, - current_node_id, - ex_data, - ) - - # state version already changed, so give up this execute - if isinstance(e, StateVersionNotMatchError): - logger.warning( - "[%s]execute exception catch StateVersionNotMatchError at node(%s): %s", - process_info.root_pipeline_id, - current_node_id, - ex_data, - ) - return - - # make sure sleep call at first, because remain operations may have been completed in execute - self.runtime.sleep(process_info.process_id) - - outputs = self.runtime.get_execution_data_outputs(current_node_id) - outputs["ex_data"] = ex_data - self.runtime.set_execution_data_outputs(current_node_id, outputs) - - self.runtime.set_state( - node_id=current_node_id, - to_state=states.FAILED, - root_id=process_info.root_pipeline_id, - parent_id=process_info.top_pipeline_id, - set_started_time=True, - set_archive_time=True, - ) - - return - finally: - clear_node_info() - - @setup_gauge(ENGINE_RUNNING_SCHEDULES) - @setup_histogram(ENGINE_SCHEDULE_RUNNING_TIME) - def schedule( - self, - process_id: int, - node_id: str, - schedule_id: str, - callback_data_id: Optional[int] = None, - ): - """ - 在某个进程上开始某个节点的调度 - - :param process_id: 进程 ID - :type process_id: int - :param node_id: 节点 ID - :type node_id: str - :param schedule_id: 调度对象 ID - :type schedule_id: str - :param callback_data_id: 回调数据 ID, defaults to None - :type callback_data_id: Optional[int], optional - """ - root_pipeline_id = "" - try: - process_info = self.runtime.get_process_info(process_id) - root_pipeline_id = process_info.root_pipeline_id - - state = self.runtime.get_state(node_id) - schedule = self.runtime.get_schedule(schedule_id) - - # engine context prepare - set_node_info(CurrentNodeInfo(node_id=node_id, 
version=state.version, loop=state.loop)) - - # schedule alredy finished - if schedule.finished: - logger.warning( - "root pipeline[%s] schedule(%s) %s with version %s already finished", - root_pipeline_id, - schedule_id, - node_id, - schedule.version, - ) - return - - # 检查 schedule 是否过期 - if state.version != schedule.version: - logger.info( - "root pipeline[%s] schedule(%s) %s with version %s expired, current version: %s", - root_pipeline_id, - schedule_id, - node_id, - schedule.version, - state.version, - ) - self.runtime.expire_schedule(schedule_id) - return - - # 检查节点状态是否合法 - if state.name != states.RUNNING: - logger.info( - "root pipeline[%s] schedule(%s) %s with version %s state is not running: %s", - root_pipeline_id, - schedule_id, - node_id, - schedule.version, - state.name, - ) - self.runtime.expire_schedule(schedule_id) - return - - # try to get lock - lock_get = self.runtime.apply_schedule_lock(schedule_id) - - if not lock_get: - # only retry at multiple calback type - if schedule.type is not ScheduleType.MULTIPLE_CALLBACK: - logger.info( - "root pipeline[%s] schedule(%s) %s with version %s is not multiple callback type, will not retry to get lock", # noqa - root_pipeline_id, - schedule_id, - node_id, - schedule.version, - ) - return - - try_after = random.randint(1, 5) - logger.info( - "root pipeline[%s] schedule(%s) lock %s with data %s fetch fail, try after %s", - root_pipeline_id, - node_id, - schedule_id, - callback_data_id, - try_after, - ) - self.runtime.set_next_schedule( - process_id=process_id, - node_id=node_id, - schedule_id=schedule_id, - callback_data_id=callback_data_id, - schedule_after=try_after, - ) - return - - logger.info( - "[pipeline-trace](root_pipeline: %s) schedule node %s with version %s" - % (root_pipeline_id, node_id, schedule.version) - ) - with self._schedule_lock_keeper(schedule_id): - # 进程心跳 - self.runtime.beat(process_id) - - # fetch callback data - callback_data = None - if callback_data_id: - callback_data = 
self.runtime.get_callback_data(callback_data_id) - - # fetch node info and start schedule - node = self.runtime.get_node(node_id) - handler = HandlerFactory.get_handler(node, self.runtime) - type_label = self._get_metrics_node_type(node) - - logger.info( - "root pipeline[%s] before schedule node %s with data %s", - root_pipeline_id, - node, - callback_data, - ) - schedule_start = time.time() - schedule_result = handler.schedule(process_info, state.loop, state.inner_loop, schedule, callback_data) - ENGINE_NODE_SCHEDULE_TIME.labels(type=type_label, hostname=self._hostname).observe( - time.time() - schedule_start - ) - logger.info( - "root pipeline[%s] node(%s) schedule result: %s", - process_info.root_pipeline_id, - node.id, - schedule_result.__dict__, - ) - - if schedule_result.has_next_schedule: - self.runtime.set_next_schedule( - process_info.process_id, - node_id, - schedule_id, - schedule_result.schedule_after, - ) - - if schedule_result.schedule_done: - self.runtime.finish_schedule(schedule_id) - self.runtime.execute( - process_id=process_id, - node_id=schedule_result.next_node_id, - root_pipeline_id=process_info.root_pipeline_id, - parent_pipeline_id=process_info.top_pipeline_id, - ) - except Exception as e: - ex_data = traceback.format_exc() - logger.warning( - "root pipeline[%s] schedule exception catch at node(%s): %s", - root_pipeline_id, - node_id, - ex_data, - ) - - # state version already changed, so give up this schedule - if isinstance(e, StateVersionNotMatchError): - logger.exception( - "root pipeline[%s] schedule exception catch StateVersionNotMatchError at node(%s): %s", - root_pipeline_id, - node_id, - ex_data, - ) - return - - # make sure release_schedule_lock call at first, - # because remain operations may have been completed in execute - self.runtime.release_schedule_lock(schedule_id) - - outputs = self.runtime.get_execution_data_outputs(node_id) - outputs["ex_data"] = ex_data - self.runtime.set_execution_data_outputs(node_id, outputs) - - 
self.runtime.set_state(node_id=node_id, to_state=states.FAILED, set_archive_time=True) - finally: - clear_node_info() - - # help method - @contextmanager - def _schedule_lock_keeper(self, schedule_id: int): - yield - self.runtime.release_schedule_lock(schedule_id) - - def _add_history( - self, - node_id: str, - state: Optional[State] = None, - exec_data: Optional[ExecutionData] = None, - ) -> int: - if not state: - state = self.runtime.get_state(node_id) - - if not exec_data: - try: - exec_data = self.runtime.get_execution_data(node_id) - except NotFoundError: - # execution data may be lack with some node - logger.warning("can't not find execution data for %s at loop %s" % (node_id, state.loop)) - history_inputs = {} - history_outputs = {} - else: - history_inputs = exec_data.inputs - history_outputs = exec_data.outputs - - return self.runtime.add_history( - node_id=node_id, - started_time=state.started_time, - archived_time=state.archived_time, - loop=state.loop, - skip=state.skip, - retry=state.retry, - version=state.version, - inputs=history_inputs, - outputs=history_outputs, - ) - - def _ensure_state_is_fail_and_return_process_info(self, state: State) -> str: - if state.name != states.FAILED: - raise InvalidOperationError("{} state is not FAILED, actual {}".format(state.node_id, state.name)) - - process_info = self.runtime.get_sleep_process_info_with_current_node_id(state.node_id) - - if not process_info: - raise InvalidOperationError("can not find sleep process with current node id: {}".format(state.node_id)) - - return process_info - - def _get_metrics_node_type(self, node: Node) -> str: - if node.type != NodeType.ServiceActivity: - return get_lower_case_name(node.type.value) - - return "{}_{}".format(node.code, node.version) diff --git a/lib/bamboo_engine/eri/__init__.py b/lib/bamboo_engine/eri/__init__.py deleted file mode 100644 index 685aed2..0000000 --- a/lib/bamboo_engine/eri/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -""" 
-Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -""" -引擎运行时接口定义模块 -""" - -from .interfaces import * # noqa -from .models import * # noqa diff --git a/lib/bamboo_engine/eri/interfaces.py b/lib/bamboo_engine/eri/interfaces.py deleted file mode 100644 index b4afbf4..0000000 --- a/lib/bamboo_engine/eri/interfaces.py +++ /dev/null @@ -1,1433 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from datetime import datetime -from abc import ABCMeta, abstractmethod -from typing import List, Optional, Dict, Set, Any - -from .models import ( - State, - Node, - Schedule, - ScheduleType, - Data, - DataInput, - ExecutionData, - ExecutionHistory, - ExecutionShortHistory, - CallbackData, - ProcessInfo, - SuspendedProcessInfo, - DispatchProcess, - ContextValue, -) - -# plugin interface - -__version__ = "5.0.0" - - -def version(): - return __version__ - - -class Service(metaclass=ABCMeta): - """ - 服务对象接口 - """ - - def pre_execute(self, data: ExecutionData, root_pipeline_data: ExecutionData): - """ - execute 执行前执行的逻辑 - - :param data: 节点执行数据 - :type data: ExecutionData - :param root_pipeline_data: 根流程执行数据 - :type root_pipeline_data: ExecutionData - """ - - @abstractmethod - def execute(self, data: ExecutionData, root_pipeline_data: ExecutionData) -> bool: - """ - execute 逻辑 - - :param data: 节点执行数据 - :type data: ExecutionData - :param root_pipeline_data: 根流程执行数据 - :type root_pipeline_data: ExecutionData - :return: 是否执行成功 - :rtype: bool - """ - - @abstractmethod - def schedule( - self, - schedule: Schedule, - data: ExecutionData, - root_pipeline_data: ExecutionData, - callback_data: Optional[CallbackData] = None, - ) -> bool: - """ - schedule 逻辑 - - :param schedule: Schedule 对象 - :type schedule: Schedule - :param data: 节点执行数据 - :type data: ExecutionData - :param root_pipeline_data: 根流程执行数据 - :type root_pipeline_data: ExecutionData - :param callback_data: 回调数据, defaults to None - :type callback_data: Optional[CallbackData], optional - :return: [description] - :rtype: bool - """ - - @abstractmethod - def need_schedule(self) -> bool: - """ - 服务是否需要调度 - - :return: 是否需要调度 - :rtype: bool - """ - - @abstractmethod - def schedule_type(self) -> Optional[ScheduleType]: - """ - 服务调度类型 - - :return: 调度类型 - :rtype: Optional[ScheduleType] - """ - - @abstractmethod - def is_schedule_done(self) -> bool: - """ - 调度是否完成 - - :return: 调度是否完成 - :rtype: bool - """ - - @abstractmethod 
- def schedule_after( - self, - schedule: Optional[Schedule], - data: ExecutionData, - root_pipeline_data: ExecutionData, - ) -> int: - """ - 计算下一次调度间隔 - - :param schedule: 调度对象,未进行调度时传入为空 - :type schedule: Optional[Schedule] - :param data: 节点执行数据 - :type data: ExecutionData - :param root_pipeline_data: 根流程执行数据 - :type root_pipeline_data: ExecutionData - :return: 调度间隔,单位为秒 - :rtype: int - """ - - @abstractmethod - def setup_runtime_attributes(self, **attrs): - """ - 装载运行时属性 - - :param attrs: 运行时属性 - :type attrs: Dict[str, Any] - """ - - -class ExecutableEvent(metaclass=ABCMeta): - """ - 可执行结束节点接口 - """ - - @abstractmethod - def execute(pipeline_stack: List[str], root_pipeline_id: str): - """ - execute 逻辑 - - :param pipeline_stack: 流程栈 - :type pipeline_stack: List[str] - :param root_pipeline_id: 根流程 ID - :type root_pipeline_id: str - """ - - -class Variable(metaclass=ABCMeta): - """ - 变量接口 - """ - - @abstractmethod - def get(self) -> Any: - """ - 获取变量值 - - :return: 变量值 - :rtype: Any - """ - - -# runtime interface - - -class PluginManagerMixin: - """ - 插件管理接口,声明了插件(服务,可执行结束节点,变量)管理相关的接口 - """ - - @abstractmethod - def get_service(self, code: str, version: str) -> Service: - """ - 根据代号与版本获取特定服务对象实例 - - :param code: 服务唯一代号 - :type code: str - :param version: 服务版本 - :type version: str - :return: 服务对象实例 - :rtype: Service - """ - - @abstractmethod - def get_executable_end_event(self, code: str) -> ExecutableEvent: - """ - 根据代号获取特定可执行结束事件实例 - - :param code: 可执行结束事件唯一代号 - :type code: str - :return: 可执行结束事件实例 - :rtype: ExecutableEvent: - """ - - @abstractmethod - def get_compute_variable( - self, - code: str, - key: str, - value: Variable, - additional_data: dict, - ) -> Variable: - """ - 根据代号获取变量实例 - - :param code: 唯一代号 - :type code: str - :param key: 变量 key - :type key: str - :param value: 变量配置 - :type value: Any - :param additional_data: 额外数据字典 - :type additional_data: dict - :return: 变量实例 - :rtype: Variable - """ - - -class EngineAPIHooksMixin: - """ - 引擎 API 
执行时调用的钩子相关接口声明 - """ - - def pre_prepare_run_pipeline( - self, pipeline: dict, root_pipeline_data: dict, root_pipeline_context: dict, subprocess_context: dict, **options - ): - """ - 调用 pre_prepare_run_pipeline 前执行的钩子 - - :param pipeline: 流程描述对象 - :type pipeline: dict - :param root_pipeline_data 根流程数据 - :type root_pipeline_data: dict - :param root_pipeline_context 根流程上下文 - :type root_pipeline_context: dict - :param subprocess_context 子流程预置流程上下文 - :type subprocess_context: dict - """ - - def post_prepare_run_pipeline( - self, pipeline: dict, root_pipeline_data: dict, root_pipeline_context: dict, subprocess_context: dict, **options - ): - """ - 调用 pre_prepare_run_pipeline 后执行的钩子 - - :param pipeline: 流程描述对象 - :type pipeline: dict - :param root_pipeline_data 根流程数据 - :type root_pipeline_data: dict - :param root_pipeline_context 根流程上下文 - :type root_pipeline_context: dict - :param subprocess_context 子流程预置流程上下文 - :type subprocess_context: dict - """ - - def pre_pause_pipeline(self, pipeline_id: str): - """ - 暂停 pipeline 前执行的钩子 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - """ - - def post_pause_pipeline(self, pipeline_id: str): - """ - 暂停 pipeline 后执行的钩子 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - """ - - def pre_revoke_pipeline(self, pipeline_id: str): - """ - 撤销 pipeline 前执行的钩子 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - """ - - def post_revoke_pipeline(self, pipeline_id: str): - """ - 撤销 pipeline 后执行的钩子 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - """ - - def pre_resume_pipeline(self, pipeline_id: str): - """ - 继续 pipeline 前执行的钩子 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - """ - - def post_resume_pipeline(self, pipeline_id: str): - """ - 继续 pipeline 后执行的钩子 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - """ - - def pre_resume_node(self, node_id: str): - """ - 继续节点后执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - """ - - def post_resume_node(self, node_id: str): - """ - 继续节点后执行的钩子 - - :param 
node_id: [description]节点 ID - :type node_id: str - """ - - def pre_pause_node(self, node_id: str): - """ - 暂停节点前执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - """ - - def post_pause_node(self, node_id: str): - """ - 暂停节点后执行的钩子 - - :param node_id: [description]节点 ID - :type node_id: str - """ - - def pre_retry_node(self, node_id: str, data: Optional[dict]): - """ - 重试节点前执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param data: 重试时使用的节点执行输入 - :type data: Optional[dict] - """ - - def post_retry_node(self, node_id: str, data: Optional[dict]): - """ - 重试节点后执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param data: 重试时使用的节点执行输入 - :type data: Optional[dict] - """ - - def pre_skip_node(self, node_id: str): - """ - 跳过节点前执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - """ - - def post_skip_node(self, node_id: str): - """ - 跳过节点后执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - """ - - def pre_skip_exclusive_gateway(self, node_id: str, flow_id: str): - """ - 跳过分支网关前执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param flow_id: 跳过后选择的目标流 ID - :type flow_id: str - """ - - def post_skip_exclusive_gateway(self, node_id: str, flow_id: str): - """ - 跳过分支网关后执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param flow_id: 跳过后选择的目标流 ID - :type flow_id: str - """ - - def pre_skip_conditional_parallel_gateway(self, node_id: str, flow_ids: list, converge_gateway_id: str): - """ - 跳过条件并行网关前执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param flow_ids: 跳过后选择的目标流 ID 列表 - :type flow_ids: list - :param converge_gateway_id: 目标汇聚网关 ID - :type converge_gateway_id: str - """ - - def post_skip_conditional_parallel_gateway(self, node_id: str, flow_ids: list, converge_gateway_id: str): - """ - 跳过条件并行网关后执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param flow_ids: 跳过后选择的目标流 ID 列表 - :type flow_ids: list - :param converge_gateway_id: 目标汇聚网关 ID - :type converge_gateway_id: str - """ - - def pre_forced_fail_activity(self, node_id: 
str, ex_data: str): - """ - 强制失败节点前执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param ex_data: 写入节点执行数据的失败信息 - :type ex_data: str - """ - - def post_forced_fail_activity(self, node_id: str, ex_data: str, old_version: str, new_version: str): - """ - 强制失败节点后执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param ex_data: 写入节点执行数据的失败信息 - :type ex_data: str - :param old_version: 强制失败前的节点版本 - :type old_version: str - :param new_version: 强制失败后的节点版本 - :type new_version: str - """ - - def pre_callback(self, node_id: str, version: str, data: str): - """ - 回调节点前执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param version: 节点执行版本 - :type version: str - :param data: 回调数据 - :type data: str - """ - - def post_callback(self, node_id: str, version: str, data: str): - """ - 回调节点后执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param version: 节点执行版本 - :type version: str - :param data: 回调数据 - :type data: str - """ - - def pre_retry_subprocess(self, node_id: str): - """ - 子流程重试前执行的钩子 - - :param node_id: 子流程节点 ID - :type node_id: str - """ - - def post_retry_subprocess(self, node_id: str): - """ - 子流程重试后执行的钩子 - - :param node_id: 子流程节点 ID - :type node_id: str - """ - - -class TaskMixin: - """ - 引擎任务派发相关接口 - """ - - @abstractmethod - def execute(self, process_id: int, node_id: str, root_pipeline_id: str, parent_pipeline_id: str): - """ - 派发执行任务,执行任务被拉起执行时应该调用 Engine 实例的 execute 方法 - - :param process_id: 进程 ID - :type process_id: int - :param node_id: 节点 ID - :type node_id: str - :param root_pipeline_id: 根流程 ID - :type root_pipeline_id: str - :param parent_pipeline_id: 父流程 ID - :type parent_pipeline_id: str - """ - - @abstractmethod - def schedule( - self, - process_id: int, - node_id: str, - schedule_id: str, - callback_data_id: Optional[int] = None, - ): - """ - 派发调度任务,调度任务被拉起执行时应该调用 Engine 实例的 schedule 方法 - - :param process_id: 进程 ID - :type process_id: int - :param node_id: 节点 ID - :type node_id: str - :param schedule_id: 调度 ID - :type schedule_id: str 
- :param callback_data_id: 回调数据, defaults to None - :type callback_data_id: Optional[int], optional - """ - - @abstractmethod - def set_next_schedule( - self, - process_id: int, - node_id: str, - schedule_id: str, - schedule_after: int, - callback_data_id: Optional[int] = None, - ): - """ - 设置下次调度时间,调度倒数归零后应该执行 Engine 实例的 schedule 方法 - - :param process_id: 进程 ID - :type process_id: int - :param node_id: 节点 ID - :type node_id: str - :param schedule_id: 调度 ID - :type schedule_id: str - :param schedule_after: 调度倒数 - :type schedule_after: int - :param callback_data_id: 回调数据, defaults to None - :type callback_data_id: Optional[int], optional - """ - - @abstractmethod - def start_timeout_monitor(self, process_id: int, node_id: str, version: str, timeout: int): - """ - 开始对某个节点执行的超时监控,若超时时间归零后节点未进入归档状态,则强制失败该节点 - - :param process_id: 进程 ID - :type process_id: int - :param node_id: 节点 ID - :type node_id: str - :param version: 执行版本 - :type version: str - :param timeout: 超时时间,单位为秒 - :type timeout: int - """ - - @abstractmethod - def stop_timeout_monitor( - self, - process_id: int, - node_id: str, - version: str, - ): - """ - 停止对某个节点的超时监控 - - :param process_id: 进程 ID - :type process_id: int - :param node_id: 节点 ID - :type node_id: str - :param version: 执行版本 - :type version: str - """ - - -class ProcessMixin: - """ - 进程相关接口 - """ - - @abstractmethod - def beat(self, process_id: int): - """ - 进程心跳 - - :param process_id: 进程 ID - :type process_id: int - """ - - @abstractmethod - def wake_up(self, process_id: int): - """ - 将当前进程标记为唤醒状态 - - :param process_id: 进程 ID - :type process_id: int - """ - - @abstractmethod - def sleep(self, process_id: int): - """ - 将当前进程标记为睡眠状态 - - :param process_id: 进程 ID - :type process_id: int - """ - - @abstractmethod - def suspend(self, process_id: int, by: str): - """ - 将当前进程标记为阻塞状态 - - :param process_id: 进程 ID - :type process_id: int - :param by: 造成阻塞的节点信息 - :type by: str - """ - - @abstractmethod - def resume(self, process_id: int): - """ - 
将进程标记为非阻塞状态 - - :param process_id: 进程 ID - :type process_id: int - """ - - @abstractmethod - def batch_resume(self, process_id_list: List[int]): - """ - 批量将进程标记为非阻塞状态 - - :param process_id_list: 进程 ID 列表 - :type process_id_list: List[int] - """ - - @abstractmethod - def die(self, process_id: int): - """ - 将当前进程标记为非存活状态 - - :param process_id: 进程 ID - :type process_id: int - """ - - @abstractmethod - def get_process_info(self, process_id: int) -> ProcessInfo: - """ - 获取某个进程的基本信息 - - :param process_id: 进程 ID - :type process_id: int - :return: 进程基本信息 - :rtype: ProcessInfo - """ - - @abstractmethod - def get_process_info_with_root_pipeline(self, pipeline_id: str) -> List[ProcessInfo]: - """ - 根据根流程 ID 获取一批进程的信息 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - :return: 进程基本信息 - :rtype: List[ProcessInfo] - """ - - @abstractmethod - def kill(self, process_id: int): - """ - 强制结束某个进程正在进行的活动,并将其标志为睡眠状态 - - :param process_id: 进程 ID - :type process_id: int - """ - - @abstractmethod - def get_suspended_process_info(self, suspended_by: str) -> List[SuspendedProcessInfo]: - """ - 获取由于 pipeline 暂停而被暂停执行的进程信息 - - : param suspended_by: 进程 ID - : type suspended_by: str - : return: 暂停的进程信息 - : rtype: SuspendedProcessInfo - """ - - @abstractmethod - def get_sleep_process_info_with_current_node_id(self, node_id: str) -> Optional[ProcessInfo]: - """ - 获取由于处于睡眠状态且当前节点 ID 为 node_id 的进程 ID - - : param node_id: 节点 ID - : type node_id: str - : return: 进程 ID - : rtype: str - """ - - @abstractmethod - def get_process_id_with_current_node_id(self, node_id: str) -> Optional[int]: - """ - 获取当前节点 ID 为 node_id 且存活的进程 ID - - : param node_id: 节点 ID - : type node_id: str - : return: 进程 ID - : rtype: str - """ - - @abstractmethod - def set_current_node(self, process_id: int, node_id: str): - """ - 将进程当前处理节点标记为 node - - :param process_id: 进程 ID - :type process_id: int - :param node_id: 节点 ID - :type node_id: str - """ - - @abstractmethod - def child_process_finish(self, parent_id: int, process_id: 
int) -> bool: - """ - 标记某个进程的子进程执行完成,并返回是否能够唤醒父进程继续执行的标志位 - - :param parent_id: 父进程 ID - :type parent_id: int - :param process_id: 子进程 ID - :type process_id: int - :return: 是否能够唤醒父进程继续执行 - :rtype: bool - """ - - @abstractmethod - def is_frozen(self, process_id: int) -> bool: - """ - 检测当前进程是否需要被冻结 - - :param process_id: 进程 ID - :type process_id: int - :return: 是否需要被冻结 - :rtype: bool - """ - - @abstractmethod - def freeze(self, process_id: int): - """ - 冻结当前进程 - - :param process_id: 进程 ID - :type process_id: int - """ - - @abstractmethod - def fork( - self, - parent_id: str, - root_pipeline_id: str, - pipeline_stack: List[str], - from_to: Dict[str, str], - ) -> List[DispatchProcess]: - """ - 根据当前进程 fork 出多个子进程 - - :param parent_id: 父进程 ID - :type parent_id: str - :param root_pipeline_id: 根流程 ID - :type root_pipeline_id: str - :param pipeline_stack: 子流程栈 - :type pipeline_stack: List[str] - :param from_to: 子进程的执行开始节点和目标节点 - :type from_to: Dict[str, str] - :return: 待调度进程信息列表 - :rtype: List[DispatchProcess] - """ - - @abstractmethod - def join(self, process_id: int, children_id: List[str]): - """ - 让父进程等待子进程 - - :param process_id: 父进程 ID - :type process_id: int - :param children_id: 子进程 ID 列表 - :type children_id: List[str] - """ - - @abstractmethod - def set_pipeline_stack(self, process_id: int, stack: List[str]): - """ - 设置进程的流程栈 - - :param process_id: 进程 ID - :type process_id: int - :param stack: 流程栈 - :type stack: List[str] - """ - - -class StateMixin: - """ - 状态相关接口 - """ - - @abstractmethod - def get_state(self, node_id: str) -> State: - """ - 获取某个节点的状态对象 - - : param node_id: 节点 ID - : type node_id: str - : return: State 实例 - : rtype: State - """ - - @abstractmethod - def get_state_or_none(self, node_id: str) -> Optional[State]: - """ - 获取某个节点的状态对象,如果不存在则返回 None - - : param node_id: 节点 ID - : type node_id: str - : return: State 实例 - : rtype: State - """ - - @abstractmethod - def get_state_by_root(self, root_id: str) -> List[State]: - """ - 根据根节点 ID 获取一批节点状态 - - 
:param root_id: 根节点 ID - :type root_id: str - :return: 节点状态列表 - :rtype: List[State] - """ - - @abstractmethod - def get_state_by_parent(self, parent_id: str) -> List[State]: - """ - 根据父节点 ID 获取一批节点状态 - - :param parent_id: 父节点 ID - :type parent_id: str - :return: 节点状态列表 - :rtype: List[State] - """ - - @abstractmethod - def batch_get_state_name(self, node_id_list: List[str]) -> Dict[str, str]: - """ - 批量获取一批节点的状态 - - :param node_id_list: 节点 ID 列表 - :type node_id_list: List[str] - :return: 节点ID -> 状态名称 - :rtype: Dict[str, str] - """ - - @abstractmethod - def has_state(self, node_id: str) -> bool: - """ - 是否存在某个节点的的状态 - - :param node_id: 节点 ID - :type node_id: str - :return: 该节点状态是否存在 - :rtype: bool - """ - - @abstractmethod - def reset_state_inner_loop(self, node_id: str) -> str: - """ - 设置节点的当前流程重入次数 - - :param node_id: 节点 ID - :type node_id: str - """ - - @abstractmethod - def reset_children_state_inner_loop(self, node_id: str): - """ - 批量设置子流程节点的所有子节点inner_loop次数 - - :param node_id: 子流程节点 ID - :type node_id: str - """ - - @abstractmethod - def set_state( - self, - node_id: str, - to_state: str, - loop: int = -1, - inner_loop: int = -1, - version: str = None, - root_id: Optional[str] = None, - parent_id: Optional[str] = None, - is_retry: bool = False, - is_skip: bool = False, - reset_retry: bool = False, - reset_skip: bool = False, - error_ignored: bool = False, - reset_error_ignored: bool = False, - refresh_version: bool = False, - clear_started_time: bool = False, - set_started_time: bool = False, - clear_archived_time: bool = False, - set_archive_time: bool = False, - ) -> str: - """ - 设置节点的状态,如果节点存在,进行状态转换时需要满足状态转换状态机 - - :param node_id: 节点 ID - :type node_id: str - :param to_state: 目标状态 - :type to_state: str - :param loop: 循环次数, 为 -1 时表示不设置 - :type loop: int, optional - :param inner_loop: 当前流程循环次数, 为 -1 时表示不设置 - :type inner_loop: int, optional - :param version: 目标状态版本,为空时表示不做版本校验 - :type version: Optional[str], optional - :param root_id: 根节点 ID,为空时表示不设置 - :type 
root_id: Optional[str], optional - :param parent_id: 父节点 ID,为空时表示不设置 - :type parent_id: Optional[str], optional - :param is_retry: 是否增加重试次数 - :type is_retry: bool, optional - :param is_skip: 是否将跳过设置为 True - :type is_skip: bool, optional - :param reset_retry: 是否重置重试次数 - :type reset_retry: bool, optional - :param reset_skip: 是否重置跳过标志 - :type reset_skip: bool, optional - :param error_ignored: 是否为忽略错误跳过 - :type error_ignored: bool, optional - :param reset_error_ignored: 是否重置忽略错误标志 - :type reset_error_ignored: bool, optional - :param refresh_version: 是否刷新版本号 - :type refresh_version: bool, optional - :param clear_started_time: 是否清空开始时间 - :type clear_started_time: bool, optional - :param set_started_time: 是否设置开始时间 - :type set_started_time: bool, optional - :param clear_archived_time: 是否清空归档时间 - :type clear_archived_time: bool, optional - :param set_archive_time: 是否设置归档时间 - :type set_archive_time: bool, optional - :return: 该节点最新版本 - :rtype: str - """ - - @abstractmethod - def set_state_root_and_parent(self, node_id: str, root_id: str, parent_id: str): - """ - 设置节点的根流程和父流程 ID - - :param node_id: 节点 ID - :type node_id: str - :param root_id: 根流程 ID - :type root_id: str - :param parent_id: 父流程 ID - :type parent_id: str - """ - - -class NodeMixin: - """ - 节点相关接口 - """ - - @abstractmethod - def get_node(self, node_id: str) -> Node: - """ - 获取某个节点的详细信息 - - :param node_id: 节点 ID - :type node_id: str - :return: Node 实例 - :rtype: Node - """ - - -class ScheduleMixin: - """ - 调度实例相关接口 - """ - - @abstractmethod - def set_schedule( - self, - process_id: int, - node_id: str, - version: str, - schedule_type: ScheduleType, - ) -> Schedule: - """ - 设置 schedule 对象 - - :param process_id: 进程 ID - :type process_id: int - :param node_id: 节点 ID - :type node_id: str - :param version: 执行版本 - :type version: str - :param schedule_type: 调度类型 - :type schedule_type: ScheduleType - :return: 调度对象实例 - :rtype: Schedule - """ - - @abstractmethod - def get_schedule(self, schedule_id: str) -> Schedule: - """ - 
获取 Schedule 对象 - - :param schedule_id: 调度实例 ID - :type schedule_id: str - :return: Schedule 对象实例 - :rtype: Schedule - """ - - @abstractmethod - def get_schedule_with_node_and_version(self, node_id: str, version: str) -> Schedule: - """ - 通过节点 ID 和执行版本来获取 Scheudle 对象 - - :param node_id: 节点 ID - :type node_id: str - :param version: 执行版本 - :type version: str - :return: Schedule 对象 - :rtype: Schedule - """ - - @abstractmethod - def apply_schedule_lock(self, schedule_id: str) -> bool: - """ - 获取 Schedule 对象的调度锁,返回是否成功获取锁 - - :param schedule_id: 调度实例 ID - :type schedule_id: str - :return: 是否成功获取锁 - :rtype: bool - """ - - @abstractmethod - def release_schedule_lock(self, schedule_id: int): - """ - 释放指定 Schedule 的调度锁 - - :param schedule_id: Schedule ID - :type schedule_id: int - """ - - @abstractmethod - def expire_schedule(self, schedule_id: int): - """ - 将某个 Schedule 对象标记为已过期 - - :param schedule_id: 调度实例 ID - :type schedule_id: int - """ - - @abstractmethod - def finish_schedule(self, schedule_id: int): - """ - 将某个 Schedule 对象标记为已完成 - - :param schedule_id: 调度实例 ID - :type schedule_id: int - """ - - @abstractmethod - def add_schedule_times(self, schedule_id: int): - """ - 将某个 Schedule 对象的调度次数 +1 - - :param schedule_id: 调度实例 ID - :type schedule_id: int - """ - - -class ContextMixin: - """ - 流程上下文相关接口 - """ - - @abstractmethod - def get_context_values(self, pipeline_id: str, keys: set) -> List[ContextValue]: - """ - 获取某个流程上下文中的 keys 所指定的键对应变量的值 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - :param keys: 变量键 - :type keys: set - :return: 变量值信息 - :rtype: List[ContextValue] - """ - - @abstractmethod - def get_context_key_references(self, pipeline_id: str, keys: set) -> set: - """ - 获取某个流程上下文中 keys 所指定的变量直接和间接引用的其他所有变量的键 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - :param keys: 变量 key 列表 - :type keys: set - :return: keys 所指定的变量直接和简介引用的其他所有变量的键 - :rtype: set - """ - - @abstractmethod - def upsert_plain_context_values(self, pipeline_id: str, update: Dict[str, 
ContextValue]): - """ - 更新或创建新的普通上下文数据 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - :param update: 更新数据 - :type update: Dict[str, ContextValue] - """ - - def get_context(self, pipeline_id: str) -> List[ContextValue]: - """ - 获取某个流程的所有上下文数据 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - :return: [description] - :rtype: List[ContextValue] - """ - - def get_context_outputs(self, pipeline_id: str) -> Set[str]: - """ - 获取流程上下文需要输出的数据 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - :return: 输出数据 key - :rtype: Set[str] - """ - - -class DataMixin: - """ - 节点数据,执行数据,回调数据相关接口 - """ - - @abstractmethod - def get_data(self, node_id: str) -> Data: - """ - 获取某个节点的数据对象 - - :param node_id: 节点 ID - :type node_id: str - :return: 数据对象实例 - :rtype: Data - """ - - @abstractmethod - def get_data_inputs(self, node_id: str) -> Dict[str, DataInput]: - """ - 获取某个节点的输入数据 - - :param node_id: 节点 ID - :type node_id: str - :return: 输入数据字典 - :rtype: dict - """ - - @abstractmethod - def get_data_outputs(self, node_id: str) -> dict: - """ - 获取某个节点的输出数据 - - :param node_id: 节点 ID - :type node_id: str - :return: 输入数据字典 - :rtype: dict - """ - - @abstractmethod - def set_data_inputs(self, node_id: str, data: Dict[str, DataInput]): - """ - 将节点数据对象的 inputs 设置为 data - - : param node_id: 节点 ID - : type node_id: str - : param data: 目标数据 - : type data: dict - """ - - # execution data relate - @abstractmethod - def get_execution_data(self, node_id: str) -> ExecutionData: - """ - 获取某个节点的执行数据 - - : param node_id: 节点 ID - : type node_id: str - : return: 执行数据实例 - : rtype: ExecutionData - """ - - @abstractmethod - def get_execution_data_inputs(self, node_id: str) -> dict: - """ - 获取某个节点的执行数据输入 - - :param node_id: 节点 ID - :type node_id: str - :return: 执行数据输入 - :rtype: dict - """ - - @abstractmethod - def get_execution_data_outputs(self, node_id: str) -> dict: - """ - 获取某个节点的执行数据输出 - - :param node_id: 节点 ID - :type node_id: str - :return: 执行数据输出 - :rtype: dict - """ - - @abstractmethod - 
def set_execution_data(self, node_id: str, data: ExecutionData): - """ - 设置某个节点的执行数据 - - :param node_id: 节点 ID - :type node_id: str - :param data: 执行数据实例 - :type data: ExecutionData - """ - - @abstractmethod - def set_execution_data_inputs(self, node_id: str, inputs: dict): - """ - 设置某个节点的执行数据输入 - - :param node_id: 节点 ID - :type node_id: str - :param outputs: 输出数据 - :type outputs: dict - """ - - @abstractmethod - def set_execution_data_outputs(self, node_id: str, outputs: dict): - """ - 设置某个节点的执行数据输出 - - :param node_id: 节点 ID - :type node_id: str - :param outputs: 输出数据 - :type outputs: dict - """ - - # callback data relate - @abstractmethod - def set_callback_data(self, node_id: str, version: str, data: dict) -> int: - """ - 设置某个节点执行数据的回调数据 - - :param node_id: 节点 ID - :type node_id: str - :param version: 节点执行版本 - :type version: str - :param data: 回调数据 - :type data: dict - :return: 回调数据 ID - :rtype: int - """ - - @abstractmethod - def get_callback_data(self, data_id: int) -> CallbackData: - """ - 获取回调数据 - - :param data_id: Data ID - :type data_id: int - :return: 回调数据实例 - :rtype: CallbackData - """ - - -class ExecutionHistoryMixin: - """ - 执行历史相关接口 - """ - - @abstractmethod - def add_history( - self, - node_id: str, - started_time: datetime, - archived_time: datetime, - loop: int, - skip: bool, - retry: int, - version: str, - inputs: dict, - outputs: dict, - ) -> int: - """ - 为某个节点记录一次执行历史 - - : param node_id: 节点 ID - : type node_id: str - : param started_time: 开始时间 - : type started_time: datetime - : param archived_time: 归档时间 - : type archived_time: datetime - : param loop: 重入计数 - : type loop: int - : param skip: 是否跳过 - : type skip: bool - : param retry: 重试次数 - : type retry: int - : param version: 节点执行版本号 - : type version: str - : param inputs: 输入数据 - : type inputs: dict - : param outputs: 输出数据 - : type outputs: dict - """ - - @abstractmethod - def get_histories(self, node_id: str, loop: int = -1) -> List[ExecutionHistory]: - """ - 返回某个节点的历史记录 - - :param node_id: 节点 
ID - :type node_id: str - :param loop: 重入次数, -1 表示不过滤重入次数 - :type loop: int, optional - :return: 历史记录列表 - :rtype: List[History] - """ - - @abstractmethod - def get_short_histories(self, node_id: str, loop: int = -1) -> List[ExecutionShortHistory]: - """ - 返回某个节点的简要历史记录 - - :param node_id: 节点 ID - :type node_id: str - :param loop: 重入次数, -1 表示不过滤重入次数 - :type loop: int, optional - :return: 历史记录列表 - :rtype: List[ExecutionShortHistory] - """ - - -class EngineRuntimeInterface( - PluginManagerMixin, - EngineAPIHooksMixin, - TaskMixin, - ProcessMixin, - StateMixin, - NodeMixin, - ScheduleMixin, - ContextMixin, - DataMixin, - ExecutionHistoryMixin, - metaclass=ABCMeta, -): - @abstractmethod - def prepare_run_pipeline( - self, pipeline: dict, root_pipeline_data: dict, root_pipeline_context: dict, subprocess_context: dict, **options - ) -> int: - """ - 进行 pipeline 执行前的准备工作,并返回 进程 ID,该函数执行完成后即代表 - pipeline 是随时可以通过 execute(process_id, start_event_id) 启动执行的 - 一般来说,应该完成以下工作: - - 准备好进程模型 - - 准备好流程中每个节点的信息 - - 准备好流程中每个节点数据对象的信息 - - :param pipeline: pipeline 描述对象 - :type pipeline: dict - :param root_pipeline_data 根流程数据 - :type root_pipeline_data: dict - :param root_pipeline_context 根流程上下文 - :type root_pipeline_context: dict - :param subprocess_context 子流程预置流程上下文 - :type subprocess_context: dict - :return: 进程 ID - :rtype: str - """ - - @abstractmethod - def node_rerun_limit(self, root_pipeline_id: str, node_id: str) -> int: - """ - 返回节点最大重入次数 - - :param root_pipeline_id: 根流程 ID - :type root_pipeline_id: str - :param node_id: 节点 ID - :type node_id: str - :return: 节点最大重入次数 - :rtype: int - """ diff --git a/lib/bamboo_engine/eri/models.py b/lib/bamboo_engine/eri/models.py deleted file mode 100644 index d24bb8a..0000000 --- a/lib/bamboo_engine/eri/models.py +++ /dev/null @@ -1,646 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. 
-Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -# ERI 中相关的模型对象 - - -from enum import Enum -from datetime import datetime -from typing import List, Dict, Any, Optional - -from bamboo_engine.utils.object import Representable -from bamboo_engine.utils.collections import FancyDict -from bamboo_engine.exceptions import ValueError - - -# node relate models -class NodeType(Enum): - """ - 节点类型枚举 - """ - - ServiceActivity = "ServiceActivity" - SubProcess = "SubProcess" - ExclusiveGateway = "ExclusiveGateway" - ParallelGateway = "ParallelGateway" - ConditionalParallelGateway = "ConditionalParallelGateway" - ConvergeGateway = "ConvergeGateway" - EmptyStartEvent = "EmptyStartEvent" - EmptyEndEvent = "EmptyEndEvent" - ExecutableEndEvent = "ExecutableEndEvent" - - -class Node(Representable): - """ - 节点信息描述类 - """ - - def __init__( - self, - id: str, - type: NodeType, - target_flows: List[str], - target_nodes: List[str], - targets: Dict[str, str], - root_pipeline_id: str, - parent_pipeline_id: str, - can_skip: bool = True, - can_retry: bool = True, - ): - """ - - :param id: 节点 ID - :type id: str - :param type: 节点类型 - :type type: NodeType - :param target_flows: 节点目标流 ID 列表 - :type target_flows: List[str] - :param target_nodes: 目标节点 ID 列表 - :type target_nodes: List[str] - :param targets: 节点目标流,目标节点 ID 映射 - :type targets: Dict[str, str] - :param root_pipeline_id: 根流程 ID - :type root_pipeline_id: str - :param parent_pipeline_id: 父流程 ID - :type 
parent_pipeline_id: str - :param can_skip: 节点是否能够跳过 - :type can_skip: bool - :param can_retry: 节点是否能够重试 - :type can_retry: bool - """ - self.id = id - self.type = type - self.targets = targets - self.target_flows = target_flows - self.target_nodes = target_nodes - self.root_pipeline_id = root_pipeline_id - self.parent_pipeline_id = parent_pipeline_id - self.can_skip = can_skip - self.can_retry = can_retry - - -class EmptyStartEvent(Node): - pass - - -class ConvergeGateway(Node): - pass - - -class EmptyEndEvent(Node): - pass - - -class Condition(Representable): - """ - 分支条件 - """ - - def __init__(self, name: str, evaluation: str, target_id: str, flow_id: str): - """ - - :param name: 条件名 - :type name: str - :param evaluation: 条件表达式 - :type evaluation: str - :param target_id: 目标节点 ID - :type target_id: str - :param flow_id: 目标流 ID - :type flow_id: str - """ - self.name = name - self.evaluation = evaluation - self.target_id = target_id - self.flow_id = flow_id - - -class ParallelGateway(Node): - """ - 并行网关 - """ - - def __init__(self, converge_gateway_id: str, *args, **kwargs): - """ - - :param converge_gateway_id: 汇聚网关 ID - :type converge_gateway_id: str - """ - super().__init__(*args, **kwargs) - self.converge_gateway_id = converge_gateway_id - - -class ConditionalParallelGateway(Node): - """ - 条件并行网关 - """ - - def __init__(self, conditions: List[Condition], converge_gateway_id: str, *args, **kwargs): - """ - - :param conditions: 分支条件 - :type conditions: List[Condition] - :param converge_gateway_id: 汇聚网关 ID - :type converge_gateway_id: str - """ - super().__init__(*args, **kwargs) - self.conditions = conditions - self.converge_gateway_id = converge_gateway_id - - -class ExclusiveGateway(Node): - """ - 分支网关 - """ - - def __init__(self, conditions: List[Condition], *args, **kwargs): - """ - - :param conditions: 分支条件 - :type conditions: List[Condition] - """ - super().__init__(*args, **kwargs) - self.conditions = conditions - - -class ServiceActivity(Node): - """ - 服务节点 
- """ - - def __init__(self, code: str, version: str, timeout: Optional[int], error_ignorable: bool, *args, **kwargs): - """ - - :param code: Service Code - :type code: str - :param version: 版本 - :type version: str - :param timeout: 超时限制 - :type timeout: Optional[int] - :param error_ignorable: 是否忽略错误 - :type error_ignorable: bool - """ - - super().__init__(*args, **kwargs) - self.code = code - self.version = version - self.timeout = timeout - self.error_ignorable = error_ignorable - - -class SubProcess(Node): - """ - 子流程 - """ - - def __init__(self, start_event_id: str, *args, **kwargs): - """ - - :param start_event_id: 子流程开始节点 ID - :type start_event_id: str - """ - super().__init__(*args, **kwargs) - self.start_event_id = start_event_id - - -class ExecutableEndEvent(Node): - """ - 可执行结束节点 - """ - - def __init__(self, code: str, *args, **kwargs): - """ - - :param code: 可执行结束节点 ID - :type code: str - """ - super().__init__(*args, **kwargs) - self.code = code - - -# runtime relate models -class ScheduleType(Enum): - """ - 调度类型 - """ - - CALLBACK = 1 - MULTIPLE_CALLBACK = 2 - POLL = 3 - - -class Schedule(Representable): - """ - 调度对象 - """ - - def __init__( - self, - id: int, - type: ScheduleType, - process_id: int, - node_id: str, - finished: bool, - expired: bool, - version: str, - times: int, - ): - """ - - :param id: ID - :type id: int - :param type: 类型 - :type type: ScheduleType - :param process_id: 进程 ID - :type process_id: int - :param node_id: 节点 ID - :type node_id: str - :param finished: 是否已完成 - :type finished: bool - :param expired: 是否已过期 - :type expired: bool - :param version: 绑定版本 - :type version: str - :param times: 调度次数 - :type times: int - """ - self.id = id - self.type = type - self.process_id = process_id - self.node_id = node_id - self.finished = finished - self.expired = expired - self.version = version - self.times = times - - -class State(Representable): - """ - 节点状态对象 - """ - - def __init__( - self, - node_id: str, - root_id: str, - parent_id: 
str, - name: str, - version: str, - loop: int, - inner_loop: int, - retry: int, - skip: bool, - error_ignored: bool, - created_time: datetime, - started_time: datetime, - archived_time: datetime, - ): - """ - :param node_id: 节点 ID - :type node_id: str - :param root_id: 根流程 ID - :type root_id: str - :param parent_id: 父流程 ID - :type parent_id: str - :param name: 状态名 - :type name: str - :param version: 版本 - :type version: str - :param loop: 重入次数 - :type loop: int - :param inner_loop: 子流程重入次数 - :type inner_loop: int - :param retry: 重试次数 - :type retry: int - :param skip: 是否跳过 - :type skip: bool - :param error_ignored: 是否出错后自动忽略 - :type error_ignored: bool - :param started_time: 创建时间 - :type started_time: datetime - :param started_time: 开始时间 - :type started_time: datetime - :param archived_time: 归档时间 - :type archived_time: datetime - """ - self.node_id = node_id - self.root_id = root_id - self.parent_id = parent_id - self.name = name - self.version = version - self.loop = loop - self.inner_loop = inner_loop - self.retry = retry - self.skip = skip - self.error_ignored = error_ignored - self.created_time = created_time - self.started_time = started_time - self.archived_time = archived_time - - -class DataInput(Representable): - """ - 节点数据输入项 - """ - - def __init__(self, need_render: bool, value: Any): - """ - :type is_splice: bool - :param value: 是否需要进行模板解析 - :type value: Any - """ - self.need_render = need_render - self.value = value - - -class Data(Representable): - """ - 节点数据对象 - """ - - def __init__(self, inputs: Dict[str, DataInput], outputs: Dict[str, str]): - """ - - :param inputs: 输入数据 - :type inputs: Dict[str, Any] - :param outputs: 节点输出配置 - :type outputs: Dict[str, str] - """ - self.inputs = inputs - self.outputs = outputs - - def plain_inputs(self) -> Dict[str, Any]: - """ - 获取不带输入项类型的输入字典 - """ - return {key: di.value for key, di in self.inputs.items()} - - def need_render_inputs(self) -> Dict[str, Any]: - """ - 获取需要进行渲染的输入项字典 - """ - return {key: di.value for 
key, di in self.inputs.items() if di.need_render} - - def render_escape_inputs(self) -> Dict[str, Any]: - """ - 获取不需要进行渲染的输入项字典 - """ - return {key: di.value for key, di in self.inputs.items() if not di.need_render} - - -class ExecutionData(Representable): - """ - 节点输出数据 - """ - - def __init__(self, inputs: Optional[dict], outputs: Optional[dict]): - """ - - :param inputs: 输入数据 - :type inputs: Optional[dict] - :param outputs: 输出数据 - :type outputs: Optional[dict] - """ - self.inputs = FancyDict(inputs) - self.outputs = FancyDict(outputs) - - -class ExecutionHistory(Representable): - """ - 节点执行历史 - """ - - def __init__( - self, - id: str, - node_id: str, - started_time: datetime, - archived_time: datetime, - loop: int, - skip: bool, - retry: int, - version: str, - inputs: dict, - outputs: dict, - ): - """ - - : param id: ID - : type id: str - : param node_id: Node ID - : type node_id: str - : param started_time: 开始时间 - : type started_time: datetime - : param archived_time: 归档时间 - : type archived_time: datetime - : param loop: 重入计数 - : type loop: int - : param skip: 是否跳过 - : type skip: bool - : param retry: 重试次数 - : type retry: int - : param version: 版本号 - : type version: str - : param inputs: 输入数据 - : type inputs: dict - : param outputs: 输出数据 - : type outputs: dict - """ - self.id = id - self.node_id = node_id - self.started_time = started_time - self.archived_time = archived_time - self.loop = loop - self.skip = skip - self.retry = retry - self.version = version - self.inputs = inputs - self.outputs = outputs - - -class ExecutionShortHistory(Representable): - """ - 简短节点执行历史 - """ - - def __init__( - self, - id: str, - node_id: str, - started_time: datetime, - archived_time: datetime, - loop: int, - skip: bool, - retry: int, - version: str, - ): - """ - - : param id: ID - : type id: str - : param node_id: Node ID - : type node_id: str - : param started_time: 开始时间 - : type started_time: datetime - : param archived_time: 归档时间 - : type archived_time: datetime - : param 
loop: 重入计数 - : type loop: int - : param skip: 是否跳过 - : type skip: bool - : param retry: 重试次数 - : type retry: int - : param version: 版本号 - : type version: str - """ - self.id = id - self.node_id = node_id - self.started_time = started_time - self.archived_time = archived_time - self.loop = loop - self.skip = skip - self.retry = retry - self.version = version - - -class CallbackData(Representable): - """ - 节点回调数据 - """ - - def __init__(self, id: int, node_id: str, version: str, data: dict): - """ - - :param id: 数据 ID - :type id: int - :param node_id: 节点 ID - :type node_id: str - :param version: 版本 - :type version: str - :param data: 数据 - :type data: dict - """ - self.id = id - self.node_id = node_id - self.version = version - self.data = data - - -class SuspendedProcessInfo(Representable): - """ - 挂起进程信息 - """ - - def __init__( - self, - process_id: int, - current_node: str, - root_pipeline_id: str, - pipeline_stack: List[str], - ): - """ - - :param process_id: 进程 ID - :type process_id: int - :param current_node: 当前节点 ID - :type current_node: str - :param root_pipeline_id: 根流程 ID - :type root_pipeline_id: str - :param pipeline_stack: 流程栈 - :type pipeline_stack: List[str] - """ - self.process_id = process_id - self.current_node = current_node - self.root_pipeline_id = root_pipeline_id - self.pipeline_stack = pipeline_stack - - @property - def top_pipeline_id(self): - return self.pipeline_stack[-1] - - -class ProcessInfo(Representable): - """ - 进程信息 - """ - - def __init__( - self, - process_id: int, - destination_id: str, - root_pipeline_id: str, - pipeline_stack: List[str], - parent_id: int, - ): - """ - - :param process_id: 进程 ID - :type process_id: int - :param destination_id: 进程目标节点 ID - :type destination_id: str - :param root_pipeline_id: 根流程 ID - :type root_pipeline_id: str - :param pipeline_stack: 流程栈 - :type pipeline_stack: List[str] - :param parent_id: 父进程 ID - :type parent_id: int - """ - self.process_id = process_id - self.destination_id = destination_id - 
self.parent_id = parent_id - self.root_pipeline_id = root_pipeline_id - self.pipeline_stack = pipeline_stack - - @property - def top_pipeline_id(self): - return self.pipeline_stack[-1] - - -class DispatchProcess(Representable): - """ - 待调度进程信息 - """ - - def __init__(self, process_id: int, node_id: str): - """ - - :param process_id: 进程 ID - :type process_id: int - :param node_id: 调度开始节点 ID - :type node_id: str - """ - self.process_id = process_id - self.node_id = node_id - - -class ContextValueType(Enum): - """ - - :param Enum: [description] - :type Enum: [type] - """ - - PLAIN = 1 - SPLICE = 2 - COMPUTE = 3 - - -class ContextValue(Representable): - def __init__(self, key: str, type: ContextValueType, value: Any, code: Optional[str] = None): - if type is ContextValueType.COMPUTE and code is None: - raise ValueError("code can't be none when type is COMPUTE") - - self.key = key - self.type = type - self.value = value - self.code = code diff --git a/lib/bamboo_engine/exceptions.py b/lib/bamboo_engine/exceptions.py deleted file mode 100644 index 7c27002..0000000 --- a/lib/bamboo_engine/exceptions.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -# 异常定义模块 - - -class EngineException(Exception): - pass - - -class InvalidOperationError(EngineException): - pass - - -class NotFoundError(EngineException): - pass - - -class ValueError(EngineException): - pass - - -class StateVersionNotMatchError(EngineException): - pass - - -class TreeInvalidException(EngineException): - pass - - -class ConnectionValidateError(TreeInvalidException): - def __init__(self, failed_nodes, detail, *args): - self.failed_nodes = failed_nodes - self.detail = detail - super(ConnectionValidateError, self).__init__(*args) - - -class ConvergeMatchError(TreeInvalidException): - def __init__(self, gateway_id, *args): - self.gateway_id = gateway_id - super(ConvergeMatchError, self).__init__(*args) - - -class StreamValidateError(TreeInvalidException): - def __init__(self, node_id, *args): - self.node_id = node_id - super(StreamValidateError, self).__init__(*args) - - -class IsolateNodeError(TreeInvalidException): - pass diff --git a/lib/bamboo_engine/handler.py b/lib/bamboo_engine/handler.py deleted file mode 100644 index 5d8f8ca..0000000 --- a/lib/bamboo_engine/handler.py +++ /dev/null @@ -1,243 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -# 节点处理器逻辑封装模块 - -from typing import Optional, List -from abc import ABCMeta, abstractmethod - -from bamboo_engine import states - -from .eri import ( - EngineRuntimeInterface, - Node, - Schedule, - CallbackData, - ScheduleType, - DispatchProcess, - ProcessInfo, - NodeType, -) -from .exceptions import NotFoundError, InvalidOperationError - - -def register_handler(type: NodeType): - """ - 节点 Handler 注册函数 - - :param type: 节点类型 - :type type: NodeType - """ - - def register(cls): - HandlerFactory.add_handler(type, cls) - return cls - - return register - - -class ExecuteResult: - """ - Handler execute 方法返回的结果 - """ - - def __init__( - self, - should_sleep: bool, - schedule_ready: bool, - schedule_type: Optional[ScheduleType], - schedule_after: int, - dispatch_processes: List[DispatchProcess], - next_node_id: Optional[str], - should_die: bool = False, - ): - """ - - :param should_sleep: 当前进程是否应该进入休眠 - :type should_sleep: bool - :param schedule_ready: 被处理的节点是否准备好进入调度 - :type schedule_ready: bool - :param schedule_type: 被处理的节点的调度类型 - :type schedule_type: Optional[ScheduleType] - :param schedule_after: 在 schedule_after 秒后开始执行调度 - :type schedule_after: int - :param dispatch_processes: 需要派发的子进程信息列表 - :type dispatch_processes: List[DispatchProcess] - :param next_node_id: 推进循环中下一个要处理的节点的 ID - :type next_node_id: Optional[str] - :param should_die: 当前进程是否需要进入死亡状态, defaults to False - :type should_die: bool, optional - """ - self.should_sleep = should_sleep - self.schedule_ready = schedule_ready - self.schedule_type = schedule_type - self.schedule_after = schedule_after - self.dispatch_processes = dispatch_processes - self.next_node_id = next_node_id - self.should_die = should_die - - -class ScheduleResult: - """ - Handler schedule 方法返回的结果 - """ - - def __init__( - self, - has_next_schedule: bool, - schedule_after: int, - schedule_done: bool, - next_node_id: Optional[str], - ): - """ - - :param has_next_schedule: 是否还有下次调度 - :type has_next_schedule: bool - :param 
schedule_after: 在 schedule_after 秒后开始下次调度 - :type schedule_after: int - :param schedule_done: 调度是否完成 - :type schedule_done: bool - :param next_node_id: 调度完成后下一个需要执行的节点的 ID - :type next_node_id: Optional[str] - """ - self.has_next_schedule = has_next_schedule - self.schedule_after = schedule_after - self.schedule_done = schedule_done - self.next_node_id = next_node_id - - -class NodeHandler(metaclass=ABCMeta): - """ - 节点处理器,负责封装不同类型节点的 execute 和 schedule 逻辑 - """ - - LOOP_KEY = "_loop" - INNER_LOOP_KEY = "_inner_loop" - - def __init__(self, node: Node, runtime: EngineRuntimeInterface): - """ - - :param node: 节点实例 - :type node: Node - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - """ - self.node = node - self.runtime = runtime - - @abstractmethod - def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult: - """ - 节点的 execute 处理逻辑 - - :param process_info: 进程信息 - :type process_id: ProcessInfo - :param loop: 重入次数 - :type loop: int - :param inner_loop: 当前流程重入次数 - :type inner_loop: int - :param version: 执行版本 - :type version: str - :return: 执行结果 - :rtype: ExecuteResult - """ - - def schedule( - self, - process_info: ProcessInfo, - loop: int, - inner_loop: int, - schedule: Schedule, - callback_data: Optional[CallbackData] = None, - ) -> ScheduleResult: - """ - 节点的 schedule 处理逻辑,不支持 schedule 的节点可以不实现该方法 - - :param process_info: 进程信息 - :type process_id: ProcessInfo - :param loop: 重入次数 - :type loop: int - :param inner_loop: 当前流程重入次数 - :type inner_loop: int - :param schedule: Schedule 实例 - :type schedule: Schedule - :param callback_data: 回调数据, defaults to None - :type callback_data: Optional[CallbackData], optional - :return: 调度结果 - :rtype: ScheduleResult - """ - raise NotImplementedError() - - def _execute_fail(self, ex_data: str) -> ExecuteResult: - exec_outputs = self.runtime.get_execution_data_outputs(self.node.id) - - self.runtime.set_state(node_id=self.node.id, to_state=states.FAILED, 
set_archive_time=True) - - exec_outputs["ex_data"] = ex_data - - self.runtime.set_execution_data_outputs(self.node.id, exec_outputs) - - return ExecuteResult( - should_sleep=True, - schedule_ready=False, - schedule_type=None, - schedule_after=-1, - dispatch_processes=[], - next_node_id=None, - ) - - def _get_plain_inputs(self, node_id: str): - return {key: di.value for key, di in self.runtime.get_data_inputs(node_id).items()} - - -class HandlerFactory: - """ - 节点处理器工厂 - """ - - _handlers = {} - - @classmethod - def add_handler(cls, type: NodeType, handler_cls): - """ - 向工厂中注册某个类型节点的处理器 - - :param type: 节点类型 - :type type: NodeType - :param handler_cls: [description] - :type handler_cls: [type] - :raises InvalidOperationError: [description] - """ - if not issubclass(handler_cls, NodeHandler): - raise InvalidOperationError( - "register handler err: {} is not subclass of {}".format(handler_cls, "NodeHandler") - ) - cls._handlers[type.value] = handler_cls - - @classmethod - def get_handler(cls, node: Node, runtime: EngineRuntimeInterface) -> NodeHandler: - """ - 获取某个节点的处理器实例 - - :param node: 节点实例 - :type node: NodeType - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :raises NotFoundError: [description] - :return: 节点处理器实例 - :rtype: NodeHandler - """ - if node.type.value not in cls._handlers: - raise NotFoundError("can not find handler for {} type node".format(node.type.value)) - - return cls._handlers[node.type.value](node, runtime) diff --git a/lib/bamboo_engine/handlers/__init__.py b/lib/bamboo_engine/handlers/__init__.py deleted file mode 100644 index a74816c..0000000 --- a/lib/bamboo_engine/handlers/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -""" -节点处理逻辑存放模块 -""" - - -def register(): - from .conditional_parallel_gateway import ConditionalParallelGatewayHandler # noqa - from .converge_gateway import ConvergeGatewayHandler # noqa - from .empty_end_event import EmptyEndEventHandler # noqa - from .empty_start_event import EmptyStartEventHandler # noqa - from .exclusive_gateway import ExclusiveGatewayHandler # noqa - from .executable_end_event import ExecutableEndEventHandler # noqa - from .parallel_gateway import ParallelGatewayHandler # noqa - from .service_activity import ServiceActivityHandler # noqa - from .subprocess import SubProcessHandler # noqa diff --git a/lib/bamboo_engine/handlers/conditional_parallel_gateway.py b/lib/bamboo_engine/handlers/conditional_parallel_gateway.py deleted file mode 100644 index aae7387..0000000 --- a/lib/bamboo_engine/handlers/conditional_parallel_gateway.py +++ /dev/null @@ -1,137 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" -import json -import logging - -from bamboo_engine.utils.boolrule import BoolRule -from bamboo_engine.template.template import Template - -from bamboo_engine import states -from bamboo_engine.eri import NodeType, ProcessInfo -from bamboo_engine.context import Context -from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult -from bamboo_engine.utils.string import transform_escape_char - -logger = logging.getLogger("bamboo_engine") - - -@register_handler(NodeType.ConditionalParallelGateway) -class ConditionalParallelGatewayHandler(NodeHandler): - def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult: - """ - 节点的 execute 处理逻辑 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param process_info: 进程信息 - :type process_id: ProcessInfo - :return: 执行结果 - :rtype: ExecuteResult - """ - evaluations = [c.evaluation for c in self.node.conditions] - top_pipeline_id = process_info.top_pipeline_id - root_pipeline_id = process_info.root_pipeline_id - - root_pipeline_inputs = self._get_plain_inputs(root_pipeline_id) - - # resolve conditions references - evaluation_refs = set() - for e in evaluations: - refs = Template(e).get_reference() - evaluation_refs = evaluation_refs.union(refs) - - logger.info( - "root_pipeline[%s] node(%s) evaluation original refs: %s", - root_pipeline_id, - self.node.id, - evaluation_refs, - ) - additional_refs = self.runtime.get_context_key_references(pipeline_id=top_pipeline_id, keys=evaluation_refs) - evaluation_refs = evaluation_refs.union(additional_refs) - - logger.info( - 
"root_pipeline[%s] node(%s) evaluation final refs: %s", - root_pipeline_id, - self.node.id, - evaluation_refs, - ) - context_values = self.runtime.get_context_values(pipeline_id=top_pipeline_id, keys=evaluation_refs) - context = Context(self.runtime, context_values, root_pipeline_inputs) - try: - hydrated_context = {k: transform_escape_char(v) for k, v in context.hydrate(deformat=True).items()} - except Exception as e: - logger.exception( - "root_pipeline[%s] node(%s) context hydrate error", - root_pipeline_id, - self.node.id, - ) - return self._execute_fail("evaluation context hydrate failed(%s), check node log for details." % e) - - # check conditions - fork_targets = [] - for c in self.node.conditions: - resolved_evaluate = Template(c.evaluation).render(hydrated_context) - logger.info( - "root_pipeline[%s] node(%s) render evaluation %s: %s with %s", - root_pipeline_id, - self.node.id, - c.evaluation, - resolved_evaluate, - hydrated_context, - ) - try: - result = BoolRule(resolved_evaluate).test() - logger.info( - "root_pipeline[%s] node(%s) %s test result: %s", - root_pipeline_id, - self.node.id, - resolved_evaluate, - result, - ) - except Exception as e: - # test failed - return self._execute_fail( - "evaluate[{}] fail with data[{}] message: {}".format( - c.resolved_evaluate, json.dumps(hydrated_context), e - ) - ) - else: - if result: - fork_targets.append(c.target_id) - - # all miss - if not fork_targets: - return self._execute_fail("all conditions of branches are not meet") - - # fork - from_to = {} - for target in fork_targets: - from_to[target] = self.node.converge_gateway_id - - dispatch_processes = self.runtime.fork( - parent_id=process_info.process_id, - root_pipeline_id=process_info.root_pipeline_id, - pipeline_stack=process_info.pipeline_stack, - from_to=from_to, - ) - - self.runtime.set_state(node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True) - - return ExecuteResult( - should_sleep=True, - schedule_ready=False, - 
schedule_type=None, - schedule_after=-1, - dispatch_processes=dispatch_processes, - next_node_id=None, - ) diff --git a/lib/bamboo_engine/handlers/converge_gateway.py b/lib/bamboo_engine/handlers/converge_gateway.py deleted file mode 100644 index 65ccf80..0000000 --- a/lib/bamboo_engine/handlers/converge_gateway.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from bamboo_engine import states -from bamboo_engine.eri import ProcessInfo, NodeType -from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult - - -@register_handler(NodeType.ConvergeGateway) -class ConvergeGatewayHandler(NodeHandler): - def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult: - """ - 节点的 execute 处理逻辑 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param process_info: 进程信息 - :type process_id: ProcessInfo - :return: 执行结果 - :rtype: ExecuteResult - """ - - self.runtime.set_state(node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True) - - return ExecuteResult( - should_sleep=False, - schedule_ready=False, - schedule_type=None, - schedule_after=-1, - dispatch_processes=[], - next_node_id=self.node.target_nodes[0], - ) diff --git a/lib/bamboo_engine/handlers/empty_end_event.py b/lib/bamboo_engine/handlers/empty_end_event.py deleted file mode 100644 index 2b0eead..0000000 --- a/lib/bamboo_engine/handlers/empty_end_event.py +++ /dev/null @@ -1,135 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging - -from bamboo_engine import states -from bamboo_engine.config import Settings -from bamboo_engine.eri import ProcessInfo, NodeType -from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult -from bamboo_engine.context import Context -from bamboo_engine.template.template import Template - -logger = logging.getLogger("bamboo_engine") - - -@register_handler(NodeType.EmptyEndEvent) -class EmptyEndEventHandler(NodeHandler): - def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult: - """ - 节点的 execute 处理逻辑 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param process_info: 进程信息 - :type process_id: ProcessInfo - :return: 执行结果 - :rtype: ExecuteResult - """ - root_pipeline_id = process_info.root_pipeline_id - pipeline_id = process_info.pipeline_stack.pop() - root_pipeline_finished = len(process_info.pipeline_stack) == 0 - - root_pipeline_inputs = self._get_plain_inputs(process_info.root_pipeline_id) - if not root_pipeline_finished: - subproc_state = self.runtime.get_state(pipeline_id) - - # write pipeline data - context_outputs = self.runtime.get_context_outputs(pipeline_id) - logger.info( - "root_pipeline[%s] pipeline(%s) context outputs: %s", - root_pipeline_id, - pipeline_id, - context_outputs, - ) - - context_values = self.runtime.get_context_values(pipeline_id=pipeline_id, keys=context_outputs) - logger.info( - "root_pipeline[%s] pipeline(%s) context values: %s", - root_pipeline_id, - pipeline_id, - context_values, - ) - - # caculate outputs values references - output_value_refs = set(Template([cv.value for cv in context_values]).get_reference()) - logger.info( - "root_pipeline[%s] node(%s) outputs values refs: %s", - root_pipeline_id, - self.node.id, - output_value_refs, - ) - - additional_refs = self.runtime.get_context_key_references(pipeline_id=pipeline_id, keys=output_value_refs) - output_value_refs = output_value_refs.union(additional_refs) - 
logger.info( - "root_pipeline[%s] pipeline(%s) outputs values final refs: %s", - root_pipeline_id, - pipeline_id, - output_value_refs, - ) - context_values.extend(self.runtime.get_context_values(pipeline_id=pipeline_id, keys=output_value_refs)) - - context = Context(self.runtime, context_values, root_pipeline_inputs) - hydrated_context = context.hydrate(deformat=False) - logger.info( - "root_pipeline[%s] pipeline(%s) hydrated context: %s", - root_pipeline_id, - pipeline_id, - hydrated_context, - ) - - outputs = {} - for key in context_outputs: - outputs[key] = hydrated_context.get(key, key) - if not root_pipeline_finished: - outputs[self.LOOP_KEY] = subproc_state.loop + Settings.RERUN_INDEX_OFFSET - outputs[self.INNER_LOOP_KEY] = subproc_state.inner_loop + Settings.RERUN_INDEX_OFFSET - self.runtime.set_execution_data_outputs(node_id=pipeline_id, outputs=outputs) - - self.runtime.set_state(node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True) - - self.runtime.set_state(node_id=pipeline_id, to_state=states.FINISHED, set_archive_time=True) - - # root pipeline finish - if root_pipeline_finished: - return ExecuteResult( - should_sleep=False, - schedule_ready=False, - schedule_type=None, - schedule_after=-1, - dispatch_processes=[], - next_node_id=None, - should_die=True, - ) - - # subprocess finish - subprocess = self.runtime.get_node(pipeline_id) - self.runtime.set_pipeline_stack(process_info.process_id, process_info.pipeline_stack) - - # extract subprocess outputs to parent context - subprocess_outputs = self.runtime.get_data_outputs(pipeline_id) - context.extract_outputs( - pipeline_id=process_info.pipeline_stack[-1], - data_outputs=subprocess_outputs, - execution_data_outputs=outputs, - ) - - return ExecuteResult( - should_sleep=False, - schedule_ready=False, - schedule_type=None, - schedule_after=-1, - dispatch_processes=[], - next_node_id=subprocess.target_nodes[0], - ) diff --git a/lib/bamboo_engine/handlers/empty_start_event.py 
b/lib/bamboo_engine/handlers/empty_start_event.py deleted file mode 100644 index 5508725..0000000 --- a/lib/bamboo_engine/handlers/empty_start_event.py +++ /dev/null @@ -1,81 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" -import logging - -from bamboo_engine import states -from bamboo_engine.context import Context -from bamboo_engine.eri import ProcessInfo, NodeType, ContextValue, ContextValueType -from bamboo_engine.exceptions import NotFoundError -from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult - -logger = logging.getLogger("bamboo_engine") - - -@register_handler(NodeType.EmptyStartEvent) -class EmptyStartEventHandler(NodeHandler): - def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult: - """ - 节点的 execute 处理逻辑 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param process_info: 进程信息 - :type process_id: ProcessInfo - :return: 执行结果 - :rtype: ExecuteResult - """ - - try: - data = self.runtime.get_data(self.node.id) - except NotFoundError: - need_pre_render = False - else: - need_pre_render = True - - if need_pre_render: - top_pipeline_id = process_info.top_pipeline_id - root_pipeline_inputs = self._get_plain_inputs(process_info.root_pipeline_id) - upsert_context_dict = 
dict() - pre_render_keys = data.inputs["pre_render_keys"].value - - logger.info("top_pipeline({}) pre_render_keys are: {}".format(top_pipeline_id, ",".join(pre_render_keys))) - - refs = self.runtime.get_context_key_references(pipeline_id=top_pipeline_id, keys=set(pre_render_keys)) - - context_values = self.runtime.get_context_values( - pipeline_id=top_pipeline_id, keys=set(pre_render_keys).union(refs) - ) - context = Context(self.runtime, context_values, root_pipeline_inputs) - hydrated_context = context.hydrate(deformat=False) - for context_value in context_values: - context_key = context_value.key - if context_key in pre_render_keys: - upsert_context_dict[context_key] = ContextValue( - key=context_key, - type=ContextValueType.PLAIN, - value=hydrated_context[context_key], - ) - - logger.info(f"top_pipeline({top_pipeline_id}) pre_render_keys results are: {upsert_context_dict}") - self.runtime.upsert_plain_context_values(top_pipeline_id, upsert_context_dict) - - self.runtime.set_state(node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True) - - return ExecuteResult( - should_sleep=False, - schedule_ready=False, - schedule_type=None, - schedule_after=-1, - dispatch_processes=[], - next_node_id=self.node.target_nodes[0], - ) diff --git a/lib/bamboo_engine/handlers/exclusive_gateway.py b/lib/bamboo_engine/handlers/exclusive_gateway.py deleted file mode 100644 index ee407ac..0000000 --- a/lib/bamboo_engine/handlers/exclusive_gateway.py +++ /dev/null @@ -1,138 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" -import json -import logging - -from bamboo_engine import states -from bamboo_engine.context import Context -from bamboo_engine.template import Template -from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult -from bamboo_engine.utils.boolrule import BoolRule -from bamboo_engine.eri import NodeType, ProcessInfo - -from bamboo_engine.utils.string import transform_escape_char - -logger = logging.getLogger("bamboo_engine") - - -@register_handler(NodeType.ExclusiveGateway) -class ExclusiveGatewayHandler(NodeHandler): - def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult: - """ - 节点的 execute 处理逻辑 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param process_info: 进程信息 - :type process_id: ProcessInfo - :return: 执行结果 - :rtype: ExecuteResult - """ - evaluations = [c.evaluation for c in self.node.conditions] - top_pipeline_id = process_info.top_pipeline_id - root_pipeline_id = process_info.root_pipeline_id - - root_pipeline_inputs = self._get_plain_inputs(process_info.root_pipeline_id) - - # resolve conditions references - evaluation_refs = set() - for e in evaluations: - refs = Template(e).get_reference() - evaluation_refs = evaluation_refs.union(refs) - - logger.info( - "root_pipeline[%s] node(%s) evaluation original refs: %s", - root_pipeline_id, - self.node.id, - evaluation_refs, - ) - additional_refs = self.runtime.get_context_key_references(pipeline_id=top_pipeline_id, keys=evaluation_refs) - evaluation_refs = evaluation_refs.union(additional_refs) - - logger.info( - "root_pipeline[%s] 
node(%s) evaluation final refs: %s", - root_pipeline_id, - self.node.id, - evaluation_refs, - ) - context_values = self.runtime.get_context_values(pipeline_id=top_pipeline_id, keys=evaluation_refs) - logger.info( - "root_pipeline[%s] node(%s) evaluation context values: %s", - root_pipeline_id, - self.node.id, - context_values, - ) - - context = Context(self.runtime, context_values, root_pipeline_inputs) - try: - hydrated_context = {k: transform_escape_char(v) for k, v in context.hydrate(deformat=True).items()} - except Exception as e: - logger.exception( - "root_pipeline[%s] node(%s) context hydrate error", - root_pipeline_id, - self.node.id, - ) - return self._execute_fail("evaluation context hydrate failed(%s), check node log for details." % e) - - # check conditions - meet_targets = [] - meet_conditions = [] - for c in self.node.conditions: - resolved_evaluate = Template(c.evaluation).render(hydrated_context) - logger.info( - "root_pipeline[%s] node(%s) render evaluation %s: %s with %s", - root_pipeline_id, - self.node.id, - c.evaluation, - resolved_evaluate, - hydrated_context, - ) - try: - result = BoolRule(resolved_evaluate).test() - logger.info( - "root_pipeline[%s] node(%s) %s test result: %s", - root_pipeline_id, - self.node.id, - resolved_evaluate, - result, - ) - except Exception as e: - # test failed - return self._execute_fail( - "evaluate[{}] fail with data[{}] message: {}".format( - resolved_evaluate, json.dumps(hydrated_context), e - ) - ) - else: - if result: - meet_conditions.append(c.name) - meet_targets.append(c.target_id) - - # all miss - if not meet_targets: - return self._execute_fail("all conditions of branches are not meet") - - # multiple branch hit - if len(meet_targets) != 1: - return self._execute_fail("multiple conditions meet: {}".format(meet_conditions)) - - self.runtime.set_state(node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True) - - return ExecuteResult( - should_sleep=False, - schedule_ready=False, - 
schedule_type=None, - schedule_after=-1, - dispatch_processes=[], - next_node_id=meet_targets[0], - ) diff --git a/lib/bamboo_engine/handlers/executable_end_event.py b/lib/bamboo_engine/handlers/executable_end_event.py deleted file mode 100644 index 72d9a66..0000000 --- a/lib/bamboo_engine/handlers/executable_end_event.py +++ /dev/null @@ -1,75 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" -import copy -import logging -import traceback - -from bamboo_engine import states -from bamboo_engine.eri import ProcessInfo, NodeType -from bamboo_engine.handler import register_handler, ExecuteResult - -from .empty_end_event import EmptyEndEventHandler - -logger = logging.getLogger("bamboo_engine") - - -@register_handler(NodeType.ExecutableEndEvent) -class ExecutableEndEventHandler(EmptyEndEventHandler): - def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult: - """ - 节点的 execute 处理逻辑 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param process_info: 进程信息 - :type process_id: ProcessInfo - :return: 执行结果 - :rtype: ExecuteResult - """ - - logger.info( - "root_pipeline[%s] node(%s) executable end event: %s", - process_info.root_pipeline_id, - self.node.id, - self.node, - ) - event = self.runtime.get_executable_end_event(code=self.node.code) - - try: - event.execute( - pipeline_stack=copy.copy(process_info.pipeline_stack), - root_pipeline_id=process_info.root_pipeline_id, - ) - except Exception: - ex_data = traceback.format_exc() - logger.warning( - "root_pipeline[%s] node(%s) executable end event execute raise: %s", - process_info.root_pipeline_id, - self.node.id, - ex_data, - ) - - self.runtime.set_execution_data_outputs(node_id=self.node.id, outputs={"ex_data": ex_data}) - - self.runtime.set_state(node_id=self.node.id, to_state=states.FAILED, set_archive_time=True) - - return ExecuteResult( - should_sleep=True, - schedule_ready=False, - schedule_type=None, - schedule_after=-1, - dispatch_processes=[], - next_node_id=None, - ) - - return super().execute(process_info=process_info, loop=loop, inner_loop=inner_loop, version=version) diff --git a/lib/bamboo_engine/handlers/parallel_gateway.py b/lib/bamboo_engine/handlers/parallel_gateway.py deleted file mode 100644 index 7a94271..0000000 --- a/lib/bamboo_engine/handlers/parallel_gateway.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 
-*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from bamboo_engine import states -from bamboo_engine.eri import ProcessInfo, NodeType -from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult - - -@register_handler(NodeType.ParallelGateway) -class ParallelGatewayHandler(NodeHandler): - def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult: - """ - 节点的 execute 处理逻辑 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param process_info: 进程信息 - :type process_id: ProcessInfo - :return: 执行结果 - :rtype: ExecuteResult - """ - - from_to = {} - for target in self.node.target_nodes: - from_to[target] = self.node.converge_gateway_id - - dispatch_processes = self.runtime.fork( - parent_id=process_info.process_id, - root_pipeline_id=process_info.root_pipeline_id, - pipeline_stack=process_info.pipeline_stack, - from_to=from_to, - ) - - self.runtime.set_state(node_id=self.node.id, to_state=states.FINISHED, set_archive_time=True) - - return ExecuteResult( - should_sleep=True, - schedule_ready=False, - schedule_type=None, - schedule_after=-1, - dispatch_processes=dispatch_processes, - next_node_id=None, - ) diff --git a/lib/bamboo_engine/handlers/service_activity.py b/lib/bamboo_engine/handlers/service_activity.py deleted file 
mode 100644 index f89ecdf..0000000 --- a/lib/bamboo_engine/handlers/service_activity.py +++ /dev/null @@ -1,508 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging -import traceback -from typing import Optional - -from bamboo_engine import states -from bamboo_engine.config import Settings - -from bamboo_engine.context import Context -from bamboo_engine.template import Template -from bamboo_engine.eri import ( - ProcessInfo, - ContextValue, - ContextValueType, - ExecutionData, - CallbackData, - ScheduleType, - NodeType, - Schedule, -) -from bamboo_engine.handler import ( - register_handler, - NodeHandler, - ExecuteResult, - ScheduleResult, -) - -logger = logging.getLogger("bamboo_engine") - - -@register_handler(NodeType.ServiceActivity) -class ServiceActivityHandler(NodeHandler): - """ - 其中所有 set_state 调用都会传入 state version 来确保能够在用户强制失败节点后放弃后续无效的任务执行 - """ - - def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult: - """ - 节点的 execute 处理逻辑 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param process_info: 进程信息 - :type process_id: ProcessInfo - :return: 执行结果 - :rtype: ExecuteResult - """ - top_pipeline_id = process_info.top_pipeline_id - root_pipeline_id = process_info.root_pipeline_id - - 
data = self.runtime.get_data(self.node.id) - root_pipeline_inputs = self._get_plain_inputs(process_info.root_pipeline_id) - need_render_inputs = data.need_render_inputs() - render_escape_inputs = data.render_escape_inputs() - - logger.info( - "root_pipeline[%s] node(%s) activity execute data: %s, root inputs: %s", - root_pipeline_id, - self.node.id, - data, - root_pipeline_inputs, - ) - - # resolve inputs context references - inputs_refs = set(Template(need_render_inputs).get_reference()) - logger.info( - "root_pipeline[%s] node(%s) activity original refs: %s", - root_pipeline_id, - self.node.id, - inputs_refs, - ) - - additional_refs = self.runtime.get_context_key_references(pipeline_id=top_pipeline_id, keys=inputs_refs) - inputs_refs = inputs_refs.union(additional_refs) - logger.info( - "root_pipeline[%s] node(%s) activity final refs: %s", - root_pipeline_id, - self.node.id, - inputs_refs, - ) - - # prepare context - context_values = self.runtime.get_context_values(pipeline_id=top_pipeline_id, keys=inputs_refs) - - # pre extract loop outputs - loop_value = loop + Settings.RERUN_INDEX_OFFSET - need_render_inputs[self.LOOP_KEY] = loop_value - if self.LOOP_KEY in data.outputs: - loop_output_key = data.outputs[self.LOOP_KEY] - context_values.append(ContextValue(key=loop_output_key, type=ContextValueType.PLAIN, value=loop_value)) - - # pre extract inner_loop outputs - inner_loop_value = inner_loop + Settings.RERUN_INDEX_OFFSET - need_render_inputs[self.INNER_LOOP_KEY] = inner_loop_value - if self.INNER_LOOP_KEY in data.outputs: - inner_loop_output_key = data.outputs[self.INNER_LOOP_KEY] - context_values.append( - ContextValue( - key=inner_loop_output_key, - type=ContextValueType.PLAIN, - value=inner_loop_value, - ) - ) - - logger.info( - "root_pipeline[%s] node(%s) activity context values: %s", - root_pipeline_id, - self.node.id, - context_values, - ) - - context = Context(self.runtime, context_values, root_pipeline_inputs) - # hydrate will call user code, use try to 
catch unexpected error - try: - hydrated_context = context.hydrate(deformat=True) - except Exception as e: - logger.exception( - "root_pipeline[%s] node(%s) activity context hydrate error", - root_pipeline_id, - self.node.id, - ) - service_data = ExecutionData(inputs=data.plain_inputs(), outputs={}) - service_data.outputs.ex_data = "inputs hydrate failed(%s), check node log for details" % e - service_data.outputs._result = False - service_data.outputs._loop = loop - service_data.outputs._inner_loop = inner_loop - - self.runtime.set_execution_data(node_id=self.node.id, data=service_data) - self.runtime.set_state( - node_id=self.node.id, - version=version, - to_state=states.FAILED, - set_archive_time=True, - ) - return ExecuteResult( - should_sleep=True, - schedule_ready=False, - schedule_type=None, - schedule_after=-1, - dispatch_processes=[], - next_node_id=None, - ) - - logger.info( - "root_pipeline[%s] node(%s) actvity hydrated context: %s", - root_pipeline_id, - self.node.id, - hydrated_context, - ) - - # resolve inputs - execute_inputs = Template(need_render_inputs).render(hydrated_context) - execute_inputs.update(render_escape_inputs) - - # data prepare - service_data = ExecutionData(inputs=execute_inputs, outputs={}) - root_pipeline_data = ExecutionData(inputs=root_pipeline_inputs, outputs={}) - - # execute - service = self.runtime.get_service(code=self.node.code, version=self.node.version) - service.setup_runtime_attributes( - id=self.node.id, - version=version, - top_pipeline_id=top_pipeline_id, - root_pipeline_id=root_pipeline_id, - loop=loop, - inner_loop=inner_loop, - ) - - # start monitor - monitoring = False - if self.node.timeout is not None: - monitoring = True - self.runtime.start_timeout_monitor( - process_id=process_info.process_id, - node_id=self.node.id, - version=version, - timeout=self.node.timeout, - ) - - # pre_execute and excute - logger.debug( - "root_pipeline[%s] node(%s) service data before execute: %s", - self.node.id, - 
root_pipeline_id, - service_data, - ) - logger.debug( - "root_pipeline[%s] node(%s) root pipeline data before execute: %s", - self.node.id, - root_pipeline_id, - root_pipeline_data, - ) - execute_success = False - try: - service.pre_execute(data=service_data, root_pipeline_data=root_pipeline_data) - execute_success = service.execute(data=service_data, root_pipeline_data=root_pipeline_data) - except Exception: - ex_data = traceback.format_exc() - service_data.outputs.ex_data = ex_data - logger.warning("root_pipeline[%s]service execute fail: %s", process_info.root_pipeline_id, ex_data) - logger.debug("root_pipeline[%s] service data after execute: %s", root_pipeline_id, service_data) - service_data.outputs._result = execute_success - service_data.outputs._loop = loop - service_data.outputs._inner_loop = inner_loop - - # execute success - if execute_success: - - need_schedule = service.need_schedule() - next_node_id = None - - if not need_schedule: - if monitoring: - self.runtime.stop_timeout_monitor( - process_id=process_info.process_id, - node_id=self.node.id, - version=version, - timeout=self.node.timeout, - ) - - self.runtime.set_state( - node_id=self.node.id, - version=version, - to_state=states.FINISHED, - set_archive_time=True, - ) - - context.extract_outputs( - pipeline_id=top_pipeline_id, - data_outputs=data.outputs, - execution_data_outputs=service_data.outputs, - ) - next_node_id = self.node.target_nodes[0] - - self.runtime.set_execution_data(node_id=self.node.id, data=service_data) - - return ExecuteResult( - should_sleep=need_schedule, - schedule_ready=need_schedule, - schedule_type=service.schedule_type(), - schedule_after=service.schedule_after( - schedule=None, - data=service_data, - root_pipeline_data=root_pipeline_data, - ), - dispatch_processes=[], - next_node_id=next_node_id, - ) - - # pre_execute failed or execute failed - if monitoring: - self.runtime.stop_timeout_monitor( - process_id=process_info.process_id, - node_id=self.node.id, - 
version=version, - timeout=self.node.timeout, - ) - - if not self.node.error_ignorable: - self.runtime.set_state( - node_id=self.node.id, - version=version, - to_state=states.FAILED, - set_archive_time=True, - ) - - self.runtime.set_execution_data(node_id=self.node.id, data=service_data) - - context.extract_outputs( - pipeline_id=top_pipeline_id, - data_outputs=data.outputs, - execution_data_outputs=service_data.outputs, - ) - - return ExecuteResult( - should_sleep=True, - schedule_ready=False, - schedule_type=None, - schedule_after=-1, - dispatch_processes=[], - next_node_id=None, - ) - - # pre_execute failed or execute failed and error ignore - self.runtime.set_state( - node_id=self.node.id, - version=version, - to_state=states.FINISHED, - set_archive_time=True, - error_ignored=True, - ) - - self.runtime.set_execution_data(node_id=self.node.id, data=service_data) - - context.extract_outputs( - pipeline_id=top_pipeline_id, - data_outputs=data.outputs, - execution_data_outputs=service_data.outputs, - ) - - return ExecuteResult( - should_sleep=False, - schedule_ready=False, - schedule_type=None, - schedule_after=-1, - dispatch_processes=[], - next_node_id=self.node.target_nodes[0], - ) - - def _finish_schedule( - self, - process_info: ProcessInfo, - schedule: Schedule, - data_outputs: dict, - execution_data: ExecutionData, - error_ignored: bool, - root_pipeline_inputs: dict, - ) -> ScheduleResult: - if self.node.timeout is not None: - self.runtime.stop_timeout_monitor( - process_id=process_info.process_id, - node_id=self.node.id, - version=schedule.version, - timeout=self.node.timeout, - ) - - self.runtime.set_state( - node_id=self.node.id, - version=schedule.version, - to_state=states.FINISHED, - set_archive_time=True, - error_ignored=error_ignored, - ) - - context = Context(self.runtime, [], root_pipeline_inputs) - context.extract_outputs( - pipeline_id=process_info.top_pipeline_id, - data_outputs=data_outputs, - execution_data_outputs=execution_data.outputs, - ) 
- - return ScheduleResult( - has_next_schedule=False, - schedule_after=-1, - schedule_done=True, - next_node_id=self.node.target_nodes[0], - ) - - def schedule( - self, - process_info: ProcessInfo, - loop: int, - inner_loop: int, - schedule: Schedule, - callback_data: Optional[CallbackData] = None, - ) -> ScheduleResult: - """ - 节点的 schedule 处理逻辑 - - :param process_id: 进程 ID - :type process_id: int - :param schedule: Schedule 实例 - :type schedule: Schedule - :param callback_data: 回调数据, defaults to None - :type callback_data: Optional[CallbackData], optional - :return: 调度结果 - :rtype: ScheduleResult - """ - # data prepare - top_pipeline_id = process_info.top_pipeline_id - root_pipeline_id = process_info.root_pipeline_id - - data_outputs = self.runtime.get_data_outputs(self.node.id) - service_data = self.runtime.get_execution_data(self.node.id) - - root_pipeline_inputs = self._get_plain_inputs(root_pipeline_id) - root_pipeline_data = ExecutionData(inputs=root_pipeline_inputs, outputs={}) - logger.info( - "root_pipeline[%s] node(%s) activity schedule data: %s, root inputs: %s", - root_pipeline_id, - self.node.id, - service_data, - root_pipeline_inputs, - ) - - # schedule - service = self.runtime.get_service(code=self.node.code, version=self.node.version) - service.setup_runtime_attributes( - id=self.node.id, - version=schedule.version, - top_pipeline_id=top_pipeline_id, - root_pipeline_id=root_pipeline_id, - loop=loop, - inner_loop=inner_loop, - ) - - schedule_success = False - schedule.times += 1 - try: - schedule_success = service.schedule( - schedule=schedule, - data=service_data, - root_pipeline_data=root_pipeline_data, - callback_data=callback_data, - ) - except Exception: - service_data.outputs.ex_data = traceback.format_exc() - - service_data.outputs._result = schedule_success - service_data.outputs._loop = loop - service_data.outputs._inner_loop = inner_loop - - self.runtime.add_schedule_times(schedule.id) - self.runtime.set_execution_data(node_id=self.node.id, 
data=service_data) - - monitoring = self.node.timeout is not None - schedule_type = service.schedule_type() - - # schedule success - if schedule_success: - if schedule_type == ScheduleType.CALLBACK: - return self._finish_schedule( - process_info=process_info, - schedule=schedule, - data_outputs=data_outputs, - execution_data=service_data, - error_ignored=False, - root_pipeline_inputs=root_pipeline_inputs, - ) - else: - is_schedule_done = service.is_schedule_done() - - # poll or multi-callback finished - if is_schedule_done: - return self._finish_schedule( - process_info=process_info, - schedule=schedule, - data_outputs=data_outputs, - execution_data=service_data, - error_ignored=False, - root_pipeline_inputs=root_pipeline_inputs, - ) - - has_next_schedule = schedule_type == ScheduleType.POLL - return ScheduleResult( - has_next_schedule=has_next_schedule, - schedule_after=service.schedule_after( - schedule=schedule, - data=service_data, - root_pipeline_data=root_pipeline_data, - ), - schedule_done=False, - next_node_id=None, - ) - - if monitoring: - self.runtime.stop_timeout_monitor( - process_id=process_info.process_id, - node_id=self.node.id, - version=schedule.version, - timeout=self.node.timeout, - ) - - # schedule fail - if not self.node.error_ignorable: - self.runtime.set_state( - node_id=self.node.id, - version=schedule.version, - to_state=states.FAILED, - set_archive_time=True, - ) - - context = Context(self.runtime, [], root_pipeline_inputs) - context.extract_outputs( - pipeline_id=process_info.top_pipeline_id, - data_outputs=data_outputs, - execution_data_outputs=service_data.outputs, - ) - - return ScheduleResult( - has_next_schedule=False, - schedule_after=-1, - schedule_done=False, - next_node_id=None, - ) - - # schedule fail and error ignore - return self._finish_schedule( - process_info=process_info, - schedule=schedule, - data_outputs=data_outputs, - execution_data=service_data, - error_ignored=True, - root_pipeline_inputs=root_pipeline_inputs, - ) 
diff --git a/lib/bamboo_engine/handlers/subprocess.py b/lib/bamboo_engine/handlers/subprocess.py deleted file mode 100644 index 510098b..0000000 --- a/lib/bamboo_engine/handlers/subprocess.py +++ /dev/null @@ -1,129 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging - -from bamboo_engine.context import Context -from bamboo_engine.config import Settings -from bamboo_engine.template import Template -from bamboo_engine.eri import ProcessInfo, ContextValue, ContextValueType, NodeType -from bamboo_engine.handler import register_handler, NodeHandler, ExecuteResult - -logger = logging.getLogger("bamboo_engine") - - -@register_handler(NodeType.SubProcess) -class SubProcessHandler(NodeHandler): - def execute(self, process_info: ProcessInfo, loop: int, inner_loop: int, version: str) -> ExecuteResult: - """ - 节点的 execute 处理逻辑 - - :param runtime: 引擎运行时实例 - :type runtime: EngineRuntimeInterface - :param process_info: 进程信息 - :type process_id: ProcessInfo - :return: 执行结果 - :rtype: ExecuteResult - """ - data = self.runtime.get_data(self.node.id) - root_pipeline_inputs = self._get_plain_inputs(process_info.root_pipeline_id) - need_render_inputs = data.need_render_inputs() - render_escape_inputs = data.render_escape_inputs() - top_pipeline_id = process_info.top_pipeline_id - 
root_pipeline_id = process_info.root_pipeline_id - - logger.info( - "root_pipeline[%s] node(%s) subprocess data: %s", - root_pipeline_id, - self.node.id, - data, - ) - - # reset inner_loop of nodes in subprocess - self.runtime.reset_children_state_inner_loop(self.node.id) - - # resolve inputs context references - inputs_refs = Template(need_render_inputs).get_reference() - logger.info( - "root_pipeline[%s] node(%s) subprocess original refs: %s", - root_pipeline_id, - self.node.id, - inputs_refs, - ) - - additional_refs = self.runtime.get_context_key_references(pipeline_id=top_pipeline_id, keys=inputs_refs) - inputs_refs = inputs_refs.union(additional_refs) - logger.info( - "root_pipeline[%s] node(%s) subprocess final refs: %s", - root_pipeline_id, - self.node.id, - inputs_refs, - ) - - # prepare context - context_values = self.runtime.get_context_values(pipeline_id=top_pipeline_id, keys=inputs_refs) - - # pre extract loop outputs - loop_value = loop + Settings.RERUN_INDEX_OFFSET - if self.LOOP_KEY in data.outputs: - loop_output_key = data.outputs[self.LOOP_KEY] - context_values.append( - ContextValue( - key=loop_output_key, - type=ContextValueType.PLAIN, - value=loop_value, - ) - ) - logger.info( - "root_pipeline[%s] node(%s) subprocess parent context values: %s", - root_pipeline_id, - self.node.id, - context_values, - ) - - context = Context(self.runtime, context_values, root_pipeline_inputs) - hydrated_context = context.hydrate(deformat=True) - logger.info( - "root_pipeline[%s] node(%s) subprocess parent hydrated context: %s", - root_pipeline_id, - self.node.id, - hydrated_context, - ) - - # resolve inputs - subprocess_inputs = Template(need_render_inputs).render(hydrated_context) - subprocess_inputs.update(render_escape_inputs) - sub_context_values = { - key: ContextValue(key=key, type=ContextValueType.PLAIN, value=value) - for key, value in subprocess_inputs.items() - } - logger.info( - "root_pipeline[%s] node(%s) subprocess inject context: %s", - 
root_pipeline_id, - self.node.id, - sub_context_values, - ) - - # update subprocess context, inject subprocess data - self.runtime.upsert_plain_context_values(self.node.id, sub_context_values) - process_info.pipeline_stack.append(self.node.id) - self.runtime.set_pipeline_stack(process_info.process_id, process_info.pipeline_stack) - - return ExecuteResult( - should_sleep=False, - schedule_ready=False, - schedule_type=None, - schedule_after=-1, - dispatch_processes=[], - next_node_id=self.node.start_event_id, - ) diff --git a/lib/bamboo_engine/local.py b/lib/bamboo_engine/local.py deleted file mode 100644 index 3333b70..0000000 --- a/lib/bamboo_engine/local.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -# 引擎执行 local - - -from typing import Optional - -from werkzeug.local import Local - -from .utils.object import Representable - -_local = Local() - - -class CurrentNodeInfo(Representable): - def __init__(self, node_id: str, version: str, loop: int): - self.node_id = node_id - self.version = version - self.loop = loop - - -def set_node_info(node_info: CurrentNodeInfo): - """ - 设置当前进程/线程/协程 Local 中的当前节点信息 - - :param node_id: 节点 ID - :type node_id: str - :param version: 节点版本 - :type version: str - :param loop: 重入次数 - :type loop: int - """ - _local.current_node_info = node_info - - -def get_node_info() -> Optional[CurrentNodeInfo]: - """ - 获取当前进程/线程/协程正在处理的节点 ID,版本及重入次数 - - :return: 节点 ID - :rtype: [type] - """ - return getattr(_local, "current_node_info", None) - - -def clear_node_info(): - """ - 清理当前进程/线程/协程 Local 中的当前节点信息 - """ - _local.current_node_info = None diff --git a/lib/bamboo_engine/metrics.py b/lib/bamboo_engine/metrics.py deleted file mode 100644 index 4de8a63..0000000 --- a/lib/bamboo_engine/metrics.py +++ /dev/null @@ -1,194 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import os -import time -from functools import wraps - -from prometheus_client import Gauge, Histogram - -from .utils.host import get_hostname - -HOST_NAME = get_hostname() - - -def decode_buckets(buckets_list): - return [float(x) for x in buckets_list.split(",")] - - -def get_histogram_buckets_from_evn(env_name): - if env_name in os.environ: - buckets = decode_buckets(os.environ.get(env_name)) - else: - if hasattr(Histogram, "DEFAULT_BUCKETS"): # pragma: no cover - buckets = Histogram.DEFAULT_BUCKETS - else: # pragma: no cover - # For prometheus-client < 0.3.0 we cannot easily access - # the default buckets: - buckets = ( - 0.005, - 0.01, - 0.025, - 0.05, - 0.075, - 0.1, - 0.25, - 0.5, - 0.75, - 1.0, - 2.5, - 5.0, - 7.5, - 10.0, - float("inf"), - ) - return buckets - - -def setup_gauge(*gauges): - def wrapper(func): - @wraps(func) - def _wrapper(*args, **kwargs): - for g in gauges: - g.labels(hostname=HOST_NAME).inc(1) - try: - return func(*args, **kwargs) - finally: - for g in gauges: - g.labels(hostname=HOST_NAME).dec(1) - - return _wrapper - - return wrapper - - -def setup_histogram(*histograms): - def wrapper(func): - @wraps(func) - def _wrapper(*args, **kwargs): - start = time.time() - try: - return func(*args, **kwargs) - finally: - for h in histograms: - h.labels(hostname=HOST_NAME).observe(time.time() - start) - - return _wrapper - - return wrapper - - -# engine metrics -ENGINE_RUNNING_PROCESSES = Gauge("engine_running_processes", "count running state processes", labelnames=["hostname"]) -ENGINE_RUNNING_SCHEDULES = Gauge("engine_running_schedules", "count running state schedules", labelnames=["hostname"]) -ENGINE_PROCESS_RUNNING_TIME = Histogram( - "engine_process_running_time", - "time spent running process", - buckets=get_histogram_buckets_from_evn("ENGINE_PROCESS_RUNNING_TIME_BUCKETS"), - labelnames=["hostname"], -) -ENGINE_SCHEDULE_RUNNING_TIME = Histogram( - "engine_schedule_running_time", - "time spent running schedule", - 
buckets=get_histogram_buckets_from_evn("ENGINE_SCHEDULE_RUNNING_TIME_BUCKETS"), - labelnames=["hostname"], -) -ENGINE_NODE_EXECUTE_TIME = Histogram( - "engine_node_execute_time", - "time spent executing node", - buckets=get_histogram_buckets_from_evn("ENGINE_NODE_EXECUTE_TIME_BUCKETS"), - labelnames=["type", "hostname"], -) -ENGINE_NODE_SCHEDULE_TIME = Histogram( - "engine_node_schedule_time", - "time spent scheduling node", - buckets=get_histogram_buckets_from_evn("ENGINE_NODE_SCHEDULE_TIME_BUCKETS"), - labelnames=["type", "hostname"], -) - -# runtime metrics -ENGINE_RUNTIME_CONTEXT_VALUE_READ_TIME = Histogram( - "engine_runtime_context_value_read_time", "time spent reading context value", labelnames=["hostname"] -) -ENGINE_RUNTIME_CONTEXT_REF_READ_TIME = Histogram( - "engine_runtime_context_ref_read_time", "time spent reading context value reference", labelnames=["hostname"] -) -ENGINE_RUNTIME_CONTEXT_VALUE_UPSERT_TIME = Histogram( - "engine_runtime_context_value_upsert_time", "time spent upserting context value", labelnames=["hostname"] -) - -ENGINE_RUNTIME_DATA_INPUTS_READ_TIME = Histogram( - "engine_runtime_data_inputs_read_time", "time spent reading node data inputs", labelnames=["hostname"] -) -ENGINE_RUNTIME_DATA_OUTPUTS_READ_TIME = Histogram( - "engine_runtime_data_outputs_read_time", "time spent reading node data outputs", labelnames=["hostname"] -) -ENGINE_RUNTIME_DATA_READ_TIME = Histogram( - "engine_runtime_data_read_time", "time spent reading node data inputs and outputs", labelnames=["hostname"] -) - -ENGINE_RUNTIME_EXEC_DATA_INPUTS_READ_TIME = Histogram( - "engine_runtime_exec_data_inputs_read_time", - "time spent reading node execution data inputs", - labelnames=["hostname"], -) -ENGINE_RUNTIME_EXEC_DATA_OUTPUTS_READ_TIME = Histogram( - "engine_runtime_exec_data_outputs_read_time", - "time spent reading node execution data outputs", - labelnames=["hostname"], -) -ENGINE_RUNTIME_EXEC_DATA_READ_TIME = Histogram( - 
"engine_runtime_exec_data_read_time", - "time spent reading node execution data inputs and outputs", - labelnames=["hostname"], -) -ENGINE_RUNTIME_EXEC_DATA_INPUTS_WRITE_TIME = Histogram( - "engine_runtime_exec_data_inputs_write_time", - "time spent writing node execution data inputs", - labelnames=["hostname"], -) -ENGINE_RUNTIME_EXEC_DATA_OUTPUTS_WRITE_TIME = Histogram( - "engine_runtime_exec_data_outputs_write_time", - "time spent writing node execution data outputs", - labelnames=["hostname"], -) -ENGINE_RUNTIME_EXEC_DATA_WRITE_TIME = Histogram( - "engine_runtime_exec_data_write_time", - "time spent writing node execution data inputs and outputs", - labelnames=["hostname"], -) -ENGINE_RUNTIME_CALLBACK_DATA_READ_TIME = Histogram( - "engine_runtime_callback_data_read_time", "time spent reading node callback data", labelnames=["hostname"] -) - -ENGINE_RUNTIME_SCHEDULE_READ_TIME = Histogram( - "engine_runtime_schedule_read_time", "time spent reading schedule", labelnames=["hostname"] -) -ENGINE_RUNTIME_SCHEDULE_WRITE_TIME = Histogram( - "engine_runtime_schedule_write_time", "time spent writing schedule", labelnames=["hostname"] -) - -ENGINE_RUNTIME_STATE_READ_TIME = Histogram( - "engine_runtime_state_read_time", "time spent reading state", labelnames=["hostname"] -) -ENGINE_RUNTIME_STATE_WRITE_TIME = Histogram( - "engine_runtime_state_write_time", "time spent writing state", labelnames=["hostname"] -) - -ENGINE_RUNTIME_NODE_READ_TIME = Histogram( - "engine_runtime_node_read_time", "time spent reading node", labelnames=["hostname"] -) - -ENGINE_RUNTIME_PROCESS_READ_TIME = Histogram( - "engine_runtime_process_read_time", "time spent reading process", labelnames=["hostname"] -) diff --git a/lib/bamboo_engine/states.py b/lib/bamboo_engine/states.py deleted file mode 100644 index 7c7e8bc..0000000 --- a/lib/bamboo_engine/states.py +++ /dev/null @@ -1,69 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 
(BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -# 引擎内部状态及状态相关数据定义模块 - - -from enum import Enum - -from .utils.collections import ConstantDict - - -class StateType(Enum): - CREATED = "CREATED" - READY = "READY" - RUNNING = "RUNNING" - SUSPENDED = "SUSPENDED" - BLOCKED = "BLOCKED" - FINISHED = "FINISHED" - FAILED = "FAILED" - REVOKED = "REVOKED" - - -CREATED = StateType.CREATED.value -READY = StateType.READY.value -RUNNING = StateType.RUNNING.value -SUSPENDED = StateType.SUSPENDED.value -BLOCKED = StateType.BLOCKED.value -FINISHED = StateType.FINISHED.value -FAILED = StateType.FAILED.value -REVOKED = StateType.REVOKED.value - -ALL_STATES = frozenset([READY, RUNNING, SUSPENDED, BLOCKED, FINISHED, FAILED, REVOKED]) - -ARCHIVED_STATES = frozenset([FINISHED, FAILED, REVOKED]) -SLEEP_STATES = frozenset([SUSPENDED, REVOKED]) -CHILDREN_IGNORE_STATES = frozenset([BLOCKED]) - -INVERTED_TRANSITION = ConstantDict({RUNNING: frozenset([READY, FINISHED])}) - -TRANSITION = ConstantDict( - { - READY: frozenset([RUNNING, SUSPENDED]), - RUNNING: frozenset([FINISHED, FAILED, REVOKED, SUSPENDED]), - SUSPENDED: frozenset([READY, REVOKED, RUNNING]), - BLOCKED: frozenset([]), - FINISHED: frozenset([RUNNING, FAILED]), - FAILED: frozenset([READY, FINISHED]), - REVOKED: frozenset([]), - } -) - - -def can_transit(from_state, to_state): - - if from_state in TRANSITION: - if to_state in TRANSITION[from_state]: - 
return True - return False diff --git a/lib/bamboo_engine/template/__init__.py b/lib/bamboo_engine/template/__init__.py deleted file mode 100644 index d455fb8..0000000 --- a/lib/bamboo_engine/template/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -""" -模板相关逻辑存放模块 -""" - -from .template import Template # noqa diff --git a/lib/bamboo_engine/template/sandbox.py b/lib/bamboo_engine/template/sandbox.py deleted file mode 100644 index 1bd7503..0000000 --- a/lib/bamboo_engine/template/sandbox.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations under the License. -""" - - -# 模板渲染沙箱 - - -from typing import List, Dict - -import importlib - -from bamboo_engine.config import Settings - - -def _shield_words(sandbox: dict, words: List[str]): - for shield_word in words: - sandbox[shield_word] = None - - -class ModuleObject: - def __init__(self, sub_paths, module): - if len(sub_paths) == 1: - setattr(self, sub_paths[0], module) - return - setattr(self, sub_paths[0], ModuleObject(sub_paths[1:], module)) - - -def _import_modules(sandbox: dict, modules: Dict[str, str]): - for mod_path, alias in modules.items(): - mod = importlib.import_module(mod_path) - sub_paths = alias.split(".") - if len(sub_paths) == 1: - sandbox[alias] = mod - else: - sandbox[sub_paths[0]] = ModuleObject(sub_paths[1:], mod) - - -def get() -> dict: - sandbox = {} - - _shield_words(sandbox, Settings.MAKO_SANDBOX_SHIELD_WORDS) - _import_modules(sandbox, Settings.MAKO_SANDBOX_IMPORT_MODULES) - - return sandbox diff --git a/lib/bamboo_engine/template/template.py b/lib/bamboo_engine/template/template.py deleted file mode 100644 index aa8f257..0000000 --- a/lib/bamboo_engine/template/template.py +++ /dev/null @@ -1,195 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -# 封装模板处理,渲染逻辑的相关模块 - -import copy -import re -import logging - -from typing import Any, List, Set - -from mako.template import Template as MakoTemplate -from mako import lexer, codegen -from mako.exceptions import MakoException - -from bamboo_engine.utils.mako_utils.checker import check_mako_template_safety -from bamboo_engine.utils.mako_utils.exceptions import ForbiddenMakoTemplateException -from bamboo_engine.utils import mako_safety -from bamboo_engine.utils.string import deformat_var_key - -from . import sandbox - - -logger = logging.getLogger("root") -# find mako template(format is ${xxx},and ${}# not in xxx, # may raise memory error) -TEMPLATE_PATTERN = re.compile(r"\${[^${}#]+}") - - -class Template: - def __init__(self, data: Any): - self.data = data - - def get_reference(self, deformat=False) -> Set[str]: - """ - 获取当前数据中模板所引用的所有标志符 - - :return: 标志符列表 - :rtype: List[str] - """ - - reference = [] - templates = self.get_templates() - for tpl in templates: - reference += self._get_template_reference(tpl) - reference = set(reference) - if not deformat: - reference = {"${%s}" % r for r in reference} - - return reference - - def get_templates(self) -> List[str]: - """ - 获取当前数据中所有的模板片段 - - :return: 模板片段列表 - :rtype: List[str] - """ - templates = [] - data = self.data - if isinstance(data, str): - templates += self._get_string_templates(data) - if isinstance(data, (list, tuple)): - for item in data: - templates += Template(item).get_templates() - if isinstance(data, dict): - for value in list(data.values()): - templates += Template(value).get_templates() - return list(set(templates)) - - def render(self, context: dict) -> Any: - """ - 渲染当前模板 - - :param context: 模板渲染上下文 - :type context: dict - :return: 模板渲染后的数据 - :rtype: Any - """ - data = self.data - if isinstance(data, str): - return self._render_string(data, context) - if isinstance(data, list): - ldata = [""] * len(data) - for index, item in enumerate(data): - ldata[index] = 
Template(copy.deepcopy(item)).render(context) - return ldata - if isinstance(data, tuple): - ldata = [""] * len(data) - for index, item in enumerate(data): - ldata[index] = Template(copy.deepcopy(item)).render(context) - return tuple(ldata) - if isinstance(data, dict): - for key, value in list(data.items()): - data[key] = Template(copy.deepcopy(value)).render(context) - return data - return data - - def _get_string_templates(self, string) -> List[str]: - return list(set(TEMPLATE_PATTERN.findall(string))) - - def _get_template_reference(self, template: str) -> List[str]: - lex = lexer.Lexer(template) - - try: - node = lex.parse() - except MakoException as e: - logger.warning("pipeline get template[{}] reference error[{}]".format(template, e)) - return [] - - # Dummy compiler. _Identifiers class requires one - # but only interested in the reserved_names field - def compiler(): - return None - - compiler.reserved_names = set() - identifiers = codegen._Identifiers(compiler, node) - - return list(identifiers.undeclared) - - def _render_string(self, string: str, context: dict) -> str: - """ - 使用特定上下文渲染指定模板 - - :param string: 模板 - :type string: str - :param context: 上下文 - :type context: dict - :return: 渲染后的模板 - :rtype: str - """ - if not isinstance(string, str): - return string - templates = self._get_string_templates(string) - - # TODO keep render return object, here only process simple situation - if len(templates) == 1 and templates[0] == string and deformat_var_key(string) in context: - return context[deformat_var_key(string)] - - for tpl in templates: - try: - check_mako_template_safety( - tpl, - mako_safety.SingleLineNodeVisitor(), - mako_safety.SingleLinCodeExtractor(), - ) - except ForbiddenMakoTemplateException as e: - logger.warning("forbidden template: {}, exception: {}".format(tpl, e)) - continue - except Exception: - logger.exception("{} safety check error.".format(tpl)) - continue - resolved = Template._render_template(tpl, context) - string = 
string.replace(tpl, resolved) - return string - - @staticmethod - def _render_template(template: str, context: dict) -> Any: - """ - 使用特定上下文渲染指定模板 - - :param template: 模板 - :type template: Any - :param context: 上下文 - :type context: dict - :raises TypeError: [description] - :return: [description] - :rtype: str - """ - data = {} - data.update(sandbox.get()) - data.update(context) - if not isinstance(template, str): - raise TypeError("constant resolve error, template[%s] is not a string" % template) - try: - tm = MakoTemplate(template) - except (MakoException, SyntaxError) as e: - logger.error("pipeline resolve template[{}] error[{}]".format(template, e)) - return template - try: - resolved = tm.render_unicode(**data) - except Exception as e: - logger.warning("constant content({}) is invalid, data({}), error: {}".format(template, data, e)) - return template - else: - return resolved diff --git a/lib/bamboo_engine/utils/__init__.py b/lib/bamboo_engine/utils/__init__.py deleted file mode 100644 index 1714047..0000000 --- a/lib/bamboo_engine/utils/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -""" -引擎内部使用的各类工具存放模块 -""" diff --git a/lib/bamboo_engine/utils/boolrule/__init__.py b/lib/bamboo_engine/utils/boolrule/__init__.py deleted file mode 100644 index 2133c22..0000000 --- a/lib/bamboo_engine/utils/boolrule/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -# bool 表达式解析工具模块 - - -from .boolrule import ( # noqa - BoolRule, - MissingVariableException, - UnknownOperatorException, -) diff --git a/lib/bamboo_engine/utils/boolrule/boolrule.py b/lib/bamboo_engine/utils/boolrule/boolrule.py deleted file mode 100644 index 0f346d1..0000000 --- a/lib/bamboo_engine/utils/boolrule/boolrule.py +++ /dev/null @@ -1,291 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from pyparsing import ( - CaselessLiteral, - Combine, - Forward, - Group, - Keyword, - Optional, - ParseException, - ParseResults, - QuotedString, - Suppress, - Word, - ZeroOrMore, - alphanums, - alphas, - delimitedList, - nums, - oneOf, -) - -PATH_DELIMITER = "." - - -class SubstituteVal(object): - """ - Represents a token that will later be replaced by a context value. - """ - - def __init__(self, t): - self._path = t[0] - - def get_val(self, context): - if not context: - # raise MissingVariableException( - # 'context missing or empty' - # ) - return self._path - - val = context - - try: - for part in self._path.split(PATH_DELIMITER): - val = getattr(val, part) if hasattr(val, part) else val[part] - - except KeyError: - raise MissingVariableException("no value supplied for {}".format(self._path)) - - return val - - def __repr__(self): - return "SubstituteVal(%s)" % self._path - - -def get_bool_expression(): - - # Grammar definition - # match gcloud's variable - identifier = Combine(Optional("${") + Optional("_") + Word(alphas, alphanums + "_") + Optional("}")) - # identifier = Word(alphas, alphanums + "_") - propertyPath = delimitedList(identifier, PATH_DELIMITER, combine=True) - - and_ = Keyword("and", caseless=True) - or_ = Keyword("or", caseless=True) - - lparen = Suppress("(") - rparen = Suppress(")") - - binaryOp = oneOf("== != < > >= <= in notin issuperset notissuperset", caseless=True)("operator") - - E = CaselessLiteral("E") - numberSign = Word("+-", exact=1) - realNumber = Combine( - Optional(numberSign) - + (Word(nums) + "." + Optional(Word(nums)) | ("." 
+ Word(nums))) - + Optional(E + Optional(numberSign) + Word(nums)) - ) - - integer = Combine(Optional(numberSign) + Word(nums) + Optional(E + Optional("+") + Word(nums))) - - # str_ = quotedString.addParseAction(removeQuotes) - str_ = QuotedString('"') | QuotedString("'") - bool_ = oneOf("true false", caseless=True) - - simpleVals = ( - realNumber.setParseAction(lambda toks: float(toks[0])) - | integer.setParseAction(lambda toks: int(toks[0])) - | str_ - | bool_.setParseAction(lambda toks: toks[0] == "true") - | propertyPath.setParseAction(lambda toks: SubstituteVal(toks)) - ) # need to add support for alg expressions - - propertyVal = simpleVals | (lparen + Group(delimitedList(simpleVals)) + rparen) - - boolExpression = Forward() - boolCondition = Group( - (Group(propertyVal)("lval") + binaryOp + Group(propertyVal)("rval")) | (lparen + boolExpression + rparen) - ) - boolExpression << boolCondition + ZeroOrMore((and_ | or_) + boolExpression) - - return boolExpression - - -def double_equals_trans(lval, rval, operator): - # double equals - if operator in ["in", "notin"]: - if isinstance(rval, list) and len(rval): - transed_rval = [] - if isinstance(lval, int): - for item in rval: - try: - transed_rval.append(int(item)) - except Exception: - pass - elif isinstance(lval, str): - for item in rval: - try: - transed_rval.append(str(item)) - except Exception: - pass - rval += transed_rval - - elif operator in ["issuperset", "notissuperset"]: - # avoid convert set('abc') to {a, b, c}, but keep {'abc'} - if isinstance(lval, str): - lval = [lval] - if isinstance(rval, str): - rval = [rval] - - else: - try: - if isinstance(lval, int): - rval = int(rval) - elif isinstance(rval, int): - lval = int(lval) - if isinstance(lval, str): - rval = str(rval) - elif isinstance(rval, str): - lval = str(lval) - except Exception: - pass - - return lval, rval - - -class BoolRule(object): - """ - Represents a boolean expression and provides a `test` method to evaluate - the expression and 
determine its truthiness. - - :param query: A string containing the query to be evaluated - :param lazy: If ``True``, parse the query the first time it's tested rather - than immediately. This can help with performance if you - instantiate a lot of rules and only end up evaluating a - small handful. - """ - - _compiled = False - _tokens = None - _query = None - - def __init__(self, query, lazy=False, strict=True): - self._query = query - self.strict = strict - if not lazy: - self._compile() - - def test(self, context=None): - """ - Test the expression against the given context and return the result. - - :param context: A dict context to evaluate the expression against. - :return: True if the expression succesfully evaluated against the - context, or False otherwise. - """ - if self._is_match_all(): - return True - - self._compile() - return self._test_tokens(self._tokens, context) - - def _is_match_all(self): - return True if self._query == "*" else False - - def _compile(self): - if not self._compiled: - - # special case match-all query - if self._is_match_all(): - return - - try: - self._tokens = get_bool_expression().parseString(self._query, parseAll=self.strict) - except ParseException: - raise - - self._compiled = True - - def _expand_val(self, val, context): - if type(val) == list: - val = [self._expand_val(v, context) for v in val] - - if isinstance(val, SubstituteVal): - ret = val.get_val(context) - return ret - - if isinstance(val, ParseResults): - return [self._expand_val(x, context) for x in val.asList()] - - return val - - def _test_tokens(self, tokens, context): - passed = False - - for token in tokens: - - if not isinstance(token, ParseResults): - if token == "or" and passed: - return True - elif token == "and" and not passed: - return False - continue - - if not token.getName(): - passed = self._test_tokens(token, context) - continue - - items = token.asDict() - - operator = items["operator"] - lval = self._expand_val(items["lval"][0], context) - 
rval = self._expand_val(items["rval"][0], context) - lval, rval = double_equals_trans(lval, rval, operator) - - if operator in ("=", "==", "eq"): - passed = lval == rval - elif operator in ("!=", "ne"): - passed = lval != rval - elif operator in (">", "gt"): - passed = lval > rval - elif operator in (">=", "ge"): - passed = lval >= rval - elif operator in ("<", "lt"): - passed = lval < rval - elif operator in ("<=", "le"): - passed = lval <= rval - elif operator == "in": - passed = lval in rval - elif operator == "notin": - passed = lval not in rval - elif operator == "issuperset": - passed = set(lval).issuperset(set(rval)) - elif operator == "notissuperset": - passed = not set(lval).issuperset(set(rval)) - else: - raise UnknownOperatorException("Unknown operator '{}'".format(operator)) - - return passed - - -class MissingVariableException(Exception): - """ - Raised when an expression contains a property path that's not supplied in - the context. - """ - - pass - - -class UnknownOperatorException(Exception): - """ - Raised when an expression uses an unknown operator. - - This should never be thrown since the operator won't be correctly parsed as - a token by pyparsing, but it's useful to have this hanging around for when - additional operators are being added. - """ - - pass diff --git a/lib/bamboo_engine/utils/collections.py b/lib/bamboo_engine/utils/collections.py deleted file mode 100644 index 8f777cb..0000000 --- a/lib/bamboo_engine/utils/collections.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -# 集合类工具 - - -from typing import Any - - -class FancyDict(dict): - def __getattr__(self, key: str) -> Any: - try: - return self[key] - except KeyError as k: - raise AttributeError(k) - - def __setattr__(self, key: str, value: Any): - # 内建属性不放入 key 中 - if key.startswith("__") and key.endswith("__"): - super().__setattr__(key, value) - else: - self[key] = value - - def __delattr__(self, key: str): - try: - del self[key] - except KeyError as k: - raise AttributeError(k) - - -class ConstantDict(dict): - """ConstantDict is a subclass of :class:`dict`, implementing __setitem__ - method to avoid item assignment:: - - >>> d = ConstantDict({'key': 'value'}) - >>> d['key'] = 'value' - Traceback (most recent call last): - ... - TypeError: 'ConstantDict' object does not support item assignment - """ - - def __setitem__(self, key: str, value: Any): - raise TypeError("'%s' object does not support item assignment" % self.__class__.__name__) diff --git a/lib/bamboo_engine/utils/constants.py b/lib/bamboo_engine/utils/constants.py deleted file mode 100644 index f643464..0000000 --- a/lib/bamboo_engine/utils/constants.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -from bamboo_engine.eri import ContextValueType - - -VAR_CONTEXT_MAPPING = { - "plain": ContextValueType.PLAIN, - "splice": ContextValueType.SPLICE, - "lazy": ContextValueType.COMPUTE, -} diff --git a/lib/bamboo_engine/utils/graph.py b/lib/bamboo_engine/utils/graph.py deleted file mode 100644 index fad1c14..0000000 --- a/lib/bamboo_engine/utils/graph.py +++ /dev/null @@ -1,261 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -class Graph(object): - def __init__(self, nodes, flows): - self.nodes = nodes - self.flows = flows - self.path = [] - self.last_visited_node = "" - self.graph = {node: [] for node in self.nodes} - for flow in self.flows: - self.graph[flow[0]].append(flow[1]) - - def has_cycle(self): - self.path = [] - visited = {node: False for node in self.nodes} - visit_stack = {node: False for node in self.nodes} - - for node in self.nodes: - if self._has_cycle(node, visited, visit_stack): - return True - return False - - def _has_cycle(self, node, visited, visit_stack): - self.last_visited_node = node - self.path.append(node) - visited[node] = True - visit_stack[node] = True - - for neighbor in self.graph[node]: - if not visited[neighbor]: - if self._has_cycle(neighbor, visited, visit_stack): - return True - elif visit_stack[neighbor]: - self.path.append(neighbor) - return True - - self.path.remove(node) - visit_stack[node] = False - return False - - def get_cycle(self): - if self.has_cycle(): - cross_node = self.path[-1] - if self.path.count(cross_node) > 1: - return self.path[self.path.index(cross_node) :] - else: - return self.path - return [] - - -if __name__ == "__main__": - graph1 = Graph([1, 2, 3, 4], [[1, 2], [2, 3], [3, 4]]) - assert not graph1.has_cycle() - assert graph1.get_cycle() == [] - graph2 = Graph([1, 2, 3, 4], [[1, 2], [2, 3], [3, 4], [4, 1]]) - assert graph2.has_cycle() - assert graph2.get_cycle() == [1, 2, 3, 4, 1] - graph3 = Graph([1, 2, 3, 4], [[1, 2], [2, 3], [3, 4], [4, 2]]) - assert graph3.has_cycle() - assert graph3.get_cycle() == [2, 3, 4, 2] - graph4 = Graph( - [ - "n20c4a0601193f268bfa168f1192eacd", - "nef42d10350b3961b53df7af67e16d9b", - "n0ada7b4abe63771a43052eaf188dc4b", - "n0cd3b95c714388bacdf1a486ab432fc", - "n1430047af8537f88710c4bbf3cbfb0f", - "n383748fe27434d582f0ca17af9d968a", - "n51426abd4be3a4691c80a73c3f93b3c", - "n854753a77933562ae72ec87c365f23d", - "n89f083892a731d7b9d7edb0f372006d", - "n8d4568db0ad364692b0387e86a2f1e0", - 
"n8daedbb02273a0fbc94cc118c90649f", - "n90b7ef55fe839b181879e036b4f8ffe", - "n99817348b4a36a6931854c93eed8c5f", - "na02956eba6f3a36ab9b0af2f2350213", - "nc3d0d49adf530bbaffe53630c184c0a", - "nca50848d1aa340f8c2b4776ce81868d", - "ncab9a48e79d357195dcee68dad3a31f", - "ncb4e013a6a8348bab087cc8500a3876", - "ne1f86f902a23e7fa4a67192e8b38a05", - "ne26def77df1385caa206c64e7e3ea53", - "nf3ebee137c53da28091ad7d140ce00c", - "nfc1dcdd7476393b9a81a988c113e1cf", - "n0197f8f210b3a1b8a7fc2f90e94744e", - "n01fb40259ad3cf285bb11a8bbbe59f2", - "n03f39191e8a32629145ba6a677ed040", - "n03ffc3b9e12316d8be63261cb9dec71", - "n07982b8985139249bca3a046f3a4379", - "n0b9e36e6b633ddb906d2044f658f110", - "n136c4fedebe3eb0ba932495aff6a945", - "n17cdc62c5d43976a413bda8f35634eb", - "n1d48483d8023439ad98d61d156c85fb", - "n26725bdcc0931fab0bc73e7244545ca", - "n2890db24f6c3cd1bbcd6b7d8cf2c045", - "n2ad9caac5b737bd897d4c8844c85f12", - "n2c88d1c1d8b35aebf883cbf259fb6bc", - "n302d25dfc9c369ab13104d5208e7119", - "n31688b7ab44338e9e6cb8dcaf259eef", - "n374443fbdc1313d98ebbe19d535fec2", - "n38c3dd0344a3f86bc7511c454bcdf4c", - "n3934eef90463940a6a9cf4ba2e63b1c", - "n40d5f0ca4bc3dd99c0b264cb186f00f", - "n476ddcb6dd33e2abac43596b08c2bc1", - "n4790f8aa48e335aa712e2af757e180b", - "n48bbfdc912334fc89c4f48c05e8969e", - "n5bef4f4532a382eaf79a0af70b2396b", - "n5ced56bcc863060ac4977755f35a5f5", - "n66a0562670e37648a3e05c243335bff", - "n6dc118cd3f7341d9ef8c97c63e2e9d9", - "n6e9d52e1ea53958a93e5b34022e7037", - "n786694b5ed33295a885b5bcd8c7c1ce", - "n7dccd56c80233469a4609f684ebe457", - "n8492d92ab6a3da48c2b49d6fcb8a479", - "n86a8b1a56f9399f90c4c227594a9d03", - "n8a805c0cd02307bad9f7828880b53dc", - "n8c7e35b0457300d9d6a96a6b1d18329", - "n91fdaed36403d06a07f4afe85e2892c", - "n9335d0718a937f9a39ec5b36d5637fe", - "n9372fb07ad936cba31f3d4e440f395a", - "n9ab96f926d83a93a5d3ebe2888fd343", - "na2a8a54e68033d0a276eb88dbff91c3", - "na493a7b5d5b3cc29f4070a6c4589cb7", - "nadfa68cb2503a39aac6626d6c72484a", - 
"nae1218ddd2e3448b562bc79dc084401", - "nc012287be793377b975b0230b35d713", - "ncb2e01f0c5336fe82b0e0e496f2612b", - "ncb5843900903b4c8a0a8302474d8c51", - "ncbf4db2c48f3348b2c7081f9e3b363a", - "nd4ee6c3248935ce9239e4bb20a81ab8", - "ndb1cf7af0e2319c9868530d0df8fd93", - "ne36a6858a733430bffa4fec053dc1ab", - "ne7af4a7c3613b3d81fe9e6046425a36", - "ne8035dd8de732758c1cc623f80f2fc8", - "ned91fdb914c35f3a21f320f62d72ffd", - "nf5448b3c66430f4a299d08208d313a6", - "nfaa0756a06f300495fb2e2e45e05ed3", - ], - [ - ["n8d4568db0ad364692b0387e86a2f1e0", "n5bef4f4532a382eaf79a0af70b2396b"], - ["n8daedbb02273a0fbc94cc118c90649f", "nf5448b3c66430f4a299d08208d313a6"], - ["n01fb40259ad3cf285bb11a8bbbe59f2", "ne1f86f902a23e7fa4a67192e8b38a05"], - ["ncab9a48e79d357195dcee68dad3a31f", "n0197f8f210b3a1b8a7fc2f90e94744e"], - ["na493a7b5d5b3cc29f4070a6c4589cb7", "ne1f86f902a23e7fa4a67192e8b38a05"], - ["n89f083892a731d7b9d7edb0f372006d", "n136c4fedebe3eb0ba932495aff6a945"], - ["n51426abd4be3a4691c80a73c3f93b3c", "n9ab96f926d83a93a5d3ebe2888fd343"], - ["n89f083892a731d7b9d7edb0f372006d", "n8492d92ab6a3da48c2b49d6fcb8a479"], - ["n17cdc62c5d43976a413bda8f35634eb", "n6e9d52e1ea53958a93e5b34022e7037"], - ["n476ddcb6dd33e2abac43596b08c2bc1", "ne1f86f902a23e7fa4a67192e8b38a05"], - ["n6dc118cd3f7341d9ef8c97c63e2e9d9", "nfc1dcdd7476393b9a81a988c113e1cf"], - ["n91fdaed36403d06a07f4afe85e2892c", "ncb4e013a6a8348bab087cc8500a3876"], - ["n8a805c0cd02307bad9f7828880b53dc", "n3934eef90463940a6a9cf4ba2e63b1c"], - ["n2890db24f6c3cd1bbcd6b7d8cf2c045", "n0ada7b4abe63771a43052eaf188dc4b"], - ["ned91fdb914c35f3a21f320f62d72ffd", "n383748fe27434d582f0ca17af9d968a"], - ["n89f083892a731d7b9d7edb0f372006d", "n0b9e36e6b633ddb906d2044f658f110"], - ["nc3d0d49adf530bbaffe53630c184c0a", "na493a7b5d5b3cc29f4070a6c4589cb7"], - ["ncb2e01f0c5336fe82b0e0e496f2612b", "nc012287be793377b975b0230b35d713"], - ["n86a8b1a56f9399f90c4c227594a9d03", "nf3ebee137c53da28091ad7d140ce00c"], - ["nc3d0d49adf530bbaffe53630c184c0a", 
"nadfa68cb2503a39aac6626d6c72484a"], - ["na02956eba6f3a36ab9b0af2f2350213", "na2a8a54e68033d0a276eb88dbff91c3"], - ["n8daedbb02273a0fbc94cc118c90649f", "n07982b8985139249bca3a046f3a4379"], - ["n136c4fedebe3eb0ba932495aff6a945", "nfc1dcdd7476393b9a81a988c113e1cf"], - ["n9372fb07ad936cba31f3d4e440f395a", "n1430047af8537f88710c4bbf3cbfb0f"], - ["n8d4568db0ad364692b0387e86a2f1e0", "n91fdaed36403d06a07f4afe85e2892c"], - ["n854753a77933562ae72ec87c365f23d", "n40d5f0ca4bc3dd99c0b264cb186f00f"], - ["n854753a77933562ae72ec87c365f23d", "n1d48483d8023439ad98d61d156c85fb"], - ["n9ab96f926d83a93a5d3ebe2888fd343", "n383748fe27434d582f0ca17af9d968a"], - ["ne36a6858a733430bffa4fec053dc1ab", "n0cd3b95c714388bacdf1a486ab432fc"], - ["n03ffc3b9e12316d8be63261cb9dec71", "nca50848d1aa340f8c2b4776ce81868d"], - ["ne8035dd8de732758c1cc623f80f2fc8", "n0ada7b4abe63771a43052eaf188dc4b"], - ["n51426abd4be3a4691c80a73c3f93b3c", "ned91fdb914c35f3a21f320f62d72ffd"], - ["nd4ee6c3248935ce9239e4bb20a81ab8", "nfaa0756a06f300495fb2e2e45e05ed3"], - ["n5bef4f4532a382eaf79a0af70b2396b", "ncb4e013a6a8348bab087cc8500a3876"], - ["ne26def77df1385caa206c64e7e3ea53", "n786694b5ed33295a885b5bcd8c7c1ce"], - ["n854753a77933562ae72ec87c365f23d", "ne8035dd8de732758c1cc623f80f2fc8"], - ["n374443fbdc1313d98ebbe19d535fec2", "ndb1cf7af0e2319c9868530d0df8fd93"], - ["nfaa0756a06f300495fb2e2e45e05ed3", "n8c7e35b0457300d9d6a96a6b1d18329"], - ["n90b7ef55fe839b181879e036b4f8ffe", "n26725bdcc0931fab0bc73e7244545ca"], - ["n8d4568db0ad364692b0387e86a2f1e0", "ncb2e01f0c5336fe82b0e0e496f2612b"], - ["ncb5843900903b4c8a0a8302474d8c51", "ncb4e013a6a8348bab087cc8500a3876"], - ["nf5448b3c66430f4a299d08208d313a6", "nf3ebee137c53da28091ad7d140ce00c"], - ["n20c4a0601193f268bfa168f1192eacd", "nd4ee6c3248935ce9239e4bb20a81ab8"], - ["nca50848d1aa340f8c2b4776ce81868d", "nc3d0d49adf530bbaffe53630c184c0a"], - ["na02956eba6f3a36ab9b0af2f2350213", "n03ffc3b9e12316d8be63261cb9dec71"], - ["n7dccd56c80233469a4609f684ebe457", 
"n8daedbb02273a0fbc94cc118c90649f"], - ["n0ada7b4abe63771a43052eaf188dc4b", "na02956eba6f3a36ab9b0af2f2350213"], - ["n9335d0718a937f9a39ec5b36d5637fe", "n99817348b4a36a6931854c93eed8c5f"], - ["n90b7ef55fe839b181879e036b4f8ffe", "n5ced56bcc863060ac4977755f35a5f5"], - ["ncb4e013a6a8348bab087cc8500a3876", "ne26def77df1385caa206c64e7e3ea53"], - ["na02956eba6f3a36ab9b0af2f2350213", "n4790f8aa48e335aa712e2af757e180b"], - ["nc012287be793377b975b0230b35d713", "ncb4e013a6a8348bab087cc8500a3876"], - ["n8d4568db0ad364692b0387e86a2f1e0", "ncb5843900903b4c8a0a8302474d8c51"], - ["n40d5f0ca4bc3dd99c0b264cb186f00f", "n0ada7b4abe63771a43052eaf188dc4b"], - ["n38c3dd0344a3f86bc7511c454bcdf4c", "n17cdc62c5d43976a413bda8f35634eb"], - ["n6e9d52e1ea53958a93e5b34022e7037", "n90b7ef55fe839b181879e036b4f8ffe"], - ["nf3ebee137c53da28091ad7d140ce00c", "n51426abd4be3a4691c80a73c3f93b3c"], - ["n99817348b4a36a6931854c93eed8c5f", "n89f083892a731d7b9d7edb0f372006d"], - ["n89f083892a731d7b9d7edb0f372006d", "n6dc118cd3f7341d9ef8c97c63e2e9d9"], - ["n8daedbb02273a0fbc94cc118c90649f", "n66a0562670e37648a3e05c243335bff"], - ["nadfa68cb2503a39aac6626d6c72484a", "ne1f86f902a23e7fa4a67192e8b38a05"], - ["n383748fe27434d582f0ca17af9d968a", "nef42d10350b3961b53df7af67e16d9b"], - ["na02956eba6f3a36ab9b0af2f2350213", "n03f39191e8a32629145ba6a677ed040"], - ["nae1218ddd2e3448b562bc79dc084401", "n383748fe27434d582f0ca17af9d968a"], - ["n26725bdcc0931fab0bc73e7244545ca", "n1430047af8537f88710c4bbf3cbfb0f"], - ["n48bbfdc912334fc89c4f48c05e8969e", "n8a805c0cd02307bad9f7828880b53dc"], - ["ne7af4a7c3613b3d81fe9e6046425a36", "ncb4e013a6a8348bab087cc8500a3876"], - ["nfc1dcdd7476393b9a81a988c113e1cf", "n8d4568db0ad364692b0387e86a2f1e0"], - ["n0197f8f210b3a1b8a7fc2f90e94744e", "n99817348b4a36a6931854c93eed8c5f"], - ["n90b7ef55fe839b181879e036b4f8ffe", "n302d25dfc9c369ab13104d5208e7119"], - ["n1d48483d8023439ad98d61d156c85fb", "n0ada7b4abe63771a43052eaf188dc4b"], - ["na2a8a54e68033d0a276eb88dbff91c3", 
"nca50848d1aa340f8c2b4776ce81868d"], - ["n90b7ef55fe839b181879e036b4f8ffe", "n9372fb07ad936cba31f3d4e440f395a"], - ["ndb1cf7af0e2319c9868530d0df8fd93", "n2ad9caac5b737bd897d4c8844c85f12"], - ["n8492d92ab6a3da48c2b49d6fcb8a479", "nfc1dcdd7476393b9a81a988c113e1cf"], - ["n8d4568db0ad364692b0387e86a2f1e0", "ne7af4a7c3613b3d81fe9e6046425a36"], - ["n302d25dfc9c369ab13104d5208e7119", "n1430047af8537f88710c4bbf3cbfb0f"], - ["n51426abd4be3a4691c80a73c3f93b3c", "n2c88d1c1d8b35aebf883cbf259fb6bc"], - ["n786694b5ed33295a885b5bcd8c7c1ce", "n0cd3b95c714388bacdf1a486ab432fc"], - ["n854753a77933562ae72ec87c365f23d", "n2890db24f6c3cd1bbcd6b7d8cf2c045"], - ["nc3d0d49adf530bbaffe53630c184c0a", "n476ddcb6dd33e2abac43596b08c2bc1"], - ["n2c88d1c1d8b35aebf883cbf259fb6bc", "n383748fe27434d582f0ca17af9d968a"], - ["n0cd3b95c714388bacdf1a486ab432fc", "n854753a77933562ae72ec87c365f23d"], - ["n51426abd4be3a4691c80a73c3f93b3c", "nae1218ddd2e3448b562bc79dc084401"], - ["nc3d0d49adf530bbaffe53630c184c0a", "n01fb40259ad3cf285bb11a8bbbe59f2"], - ["ne1f86f902a23e7fa4a67192e8b38a05", "n374443fbdc1313d98ebbe19d535fec2"], - ["n0b9e36e6b633ddb906d2044f658f110", "nfc1dcdd7476393b9a81a988c113e1cf"], - ["ncab9a48e79d357195dcee68dad3a31f", "ncbf4db2c48f3348b2c7081f9e3b363a"], - ["n8daedbb02273a0fbc94cc118c90649f", "n86a8b1a56f9399f90c4c227594a9d03"], - ["ncbf4db2c48f3348b2c7081f9e3b363a", "n99817348b4a36a6931854c93eed8c5f"], - ["n1430047af8537f88710c4bbf3cbfb0f", "ncab9a48e79d357195dcee68dad3a31f"], - ["n4790f8aa48e335aa712e2af757e180b", "nca50848d1aa340f8c2b4776ce81868d"], - ["ne26def77df1385caa206c64e7e3ea53", "ne36a6858a733430bffa4fec053dc1ab"], - ["ncab9a48e79d357195dcee68dad3a31f", "n31688b7ab44338e9e6cb8dcaf259eef"], - ["n07982b8985139249bca3a046f3a4379", "nf3ebee137c53da28091ad7d140ce00c"], - ["n66a0562670e37648a3e05c243335bff", "nf3ebee137c53da28091ad7d140ce00c"], - ["n03f39191e8a32629145ba6a677ed040", "nca50848d1aa340f8c2b4776ce81868d"], - ["n8c7e35b0457300d9d6a96a6b1d18329", 
"n38c3dd0344a3f86bc7511c454bcdf4c"], - ["n5ced56bcc863060ac4977755f35a5f5", "n1430047af8537f88710c4bbf3cbfb0f"], - ["n2ad9caac5b737bd897d4c8844c85f12", "n48bbfdc912334fc89c4f48c05e8969e"], - ["n31688b7ab44338e9e6cb8dcaf259eef", "n99817348b4a36a6931854c93eed8c5f"], - ["n3934eef90463940a6a9cf4ba2e63b1c", "n7dccd56c80233469a4609f684ebe457"], - ["ncab9a48e79d357195dcee68dad3a31f", "n9335d0718a937f9a39ec5b36d5637fe"], - ], - ) - assert not graph4.has_cycle() - assert graph4.get_cycle() == [] - graph5 = Graph([1, 2, 3, 4, 5], [[1, 2], [2, 3], [2, 4], [4, 5], [5, 2]]) - assert graph5.has_cycle() - assert graph5.get_cycle() == [2, 4, 5, 2] diff --git a/lib/bamboo_engine/utils/host.py b/lib/bamboo_engine/utils/host.py deleted file mode 100644 index 1993989..0000000 --- a/lib/bamboo_engine/utils/host.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import socket - - -def get_hostname(): - """ - 获取当前主机名 - """ - return socket.gethostname() diff --git a/lib/bamboo_engine/utils/mako_safety.py b/lib/bamboo_engine/utils/mako_safety.py deleted file mode 100644 index e7abf2b..0000000 --- a/lib/bamboo_engine/utils/mako_safety.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -# Mako 安全工具 - - -from ast import NodeVisitor - -from mako import parsetree - -from .mako_utils.code_extract import MakoNodeCodeExtractor -from .mako_utils.exceptions import ForbiddenMakoTemplateException - - -class SingleLineNodeVisitor(NodeVisitor): - """ - 遍历语法树节点,遇到魔术方法使用或 import 时,抛出异常 - """ - - def __init__(self, *args, **kwargs): - super(SingleLineNodeVisitor, self).__init__(*args, **kwargs) - - def visit_Attribute(self, node): - if node.attr.startswith("__"): - raise ForbiddenMakoTemplateException("can not access private attribute") - - def visit_Name(self, node): - if node.id.startswith("__"): - raise ForbiddenMakoTemplateException("can not access private method") - - def visit_Import(self, node): - raise ForbiddenMakoTemplateException("can not use import statement") - - def visit_ImportFrom(self, node): - self.visit_Import(node) - - -class SingleLinCodeExtractor(MakoNodeCodeExtractor): - def extract(self, node): - if isinstance(node, parsetree.Code) or isinstance(node, parsetree.Expression): - return node.text - elif isinstance(node, parsetree.Text): - return None - else: - raise ForbiddenMakoTemplateException("Unsupported node: [{}]".format(node.__class__.__name__)) diff --git a/lib/bamboo_engine/utils/mako_utils/__init__.py b/lib/bamboo_engine/utils/mako_utils/__init__.py deleted file mode 100644 index 03e37cd..0000000 --- a/lib/bamboo_engine/utils/mako_utils/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -""" -Mako 相关工具模块 -""" diff --git a/lib/bamboo_engine/utils/mako_utils/checker.py b/lib/bamboo_engine/utils/mako_utils/checker.py deleted file mode 100644 index e75ceb2..0000000 --- a/lib/bamboo_engine/utils/mako_utils/checker.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -import ast -from typing import List - -from mako import parsetree -from mako.exceptions import MakoException -from mako.lexer import Lexer - -from .code_extract import MakoNodeCodeExtractor -from .exceptions import ForbiddenMakoTemplateException - - -def parse_template_nodes( - nodes: List[parsetree.Node], - node_visitor: ast.NodeVisitor, - code_extractor: MakoNodeCodeExtractor, -): - """ - 解析mako模板节点,逐个节点解析抽象语法树并检查安全性 - :param nodes: mako模板节点列表 - :param node_visitor: 节点访问类,用于遍历AST节点 - :param code_extractor: Mako 词法节点处理器,用于提取 python 代码 - """ - for node in nodes: - code = code_extractor.extract(node) - if code is None: - continue - - ast_node = ast.parse(code, "", "exec") - node_visitor.visit(ast_node) - if hasattr(node, "nodes"): - parse_template_nodes(node.nodes, node_visitor) - - -def check_mako_template_safety(text: str, node_visitor: ast.NodeVisitor, code_extractor: MakoNodeCodeExtractor) -> bool: - """ - 检查mako模板是否安全,若不安全直接抛出异常,安全则返回True - :param text: mako模板内容 - :param node_visitor: 节点访问器,用于遍历AST节点 - """ - try: - lexer_template = Lexer(text).parse() - except MakoException as mako_error: - raise ForbiddenMakoTemplateException("非mako模板,解析失败, {err_msg}".format(err_msg=mako_error.__class__.__name__)) - parse_template_nodes(lexer_template.nodes, node_visitor, code_extractor) - return True diff --git a/lib/bamboo_engine/utils/mako_utils/code_extract.py b/lib/bamboo_engine/utils/mako_utils/code_extract.py deleted file mode 100644 index 5d090a5..0000000 --- a/lib/bamboo_engine/utils/mako_utils/code_extract.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import abc - -from mako import parsetree -from mako.ast import PythonFragment - -from .exceptions import ForbiddenMakoTemplateException - - -class MakoNodeCodeExtractor: - @abc.abstractmethod - def extract(self, node): - """ - 处理 Mako Lexer 分割出来的 code 对象,返回需要检测的 python 代码,返回 None 表示该节点不需要处理 - - :param node: mako parsetree node - :return: 需要处理的代码,或 None - """ - raise NotImplementedError() - - -class StrictMakoNodeCodeExtractor(MakoNodeCodeExtractor): - def extract(self, node): - if isinstance(node, parsetree.Code) or isinstance(node, parsetree.Expression): - return node.text - elif isinstance(node, parsetree.ControlLine): - if node.isend: - return None - return PythonFragment(node.text).code - elif isinstance(node, parsetree.Text): - return None - else: - raise ForbiddenMakoTemplateException("不支持[{}]节点".format(node.__class__.__name__)) diff --git a/lib/bamboo_engine/utils/mako_utils/exceptions.py b/lib/bamboo_engine/utils/mako_utils/exceptions.py deleted file mode 100644 index 384bbfb..0000000 --- a/lib/bamboo_engine/utils/mako_utils/exceptions.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -class ForbiddenMakoTemplateException(Exception): - pass diff --git a/lib/bamboo_engine/utils/mako_utils/visitors.py b/lib/bamboo_engine/utils/mako_utils/visitors.py deleted file mode 100644 index 16b3676..0000000 --- a/lib/bamboo_engine/utils/mako_utils/visitors.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import _ast -import ast - -from werkzeug.utils import import_string - -from .exceptions import ForbiddenMakoTemplateException - - -class StrictNodeVisitor(ast.NodeVisitor): - """ - 遍历语法树节点,遇到魔术方法使用或import时,抛出异常 - """ - - BLACK_LIST_MODULE_METHODS = { - "os": dir(__import__("os")), - "subprocess": dir(__import__("subprocess")), - "shutil": dir(__import__("shutil")), - "ctypes": dir(__import__("ctypes")), - "codecs": dir(__import__("codecs")), - "sys": dir(__import__("sys")), - "socket": dir(__import__("socket")), - "webbrowser": dir(__import__("webbrowser")), - "threading": dir(__import__("threading")), - "sqlite3": dir(__import__("threading")), - "signal": dir(__import__("signal")), - "imaplib": dir(__import__("imaplib")), - "fcntl": dir(__import__("fcntl")), - "pdb": dir(__import__("pdb")), - "pty": dir(__import__("pty")), - "glob": dir(__import__("glob")), - "tempfile": dir(__import__("tempfile")), - "types": dir(import_string("types.CodeType")) + dir(import_string("types.FrameType")), - "builtins": [ - "getattr", - "hasattr", - "breakpoint", - "compile", - "delattr", - "open", - "eval", - "exec", - "execfile", - "exit", - "dir", - "globals", - "locals", - "input", - "iter", - "next", - "quit", - "setattr", - "vars", - "memoryview", - "super", - "print", - ], - } - - BLACK_LIST_METHODS = [] - for module_name, methods in BLACK_LIST_MODULE_METHODS.items(): - BLACK_LIST_METHODS.append(module_name) - BLACK_LIST_METHODS.extend(methods) - BLACK_LIST_METHODS = set(BLACK_LIST_METHODS) - - WHITE_LIST_MODULES = ["datetime", "re", "random", "json", "math"] - - def __init__(self, black_list_methods=None, white_list_modules=None): - self.black_list_methods = black_list_methods or self.BLACK_LIST_METHODS - self.white_list_modules = white_list_modules or self.WHITE_LIST_MODULES - - @staticmethod - def is_white_list_ast_obj(ast_obj: _ast.AST) -> bool: - """ - 判断是否白名单对象,特殊豁免 - :param ast_obj: 抽象语法树节点 - :return: bool - """ - # re 正则表达式允许使用 compile - if isinstance(ast_obj, 
_ast.Attribute) and isinstance(ast_obj.value, _ast.Name): - if ast_obj.value.id == "re" and ast_obj.attr in ["compile"]: - return True - - return False - - def visit_Attribute(self, node): - if self.is_white_list_ast_obj(node): - return - - if node.attr in self.black_list_methods or node.attr.startswith("_"): - raise ForbiddenMakoTemplateException("Mako template forbidden.") - - def visit_Name(self, node): - if node.id in self.black_list_methods or node.id.startswith("_"): - raise ForbiddenMakoTemplateException("Mako template forbidden.") - - def visit_Import(self, node): - for name in node.names: - if name.name not in self.white_list_modules: - raise ForbiddenMakoTemplateException("Mako template forbidden.") - - def visit_ImportFrom(self, node): - self.visit_Import(node) diff --git a/lib/bamboo_engine/utils/object.py b/lib/bamboo_engine/utils/object.py deleted file mode 100644 index f1cc872..0000000 --- a/lib/bamboo_engine/utils/object.py +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -class Representable: - def __str__(self): - return "<%s: %s>" % (self.__class__.__name__, self.__dict__) - - __repr__ = __str__ diff --git a/lib/bamboo_engine/utils/string.py b/lib/bamboo_engine/utils/string.py deleted file mode 100644 index 36eb14f..0000000 --- a/lib/bamboo_engine/utils/string.py +++ /dev/null @@ -1,75 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -# 字符串处理类工具 - - -import uuid - -ESCAPED_CHARS = {"\n": r"\n", "\r": r"\r", "\t": r"\t"} - - -def transform_escape_char(string: str) -> str: - """ - 对未转义的字符串进行转义,现有的转义字符包括\n, \r, \t - """ - if not isinstance(string, str): - return string - # 已转义的情况 - if len([c for c in ESCAPED_CHARS.values() if c in string]) > 0: - return string - for key, value in ESCAPED_CHARS.items(): - if key in string: - string = string.replace(key, value) - return string - - -def format_var_key(key: str) -> str: - """ - format key to ${key} - - :param key: key - :type key: str - :return: format key - :rtype: str - """ - return "${%s}" % key - - -def deformat_var_key(key: str) -> str: - """ - deformat ${key} to key - - :param key: key - :type key: str - :return: deformat key - :rtype: str - """ - return key[2:-1] - - -def unique_id(prefix: str) -> str: - if len(prefix) != 1: - raise ValueError("prefix length must be 1") - - return "{}{}".format(prefix, uuid.uuid4().hex) - - -def get_lower_case_name(text: str) -> str: - lst = [] - for index, char in enumerate(text): - if char.isupper() and index != 0: - lst.append("_") - lst.append(char) - - return "".join(lst).lower() diff --git a/lib/bamboo_engine/validator/__init__.py b/lib/bamboo_engine/validator/__init__.py deleted file mode 100644 index c9170ab..0000000 --- a/lib/bamboo_engine/validator/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from .api import validate_and_process_pipeline # noqa diff --git a/lib/bamboo_engine/validator/api.py b/lib/bamboo_engine/validator/api.py deleted file mode 100644 index 37d2b7e..0000000 --- a/lib/bamboo_engine/validator/api.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from bamboo_engine.eri import NodeType -from bamboo_engine import exceptions - -from . 
import rules -from .connection import ( - validate_graph_connection, - validate_graph_without_circle, -) -from .gateway import validate_gateways, validate_stream -from .utils import format_pipeline_tree_io_to_list - - -def validate_and_process_pipeline(pipeline: dict, cycle_tolerate=False): - for subproc in [act for act in pipeline["activities"].values() if act["type"] == NodeType.SubProcess.value]: - validate_and_process_pipeline(subproc["pipeline"], cycle_tolerate) - - format_pipeline_tree_io_to_list(pipeline) - # 1. connection validation - validate_graph_connection(pipeline) - - # do not tolerate circle in flow - if not cycle_tolerate: - no_cycle = validate_graph_without_circle(pipeline) - if not no_cycle["result"]: - raise exceptions.TreeInvalidException(no_cycle["message"]) - - # 2. gateway validation - validate_gateways(pipeline) - - # 3. stream validation - validate_stream(pipeline) - - -def add_sink_type(node_type: str): - rules.FLOW_NODES_WITHOUT_STARTEVENT.append(node_type) - rules.NODE_RULES[node_type] = rules.SINK_RULE diff --git a/lib/bamboo_engine/validator/connection.py b/lib/bamboo_engine/validator/connection.py deleted file mode 100644 index 9c74958..0000000 --- a/lib/bamboo_engine/validator/connection.py +++ /dev/null @@ -1,69 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations under the License. -""" - -from bamboo_engine.utils.graph import Graph -from bamboo_engine.exceptions import ConnectionValidateError - -from .rules import NODE_RULES -from .utils import get_nodes_dict - - -def validate_graph_connection(data): - """ - 节点连接合法性校验 - """ - nodes = get_nodes_dict(data) - - result = {"result": True, "message": {}, "failed_nodes": []} - - for i in nodes: - node_type = nodes[i]["type"] - rule = NODE_RULES[node_type] - message = "" - for j in nodes[i]["target"]: - if nodes[j]["type"] not in rule["allowed_out"]: - message += "不能连接%s类型节点\n" % nodes[i]["type"] - if rule["min_in"] > len(nodes[i]["source"]) or len(nodes[i]["source"]) > rule["max_in"]: - message += "节点的入度最大为%s,最小为%s\n" % (rule["max_in"], rule["min_in"]) - if rule["min_out"] > len(nodes[i]["target"]) or len(nodes[i]["target"]) > rule["max_out"]: - message += "节点的出度最大为%s,最小为%s\n" % (rule["max_out"], rule["min_out"]) - if message: - result["failed_nodes"].append(i) - result["message"][i] = message - - if result["failed_nodes"]: - raise ConnectionValidateError(failed_nodes=result["failed_nodes"], detail=result["message"]) - - -def validate_graph_without_circle(data): - """ - validate if a graph has not cycle - - return { - "result": False, - "message": "error message", - "error_data": ["node1_id", "node2_id", "node1_id"] - } - """ - - nodes = [data["start_event"]["id"], data["end_event"]["id"]] - nodes += list(data["gateways"].keys()) + list(data["activities"].keys()) - flows = [[flow["source"], flow["target"]] for _, flow in list(data["flows"].items())] - cycle = Graph(nodes, flows).get_cycle() - if cycle: - return { - "result": False, - "message": "pipeline graph has circle", - "error_data": cycle, - } - return {"result": True, "data": []} diff --git a/lib/bamboo_engine/validator/gateway.py b/lib/bamboo_engine/validator/gateway.py deleted file mode 100644 index cefdc36..0000000 --- 
a/lib/bamboo_engine/validator/gateway.py +++ /dev/null @@ -1,506 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import queue - -from bamboo_engine import exceptions -from .utils import get_node_for_sequence, get_nodes_dict - -STREAM = "stream" -P_STREAM = "p_stream" -P = "p" -MAIN_STREAM = "main" - -PARALLEL_GATEWAYS = {"ParallelGateway", "ConditionalParallelGateway"} - - -def not_in_parallel_gateway(gateway_stack, start_from=None): - """ - check whether there is parallel gateway in stack from specific gateway - :param gateway_stack: - :param start_from: - :return: - """ - start = 0 - if start_from: - id_stack = [g["id"] for g in gateway_stack] - start = id_stack.index(start_from) - - for i in range(start, len(gateway_stack)): - gateway = gateway_stack[i] - if gateway["type"] in PARALLEL_GATEWAYS: - return False - return True - - -def matched_in_prev_blocks(gid, current_start, block_nodes): - """ - check whether gateway with gid is matched in previous block - :param gid: - :param current_start: - :param block_nodes: - :return: - """ - prev_nodes = set() - for prev_start, nodes in list(block_nodes.items()): - if prev_start == current_start: - continue - prev_nodes.update(nodes) - - return gid in prev_nodes - - -def match_converge( - converges, - gateways, - cur_index, - 
end_event_id, - block_start, - block_nodes, - converged, - dist_from_start, - converge_in_len, - stack=None, -): - """ - find converge for parallel and exclusive in blocks, and check sanity of gateway - :param converges: - :param gateways: - :param cur_index: - :param end_event_id: - :param block_start: - :param block_nodes: - :param converged: - :param dist_from_start: - :param stack: - :param converge_in_len: - :return: - """ - - if stack is None: - stack = [] - - if cur_index not in gateways: - return None, False - - # return if this node is already matched - if gateways[cur_index]["match"]: - return gateways[cur_index]["match"], gateways[cur_index]["share_converge"] - - current_gateway = gateways[cur_index] - target = gateways[cur_index]["target"] - stack.append(gateways[cur_index]) - stack_id_set = {g["id"] for g in stack} - - # find closest converge recursively - for i in range(len(target)): - - # do not process prev blocks nodes - if matched_in_prev_blocks(target[i], block_start, block_nodes): - target[i] = None - continue - - block_nodes[block_start].add(target[i]) - - # do not find self's converge node again - while target[i] in gateways and target[i] != current_gateway["id"]: - - if target[i] in stack_id_set: - # return to previous gateway - - if not_in_parallel_gateway(stack, start_from=target[i]): - # do not trace back - target[i] = None - break - else: - raise exceptions.ConvergeMatchError(cur_index, "并行网关中的分支网关必须将所有分支汇聚到一个汇聚网关") - - converge_id, shared = match_converge( - converges=converges, - gateways=gateways, - cur_index=target[i], - end_event_id=end_event_id, - block_start=block_start, - block_nodes=block_nodes, - stack=stack, - converged=converged, - dist_from_start=dist_from_start, - converge_in_len=converge_in_len, - ) - if converge_id: - target[i] = converge_id - - if not shared: - # try to get next node fo converge which is not shared - target[i] = converges[converge_id]["target"][0] - - else: - # can't find corresponding converge gateway, 
which means this gateway will reach end event directly - target[i] = end_event_id - - if target[i] in converges and dist_from_start[target[i]] < dist_from_start[cur_index]: - # do not match previous converge - target[i] = None - - stack.pop() - - is_exg = current_gateway["type"] == "ExclusiveGateway" - converge_id = None - shared = False - cur_to_converge = len(target) - converge_end = False - - # gateway match validation - for i in range(len(target)): - - # mark first converge - if target[i] in converges and not converge_id: - converge_id = target[i] - - # same converge node - elif target[i] in converges and converge_id == target[i]: - pass - - # exclusive gateway point to end - elif is_exg and target[i] == end_event_id: - if not_in_parallel_gateway(stack): - converge_end = True - else: - raise exceptions.ConvergeMatchError(cur_index, "并行网关中的分支网关必须将所有分支汇聚到一个汇聚网关") - - # exclusive gateway point back to self - elif is_exg and target[i] == current_gateway["id"]: - # not converge behavior - cur_to_converge -= 1 - pass - - # exclusive gateway converge at different converge gateway - elif is_exg and target[i] in converges and converge_id != target[i]: - raise exceptions.ConvergeMatchError(cur_index, "分支网关的所有分支第一个遇到的汇聚网关必须是同一个") - - # meet previous node - elif is_exg and target[i] is None: - # not converge behavior - cur_to_converge -= 1 - pass - - # invalid cases - else: - raise exceptions.ConvergeMatchError(cur_index, "非法网关,请检查其分支是否符合规则") - - if is_exg: - if converge_id in converges: - # this converge is shared by multiple gateway - # only compare to the number of positive incoming - shared = converge_in_len[converge_id] > cur_to_converge or converge_id in converged - else: - # for parallel gateway - - converge_incoming = len(converges[converge_id]["incoming"]) - gateway_outgoing = len(target) - - if converge_incoming > gateway_outgoing: - for gateway_id in converged.get(converge_id, []): - # find another parallel gateway - if gateways[gateway_id]["type"] in 
PARALLEL_GATEWAYS: - raise exceptions.ConvergeMatchError(converge_id, "汇聚网关只能汇聚来自同一个并行网关的分支") - - shared = True - - elif converge_incoming < gateway_outgoing: - raise exceptions.ConvergeMatchError(converge_id, "汇聚网关没有汇聚其对应的并行网关的所有分支") - - current_gateway["match"] = converge_id - current_gateway["share_converge"] = shared - current_gateway["converge_end"] = converge_end - - converged.setdefault(converge_id, []).append(current_gateway["id"]) - block_nodes[block_start].add(current_gateway["id"]) - - return converge_id, shared - - -def distance_from(origin, node, tree, marked, visited=None): - """ - get max distance from origin to node - :param origin: - :param node: - :param tree: - :param marked: - :param visited: - :return: - """ - if visited is None: - visited = set() - - if node["id"] in marked: - return marked[node["id"]] - - if node["id"] == origin["id"]: - return 0 - - if node["id"] in visited: - # do not trace circle - return None - - visited.add(node["id"]) - - incoming_dist = [] - for incoming in node["incoming"]: - prev_node = get_node_for_sequence(incoming, tree, "source") - - # get incoming node's distance recursively - dist = distance_from(origin=origin, node=prev_node, tree=tree, marked=marked, visited=visited) - - # if this incoming do not trace back to current node - if dist is not None: - incoming_dist.append(dist + 1) - - if not incoming_dist: - return None - - # get max distance - res = max(incoming_dist) - marked[node["id"]] = res - return res - - -def validate_gateways(tree): - """ - check sanity of gateways and find their converge gateway - :param tree: - :return: - """ - converges = {} - gateways = {} - all = {} - distances = {} - converge_positive_in = {} - process_order = [] - - # data preparation - for i, item in list(tree["gateways"].items()): - node = { - "incoming": item["incoming"] if isinstance(item["incoming"], list) else [item["incoming"]], - "outgoing": item["outgoing"] if isinstance(item["outgoing"], list) else [item["outgoing"]], - 
"type": item["type"], - "target": [], - "source": [], - "id": item["id"], - "match": None, - } - - # find all first reach nodes(ConvergeGateway, ExclusiveGateway, ParallelGateway, EndEvent) - # which is not ServiceActivity for each gateway - for index in node["outgoing"]: - index = tree["flows"][index]["target"] - while index in tree["activities"]: - index = tree["flows"][tree["activities"][index]["outgoing"]]["target"] - - # append this node's id to current gateway's target list - node["target"].append(index) - - # get current node's distance from start event - if not distance_from(node=node, origin=tree["start_event"], tree=tree, marked=distances): - raise exceptions.ConvergeMatchError(node["id"], "无法获取该网关距离开始节点的距离") - - if item["type"] == "ConvergeGateway": - converges[i] = node - else: - process_order.append(i) - gateways[i] = node - - all[i] = node - - # calculate positive incoming number for converge - for nid, node in list(all.items()): - for t in node["target"]: - if t in converges and distances[t] > distances[nid]: - converge_positive_in[t] = converge_positive_in.setdefault(t, 0) + 1 - - process_order.sort(key=lambda gid: distances[gid]) - end_event_id = tree["end_event"]["id"] - converged = {} - block_nodes = {} - visited = set() - - # process in distance order - for gw in process_order: - if gw in visited or "match" in gw: - continue - visited.add(gw) - - block_nodes[gw] = set() - - match_converge( - converges=converges, - gateways=gateways, - cur_index=gw, - end_event_id=end_event_id, - converged=converged, - block_start=gw, - block_nodes=block_nodes, - dist_from_start=distances, - converge_in_len=converge_positive_in, - ) - - # set converge gateway - for i in gateways: - if gateways[i]["match"]: - tree["gateways"][i]["converge_gateway_id"] = gateways[i]["match"] - - return converged - - -def blend(source, target, custom_stream=None): - """ - blend source and target streams - :param source: - :param target: - :param custom_stream: - :return: - """ - - 
if custom_stream: - # use custom stream instead of source's stream - if isinstance(custom_stream, set): - for stream in custom_stream: - target[STREAM].add(stream) - else: - target[STREAM].add(custom_stream) - - return - - if len(source[STREAM]) == 0: - raise exceptions.InvalidOperationException("stream validation error, node(%s) stream is empty" % source["id"]) - - # blend - for s in source[STREAM]: - target[STREAM].add(s) - - -def streams_for_parallel(p): - streams = set() - for i, target_id in enumerate(p["target"]): - streams.add("{}_{}".format(p["id"], i)) - - return streams - - -def flowing(where, to, parallel_converges): - """ - mark target's stream from target - :param where: - :param to: - :param parallel_converges: - :return: - """ - is_parallel = where["type"] in PARALLEL_GATEWAYS - - stream = None - if is_parallel: - # add parallel's stream to its converge - parallel_converge = to[where["converge_gateway_id"]] - blend(source=where, target=parallel_converge, custom_stream=stream) - - if len(parallel_converge[STREAM]) > 1: - raise exceptions.StreamValidateError(node_id=parallel_converge) - - # flow to target - for i, target_id in enumerate(where["target"]): - target = to[target_id] - fake = False - - # generate different stream - if is_parallel: - stream = "{}_{}".format(where["id"], i) - - if target_id in parallel_converges: - - is_valid_branch = where[STREAM].issubset(parallel_converges[target_id][P_STREAM]) - is_direct_connect = where.get("converge_gateway_id") == target_id - - if is_valid_branch or is_direct_connect: - # do not flow when branch of parallel converge to its converge gateway - fake = True - - if not fake: - blend(source=where, target=target, custom_stream=stream) - - # sanity check - if len(target[STREAM]) != 1: - raise exceptions.StreamValidateError(node_id=target_id) - - -def validate_stream(tree): - """ - validate flow stream - :param tree: pipeline tree - :return: - """ - # data preparation - start_event_id = 
tree["start_event"]["id"] - end_event_id = tree["end_event"]["id"] - nodes = get_nodes_dict(tree) - nodes[start_event_id][STREAM] = {MAIN_STREAM} - nodes[end_event_id][STREAM] = {MAIN_STREAM} - parallel_converges = {} - visited = set({}) - - for nid, node in list(nodes.items()): - node.setdefault(STREAM, set()) - - # set allow streams for parallel's converge - if node["type"] in PARALLEL_GATEWAYS: - parallel_converges[node["converge_gateway_id"]] = { - P_STREAM: streams_for_parallel(node), - P: nid, - } - - # build stream from start - node_queue = queue.Queue() - node_queue.put(nodes[start_event_id]) - while not node_queue.empty(): - - # get node - node = node_queue.get() - - if node["id"] in visited: - # flow again to validate stream, but do not add target to queue - flowing(where=node, to=nodes, parallel_converges=parallel_converges) - continue - - # add to queue - for target_id in node["target"]: - node_queue.put(nodes[target_id]) - - # mark as visited - visited.add(node["id"]) - - # flow - flowing(where=node, to=nodes, parallel_converges=parallel_converges) - - # data clean - for nid, n in list(nodes.items()): - if len(n[STREAM]) != 1: - raise exceptions.StreamValidateError(node_id=nid) - - # replace set to str - n[STREAM] = n[STREAM].pop() - - # isolate node check - for __, node in list(nodes.items()): - if not node[STREAM]: - raise exceptions.IsolateNodeError() - - return nodes diff --git a/lib/bamboo_engine/validator/rules.py b/lib/bamboo_engine/validator/rules.py deleted file mode 100644 index 5107343..0000000 --- a/lib/bamboo_engine/validator/rules.py +++ /dev/null @@ -1,87 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from bamboo_engine.eri import NodeType - -MAX_IN = 1000 -MAX_OUT = 1000 -FLOW_NODES_WITHOUT_STARTEVENT = [ - NodeType.ServiceActivity.value, - NodeType.SubProcess.value, - NodeType.EmptyEndEvent.value, - NodeType.ParallelGateway.value, - NodeType.ConditionalParallelGateway.value, - NodeType.ExclusiveGateway.value, - NodeType.ConvergeGateway.value, -] - -FLOW_NODES_WITHOUT_START_AND_END = [ - NodeType.ServiceActivity.value, - NodeType.SubProcess.value, - NodeType.ParallelGateway.value, - NodeType.ConditionalParallelGateway.value, - NodeType.ExclusiveGateway.value, - NodeType.ConvergeGateway.value, -] - -SOURCE_RULE = { - "min_in": 0, - "max_in": 0, - "min_out": 1, - "max_out": 1, - "allowed_out": FLOW_NODES_WITHOUT_START_AND_END, -} - -SINK_RULE = { - "min_in": 1, - "max_in": MAX_IN, - "min_out": 0, - "max_out": 0, - "allowed_out": [], -} - -ACTIVITY_RULE = { - "min_in": 1, - "max_in": MAX_IN, - "min_out": 1, - "max_out": 1, - "allowed_out": FLOW_NODES_WITHOUT_STARTEVENT, -} - -EMIT_RULE = { - "min_in": 1, - "max_in": MAX_IN, - "min_out": 1, - "max_out": MAX_OUT, - "allowed_out": FLOW_NODES_WITHOUT_STARTEVENT, -} - -CONVERGE_RULE = { - "min_in": 1, - "max_in": MAX_IN, - "min_out": 1, - "max_out": 1, - "allowed_out": FLOW_NODES_WITHOUT_STARTEVENT, -} - -# rules of activity graph -NODE_RULES = { - NodeType.EmptyStartEvent.value: SOURCE_RULE, - NodeType.EmptyEndEvent.value: SINK_RULE, - NodeType.ServiceActivity.value: ACTIVITY_RULE, - NodeType.ExclusiveGateway.value: EMIT_RULE, - NodeType.ParallelGateway.value: EMIT_RULE, - NodeType.ConditionalParallelGateway.value: EMIT_RULE, - 
NodeType.ConvergeGateway.value: CONVERGE_RULE, - NodeType.SubProcess.value: ACTIVITY_RULE, -} diff --git a/lib/bamboo_engine/validator/utils.py b/lib/bamboo_engine/validator/utils.py deleted file mode 100644 index d74c707..0000000 --- a/lib/bamboo_engine/validator/utils.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from copy import deepcopy - -from bamboo_engine.exceptions import ValueError - - -def format_to_list(notype): - """ - format a data to list - :return: - """ - if isinstance(notype, list): - return notype - if not notype: - return [] - return [notype] - - -def format_node_io_to_list(node, i=True, o=True): - if i: - node["incoming"] = format_to_list(node["incoming"]) - - if o: - node["outgoing"] = format_to_list(node["outgoing"]) - - -def format_pipeline_tree_io_to_list(pipeline_tree): - """ - :summary: format incoming and outgoing to list - :param pipeline_tree: - :return: - """ - for act in list(pipeline_tree["activities"].values()): - format_node_io_to_list(act, o=False) - - for gateway in list(pipeline_tree["gateways"].values()): - format_node_io_to_list(gateway, o=False) - - format_node_io_to_list(pipeline_tree["end_event"], o=False) - - -def get_node_for_sequence(sid, tree, node_type): - target_id = tree["flows"][sid][node_type] - - if target_id in tree["activities"]: - return tree["activities"][target_id] - elif target_id in tree["gateways"]: - return tree["gateways"][target_id] - elif target_id == tree["end_event"]["id"]: - return tree["end_event"] - elif target_id == tree["start_event"]["id"]: - return tree["start_event"] - - raise ValueError("node(%s) not in data" % target_id) - - -def get_nodes_dict(data): - """ - get all FlowNodes of a pipeline - """ - data = deepcopy(data) - start = data["start_event"]["id"] - end = data["end_event"]["id"] - - nodes = {start: data["start_event"], end: data["end_event"]} - - nodes.update(data["activities"]) - nodes.update(data["gateways"]) - - for node in list(nodes.values()): - # format to list - node["incoming"] = format_to_list(node["incoming"]) - node["outgoing"] = format_to_list(node["outgoing"]) - - node["source"] = [data["flows"][incoming]["source"] for incoming in node["incoming"]] - node["target"] = [data["flows"][outgoing]["target"] for outgoing in node["outgoing"]] - - return nodes diff --git 
a/lib/pipeline/__init__.py b/lib/pipeline/__init__.py deleted file mode 100644 index 86513bb..0000000 --- a/lib/pipeline/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -default_app_config = "pipeline.apps.PipelineConfig" - -__version__ = "3.14.1" diff --git a/lib/pipeline/admin.py b/lib/pipeline/admin.py deleted file mode 100644 index 5c87ed0..0000000 --- a/lib/pipeline/admin.py +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from django.contrib import admin - -from pipeline import models - - -@admin.register(models.PipelineTemplate) -class PipelineTemplateAdmin(admin.ModelAdmin): - list_display = ["id", "template_id", "name", "create_time", "edit_time"] - list_filter = ["is_deleted"] - search_fields = ["name"] - raw_id_fields = ["snapshot"] - - -@admin.register(models.PipelineInstance) -class PipelineInstanceAdmin(admin.ModelAdmin): - list_display = ["id", "template", "name", "instance_id", "create_time", "start_time", "finish_time", "is_deleted"] - list_filter = ["is_started", "is_finished", "is_revoked", "is_deleted"] - search_fields = ["name"] - raw_id_fields = ["template", "snapshot", "execution_snapshot", "tree_info"] diff --git a/lib/pipeline/apps.py b/lib/pipeline/apps.py deleted file mode 100644 index 757b689..0000000 --- a/lib/pipeline/apps.py +++ /dev/null @@ -1,100 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import sys -import logging -import traceback - -import redis -from django.apps import AppConfig -from django.conf import settings -from redis.sentinel import Sentinel -from rediscluster import RedisCluster - -logger = logging.getLogger("root") - - -def get_client_through_sentinel(): - kwargs = {"sentinel_kwargs": {}} - sentinel_pwd = settings.REDIS.get("sentinel_password") - if sentinel_pwd: - kwargs["sentinel_kwargs"]["password"] = sentinel_pwd - if "password" in settings.REDIS: - kwargs["password"] = settings.REDIS["password"] - host = settings.REDIS["host"] - port = settings.REDIS["port"] - sentinels = list(zip([h.strip() for h in host.split(",")], [p.strip() for p in str(port).split(",")],)) - rs = Sentinel(sentinels, **kwargs) - # avoid None value in settings.REDIS - r = rs.master_for(settings.REDIS.get("service_name") or "mymaster") - # try to connect master - r.echo("Hello Redis") - return r - - -def get_cluster_client(): - kwargs = {"startup_nodes": [{"host": settings.REDIS["host"], "port": settings.REDIS["port"]}]} - if "password" in settings.REDIS: - kwargs["password"] = settings.REDIS["password"] - - r = RedisCluster(**kwargs) - r.echo("Hello Redis") - return r - - -def get_single_client(): - kwargs = { - "host": settings.REDIS["host"], - "port": settings.REDIS["port"], - } - if "password" in settings.REDIS: - kwargs["password"] = settings.REDIS["password"] - if "db" in settings.REDIS: - kwargs["db"] = settings.REDIS["db"] - - pool = redis.ConnectionPool(**kwargs) - return redis.StrictRedis(connection_pool=pool) - - -CLIENT_GETTER = { - "replication": get_client_through_sentinel, - "cluster": get_cluster_client, - "single": get_single_client, -} - - -class PipelineConfig(AppConfig): - name = "pipeline" - verbose_name = "Pipeline" - - def ready(self): - from pipeline.signals.handlers import pipeline_template_post_save_handler # noqa - from pipeline.validators.handlers import post_new_end_event_register_handler # noqa - - # init redis pool - if 
hasattr(settings, "REDIS"): - mode = settings.REDIS.get("mode") or "single" - try: - settings.REDIS_INST = CLIENT_GETTER[mode]() - settings.redis_inst = CLIENT_GETTER[mode]() - except Exception: - # fall back to single node mode - logger.error("redis client init error: %s" % traceback.format_exc()) - elif ( - getattr(settings, "PIPELINE_DATA_BACKEND", None) - == "pipeline.engine.core.data.redis_backend.RedisDataBackend" - ): - logger.error("can not find REDIS in settings!") - - # avoid big flow pickle raise maximum recursion depth exceeded error - sys.setrecursionlimit(10000) diff --git a/lib/pipeline/builder/__init__.py b/lib/pipeline/builder/__init__.py deleted file mode 100644 index 01852d1..0000000 --- a/lib/pipeline/builder/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from .builder import * # noqa -from .flow import * # noqa diff --git a/lib/pipeline/builder/builder.py b/lib/pipeline/builder/builder.py deleted file mode 100644 index 41e8fe9..0000000 --- a/lib/pipeline/builder/builder.py +++ /dev/null @@ -1,227 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. 
-Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import copy -import queue - -from pipeline.builder.flow.data import Data, Params -from pipeline.builder.flow.event import ExecutableEndEvent -from pipeline.core.constants import PE -from pipeline.parser.utils import replace_all_id -from pipeline.utils.uniqid import uniqid - -__all__ = ["build_tree"] - -__skeleton = { - PE.id: None, - PE.start_event: None, - PE.end_event: None, - PE.activities: {}, - PE.gateways: {}, - PE.flows: {}, - PE.data: {PE.inputs: {}, PE.outputs: {}}, -} - -__node_type = { - PE.ServiceActivity: PE.activities, - PE.SubProcess: PE.activities, - PE.EmptyEndEvent: PE.end_event, - PE.EmptyStartEvent: PE.start_event, - PE.ParallelGateway: PE.gateways, - PE.ConditionalParallelGateway: PE.gateways, - PE.ExclusiveGateway: PE.gateways, - PE.ConvergeGateway: PE.gateways, -} - -__start_elem = {PE.EmptyStartEvent} - -__end_elem = {PE.EmptyEndEvent} - -__multiple_incoming_type = { - PE.ServiceActivity, - PE.ConvergeGateway, - PE.EmptyEndEvent, - PE.ParallelGateway, - PE.ConditionalParallelGateway, - PE.ExclusiveGateway, - PE.SubProcess, -} - -__incoming = "__incoming" - - -def build_tree(start_elem, id=None, data=None, replace_id=False): - tree = copy.deepcopy(__skeleton) - elem_queue = queue.Queue() - processed_elem = set() - - tree[__incoming] = {} - elem_queue.put(start_elem) - - while not elem_queue.empty(): - # get elem - elem = elem_queue.get() - - # update node when we 
meet again - if elem.id in processed_elem: - __update(tree, elem) - continue - - # add to queue - for e in elem.outgoing: - elem_queue.put(e) - - # mark as processed - processed_elem.add(elem.id) - - # tree grow - __grow(tree, elem) - - del tree[__incoming] - tree[PE.id] = id or uniqid() - user_data = data.to_dict() if isinstance(data, Data) else data - tree[PE.data] = user_data or tree[PE.data] - if replace_id: - replace_all_id(tree) - return tree - - -def __update(tree, elem): - node_type = __node_type[elem.type()] - node = tree[node_type] if node_type == PE.end_event else tree[node_type][elem.id] - node[PE.incoming] = tree[__incoming][elem.id] - - -def __grow(tree, elem): - if elem.type() in __start_elem: - outgoing = uniqid() - tree[PE.start_event] = { - PE.incoming: "", - PE.outgoing: outgoing, - PE.type: elem.type(), - PE.id: elem.id, - PE.name: elem.name, - } - - next_elem = elem.outgoing[0] - __grow_flow(tree, outgoing, elem, next_elem) - - elif elem.type() in __end_elem or isinstance(elem, ExecutableEndEvent): - tree[PE.end_event] = { - PE.incoming: tree[__incoming][elem.id], - PE.outgoing: "", - PE.type: elem.type(), - PE.id: elem.id, - PE.name: elem.name, - } - - elif elem.type() == PE.ServiceActivity: - outgoing = uniqid() - - tree[PE.activities][elem.id] = { - PE.incoming: tree[__incoming][elem.id], - PE.outgoing: outgoing, - PE.type: elem.type(), - PE.id: elem.id, - PE.name: elem.name, - PE.error_ignorable: elem.error_ignorable, - PE.timeout: elem.timeout, - PE.skippable: elem.skippable, - PE.retryable: elem.retryable, - PE.component: elem.component_dict(), - PE.optional: False, - PE.failure_handler: elem.failure_handler, - } - - next_elem = elem.outgoing[0] - __grow_flow(tree, outgoing, elem, next_elem) - - elif elem.type() == PE.SubProcess: - outgoing = uniqid() - - subprocess_param = elem.params.to_dict() if isinstance(elem.params, Params) else elem.params - - subprocess = { - PE.id: elem.id, - PE.incoming: tree[__incoming][elem.id], - PE.name: 
elem.name, - PE.outgoing: outgoing, - PE.type: elem.type(), - PE.params: subprocess_param, - } - - if elem.template_id: - subprocess[PE.template_id] = elem.template_id - else: - subprocess[PE.pipeline] = build_tree( - start_elem=elem.start, id=elem.id, data=elem.data, replace_id=elem.replace_id - ) - - tree[PE.activities][elem.id] = subprocess - - next_elem = elem.outgoing[0] - __grow_flow(tree, outgoing, elem, next_elem) - - elif elem.type() == PE.ParallelGateway: - outgoing = [uniqid() for _ in range(len(elem.outgoing))] - - tree[PE.gateways][elem.id] = { - PE.id: elem.id, - PE.incoming: tree[__incoming][elem.id], - PE.outgoing: outgoing, - PE.type: elem.type(), - PE.name: elem.name, - } - - for i, next_elem in enumerate(elem.outgoing): - __grow_flow(tree, outgoing[i], elem, next_elem) - - elif elem.type() in {PE.ExclusiveGateway, PE.ConditionalParallelGateway}: - outgoing = [uniqid() for _ in range(len(elem.outgoing))] - - tree[PE.gateways][elem.id] = { - PE.id: elem.id, - PE.incoming: tree[__incoming][elem.id], - PE.outgoing: outgoing, - PE.type: elem.type(), - PE.name: elem.name, - PE.conditions: elem.link_conditions_with(outgoing), - } - - for i, next_elem in enumerate(elem.outgoing): - __grow_flow(tree, outgoing[i], elem, next_elem) - - elif elem.type() == PE.ConvergeGateway: - outgoing = uniqid() - - tree[PE.gateways][elem.id] = { - PE.id: elem.id, - PE.incoming: tree[__incoming][elem.id], - PE.outgoing: outgoing, - PE.type: elem.type(), - PE.name: elem.name, - } - - next_elem = elem.outgoing[0] - __grow_flow(tree, outgoing, elem, next_elem) - - else: - raise Exception() - - -def __grow_flow(tree, outgoing, elem, next_element): - tree[PE.flows][outgoing] = {PE.is_default: False, PE.source: elem.id, PE.target: next_element.id, PE.id: outgoing} - if next_element.type() in __multiple_incoming_type: - tree[__incoming].setdefault(next_element.id, []).append(outgoing) - else: - tree[__incoming][next_element.id] = outgoing diff --git 
a/lib/pipeline/builder/flow/__init__.py b/lib/pipeline/builder/flow/__init__.py deleted file mode 100644 index 3d63692..0000000 --- a/lib/pipeline/builder/flow/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from .activity import * # noqa -from .event import * # noqa -from .gateway import * # noqa -from .data import * # noqa diff --git a/lib/pipeline/builder/flow/activity.py b/lib/pipeline/builder/flow/activity.py deleted file mode 100644 index 90b41c6..0000000 --- a/lib/pipeline/builder/flow/activity.py +++ /dev/null @@ -1,75 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations under the License. -""" - -from pipeline.builder.flow.base import * # noqa -from pipeline.utils.collections import FancyDict - -__all__ = ["ServiceActivity", "SubProcess"] - - -class ServiceActivity(Element): - def __init__( - self, - component_code=None, - failure_handler=None, - error_ignorable=False, - timeout=None, - skippable=True, - retryable=True, - *args, - **kwargs - ): - self.component = FancyDict({"code": component_code, "inputs": FancyDict({})}) - self.failure_handler = ( - "{module}.{name}".format(module=failure_handler.__module__, name=failure_handler.__name__) - if failure_handler - else None - ) - self.error_ignorable = error_ignorable - self.timeout = timeout - self.skippable = skippable - self.retryable = retryable - super(ServiceActivity, self).__init__(*args, **kwargs) - - def type(self): - return PE.ServiceActivity - - def component_dict(self): - return { - "code": self.component.code, - "inputs": {key: var.to_dict() for key, var in list(self.component.inputs.items())}, - } - - -class SubProcess(Element): - def __init__( - self, - start=None, - data=None, - params=None, - global_outputs=None, - replace_id=False, - template_id=None, - *args, - **kwargs - ): - self.start = start - self.data = data - self.params = params or {} - self.replace_id = replace_id - self.global_outputs = FancyDict(global_outputs or {}) - self.template_id = template_id - super(SubProcess, self).__init__(*args, **kwargs) - - def type(self): - return PE.SubProcess diff --git a/lib/pipeline/builder/flow/base.py b/lib/pipeline/builder/flow/base.py deleted file mode 100644 index f9e21c2..0000000 --- a/lib/pipeline/builder/flow/base.py +++ /dev/null @@ -1,79 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. 
All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from pipeline.core.constants import PE -from pipeline.utils.uniqid import uniqid - -__all__ = ["Element", "PE"] - - -class Element(object): - def __init__(self, id=None, name=None, outgoing=None): - self.id = id or uniqid() - self.name = name - self.outgoing = outgoing or [] - - def extend(self, element): - """ - build a connection from self to element and return element - :param element: target - :rtype: Element - """ - self.outgoing.append(element) - return element - - def connect(self, *args): - """ - build connections from self to elements in args and return self - :param args: target elements - :rtype: Element - """ - for e in args: - self.outgoing.append(e) - return self - - def converge(self, element): - """ - converge all connection those diverge from self to element and return element - :param element: target - :rtype: Element - """ - for e in self.outgoing: - e.tail().connect(element) - return element - - def to(self, element): - return element - - def tail(self): - """ - get tail element for self - :rtype: Element - """ - is_tail = len(self.outgoing) == 0 - e = self - - while not is_tail: - e = e.outgoing[0] - is_tail = len(e.outgoing) == 0 - - return e - - def type(self): - raise NotImplementedError() - - def __eq__(self, other): - return self.id == other.id - - def __repr__(self): - return "<{cls} {name}:{id}>".format(cls=type(self).__name__, name=self.name, id=self.id) diff --git a/lib/pipeline/builder/flow/data.py 
b/lib/pipeline/builder/flow/data.py deleted file mode 100644 index e121fe5..0000000 --- a/lib/pipeline/builder/flow/data.py +++ /dev/null @@ -1,96 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from pipeline.core.constants import PE -from pipeline.utils.collections import FancyDict - - -class Data(object): - def __init__(self, inputs=None, outputs=None): - self.inputs = FancyDict(inputs or {}) - self.outputs = outputs or [] - - def to_dict(self): - base = {"inputs": {}, "outputs": self.outputs} - - for key, value in list(self.inputs.items()): - base["inputs"][key] = value.to_dict() if isinstance(value, Var) else value - - return base - - -class Params(object): - def __init__(self, params=None): - self.params = FancyDict(params or {}) - - def to_dict(self): - base = {} - - for key, value in list(self.params.items()): - base[key] = value.to_dict() if isinstance(value, Var) else value - - return base - - -class Var(object): - PLAIN = PE.plain - SPLICE = PE.splice - LAZY = PE.lazy - - def __init__(self, type, value, custom_type=None): - self.type = type - self.value = value - self.custom_type = custom_type - - def to_dict(self): - base = {"type": self.type, "value": self.value} - if self.type == self.LAZY: - base["custom_type"] = self.custom_type - - return base - - -class 
DataInput(Var): - def __init__(self, *args, **kwargs): - super(DataInput, self).__init__(*args, **kwargs) - - def to_dict(self): - base = super(DataInput, self).to_dict() - base["is_param"] = True - return base - - -class NodeOutput(Var): - def __init__(self, source_act, source_key, *args, **kwargs): - self.source_act = source_act - self.source_key = source_key - kwargs["value"] = None - super(NodeOutput, self).__init__(*args, **kwargs) - - def to_dict(self): - base = super(NodeOutput, self).to_dict() - base["source_act"] = self.source_act - base["source_key"] = self.source_key - return base - - -class RewritableNodeOutput(Var): - def __init__(self, source_act, *args, **kwargs): - self.source_act = source_act - kwargs["value"] = None - super(RewritableNodeOutput, self).__init__(*args, **kwargs) - - def to_dict(self): - base = super(RewritableNodeOutput, self).to_dict() - base["source_act"] = self.source_act - return base diff --git a/lib/pipeline/builder/flow/event.py b/lib/pipeline/builder/flow/event.py deleted file mode 100644 index 3be3d1a..0000000 --- a/lib/pipeline/builder/flow/event.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from pipeline.builder.flow.base import * # noqa - -__all__ = ["EmptyEndEvent", "EmptyStartEvent", "ExecutableEndEvent"] - - -class EmptyStartEvent(Element): - def type(self): - return PE.EmptyStartEvent - - -class EmptyEndEvent(Element): - def type(self): - return PE.EmptyEndEvent - - -class ExecutableEndEvent(Element): - def __init__(self, type, **kwargs): - self._type = type - super(ExecutableEndEvent, self).__init__(**kwargs) - - def type(self): - return self._type diff --git a/lib/pipeline/builder/flow/gateway.py b/lib/pipeline/builder/flow/gateway.py deleted file mode 100644 index e9d2d03..0000000 --- a/lib/pipeline/builder/flow/gateway.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from pipeline.builder.flow.base import * # noqa - -__all__ = ["ParallelGateway", "ExclusiveGateway", "ConvergeGateway", "ConditionalParallelGateway"] - - -class ParallelGateway(Element): - def type(self): - return PE.ParallelGateway - - -class ConditionGateway(Element): - def __init__(self, conditions=None, *args, **kwargs): - self.conditions = conditions or {} - super(ConditionGateway, self).__init__(*args, **kwargs) - - def add_condition(self, index, evaluate): - self.conditions[index] = evaluate - - def link_conditions_with(self, outgoing): - conditions = {} - for i, out in enumerate(outgoing): - conditions[out] = {PE.evaluate: self.conditions[i]} - - return conditions - - -class ConditionalParallelGateway(ConditionGateway): - def type(self): - return PE.ConditionalParallelGateway - - -class ExclusiveGateway(ConditionGateway): - def type(self): - return PE.ExclusiveGateway - - -class ConvergeGateway(Element): - def type(self): - return PE.ConvergeGateway diff --git a/lib/pipeline/celery/__init__.py b/lib/pipeline/celery/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/celery/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" diff --git a/lib/pipeline/celery/queues.py b/lib/pipeline/celery/queues.py deleted file mode 100644 index 1a5ef20..0000000 --- a/lib/pipeline/celery/queues.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -class ScalableQueues(object): - _queues = {} - - @classmethod - def queues(cls): - return cls._queues - - @classmethod - def add(cls, name, routing_key="", queue_arguments=None): - queue_arguments = queue_arguments or {} - cls._queues[name] = {"name": name, "routing_key": routing_key or name, "queue_arguments": queue_arguments} - - @classmethod - def has_queue(cls, queue): - return queue in cls._queues - - @classmethod - def routing_key_for(cls, queue): - return cls._queues[queue]["routing_key"] diff --git a/lib/pipeline/celery/settings.py b/lib/pipeline/celery/settings.py deleted file mode 100644 index 710d5d4..0000000 --- a/lib/pipeline/celery/settings.py +++ /dev/null @@ -1,185 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import copy - -from kombu import Exchange, Queue - -from pipeline.celery.queues import ScalableQueues -from pipeline.constants import PIPELINE_MAX_PRIORITY - -default_exchange = Exchange("default", type="direct") - -# 设置时区 -CELERY_TIMEZONE = "Asia/Shanghai" -# 启动时区设置 -CELERY_ENABLE_UTC = False - -# new priority queues -PUSH_DEFAULT_QUEUE_NAME = "pipeline_priority" -PUSH_DEFAULT_ROUTING_KEY = "pipeline_push_priority" - -SCHEDULE_DEFAULT_QUEUE_NAME = "service_schedule_priority" -SCHEDULE_DEFAULT_ROUTING_KEY = "schedule_service_priority" - -ADDITIONAL_DEFAULT_QUEUE_NAME = "pipeline_additional_task_priority" -ADDITIONAL_DEFAULT_ROUTING_KEY = "additional_task_priority" - -STATISTICS_PRIORITY_QUEUE_NAME = "pipeline_statistics_priority" -STATISTICS_PRIORITY_ROUTING_KEY = "pipeline_statistics_priority" - -SCALABLE_QUEUES_CONFIG = { - PUSH_DEFAULT_QUEUE_NAME: {"name": PUSH_DEFAULT_QUEUE_NAME, "routing_key": PUSH_DEFAULT_ROUTING_KEY}, - SCHEDULE_DEFAULT_QUEUE_NAME: {"name": SCHEDULE_DEFAULT_QUEUE_NAME, "routing_key": SCHEDULE_DEFAULT_ROUTING_KEY}, -} - -PIPELINE_PRIORITY_ROUTING = { - "queue": PUSH_DEFAULT_QUEUE_NAME, - "routing_key": PUSH_DEFAULT_ROUTING_KEY, -} - -PIPELINE_SCHEDULE_PRIORITY_ROUTING = { - "queue": SCHEDULE_DEFAULT_QUEUE_NAME, - "routing_key": SCHEDULE_DEFAULT_ROUTING_KEY, -} - -PIPELINE_ADDITIONAL_PRIORITY_ROUTING = { - "queue": ADDITIONAL_DEFAULT_QUEUE_NAME, - "routing_key": ADDITIONAL_DEFAULT_ROUTING_KEY, -} - -PIPELINE_STATISTICS_PRIORITY_ROUTING = { - "queue": STATISTICS_PRIORITY_QUEUE_NAME, - "routing_key": STATISTICS_PRIORITY_ROUTING_KEY, -} - -CELERY_ROUTES = { - 
# schedule - "pipeline.engine.tasks.service_schedule": PIPELINE_SCHEDULE_PRIORITY_ROUTING, - # pipeline - "pipeline.engine.tasks.batch_wake_up": PIPELINE_PRIORITY_ROUTING, - "pipeline.engine.tasks.dispatch": PIPELINE_PRIORITY_ROUTING, - "pipeline.engine.tasks.process_wake_up": PIPELINE_PRIORITY_ROUTING, - "pipeline.engine.tasks.start": PIPELINE_PRIORITY_ROUTING, - "pipeline.engine.tasks.wake_from_schedule": PIPELINE_PRIORITY_ROUTING, - "pipeline.engine.tasks.wake_up": PIPELINE_PRIORITY_ROUTING, - "pipeline.engine.tasks.process_unfreeze": PIPELINE_PRIORITY_ROUTING, - # another - "pipeline.log.tasks.clean_expired_log": PIPELINE_ADDITIONAL_PRIORITY_ROUTING, - "pipeline.engine.tasks.node_timeout_check": PIPELINE_ADDITIONAL_PRIORITY_ROUTING, - "pipeline.contrib.periodic_task.tasks.periodic_task_start": PIPELINE_ADDITIONAL_PRIORITY_ROUTING, - "pipeline.contrib.periodic_task.tasks.bamboo_engine_periodic_task_start": PIPELINE_ADDITIONAL_PRIORITY_ROUTING, - "pipeline.engine.tasks.heal_zombie_process": PIPELINE_ADDITIONAL_PRIORITY_ROUTING, - "pipeline.engine.tasks.expired_tasks_clean": PIPELINE_ADDITIONAL_PRIORITY_ROUTING, - # statistics - "pipeline.contrib.statistics.tasks.pipeline_post_save_statistics_task": PIPELINE_STATISTICS_PRIORITY_ROUTING, - "pipeline.contrib.statistics.tasks.pipeline_archive_statistics_task": PIPELINE_STATISTICS_PRIORITY_ROUTING, -} - - -class QueueResolver(object): - def __init__(self, queue): - self.queue = queue - - def default_setting_for(self, task, setting_key): - if not isinstance(task, str): - task = task.name - - return CELERY_ROUTES[task][setting_key] - - def resolve_task_routing_key(self, task): - default_key = self.default_setting_for(task, "routing_key") - default_queue = self.default_setting_for(task, "queue") - - if default_queue not in SCALABLE_QUEUES_CONFIG or not self.queue: - return default_key - - return self.resolve_routing_key(default_key) - - def resolve_task_queue_name(self, task): - default_queue = 
self.default_setting_for(task, "queue") - - return self.resolve_queue_name(default_queue) - - def resolve_queue_name(self, default_name): - if not self.queue: - return default_name - - return "{}_{}".format(self.queue, default_name) - - def resolve_routing_key(self, default_key): - if not self.queue: - return default_key - - return "{}_{}".format(ScalableQueues.routing_key_for(self.queue), default_key) - - -USER_QUEUES = [] - -for name, queue in ScalableQueues.queues().items(): - queue_arguments = copy.copy(queue["queue_arguments"]) - queue_arguments["x-max-priority"] = PIPELINE_MAX_PRIORITY - - for config in SCALABLE_QUEUES_CONFIG.values(): - resolver = QueueResolver(name) - USER_QUEUES.append( - Queue( - resolver.resolve_queue_name(config["name"]), - default_exchange, - routing_key=resolver.resolve_routing_key(config["routing_key"]), - queue_arguments=queue_arguments, - ) - ) - -CELERY_QUEUES = [ - # user queues - *USER_QUEUES, # noqa - # keep old queue to process message left in broker, remove on next version - Queue("default", default_exchange, routing_key="default"), - Queue("pipeline", default_exchange, routing_key="pipeline_push"), - Queue("service_schedule", default_exchange, routing_key="schedule_service"), - Queue("pipeline_additional_task", default_exchange, routing_key="additional_task"), - # priority queues - Queue( - PUSH_DEFAULT_QUEUE_NAME, - default_exchange, - routing_key=PUSH_DEFAULT_ROUTING_KEY, - queue_arguments={"x-max-priority": PIPELINE_MAX_PRIORITY}, - ), - Queue( - SCHEDULE_DEFAULT_QUEUE_NAME, - default_exchange, - routing_key=SCHEDULE_DEFAULT_ROUTING_KEY, - queue_arguments={"x-max-priority": PIPELINE_MAX_PRIORITY}, - ), - Queue( - ADDITIONAL_DEFAULT_QUEUE_NAME, - default_exchange, - routing_key=ADDITIONAL_DEFAULT_ROUTING_KEY, - queue_arguments={"x-max-priority": PIPELINE_MAX_PRIORITY}, - ), - Queue( - STATISTICS_PRIORITY_QUEUE_NAME, - default_exchange, - routing_key=STATISTICS_PRIORITY_ROUTING_KEY, - queue_arguments={"x-max-priority": 
PIPELINE_MAX_PRIORITY}, - ), -] - -CELERY_DEFAULT_QUEUE = "default" -CELERY_DEFAULT_EXCHANGE = "default" -CELERY_DEFAULT_ROUTING_KEY = "default" - -CELERYBEAT_SCHEDULER = "django_celery_beat.schedulers.DatabaseScheduler" - -CELERY_ACCEPT_CONTENT = ["json", "pickle", "msgpack", "yaml"] diff --git a/lib/pipeline/component_framework/__init__.py b/lib/pipeline/component_framework/__init__.py deleted file mode 100644 index 5c691bb..0000000 --- a/lib/pipeline/component_framework/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -default_app_config = "pipeline.component_framework.apps.ComponentFrameworkConfig" diff --git a/lib/pipeline/component_framework/admin.py b/lib/pipeline/component_framework/admin.py deleted file mode 100644 index 339cc05..0000000 --- a/lib/pipeline/component_framework/admin.py +++ /dev/null @@ -1,23 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.contrib import admin - -from pipeline.component_framework import models - - -@admin.register(models.ComponentModel) -class ComponentModelAdmin(admin.ModelAdmin): - list_display = ["id", "code", "name", "status"] - search_fields = ["code", "name"] - list_filter = ["status"] diff --git a/lib/pipeline/component_framework/apps.py b/lib/pipeline/component_framework/apps.py deleted file mode 100644 index 1e93d0e..0000000 --- a/lib/pipeline/component_framework/apps.py +++ /dev/null @@ -1,72 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging -import sys - -from django.apps import AppConfig -from django.db.utils import InternalError, OperationalError, ProgrammingError - -from pipeline.conf import settings -from pipeline.component_framework import context -from pipeline.utils.register import autodiscover_collections - -logger = logging.getLogger("root") - -DJANGO_MANAGE_CMD = "manage.py" -INIT_PASS_TRIGGER = {"migrate"} - - -class ComponentFrameworkConfig(AppConfig): - name = "pipeline.component_framework" - verbose_name = "PipelineComponentFramework" - - def ready(self): - """ - @summary: 注册公共部分和当前RUN_VER下的标准插件到数据库 - @return: - """ - - if sys.argv and sys.argv[0] == DJANGO_MANAGE_CMD: - try: - command = sys.argv[1] - except IndexError: - return - else: - if command in INIT_PASS_TRIGGER: - print("ignore components init for command: {}".format(sys.argv)) - return - - for path in settings.COMPONENT_AUTO_DISCOVER_PATH: - autodiscover_collections(path) - - if context.skip_update_comp_models(): - return - - from pipeline.component_framework.models import ComponentModel - from pipeline.component_framework.library import ComponentLibrary - - try: - print("update component models") - ComponentModel.objects.all().update(status=False) - for code in ComponentLibrary.codes(): - ComponentModel.objects.filter(code=code, version__in=ComponentLibrary.versions(code)).update( - status=True - ) - print("update component models finish") - except InternalError: - # version field migration - logger.exception("update component model fail") - except (ProgrammingError, OperationalError): - # first migrate - logger.exception("update component model fail") diff --git a/lib/pipeline/component_framework/base.py b/lib/pipeline/component_framework/base.py deleted file mode 100644 index 6500416..0000000 --- a/lib/pipeline/component_framework/base.py +++ /dev/null @@ -1,94 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community 
-Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import importlib -import logging - -from django.db.utils import ProgrammingError - -from pipeline.component_framework.constants import LEGACY_PLUGINS_VERSION -from pipeline.component_framework.library import ComponentLibrary -from pipeline.component_framework.models import ComponentModel -from pipeline.component_framework import context -from pipeline.core.flow.activity import Service - -logger = logging.getLogger(__name__) - - -class ComponentMeta(type): - def __new__(cls, name, bases, attrs): - super_new = super(ComponentMeta, cls).__new__ - - # Also ensure initialization is only performed for subclasses of Model - # (excluding Model class itself). 
- parents = [b for b in bases if isinstance(b, ComponentMeta)] - if not parents: - return super_new(cls, name, bases, attrs) - - # Create the class - module_name = attrs.pop("__module__") - new_class = super_new(cls, name, bases, {"__module__": module_name}) - module = importlib.import_module(new_class.__module__) - - # Add all attributes to the class - attrs.setdefault("desc", "") - attrs.setdefault("base", "") - for obj_name, obj in list(attrs.items()): - setattr(new_class, obj_name, obj) - - # check - if not getattr(new_class, "name", None): - raise ValueError("component %s name can't be empty" % new_class.__name__) - - if not getattr(new_class, "code", None): - raise ValueError("component %s code can't be empty" % new_class.__name__) - - if not getattr(new_class, "bound_service", None) or not issubclass(new_class.bound_service, Service): - raise ValueError("component %s service can't be empty and must be subclass of Service" % new_class.__name__) - - if not getattr(new_class, "form", None): - setattr(new_class, "form", None) - - if not getattr(new_class, "output_form", None): - setattr(new_class, "output_form", None) - - if not getattr(new_class, "version", None): - setattr(new_class, "version", LEGACY_PLUGINS_VERSION) - - # category/group name - group_name = getattr(module, "__group_name__", new_class.__module__.split(".")[-1].title()) - setattr(new_class, "group_name", group_name) - new_name = "{}-{}".format(group_name, new_class.name) - - # category/group name - group_icon = getattr(module, "__group_icon__", "") - setattr(new_class, "group_icon", group_icon) - - if not getattr(module, "__register_ignore__", False): - ComponentLibrary.register_component( - component_code=new_class.code, version=new_class.version, component_cls=new_class - ) - - if context.skip_update_comp_models(): - return new_class - - try: - print("update {} component model".format(new_class.code)) - ComponentModel.objects.update_or_create( - code=new_class.code, version=new_class.version, 
defaults={"name": new_name, "status": __debug__} - ) - except Exception as e: - if not isinstance(e, ProgrammingError): - logging.exception(e) - - return new_class diff --git a/lib/pipeline/component_framework/component.py b/lib/pipeline/component_framework/component.py deleted file mode 100644 index 1707b08..0000000 --- a/lib/pipeline/component_framework/component.py +++ /dev/null @@ -1,78 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from pipeline.component_framework.base import ComponentMeta -from pipeline.core.data.base import DataObject -from pipeline.core.data.converter import get_variable -from pipeline.exceptions import ComponentDataLackException - - -class Component(object, metaclass=ComponentMeta): - def __init__(self, data_dict): - self.data_dict = data_dict - - @classmethod - def outputs_format(cls): - outputs = cls.bound_service().outputs() - outputs = [oi.as_dict() for oi in outputs] - return outputs - - @classmethod - def inputs_format(cls): - inputs = cls.bound_service().inputs() - inputs = [ii.as_dict() for ii in inputs] - return inputs - - @classmethod - def _get_item_schema(cls, type, key): - items = getattr(cls.bound_service(), type)() - for item in items: - if item.key == key: - return item - - return None - - @classmethod - def get_output_schema(cls, key): - return cls._get_item_schema(type="outputs", key=key).schema - - @classmethod - def get_input_schema(cls, key): - return cls._get_item_schema(type="inputs", key=key).schema - - @classmethod - def form_is_embedded(cls): - return getattr(cls, "embedded_form", False) - - def clean_execute_data(self, context): - """ - @summary: hook for subclass of Component to clean execute data with context - @param context: - @return: - """ - return self.data_dict - - def data_for_execution(self, context, pipeline_data): - data_dict = self.clean_execute_data(context) - inputs = {} - - for key, tag_info in list(data_dict.items()): - if tag_info is None: - raise ComponentDataLackException("Lack of inputs: %s" % key) - - inputs[key] = get_variable(key, tag_info, context, pipeline_data) - - return DataObject(inputs) - - def service(self): - return self.bound_service() diff --git a/lib/pipeline/component_framework/constant.py b/lib/pipeline/component_framework/constant.py deleted file mode 100644 index 5f9016e..0000000 --- a/lib/pipeline/component_framework/constant.py +++ /dev/null @@ -1,101 +0,0 @@ -# -*- coding: utf-8 -*- -""" 
-Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import copy - -from pipeline.core.data.expression import ConstantTemplate, deformat_constant_key -from pipeline.exceptions import ConstantNotExistException, ConstantReferenceException -from pipeline.utils.graph import Graph - - -class ConstantPool(object): - def __init__(self, pool, lazy=False): - self.raw_pool = pool - self.pool = None - - if not lazy: - self.resolve() - - def resolve(self): - if self.pool: - return - - refs = self.get_reference_info() - - nodes = list(refs.keys()) - flows = [] - for node in nodes: - for ref in refs[node]: - if ref in nodes: - flows.append([node, ref]) - graph = Graph(nodes, flows) - # circle reference check - trace = graph.get_cycle() - if trace: - raise ConstantReferenceException("Exist circle reference between constants: %s" % "->".join(trace)) - - # resolve the constants reference - pool = {} - temp_pool = copy.deepcopy(self.raw_pool) - # get those constants which are referenced only(not refer other constants) - referenced_only = ConstantPool._get_referenced_only(temp_pool) - while temp_pool: - for ref in referenced_only: - value = temp_pool[ref]["value"] - - # resolve those constants which reference the 'ref' - for key, info in list(temp_pool.items()): - maps = {deformat_constant_key(ref): value} - temp_pool[key]["value"] 
= ConstantTemplate(info["value"]).resolve_data(maps) - - pool[ref] = temp_pool[ref] - temp_pool.pop(ref) - referenced_only = ConstantPool._get_referenced_only(temp_pool) - - self.pool = pool - - @staticmethod - def _get_referenced_only(pool): - referenced_only = [] - for key, info in list(pool.items()): - reference = ConstantTemplate(info["value"]).get_reference() - formatted_reference = ["${%s}" % ref for ref in reference] - reference = [c for c in formatted_reference if c in pool] - if not reference: - referenced_only.append(key) - return referenced_only - - def get_reference_info(self, strict=True): - refs = {} - for key, info in list(self.raw_pool.items()): - reference = ConstantTemplate(info["value"]).get_reference() - formatted_reference = ["${%s}" % ref for ref in reference] - ref = [c for c in formatted_reference if not strict or c in self.raw_pool] - refs[key] = ref - return refs - - def resolve_constant(self, constant): - if not self.pool: - self.resolve() - - if constant not in self.pool: - raise ConstantNotExistException("constant %s not exist." % constant) - return self.pool[constant]["value"] - - def resolve_value(self, val): - if not self.pool: - self.resolve() - - maps = {deformat_constant_key(key): self.pool[key]["value"] for key in self.pool} - - return ConstantTemplate(val).resolve_data(maps) diff --git a/lib/pipeline/component_framework/constants.py b/lib/pipeline/component_framework/constants.py deleted file mode 100644 index afdfe24..0000000 --- a/lib/pipeline/component_framework/constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -LEGACY_PLUGINS_VERSION = "legacy" diff --git a/lib/pipeline/component_framework/context.py b/lib/pipeline/component_framework/context.py deleted file mode 100644 index 590f9af..0000000 --- a/lib/pipeline/component_framework/context.py +++ /dev/null @@ -1,28 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from pipeline.conf import settings -from pipeline.utils import env - -UPDATE_TRIGGER = "update_component_models" - - -def skip_update_comp_models(): - if settings.AUTO_UPDATE_COMPONENT_MODELS: - return False - - django_command = env.get_django_command() - if django_command is None: - return True - - return django_command != UPDATE_TRIGGER diff --git a/lib/pipeline/component_framework/library.py b/lib/pipeline/component_framework/library.py deleted file mode 100644 index 6083147..0000000 --- a/lib/pipeline/component_framework/library.py +++ /dev/null @@ -1,65 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from pipeline.component_framework.constants import LEGACY_PLUGINS_VERSION -from pipeline.exceptions import ComponentNotExistException - - -class ComponentLibrary(object): - components = {} - - def __new__(cls, *args, **kwargs): - if args: - component_code = args[0] - else: - component_code = kwargs.get("component_code", None) - version = kwargs.get("version", None) - if not component_code: - raise ValueError( - "please pass a component_code in args or kwargs: " - "ComponentLibrary('code') or ComponentLibrary(component_code='code')" - ) - return cls.get_component_class(component_code=component_code, version=version) - - @classmethod - def component_list(cls): - components = [] - for _, component_map in cls.components.items(): - components.extend(component_map.values()) - - return components - - @classmethod - def get_component_class(cls, component_code, version=None): - version = version or LEGACY_PLUGINS_VERSION - component_cls = cls.components.get(component_code, {}).get(version) - if component_cls is None: - raise ComponentNotExistException("component %s does not exist." 
% component_code) - return component_cls - - @classmethod - def get_component(cls, component_code, data_dict, version=None): - version = version or LEGACY_PLUGINS_VERSION - return cls.get_component_class(component_code=component_code, version=version)(data_dict) - - @classmethod - def register_component(cls, component_code, version, component_cls): - cls.components.setdefault(component_code, {})[version] = component_cls - - @classmethod - def codes(cls): - return cls.components.keys() - - @classmethod - def versions(cls, code): - return cls.components.get(code, {}).keys() diff --git a/lib/pipeline/component_framework/management/__init__.py b/lib/pipeline/component_framework/management/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/component_framework/management/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" diff --git a/lib/pipeline/component_framework/management/commands/__init__.py b/lib/pipeline/component_framework/management/commands/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/component_framework/management/commands/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/component_framework/management/commands/run_component.py b/lib/pipeline/component_framework/management/commands/run_component.py deleted file mode 100644 index 84e7eb7..0000000 --- a/lib/pipeline/component_framework/management/commands/run_component.py +++ /dev/null @@ -1,65 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations under the License. -""" - -import sys - -import ujson as json -from django.core.management import BaseCommand - -from pipeline.component_framework.library import ComponentLibrary -from pipeline.component_framework.runner import ComponentRunner -from pipeline.exceptions import ComponentNotExistException - - -class Command(BaseCommand): - - help = "Run the specified component" - - def add_arguments(self, parser): - parser.add_argument("code", nargs=1, type=str) - parser.add_argument("-d", dest="data", nargs="?", type=str) - parser.add_argument("-p", dest="parent_data", nargs="?", type=str) - parser.add_argument("-c", dest="callbackdata", nargs="?", type=str) - - def handle(self, *args, **options): - code = options["code"][0] - data = options["data"] - parent_data = options["parent_data"] - callbackdata = options["callbackdata"] - - try: - data = json.loads(data) if data else {} - except Exception: - sys.stdout.write("data is not a valid json.\n") - exit(1) - - try: - parent_data = json.loads(parent_data) if parent_data else {} - except Exception: - sys.stdout.write("parent_data is not a valid json.\n") - exit(1) - - try: - callbackdata = json.loads(callbackdata) if callbackdata else {} - except Exception: - sys.stdout.write("callbackdata is not a valid json.\n") - exit(1) - - try: - component_cls = ComponentLibrary.get_component_class(code) - except ComponentNotExistException: - sys.stdout.write("component [{}] does not exist.\n".format(code)) - exit(1) - - runner = ComponentRunner(component_cls) - runner.run(data, parent_data, callbackdata) diff --git a/lib/pipeline/component_framework/management/commands/update_component_models.py b/lib/pipeline/component_framework/management/commands/update_component_models.py deleted file mode 100644 index b7f1787..0000000 --- a/lib/pipeline/component_framework/management/commands/update_component_models.py +++ /dev/null @@ -1,20 +0,0 @@ -# -*- 
coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.core.management import BaseCommand - - -class Command(BaseCommand): - def handle(self, *args, **options): - # do not need to do anything, the app ready will handle model update work - print("component models update finished.") diff --git a/lib/pipeline/component_framework/migrations/0001_initial.py b/lib/pipeline/component_framework/migrations/0001_initial.py deleted file mode 100644 index 39c4f0e..0000000 --- a/lib/pipeline/component_framework/migrations/0001_initial.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import models, migrations - - -class Migration(migrations.Migration): - - dependencies = [] - - operations = [ - migrations.CreateModel( - name="ComponentModel", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), - ("code", models.CharField(unique=True, max_length=255, verbose_name="\u7ec4\u4ef6\u7f16\u7801")), - ("name", models.CharField(max_length=255, verbose_name="\u7ec4\u4ef6\u540d\u79f0")), - ("status", models.BooleanField(default=True, verbose_name="\u7ec4\u4ef6\u662f\u5426\u53ef\u7528")), - ], - options={"ordering": ["-id"], "verbose_name": "\u7ec4\u4ef6", "verbose_name_plural": "\u7ec4\u4ef6"}, - ), - ] diff --git a/lib/pipeline/component_framework/migrations/0002_delete_componentmodel.py b/lib/pipeline/component_framework/migrations/0002_delete_componentmodel.py deleted file mode 100644 index 153c86c..0000000 --- a/lib/pipeline/component_framework/migrations/0002_delete_componentmodel.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import models, migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ("component_framework", "0001_initial"), - ] - - operations = [ - migrations.DeleteModel(name="ComponentModel",), - ] diff --git a/lib/pipeline/component_framework/migrations/0003_componentmodel.py b/lib/pipeline/component_framework/migrations/0003_componentmodel.py deleted file mode 100644 index 3dbd40f..0000000 --- a/lib/pipeline/component_framework/migrations/0003_componentmodel.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" -# Generated by Django 1.11.2 on 2017-11-15 12:09 - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ("component_framework", "0002_delete_componentmodel"), - ] - - operations = [ - migrations.CreateModel( - name="ComponentModel", - fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("code", models.CharField(max_length=255, unique=True, verbose_name="\u7ec4\u4ef6\u7f16\u7801")), - ("name", models.CharField(max_length=255, verbose_name="\u7ec4\u4ef6\u540d\u79f0")), - ("status", models.BooleanField(default=True, verbose_name="\u7ec4\u4ef6\u662f\u5426\u53ef\u7528")), - ], - options={"ordering": ["-id"], "verbose_name": "\u7ec4\u4ef6", "verbose_name_plural": "\u7ec4\u4ef6"}, - ), - ] diff --git a/lib/pipeline/component_framework/migrations/0004_auto_20180413_1800.py b/lib/pipeline/component_framework/migrations/0004_auto_20180413_1800.py deleted file mode 100644 index 4a2fb04..0000000 --- a/lib/pipeline/component_framework/migrations/0004_auto_20180413_1800.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("component_framework", "0003_componentmodel"), - ] - - operations = [ - migrations.AlterModelOptions( - name="componentmodel", - options={ - "ordering": ["-id"], - "verbose_name": "\u7ec4\u4ef6 Component", - "verbose_name_plural": "\u7ec4\u4ef6 Component", - }, - ), - ] diff --git a/lib/pipeline/component_framework/migrations/0005_auto_20190723_1806.py b/lib/pipeline/component_framework/migrations/0005_auto_20190723_1806.py deleted file mode 100644 index 7fb3029..0000000 --- a/lib/pipeline/component_framework/migrations/0005_auto_20190723_1806.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from __future__ import unicode_literals - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("component_framework", "0004_auto_20180413_1800"), - ] - - operations = [ - migrations.AddField( - model_name="componentmodel", - name="version", - field=models.CharField(default=b"legacy", max_length=64, verbose_name="\u7ec4\u4ef6\u7248\u672c"), - ), - migrations.AlterField( - model_name="componentmodel", - name="code", - field=models.CharField(max_length=255, verbose_name="\u7ec4\u4ef6\u7f16\u7801"), - ), - ] diff --git a/lib/pipeline/component_framework/migrations/0006_auto_20200213_0743.py b/lib/pipeline/component_framework/migrations/0006_auto_20200213_0743.py deleted file mode 100644 index e915a6e..0000000 --- a/lib/pipeline/component_framework/migrations/0006_auto_20200213_0743.py +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.23 on 2020-02-13 07:43 -from __future__ import unicode_literals - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("component_framework", "0005_auto_20190723_1806"), - ] - - operations = [ - migrations.AlterField( - model_name="componentmodel", - name="version", - field=models.CharField(default="legacy", max_length=64, verbose_name="组件版本"), - ), - ] diff --git a/lib/pipeline/component_framework/migrations/0007_auto_20201112_2244.py b/lib/pipeline/component_framework/migrations/0007_auto_20201112_2244.py deleted file mode 100644 index 084242b..0000000 --- a/lib/pipeline/component_framework/migrations/0007_auto_20201112_2244.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" -from __future__ import unicode_literals - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("component_framework", "0006_auto_20200213_0743"), - ] - - operations = [ - migrations.AlterField( - model_name="componentmodel", - name="code", - field=models.CharField(db_index=True, max_length=255, verbose_name="组件编码"), - ), - migrations.AlterField( - model_name="componentmodel", - name="version", - field=models.CharField(db_index=True, default="legacy", max_length=64, verbose_name="组件版本"), - ), - migrations.AlterUniqueTogether(name="componentmodel", unique_together=set([("code", "version")]),), - ] diff --git a/lib/pipeline/component_framework/migrations/__init__.py b/lib/pipeline/component_framework/migrations/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/component_framework/migrations/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/component_framework/models.py b/lib/pipeline/component_framework/models.py deleted file mode 100644 index 040bd19..0000000 --- a/lib/pipeline/component_framework/models.py +++ /dev/null @@ -1,86 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from django.db import models -from django.utils.translation import ugettext_lazy as _ - -from pipeline.component_framework.constants import LEGACY_PLUGINS_VERSION -from pipeline.component_framework.library import ComponentLibrary - - -class ComponentManager(models.Manager): - def get_component_dict(self): - """ - 获得标准插件对应的dict类型 - :return: - """ - components = self.filter(status=True) - component_dict = {} - for bundle in components: - name = bundle.name.split("-") - group_name = _(name[0]) - name = _(name[1]) - component_dict[bundle.code] = "{}-{}".format(group_name, name) - return component_dict - - def get_component_dicts(self, other_component_list): - """ - :param other_component_list: 结果集 - :param index: 结果集中指标字段 - :return: - """ - components = self.filter(status=True).values("code", "version", "name") - total = components.count() - groups = [] - for comp in components: - version = comp["version"] - # 插件名国际化 - name = comp["name"].split("-") - name = "{}-{}-{}".format(_(name[0]), _(name[1]), version) - code = "{}-{}".format(comp["code"], comp["version"]) - value = 0 - for oth_com_tmp in other_component_list: - if comp["code"] == oth_com_tmp[1] and comp["version"] == oth_com_tmp[2]: - value = oth_com_tmp[0] - groups.append({"code": code, "name": name, "value": value}) - return total, groups - - -class ComponentModel(models.Model): - """ - 注册的组件 - """ - - code = models.CharField(_("组件编码"), max_length=255, db_index=True) - version = models.CharField(_("组件版本"), max_length=64, default=LEGACY_PLUGINS_VERSION, db_index=True) - name = models.CharField(_("组件名称"), max_length=255) - status = models.BooleanField(_("组件是否可用"), default=True) - - objects = ComponentManager() - - class Meta: - verbose_name = _("组件 Component") - verbose_name_plural = _("组件 Component") - ordering = ["-id"] - unique_together = (("code", "version"),) - - def __unicode__(self): - return self.name - - @property - def group_name(self): - return ComponentLibrary.get_component_class(self.code, 
self.version).group_name - - @property - def group_icon(self): - return ComponentLibrary.get_component_class(self.code, self.version).group_icon diff --git a/lib/pipeline/component_framework/runner.py b/lib/pipeline/component_framework/runner.py deleted file mode 100644 index 59e33aa..0000000 --- a/lib/pipeline/component_framework/runner.py +++ /dev/null @@ -1,106 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging -import time - -from pipeline.core.data.base import DataObject -from pipeline.utils.uniqid import uniqid - - -def get_console_logger(): - # create logger - logger = logging.getLogger("simple_example") - logger.setLevel(logging.DEBUG) - - # create console handler and set level to debug - ch = logging.StreamHandler() - ch.setLevel(logging.DEBUG) - - # create formatter - formatter = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s") - - # add formatter to ch - ch.setFormatter(formatter) - - # add ch to logger - logger.addHandler(ch) - - return logger - - -logger = get_console_logger() - - -class ComponentRunner: - def __init__(self, component_cls): - self.component_cls = component_cls - - def run(self, data, parent_data, callback_data=None): - - service = self.component_cls.bound_service() - - setattr(service, "id", uniqid()) - setattr(service, "logger", logger) - - data_object = DataObject(inputs=data) - parent_data_object = DataObject(inputs=parent_data) - - logger.info( - "Start to run component [{}] with data: {}, parent_data: {}".format( - self.component_cls.code, data_object, parent_data_object - ) - ) - - result = service.execute(data_object, parent_data_object) - - if result is False: - logger.info("Execute return [{}], stop running.".format(result)) - return - - if not service.need_schedule(): - logger.info("Execute return [{}], and component do not need schedule, finish running".format(result)) - return - - if service.interval is None: - logger.info("Start to callback component with callbackdata: {}".format(callback_data)) - result = service.schedule(data_object, parent_data_object, callback_data) - - if result is False: - logger.info("Schedule return [{}], stop running.".format(result)) - return - else: - logger.info("Schedule return [{}], finish running".format(result)) - else: - - schedue_times = 0 - - while not service.is_schedule_finished(): - - schedue_times += 1 - - logger.info( - "Schedule {} with data: {}, 
parent_data: {}".format(schedue_times, data_object, parent_data_object) - ) - - result = service.schedule(data_object, parent_data_object, None) - - if result is False: - logger.info("Schedule return [{}], stop running.".format(result)) - return - - interval = service.interval.next() - logger.info("Schedule return [{}], wait for next schedule in {}s".format(result, interval)) - time.sleep(interval) - - logger.info("Schedule finished") diff --git a/lib/pipeline/component_framework/test.py b/lib/pipeline/component_framework/test.py deleted file mode 100644 index 5b2466d..0000000 --- a/lib/pipeline/component_framework/test.py +++ /dev/null @@ -1,360 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import importlib -import sys -import traceback -from contextlib import contextmanager - -from mock import MagicMock, call, patch - -from pipeline.core.data.base import DataObject -from pipeline.core.flow.io import SimpleItemSchema, ArrayItemSchema, ObjectItemSchema, ItemSchema -from pipeline.utils.uniqid import uniqid - - -@contextmanager -def patch_context(patchers): - for patcher in patchers: - patcher.start() - - yield - - for patcher in patchers: - patcher.stop() - - -class ComponentTestMixin(object): - def component_cls(self): - raise NotImplementedError() - - def cases(self): - raise NotImplementedError() - - def input_output_format_valid(self): - component = self._component_cls({}) - bound_service = component.service() - inputs_format = bound_service.inputs() - self._format_valid(inputs_format, ["name", "key", "type", "schema", "required"]) - outputs_format = bound_service.outputs() - self._format_valid(outputs_format, ["name", "key", "type", "schema"]) - - @property - def _cases(self): - return self.cases() - - @property - def _component_cls(self): - return self.component_cls() - - @property - def _component_cls_name(self): - return self._component_cls.__name__ - - @property - def _failed_cases(self): - return getattr(self, "__failed_cases", None) - - def _format_valid(self, component_format, format_keys): - assert isinstance(component_format, list) - for item in component_format: - assert set(item.as_dict().keys()) == set( - format_keys - ), "item {} is expected to contain attributes {} but {} obtained".format( - item.key, str(format_keys), str(item.as_dict().keys()) - ) - if item.schema is not None: - assert item.type == item.schema.type, "type of {} is expected to be {} but {} obtained".format( - item.key, item.schema.type, item.type - ) - self._item_schema_valid(item.schema) - - def _item_schema_valid(self, item_schema): - common_keys = {"type", "description", "enum"} - assert common_keys.issubset( - set(item_schema.as_dict().keys()) - ), 
"ItemSchema should contain attributes type, description and enum" - - if isinstance(item_schema, SimpleItemSchema): - return - if isinstance(item_schema, ArrayItemSchema): - assert hasattr(item_schema, "item_schema") and isinstance( - item_schema.item_schema, ItemSchema - ), "ArrayItemSchema should contain attribute item_schema" - self._item_schema_valid(item_schema.item_schema) - return - if isinstance(item_schema, ObjectItemSchema): - assert hasattr(item_schema, "property_schemas") and isinstance( - item_schema.property_schemas, dict - ), "ObjectItemSchema should contain attribute property_schemas with type dict" - for child_item_schema in item_schema.property_schemas.values(): - self._item_schema_valid(child_item_schema) - return - raise AssertionError("item_schema type error: {}".format(item_schema.description)) - - def _format_failure_message(self, no, name, msg): - return "[{component_cls} case {no}] - [{name}] fail: {msg}".format( - component_cls=self._component_cls_name, no=no + 1, name=name, msg=msg - ) - - def _do_case_assert(self, service, method, assertion, no, name, args=None, kwargs=None): - - args = args or [service] - kwargs = kwargs or {} - - data = kwargs.get("data") or args[0] - - result = getattr(service, method)(*args, **kwargs) - - assert_success = result in [None, True] # return none will consider as success - do_continue = not assert_success - - assert_method = "assertTrue" if assert_success else "assertFalse" - - getattr(self, assert_method)( - assertion.success, - msg=self._format_failure_message( - no=no, - name=name, - msg="{method} success assertion failed, {method} execute success".format(method=method), - ), - ) - - self.assertDictEqual( - data.outputs, - assertion.outputs, - msg=self._format_failure_message( - no=no, - name=name, - msg="{method} outputs assertion failed,\nexcept: {e}\nactual: {a}".format( - method=method, e=assertion.outputs, a=data.outputs - ), - ), - ) - - return do_continue - - def _do_call_assertion(self, name, 
no, assertion): - try: - assertion.do_assert() - except AssertionError as e: - self.assertTrue( - False, - msg=self._format_failure_message( - no=no, name=name, msg="{func} call assert failed: {e}".format(func=assertion.func, e=e) - ), - ) - - def _case_pass(self, case): - sys.stdout.write( - "\n[√] <{component}> - [{case_name}]\n".format( - component=self._component_cls_name, - case_name=case.name, - ) - ) - - def _case_fail(self, case): - sys.stdout.write( - "\n[×] <{component}> - [{case_name}]\n".format( - component=self._component_cls_name, - case_name=case.name, - ) - ) - - if not hasattr(self, "__failed_cases"): - setattr(self, "__failed_cases", []) - - self._failed_cases.append(case) - - def _test_fail(self): - raise AssertionError("{} cases fail".format([case.name for case in self._failed_cases])) - - def test_component(self): - self.input_output_format_valid() - - component = self._component_cls({}) - - for no, case in enumerate(self._cases): - try: - - patchers = [patcher.mock_patcher() for patcher in case.patchers] - - with patch_context(patchers): - - bound_service = component.service() - - setattr(bound_service, "root_pipeline_id", case.root_pipeline_id) - setattr(bound_service, "id", case.service_id) - setattr(bound_service, "logger", MagicMock()) - - data = DataObject(inputs=case.inputs) - parent_data = DataObject(inputs=case.parent_data) - - # execute result check - do_continue = self._do_case_assert( - service=bound_service, - method="execute", - args=(data, parent_data), - assertion=case.execute_assertion, - no=no, - name=case.name, - ) - - for call_assertion in case.execute_call_assertion: - self._do_call_assertion(name=case.name, no=no, assertion=call_assertion) - - if do_continue: - self._case_pass(case) - continue - - if bound_service.need_schedule(): - - if bound_service.interval is None: - # callback case - self._do_case_assert( - service=bound_service, - method="schedule", - args=(data, parent_data, case.schedule_assertion.callback_data), - 
assertion=case.schedule_assertion, - no=no, - name=case.name, - ) - - else: - # schedule case - assertions = case.schedule_assertion - assertions = assertions if isinstance(assertions, list) else [assertions] - - for assertion in assertions: - do_continue = self._do_case_assert( - service=bound_service, - method="schedule", - args=(data, parent_data), - assertion=assertion, - no=no, - name=case.name, - ) - - self.assertEqual( - assertion.schedule_finished, - bound_service.is_schedule_finished(), - msg=self._format_failure_message( - no=no, - name=case.name, - msg="schedule_finished assertion failed:" - "\nexpected: {expected}\nactual: {actual}".format( - expected=assertion.schedule_finished, # noqa - actual=bound_service.is_schedule_finished(), - ), - ), - ) # noqa - - if do_continue: - break - - for call_assertion in case.schedule_call_assertion: - self._do_call_assertion(name=case.name, no=no, assertion=call_assertion) - - self._case_pass(case) - - except Exception: - self._case_fail(case) - sys.stdout.write("{}\n".format(traceback.format_exc())) - - if self._failed_cases: - self._test_fail() - - -class ComponentTestCase(object): - def __init__( - self, - inputs, - parent_data, - execute_assertion, - schedule_assertion, - name="", - patchers=None, - execute_call_assertion=None, - schedule_call_assertion=None, - service_id=None, - root_pipeline_id=None, - ): - self.inputs = inputs - self.parent_data = parent_data - self.execute_assertion = execute_assertion - self.execute_call_assertion = execute_call_assertion or [] - self.schedule_call_assertion = schedule_call_assertion or [] - self.schedule_assertion = schedule_assertion - self.name = name - self.patchers = patchers or [] - self.service_id = service_id or uniqid() - self.root_pipeline_id = root_pipeline_id or uniqid() - - -class CallAssertion(object): - def __init__(self, func, calls, any_order=False): - self.func = func - self.calls = calls - self.any_order = any_order - - def do_assert(self): - if not 
callable(self.func): - module_and_func = self.func.rsplit(".", 1) - mod_path = module_and_func[0] - func_name = module_and_func[1] - mod = importlib.import_module(mod_path) - func = getattr(mod, func_name) - else: - func = self.func - - if not self.calls: - func.assert_not_called() - else: - assert func.call_count == len( - self.calls - ), "Expected 'mock' have been called {expect} times. " "Called {actual} times".format( - expect=len(self.calls), actual=func.call_count - ) - func.assert_has_calls(calls=self.calls, any_order=self.any_order) - - func.reset_mock() - - -class Assertion(object): - def __init__(self, success, outputs): - self.success = success - self.outputs = outputs - - -class ExecuteAssertion(Assertion): - pass - - -class ScheduleAssertion(Assertion): - def __init__(self, callback_data=None, schedule_finished=False, *args, **kwargs): - self.callback_data = callback_data - self.schedule_finished = schedule_finished - super(ScheduleAssertion, self).__init__(*args, **kwargs) - - -class Patcher(object): - def __init__(self, target, return_value=None, side_effect=None): - self.target = target - self.return_value = return_value - self.side_effect = side_effect - - def mock_patcher(self): - return patch(target=self.target, new=MagicMock(return_value=self.return_value, side_effect=self.side_effect)) - - -Call = call diff --git a/lib/pipeline/components/__init__.py b/lib/pipeline/components/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/components/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/components/collections/__init__.py b/lib/pipeline/components/collections/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/components/collections/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/components/collections/examples.py b/lib/pipeline/components/collections/examples.py deleted file mode 100644 index 564612b..0000000 --- a/lib/pipeline/components/collections/examples.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging - -from pipeline.component_framework.component import Component -from pipeline.conf import settings -from pipeline.core.flow.activity import Service - -logger = logging.getLogger("celery") - -__register_ignore__ = not settings.ENABLE_EXAMPLE_COMPONENTS - - -class SimpleExampleService(Service): - def execute(self, data, parent_data): - return True - - def outputs_format(self): - return [] - - -class SimpleExampleComponent(Component): - name = "example component" - code = "example_component" - bound_service = SimpleExampleService - - -class PipeExampleService(Service): - def execute(self, data, parent_data): - for key, val in list(data.inputs.items()): - data.set_outputs(key, val) - return True - - def outputs_format(self): - return [] - - -class PipeExampleComponent(Component): - name = "pipe example component" - code = "pipe_example_component" - bound_service = PipeExampleService diff --git a/lib/pipeline/conf/__init__.py b/lib/pipeline/conf/__init__.py deleted file mode 100644 index 3ba02d5..0000000 --- a/lib/pipeline/conf/__init__.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.conf import settings as django_settings - -from pipeline.conf import default_settings - - -class PipelineSettings(object): - def __getattr__(self, key): - if hasattr(django_settings, key): - return getattr(django_settings, key) - - if hasattr(default_settings, key): - return getattr(default_settings, key) - - raise AttributeError("Settings object has no attribute %s" % key) - - -settings = PipelineSettings() diff --git a/lib/pipeline/conf/default_settings.py b/lib/pipeline/conf/default_settings.py deleted file mode 100644 index 0cdc651..0000000 --- a/lib/pipeline/conf/default_settings.py +++ /dev/null @@ -1,97 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from django.conf import settings - -# pipeline template context module, to use this, you need -# 1) config PIPELINE_TEMPLATE_CONTEXT in your django settings, such as -# PIPELINE_TEMPLATE_CONTEXT = 'home_application.utils.get_template_context' -# 2) define get_template_context function in your app, which show accept one arg, such as -# def get_template_context(obj): -# context = { -# 'biz_cc_id': '1', -# 'biz_cc_name': 'test1', -# } -# if obj is not None: -# context.update({'template': '1'}) -# return context - -PIPELINE_TEMPLATE_CONTEXT = getattr(settings, "PIPELINE_TEMPLATE_CONTEXT", "") -PIPELINE_INSTANCE_CONTEXT = getattr(settings, "PIPELINE_INSTANCE_CONTEXT", "") - -PIPELINE_ENGINE_ADAPTER_API = getattr( - settings, "PIPELINE_ENGINE_ADAPTER_API", "pipeline.service.pipeline_engine_adapter.adapter_api", -) - -PIPELINE_DATA_BACKEND = getattr( - settings, "PIPELINE_DATA_BACKEND", "pipeline.engine.core.data.mysql_backend.MySQLDataBackend", -) -PIPELINE_DATA_CANDIDATE_BACKEND = getattr(settings, "PIPELINE_DATA_CANDIDATE_BACKEND", None) -PIPELINE_DATA_BACKEND_AUTO_EXPIRE = getattr(settings, "PIPELINE_DATA_BACKEND_AUTO_EXPIRE", False) -PIPELINE_DATA_BACKEND_AUTO_EXPIRE_SECONDS = int( - getattr(settings, "PIPELINE_DATA_BACKEND_AUTO_EXPIRE_SECONDS", 60 * 60 * 24) -) - -PIPELINE_END_HANDLER = getattr( - settings, "PIPELINE_END_HANDLER", "pipeline.engine.signals.handlers.pipeline_end_handler", -) -PIPELINE_WORKER_STATUS_CACHE_EXPIRES = getattr(settings, "PIPELINE_WORKER_STATUS_CACHE_EXPIRES", 30) -PIPELINE_RERUN_MAX_TIMES = getattr(settings, "PIPELINE_RERUN_MAX_TIMES", 0) -PIPELINE_RERUN_INDEX_OFFSET = getattr(settings, "PIPELINE_RERUN_INDEX_OFFSET", -1) - -COMPONENT_AUTO_DISCOVER_PATH = [ - "components.collections", -] - -COMPONENT_AUTO_DISCOVER_PATH += getattr(settings, "COMPONENT_PATH", []) - -AUTO_UPDATE_COMPONENT_MODELS = getattr(settings, "AUTO_UPDATE_COMPONENT_MODELS", True) - -VARIABLE_AUTO_DISCOVER_PATH = [ - "variables.collections", -] - 
-VARIABLE_AUTO_DISCOVER_PATH += getattr(settings, "VARIABLE_PATH", []) - -AUTO_UPDATE_VARIABLE_MODELS = getattr(settings, "AUTO_UPDATE_VARIABLE_MODELS", True) - -PIPELINE_PARSER_CLASS = getattr(settings, "PIPELINE_PARSER_CLASS", "pipeline.parser.pipeline_parser.PipelineParser") - -ENABLE_EXAMPLE_COMPONENTS = getattr(settings, "ENABLE_EXAMPLE_COMPONENTS", True) - -UUID_DIGIT_STARTS_SENSITIVE = getattr(settings, "UUID_DIGIT_STARTS_SENSITIVE", False) - -PIPELINE_LOG_LEVEL = getattr(settings, "PIPELINE_LOG_LEVEL", "INFO") - -# 远程插件包源默认配置 -EXTERNAL_PLUGINS_SOURCE_PROXY = getattr(settings, "EXTERNAL_PLUGINS_SOURCE_PROXY", None) -EXTERNAL_PLUGINS_SOURCE_SECURE_RESTRICT = getattr(settings, "EXTERNAL_PLUGINS_SOURCE_SECURE_RESTRICT", True) - -# 僵尸进程扫描配置 -ENGINE_ZOMBIE_PROCESS_DOCTORS = getattr(settings, "ENGINE_ZOMBIE_PROCESS_DOCTORS", None) -ENGINE_ZOMBIE_PROCESS_HEAL_CRON = getattr(settings, "ENGINE_ZOMBIE_PROCESS_HEAL_CRON", {"minute": "*/10"}) - -# 过期任务运行时清理配置 -EXPIRED_TASK_CLEAN = getattr(settings, "EXPIRED_TASK_CLEAN", False) -EXPIRED_TASK_CLEAN_CRON = getattr(settings, "EXPIRED_TASK_CLEAN_CRON", {"minute": "37", "hour": "*"}) -EXPIRED_TASK_CLEAN_NUM_LIMIT = getattr(settings, "EXPIRED_TASK_CLEAN_NUM_LIMIT", 100) -TASK_EXPIRED_MONTH = getattr(settings, "TASK_EXPIRED_MONTH", 6) - -# MAKO sandbox config -MAKO_SANDBOX_SHIELD_WORDS = getattr(settings, "MAKO_SANDBOX_SHIELD_WORDS", []) -MAKO_SANDBOX_IMPORT_MODULES = getattr(settings, "MAKO_SANDBOX_IMPORT_MODULES", {}) -MAKO_SAFETY_CHECK = getattr(settings, "MAKO_SAFETY_CHECK", True) - -# 开发者自定义插件和变量异常类 -PLUGIN_SPECIFIC_EXCEPTIONS = getattr(settings, "PLUGIN_SPECIFIC_EXCEPTIONS", ()) -VARIABLE_SPECIFIC_EXCEPTIONS = getattr(settings, "VARIABLE_SPECIFIC_EXCEPTIONS", ()) diff --git a/lib/pipeline/constants.py b/lib/pipeline/constants.py deleted file mode 100644 index 9fb3805..0000000 --- a/lib/pipeline/constants.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source 
community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -PIPELINE_DEFAULT_PRIORITY = 100 -PIPELINE_MIN_PRIORITY = 0 -PIPELINE_MAX_PRIORITY = 255 diff --git a/lib/pipeline/contrib/__init__.py b/lib/pipeline/contrib/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/contrib/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" diff --git a/lib/pipeline/contrib/external_plugins/__init__.py b/lib/pipeline/contrib/external_plugins/__init__.py deleted file mode 100644 index 7acd738..0000000 --- a/lib/pipeline/contrib/external_plugins/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -default_app_config = "pipeline.contrib.external_plugins.apps.ExternalPluginsConfig" diff --git a/lib/pipeline/contrib/external_plugins/admin.py b/lib/pipeline/contrib/external_plugins/admin.py deleted file mode 100644 index 4edcab0..0000000 --- a/lib/pipeline/contrib/external_plugins/admin.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.contrib import admin - -from pipeline.contrib.external_plugins.models import FileSystemSource, GitRepoSource, S3Source -from pipeline.contrib.external_plugins.models.forms import JsonFieldModelForm - -# Register your models here. - - -@admin.register(GitRepoSource) -class GitRepoSourceAdmin(admin.ModelAdmin): - form = JsonFieldModelForm - list_display = ["name", "from_config", "repo_raw_address", "branch"] - search_fields = ["name", "branch", "repo_raw_address"] - - -@admin.register(S3Source) -class S3SourceAdmin(admin.ModelAdmin): - form = JsonFieldModelForm - list_display = ["name", "from_config", "service_address", "bucket", "source_dir"] - search_fields = ["name", "bucket", "service_address"] - - -@admin.register(FileSystemSource) -class FileSystemSourceAdmin(admin.ModelAdmin): - form = JsonFieldModelForm - list_display = ["name", "from_config", "path"] - search_fields = ["name", "path"] diff --git a/lib/pipeline/contrib/external_plugins/apps.py b/lib/pipeline/contrib/external_plugins/apps.py deleted file mode 100644 index cb420c6..0000000 --- a/lib/pipeline/contrib/external_plugins/apps.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging -import sys -import traceback - -from django.apps import AppConfig -from django.conf import settings -from django.db.utils import ProgrammingError - -from pipeline.utils import env - -logger = logging.getLogger("root") - -DJANGO_MANAGE_CMD = "manage.py" -DEFAULT_TRIGGERS = {"runserver", "celery", "worker", "uwsgi", "shell", "update_component_models"} - - -class ExternalPluginsConfig(AppConfig): - name = "pipeline.contrib.external_plugins" - label = "pipeline_external_plugins" - verbose_name = "PipelineExternalPlugins" - - def ready(self): - from pipeline.contrib.external_plugins import loader # noqa - from pipeline.contrib.external_plugins.models import ExternalPackageSource # noqa - - # load external components when start command in trigger list - if self.should_load_external_module(): - try: - logger.info("Start to update package source from config file...") - ExternalPackageSource.update_package_source_from_config( - getattr(settings, "COMPONENTS_PACKAGE_SOURCES", {}) - ) - except ProgrammingError: - logger.warning( - "update package source failed, maybe first migration? 
" - "exception: {traceback}".format(traceback=traceback.format_exc()) - ) - # first migrate - return - - logger.info("Start to load external modules...") - - loader.load_external_modules() - - @staticmethod - def should_load_external_module(): - django_command = env.get_django_command() - if django_command is None: - print("app is not start with django manage command, current argv: {argv}".format(argv=sys.argv)) - return True - - triggers = getattr(settings, "EXTERNAL_COMPONENTS_LOAD_TRIGGER", DEFAULT_TRIGGERS) - print("should_load_external_module: {}".format(django_command in triggers)) - return django_command in triggers diff --git a/lib/pipeline/contrib/external_plugins/exceptions.py b/lib/pipeline/contrib/external_plugins/exceptions.py deleted file mode 100644 index 56ce1d2..0000000 --- a/lib/pipeline/contrib/external_plugins/exceptions.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -class InvalidOperationException(Exception): - pass diff --git a/lib/pipeline/contrib/external_plugins/loader.py b/lib/pipeline/contrib/external_plugins/loader.py deleted file mode 100644 index 52cd103..0000000 --- a/lib/pipeline/contrib/external_plugins/loader.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import importlib -import logging -import traceback - -from pipeline.contrib.external_plugins.models import source_cls_factory -from pipeline.contrib.external_plugins.utils.importer import importer_context - -logger = logging.getLogger("root") - - -def load_external_modules(): - for source_type, source_model_cls in list(source_cls_factory.items()): - # get all external source - sources = source_model_cls.objects.all() - - # get importer for source - for source in sources: - _import_modules_in_source(source) - - -def _import_modules_in_source(source): - try: - importer = source.importer() - - with importer_context(importer): - for mod in source.modules: - importlib.import_module(mod) - except Exception: - logger.error("An error occurred when loading {{{}}}: {}".format(source.name, traceback.format_exc())) diff --git a/lib/pipeline/contrib/external_plugins/migrations/0001_initial.py b/lib/pipeline/contrib/external_plugins/migrations/0001_initial.py deleted file mode 100644 index c420b28..0000000 --- a/lib/pipeline/contrib/external_plugins/migrations/0001_initial.py +++ /dev/null @@ -1,96 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models -import pipeline.contrib.external_plugins.models.fields - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [] - - operations = [ - migrations.CreateModel( - name="FileSystemSource", - fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("name", models.CharField(max_length=128, unique=True, verbose_name="\u5305\u6e90\u540d")), - ( - "from_config", - models.BooleanField( - default=False, - verbose_name="\u662f\u5426\u662f\u4ece\u914d\u7f6e\u6587\u4ef6\u4e2d\u8bfb\u53d6\u7684", - ), - ), - ( - "packages", - pipeline.contrib.external_plugins.models.fields.JSONTextField( - verbose_name="\u6a21\u5757\u914d\u7f6e" - ), - ), - ("path", models.TextField(verbose_name="\u6587\u4ef6\u7cfb\u7edf\u8def\u5f84")), - ], - options={"abstract": False}, - ), - migrations.CreateModel( - name="GitRepoSource", - fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("name", models.CharField(max_length=128, unique=True, verbose_name="\u5305\u6e90\u540d")), - ( - "from_config", - models.BooleanField( - default=False, - verbose_name="\u662f\u5426\u662f\u4ece\u914d\u7f6e\u6587\u4ef6\u4e2d\u8bfb\u53d6\u7684", - ), - ), - ( - "packages", - pipeline.contrib.external_plugins.models.fields.JSONTextField( - verbose_name="\u6a21\u5757\u914d\u7f6e" - ), - ), - ("repo_raw_address", models.TextField(verbose_name="\u6587\u4ef6\u6258\u7ba1\u4ed3\u5e93\u94fe\u63a5")), - ("branch", models.CharField(max_length=128, verbose_name="\u5206\u652f\u540d")), - ], - options={"abstract": False}, - ), - migrations.CreateModel( - name="S3Source", - fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("name", models.CharField(max_length=128, unique=True, verbose_name="\u5305\u6e90\u540d")), - ( - "from_config", - models.BooleanField( - default=False, - 
verbose_name="\u662f\u5426\u662f\u4ece\u914d\u7f6e\u6587\u4ef6\u4e2d\u8bfb\u53d6\u7684", - ), - ), - ( - "packages", - pipeline.contrib.external_plugins.models.fields.JSONTextField( - verbose_name="\u6a21\u5757\u914d\u7f6e" - ), - ), - ("service_address", models.TextField(verbose_name="\u5bf9\u8c61\u5b58\u50a8\u670d\u52a1\u5730\u5740")), - ("bucket", models.TextField(verbose_name="bucket \u540d")), - ("access_key", models.TextField(verbose_name="access key")), - ("secret_key", models.TextField(verbose_name="secret key")), - ], - options={"abstract": False}, - ), - ] diff --git a/lib/pipeline/contrib/external_plugins/migrations/0002_s3source_source_dir.py b/lib/pipeline/contrib/external_plugins/migrations/0002_s3source_source_dir.py deleted file mode 100644 index 7367e19..0000000 --- a/lib/pipeline/contrib/external_plugins/migrations/0002_s3source_source_dir.py +++ /dev/null @@ -1,18 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.29 on 2020-11-09 11:01 -from __future__ import unicode_literals - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline_external_plugins", "0001_initial"), - ] - - operations = [ - migrations.AddField( - model_name="s3source", name="source_dir", field=models.TextField(default="", verbose_name="源目录名"), - ), - ] diff --git a/lib/pipeline/contrib/external_plugins/migrations/__init__.py b/lib/pipeline/contrib/external_plugins/migrations/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/contrib/external_plugins/migrations/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/contrib/external_plugins/models/__init__.py b/lib/pipeline/contrib/external_plugins/models/__init__.py deleted file mode 100644 index 8a81d52..0000000 --- a/lib/pipeline/contrib/external_plugins/models/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from pipeline.contrib.external_plugins.models.base import source_cls_factory # noqa -from pipeline.contrib.external_plugins.models.source import * # noqa diff --git a/lib/pipeline/contrib/external_plugins/models/base.py b/lib/pipeline/contrib/external_plugins/models/base.py deleted file mode 100644 index 755fb8f..0000000 --- a/lib/pipeline/contrib/external_plugins/models/base.py +++ /dev/null @@ -1,147 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. 
-Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging -import sys -from abc import abstractmethod -from copy import deepcopy - -from django.db import IntegrityError, models -from django.utils.translation import ugettext_lazy as _ - -from pipeline.component_framework.library import ComponentLibrary -from pipeline.contrib.external_plugins import exceptions -from pipeline.contrib.external_plugins.models.fields import JSONTextField - -GIT = "git" -S3 = "s3" -FILE_SYSTEM = "fs" -logger = logging.getLogger("root") -source_cls_factory = {} - - -def package_source(cls): - source_cls_factory[cls.type()] = cls - return cls - - -class SourceManager(models.Manager): - def create_source(self, name, packages, from_config, **kwargs): - create_kwargs = deepcopy(kwargs) - create_kwargs.update({"name": name, "packages": packages, "from_config": from_config}) - return self.create(**create_kwargs) - - def remove_source(self, source_id): - source = self.get(id=source_id) - - if source.from_config: - raise exceptions.InvalidOperationException("Can not remove source create from config") - - source.delete() - - def update_source_from_config(self, configs): - - sources_from_config = self.filter(from_config=True).all() - existing_source_names = {source.name for source in sources_from_config} - source_name_in_config = {config["name"] for config in configs} - - invalid_source_names = existing_source_names - source_name_in_config - - # remove invalid 
source - self.filter(name__in=invalid_source_names).delete() - - # update and create source - for config in configs: - defaults = deepcopy(config["details"]) - defaults["packages"] = config["packages"] - - try: - self.update_or_create(name=config["name"], from_config=True, defaults=defaults) - except IntegrityError: - raise exceptions.InvalidOperationException( - 'There is a external source named "{source_name}" but not create from config, ' - "can not do source update operation".format(source_name=config["name"]) - ) - - -class ExternalPackageSource(models.Model): - name = models.CharField(_("包源名"), max_length=128, unique=True) - from_config = models.BooleanField(_("是否是从配置文件中读取的"), default=False) - packages = JSONTextField(_("模块配置")) - - objects = SourceManager() - - class Meta: - abstract = True - - @staticmethod - @abstractmethod - def type(): - raise NotImplementedError() - - @abstractmethod - def importer(self): - raise NotImplementedError() - - @abstractmethod - def details(self): - raise NotImplementedError() - - @property - def imported_plugins(self): - plugins = [] - try: - importer = self.importer() - except ValueError as e: - logger.exception("ExternalPackageSource[name={}] call importer error: {}".format(self.name, e)) - return plugins - for component in ComponentLibrary.component_list(): - component_importer = getattr(sys.modules[component.__module__], "__loader__", None) - if isinstance(component_importer, type(importer)) and component_importer.name == self.name: - plugins.append( - { - "code": component.code, - "name": component.name, - "group_name": component.group_name, - "class_name": component.__name__, - "module": component.__module__, - } - ) - return plugins - - @property - def modules(self): - modules = [] - - for package_info in list(self.packages.values()): - modules.extend(package_info["modules"]) - - return modules - - @staticmethod - def update_package_source_from_config(source_configs): - classified_config = {source_type: [] for 
source_type in list(source_cls_factory.keys())} - - for config in deepcopy(source_configs): - classified_config.setdefault(config.pop("type"), []).append(config) - - for source_type, configs in list(classified_config.items()): - try: - source_model_cls = source_cls_factory[source_type] - except KeyError: - raise KeyError("Unsupported external source type: %s" % source_type) - source_model_cls.objects.update_source_from_config(configs=configs) - - @staticmethod - def package_source_types(): - return list(source_cls_factory.keys()) diff --git a/lib/pipeline/contrib/external_plugins/models/fields.py b/lib/pipeline/contrib/external_plugins/models/fields.py deleted file mode 100644 index e756898..0000000 --- a/lib/pipeline/contrib/external_plugins/models/fields.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import ujson as json -from django.db import models - - -class JSONTextField(models.TextField): - def __init__(self, *args, **kwargs): - super(JSONTextField, self).__init__(*args, **kwargs) - - def get_prep_value(self, value): - return json.dumps(value) - - def to_python(self, value): - value = super(JSONTextField, self).to_python(value) - return json.loads(value) - - def from_db_value(self, value, expression, connection, context=None): - return self.to_python(value) diff --git a/lib/pipeline/contrib/external_plugins/models/forms.py b/lib/pipeline/contrib/external_plugins/models/forms.py deleted file mode 100644 index 99ced3f..0000000 --- a/lib/pipeline/contrib/external_plugins/models/forms.py +++ /dev/null @@ -1,27 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import ujson as json -from django import forms - -from pipeline.contrib.external_plugins.models.fields import JSONTextField - - -class JsonFieldModelForm(forms.ModelForm): - def __init__(self, *args, **kwargs): - super(JsonFieldModelForm, self).__init__(*args, **kwargs) - # for edit in django admin web - all_fields = self.instance.__class__._meta.get_fields() - for field in all_fields: - if isinstance(field, JSONTextField): - self.initial[field.name] = json.dumps(getattr(self.instance, field.name)) diff --git a/lib/pipeline/contrib/external_plugins/models/source.py b/lib/pipeline/contrib/external_plugins/models/source.py deleted file mode 100644 index 5082a66..0000000 --- a/lib/pipeline/contrib/external_plugins/models/source.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from django.db import models -from django.utils.translation import ugettext_lazy as _ - -from pipeline.conf import settings -from pipeline.contrib.external_plugins.models.base import FILE_SYSTEM, GIT, S3, ExternalPackageSource, package_source -from pipeline.contrib.external_plugins.utils.importer import FSModuleImporter, GitRepoModuleImporter, S3ModuleImporter - - -@package_source -class GitRepoSource(ExternalPackageSource): - repo_raw_address = models.TextField(_("文件托管仓库链接")) - branch = models.CharField(_("分支名"), max_length=128) - - @staticmethod - def type(): - return GIT - - def importer(self): - return GitRepoModuleImporter( - name=self.name, - repo_raw_url=self.repo_raw_address, - branch=self.branch, - modules=list(self.packages.keys()), - proxy=settings.EXTERNAL_PLUGINS_SOURCE_PROXY, - secure_only=settings.EXTERNAL_PLUGINS_SOURCE_SECURE_RESTRICT, - ) - - def details(self): - return {"repo_raw_address": self.repo_raw_address, "branch": self.branch} - - -@package_source -class S3Source(ExternalPackageSource): - service_address = models.TextField(_("对象存储服务地址")) - bucket = models.TextField(_("bucket 名")) - source_dir = models.TextField(_("源目录名"), default="") - access_key = models.TextField(_("access key")) - secret_key = models.TextField(_("secret key")) - - @staticmethod - def type(): - return S3 - - def importer(self): - return S3ModuleImporter( - name=self.name, - modules=list(self.packages.keys()), - service_address=self.service_address, - bucket=self.bucket, - source_dir=self.source_dir, - access_key=self.access_key, - secret_key=self.secret_key, - secure_only=settings.EXTERNAL_PLUGINS_SOURCE_SECURE_RESTRICT, - ) - - def details(self): - return { - "service_address": self.service_address, - "bucket": self.bucket, - "source_dir": self.source_dir, - "access_key": self.access_key, - "secret_key": self.secret_key, - } - - -@package_source -class FileSystemSource(ExternalPackageSource): - path = models.TextField(_("文件系统路径")) - - @staticmethod - def 
type(): - return FILE_SYSTEM - - def importer(self): - return FSModuleImporter(name=self.name, modules=list(self.packages.keys()), path=self.path) - - def details(self): - return {"path": self.path} diff --git a/lib/pipeline/contrib/external_plugins/utils/__init__.py b/lib/pipeline/contrib/external_plugins/utils/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/contrib/external_plugins/utils/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/contrib/external_plugins/utils/importer/__init__.py b/lib/pipeline/contrib/external_plugins/utils/importer/__init__.py deleted file mode 100644 index fd3bfda..0000000 --- a/lib/pipeline/contrib/external_plugins/utils/importer/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from pipeline.contrib.external_plugins.utils.importer.utils import importer_context # noqa -from pipeline.contrib.external_plugins.utils.importer.git import GitRepoModuleImporter # noqa -from pipeline.contrib.external_plugins.utils.importer.s3 import S3ModuleImporter # noqa -from pipeline.contrib.external_plugins.utils.importer.fs import FSModuleImporter # noqa diff --git a/lib/pipeline/contrib/external_plugins/utils/importer/base.py b/lib/pipeline/contrib/external_plugins/utils/importer/base.py deleted file mode 100644 index 772e31f..0000000 --- a/lib/pipeline/contrib/external_plugins/utils/importer/base.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import imp -import logging -import sys -import traceback -from abc import ABCMeta, abstractmethod -from contextlib import contextmanager - -from pipeline.contrib.external_plugins.utils import requirement - -logger = logging.getLogger("root") - - -@contextmanager -def hook_sandbox(hook, fullname): - hook_name = hook.__func__.__name__ - try: - logger.info("Execute {hook_name} for {module}".format(module=fullname, hook_name=hook_name)) - yield - except Exception: - logger.error( - "{module} {hook_name} raise exception: {traceback}".format( - module=fullname, hook_name=hook_name, traceback=traceback.format_exc() - ) - ) - - -class NonstandardModuleImporter(object, metaclass=ABCMeta): - def __init__(self, modules, name=None): - self.name = name - self.modules = modules - - def find_module(self, fullname, path=None): - logger.info("=============FINDER: {cls}".format(cls=self.__class__.__name__)) - logger.info("Try to find module: {module} in path: {path}".format(module=fullname, path=path)) - - logger.info("Check if in declared nonstandard modules: {modules}".format(modules=self.modules)) - root_parent = fullname.split(".")[0] - if root_parent not in self.modules: - logger.info("Root module({module}) are not find in nonstandard modules".format(module=root_parent)) - return None - - logger.info("Check if is built-in module") - try: - loader = imp.find_module(fullname, path) - if loader: - logger.info("Found {module} locally".format(module=fullname)) - return None - except ImportError: - pass - - logger.info("Checking if is name repetition") - if fullname.split(".").count(fullname.split(".")[-1]) > 1: - logger.info("Found {module} locally".format(module=fullname)) - return None - - with hook_sandbox(fullname=fullname, hook=self.accept_find_module_request_hook): - self.accept_find_module_request_hook(fullname=fullname, path=path) - - return self - - def load_module(self, fullname): - try: - imp.acquire_lock() - - logger.info("=============LOADER: 
{cls}".format(cls=self.__class__.__name__)) - logger.info("Try to load module: {module}".format(module=fullname)) - - if fullname in sys.modules: - logger.info("Module {module} already loaded".format(module=fullname)) - return sys.modules[fullname] - - is_pkg = self.is_package(fullname) - - try: - src_code = self.get_source(fullname) - except ImportError as e: - logger.info("Get source code for {module} error: {message}".format(module=fullname, message=e)) - return None - - logger.info("Importing {module}".format(module=fullname)) - mod = sys.modules.setdefault(fullname, imp.new_module(fullname)) - - with hook_sandbox(fullname=fullname, hook=self.pre_load_module_hook): - self.pre_load_module_hook(fullname=fullname, module=mod) - - mod.__file__ = self.get_file(fullname) - mod.__loader__ = self - mod.__name__ = fullname - if is_pkg: - mod.__path__ = self.get_path(fullname) - mod.__package__ = fullname - else: - mod.__package__ = fullname.rpartition(".")[0] - - logger.info("Module prepared, ready to execute source code for {module}".format(module=fullname)) - logger.info("Source code for {module}:\n{src_code}".format(module=fullname, src_code=src_code)) - - self._execute_src_code(src_code=src_code, module=mod) - - with hook_sandbox(fullname=fullname, hook=self.post_load_module_hook): - self.post_load_module_hook(fullname=fullname, module=mod) - - return mod - - except Exception: - - with hook_sandbox(fullname=fullname, hook=self.import_error_hook): - self.import_error_hook(fullname) - - err_msg = "{module} import raise exception: {traceback}".format( - module=fullname, traceback=traceback.format_exc() - ) - logger.error(err_msg) - - if fullname in sys.modules: - logger.info("Remove module {module} from sys.modules".format(module=fullname)) - del sys.modules[fullname] - - raise ImportError(err_msg) - - finally: - imp.release_lock() - - def _execute_src_code(self, src_code, module): - exec(src_code, module.__dict__) - - @abstractmethod - def is_package(self, fullname): 
- raise NotImplementedError() - - @abstractmethod - def get_code(self, fullname): - raise NotImplementedError() - - @abstractmethod - def get_source(self, fullname): - raise NotImplementedError() - - @abstractmethod - def get_file(self, fullname): - return NotImplementedError() - - @abstractmethod - def get_path(self, fullname): - return NotImplementedError() - - def accept_find_module_request_hook(self, fullname, path): - pass - - def pre_load_module_hook(self, fullname, module): - pass - - def post_load_module_hook(self, fullname, module): - pass - - def import_error_hook(self, fullname): - pass - - -class AutoInstallRequirementsImporter(NonstandardModuleImporter, metaclass=ABCMeta): - def post_load_module_hook(self, fullname, module): - requirements = getattr(module, "__requirements__", []) - if not isinstance(requirements, list) or not requirements: - return - - sys.stdout.write( - "Start to install requirements({reqs}) for module({mod})\n".format( - reqs=",".join(requirements), mod=fullname - ) - ) - requirement.install(requirements) diff --git a/lib/pipeline/contrib/external_plugins/utils/importer/fs.py b/lib/pipeline/contrib/external_plugins/utils/importer/fs.py deleted file mode 100644 index 0c43742..0000000 --- a/lib/pipeline/contrib/external_plugins/utils/importer/fs.py +++ /dev/null @@ -1,82 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging -import os -import traceback - -from pipeline.contrib.external_plugins.utils.importer.base import AutoInstallRequirementsImporter - -logger = logging.getLogger("root") - - -class FSModuleImporter(AutoInstallRequirementsImporter): - def __init__(self, name, modules, path, use_cache=True): - super(FSModuleImporter, self).__init__(name=name, modules=modules) - - self.path = path if path.endswith("/") else "%s/" % path - self.use_cache = use_cache - self.file_cache = {} - - def is_package(self, fullname): - return os.path.exists(self._file_path(fullname, is_pkg=True)) - - def get_code(self, fullname): - return compile(self.get_source(fullname), self.get_file(fullname), "exec") - - def get_source(self, fullname): - source_code = self._fetch_file_content(self._file_path(fullname, is_pkg=self.is_package(fullname))) - - if source_code is None: - raise ImportError("Can not find {module} in {path}".format(module=fullname, path=self.path)) - - return source_code - - def get_path(self, fullname): - return [self._file_path(fullname, is_pkg=True).rpartition("/")[0]] - - def get_file(self, fullname): - return self._file_path(fullname, is_pkg=self.is_package(fullname)) - - def _file_path(self, fullname, is_pkg=False): - base_path = "{path}{file_path}".format(path=self.path, file_path=fullname.replace(".", "/")) - file_path = "%s/__init__.py" % base_path if is_pkg else "%s.py" % base_path - return file_path - - def _fetch_file_content(self, file_path): - logger.info("Try to fetch file {file_path}".format(file_path=file_path)) - - if self.use_cache and file_path in self.file_cache: - logger.info("Use content in cache for file: {file_path}".format(file_path=file_path)) - return self.file_cache[file_path] - - file_content = self._get_file_content(file_path) - - if self.use_cache: - self.file_cache[file_path] = file_content - - return file_content - - def 
_get_file_content(self, file_path): - try: - with open(file_path) as f: - file_content = f.read() - except IOError: - logger.info( - "Error occurred when read {file_path} content: {trace}".format( - file_path=file_path, trace=traceback.format_exc() - ) - ) - file_content = None - - return file_content diff --git a/lib/pipeline/contrib/external_plugins/utils/importer/git.py b/lib/pipeline/contrib/external_plugins/utils/importer/git.py deleted file mode 100644 index 3507e73..0000000 --- a/lib/pipeline/contrib/external_plugins/utils/importer/git.py +++ /dev/null @@ -1,83 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging -import urllib.parse - -import requests - -from pipeline.contrib.external_plugins.utils.importer.base import AutoInstallRequirementsImporter - -logger = logging.getLogger("root") - - -class GitRepoModuleImporter(AutoInstallRequirementsImporter): - def __init__(self, name, modules, repo_raw_url, branch, use_cache=True, secure_only=True, proxy=None): - super(GitRepoModuleImporter, self).__init__(name=name, modules=modules) - - if secure_only and not repo_raw_url.startswith("https"): - raise ValueError("Only accept https when secure_only is True.") - elif not secure_only: - logger.warning("Using not secure protocol is extremely dangerous!!") - - self.repo_raw_url = repo_raw_url if repo_raw_url.endswith("/") else "%s/" % repo_raw_url - self.branch = branch - self.use_cache = use_cache - self.file_cache = {} - self.proxy = proxy or {} - - def is_package(self, fullname): - return self._fetch_repo_file(self._file_url(fullname, is_pkg=True)) is not None - - def get_code(self, fullname): - return compile(self.get_source(fullname), self.get_file(fullname), "exec") - - def get_source(self, fullname): - source_code = self._fetch_repo_file(self._file_url(fullname, is_pkg=self.is_package(fullname))) - - if source_code is None: - raise ImportError( - "Can not find {module} in {repo}{branch}".format( - module=fullname, repo=self.repo_raw_url, branch=self.branch - ) - ) - return source_code - - def get_path(self, fullname): - return [self._file_url(fullname, is_pkg=True).rpartition("/")[0]] - - def get_file(self, fullname): - return self._file_url(fullname, is_pkg=self.is_package(fullname)) - - def _file_url(self, fullname, is_pkg=False): - base_url = "%s/" % urllib.parse.urljoin(self.repo_raw_url, self.branch) - path = fullname.replace(".", "/") - file_name = "%s/__init__.py" % path if is_pkg else "%s.py" % path - return urllib.parse.urljoin(base_url, file_name) - - def _fetch_repo_file(self, file_url): - logger.info("Try to fetch git file: 
{file_url}".format(file_url=file_url)) - - if self.use_cache and file_url in self.file_cache: - logger.info("Use content in cache for git file: {file_url}".format(file_url=file_url)) - return self.file_cache[file_url] - - resp = requests.get(file_url, timeout=10, proxies=self.proxy) - - file_content = resp.content if resp.ok else None - - if self.use_cache: - self.file_cache[file_url] = file_content - logger.info("Content cached for git file: {file_url}".format(file_url=file_url)) - - return file_content diff --git a/lib/pipeline/contrib/external_plugins/utils/importer/s3.py b/lib/pipeline/contrib/external_plugins/utils/importer/s3.py deleted file mode 100644 index b4d0559..0000000 --- a/lib/pipeline/contrib/external_plugins/utils/importer/s3.py +++ /dev/null @@ -1,121 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging - -import boto3 -from botocore.client import Config -from botocore.exceptions import ClientError - -from pipeline.contrib.external_plugins.utils.importer.base import AutoInstallRequirementsImporter - -logger = logging.getLogger("root") -CONFIG = Config(connect_timeout=10, read_timeout=10, retries={"max_attempts": 2}) - - -class S3ModuleImporter(AutoInstallRequirementsImporter): - def __init__( - self, - name, - modules, - service_address, - bucket, - access_key, - secret_key, - use_cache=True, - secure_only=True, - source_dir="", - ): - super(S3ModuleImporter, self).__init__(name=name, modules=modules) - - if secure_only and not service_address.startswith("https"): - raise ValueError("Only accept https when secure_only is True.") - elif not secure_only: - logger.warning("Using not secure protocol is extremely dangerous!!") - - self.service_address = service_address if service_address.endswith("/") else "%s/" % service_address - self.bucket = bucket - self.source_dir = source_dir if source_dir == "" or source_dir.endswith("/") else "%s/" % source_dir - self.use_cache = use_cache - self.s3 = boto3.resource( - "s3", - aws_access_key_id=access_key, - aws_secret_access_key=secret_key, - endpoint_url=self.service_address, - config=CONFIG, - ) - self.obj_cache = {} - - def is_package(self, fullname): - return self._fetch_obj_content(self._obj_key(fullname, is_pkg=True)) is not None - - def get_code(self, fullname): - return compile(self.get_source(fullname), self.get_file(fullname), "exec") - - def get_source(self, fullname): - source_code = self._fetch_obj_content(self._obj_key(fullname, is_pkg=self.is_package(fullname))) - - if source_code is None: - raise ImportError( - "Can not find {module} in {service_address}{bucket}/{source_dir}".format( - module=fullname, - service_address=self.service_address, - bucket=self.bucket, - source_dir=self.source_dir, - ) - ) - - return source_code - - def get_path(self, fullname): - return 
[self.get_file(fullname).rpartition("/")[0]] - - def get_file(self, fullname): - return "{service_address}{bucket}/{key}".format( - service_address=self.service_address, - bucket=self.bucket, - key=self._obj_key(fullname, is_pkg=self.is_package(fullname)), - ) - - def _obj_key(self, fullname, is_pkg): - base_key = self.source_dir + fullname.replace(".", "/") - key = "%s/__init__.py" % base_key if is_pkg else "%s.py" % base_key - return key - - def _fetch_obj_content(self, key): - logger.info("Try to fetch object: {key}".format(key=key)) - - if self.use_cache and key in self.obj_cache: - logger.info("Use content in cache for s3 object: {key}".format(key=key)) - return self.obj_cache[key] - - obj_content = self._get_s3_obj_content(key) - - if self.use_cache: - self.obj_cache[key] = obj_content - - return obj_content - - def _get_s3_obj_content(self, key): - obj = self.s3.Object(bucket_name=self.bucket, key=key) - - try: - resp = obj.get() - obj_content = resp["Body"].read() - except ClientError as e: - if e.response["Error"]["Code"] == "NoSuchKey": - obj_content = None - else: - raise - - return obj_content diff --git a/lib/pipeline/contrib/external_plugins/utils/importer/utils.py b/lib/pipeline/contrib/external_plugins/utils/importer/utils.py deleted file mode 100644 index 5d1dc29..0000000 --- a/lib/pipeline/contrib/external_plugins/utils/importer/utils.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging -import sys -from contextlib import contextmanager - -logger = logging.getLogger("root") - - -@contextmanager -def importer_context(importer): - _setup_importer(importer) - try: - yield - except Exception as e: - raise e - finally: - _remove_importer(importer) - - -def _setup_importer(importer): - logger.info("========== setup importer: %s" % importer) - sys.meta_path.insert(0, importer) - - -def _remove_importer(importer): - for hooked_importer in sys.meta_path: - if hooked_importer is importer: - logger.info("========== remove importer: %s" % importer) - sys.meta_path.remove(hooked_importer) - return diff --git a/lib/pipeline/contrib/external_plugins/utils/requirement.py b/lib/pipeline/contrib/external_plugins/utils/requirement.py deleted file mode 100644 index 869228b..0000000 --- a/lib/pipeline/contrib/external_plugins/utils/requirement.py +++ /dev/null @@ -1,23 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations under the License. -""" - -try: - from pip import main as pipmain -except ImportError: - from pip._internal import main as pipmain - - -def install(requirements): - for r in requirements: - if pipmain(["install", r]) != 0: - raise RuntimeError("can not install requirement %s" % r) diff --git a/lib/pipeline/contrib/periodic_task/__init__.py b/lib/pipeline/contrib/periodic_task/__init__.py deleted file mode 100644 index e794cec..0000000 --- a/lib/pipeline/contrib/periodic_task/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -default_app_config = "pipeline.contrib.periodic_task.apps.PeriodicTaskConfig" diff --git a/lib/pipeline/contrib/periodic_task/admin.py b/lib/pipeline/contrib/periodic_task/admin.py deleted file mode 100644 index 1323cab..0000000 --- a/lib/pipeline/contrib/periodic_task/admin.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.contrib import admin - -from pipeline.contrib.periodic_task import models - - -@admin.register(models.PeriodicTask) -class PeriodicTaskAdmin(admin.ModelAdmin): - list_display = ["id", "name", "total_run_count", "last_run_at", "creator"] - search_fields = ["id", "name"] - raw_id_fields = ["template", "celery_task", "snapshot"] - - -@admin.register(models.PeriodicTaskHistory) -class PeriodicTaskHistoryAdmin(admin.ModelAdmin): - list_display = ["id", "start_at", "ex_data", "start_success", "periodic_task"] - search_fields = ["periodic_task__id"] - raw_id_fields = ["periodic_task", "pipeline_instance"] diff --git a/lib/pipeline/contrib/periodic_task/apps.py b/lib/pipeline/contrib/periodic_task/apps.py deleted file mode 100644 index 39653d3..0000000 --- a/lib/pipeline/contrib/periodic_task/apps.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.apps import AppConfig - - -class PeriodicTaskConfig(AppConfig): - name = "pipeline.contrib.periodic_task" - verbose_name = "PipelinePeriodicTask" - - def ready(self): - from pipeline.contrib.periodic_task.tasks import periodic_task_start # noqa # register task diff --git a/lib/pipeline/contrib/periodic_task/context.py b/lib/pipeline/contrib/periodic_task/context.py deleted file mode 100644 index 69745f9..0000000 --- a/lib/pipeline/contrib/periodic_task/context.py +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from django.utils.module_loading import import_string - -from pipeline.conf import settings - - -def get_periodic_task_root_pipeline_context(root_pipeline_data: dict): - try: - provider = import_string(settings.BAMBOO_PERIODIC_TASK_ROOT_PIPELINE_CONTEXT_PROVIER) - except ImportError: - return {} - - return provider(root_pipeline_data) - - -def get_periodic_task_subprocess_context(root_pipeline_data: dict): - try: - provider = import_string(settings.BAMBOO_PERIODIC_TASK_SUBPROCESS_CONTEXT_PROVIER) - except ImportError: - return {} - - return provider(root_pipeline_data) diff --git a/lib/pipeline/contrib/periodic_task/djcelery/__init__.py b/lib/pipeline/contrib/periodic_task/djcelery/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/contrib/periodic_task/djcelery/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" diff --git a/lib/pipeline/contrib/periodic_task/djcelery/compat.py b/lib/pipeline/contrib/periodic_task/djcelery/compat.py deleted file mode 100644 index 4cafa71..0000000 --- a/lib/pipeline/contrib/periodic_task/djcelery/compat.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import sys - -PY2 = sys.version_info[0] == 2 -PY3 = sys.version_info[0] == 3 - - -def python_2_unicode_compatible(cls): - """Taken from Django project (django/utils/encoding.py) & modified a bit to - always have __unicode__ method available. - """ - if "__str__" not in cls.__dict__: - raise ValueError( - "@python_2_unicode_compatible cannot be applied " - "to %s because it doesn't define __str__()." % cls.__name__ - ) - - cls.__unicode__ = cls.__str__ - - if PY2: - cls.__str__ = lambda self: self.__unicode__().encode("utf-8") - - return cls diff --git a/lib/pipeline/contrib/periodic_task/djcelery/db.py b/lib/pipeline/contrib/periodic_task/djcelery/db.py deleted file mode 100644 index 4359cd6..0000000 --- a/lib/pipeline/contrib/periodic_task/djcelery/db.py +++ /dev/null @@ -1,81 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. 
-Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from contextlib import contextmanager - -import django -from django.db import transaction - -if django.VERSION < (1, 6): # pragma: no cover - - def get_queryset(s): - return s.get_query_set() - - -else: - - def get_queryset(s): # noqa - return s.get_queryset() - - -try: - from django.db.transaction import atomic # noqa -except ImportError: # pragma: no cover - - try: - from django.db.transaction import Transaction # noqa - except ImportError: - - @contextmanager - def commit_on_success(*args, **kwargs): - try: - transaction.enter_transaction_management(*args, **kwargs) - transaction.managed(True, *args, **kwargs) - try: - yield - except Exception: - if transaction.is_dirty(*args, **kwargs): - transaction.rollback(*args, **kwargs) - raise - else: - if transaction.is_dirty(*args, **kwargs): - try: - transaction.commit(*args, **kwargs) - except Exception: - transaction.rollback(*args, **kwargs) - raise - finally: - transaction.leave_transaction_management(*args, **kwargs) - - else: # pragma: no cover - from django.db.transaction import commit_on_success # noqa - - commit_unless_managed = transaction.commit_unless_managed - rollback_unless_managed = transaction.rollback_unless_managed -else: - - @contextmanager - def commit_on_success(using=None): # noqa - connection = transaction.get_connection(using) - if connection.features.autocommits_when_autocommit_is_off: - # ignore stupid warnings 
and errors - yield - else: - with transaction.atomic(using): - yield - - def commit_unless_managed(*args, **kwargs): # noqa - pass - - def rollback_unless_managed(*args, **kwargs): # noqa - pass diff --git a/lib/pipeline/contrib/periodic_task/djcelery/managers.py b/lib/pipeline/contrib/periodic_task/djcelery/managers.py deleted file mode 100644 index c0c6049..0000000 --- a/lib/pipeline/contrib/periodic_task/djcelery/managers.py +++ /dev/null @@ -1,208 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import warnings -from functools import wraps -from itertools import count - -from django.conf import settings -from django.db import connection, models -from django.db.models.query import QuerySet - -from pipeline.contrib.periodic_task.djcelery.db import ( - commit_on_success, - get_queryset, - rollback_unless_managed, -) -from pipeline.contrib.periodic_task.djcelery.utils import now - -try: - from django.db import connections, router -except ImportError: # pre-Django 1.2 - connections = router = None # noqa - - -try: - from celery.utils.timeutils import maybe_timedelta -except ImportError: - from celery.utils.time import maybe_timedelta - - -def update_model_with_dict(obj, fields): - [setattr(obj, attr_name, attr_value) for attr_name, attr_value in list(fields.items())] - obj.save() - return obj - - -class TxIsolationWarning(UserWarning): - pass - - -def transaction_retry(max_retries=1): - """Decorator for methods doing database operations. - - If the database operation fails, it will retry the operation - at most ``max_retries`` times. - - """ - - def _outer(fun): - @wraps(fun) - def _inner(*args, **kwargs): - _max_retries = kwargs.pop("exception_retry_count", max_retries) - for retries in count(0): - try: - return fun(*args, **kwargs) - except Exception: # pragma: no cover - # Depending on the database backend used we can experience - # various exceptions. E.g. psycopg2 raises an exception - # if some operation breaks the transaction, so saving - # the task result won't be possible until we rollback - # the transaction. 
- if retries >= _max_retries: - raise - try: - rollback_unless_managed() - except Exception: - pass - - return _inner - - return _outer - - -class ExtendedQuerySet(QuerySet): - def update_or_create(self, **kwargs): - obj, created = self.get_or_create(**kwargs) - - if not created: - fields = dict(kwargs.pop("defaults", {})) - fields.update(kwargs) - update_model_with_dict(obj, fields) - - return obj, created - - -class ExtendedManager(models.Manager): - def get_queryset(self): - return ExtendedQuerySet(self.model) - - get_query_set = get_queryset # Pre django 1.6 - - def update_or_create(self, **kwargs): - return get_queryset(self).update_or_create(**kwargs) - - def connection_for_write(self): - if connections: - return connections[router.db_for_write(self.model)] - return connection - - def connection_for_read(self): - if connections: - return connections[self.db] - return connection - - def current_engine(self): - try: - return settings.DATABASES[self.db]["ENGINE"] - except AttributeError: - return settings.DATABASE_ENGINE - - -class ResultManager(ExtendedManager): - def get_all_expired(self, expires): - """Get all expired task results.""" - return self.filter(date_done__lt=now() - maybe_timedelta(expires)) - - def delete_expired(self, expires): - """Delete all expired taskset results.""" - meta = self.model._meta - with commit_on_success(): - self.get_all_expired(expires).update(hidden=True) - cursor = self.connection_for_write().cursor() - cursor.execute( - "DELETE FROM {0.db_table} WHERE hidden=%s".format(meta), (True,), - ) - - -class PeriodicTaskManager(ExtendedManager): - def enabled(self): - return self.filter(enabled=True) - - -class TaskManager(ResultManager): - """Manager for :class:`celery.models.Task` models.""" - - _last_id = None - - def get_task(self, task_id): - """Get task meta for task by ``task_id``. - - :keyword exception_retry_count: How many times to retry by - transaction rollback on exception. 
This could theoretically - happen in a race condition if another worker is trying to - create the same task. The default is to retry once. - - """ - try: - return self.get(task_id=task_id) - except self.model.DoesNotExist: - if self._last_id == task_id: - self.warn_if_repeatable_read() - self._last_id = task_id - return self.model(task_id=task_id) - - @transaction_retry(max_retries=2) - def store_result(self, task_id, result, status, traceback=None, children=None): - """Store the result and status of a task. - - :param task_id: task id - - :param result: The return value of the task, or an exception - instance raised by the task. - - :param status: Task status. See - :meth:`celery.result.AsyncResult.get_status` for a list of - possible status values. - - :keyword traceback: The traceback at the point of exception (if the - task failed). - - :keyword children: List of serialized results of subtasks - of this task. - - :keyword exception_retry_count: How many times to retry by - transaction rollback on exception. This could theoretically - happen in a race condition if another worker is trying to - create the same task. The default is to retry twice. - - """ - return self.update_or_create( - task_id=task_id, - defaults={"status": status, "result": result, "traceback": traceback, "meta": {"children": children}}, - ) - - def warn_if_repeatable_read(self): - if "mysql" in self.current_engine().lower(): - cursor = self.connection_for_read().cursor() - if cursor.execute("SELECT @@tx_isolation"): - isolation = cursor.fetchone()[0] - if isolation == "REPEATABLE-READ": - warnings.warn( - TxIsolationWarning( - "Polling results with transaction isolation level " - "repeatable-read within the same transaction " - "may give outdated results. Be sure to commit the " - "transaction for each poll iteration." 
- ) - ) diff --git a/lib/pipeline/contrib/periodic_task/djcelery/migrate.py b/lib/pipeline/contrib/periodic_task/djcelery/migrate.py deleted file mode 100644 index 3921dac..0000000 --- a/lib/pipeline/contrib/periodic_task/djcelery/migrate.py +++ /dev/null @@ -1,95 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from django.db import transaction -from django_celery_beat.models import ( - IntervalSchedule, - CrontabSchedule, - PeriodicTask, -) - -from pipeline.contrib.periodic_task.djcelery.models import ( - IntervalSchedule as DjCeleryIntervalSchedule, - CrontabSchedule as DjCeleryCrontabSchedule, - DjCeleryPeriodicTask, -) - -BATCH_SIZE = 500 - - -@transaction.atomic -def try_to_migrate_to_django_celery_beat(): - """ - try to migrate djcelery to django_celery_beat - if django_celery_beat models has data, indicate that pipeline is first use in project - (because old version pipeline is not compatible with django celery beat) - so we will not do the migration works - """ - if IntervalSchedule.objects.exists() or CrontabSchedule.objects.exists() or PeriodicTask.objects.exists(): - print("django_celery_beat in used, skip pipeline djcelery migration works") - return - - # migrate IntervalScheudle - old_intervals = DjCeleryIntervalSchedule.objects.all() - new_intervals = [] - for oi in old_intervals: - new_intervals.append(IntervalSchedule(id=oi.id, every=oi.every, period=oi.period)) - IntervalSchedule.objects.bulk_create(new_intervals, batch_size=BATCH_SIZE) - print("[pipeline]migrate {} interval objects".format(len(new_intervals))) - - # migrate CrontabSchedule - old_crontabs = DjCeleryCrontabSchedule.objects.all() - new_crontabs = [] - for oc in old_crontabs: - new_crontabs.append( - CrontabSchedule( - id=oc.id, - minute=oc.minute, - hour=oc.hour, - day_of_week=oc.day_of_week, - day_of_month=oc.day_of_month, - month_of_year=oc.month_of_year, - timezone=oc.timezone, - ) - ) - CrontabSchedule.objects.bulk_create(new_crontabs, batch_size=BATCH_SIZE) - print("[pipeline]migrate {} crontab objects".format(len(new_crontabs))) - - # migrate PeriodicTask - old_tasks = DjCeleryPeriodicTask.objects.all() - new_tasks = [] - for ot in old_tasks: - new_tasks.append( - PeriodicTask( - id=ot.id, - name=ot.name, - task=ot.task, - interval_id=ot.interval_id, - 
crontab_id=ot.crontab_id, - solar_id=None, - clocked_id=None, - args=ot.args, - kwargs=ot.kwargs, - queue=ot.queue, - exchange=ot.exchange, - routing_key=ot.routing_key, - expires=ot.expires, - enabled=ot.enabled, - last_run_at=ot.last_run_at, - total_run_count=ot.total_run_count, - date_changed=ot.date_changed, - description=ot.description, - ) - ) - PeriodicTask.objects.bulk_create(new_tasks, batch_size=BATCH_SIZE) - print("[pipeline]migrate {} periodic tasks".format(len(new_tasks))) diff --git a/lib/pipeline/contrib/periodic_task/djcelery/models.py b/lib/pipeline/contrib/periodic_task/djcelery/models.py deleted file mode 100644 index 7bdb164..0000000 --- a/lib/pipeline/contrib/periodic_task/djcelery/models.py +++ /dev/null @@ -1,218 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from datetime import timedelta - -import timezone_field -from celery import schedules -from django.core.exceptions import MultipleObjectsReturned, ValidationError -from django.db import models -from django.db.models import signals -from django.utils.translation import ugettext_lazy as _ -from pipeline.contrib.periodic_task.djcelery import managers -from pipeline.contrib.periodic_task.djcelery.tzcrontab import TzAwareCrontab -from pipeline.contrib.periodic_task.djcelery.utils import now -from pipeline.contrib.periodic_task.djcelery.compat import python_2_unicode_compatible - - -PERIOD_CHOICES = ( - ("days", _("Days")), - ("hours", _("Hours")), - ("minutes", _("Minutes")), - ("seconds", _("Seconds")), - ("microseconds", _("Microseconds")), -) - - -@python_2_unicode_compatible -class IntervalSchedule(models.Model): - every = models.IntegerField(_("every"), null=False) - period = models.CharField(_("period"), max_length=24, choices=PERIOD_CHOICES,) - - class Meta: - verbose_name = _("interval") - verbose_name_plural = _("intervals") - ordering = ["period", "every"] - - @property - def schedule(self): - return schedules.schedule(timedelta(**{self.period: self.every})) - - @classmethod - def from_schedule(cls, schedule, period="seconds"): - every = max(schedule.run_every.total_seconds(), 0) - try: - return cls.objects.get(every=every, period=period) - except cls.DoesNotExist: - return cls(every=every, period=period) - except MultipleObjectsReturned: - cls.objects.filter(every=every, period=period).delete() - return cls(every=every, period=period) - - def __str__(self): - if self.every == 1: - return _("every {0.period_singular}").format(self) - return _("every {0.every:d} {0.period}").format(self) - - @property - def period_singular(self): - return self.period[:-1] - - -def cronexp(field): - return field and str(field).replace(" ", "") or "*" - - -@python_2_unicode_compatible -class CrontabSchedule(models.Model): - minute = models.CharField(_("minute"), 
max_length=64, default="*") - hour = models.CharField(_("hour"), max_length=64, default="*") - day_of_week = models.CharField(_("day of week"), max_length=64, default="*",) - day_of_month = models.CharField(_("day of month"), max_length=64, default="*",) - month_of_year = models.CharField(_("month of year"), max_length=64, default="*",) - timezone = timezone_field.TimeZoneField(default="UTC") - - class Meta: - verbose_name = _("crontab") - verbose_name_plural = _("crontabs") - ordering = ["month_of_year", "day_of_month", "day_of_week", "hour", "minute"] - - def __str__(self): - return "{} {} {} {} {} (m/h/d/dM/MY)".format( - cronexp(self.minute), - cronexp(self.hour), - cronexp(self.day_of_week), - cronexp(self.day_of_month), - cronexp(self.month_of_year), - ) - - @property - def schedule(self): - return TzAwareCrontab( - minute=self.minute, - hour=self.hour, - day_of_week=self.day_of_week, - day_of_month=self.day_of_month, - month_of_year=self.month_of_year, - tz=self.timezone, - ) - - @classmethod - def from_schedule(cls, schedule): - spec = { - "minute": schedule._orig_minute, - "hour": schedule._orig_hour, - "day_of_week": schedule._orig_day_of_week, - "day_of_month": schedule._orig_day_of_month, - "month_of_year": schedule._orig_month_of_year, - "timezone": schedule.tz, - } - try: - return cls.objects.get(**spec) - except cls.DoesNotExist: - return cls(**spec) - except MultipleObjectsReturned: - cls.objects.filter(**spec).delete() - return cls(**spec) - - -class DjCeleryPeriodicTasks(models.Model): - ident = models.SmallIntegerField(default=1, primary_key=True, unique=True) - last_update = models.DateTimeField(null=False) - - objects = managers.ExtendedManager() - - @classmethod - def changed(cls, instance, **kwargs): - if not instance.no_changes: - cls.objects.update_or_create(ident=1, defaults={"last_update": now()}) - - @classmethod - def last_change(cls): - try: - return cls.objects.get(ident=1).last_update - except cls.DoesNotExist: - pass - - 
-@python_2_unicode_compatible -class DjCeleryPeriodicTask(models.Model): - name = models.CharField(_("name"), max_length=200, unique=True, help_text=_("Useful description"),) - task = models.CharField(_("task name"), max_length=200) - interval = models.ForeignKey( - IntervalSchedule, null=True, blank=True, verbose_name=_("interval"), on_delete=models.CASCADE, - ) - crontab = models.ForeignKey( - CrontabSchedule, - null=True, - blank=True, - verbose_name=_("crontab"), - on_delete=models.CASCADE, - help_text=_("Use one of interval/crontab"), - ) - args = models.TextField(_("Arguments"), blank=True, default="[]", help_text=_("JSON encoded positional arguments"),) - kwargs = models.TextField( - _("Keyword arguments"), blank=True, default="{}", help_text=_("JSON encoded keyword arguments"), - ) - queue = models.CharField( - _("queue"), max_length=200, blank=True, null=True, default=None, help_text=_("Queue defined in CELERY_QUEUES"), - ) - exchange = models.CharField(_("exchange"), max_length=200, blank=True, null=True, default=None,) - routing_key = models.CharField(_("routing key"), max_length=200, blank=True, null=True, default=None,) - expires = models.DateTimeField(_("expires"), blank=True, null=True,) - enabled = models.BooleanField(_("enabled"), default=True,) - last_run_at = models.DateTimeField(auto_now=False, auto_now_add=False, editable=False, blank=True, null=True,) - total_run_count = models.PositiveIntegerField(default=0, editable=False,) - date_changed = models.DateTimeField(auto_now=True) - description = models.TextField(_("description"), blank=True) - - objects = managers.PeriodicTaskManager() - no_changes = False - - class Meta: - verbose_name = _("djcelery periodic task") - verbose_name_plural = _("djcelery periodic tasks") - - def validate_unique(self, *args, **kwargs): - super(DjCeleryPeriodicTask, self).validate_unique(*args, **kwargs) - if not self.interval and not self.crontab: - raise ValidationError({"interval": ["One of interval or crontab 
must be set."]}) - if self.interval and self.crontab: - raise ValidationError({"crontab": ["Only one of interval or crontab must be set"]}) - - def save(self, *args, **kwargs): - self.exchange = self.exchange or None - self.routing_key = self.routing_key or None - self.queue = self.queue or None - if not self.enabled: - self.last_run_at = None - super(DjCeleryPeriodicTask, self).save(*args, **kwargs) - - def __str__(self): - fmt = "{0.name}: {{no schedule}}" - if self.interval: - fmt = "{0.name}: {0.interval}" - if self.crontab: - fmt = "{0.name}: {0.crontab}" - return fmt.format(self) - - @property - def schedule(self): - if self.interval: - return self.interval.schedule - if self.crontab: - return self.crontab.schedule - - -signals.pre_delete.connect(DjCeleryPeriodicTasks.changed, sender=DjCeleryPeriodicTask) -signals.pre_save.connect(DjCeleryPeriodicTasks.changed, sender=DjCeleryPeriodicTask) diff --git a/lib/pipeline/contrib/periodic_task/djcelery/tzcrontab.py b/lib/pipeline/contrib/periodic_task/djcelery/tzcrontab.py deleted file mode 100644 index b0fdee7..0000000 --- a/lib/pipeline/contrib/periodic_task/djcelery/tzcrontab.py +++ /dev/null @@ -1,119 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging -from collections import namedtuple -from datetime import datetime - -import pytz -from celery import schedules -from celery.utils.time import is_naive, make_aware - -schedstate = namedtuple("schedstate", ("is_due", "next")) -logger = logging.getLogger("celery") - - -class TzAwareCrontab(schedules.crontab): - """Timezone Aware Crontab.""" - - def __init__( - self, minute="*", hour="*", day_of_week="*", day_of_month="*", month_of_year="*", tz=pytz.utc, app=None, - ): - """Overwrite Crontab constructor to include a timezone argument.""" - self.tz = tz - - nowfun = self.nowfunc - - super(TzAwareCrontab, self).__init__( - minute=minute, - hour=hour, - day_of_week=day_of_week, - day_of_month=day_of_month, - month_of_year=month_of_year, - nowfun=nowfun, - app=app, - ) - - def nowfunc(self): - return self.tz.normalize(pytz.utc.localize(datetime.utcnow())) - - def is_due(self, last_run_at): - """Calculate when the next run will take place. - Return tuple of (is_due, next_time_to_check). - The last_run_at argument needs to be timezone aware. 
- """ - logger.debug("################### is_due begin ###################") - logger.debug("native last_run_at: %s" % last_run_at) - - last_run_at = last_run_at.astimezone(self.tz) - - now = datetime.now(self.tz) - logger.debug("last_run_at: %s" % last_run_at) - logger.debug("now: %s" % now) - - rem_delta = self.remaining_estimate(last_run_at) - - logger.debug("rem_delta: %s" % rem_delta) - logger.debug("next run at: %s" % (now + rem_delta)) - - rem = max(rem_delta.total_seconds(), 0) - due = rem == 0 - if due: - rem_delta = self.remaining_estimate(self.now()) - rem = max(rem_delta.total_seconds(), 0) - - logger.debug("self: %s" % self) - logger.debug("due: {} {} {}".format(self.tz, due, rem)) - logger.debug("################### is_due end ###################") - return schedstate(due, rem) - - # Needed to support pickling - def __repr__(self): - return ( - "".format(self) - ) - - def __reduce__(self): - return ( - self.__class__, - ( - self._orig_minute, - self._orig_hour, - self._orig_day_of_week, - self._orig_day_of_month, - self._orig_month_of_year, - self.tz, - ), - None, - ) - - def __eq__(self, other): - if isinstance(other, schedules.crontab): - return ( - other.month_of_year == self.month_of_year - and other.day_of_month == self.day_of_month - and other.day_of_week == self.day_of_week - and other.hour == self.hour - and other.minute == self.minute - and other.tz == self.tz - ) - return NotImplemented - - def maybe_make_aware(self, dt): - if not is_naive(dt): - return dt - return make_aware(dt, self.tz) - - def to_local(self, dt): - return self.maybe_make_aware(dt) diff --git a/lib/pipeline/contrib/periodic_task/djcelery/utils.py b/lib/pipeline/contrib/periodic_task/djcelery/utils.py deleted file mode 100644 index 4d3a182..0000000 --- a/lib/pipeline/contrib/periodic_task/djcelery/utils.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community 
-Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" -# -- XXX This module must not use translation as that causes -# -- a recursive loader import! - - -from django.conf import settings -from django.utils import timezone - - -def make_aware(value): - if settings.USE_TZ: - # naive datetimes are assumed to be in UTC. - if timezone.is_naive(value): - value = timezone.make_aware(value, timezone.utc) - # then convert to the Django configured timezone. - default_tz = timezone.get_default_timezone() - value = timezone.localtime(value, default_tz) - return value - - -def now(): - return make_aware(timezone.now()) diff --git a/lib/pipeline/contrib/periodic_task/migrations/0001_initial.py b/lib/pipeline/contrib/periodic_task/migrations/0001_initial.py deleted file mode 100644 index 7cc1120..0000000 --- a/lib/pipeline/contrib/periodic_task/migrations/0001_initial.py +++ /dev/null @@ -1,225 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -from django.db import migrations, models -import django.db.models.deletion -import timezone_field.fields -import pipeline.models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0016_auto_20181220_0958"), - ] - - operations = [ - migrations.CreateModel( - name="CrontabSchedule", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True,),), - ("minute", models.CharField(default=b"*", max_length=64, verbose_name="minute"),), - ("hour", models.CharField(default=b"*", max_length=64, verbose_name="hour"),), - ("day_of_week", models.CharField(default=b"*", max_length=64, verbose_name="day of week"),), - ("day_of_month", models.CharField(default=b"*", max_length=64, verbose_name="day of month"),), - ("month_of_year", models.CharField(default=b"*", max_length=64, verbose_name="month of year"),), - ("timezone", timezone_field.fields.TimeZoneField(default=b"UTC")), - ], - options={ - "ordering": ["month_of_year", "day_of_month", "day_of_week", "hour", "minute",], - "verbose_name": "crontab", - "verbose_name_plural": "crontabs", - }, - ), - migrations.CreateModel( - name="DjCeleryPeriodicTask", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True,),), - ( - "name", - models.CharField(help_text="Useful description", unique=True, max_length=200, verbose_name="name",), - ), - ("task", models.CharField(max_length=200, verbose_name="task name")), - ( - "args", - models.TextField( - default=b"[]", - help_text="JSON encoded positional arguments", - 
verbose_name="Arguments", - blank=True, - ), - ), - ( - "kwargs", - models.TextField( - default=b"{}", - help_text="JSON encoded keyword arguments", - verbose_name="Keyword arguments", - blank=True, - ), - ), - ( - "queue", - models.CharField( - default=None, - max_length=200, - blank=True, - help_text="Queue defined in CELERY_QUEUES", - null=True, - verbose_name="queue", - ), - ), - ( - "exchange", - models.CharField(default=None, max_length=200, null=True, verbose_name="exchange", blank=True,), - ), - ( - "routing_key", - models.CharField(default=None, max_length=200, null=True, verbose_name="routing key", blank=True,), - ), - ("expires", models.DateTimeField(null=True, verbose_name="expires", blank=True),), - ("enabled", models.BooleanField(default=True, verbose_name="enabled")), - ("last_run_at", models.DateTimeField(null=True, editable=False, blank=True),), - ("total_run_count", models.PositiveIntegerField(default=0, editable=False),), - ("date_changed", models.DateTimeField(auto_now=True)), - ("description", models.TextField(verbose_name="description", blank=True),), - ( - "crontab", - models.ForeignKey( - blank=True, - to="periodic_task.CrontabSchedule", - help_text="Use one of interval/crontab", - null=True, - verbose_name="crontab", - on_delete=models.CASCADE, - ), - ), - ], - options={"verbose_name": "periodic task", "verbose_name_plural": "periodic tasks",}, - ), - migrations.CreateModel( - name="DjCeleryPeriodicTasks", - fields=[ - ("ident", models.SmallIntegerField(default=1, unique=True, serialize=False, primary_key=True),), - ("last_update", models.DateTimeField()), - ], - ), - migrations.CreateModel( - name="IntervalSchedule", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True,),), - ("every", models.IntegerField(verbose_name="every")), - ( - "period", - models.CharField( - max_length=24, - verbose_name="period", - choices=[ - (b"days", "Days"), - (b"hours", "Hours"), - (b"minutes", 
"Minutes"), - (b"seconds", "Seconds"), - (b"microseconds", "Microseconds"), - ], - ), - ), - ], - options={"ordering": ["period", "every"], "verbose_name": "interval", "verbose_name_plural": "intervals",}, - ), - migrations.CreateModel( - name="PeriodicTask", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True,),), - ("name", models.CharField(max_length=64, verbose_name="\u5468\u671f\u4efb\u52a1\u540d\u79f0",),), - ("cron", models.CharField(max_length=128, verbose_name="\u8c03\u5ea6\u7b56\u7565"),), - ("total_run_count", models.PositiveIntegerField(default=0, verbose_name="\u6267\u884c\u6b21\u6570"),), - ("last_run_at", models.DateTimeField(null=True, verbose_name="\u4e0a\u6b21\u8fd0\u884c\u65f6\u95f4"),), - ("creator", models.CharField(default=b"", max_length=32, verbose_name="\u521b\u5efa\u8005"),), - ("extra_info", pipeline.models.CompressJSONField(verbose_name="\u989d\u5916\u4fe1\u606f", null=True),), - ( - "celery_task", - models.ForeignKey( - verbose_name="celery \u5468\u671f\u4efb\u52a1\u5b9e\u4f8b", - to="periodic_task.DjCeleryPeriodicTask", - null=True, - on_delete=models.SET_NULL, - ), - ), - ( - "snapshot", - models.ForeignKey( - related_name="periodic_tasks", - verbose_name="\u7528\u4e8e\u521b\u5efa\u6d41\u7a0b\u5b9e\u4f8b\u7684\u7ed3\u6784\u6570\u636e", - to="pipeline.Snapshot", - on_delete=models.DO_NOTHING, - ), - ), - ( - "template", - models.ForeignKey( - related_name="periodic_tasks", - on_delete=django.db.models.deletion.SET_NULL, - verbose_name="\u5468\u671f\u4efb\u52a1\u5bf9\u5e94\u7684\u6a21\u677f", - to_field="template_id", - to="pipeline.PipelineTemplate", - null=True, - ), - ), - ], - ), - migrations.CreateModel( - name="PeriodicTaskHistory", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True,),), - ("ex_data", models.TextField(verbose_name="\u5f02\u5e38\u4fe1\u606f")), - ("start_at", models.DateTimeField(auto_now_add=True, 
verbose_name="\u5f00\u59cb\u65f6\u95f4"),), - ( - "start_success", - models.BooleanField(default=True, verbose_name="\u662f\u5426\u542f\u52a8\u6210\u529f",), - ), - ( - "periodic_task", - models.ForeignKey( - related_name="instance_rel", - verbose_name="\u5468\u671f\u4efb\u52a1", - to="periodic_task.PeriodicTask", - null=True, - on_delete=models.DO_NOTHING, - ), - ), - ( - "pipeline_instance", - models.ForeignKey( - related_name="periodic_task_rel", - verbose_name="Pipeline \u5b9e\u4f8b", - to_field="instance_id", - to="pipeline.PipelineInstance", - null=True, - on_delete=models.DO_NOTHING, - ), - ), - ], - ), - migrations.AddField( - model_name="djceleryperiodictask", - name="interval", - field=models.ForeignKey( - verbose_name="interval", - blank=True, - to="periodic_task.IntervalSchedule", - null=True, - on_delete=models.CASCADE, - ), - ), - ] diff --git a/lib/pipeline/contrib/periodic_task/migrations/0002_auto_20190103_1918.py b/lib/pipeline/contrib/periodic_task/migrations/0002_auto_20190103_1918.py deleted file mode 100644 index aeb98a6..0000000 --- a/lib/pipeline/contrib/periodic_task/migrations/0002_auto_20190103_1918.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("periodic_task", "0001_initial"), - ] - - operations = [ - migrations.AlterModelOptions( - name="djceleryperiodictask", - options={"verbose_name": "djcelery periodic task", "verbose_name_plural": "djcelery periodic tasks",}, - ), - ] diff --git a/lib/pipeline/contrib/periodic_task/migrations/0003_auto_20191213_0819.py b/lib/pipeline/contrib/periodic_task/migrations/0003_auto_20191213_0819.py deleted file mode 100644 index 0f221ea..0000000 --- a/lib/pipeline/contrib/periodic_task/migrations/0003_auto_20191213_0819.py +++ /dev/null @@ -1,102 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.23 on 2019-12-13 08:19 -from __future__ import unicode_literals - -import timezone_field.fields -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("periodic_task", "0002_auto_20190103_1918"), - ] - - operations = [ - migrations.AddField( - model_name="periodictask", - name="priority", - field=models.IntegerField(default=100, verbose_name="流程优先级"), - ), - migrations.AddField( - model_name="periodictask", - name="queue", - field=models.CharField(default="", max_length=512, verbose_name="流程使用的队列名"), - ), - migrations.AlterField( - model_name="crontabschedule", - name="day_of_month", - field=models.CharField( - default="*", max_length=64, verbose_name="day of month" - ), - ), - migrations.AlterField( - model_name="crontabschedule", - name="day_of_week", - field=models.CharField( - default="*", max_length=64, verbose_name="day of week" - ), - ), - migrations.AlterField( - model_name="crontabschedule", - name="hour", - field=models.CharField(default="*", max_length=64, verbose_name="hour"), - ), - migrations.AlterField( - model_name="crontabschedule", - name="minute", - field=models.CharField(default="*", max_length=64, verbose_name="minute"), - ), - migrations.AlterField( - 
model_name="crontabschedule", - name="month_of_year", - field=models.CharField( - default="*", max_length=64, verbose_name="month of year" - ), - ), - migrations.AlterField( - model_name="crontabschedule", - name="timezone", - field=timezone_field.fields.TimeZoneField(default="UTC"), - ), - migrations.AlterField( - model_name="djceleryperiodictask", - name="args", - field=models.TextField( - blank=True, - default="[]", - help_text="JSON encoded positional arguments", - verbose_name="Arguments", - ), - ), - migrations.AlterField( - model_name="djceleryperiodictask", - name="kwargs", - field=models.TextField( - blank=True, - default="{}", - help_text="JSON encoded keyword arguments", - verbose_name="Keyword arguments", - ), - ), - migrations.AlterField( - model_name="intervalschedule", - name="period", - field=models.CharField( - choices=[ - ("days", "Days"), - ("hours", "Hours"), - ("minutes", "Minutes"), - ("seconds", "Seconds"), - ("microseconds", "Microseconds"), - ], - max_length=24, - verbose_name="period", - ), - ), - migrations.AlterField( - model_name="periodictask", - name="creator", - field=models.CharField(default="", max_length=32, verbose_name="创建者"), - ), - ] diff --git a/lib/pipeline/contrib/periodic_task/migrations/0004_auto_20191213_0828.py b/lib/pipeline/contrib/periodic_task/migrations/0004_auto_20191213_0828.py deleted file mode 100644 index 3571e4d..0000000 --- a/lib/pipeline/contrib/periodic_task/migrations/0004_auto_20191213_0828.py +++ /dev/null @@ -1,25 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.23 on 2019-12-13 08:28 -from __future__ import unicode_literals - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("periodic_task", "0003_auto_20191213_0819"), - ] - - operations = [ - migrations.AddField( - model_name="periodictaskhistory", - name="priority", - field=models.IntegerField(default=100, verbose_name="流程优先级"), - ), - migrations.AddField( - 
model_name="periodictaskhistory", - name="queue", - field=models.CharField(default="", max_length=512, verbose_name="流程使用的队列名"), - ), - ] diff --git a/lib/pipeline/contrib/periodic_task/migrations/0005_migrate_task.py b/lib/pipeline/contrib/periodic_task/migrations/0005_migrate_task.py deleted file mode 100644 index 759220b..0000000 --- a/lib/pipeline/contrib/periodic_task/migrations/0005_migrate_task.py +++ /dev/null @@ -1,22 +0,0 @@ -# Generated by Django 2.2.6 on 2020-12-16 02:56 - -from django.db import migrations - - -def reverse_func(apps, schema_editor): - raise Exception("task migrate cannot reverse") - - -def forward_func(apps, schema_editor): - from pipeline.contrib.periodic_task.djcelery import migrate - - migrate.try_to_migrate_to_django_celery_beat() - - -class Migration(migrations.Migration): - - dependencies = [ - ("periodic_task", "0004_auto_20191213_0828"), - ] - - operations = [migrations.RunPython(forward_func, reverse_func)] diff --git a/lib/pipeline/contrib/periodic_task/migrations/0006_change_task_ref_table.py b/lib/pipeline/contrib/periodic_task/migrations/0006_change_task_ref_table.py deleted file mode 100644 index 44f177e..0000000 --- a/lib/pipeline/contrib/periodic_task/migrations/0006_change_task_ref_table.py +++ /dev/null @@ -1,24 +0,0 @@ -# Generated by Django 2.2.6 on 2020-11-02 13:19 - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ("periodic_task", "0005_migrate_task"), - ] - - operations = [ - migrations.AlterField( - model_name="periodictask", - name="celery_task", - field=models.ForeignKey( - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="django_celery_beat.PeriodicTask", - verbose_name="celery 周期任务实例", - ), - ), - ] diff --git a/lib/pipeline/contrib/periodic_task/migrations/__init__.py b/lib/pipeline/contrib/periodic_task/migrations/__init__.py deleted file mode 100644 index 4009729..0000000 --- 
a/lib/pipeline/contrib/periodic_task/migrations/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/contrib/periodic_task/models.py b/lib/pipeline/contrib/periodic_task/models.py deleted file mode 100644 index 5ff5bff..0000000 --- a/lib/pipeline/contrib/periodic_task/models.py +++ /dev/null @@ -1,209 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import ujson as json -from django.db import models -from django.utils.translation import ugettext_lazy as _ - -from pipeline.constants import PIPELINE_DEFAULT_PRIORITY -from pipeline.contrib.periodic_task.signals import periodic_task_start_failed -from pipeline.exceptions import InvalidOperationException -from pipeline.models import ( - CompressJSONField, - PipelineInstance, - PipelineTemplate, - Snapshot, -) -from pipeline.utils.uniqid import uniqid -from django_celery_beat.models import ( - PeriodicTask as DjangoCeleryBeatPeriodicTask, - CrontabSchedule as DjangoCeleryBeatCrontabSchedule, -) - -from pipeline.contrib.periodic_task.djcelery.models import * # noqa - -BAMBOO_ENGINE_TRIGGER_TASK = "pipeline.contrib.periodic_task.tasks.bamboo_engine_periodic_task_start" - - -class PeriodicTaskManager(models.Manager): - def create_task( - self, - name, - template, - cron, - data, - creator, - timezone=None, - extra_info=None, - spread=False, - priority=PIPELINE_DEFAULT_PRIORITY, - queue="", - trigger_task="", - ): - snapshot = Snapshot.objects.create_snapshot(data) - schedule, _ = DjangoCeleryBeatCrontabSchedule.objects.get_or_create( - minute=cron.get("minute", "*"), - hour=cron.get("hour", "*"), - day_of_week=cron.get("day_of_week", "*"), - day_of_month=cron.get("day_of_month", "*"), - month_of_year=cron.get("month_of_year", "*"), - timezone=timezone or "UTC", - ) - _ = schedule.schedule # noqa - - task = self.create( - name=name, - template=template, - snapshot=snapshot, - cron=schedule.__str__(), - creator=creator, - extra_info=extra_info, - priority=priority, - queue=queue, - ) - - kwargs = {"period_task_id": task.id, "spread": spread} - celery_task = DjangoCeleryBeatPeriodicTask.objects.create( - crontab=schedule, - name=uniqid(), - task=trigger_task or "pipeline.contrib.periodic_task.tasks.periodic_task_start", - enabled=False, - kwargs=json.dumps(kwargs), - ) - task.celery_task = celery_task - task.save() - return task - - -class 
PeriodicTask(models.Model): - name = models.CharField(_("周期任务名称"), max_length=64) - template = models.ForeignKey( - PipelineTemplate, - related_name="periodic_tasks", - to_field="template_id", - verbose_name=_("周期任务对应的模板"), - null=True, - on_delete=models.deletion.SET_NULL, - ) - cron = models.CharField(_("调度策略"), max_length=128) - celery_task = models.ForeignKey( - DjangoCeleryBeatPeriodicTask, verbose_name=_("celery 周期任务实例"), null=True, on_delete=models.SET_NULL, - ) - snapshot = models.ForeignKey( - Snapshot, related_name="periodic_tasks", verbose_name=_("用于创建流程实例的结构数据"), on_delete=models.DO_NOTHING, - ) - total_run_count = models.PositiveIntegerField(_("执行次数"), default=0) - last_run_at = models.DateTimeField(_("上次运行时间"), null=True) - creator = models.CharField(_("创建者"), max_length=32, default="") - priority = models.IntegerField(_("流程优先级"), default=PIPELINE_DEFAULT_PRIORITY) - queue = models.CharField(_("流程使用的队列名"), max_length=512, default="") - extra_info = CompressJSONField(verbose_name=_("额外信息"), null=True) - - objects = PeriodicTaskManager() - - def __unicode__(self): - return "{name}({id})".format(name=self.name, id=self.id) - - @property - def enabled(self): - return self.celery_task.enabled - - @property - def execution_data(self): - return self.snapshot.data - - @property - def form(self): - form = { - key: var_info - for key, var_info in list(self.execution_data["constants"].items()) - if var_info["show_type"] == "show" - } - return form - - def delete(self, using=None): - self.set_enabled(False) - self.celery_task.delete() - PeriodicTaskHistory.objects.filter(periodic_task=self).delete() - return super(PeriodicTask, self).delete(using) - - def set_enabled(self, enabled): - self.celery_task.enabled = enabled - self.celery_task.save() - - def modify_cron(self, cron, timezone=None): - if self.enabled: - raise InvalidOperationException("can not modify cron when task is enabled") - schedule, _ = DjangoCeleryBeatCrontabSchedule.objects.get_or_create( - 
minute=cron.get("minute", "*"), - hour=cron.get("hour", "*"), - day_of_week=cron.get("day_of_week", "*"), - day_of_month=cron.get("day_of_month", "*"), - month_of_year=cron.get("month_of_year", "*"), - timezone=timezone or "UTC", - ) - # try to initiate schedule object - _ = schedule.schedule # noqa - self.cron = schedule.__str__() - self.celery_task.crontab = schedule - self.celery_task.save() - self.save() - - def modify_constants(self, constants): - if self.enabled: - raise InvalidOperationException("can not modify constants when task is enabled") - exec_data = self.execution_data - for key, value in list(constants.items()): - if key in exec_data["constants"]: - exec_data["constants"][key]["value"] = value - self.snapshot.data = exec_data - self.snapshot.save() - return exec_data["constants"] - - -class PeriodicTaskHistoryManager(models.Manager): - def record_schedule(self, periodic_task, pipeline_instance, ex_data, start_success=True): - history = self.create( - periodic_task=periodic_task, - pipeline_instance=pipeline_instance, - ex_data=ex_data, - start_success=start_success, - priority=periodic_task.priority, - queue=periodic_task.queue, - ) - - if not start_success: - periodic_task_start_failed.send(sender=PeriodicTask, periodic_task=periodic_task, history=history) - - return history - - -class PeriodicTaskHistory(models.Model): - periodic_task = models.ForeignKey( - PeriodicTask, related_name="instance_rel", verbose_name=_("周期任务"), null=True, on_delete=models.DO_NOTHING, - ) - pipeline_instance = models.ForeignKey( - PipelineInstance, - related_name="periodic_task_rel", - verbose_name=_("Pipeline 实例"), - to_field="instance_id", - null=True, - on_delete=models.DO_NOTHING, - ) - ex_data = models.TextField(_("异常信息")) - start_at = models.DateTimeField(_("开始时间"), auto_now_add=True) - start_success = models.BooleanField(_("是否启动成功"), default=True) - priority = models.IntegerField(_("流程优先级"), default=PIPELINE_DEFAULT_PRIORITY) - queue = 
models.CharField(_("流程使用的队列名"), max_length=512, default="") - - objects = PeriodicTaskHistoryManager() diff --git a/lib/pipeline/contrib/periodic_task/signals/__init__.py b/lib/pipeline/contrib/periodic_task/signals/__init__.py deleted file mode 100644 index 3aaf776..0000000 --- a/lib/pipeline/contrib/periodic_task/signals/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.dispatch import Signal - -pre_periodic_task_start = Signal(providing_args=["periodic_task", "pipeline_instance"]) -post_periodic_task_start = Signal(providing_args=["periodic_task", "pipeline_instance"]) -periodic_task_start_failed = Signal(providing_args=["periodic_task", "history"]) diff --git a/lib/pipeline/contrib/periodic_task/tasks.py b/lib/pipeline/contrib/periodic_task/tasks.py deleted file mode 100644 index 4264a73..0000000 --- a/lib/pipeline/contrib/periodic_task/tasks.py +++ /dev/null @@ -1,168 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import datetime -import logging -import traceback - -import pytz -from celery import task -from django.utils import timezone -from django.utils.module_loading import import_string -from bamboo_engine import api as bamboo_engine_api - -from pipeline.contrib.periodic_task import signals -from pipeline.contrib.periodic_task.models import PeriodicTask, PeriodicTaskHistory -from pipeline.engine.models import FunctionSwitch -from pipeline.models import PipelineInstance -from pipeline.parser.context import get_pipeline_context -from pipeline.eri.runtime import BambooDjangoRuntime -from pipeline.contrib.periodic_task.context import ( - get_periodic_task_root_pipeline_context, - get_periodic_task_subprocess_context, -) - -logger = logging.getLogger("celery") - - -@task(ignore_result=True) -def periodic_task_start(*args, **kwargs): - try: - periodic_task = PeriodicTask.objects.get(id=kwargs["period_task_id"]) - except PeriodicTask.DoesNotExist: - # task has been deleted - return - - if FunctionSwitch.objects.is_frozen(): - PeriodicTaskHistory.objects.record_schedule( - periodic_task=periodic_task, - pipeline_instance=None, - ex_data="engine is frozen, can not start task", - start_success=False, - ) - return - - try: - tz = periodic_task.celery_task.crontab.timezone - now = datetime.datetime.now(tz=pytz.utc).astimezone(tz) - instance, _ = PipelineInstance.objects.create_instance( - template=periodic_task.template, - exec_data=periodic_task.execution_data, - spread=kwargs.get("spread", True), - name="{}_{}".format(periodic_task.name[:113], now.strftime("%Y%m%d%H%M%S")), - 
creator=periodic_task.creator, - description="periodic task instance", - ) - - signals.pre_periodic_task_start.send( - sender=PeriodicTask, periodic_task=periodic_task, pipeline_instance=instance - ) - - result = instance.start( - periodic_task.creator, check_workers=False, priority=periodic_task.priority, queue=periodic_task.queue, - ) - except Exception: - et = traceback.format_exc() - logger.error(et) - PeriodicTaskHistory.objects.record_schedule( - periodic_task=periodic_task, pipeline_instance=None, ex_data=et, start_success=False, - ) - return - - if not result.result: - PeriodicTaskHistory.objects.record_schedule( - periodic_task=periodic_task, pipeline_instance=None, ex_data=result.message, start_success=False, - ) - return - - periodic_task.total_run_count += 1 - periodic_task.last_run_at = timezone.now() - periodic_task.save() - signals.post_periodic_task_start.send(sender=PeriodicTask, periodic_task=periodic_task, pipeline_instance=instance) - - PeriodicTaskHistory.objects.record_schedule(periodic_task=periodic_task, pipeline_instance=instance, ex_data="") - - -@task(ignore_result=True) -def bamboo_engine_periodic_task_start(*args, **kwargs): - try: - periodic_task = PeriodicTask.objects.get(id=kwargs["period_task_id"]) - except PeriodicTask.DoesNotExist: - # task has been deleted - return - - try: - tz = periodic_task.celery_task.crontab.timezone - now = datetime.datetime.now(tz=pytz.utc).astimezone(tz) - instance, _ = PipelineInstance.objects.create_instance( - template=periodic_task.template, - exec_data=periodic_task.execution_data, - spread=kwargs.get("spread", True), - name="{}_{}".format(periodic_task.name[:113], now.strftime("%Y%m%d%H%M%S")), - creator=periodic_task.creator, - description="periodic task instance", - ) - - signals.pre_periodic_task_start.send( - sender=PeriodicTask, periodic_task=periodic_task, pipeline_instance=instance - ) - - # convert web pipeline to pipeline - pipeline_formator = 
import_string(periodic_task.extra_info["pipeline_formator"]) - pipeline = pipeline_formator(instance.execution_data) - - # run pipeline - instance.calculate_tree_info() - PipelineInstance.objects.filter(instance_id=instance.instance_id).update( - tree_info_id=instance.tree_info.id, - start_time=timezone.now(), - is_started=True, - executor=periodic_task.creator, - ) - root_pipeline_data = get_pipeline_context( - instance, obj_type="instance", data_type="data", username=periodic_task.creator - ) - root_pipeline_context = get_periodic_task_root_pipeline_context(root_pipeline_data) - subprocess_context = get_periodic_task_subprocess_context(root_pipeline_data) - result = bamboo_engine_api.run_pipeline( - runtime=BambooDjangoRuntime(), - pipeline=pipeline, - root_pipeline_data=root_pipeline_data, - root_pipeline_context=root_pipeline_context, - subprocess_context=subprocess_context, - queue=periodic_task.queue, - cycle_tolerate=True, - ) - except Exception: - et = traceback.format_exc() - logger.error(et) - PeriodicTaskHistory.objects.record_schedule( - periodic_task=periodic_task, pipeline_instance=None, ex_data=et, start_success=False, - ) - return - - if not result.result: - PipelineInstance.objects.filter(id=instance.instance_id).update( - start_time=None, is_started=False, executor="", - ) - PeriodicTaskHistory.objects.record_schedule( - periodic_task=periodic_task, pipeline_instance=None, ex_data=result.message, start_success=False, - ) - return - - periodic_task.total_run_count += 1 - periodic_task.last_run_at = timezone.now() - periodic_task.save() - signals.post_periodic_task_start.send(sender=PeriodicTask, periodic_task=periodic_task, pipeline_instance=instance) - - PeriodicTaskHistory.objects.record_schedule(periodic_task=periodic_task, pipeline_instance=instance, ex_data="") diff --git a/lib/pipeline/contrib/periodic_task/tests.py b/lib/pipeline/contrib/periodic_task/tests.py deleted file mode 100644 index baf1914..0000000 --- 
a/lib/pipeline/contrib/periodic_task/tests.py +++ /dev/null @@ -1,96 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import copy - -from django.test import TestCase - -from django_celery_beat.models import PeriodicTask -from pipeline.contrib.periodic_task.models import PeriodicTask as PipelinePeriodicTask -from pipeline.exceptions import InvalidOperationException - - -class PeriodicTestCase(TestCase): - def setUp(self): - self.name = "test" - self.creator = "tester" - self.extra_info = {"extra_info": "val"} - self.data = { - "constants": { - "key_1": {"value": "val_1", "show_type": "show"}, - "key_2": {"value": "val_2", "show_type": "hide"}, - } - } - self.task = self.create_a_task() - - def tearDown(self): - if self.task: - self.task = self.task.delete() - - def create_a_task(self): - return PipelinePeriodicTask.objects.create_task( - name=self.name, template=None, cron={}, data=self.data, creator=self.creator, extra_info=self.extra_info, - ) - - def test_create_task(self): - self.assertIsInstance(self.task, PipelinePeriodicTask) - self.assertIsInstance(self.task.celery_task, PeriodicTask) - self.assertEqual(self.task.name, self.name) - self.assertEqual(self.task.template, None) - self.assertEqual(self.task.creator, self.creator) - self.assertEqual(self.task.extra_info, 
self.extra_info) - self.assertEqual(self.task.cron, self.task.celery_task.crontab.__str__()) - self.assertEqual(self.task.snapshot.data, self.data) - self.assertEqual(self.task.total_run_count, 0) - self.assertEqual(self.task.last_run_at, None) - - def test_enabled(self): - self.assertEqual(self.task.enabled, self.task.celery_task.enabled) - - def test_set_enabled(self): - self.task.set_enabled(True) - self.assertTrue(self.task.enabled) - self.assertTrue(self.task.celery_task.enabled) - self.task.set_enabled(False) - self.assertFalse(self.task.enabled) - self.assertFalse(self.task.celery_task.enabled) - - def test_execution_data(self): - self.assertEqual(self.task.execution_data, self.data) - - def test_delete(self): - celery_task_id = self.task.celery_task.id - self.task.delete() - self.assertRaises(PeriodicTask.DoesNotExist, PeriodicTask.objects.get, id=celery_task_id) - self.task = None - - def test_modify_cron(self): - self.task.set_enabled(True) - self.assertRaises(InvalidOperationException, self.task.modify_cron, {}) - self.task.set_enabled(False) - self.task.modify_cron({"minite": "*/1"}) - self.assertEqual(self.task.cron, self.task.celery_task.crontab.__str__()) - - def test_modify_constants(self): - expect_constants = copy.deepcopy(self.task.execution_data["constants"]) - expect_constants["key_1"]["value"] = "val_3" - new_constants = self.task.modify_constants({"key_1": "val_3"}) - self.assertEqual(self.task.execution_data["constants"], expect_constants) - self.assertEqual(new_constants, expect_constants) - - self.task.set_enabled(True) - self.assertRaises(InvalidOperationException, self.task.modify_constants, {}) - - def test_form(self): - expect_form = {k: v for k, v in list(self.data["constants"].items()) if v["show_type"] == "show"} - self.assertEqual(self.task.form, expect_form) diff --git a/lib/pipeline/contrib/periodic_task/views.py b/lib/pipeline/contrib/periodic_task/views.py deleted file mode 100644 index 4009729..0000000 --- 
a/lib/pipeline/contrib/periodic_task/views.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/contrib/statistics/__init__.py b/lib/pipeline/contrib/statistics/__init__.py deleted file mode 100644 index af18888..0000000 --- a/lib/pipeline/contrib/statistics/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -default_app_config = "pipeline.contrib.statistics.apps.StatisticsConfig" diff --git a/lib/pipeline/contrib/statistics/admin.py b/lib/pipeline/contrib/statistics/admin.py deleted file mode 100644 index a6239d9..0000000 --- a/lib/pipeline/contrib/statistics/admin.py +++ /dev/null @@ -1,70 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from django.contrib import admin - -from .models import ComponentExecuteData, ComponentInTemplate, InstanceInPipeline, TemplateInPipeline - - -@admin.register(ComponentInTemplate) -class ComponentInTemplateAdmin(admin.ModelAdmin): - list_display = ("id", "component_code", "template_id", "node_id", "is_sub", "version") - search_fields = ( - "template_id", - "node_id", - ) - list_filter = ("component_code", "is_sub") - - -@admin.register(ComponentExecuteData) -class ComponentExecuteDataAdmin(admin.ModelAdmin): - list_display = ( - "id", - "component_code", - "instance_id", - "node_id", - "is_sub", - "started_time", - "archived_time", - "elapsed_time", - "status", - "is_skip", - "is_retry", - "version", - ) - search_fields = ( - "instance_id", - "node_id", - ) - list_filter = ( - "component_code", - "is_sub", - "status", - "is_skip", - ) - - -@admin.register(TemplateInPipeline) -class TemplateInPipelineAdmin(admin.ModelAdmin): - list_display = ("template_id", "atom_total", "subprocess_total", "gateways_total") - - search_fields = ("template_id",) - list_filter = ("template_id", "atom_total", "subprocess_total", "gateways_total") - - -@admin.register(InstanceInPipeline) -class InstanceInPipelineAdmin(admin.ModelAdmin): - list_display = ("instance_id", "atom_total", "subprocess_total", "gateways_total") - - search_fields = ("instance_id",) - list_filter = ("instance_id", "atom_total", "subprocess_total", "gateways_total") diff --git a/lib/pipeline/contrib/statistics/apps.py b/lib/pipeline/contrib/statistics/apps.py deleted file mode 100644 index 13e0f4a..0000000 --- a/lib/pipeline/contrib/statistics/apps.py +++ /dev/null @@ -1,25 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.apps import AppConfig - - -class StatisticsConfig(AppConfig): - name = "pipeline.contrib.statistics" - verbose_name = "PipelineContribStatistics" - - def ready(self): - from pipeline.contrib.statistics.signals.handlers import ( # noqa - template_post_save_handler, - pipeline_post_save_handler, - ) diff --git a/lib/pipeline/contrib/statistics/migrations/0001_initial.py b/lib/pipeline/contrib/statistics/migrations/0001_initial.py deleted file mode 100644 index 04f8655..0000000 --- a/lib/pipeline/contrib/statistics/migrations/0001_initial.py +++ /dev/null @@ -1,87 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [] - - operations = [ - migrations.CreateModel( - name="ComponentExecuteData", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), - ("tag_code", models.CharField(max_length=255, verbose_name="\u7ec4\u4ef6\u7f16\u7801")), - ("instance_id", models.CharField(max_length=32, verbose_name="\u5b9e\u4f8bID")), - ("node_id", models.CharField(max_length=32, verbose_name="\u8282\u70b9ID")), - ( - "is_sub", - models.BooleanField(default=False, verbose_name="\u662f\u5426\u5b50\u6d41\u7a0b\u5f15\u7528"), - ), - ( - "subprocess_stack", - models.TextField( - default=b"[]", - help_text="JSON \u683c\u5f0f\u7684\u5217\u8868", - verbose_name="\u5b50\u6d41\u7a0b\u5806\u6808", - ), - ), - ("begin_time", models.DateTimeField(verbose_name="\u539f\u5b50\u6267\u884c\u5f00\u59cb\u65f6\u95f4")), - ( - "end_time", - models.DateTimeField( - null=True, verbose_name="\u539f\u5b50\u6267\u884c\u7ed3\u675f\u65f6\u95f4", blank=True - ), - ), - ( - "elapse_time", - models.IntegerField(null=True, verbose_name="\u539f\u5b50\u6267\u884c\u8017\u65f6(s)", blank=True), - ), - ("status", models.BooleanField(default=False, verbose_name="\u662f\u5426\u6267\u884c\u6210\u529f")), - ("is_skip", models.BooleanField(default=False, verbose_name="\u662f\u5426\u8df3\u8fc7")), - ], - options={ - "ordering": ["-id"], - "verbose_name": "Pipeline\u539f\u5b50\u6267\u884c\u6570\u636e", - "verbose_name_plural": "Pipeline\u539f\u5b50\u6267\u884c\u6570\u636e", - }, - ), - migrations.CreateModel( - name="ComponentInTemplate", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), - ("component_code", models.CharField(max_length=255, verbose_name="\u7ec4\u4ef6\u7f16\u7801")), - ("template_id", models.CharField(max_length=32, verbose_name="\u6a21\u677fID")), - ("node_id", 
models.CharField(max_length=32, verbose_name="\u8282\u70b9ID")), - ( - "is_sub", - models.BooleanField(default=False, verbose_name="\u662f\u5426\u5b50\u6d41\u7a0b\u5f15\u7528"), - ), - ( - "subprocess_stack", - models.TextField( - default=b"[]", - help_text="JSON \u683c\u5f0f\u7684\u5217\u8868", - verbose_name="\u5b50\u6d41\u7a0b\u5806\u6808", - ), - ), - ], - options={ - "verbose_name": "Pipeline\u539f\u5b50\u5f15\u7528\u6570\u636e", - "verbose_name_plural": "Pipeline\u539f\u5b50\u5f15\u7528\u6570\u636e", - }, - ), - ] diff --git a/lib/pipeline/contrib/statistics/migrations/0002_auto_20180817_1212.py b/lib/pipeline/contrib/statistics/migrations/0002_auto_20180817_1212.py deleted file mode 100644 index 26229f5..0000000 --- a/lib/pipeline/contrib/statistics/migrations/0002_auto_20180817_1212.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("statistics", "0001_initial"), - ] - - operations = [ - migrations.RenameField(model_name="componentexecutedata", old_name="tag_code", new_name="component_code",), - ] diff --git a/lib/pipeline/contrib/statistics/migrations/0003_auto_20180821_2015.py b/lib/pipeline/contrib/statistics/migrations/0003_auto_20180821_2015.py deleted file mode 100644 index 8f76c0f..0000000 --- a/lib/pipeline/contrib/statistics/migrations/0003_auto_20180821_2015.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("statistics", "0002_auto_20180817_1212"), - ] - - operations = [ - migrations.RenameField(model_name="componentexecutedata", old_name="end_time", new_name="archived_time",), - migrations.RenameField(model_name="componentexecutedata", old_name="elapse_time", new_name="elapsed_time",), - migrations.RenameField(model_name="componentexecutedata", old_name="begin_time", new_name="started_time",), - migrations.AddField( - model_name="componentexecutedata", - name="is_retry", - field=models.BooleanField(default=False, verbose_name="\u662f\u5426\u91cd\u8bd5\u8bb0\u5f55"), - ), - ] diff --git a/lib/pipeline/contrib/statistics/migrations/0004_instanceinpipeline_templateinpipeline.py b/lib/pipeline/contrib/statistics/migrations/0004_instanceinpipeline_templateinpipeline.py deleted file mode 100644 index 5087b90..0000000 --- a/lib/pipeline/contrib/statistics/migrations/0004_instanceinpipeline_templateinpipeline.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("statistics", "0003_auto_20180821_2015"), - ] - - operations = [ - migrations.CreateModel( - name="InstanceInPipeline", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), - ("instance_id", models.IntegerField(null=True, verbose_name="\u5b9e\u4f8bID", blank=True)), - ("atom_total", models.IntegerField(null=True, verbose_name="\u539f\u5b50\u603b\u6570", blank=True)), - ( - "subprocess_total", - models.IntegerField(null=True, verbose_name="\u5b50\u6d41\u7a0b\u603b\u6570", blank=True), - ), - ("gateways_total", models.IntegerField(null=True, verbose_name="\u7f51\u5173\u603b\u6570", blank=True)), - ], - options={ - "verbose_name": "\u5b9e\u4f8b\u4f7f\u7528\u6570\u636e", - "verbose_name_plural": "\u5b9e\u4f8b\u4f7f\u7528\u6570\u636e", - }, - ), - migrations.CreateModel( - name="TemplateInPipeline", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), - ("template_id", models.IntegerField(null=True, verbose_name="\u6a21\u677fID", blank=True)), - ("atom_total", models.IntegerField(null=True, verbose_name="\u539f\u5b50\u603b\u6570", blank=True)), - ( - "subprocess_total", - models.IntegerField(null=True, verbose_name="\u5b50\u6d41\u7a0b\u603b\u6570", blank=True), - ), - ("gateways_total", models.IntegerField(null=True, verbose_name="\u7f51\u5173\u603b\u6570", blank=True)), - ], - options={ - "verbose_name": "\u6a21\u677f\u4f7f\u7528\u6570\u636e", - "verbose_name_plural": "\u6a21\u677f\u4f7f\u7528\u6570\u636e", - }, - ), - ] diff --git a/lib/pipeline/contrib/statistics/migrations/0005_init_pipeline_data.py b/lib/pipeline/contrib/statistics/migrations/0005_init_pipeline_data.py deleted file mode 100644 index 30654be..0000000 --- a/lib/pipeline/contrib/statistics/migrations/0005_init_pipeline_data.py +++ /dev/null @@ -1,19 +0,0 @@ -# -*- 
coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [("statistics", "0004_instanceinpipeline_templateinpipeline")] diff --git a/lib/pipeline/contrib/statistics/migrations/0006_auto_20181115_1208.py b/lib/pipeline/contrib/statistics/migrations/0006_auto_20181115_1208.py deleted file mode 100644 index ef322bf..0000000 --- a/lib/pipeline/contrib/statistics/migrations/0006_auto_20181115_1208.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("statistics", "0005_init_pipeline_data"), - ] - - operations = [ - migrations.AlterField( - model_name="instanceinpipeline", - name="instance_id", - field=models.CharField(max_length=255, null=True, verbose_name="\u5b9e\u4f8bID", blank=True), - ), - migrations.AlterField( - model_name="templateinpipeline", - name="template_id", - field=models.CharField(max_length=255, null=True, verbose_name="\u6a21\u677fID", blank=True), - ), - ] diff --git a/lib/pipeline/contrib/statistics/migrations/0007_init_pipeline_data.py b/lib/pipeline/contrib/statistics/migrations/0007_init_pipeline_data.py deleted file mode 100644 index 08915de..0000000 --- a/lib/pipeline/contrib/statistics/migrations/0007_init_pipeline_data.py +++ /dev/null @@ -1,78 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from django.db import migrations - -from pipeline.models import PipelineInstance, PipelineTemplate -from pipeline.contrib.statistics.models import InstanceInPipeline, TemplateInPipeline - - -def load_data(apps, schema_editor): - # 清空数据 - TemplateInPipeline.objects.all().delete() - InstanceInPipeline.objects.all().delete() - template_list = PipelineTemplate.objects.filter(is_deleted=False) - template_data = [] - for template in template_list: - template_id = template.template_id - try: - result = statistics_total(template.data) - data = TemplateInPipeline( - template_id=template_id, - atom_total=result["atom_total"], - subprocess_total=result["subprocess_total"], - gateways_total=result["gateways_total"], - ) - template_data.append(data) - except Exception: - pass - TemplateInPipeline.objects.bulk_create(template_data) - - instance_list = PipelineInstance.objects.filter(is_deleted=False) - instance_data = [] - for instance in instance_list: - instance_id = instance.instance_id - try: - result = statistics_total(instance.execution_data) - data = InstanceInPipeline( - instance_id=instance_id, - atom_total=result["atom_total"], - subprocess_total=result["subprocess_total"], - gateways_total=result["gateways_total"], - ) - instance_data.append(data) - except Exception: - pass - InstanceInPipeline.objects.bulk_create(instance_data) - - -def statistics_total(pipeline_tree): - atom_total = 0 - subprocess_total = 0 - tree_activities = pipeline_tree["activities"] - # 获取网关数量 - gateways_total = len(pipeline_tree["gateways"]) - - # 遍历activities节点 - for activity in tree_activities: - activity_type = tree_activities[activity]["type"] - if activity_type == "ServiceActivity": - atom_total += 1 - elif activity_type == "SubProcess": - subprocess_total += 1 - return {"atom_total": atom_total, "subprocess_total": subprocess_total, "gateways_total": gateways_total} - - -class Migration(migrations.Migration): - dependencies = [("statistics", "0006_auto_20181115_1208")] - 
operations = [migrations.RunPython(load_data)] diff --git a/lib/pipeline/contrib/statistics/migrations/0008_auto_20181116_1448.py b/lib/pipeline/contrib/statistics/migrations/0008_auto_20181116_1448.py deleted file mode 100644 index 34ff67b..0000000 --- a/lib/pipeline/contrib/statistics/migrations/0008_auto_20181116_1448.py +++ /dev/null @@ -1,65 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("statistics", "0007_init_pipeline_data"), - ] - - operations = [ - migrations.AlterField( - model_name="instanceinpipeline", - name="atom_total", - field=models.IntegerField(verbose_name="\u539f\u5b50\u603b\u6570"), - ), - migrations.AlterField( - model_name="instanceinpipeline", - name="gateways_total", - field=models.IntegerField(verbose_name="\u7f51\u5173\u603b\u6570"), - ), - migrations.AlterField( - model_name="instanceinpipeline", - name="instance_id", - field=models.CharField(max_length=255, verbose_name="\u5b9e\u4f8bID"), - ), - migrations.AlterField( - model_name="instanceinpipeline", - name="subprocess_total", - field=models.IntegerField(verbose_name="\u5b50\u6d41\u7a0b\u603b\u6570"), - ), - migrations.AlterField( - model_name="templateinpipeline", - name="atom_total", - field=models.IntegerField(verbose_name="\u539f\u5b50\u603b\u6570"), - ), - migrations.AlterField( - model_name="templateinpipeline", - name="gateways_total", - field=models.IntegerField(verbose_name="\u7f51\u5173\u603b\u6570"), - ), - migrations.AlterField( - model_name="templateinpipeline", - name="subprocess_total", - field=models.IntegerField(verbose_name="\u5b50\u6d41\u7a0b\u603b\u6570"), - ), - migrations.AlterField( - model_name="templateinpipeline", - name="template_id", - field=models.CharField(max_length=255, verbose_name="\u6a21\u677fID"), - ), - ] diff --git a/lib/pipeline/contrib/statistics/migrations/0009_auto_20181116_1627.py b/lib/pipeline/contrib/statistics/migrations/0009_auto_20181116_1627.py deleted file mode 100644 index e5dae2d..0000000 --- a/lib/pipeline/contrib/statistics/migrations/0009_auto_20181116_1627.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. 
-Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("statistics", "0008_auto_20181116_1448"), - ] - - operations = [ - migrations.AlterModelOptions( - name="componentintemplate", - options={ - "verbose_name": "Pipeline\u539f\u5b50\u88ab\u5f15\u7528\u6570\u636e", - "verbose_name_plural": "Pipeline\u539f\u5b50\u88ab\u5f15\u7528\u6570\u636e", - }, - ), - migrations.AlterModelOptions( - name="instanceinpipeline", - options={ - "verbose_name": "Pipeline\u5b9e\u4f8b\u5f15\u7528\u6570\u636e", - "verbose_name_plural": "Pipeline\u5b9e\u4f8b\u5f15\u7528\u6570\u636e", - }, - ), - migrations.AlterModelOptions( - name="templateinpipeline", - options={ - "verbose_name": "Pipeline\u6a21\u677f\u5f15\u7528\u6570\u636e", - "verbose_name_plural": "Pipeline\u6a21\u677f\u5f15\u7528\u6570\u636e", - }, - ), - ] diff --git a/lib/pipeline/contrib/statistics/migrations/0010_auto_20190304_1747.py b/lib/pipeline/contrib/statistics/migrations/0010_auto_20190304_1747.py deleted file mode 100644 index 60a8c8e..0000000 --- a/lib/pipeline/contrib/statistics/migrations/0010_auto_20190304_1747.py +++ /dev/null @@ -1,69 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("statistics", "0009_auto_20181116_1627"), - ] - - operations = [ - migrations.AlterModelOptions( - name="componentexecutedata", - options={ - "ordering": ["-id"], - "verbose_name": "Pipeline\u6807\u51c6\u63d2\u4ef6\u6267\u884c\u6570\u636e", - "verbose_name_plural": "Pipeline\u6807\u51c6\u63d2\u4ef6\u6267\u884c\u6570\u636e", - }, - ), - migrations.AlterModelOptions( - name="componentintemplate", - options={ - "verbose_name": "Pipeline\u6807\u51c6\u63d2\u4ef6\u88ab\u5f15\u7528\u6570\u636e", - "verbose_name_plural": "Pipeline\u6807\u51c6\u63d2\u4ef6\u88ab\u5f15\u7528\u6570\u636e", - }, - ), - migrations.AlterField( - model_name="componentexecutedata", - name="archived_time", - field=models.DateTimeField( - blank=True, null=True, verbose_name="\u6807\u51c6\u63d2\u4ef6\u6267\u884c\u7ed3\u675f\u65f6\u95f4" - ), - ), - migrations.AlterField( - model_name="componentexecutedata", - name="elapsed_time", - field=models.IntegerField( - blank=True, null=True, verbose_name="\u6807\u51c6\u63d2\u4ef6\u6267\u884c\u8017\u65f6(s)" - ), - ), - migrations.AlterField( - model_name="componentexecutedata", - name="started_time", - field=models.DateTimeField(verbose_name="\u6807\u51c6\u63d2\u4ef6\u6267\u884c\u5f00\u59cb\u65f6\u95f4"), - ), - migrations.AlterField( - model_name="instanceinpipeline", - name="atom_total", - 
field=models.IntegerField(verbose_name="\u6807\u51c6\u63d2\u4ef6\u603b\u6570"), - ), - migrations.AlterField( - model_name="templateinpipeline", - name="atom_total", - field=models.IntegerField(verbose_name="\u6807\u51c6\u63d2\u4ef6\u603b\u6570"), - ), - ] diff --git a/lib/pipeline/contrib/statistics/migrations/0011_auto_20200217_0822.py b/lib/pipeline/contrib/statistics/migrations/0011_auto_20200217_0822.py deleted file mode 100644 index 57b2a6c..0000000 --- a/lib/pipeline/contrib/statistics/migrations/0011_auto_20200217_0822.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.23 on 2020-02-17 08:22 -from __future__ import unicode_literals - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("statistics", "0010_auto_20190304_1747"), - ] - - operations = [ - migrations.AddField( - model_name="componentexecutedata", - name="version", - field=models.CharField(default="legacy", max_length=255, verbose_name="插件版本"), - ), - migrations.AddField( - model_name="componentintemplate", - name="version", - field=models.CharField(default="legacy", max_length=255, verbose_name="插件版本"), - ), - migrations.AlterField( - model_name="componentexecutedata", - name="subprocess_stack", - field=models.TextField(default="[]", help_text="JSON 格式的列表", verbose_name="子流程堆栈"), - ), - migrations.AlterField( - model_name="componentintemplate", - name="subprocess_stack", - field=models.TextField(default="[]", help_text="JSON 格式的列表", verbose_name="子流程堆栈"), - ), - ] diff --git a/lib/pipeline/contrib/statistics/migrations/0012_auto_20201123_1552.py b/lib/pipeline/contrib/statistics/migrations/0012_auto_20201123_1552.py deleted file mode 100644 index 0869b87..0000000 --- a/lib/pipeline/contrib/statistics/migrations/0012_auto_20201123_1552.py +++ /dev/null @@ -1,25 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.29 on 2020-11-23 07:52 -from __future__ import unicode_literals - -from django.db 
import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("statistics", "0011_auto_20200217_0822"), - ] - - operations = [ - migrations.AlterField( - model_name="instanceinpipeline", - name="instance_id", - field=models.CharField(db_index=True, max_length=255, verbose_name="实例ID"), - ), - migrations.AlterField( - model_name="templateinpipeline", - name="template_id", - field=models.CharField(db_index=True, max_length=255, verbose_name="模板ID"), - ), - ] diff --git a/lib/pipeline/contrib/statistics/migrations/0013_auto_20201201_1506.py b/lib/pipeline/contrib/statistics/migrations/0013_auto_20201201_1506.py deleted file mode 100644 index acd0544..0000000 --- a/lib/pipeline/contrib/statistics/migrations/0013_auto_20201201_1506.py +++ /dev/null @@ -1,25 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.29 on 2020-12-01 07:06 -from __future__ import unicode_literals - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("statistics", "0012_auto_20201123_1552"), - ] - - operations = [ - migrations.AlterField( - model_name="componentexecutedata", - name="component_code", - field=models.CharField(db_index=True, max_length=255, verbose_name="组件编码"), - ), - migrations.AlterField( - model_name="componentexecutedata", - name="instance_id", - field=models.CharField(db_index=True, max_length=32, verbose_name="实例ID"), - ), - ] diff --git a/lib/pipeline/contrib/statistics/migrations/__init__.py b/lib/pipeline/contrib/statistics/migrations/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/contrib/statistics/migrations/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/contrib/statistics/models.py b/lib/pipeline/contrib/statistics/models.py deleted file mode 100644 index 98d51ed..0000000 --- a/lib/pipeline/contrib/statistics/models.py +++ /dev/null @@ -1,82 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from django.db import models -from django.utils.translation import ugettext_lazy as _ - - -class ComponentInTemplate(models.Model): - component_code = models.CharField(_("组件编码"), max_length=255) - template_id = models.CharField(_("模板ID"), max_length=32) - node_id = models.CharField(_("节点ID"), max_length=32) - is_sub = models.BooleanField(_("是否子流程引用"), default=False) - subprocess_stack = models.TextField(_("子流程堆栈"), default="[]", help_text=_("JSON 格式的列表")) - version = models.CharField(_("插件版本"), max_length=255, default="legacy") - - class Meta: - verbose_name = _("Pipeline标准插件被引用数据") - verbose_name_plural = _("Pipeline标准插件被引用数据") - - def __unicode__(self): - return "{}_{}".format(self.component_code, self.template_id) - - -class ComponentExecuteData(models.Model): - component_code = models.CharField(_("组件编码"), max_length=255, db_index=True) - instance_id = models.CharField(_("实例ID"), max_length=32, db_index=True) - node_id = models.CharField(_("节点ID"), max_length=32) - is_sub = models.BooleanField(_("是否子流程引用"), default=False) - subprocess_stack = models.TextField(_("子流程堆栈"), default="[]", help_text=_("JSON 格式的列表")) - started_time = models.DateTimeField(_("标准插件执行开始时间")) - archived_time = models.DateTimeField(_("标准插件执行结束时间"), null=True, blank=True) - elapsed_time = models.IntegerField(_("标准插件执行耗时(s)"), null=True, blank=True) - status = models.BooleanField(_("是否执行成功"), default=False) - is_skip = models.BooleanField(_("是否跳过"), default=False) - is_retry = models.BooleanField(_("是否重试记录"), default=False) - version = models.CharField(_("插件版本"), max_length=255, default="legacy") - - class Meta: - verbose_name = _("Pipeline标准插件执行数据") - verbose_name_plural = _("Pipeline标准插件执行数据") - ordering = ["-id"] - - def __unicode__(self): - return "{}_{}".format(self.component_code, self.instance_id) - - -class TemplateInPipeline(models.Model): - template_id = models.CharField(_("模板ID"), max_length=255, db_index=True) - atom_total = models.IntegerField(_("标准插件总数")) - 
subprocess_total = models.IntegerField(_("子流程总数")) - gateways_total = models.IntegerField(_("网关总数")) - - class Meta: - verbose_name = _("Pipeline模板引用数据") - verbose_name_plural = _("Pipeline模板引用数据") - - def __unicode__(self): - return "{}_{}_{}_{}".format(self.template_id, self.atom_total, self.subprocess_total, self.gateways_total) - - -class InstanceInPipeline(models.Model): - instance_id = models.CharField(_("实例ID"), max_length=255, db_index=True) - atom_total = models.IntegerField(_("标准插件总数")) - subprocess_total = models.IntegerField(_("子流程总数")) - gateways_total = models.IntegerField(_("网关总数")) - - class Meta: - verbose_name = _("Pipeline实例引用数据") - verbose_name_plural = _("Pipeline实例引用数据") - - def __unicode__(self): - return "{}_{}_{}_{}".format(self.instance_id, self.atom_total, self.subprocess_total, self.gateways_total) diff --git a/lib/pipeline/contrib/statistics/signals/__init__.py b/lib/pipeline/contrib/statistics/signals/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/contrib/statistics/signals/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" diff --git a/lib/pipeline/contrib/statistics/signals/handlers.py b/lib/pipeline/contrib/statistics/signals/handlers.py deleted file mode 100644 index f7fa84d..0000000 --- a/lib/pipeline/contrib/statistics/signals/handlers.py +++ /dev/null @@ -1,112 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging -import ujson as json - -from django.db.models.signals import post_save -from django.dispatch import receiver - -from pipeline.component_framework.constants import LEGACY_PLUGINS_VERSION -from pipeline.contrib.statistics.models import ( - ComponentInTemplate, - TemplateInPipeline, -) -from pipeline.contrib.statistics.tasks import pipeline_post_save_statistics_task, pipeline_archive_statistics_task -from pipeline.contrib.statistics.utils import count_pipeline_tree_nodes -from pipeline.core.constants import PE -from pipeline.models import PipelineInstance, PipelineTemplate -from pipeline.signals import post_pipeline_finish, post_pipeline_revoke - -logger = logging.getLogger("root") - - -@receiver(post_save, sender=PipelineTemplate) -def template_post_save_handler(sender, instance, created, **kwargs): - """ - 模板执行保存处理 - :param sender: - :param instance: 任务实例 Instance.Object对象 - :param created: 是否是创建(可为更新) - :param kwargs: 参数序列 - :return: - """ - template = 
instance - template_id = template.template_id - # 删除原先该项模板数据(无论是更新还是创建,都需要重新创建统计数据) - ComponentInTemplate.objects.filter(template_id=template_id).delete() - data = template.data - component_list = [] - # 任务节点引用标准插件统计(包含间接通过子流程引用) - for act_id, act in data[PE.activities].items(): - # 标准插件节点直接引用 - if act["type"] == PE.ServiceActivity: - component = ComponentInTemplate( - component_code=act["component"]["code"], - template_id=template_id, - node_id=act_id, - version=act["component"].get("version", LEGACY_PLUGINS_VERSION), - ) - component_list.append(component) - # 子流程节点间接引用 - else: - components = ComponentInTemplate.objects.filter(template_id=act["template_id"]).values( - "subprocess_stack", "component_code", "node_id", "version" - ) - for component_sub in components: - # 子流程的执行堆栈(子流程的执行过程) - stack = json.loads(component_sub["subprocess_stack"]) - # 添加节点id - stack.insert(0, act_id) - component = ComponentInTemplate( - component_code=component_sub["component_code"], - template_id=template_id, - node_id=component_sub["node_id"], - is_sub=True, - subprocess_stack=json.dumps(stack), - version=component_sub["version"], - ) - component_list.append(component) - ComponentInTemplate.objects.bulk_create(component_list) - - # 统计流程标准插件个数,子流程个数,网关个数 - atom_total, subprocess_total, gateways_total = count_pipeline_tree_nodes(template.data) - TemplateInPipeline.objects.update_or_create( - template_id=template_id, - defaults={"atom_total": atom_total, "subprocess_total": subprocess_total, "gateways_total": gateways_total}, - ) - - -@receiver(post_save, sender=PipelineInstance) -def pipeline_post_save_handler(sender, instance, created, **kwargs): - try: - if created: - pipeline_post_save_statistics_task.delay(instance_id=instance.instance_id) - except Exception: - logger.exception("pipeline_post_save_handler[instance_id={}] send message error".format(instance.id)) - - -@receiver(post_pipeline_finish, sender=PipelineInstance) -def pipeline_post_finish_handler(sender, instance_id, 
**kwargs): - try: - pipeline_archive_statistics_task.delay(instance_id=instance_id) - except Exception: - logger.exception("pipeline_post_finish_handler[instance_id={}] send message error".format(instance_id)) - - -@receiver(post_pipeline_revoke, sender=PipelineInstance) -def pipeline_post_revoke_handler(sender, instance_id, **kwargs): - try: - pipeline_archive_statistics_task.delay(instance_id=instance_id) - except Exception: - logger.exception("pipeline_post_revoke_handler[instance_id={}] send message error".format(instance_id)) diff --git a/lib/pipeline/contrib/statistics/tasks.py b/lib/pipeline/contrib/statistics/tasks.py deleted file mode 100644 index 46b0230..0000000 --- a/lib/pipeline/contrib/statistics/tasks.py +++ /dev/null @@ -1,175 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -import logging -import ujson as json -from copy import deepcopy - -from celery import task -from bamboo_engine import api as bamboo_engine_api - -from pipeline.component_framework.constants import LEGACY_PLUGINS_VERSION -from pipeline.contrib.statistics.models import ( - ComponentExecuteData, - InstanceInPipeline, -) -from pipeline.contrib.statistics.utils import count_pipeline_tree_nodes -from pipeline.core.constants import PE -from pipeline.engine import api as pipeline_api -from pipeline.engine import states -from pipeline.engine.exceptions import InvalidOperationException -from pipeline.engine.utils import calculate_elapsed_time -from pipeline.models import PipelineInstance -from pipeline.eri.runtime import BambooDjangoRuntime - -logger = logging.getLogger("celery") - - -def recursive_collect_components(activities, status_tree, instance_id, stack=None, engine_ver=1): - """ - @summary 递归流程树,获取所有执行成功/失败的插件 - @param activities: 当前流程树的任务节点信息 - @param status_tree: 当前流程树的任务节点状态 - @param instance_id: 根流程的示例 instance_id - @param stack: 子流程堆栈 - """ - if stack is None: - stack = [] - is_sub = False - else: - is_sub = True - component_list = [] - for act_id, act in activities.items(): - # 只有执行了才会查询到 status,兼容中途撤销的任务 - if act_id in status_tree: - exec_act = status_tree[act_id] - # 属于标准插件节点 - if act[PE.type] == PE.ServiceActivity: - if exec_act["state"] in states.ARCHIVED_STATES: - create_kwargs = { - "component_code": act["component"]["code"], - "instance_id": instance_id, - "is_sub": is_sub, - "node_id": act_id, - "subprocess_stack": json.dumps(stack), - "started_time": exec_act["started_time"], - "archived_time": exec_act["archived_time"], - "elapsed_time": exec_act.get( - "elapsed_time", calculate_elapsed_time(exec_act["started_time"], exec_act["archived_time"]) - ), - "is_skip": exec_act["skip"], - "is_retry": False, - "status": exec_act["state"] == "FINISHED", - "version": act["component"].get("version", LEGACY_PLUGINS_VERSION), - } - 
component_list.append(ComponentExecuteData(**create_kwargs)) - if exec_act["retry"] > 0: - # 需要通过执行历史获得 - if engine_ver == 1: - history_list = pipeline_api.get_activity_histories(act_id) - else: - history_list_result = bamboo_engine_api.get_node_short_histories( - runtime=BambooDjangoRuntime(), node_id=act_id - ) - history_list = history_list_result.data if history_list_result.result else [] - - for history in history_list: - create_kwargs.update( - { - "started_time": history["started_time"], - "archived_time": history["archived_time"], - "elapsed_time": history.get( - "elapsed_time", - calculate_elapsed_time(history["started_time"], history["archived_time"]), - ), - "is_retry": True, - "is_skip": False, - "status": False, - } - ) - component_list.append(ComponentExecuteData(**create_kwargs)) - # 子流程的执行堆栈(子流程的执行过程) - elif act[PE.type] == PE.SubProcess: - # 递归子流程树 - sub_activities = act[PE.pipeline][PE.activities] - # 防止stack共用 - copied_stack = deepcopy(stack) - copied_stack.insert(0, act_id) - component_list += recursive_collect_components( - sub_activities, exec_act["children"], instance_id, copied_stack - ) - return component_list - - -@task -def pipeline_post_save_statistics_task(instance_id): - instance = PipelineInstance.objects.get(instance_id=instance_id) - # 统计流程标准插件个数,子流程个数,网关个数 - try: - atom_total, subprocess_total, gateways_total = count_pipeline_tree_nodes(instance.execution_data) - InstanceInPipeline.objects.update_or_create( - instance_id=instance_id, - defaults={ - "atom_total": atom_total, - "subprocess_total": subprocess_total, - "gateways_total": gateways_total, - }, - ) - except Exception as e: - logger.error( - ( - "pipeline_post_save_handler save InstanceInPipeline[instance_id={instance_id}] " "raise error: {error}" - ).format(instance_id=instance_id, error=e) - ) - - -@task -def pipeline_archive_statistics_task(instance_id): - instance = PipelineInstance.objects.get(instance_id=instance_id) - engine_ver = 1 - # 获得任务实例的执行树 - try: - status_tree 
= pipeline_api.get_status_tree(instance_id, 99) - except InvalidOperationException: - engine_ver = 2 - status_tree_result = bamboo_engine_api.get_pipeline_states( - runtime=BambooDjangoRuntime(), root_id=instance_id, flat_children=False - ) - if not status_tree_result.result: - logger.error( - "pipeline_archive_statistics_task bamboo_engine_api.get_pipeline_states fail: {}".format( - status_tree_result.result.exc_trace - ) - ) - return - status_tree = status_tree_result.data[instance_id] - - # 删除原有标准插件数据 - ComponentExecuteData.objects.filter(instance_id=instance_id).delete() - # 获得任务实例的执行数据 - data = instance.execution_data - try: - component_list = recursive_collect_components( - activities=data[PE.activities], - status_tree=status_tree["children"], - instance_id=instance_id, - engine_ver=engine_ver, - ) - ComponentExecuteData.objects.bulk_create(component_list) - except Exception: - logger.exception( - ("pipeline_post_save_handler save ComponentExecuteData[instance_id={instance_id}] raise error").format( - instance_id=instance_id - ) - ) diff --git a/lib/pipeline/contrib/statistics/utils.py b/lib/pipeline/contrib/statistics/utils.py deleted file mode 100644 index 2a9b48d..0000000 --- a/lib/pipeline/contrib/statistics/utils.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations under the License. -""" - -from pipeline.core.constants import PE - - -def count_pipeline_tree_nodes(pipeline_tree): - gateways_total = len(pipeline_tree["gateways"]) - activities = pipeline_tree["activities"] - atom_total = len([act for act in activities.values() if act["type"] == PE.ServiceActivity]) - subprocess_total = len([act for act in activities.values() if act["type"] == PE.SubProcess]) - return atom_total, subprocess_total, gateways_total diff --git a/lib/pipeline/core/__init__.py b/lib/pipeline/core/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/core/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/core/constants.py b/lib/pipeline/core/constants.py deleted file mode 100644 index e90fe2f..0000000 --- a/lib/pipeline/core/constants.py +++ /dev/null @@ -1,82 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -class PipelineElement(object): - ServiceActivity = "ServiceActivity" - SubProcess = "SubProcess" - ExclusiveGateway = "ExclusiveGateway" - ParallelGateway = "ParallelGateway" - ConditionalParallelGateway = "ConditionalParallelGateway" - ConvergeGateway = "ConvergeGateway" - EmptyStartEvent = "EmptyStartEvent" - EmptyEndEvent = "EmptyEndEvent" - - Activities = {ServiceActivity} - TaskNodes = {ServiceActivity, SubProcess} - BranchGateways = {ExclusiveGateway, ParallelGateway, ConditionalParallelGateway} - Gateways = {ExclusiveGateway, ParallelGateway, ConditionalParallelGateway, ConvergeGateway} - - pipeline = "pipeline" - id = "id" - type = "type" - start_event = "start_event" - end_event = "end_event" - activities = "activities" - flows = "flows" - gateways = "gateways" - constants = "constants" - conditions = "conditions" - incoming = "incoming" - outgoing = "outgoing" - source = "source" - target = "target" - data = "data" - component = "component" - evaluate = "evaluate" - name = "name" - stage_name = "stage_name" - failure_handler = "failure_handler" - inputs = "inputs" - outputs = "outputs" - pre_render_keys = "pre_render_keys" - source_act = "source_act" - source_key = "source_key" - code = "code" - error_ignorable = "error_ignorable" - skippable = "skippable" - # 兼容3.3.X不规范的命名 - skippable_old = "isSkipped" - retryable = "retryable" - # 兼容3.3.X不规范的命名 - retryable_old = "can_retry" - timeout = "timeout" - loop_times = "loop_times" - converge_gateway_id = 
"converge_gateway_id" - is_param = "is_param" - value = "value" - params = "params" - is_default = "is_default" - optional = "optional" - template_id = "template_id" - plain = "plain" - splice = "splice" - lazy = "lazy" - version = "version" - subprocess_detail = "subprocess_detail" - custom_type = "custom_type" - - -PE = PipelineElement() - -ESCAPED_CHARS = {"\n": r"\n", "\r": r"\r", "\t": r"\t"} diff --git a/lib/pipeline/core/data/__init__.py b/lib/pipeline/core/data/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/core/data/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/core/data/base.py b/lib/pipeline/core/data/base.py deleted file mode 100644 index 7056c41..0000000 --- a/lib/pipeline/core/data/base.py +++ /dev/null @@ -1,88 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import copy - -import ujson as json - -from pipeline import exceptions -from pipeline.utils.collections import FancyDict -from pipeline.utils.utils import convert_bytes_to_str - - -class DataObject(object): - def __init__(self, inputs, outputs=None): - if not isinstance(inputs, dict): - raise exceptions.DataTypeErrorException("inputs is not dict") - self.inputs = FancyDict(inputs) - if outputs is None: - outputs = {} - if not isinstance(outputs, dict): - raise exceptions.DataTypeErrorException("outputs is not dict") - self.outputs = FancyDict(outputs) - - def get_inputs(self): - return self.inputs - - def get_outputs(self): - return self.outputs - - def get_one_of_inputs(self, key, default=None): - return self.inputs.get(key, default) - - def get_one_of_outputs(self, key, default=None): - return self.outputs.get(key, default) - - def set_outputs(self, key, value): - self.outputs.update({key: value}) - return True - - def reset_outputs(self, outputs): - if not isinstance(outputs, dict): - raise exceptions.DataTypeErrorException("outputs is not dict") - self.outputs = FancyDict(outputs) - return True - - def update_outputs(self, dic): - self.outputs.update(dic) - - def inputs_copy(self): - return copy.deepcopy(self.inputs) - - def outputs_copy(self): - return copy.deepcopy(self.outputs) - - def override_inputs(self, inputs): - if not isinstance(inputs, FancyDict): - inputs = FancyDict(inputs) - self.inputs = inputs - - def override_outputs(self, outputs): - if not isinstance(outputs, FancyDict): - outputs = FancyDict(outputs) - self.outputs = outputs - - def serializer(self): - 
result = {"inputs": self.inputs, "outputs": self.outputs} - return json.dumps(result) - - def __setstate__(self, state): - # py2 pickle dumps data compatible - input_key = b"inputs" if b"inputs" in state else "inputs" - outputs_key = b"outputs" if b"outputs" in state else "outputs" - - self.inputs = FancyDict(convert_bytes_to_str(state[input_key])) - self.outputs = FancyDict(convert_bytes_to_str(state[outputs_key])) - - def __str__(self): - return "".format(self.inputs, self.outputs) diff --git a/lib/pipeline/core/data/context.py b/lib/pipeline/core/data/context.py deleted file mode 100644 index 949a0f6..0000000 --- a/lib/pipeline/core/data/context.py +++ /dev/null @@ -1,155 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from copy import deepcopy -from pprint import pformat - -from pipeline.exceptions import InvalidOperationException, ReferenceNotExistError - - -class Context(object): - def __init__(self, act_outputs, output_key=None, scope=None): - self.variables = scope or {} - self.act_outputs = act_outputs - self._output_key = set(output_key or []) - self._change_keys = set() - self._raw_variables = None - - def extract_output(self, activity, set_miss=True): - self.extract_output_from_data(activity.id, activity.data, set_miss=set_miss) - - def extract_output_from_data(self, activity_id, data, set_miss=True): - if activity_id in self.act_outputs: - global_outputs = self.act_outputs[activity_id] - output = data.get_outputs() - for key in global_outputs: - # set value to key if can not find - # e.g. key: result - # e.g. global_outputs[key]: result_5hoi2 - if key not in output and not set_miss: - continue - - self.variables[global_outputs[key]] = output.get(key, global_outputs[key]) - self.change_keys.add(global_outputs[key]) - - def get(self, key): - try: - return self.variables[key] - except KeyError: - raise ReferenceNotExistError('reference "%s" does not exist.' 
% key) - - def set_global_var(self, key, val): - self.variables[key] = val - self.change_keys.add(key) - - def update_global_var(self, var_dict): - self.variables.update(var_dict) - self.change_keys.update(list(var_dict.keys())) - - def mark_as_output(self, key): - self._output_key.add(key) - - def write_output(self, pipeline): - from pipeline.core.data import var - - data = pipeline.data - for key in self._output_key: - try: - value = self.get(key) - except ReferenceNotExistError: - value = key - - if issubclass(value.__class__, var.Variable): - value = value.get() - # break circle - data.set_outputs(key, value) - - def duplicate_variables(self): - self._raw_variables = deepcopy(self.variables) - - def clear(self): - self.variables.clear() - if self.raw_variables: - self.raw_variables.clear() - - def recover_variable(self): - if self.raw_variables is None: - raise InvalidOperationException("make sure duplicate_variables() is called before do recover") - - # collect all act output key - act_outputs_keys = set() - for global_outputs in list(self.act_outputs.values()): - for output_key in list(global_outputs.values()): - act_outputs_keys.add(output_key) - - # recover to Variable for which key not in act output - for key, var in list(self.raw_variables.items()): - if key not in act_outputs_keys: - self.variables[key] = deepcopy(var) - - def clear_change_keys(self): - if hasattr(self, "_change_keys"): - self.change_keys.clear() - - def sync_change(self, context): - from pipeline.core.data.var import SpliceVariable - - # sync obvious change keys - for k in context.change_keys: - self.set_global_var(k, context.get(k)) - - # sync resolved splice value - for k, child_v in context.variables.items(): - parent_v = self.variables.get(k) - if isinstance(child_v, SpliceVariable) and isinstance(parent_v, SpliceVariable): - # if var is resolved in child - if parent_v._value is None and child_v._value is not None: - parent_v._value = child_v._value - - def __repr__(self): - return 
"variables:{}\nact_outputs:{}\n_output_key:{}".format( - pformat(self.variables), pformat(self.act_outputs), pformat(self._output_key) - ) - - def __str__(self): - return self.__repr__() - - def __unicode__(self): - return self.__repr__() - - @property - def change_keys(self): - if not hasattr(self, "_change_keys"): - self._change_keys = set() - - return self._change_keys - - @property - def raw_variables(self): - if not hasattr(self, "_raw_variables"): - self._raw_variables = None - - return self._raw_variables - - -class OutputRef(object): - def __init__(self, key, context): - self.key = key - self.context = context - - @property - def value(self): - return self.context.get(self.key) - - def __deepcopy__(self, memodict={}): - return self diff --git a/lib/pipeline/core/data/converter.py b/lib/pipeline/core/data/converter.py deleted file mode 100644 index 311624d..0000000 --- a/lib/pipeline/core/data/converter.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from pipeline import exceptions -from pipeline.core.data.var import PlainVariable, SpliceVariable, Variable -from pipeline.core.data import library - - -def get_variable(key, info, context, pipeline_data): - if isinstance(info["value"], Variable): - variable = info["value"] - else: - if info.get("type", "plain") == "plain": - variable = PlainVariable(key, info["value"]) - elif info["type"] == "splice": - variable = SpliceVariable(key, info["value"], context) - elif info["type"] == "lazy": - variable = library.VariableLibrary.get_var_class(info["custom_type"])( - key, info["value"], context, pipeline_data - ) - else: - raise exceptions.DataTypeErrorException( - "Unknown type: %s, which should be one of [plain, splice, lazy]" % info["type"] - ) - return variable diff --git a/lib/pipeline/core/data/expression.py b/lib/pipeline/core/data/expression.py deleted file mode 100644 index c479dd7..0000000 --- a/lib/pipeline/core/data/expression.py +++ /dev/null @@ -1,166 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import copy -import re -import logging - -from mako.template import Template -from mako import lexer, codegen -from mako.exceptions import MakoException - -from pipeline import exceptions -from pipeline.conf.default_settings import MAKO_SAFETY_CHECK -from pipeline.core.data.sandbox import SANDBOX -from pipeline.core.data import mako_safety -from pipeline.utils.mako_utils.checker import check_mako_template_safety -from pipeline.utils.mako_utils.exceptions import ForbiddenMakoTemplateException - - -logger = logging.getLogger("root") -# find mako template(format is ${xxx},and ${}# not in xxx, # may raise memory error) -TEMPLATE_PATTERN = re.compile(r"\${[^$#]+}") - - -def format_constant_key(key): - """ - @summary: format key to ${key} - @param key: - @return: - """ - return "${%s}" % key - - -def deformat_constant_key(key): - """ - @summary: deformat ${key} to key - @param key: - @return: - """ - return key[2:-1] - - -class ConstantTemplate(object): - def __init__(self, data): - self.data = data - - def get_reference(self): - reference = [] - templates = self.get_templates() - for tpl in templates: - reference += self.get_template_reference(tpl) - reference = list(set(reference)) - return reference - - def get_templates(self): - templates = [] - data = self.data - if isinstance(data, str): - templates += self.get_string_templates(data) - if isinstance(data, (list, tuple)): - for item in data: - templates += ConstantTemplate(item).get_templates() - if isinstance(data, dict): - for value in list(data.values()): - templates += ConstantTemplate(value).get_templates() - return list(set(templates)) - - def resolve_data(self, value_maps): - data = self.data - if isinstance(data, str): - return self.resolve_string(data, value_maps) - if isinstance(data, list): - ldata = [""] * len(data) - for index, item in enumerate(data): - ldata[index] = ConstantTemplate(copy.deepcopy(item)).resolve_data(value_maps) - return ldata - if isinstance(data, tuple): - ldata = [""] * 
len(data) - for index, item in enumerate(data): - ldata[index] = ConstantTemplate(copy.deepcopy(item)).resolve_data(value_maps) - return tuple(ldata) - if isinstance(data, dict): - for key, value in list(data.items()): - data[key] = ConstantTemplate(copy.deepcopy(value)).resolve_data(value_maps) - return data - return data - - @staticmethod - def get_string_templates(string): - return list(set(TEMPLATE_PATTERN.findall(string))) - - @staticmethod - def get_template_reference(template): - lex = lexer.Lexer(template) - - try: - node = lex.parse() - except MakoException as e: - logger.warning("pipeline get template[{}] reference error[{}]".format(template, e)) - return [] - - # Dummy compiler. _Identifiers class requires one - # but only interested in the reserved_names field - def compiler(): - return None - - compiler.reserved_names = set() - identifiers = codegen._Identifiers(compiler, node) - - return list(identifiers.undeclared) - - @staticmethod - def resolve_string(string, value_maps): - if not isinstance(string, str): - return string - templates = ConstantTemplate.get_string_templates(string) - - # TODO keep render return object, here only process simple situation - if len(templates) == 1 and templates[0] == string and deformat_constant_key(string) in value_maps: - return value_maps[deformat_constant_key(string)] - - for tpl in templates: - if MAKO_SAFETY_CHECK: - try: - check_mako_template_safety( - tpl, mako_safety.SingleLineNodeVisitor(), mako_safety.SingleLinCodeExtractor() - ) - except ForbiddenMakoTemplateException as e: - logger.warning("forbidden template: {}, exception: {}".format(tpl, e)) - continue - except Exception: - logger.exception("{} safety check error.".format(tpl)) - continue - resolved = ConstantTemplate.resolve_template(tpl, value_maps) - string = string.replace(tpl, resolved) - return string - - @staticmethod - def resolve_template(template, value_maps): - data = {} - data.update(SANDBOX) - data.update(value_maps) - if not 
isinstance(template, str): - raise exceptions.ConstantTypeException("constant resolve error, template[%s] is not a string" % template) - try: - tm = Template(template) - except (MakoException, SyntaxError) as e: - logger.error("pipeline resolve template[{}] error[{}]".format(template, e)) - return template - try: - resolved = tm.render_unicode(**data) - except Exception as e: - logger.warning("constant content({}) is invalid, data({}), error: {}".format(template, data, e)) - return template - else: - return resolved diff --git a/lib/pipeline/core/data/hydration.py b/lib/pipeline/core/data/hydration.py deleted file mode 100644 index 8c86b6f..0000000 --- a/lib/pipeline/core/data/hydration.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -def hydrate_node_data(node): - """ - 替换当前节点的 data 中的变量 - :param node: - :return: - """ - data = node.data - hydrated = hydrate_data(data.get_inputs()) - data.get_inputs().update(hydrated) - - -def hydrate_data(data): - hydrated = {} - for k, v in list(data.items()): - from pipeline.core.data import var - - if issubclass(v.__class__, var.Variable): - hydrated[k] = v.get() - else: - hydrated[k] = v - return hydrated - - -def hydrate_subprocess_context(subprocess_act): - # hydrate data - hydrate_node_data(subprocess_act) - - # context injection - data = subprocess_act.pipeline.data - context = subprocess_act.pipeline.context - for k, v in list(data.get_inputs().items()): - context.set_global_var(k, v) - - hydrated = hydrate_data(context.variables) - context.update_global_var(hydrated) diff --git a/lib/pipeline/core/data/library.py b/lib/pipeline/core/data/library.py deleted file mode 100644 index f53941d..0000000 --- a/lib/pipeline/core/data/library.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -class VariableLibrary(object): - variables = {} - - @classmethod - def get_var_class(cls, code): - return cls.variables.get(code) - - @classmethod - def get_var(cls, code, name, data, context=None, pipeline_data=None): - if not context: - return cls.variables[code](name, data) - return cls.variables[code](name, data, context, pipeline_data) diff --git a/lib/pipeline/core/data/mako_safety.py b/lib/pipeline/core/data/mako_safety.py deleted file mode 100644 index db5f75f..0000000 --- a/lib/pipeline/core/data/mako_safety.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from ast import NodeVisitor - -from mako import parsetree - -from pipeline.utils.mako_utils.code_extract import MakoNodeCodeExtractor -from pipeline.utils.mako_utils.exceptions import ForbiddenMakoTemplateException - - -class SingleLineNodeVisitor(NodeVisitor): - """ - 遍历语法树节点,遇到魔术方法使用或 import 时,抛出异常 - """ - - def __init__(self, *args, **kwargs): - super(SingleLineNodeVisitor, self).__init__(*args, **kwargs) - - def visit_Attribute(self, node): - if node.attr.startswith("__"): - raise ForbiddenMakoTemplateException("can not access private attribute") - - def visit_Name(self, node): - if node.id.startswith("__"): - raise ForbiddenMakoTemplateException("can not access private method") - - def visit_Import(self, node): - raise ForbiddenMakoTemplateException("can not use import statement") - - def visit_ImportFrom(self, node): - self.visit_Import(node) - - -class SingleLinCodeExtractor(MakoNodeCodeExtractor): - def extract(self, node): - if isinstance(node, parsetree.Code) or isinstance(node, parsetree.Expression): - return node.text - elif isinstance(node, parsetree.Text): - return None - else: - raise ForbiddenMakoTemplateException("Unsupported node: [{}]".format(node.__class__.__name__)) diff --git a/lib/pipeline/core/data/sandbox.py b/lib/pipeline/core/data/sandbox.py deleted file mode 100644 index 7e47e93..0000000 --- a/lib/pipeline/core/data/sandbox.py +++ /dev/null @@ -1,74 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -# mock str return value of Built-in Functions,make str(func) return "func" rather than "" - -import builtins -import importlib - -from pipeline.conf import default_settings - -SANDBOX = {} - - -class MockStrMeta(type): - def __new__(cls, name, bases, attrs): - new_cls = super(MockStrMeta, cls).__new__(cls, name, bases, attrs) - SANDBOX.update({new_cls.str_return: new_cls}) - return new_cls - - def __str__(cls): - return cls.str_return - - def __call__(cls, *args, **kwargs): - return cls.call(*args, **kwargs) - - -def _shield_words(sandbox, words): - for shield_word in words: - sandbox[shield_word] = None - - -class ModuleObject: - def __init__(self, sub_paths, module): - if len(sub_paths) == 1: - setattr(self, sub_paths[0], module) - return - setattr(self, sub_paths[0], ModuleObject(sub_paths[1:], module)) - - -def _import_modules(sandbox, modules): - for mod_path, alias in modules.items(): - mod = importlib.import_module(mod_path) - sub_paths = alias.split(".") - if len(sub_paths) == 1: - sandbox[alias] = mod - else: - sandbox[sub_paths[0]] = ModuleObject(sub_paths[1:], mod) - - -def _mock_builtins(): - """ - @summary: generate mock class of built-in functions like id,int - """ - for func_name in dir(builtins): - if func_name.lower() == func_name and not func_name.startswith("_"): - new_func_name = "Mock{}".format(func_name.capitalize()) - MockStrMeta(new_func_name, (object,), {"call": getattr(builtins, func_name), "str_return": func_name}) - - -_mock_builtins() - -_shield_words(SANDBOX, default_settings.MAKO_SANDBOX_SHIELD_WORDS) - -_import_modules(SANDBOX, 
default_settings.MAKO_SANDBOX_IMPORT_MODULES) diff --git a/lib/pipeline/core/data/schemas.py b/lib/pipeline/core/data/schemas.py deleted file mode 100644 index 27c6cda..0000000 --- a/lib/pipeline/core/data/schemas.py +++ /dev/null @@ -1,18 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -BASE_PARAM = { - "type": "object", - "required": ["inputs", "outputs"], - "properties": {"inputs": {"type": "object"}, "outputs": {"type": "object"}}, -} diff --git a/lib/pipeline/core/data/var.py b/lib/pipeline/core/data/var.py deleted file mode 100644 index 0cb8ece..0000000 --- a/lib/pipeline/core/data/var.py +++ /dev/null @@ -1,149 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging -from abc import abstractmethod - -from pipeline import exceptions -from pipeline.conf import settings -from pipeline.core.data import library -from pipeline.core.data.context import OutputRef -from pipeline.core.data.expression import ConstantTemplate, format_constant_key -from pipeline.core.signals import pre_variable_register - -logger = logging.getLogger("root") - - -class Variable(object): - def __init__(self, name, value): - self.name = name - self.value = value - - @abstractmethod - def get(self): - pass - - -class PlainVariable(Variable): - def __init__(self, name, value): - super(PlainVariable, self).__init__(name, value) - self.name = name - self.value = value - - def get(self): - return self.value - - def __repr__(self): - return "[plain_var] {}".format(self.name) - - def __str__(self): - return self.__repr__() - - def __unicode__(self): - return self.__repr__() - - -class SpliceVariable(Variable): - def __init__(self, name, value, context): - super(SpliceVariable, self).__init__(name, value) - self._value = None - self._build_reference(context) - - def get(self): - if not self._value: - try: - self._resolve() - except settings.VARIABLE_SPECIFIC_EXCEPTIONS as e: - logger.error("get value[{}] of Variable[{}] error[{}]".format(self.value, self.name, e)) - return "Error: {}".format(e) - except Exception as e: - logger.error("get value[{}] of Variable[{}] error[{}]".format(self.value, self.name, e)) - return self.value - return self._value - - def _build_reference(self, context): - keys = ConstantTemplate(self.value).get_reference() - refs = {} - for key in keys: - refs[key] = OutputRef(format_constant_key(key), context) - self._refs = refs - - def _resolve(self): - maps = {} - for key in self._refs: - try: - ref_val = self._refs[key].value - if issubclass(ref_val.__class__, Variable): - ref_val = ref_val.get() - except 
exceptions.ReferenceNotExistError: - continue - maps[key] = ref_val - val = ConstantTemplate(self.value).resolve_data(maps) - - self._value = val - - def __repr__(self): - return "[splice_var] {}".format(self.name) - - def __str__(self): - return self.__repr__() - - def __unicode__(self): - return self.__repr__() - - -class RegisterVariableMeta(type): - def __new__(cls, name, bases, attrs): - super_new = super(RegisterVariableMeta, cls).__new__ - - # Also ensure initialization is only performed for subclasses of Model - # (excluding Model class itself). - parents = [b for b in bases if isinstance(b, RegisterVariableMeta)] - if not parents: - return super_new(cls, name, bases, attrs) - - # Create the class - new_class = super_new(cls, name, bases, attrs) - - if not new_class.code: - raise exceptions.ConstantReferenceException("LazyVariable %s: code can't be empty." % new_class.__name__) - - pre_variable_register.send(sender=LazyVariable, variable_cls=new_class) - - library.VariableLibrary.variables[new_class.code] = new_class - - return new_class - - -class LazyVariable(SpliceVariable, metaclass=RegisterVariableMeta): - def __init__(self, name, value, context, pipeline_data): - super(LazyVariable, self).__init__(name, value, context) - self.context = context - self.pipeline_data = pipeline_data - - # variable reference resolve - def get(self): - self.value = super(LazyVariable, self).get() - try: - return self.get_value() - except settings.VARIABLE_SPECIFIC_EXCEPTIONS as e: - logger.error("get value[{}] of Variable[{}] error[{}]".format(self.value, self.name, e)) - return "Error: {}".format(e) - except Exception as e: - logger.error("get value[{}] of Variable[{}] error[{}]".format(self.value, self.name, e)) - return self.value - - # get real value by user code - @abstractmethod - def get_value(self): - pass diff --git a/lib/pipeline/core/flow/__init__.py b/lib/pipeline/core/flow/__init__.py deleted file mode 100644 index 157b9ac..0000000 --- 
a/lib/pipeline/core/flow/__init__.py +++ /dev/null @@ -1,87 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from .activity import SubProcess # noqa -from .activity import AbstractIntervalGenerator # noqa - -from .activity import ( # noqa - DefaultIntervalGenerator, - LinearIntervalGenerator, - NullIntervalGenerator, - Service, - ServiceActivity, - SquareIntervalGenerator, - StaticIntervalGenerator, -) -from .base import SequenceFlow # noqa -from .event import ( # noqa - EmptyEndEvent, - EmptyStartEvent, - EndEvent, - ExecutableEndEvent, - StartEvent, -) -from .gateway import ( # noqa - Condition, - ConditionalParallelGateway, - ConvergeGateway, - ExclusiveGateway, - ParallelGateway, -) -from .signals import post_new_end_event_register - - -class FlowNodeClsFactory(object): - nodes_cls = { - ServiceActivity.__name__: ServiceActivity, - SubProcess.__name__: SubProcess, - EmptyEndEvent.__name__: EmptyEndEvent, - EmptyStartEvent.__name__: EmptyStartEvent, - ParallelGateway.__name__: ParallelGateway, - ConditionalParallelGateway.__name__: ConditionalParallelGateway, - ExclusiveGateway.__name__: ExclusiveGateway, - ConvergeGateway.__name__: ConvergeGateway, - } - - @classmethod - def _nodes_types_filter(cls, cls_filter): - types = [] - for node_type, node_cls in 
list(cls.nodes_cls.items()): - if not cls_filter(node_cls): - types.append(node_type) - - return types - - @classmethod - def node_types_without_start_event(cls): - return cls._nodes_types_filter(cls_filter=lambda node_cls: issubclass(node_cls, StartEvent)) - - @classmethod - def node_types_without_start_end_event(cls): - return cls._nodes_types_filter( - cls_filter=lambda node_cls: issubclass(node_cls, EndEvent) or issubclass(node_cls, StartEvent) - ) - - @classmethod - def get_node_cls(cls, key): - return cls.nodes_cls.get(key) - - @classmethod - def register_node(cls, key, node_cls): - if key in cls.nodes_cls: - raise KeyError("node with key({key}) is already exist: {node}".format(key=key, node=cls.nodes_cls[key])) - - cls.nodes_cls[key] = node_cls - - if issubclass(node_cls, EndEvent): - post_new_end_event_register.send(sender=EndEvent, node_type=key, node_cls=node_cls) diff --git a/lib/pipeline/core/flow/activity/__init__.py b/lib/pipeline/core/flow/activity/__init__.py deleted file mode 100644 index 8fb98df..0000000 --- a/lib/pipeline/core/flow/activity/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from pipeline.core.flow.activity.base import * # noqa -from pipeline.core.flow.activity.base import _empty_method # noqa -from pipeline.core.flow.activity.service_activity import * # noqa -from pipeline.core.flow.activity.subprocess import * # noqa diff --git a/lib/pipeline/core/flow/activity/base.py b/lib/pipeline/core/flow/activity/base.py deleted file mode 100644 index d853659..0000000 --- a/lib/pipeline/core/flow/activity/base.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from abc import ABCMeta - -from pipeline.core.flow.base import FlowNode - - -def _empty_method(data, parent_data): - return - - -class Activity(FlowNode, metaclass=ABCMeta): - def __init__(self, id, name=None, data=None, failure_handler=None): - super(Activity, self).__init__(id, name, data) - self._failure_handler = failure_handler or _empty_method - - def next(self): - return self.outgoing.unique_one().target - - def failure_handler(self, parent_data): - return self._failure_handler(data=self.data, parent_data=parent_data) - - def skip(self): - raise NotImplementedError() - - def prepare_rerun_data(self): - raise NotImplementedError() diff --git a/lib/pipeline/core/flow/activity/service_activity.py b/lib/pipeline/core/flow/activity/service_activity.py deleted file mode 100644 index b55405d..0000000 --- a/lib/pipeline/core/flow/activity/service_activity.py +++ /dev/null @@ -1,292 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from abc import ABCMeta, abstractmethod -from copy import deepcopy - -from django.utils.translation import ugettext_lazy as _ - -from pipeline.conf import settings -from pipeline.core.flow.activity.base import Activity -from pipeline.core.flow.io import BooleanItemSchema, InputItem, IntItemSchema, OutputItem -from pipeline.utils.utils import convert_bytes_to_str - - -class Service(object, metaclass=ABCMeta): - schedule_result_attr = "__schedule_finish__" - schedule_determine_attr = "__need_schedule__" - multi_callback_determine_attr = "__multi_callback_enabled__" - InputItem = InputItem - OutputItem = OutputItem - interval = None - default_outputs = [ - OutputItem( - name=_("执行结果"), - key="_result", - type="boolean", - schema=BooleanItemSchema(description=_("执行结果的布尔值,True or False")), - ), - OutputItem(name=_("循环次数"), key="_loop", type="int", schema=IntItemSchema(description=_("循环执行次数"))), - OutputItem( - name=_("当前流程循环次数"), - key="_inner_loop", - type="int", - schema=IntItemSchema(description=_("在当前流程节点循环执行次数,由父流程重新进入时会重置(仅支持新版引擎)")), - ), - ] - - def __init__(self, name=None): - self.name = name - self.interval = deepcopy(self.interval) - self._runtime_attrs = {} - - def __getattr__(self, name): - if name not in self.__dict__.get("_runtime_attrs", {}): - raise AttributeError() - - return self._runtime_attrs[name] - - def __getstate__(self): - if "logger" in self.__dict__: - del self.__dict__["logger"] - # compatible with old version pickle obj - if "_runtime_attrs" in self.__dict__: - if "logger" in self._runtime_attrs: - del self._runtime_attrs["logger"] - - return self.__dict__ - - @abstractmethod - def execute(self, data, parent_data): - # get params from data - pass - - def outputs_format(self): - return [] - - def inputs_format(self): - return [] - - def inputs(self): - return self.inputs_format() - - def outputs(self): - custom_format = self.outputs_format() - assert isinstance(custom_format, list) - custom_format += self.default_outputs - return 
custom_format - - def need_schedule(self): - return getattr(self, Service.schedule_determine_attr, False) - - def schedule(self, data, parent_data, callback_data=None): - return True - - def finish_schedule(self): - setattr(self, self.schedule_result_attr, True) - - def is_schedule_finished(self): - return getattr(self, self.schedule_result_attr, False) - - def multi_callback_enabled(self): - return getattr(self, self.multi_callback_determine_attr, False) - - def clean_status(self): - setattr(self, self.schedule_result_attr, False) - - def setup_runtime_attrs(self, **kwargs): - # compatible with old version pickle obj - if "_runtime_attrs" not in self.__dict__: - self._runtime_attrs = {} - self._runtime_attrs.update(**kwargs) - - -class ServiceActivity(Activity): - result_bit = "_result" - loop = "_loop" - ON_RETRY = "_on_retry" - - def __init__( - self, - id, - service, - name=None, - data=None, - error_ignorable=False, - failure_handler=None, - skippable=True, - retryable=True, - timeout=None, - ): - super(ServiceActivity, self).__init__(id, name, data, failure_handler) - self.service = service - self.error_ignorable = error_ignorable - self.skippable = skippable - self.retryable = retryable - self.timeout = timeout - - if data: - self._prepared_inputs = self.data.inputs_copy() - self._prepared_outputs = self.data.outputs_copy() - - def __setstate__(self, state): - - for attr, obj in list(state.items()): - # py2 pickle dumps data compatible - if isinstance(attr, bytes): - attr = attr.decode("utf-8") - obj = convert_bytes_to_str(obj) - - setattr(self, attr, obj) - - if "timeout" not in state: - self.timeout = None - - def execute_pre_process(self, parent_data): - # return True if the plugin does not complete execute_pre_process function - if not (hasattr(self.service, "execute_pre_process") and callable(self.service.execute_pre_process)): - return True - - result = self.service.execute_pre_process(self.data, parent_data) - - # set result - 
self.set_result_bit(result) - - if self.error_ignorable: - return True - return result - - def execute(self, parent_data): - self.setup_logger() - try: - result = self.service.execute(self.data, parent_data) - except settings.PLUGIN_SPECIFIC_EXCEPTIONS as e: - self.data.set_outputs("ex_data", e) - result = False - - # set result - self.set_result_bit(result) - - if self.error_ignorable: - return True - return result - - def set_result_bit(self, result): - if result is False: - self.data.set_outputs(self.result_bit, False) - else: - self.data.set_outputs(self.result_bit, True) - - def get_result_bit(self): - return self.data.get_one_of_outputs(self.result_bit, False) - - def skip(self): - self.set_result_bit(True) - return True - - def ignore_error(self): - self.set_result_bit(False) - return True - - def clear_outputs(self): - self.data.reset_outputs({}) - - def need_schedule(self): - return self.service.need_schedule() - - def schedule(self, parent_data, callback_data=None): - self.setup_logger() - try: - result = self.service.schedule(self.data, parent_data, callback_data) - except settings.PLUGIN_SPECIFIC_EXCEPTIONS as e: - self.data.set_outputs("ex_data", e) - result = False - self.set_result_bit(result) - - if result is False: - if self.error_ignorable: - self.service.finish_schedule() - return True - - return result - - def is_schedule_done(self): - return self.service.is_schedule_finished() - - def finish_schedule(self): - self.service.finish_schedule() - - def shell(self): - shell = ServiceActivity( - id=self.id, - service=self.service, - name=self.name, - data=self.data, - error_ignorable=self.error_ignorable, - timeout=self.timeout, - ) - return shell - - def schedule_fail(self): - return - - def schedule_success(self): - return - - def prepare_rerun_data(self): - self.data.override_inputs(deepcopy(self._prepared_inputs)) - self.data.override_outputs(deepcopy(self._prepared_outputs)) - - def setup_runtime_attrs(self, **kwargs): - 
self.service.setup_runtime_attrs(**kwargs) - - def setup_logger(self): - self.service.setup_runtime_attrs(logger=self.logger) - - -class AbstractIntervalGenerator(object, metaclass=ABCMeta): - def __init__(self): - self.count = 0 - - def next(self): - self.count += 1 - - -class DefaultIntervalGenerator(AbstractIntervalGenerator): - def next(self): - super(DefaultIntervalGenerator, self).next() - return self.count ** 2 - - -class SquareIntervalGenerator(AbstractIntervalGenerator): - def next(self): - super(SquareIntervalGenerator, self).next() - return self.count ** 2 - - -class NullIntervalGenerator(AbstractIntervalGenerator): - pass - - -class LinearIntervalGenerator(AbstractIntervalGenerator): - pass - - -class StaticIntervalGenerator(AbstractIntervalGenerator): - def __init__(self, interval): - super(StaticIntervalGenerator, self).__init__() - self.interval = interval - - def next(self): - super(StaticIntervalGenerator, self).next() - return self.interval diff --git a/lib/pipeline/core/flow/activity/subprocess.py b/lib/pipeline/core/flow/activity/subprocess.py deleted file mode 100644 index 14ff3ae..0000000 --- a/lib/pipeline/core/flow/activity/subprocess.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from copy import deepcopy - -from pipeline.core.flow.activity.base import Activity -from pipeline.utils.utils import convert_bytes_to_str - - -class SubProcess(Activity): - def __init__(self, id, pipeline, name=None): - super(SubProcess, self).__init__(id, name, pipeline.data) - self.pipeline = pipeline - self._prepared_inputs = self.pipeline.data.inputs_copy() - self._prepared_outputs = self.pipeline.data.outputs_copy() - - def prepare_rerun_data(self): - self.data.override_inputs(deepcopy(self._prepared_inputs)) - self.data.override_outputs(deepcopy(self._prepared_outputs)) - - def __setstate__(self, state): - for attr, obj in list(state.items()): - if isinstance(attr, bytes): - attr = attr.decode("utf-8") - obj = convert_bytes_to_str(obj) - setattr(self, attr, obj) - - if "_prepared_inputs" not in state: - self._prepared_inputs = self.pipeline.data.inputs_copy() - - if "_prepared_outputs" not in state: - self._prepared_outputs = self.pipeline.data.outputs_copy() diff --git a/lib/pipeline/core/flow/base.py b/lib/pipeline/core/flow/base.py deleted file mode 100644 index 5d62ae1..0000000 --- a/lib/pipeline/core/flow/base.py +++ /dev/null @@ -1,181 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging -import weakref -from abc import ABCMeta, abstractmethod -from functools import wraps - -from pipeline.exceptions import InvalidOperationException - - -def extra_inject(func): - @wraps(func) - def wrapper(*args, **kwargs): - if "extra" not in kwargs: - kwargs["extra"] = {} - kwargs["extra"]["_id"] = args[0].id - return func(*args, **kwargs) - - return wrapper - - -class FlowElement(object, metaclass=ABCMeta): - def __init__(self, id, name=None): - self.id = id - self.name = name - - -class FlowNode(FlowElement, metaclass=ABCMeta): - ON_RETRY = "_on_retry" - - def __init__(self, id, name=None, data=None): - super(FlowNode, self).__init__(id, name) - self.incoming = SequenceFlowCollection() - self.outgoing = SequenceFlowCollection() - self.data = data - - def on_retry(self): - return hasattr(self, self.ON_RETRY) - - def next_exec_is_retry(self): - setattr(self, self.ON_RETRY, True) - - def retry_at_current_exec(self): - delattr(self, self.ON_RETRY) - - @abstractmethod - def next(self): - """ - 该节点的下一个节点,由子类来实现 - :return: - """ - raise NotImplementedError() - - class FlowNodeLogger: - def __init__(self, id): - self.id = id - self._logger = logging.getLogger("pipeline.logging") - - @extra_inject - def info(self, *args, **kwargs): - self._logger.info(*args, **kwargs) - - @extra_inject - def warning(self, *args, **kwargs): - self._logger.warning(*args, **kwargs) - - @extra_inject - def error(self, *args, **kwargs): - self._logger.error(*args, **kwargs) - - @extra_inject - def critical(self, *args, **kwargs): - self._logger.critical(*args, **kwargs) - - @property - def logger(self): - _logger = getattr(self, "_logger", None) - if not _logger: - _logger = self.FlowNodeLogger(self.id) - setattr(self, "_logger", _logger) - return _logger - - def __getstate__(self): - if "_logger" in self.__dict__: - del self.__dict__["_logger"] - return self.__dict__ - - -class SequenceFlow(FlowElement): - def __init__(self, id, source, target, is_default=False, 
name=None): - super(SequenceFlow, self).__init__(id, name) - self.source = weakref.proxy(source) if source is not None else source - self.target = weakref.proxy(target) if target is not None else target - self.is_default = is_default - - -class SequenceFlowCollection(object): - def __init__(self, *flows): - flow_dict = {} - for flow in flows: - flow_dict[flow.id] = flow - - self.flows = list(flows) - self.flow_dict = flow_dict - - def get_flow(self, id): - """ - 获取 flow.id = id 的某个 flow - :param id: flow id - :return: - """ - return self.flow_dict.get(id) - - def unique_one(self): - """ - 获取唯一的一个 flow,若当前集合内 flow 不只一条则抛出异常 - :return: - """ - if len(self.flows) != 1: - raise InvalidOperationException("this collection contains multiple flow, can not get unique one.") - return self.flows[0] - - def is_empty(self): - """ - 当前集合是否为空 - :return: - """ - return len(self.flows) == 0 - - def default_flow(self): - """ - 获取当前集合中默认的 flow - :return: 若存在默认的 flow 则返回,否则返回 None - """ - for flow in self.flows: - if flow.is_default: - return flow - return None - - def add_flow(self, flow): - """ - 向当前结合中添加一条 flow - :param flow: 待添加的 flow - :return: - """ - self.flows.append(flow) - self.flow_dict[flow.id] = flow - - def all_target_node(self): - """ - 返回当前集合中所有 flow 的 target - :return: - """ - nodes = [] - for flow in self.flows: - nodes.append(flow.target) - return nodes - - def all_source_node(self): - """ - 返回当前集合中所有 flow 的 source - :return: - """ - nodes = [] - for flow in self.flows: - nodes.append(flow.source) - return nodes - - def __iter__(self): - return iter(self.flows) diff --git a/lib/pipeline/core/flow/event.py b/lib/pipeline/core/flow/event.py deleted file mode 100644 index a5394e4..0000000 --- a/lib/pipeline/core/flow/event.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. 
-Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging -import traceback -from abc import ABCMeta, abstractmethod - -from pipeline.core.flow.base import FlowNode -from pipeline.core.pipeline import Pipeline -from pipeline.engine.signals import pipeline_end - -logger = logging.getLogger("celery") - - -class Event(FlowNode, metaclass=ABCMeta): - def __init__(self, id, name=None, data=None): - super(Event, self).__init__(id, name, data) - - def next(self): - return self.outgoing.unique_one().target - - -class ThrowEvent(Event, metaclass=ABCMeta): - pass - - -class CatchEvent(Event, metaclass=ABCMeta): - pass - - -class EndEvent(ThrowEvent, metaclass=ABCMeta): - def pipeline_finish(self, root_pipeline_id): - try: - pipeline_end.send(sender=Pipeline, root_pipeline_id=root_pipeline_id) - except Exception: - logger.error("pipeline end handler error %s" % traceback.format_exc()) - - -class StartEvent(CatchEvent, metaclass=ABCMeta): - pass - - -class EmptyStartEvent(StartEvent): - pass - - -class EmptyEndEvent(EndEvent): - pass - - -class ExecutableEndEvent(EndEvent, metaclass=ABCMeta): - @abstractmethod - def execute(self, in_subprocess, root_pipeline_id, current_pipeline_id): - raise NotImplementedError() diff --git a/lib/pipeline/core/flow/gateway.py b/lib/pipeline/core/flow/gateway.py deleted file mode 100644 index 9b83296..0000000 --- a/lib/pipeline/core/flow/gateway.py +++ /dev/null @@ -1,173 +0,0 @@ -# -*- coding: utf-8 -*- 
-""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" -import logging -from abc import ABCMeta - -import ujson as json - -from pipeline.core.constants import ESCAPED_CHARS -from pipeline.core.data.expression import ConstantTemplate, deformat_constant_key -from pipeline.core.flow.base import FlowNode -from pipeline.exceptions import ConditionExhaustedException, EvaluationException, InvalidOperationException -from pipeline.utils.boolrule import BoolRule - -logger = logging.getLogger("pipeline_engine") - - -class Gateway(FlowNode, metaclass=ABCMeta): - pass - - -class ExclusiveGateway(Gateway): - def __init__(self, id, conditions=None, name=None, data=None): - super(ExclusiveGateway, self).__init__(id, name, data) - self.conditions = conditions or [] - - def add_condition(self, condition): - self.conditions.append(condition) - - def next(self, data=None): - default_flow = self.outgoing.default_flow() - next_flow = self._determine_next_flow_with_boolrule(data) - - if not next_flow: # determine fail - if not default_flow: # try to use default flow - raise ConditionExhaustedException( - "all conditions of branches are False " "while default flow is not appointed" - ) - return default_flow.target - - return next_flow.target - - def target_for_sequence_flow(self, flow_id): - flow_to_target = {c.sequence_flow.id: 
c.sequence_flow.target for c in self.conditions} - if flow_id not in flow_to_target: - raise InvalidOperationException("sequence flow(%s) does not exist." % flow_id) - return flow_to_target[flow_id] - - @staticmethod - def _transform_escape_char(string): - """ - 对未转义的字符串进行转义,现有的转义字符包括\n, \r, \t - """ - if not isinstance(string, str): - return string - # 已转义的情况 - if len([c for c in ESCAPED_CHARS.values() if c in string]) > 0: - return string - for key, value in ESCAPED_CHARS.items(): - if key in string: - string = string.replace(key, value) - return string - - def _determine_next_flow_with_boolrule(self, data): - """ - 根据当前传入的数据判断下一个应该流向的 flow ( 不使用 eval 的版本) - :param data: - :return: - """ - for key, value in data.items(): - data[key] = self._transform_escape_char(value) - logger.info("[{}] ready to resolve conditions: {}".format(self.id, [c.evaluate for c in self.conditions])) - for condition in self.conditions: - deformatted_data = {deformat_constant_key(key): value for key, value in list(data.items())} - try: - logger.info("[{}] before resolve condition: {}".format(self.id, condition.evaluate)) - resolved_evaluate = ConstantTemplate(condition.evaluate).resolve_data(deformatted_data) - logger.info("[{}] test {} with data {}".format(self.id, resolved_evaluate, data)) - result = BoolRule(resolved_evaluate).test(data) - logger.info("[{}] {} test result: {}".format(self.id, resolved_evaluate, result)) - except Exception as e: - raise EvaluationException( - "evaluate[%s] fail with data[%s] message: %s" - % (condition.evaluate, json.dumps(deformatted_data), e) - ) - if result: - return condition.sequence_flow - - return None - - def skip(self): - return True - - -class ParallelGateway(Gateway): - def __init__(self, id, converge_gateway_id, name=None, data=None): - super(ParallelGateway, self).__init__(id, name, data) - self.converge_gateway_id = converge_gateway_id - - def next(self): - raise InvalidOperationException("can not determine next node for parallel 
gateway.") - - -class ConditionalParallelGateway(Gateway): - def __init__(self, id, converge_gateway_id, conditions=None, name=None, data=None): - super(ConditionalParallelGateway, self).__init__(id, name, data) - self.converge_gateway_id = converge_gateway_id - self.conditions = conditions or [] - - def add_condition(self, condition): - self.conditions.append(condition) - - def targets_meet_condition(self, data): - - targets = [] - - logger.info("[{}] ready to resolve conditions: {}".format(self.id, [c.evaluate for c in self.conditions])) - for condition in self.conditions: - deformatted_data = {deformat_constant_key(key): value for key, value in list(data.items())} - try: - logger.info("[{}] before resolve condition: {}".format(self.id, condition.evaluate)) - resolved_evaluate = ConstantTemplate(condition.evaluate).resolve_data(deformatted_data) - logger.info("[{}] test {} with data {}".format(self.id, resolved_evaluate, data)) - result = BoolRule(resolved_evaluate).test(data) - logger.info("[{}] {} test result: {}".format(self.id, resolved_evaluate, result)) - except Exception as e: - raise EvaluationException( - "evaluate[%s] fail with data[%s] message: %s" - % (condition.evaluate, json.dumps(deformatted_data), e) - ) - if result: - targets.append(condition.sequence_flow.target) - - if not targets: - raise ConditionExhaustedException("all conditions of branches are False") - - return targets - - def target_for_sequence_flows(self, flow_ids): - flow_to_target = {c.sequence_flow.id: c.sequence_flow.target for c in self.conditions} - if not set(flow_ids).issubset(set(flow_to_target.keys())): - not_exist_flow_ids = set(flow_ids) - set(flow_to_target.keys()) - raise InvalidOperationException(f"sequence flows {not_exist_flow_ids} does not exist.") - return [flow_to_target[flow_id] for flow_id in flow_ids] - - def next(self): - raise InvalidOperationException("can not determine next node for conditional parallel gateway.") - - def skip(self): - return True - - -class 
ConvergeGateway(Gateway): - def next(self): - return self.outgoing.unique_one().target - - def skip(self): - raise InvalidOperationException("can not skip conditional converge gateway.") - - -class Condition(object): - def __init__(self, evaluate, sequence_flow): - self.evaluate = evaluate - self.sequence_flow = sequence_flow diff --git a/lib/pipeline/core/flow/io.py b/lib/pipeline/core/flow/io.py deleted file mode 100644 index f9829a8..0000000 --- a/lib/pipeline/core/flow/io.py +++ /dev/null @@ -1,127 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import abc -from collections import Mapping - - -class DataItem(object, metaclass=abc.ABCMeta): - def __init__(self, name, key, type, schema=None): - self.name = name - self.key = key - self.type = type - self.schema = schema - - def as_dict(self): - return { - "name": self.name, - "key": self.key, - "type": self.type, - "schema": self.schema.as_dict() if self.schema else {}, - } - - -class InputItem(DataItem): - def __init__(self, required=True, *args, **kwargs): - self.required = required - super(InputItem, self).__init__(*args, **kwargs) - - def as_dict(self): - base = super(InputItem, self).as_dict() - base["required"] = self.required - return base - - -class OutputItem(DataItem): - pass - - -class ItemSchema(object, metaclass=abc.ABCMeta): - def __init__(self, description, enum=None): - self.type = self._type() - self.description = description - self.enum = enum or [] - - def as_dict(self): - return {"type": self.type, "description": self.description, "enum": self.enum} - - @abc.abstractmethod - def _type(self): - raise NotImplementedError() - - -class SimpleItemSchema(ItemSchema, metaclass=abc.ABCMeta): - pass - - -class IntItemSchema(SimpleItemSchema): - @classmethod - def _type(cls): - return "int" - - -class StringItemSchema(SimpleItemSchema): - @classmethod - def _type(cls): - return "string" - - -class FloatItemSchema(SimpleItemSchema): - @classmethod - def _type(cls): - return "float" - - -class BooleanItemSchema(SimpleItemSchema): - @classmethod - def _type(cls): - return "boolean" - - -class ArrayItemSchema(ItemSchema): - def __init__(self, item_schema, *args, **kwargs): - if not isinstance(item_schema, ItemSchema): - raise TypeError("item_schema of ArrayItemSchema must be subclass of ItemSchema") - self.item_schema = item_schema - super(ArrayItemSchema, self).__init__(*args, **kwargs) - - def as_dict(self): - base = super(ArrayItemSchema, self).as_dict() - base["items"] = self.item_schema.as_dict() - return base - - @classmethod - def 
_type(cls): - return "array" - - -class ObjectItemSchema(ItemSchema): - def __init__(self, property_schemas, *args, **kwargs): - if not isinstance(property_schemas, Mapping): - raise TypeError("property_schemas of ObjectItemSchema must be Mapping type") - - if not all([isinstance(value, ItemSchema) for value in list(property_schemas.values())]): - raise TypeError("value in property_schemas of ObjectItemSchema must be subclass of ItemSchema") - - self.property_schemas = property_schemas - super(ObjectItemSchema, self).__init__(*args, **kwargs) - - def as_dict(self): - base = super(ObjectItemSchema, self).as_dict() - properties = {prop: schema.as_dict() for prop, schema in list(self.property_schemas.items())} - base["properties"] = properties - return base - - @classmethod - def _type(cls): - return "object" diff --git a/lib/pipeline/core/flow/signals.py b/lib/pipeline/core/flow/signals.py deleted file mode 100644 index 4c23fed..0000000 --- a/lib/pipeline/core/flow/signals.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from django.dispatch import Signal - -post_new_end_event_register = Signal(providing_args=["node_type", "node_cls"]) diff --git a/lib/pipeline/core/pipeline.py b/lib/pipeline/core/pipeline.py deleted file mode 100644 index 4fa7593..0000000 --- a/lib/pipeline/core/pipeline.py +++ /dev/null @@ -1,140 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from queue import Queue - -from pipeline.core.flow.activity import Activity -from pipeline.core.flow.gateway import Gateway -from pipeline.exceptions import PipelineException - - -class PipelineSpec(object): - def __init__(self, start_event, end_event, flows, activities, gateways, data, context): - objects = {start_event.id: start_event, end_event.id: end_event} - for act in activities: - objects[act.id] = act - for gw in gateways: - objects[gw.id] = gw - - self.start_event = start_event - self.end_event = end_event - self.flows = flows - self.activities = activities - self.gateways = gateways - self.data = data - self.objects = objects - self.context = context - - def prune(self, keep_from, keep_to): - if keep_from != self.start_event.id: - self.start_event.outgoing = None - - if keep_to != self.end_event.id: - self.end_event.incoming = None - - self.activities = [] - self.gateways = [] - self.flows = [] - - keep_from_node = self.objects[keep_from] - keep_to_node = self.objects[keep_to] - - keep_from_node.incoming = None - keep_to_node.outgoing = None - - to_be_process = Queue() - to_be_process.put(keep_from_node) - - new_objects = {} - keep_to_incoming_flows = [] - - while not to_be_process.empty(): - node = to_be_process.get() - - if issubclass(node.__class__, Activity): - self.activities.append(node) - elif issubclass(node.__class__, Gateway): - self.gateways.append(node) - - new_objects[node.id] = node - - if node.id == keep_to_node.id: - continue - - for out in node.outgoing: - - self.flows.append(out) - - if out.target.id not in new_objects: - next_node = out.target - if next_node.id == keep_to_node.id: - keep_to_incoming_flows.append(out) - to_be_process.put(next_node) - - keep_to_node.incoming.flows = keep_to_incoming_flows - keep_to_node.incoming.flow_dict = {} - for flow in keep_to_incoming_flows: - keep_to_node.incoming.flow_dict[flow.id] = flow - - self.objects = new_objects - - -class PipelineShell(object): - def __init__(self, id, data): - 
self.id = id - self.data = data - - def shell(self): - return PipelineShell(id=self.id, data=self.data) - - -class Pipeline(object): - def __init__(self, id, pipeline_spec, parent=None): - self.id = id - self.spec = pipeline_spec - self.parent = parent - - @property - def data(self): - return self.spec.data - - @property - def context(self): - return self.spec.context - - @property - def start_event(self): - return self.spec.start_event - - @property - def end_event(self): - return self.spec.end_event - - @property - def all_nodes(self): - return self.spec.objects - - def data_for_node(self, node): - node = self.spec.objects.get(node.id) - if not node: - raise PipelineException("Can not find node %s in this pipeline." % node.id) - return node.data - - def node(self, id): - return self.spec.objects.get(id) - - def prune(self, keep_from, keep_to): - self.spec.prune(keep_from=keep_from, keep_to=keep_to) - - def shell(self): - return PipelineShell(id=self.id, data=self.data) diff --git a/lib/pipeline/core/signals/__init__.py b/lib/pipeline/core/signals/__init__.py deleted file mode 100644 index 22849a4..0000000 --- a/lib/pipeline/core/signals/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from django.dispatch import Signal - -pre_variable_register = Signal(providing_args=["variable_code"]) diff --git a/lib/pipeline/django_signal_valve/__init__.py b/lib/pipeline/django_signal_valve/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/django_signal_valve/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/django_signal_valve/admin.py b/lib/pipeline/django_signal_valve/admin.py deleted file mode 100644 index be469df..0000000 --- a/lib/pipeline/django_signal_valve/admin.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.contrib import admin - -from pipeline.django_signal_valve.models import Signal - - -@admin.register(Signal) -class SignalAdmin(admin.ModelAdmin): - list_display = ["id", "module_path", "name", "kwargs"] - search_fields = ["id", "module_path", "name"] diff --git a/lib/pipeline/django_signal_valve/migrations/0001_initial.py b/lib/pipeline/django_signal_valve/migrations/0001_initial.py deleted file mode 100644 index 4afb904..0000000 --- a/lib/pipeline/django_signal_valve/migrations/0001_initial.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models -import pipeline.django_signal_valve.models - - -class Migration(migrations.Migration): - - dependencies = [] - - operations = [ - migrations.CreateModel( - name="Signal", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), - ("module_path", models.TextField(verbose_name="\u4fe1\u53f7\u6a21\u5757\u540d")), - ("name", models.CharField(max_length=64, verbose_name="\u4fe1\u53f7\u5c5e\u6027\u540d")), - ("kwargs", pipeline.django_signal_valve.models.IOField(verbose_name="\u4fe1\u53f7\u53c2\u6570")), - ], - ), - ] diff --git a/lib/pipeline/django_signal_valve/migrations/__init__.py b/lib/pipeline/django_signal_valve/migrations/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/django_signal_valve/migrations/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" diff --git a/lib/pipeline/django_signal_valve/models.py b/lib/pipeline/django_signal_valve/models.py deleted file mode 100644 index ea78d89..0000000 --- a/lib/pipeline/django_signal_valve/models.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import zlib -import pickle - -from django.db import models -from django.utils.translation import ugettext_lazy as _ - - -class IOField(models.BinaryField): - def __init__(self, compress_level=6, *args, **kwargs): - super(IOField, self).__init__(*args, **kwargs) - self.compress_level = compress_level - - def get_prep_value(self, value): - value = super(IOField, self).get_prep_value(value) - return zlib.compress(pickle.dumps(value), self.compress_level) - - def to_python(self, value): - value = super(IOField, self).to_python(value) - return pickle.loads(zlib.decompress(value)) - - def from_db_value(self, value, expression, connection, context=None): - return self.to_python(value) - - -class SignalManager(models.Manager): - def dump(self, module_path, signal_name, kwargs): - self.create(module_path=module_path, name=signal_name, kwargs=kwargs) - - -class Signal(models.Model): - module_path = models.TextField(_("信号模块名")) - name = models.CharField(_("信号属性名"), max_length=64) - kwargs = 
IOField(verbose_name=_("信号参数")) - - objects = SignalManager() diff --git a/lib/pipeline/django_signal_valve/valve.py b/lib/pipeline/django_signal_valve/valve.py deleted file mode 100644 index 215165f..0000000 --- a/lib/pipeline/django_signal_valve/valve.py +++ /dev/null @@ -1,66 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging -import traceback - -from .models import Signal - -logger = logging.getLogger(__name__) - - -def set_valve_function(func): - global __valve_function - if __valve_function is not None: - raise Exception("valve function can only be set once.") - if not callable(func): - raise Exception("valve function must be a callable object") - - __valve_function = func - - -def send(signal_mod, signal_name, **kwargs): - if not __valve_function or not __valve_function(): - try: - return getattr(signal_mod, signal_name).send(**kwargs) - except Exception: - raise - else: - Signal.objects.dump(signal_mod.__path__, signal_name, kwargs) - return None - - -def open_valve(signal_mod): - signal_list = Signal.objects.filter(module_path=signal_mod.__path__).order_by("id") - response = [] - for signal in signal_list: - try: - response.append(getattr(signal_mod, signal.name).send(**signal.kwargs)) - signal.delete() - except Exception: - logger.error( - "signal({} - {}) resend failed: {}".format(signal.module_path, signal.name, traceback.format_exc()) - ) - return response - - -def unload_valve_function(): - global __valve_function - __valve_function = None - - -def valve_function(): - return __valve_function - - -__valve_function = None diff --git a/lib/pipeline/engine/__init__.py b/lib/pipeline/engine/__init__.py deleted file mode 100644 index dc9ff24..0000000 --- a/lib/pipeline/engine/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" -default_app_config = "pipeline.engine.apps.EngineConfig" diff --git a/lib/pipeline/engine/admin.py b/lib/pipeline/engine/admin.py deleted file mode 100644 index 023be3c..0000000 --- a/lib/pipeline/engine/admin.py +++ /dev/null @@ -1,186 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from django.contrib import admin -from django.utils.translation import ugettext_lazy as _ - -from pipeline.engine import models -from pipeline.engine.conf.function_switch import FREEZE_ENGINE -from pipeline.engine.core import api -from pipeline.service import task_service - - -@admin.register(models.PipelineModel) -class PipelineModelAdmin(admin.ModelAdmin): - list_display = ["id", "process"] - search_fields = ["id__exact", "process__id__exact"] - raw_id_fields = ["process"] - - -@admin.register(models.PipelineProcess) -class PipelineProcessAdmin(admin.ModelAdmin): - list_display = [ - "id", - "root_pipeline_id", - "current_node_id", - "destination_id", - "parent_id", - "need_ack", - "ack_num", - "is_alive", - "is_sleep", - "is_frozen", - ] - search_fields = ["id__exact", "root_pipeline_id__exact", "current_node_id__exact"] - list_filter = ["is_alive", "is_sleep"] - raw_id_fields = ["snapshot"] - - -def force_fail_node(modeladmin, request, queryset): - for item in queryset: - task_service.forced_fail(item.id) - - -@admin.register(models.Status) -class StatusAdmin(admin.ModelAdmin): - list_display = [ - "id", - "name", - "state", - "retry", - "skip", - "loop", - "created_time", - "started_time", - "archived_time", - ] - search_fields = ["=id"] - actions = [force_fail_node] - - -@admin.register(models.ScheduleService) -class ScheduleServiceAdmin(admin.ModelAdmin): - list_display = [ - "id", - "activity_id", - "process_id", - "schedule_times", - "wait_callback", - "is_finished", - ] - search_fields = ["id__exact"] - list_filter = ["wait_callback", "is_finished"] - - -@admin.register(models.ProcessCeleryTask) -class ProcessCeleryTaskAdmin(admin.ModelAdmin): - list_display = ["id", "process_id", "celery_task_id"] - search_fields = ["id__exact", "process_id__exact"] - - -@admin.register(models.Data) -class DataAdmin(admin.ModelAdmin): - list_display = ["id", "inputs", "outputs", "ex_data"] - search_fields = ["id__exact"] - - -@admin.register(models.HistoryData) 
-class HistoryDataAdmin(admin.ModelAdmin): - list_display = ["id", "inputs", "outputs", "ex_data"] - search_fields = ["id__exact"] - - -@admin.register(models.History) -class HistoryAdmin(admin.ModelAdmin): - list_display = ["identifier", "started_time", "archived_time"] - search_fields = ["identifier__exact"] - raw_id_fields = ["data"] - - -@admin.register(models.ScheduleCeleryTask) -class ScheduleCeleryTaskAdmin(admin.ModelAdmin): - list_display = ["schedule_id", "celery_task_id"] - search_fields = ["schedule_id__exact"] - - -@admin.register(models.NodeCeleryTask) -class NodeCeleryTaskAdmin(admin.ModelAdmin): - list_display = ["node_id", "celery_task_id"] - search_fields = ["node_id__exact"] - - -on = True -off = False - -switch_hook = {FREEZE_ENGINE: {on: api.freeze, off: api.unfreeze}} - - -def turn_on_function(modeladmin, request, queryset): - for item in queryset: - if not item.is_active: - switch_hook[item.name][on]() - - -def turn_off_function(modeladmin, request, queryset): - for item in queryset: - if item.is_active: - switch_hook[item.name][off]() - - -turn_on_function.short_description = _("打开所选的功能") -turn_off_function.short_description = _("关闭所选的功能") - - -@admin.register(models.FunctionSwitch) -class FunctionAdmin(admin.ModelAdmin): - list_display = ["name", "description", "is_active"] - search_fields = ["name", "description"] - actions = [turn_on_function, turn_off_function] - - def has_delete_permission(self, request, obj=None): - return False - - def get_actions(self, request): - actions = super(FunctionAdmin, self).get_actions(request) - if "delete_selected" in actions: - del actions["delete_selected"] - return actions - - def get_readonly_fields(self, request, obj=None): - if obj: # obj is not None, so this is an edit - return [ - "name", - "is_active", - ] # Return a list or tuple of readonly fields' names - else: # This is an addition - return [] - - -def resend_task(modeladmin, request, queryset): - for item in queryset: - item.resend() - - 
-@admin.register(models.SendFailedCeleryTask) -class SendFailedCeleryTaskAdmin(admin.ModelAdmin): - list_display = [ - "id", - "name", - "kwargs", - "type", - "extra_kwargs", - "exec_trace", - "created_at", - ] - search_fields = ["id__exact", "name"] - actions = [resend_task] diff --git a/lib/pipeline/engine/api.py b/lib/pipeline/engine/api.py deleted file mode 100644 index 15658d7..0000000 --- a/lib/pipeline/engine/api.py +++ /dev/null @@ -1,586 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import functools -import logging -import time -import traceback - -from celery import current_app -from django.db import transaction -from redis.exceptions import ConnectionError as RedisConnectionError - -from pipeline.celery.queues import ScalableQueues -from pipeline.constants import PIPELINE_DEFAULT_PRIORITY, PIPELINE_MAX_PRIORITY, PIPELINE_MIN_PRIORITY -from pipeline.core.flow.activity import ServiceActivity -from pipeline.core.flow.gateway import ExclusiveGateway, ParallelGateway, ConditionalParallelGateway -from pipeline.engine import exceptions, states -from pipeline.engine.core.api import workers -from pipeline.engine.models import ( - Data, - FunctionSwitch, - History, - NodeRelationship, - Pipeline, - PipelineModel, - PipelineProcess, - ProcessCeleryTask, - ScheduleService, - Status, - SubProcessRelationship, -) -from pipeline.engine.signals import pipeline_revoke -from pipeline.engine.utils import ActionResult, calculate_elapsed_time -from pipeline.exceptions import PipelineException -from pipeline.utils import uniqid - -logger = logging.getLogger("celery") - - -def _node_existence_check(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - id_from_kwargs = kwargs.get("node_id") - node_id = id_from_kwargs if id_from_kwargs else args[0] - try: - Status.objects.get(id=node_id) - except Status.DoesNotExist: - return ActionResult(result=False, message="node not exists or not be executed yet") - return func(*args, **kwargs) - - return wrapper - - -def _frozen_check(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - if FunctionSwitch.objects.is_frozen(): - return ActionResult(result=False, message="engine is frozen, can not perform operation") - - return func(*args, **kwargs) - - return wrapper - - -def _worker_check(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - def on_connection_error(exc, interval): - logger.warning("Connection Error: {!r}. 
Retry in {}s.".format(exc, interval)) - - if kwargs.get("check_workers", True): - try: - with current_app.connection() as conn: - try: - conn.ensure_connection(on_connection_error, current_app.conf.BROKER_CONNECTION_MAX_RETRIES) - except conn.connection_errors + conn.channel_errors as exc: - logger.warning("Connection lost: {!r}".format(exc)) - if not workers(conn): - return ActionResult( - result=False, message="can not find celery workers, please check worker status" - ) - except exceptions.RabbitMQConnectionError as e: - return ActionResult( - result=False, - message="celery worker status check failed with message: %s, " "check rabbitmq status please" % e, - ) - except RedisConnectionError: - return ActionResult(result=False, message="redis connection error, check redis status please") - - return func(*args, **kwargs) - - return wrapper - - -@_worker_check -@_frozen_check -def start_pipeline(pipeline_instance, check_workers=True, priority=PIPELINE_DEFAULT_PRIORITY, queue=""): - """ - start a pipeline - :param pipeline_instance: - :param priority: - :return: - """ - - if priority > PIPELINE_MAX_PRIORITY or priority < PIPELINE_MIN_PRIORITY: - raise exceptions.InvalidOperationException( - "pipeline priority must between [{min}, {max}]".format(min=PIPELINE_MIN_PRIORITY, max=PIPELINE_MAX_PRIORITY) - ) - - if queue and not ScalableQueues.has_queue(queue): - return ActionResult(result=False, message="can't not find queue({}) in any config queues.".format(queue)) - - Status.objects.prepare_for_pipeline(pipeline_instance) - process = PipelineProcess.objects.prepare_for_pipeline(pipeline_instance) - PipelineModel.objects.prepare_for_pipeline(pipeline_instance, process, priority, queue=queue) - - PipelineModel.objects.pipeline_ready(process_id=process.id) - - return ActionResult(result=True, message="success") - - -@_frozen_check -def pause_pipeline(pipeline_id): - """ - pause a running pipeline - :param pipeline_id: - :return: - """ - - return 
Status.objects.transit(id=pipeline_id, to_state=states.SUSPENDED, is_pipeline=True, appoint=True) - - -@_worker_check -@_frozen_check -def resume_pipeline(pipeline_id): - """ - resume a pipeline from suspended - :param pipeline_id: - :return: - """ - if not Status.objects.filter(id=pipeline_id).exists(): - return ActionResult(result=False, message="only started pipeline can be resumed.") - - action_result = Status.objects.transit(id=pipeline_id, to_state=states.READY, is_pipeline=True, appoint=True) - if not action_result.result: - return action_result - - process = PipelineModel.objects.get(id=pipeline_id).process - to_be_waked = [] - _get_process_to_be_waked(process, to_be_waked) - PipelineProcess.objects.batch_process_ready(process_id_list=to_be_waked, pipeline_id=pipeline_id) - - return action_result - - -@_frozen_check -def revoke_pipeline(pipeline_id): - """ - revoke a pipeline - :param pipeline_id: - :return: - """ - - try: - pipeline_model = PipelineModel.objects.get(id=pipeline_id) - except PipelineModel.DoesNotExist: - return ActionResult(result=False, message="pipeline to be revoked does not exist.") - - action_result = Status.objects.transit(id=pipeline_id, to_state=states.REVOKED, is_pipeline=True, appoint=True) - if not action_result.result: - return action_result - - process = pipeline_model.process - - if not process: - return ActionResult(result=False, message="relate process is none, this pipeline may be revoked.") - - with transaction.atomic(): - PipelineProcess.objects.select_for_update().get(id=process.id) - process.revoke_subprocess() - process.destroy_all() - - pipeline_revoke.send(sender=Pipeline, root_pipeline_id=pipeline_id) - - return action_result - - -@_frozen_check -def pause_node_appointment(node_id): - """ - make a appointment to pause a node - :param node_id: - :return: - """ - - return Status.objects.transit(id=node_id, to_state=states.SUSPENDED, appoint=True) - - -@_worker_check -@_frozen_check -@_node_existence_check -def 
resume_node_appointment(node_id): - """ - make a appointment to resume a node - :param node_id: - :return: - """ - - qs = PipelineProcess.objects.filter(current_node_id=node_id, is_sleep=True) - if qs.exists(): - # a process had sleep caused by pause reservation - action_result = Status.objects.transit(id=node_id, to_state=states.READY, appoint=True) - if not action_result.result: - return action_result - - process = qs.first() - Status.objects.recover_from_block(process.root_pipeline.id, process.subprocess_stack) - PipelineProcess.objects.process_ready(process_id=process.id) - return ActionResult(result=True, message="success") - - processing_sleep = SubProcessRelationship.objects.get_relate_process(subprocess_id=node_id) - if processing_sleep.exists(): - action_result = Status.objects.transit(id=node_id, to_state=states.RUNNING, appoint=True, is_pipeline=True) - if not action_result.result: - return action_result - # processes had sleep caused by subprocess pause - root_pipeline_id = processing_sleep.first().root_pipeline_id - - process_can_be_waked = [p for p in processing_sleep if p.can_be_waked()] - can_be_waked_ids = [p.id for p in process_can_be_waked] - - # get subprocess id which should be transited - subprocess_to_be_transit = set() - for process in process_can_be_waked: - _, subproc_above = process.subproc_sleep_check() - for subproc in subproc_above: - subprocess_to_be_transit.add(subproc) - - Status.objects.recover_from_block(root_pipeline_id, subprocess_to_be_transit) - PipelineProcess.objects.batch_process_ready(process_id_list=can_be_waked_ids, pipeline_id=root_pipeline_id) - return ActionResult(result=True, message="success") - - return ActionResult(result=False, message="node not exists or not be executed yet") - - -@_worker_check -@_frozen_check -@_node_existence_check -def retry_node(node_id, inputs=None): - """ - retry a node - :param node_id: - :param inputs: - :return: - """ - - try: - PipelineProcess.objects.get(current_node_id=node_id) - 
except PipelineProcess.DoesNotExist: # can not retry subprocess - return ActionResult(result=False, message="can't not retry a subprocess or this process has been revoked") - - process = PipelineProcess.objects.get(current_node_id=node_id) - - # try to get next - node = process.top_pipeline.node(node_id) - if not (isinstance(node, ServiceActivity) or isinstance(node, ParallelGateway)): - return ActionResult(result=False, message="can't retry this type of node") - - if hasattr(node, "retryable") and not node.retryable: - return ActionResult(result=False, message="the node is set to not be retryable, try skip it please.") - - action_result = Status.objects.retry(process, node, inputs) - if not action_result.result: - return action_result - - # wake up process - PipelineProcess.objects.process_ready(process_id=process.id) - - return action_result - - -@_worker_check -@_frozen_check -@_node_existence_check -def skip_node(node_id): - """ - skip a node - :param node_id: - :return: - """ - - try: - process = PipelineProcess.objects.get(current_node_id=node_id) - except PipelineProcess.DoesNotExist: # can not skip subprocess - return ActionResult(result=False, message="can't not skip a subprocess or this process has been revoked") - - # try to get next - node = process.top_pipeline.node(node_id) - if not isinstance(node, ServiceActivity): - return ActionResult(result=False, message="can't skip this type of node") - - if hasattr(node, "skippable") and not node.skippable: - return ActionResult(result=False, message="this node is set to not be skippable, try retry it please.") - - # skip and write result bit - action_result = Status.objects.skip(process, node) - if not action_result.result: - return action_result - - next_node_id = node.next().id - - # extract outputs and wake up process - process.top_pipeline.context.extract_output(node) - process.save() - PipelineProcess.objects.process_ready(process_id=process.id, current_node_id=next_node_id) - - return action_result - - 
-@_worker_check -@_frozen_check -@_node_existence_check -def skip_exclusive_gateway(node_id, flow_id): - """ - skip a failed exclusive gateway and appoint the flow to be pushed - :param node_id: - :param flow_id: - :return: - """ - - try: - process = PipelineProcess.objects.get(current_node_id=node_id) - except PipelineProcess.DoesNotExist: - return ActionResult( - result=False, message="invalid operation, this gateway is finished or pipeline have been revoked" - ) - - exclusive_gateway = process.top_pipeline.node(node_id) - - if not isinstance(exclusive_gateway, ExclusiveGateway): - return ActionResult(result=False, message="invalid operation, this node is not a exclusive gateway") - - next_node_id = exclusive_gateway.target_for_sequence_flow(flow_id).id - - action_result = Status.objects.skip(process, exclusive_gateway) - if not action_result.result: - return action_result - - # wake up process - PipelineProcess.objects.process_ready(process_id=process.id, current_node_id=next_node_id) - - return action_result - - -@_worker_check -@_frozen_check -@_node_existence_check -def skip_conditional_parallel_gateway(node_id, flow_ids, converge_gateway_id): - """ - skip a failed conditional parallel gateway and appoint the flow to be pushed - :param node_id: - :param flow_ids: - :param converge_gateway_id: - :return: - """ - - try: - process = PipelineProcess.objects.get(current_node_id=node_id) - except PipelineProcess.DoesNotExist: - return ActionResult( - result=False, message="invalid operation, this gateway is finished or pipeline have been revoked" - ) - - if process.children: - process.clean_children() - - conditional_parallel_gateway = process.top_pipeline.node(node_id) - - if not isinstance(conditional_parallel_gateway, ConditionalParallelGateway): - return ActionResult(result=False, message="invalid operation, this node is not a conditional parallel gateway") - - children = [] - targets = conditional_parallel_gateway.target_for_sequence_flows(flow_ids) - - for 
target in targets: - try: - child = PipelineProcess.objects.fork_child( - parent=process, current_node_id=target.id, destination_id=converge_gateway_id - ) - except PipelineException as e: - logger.error(traceback.format_exc()) - Status.objects.fail(conditional_parallel_gateway, ex_data=str(e)) - return ActionResult(result=False, message=e) - - children.append(child) - - action_result = Status.objects.skip(process, conditional_parallel_gateway) - if not action_result.result: - return action_result - - Status.objects.transit(id=process.top_pipeline.id, to_state=states.RUNNING, is_pipeline=True) - process.join(children) - process.sleep(adjust_status=True) - - return action_result - - -def get_status_tree(node_id, max_depth=1): - """ - get state and children states for a node - :param node_id: - :param max_depth: - :return: - """ - rel_qs = NodeRelationship.objects.filter(ancestor_id=node_id, distance__lte=max_depth) - if not rel_qs.exists(): - raise exceptions.InvalidOperationException( - "node(%s) does not exist, may have not by executed or expired" % node_id - ) - descendants = [rel.descendant_id for rel in rel_qs] - # remove root node - descendants.remove(node_id) - - rel_qs = NodeRelationship.objects.filter(descendant_id__in=descendants, distance=1) - targets = [rel.descendant_id for rel in rel_qs] - - root_status = Status.objects.filter(id=node_id).values().first() - root_status["elapsed_time"] = calculate_elapsed_time(root_status["started_time"], root_status["archived_time"]) - status_map = {node_id: root_status} - status_qs = Status.objects.filter(id__in=targets).values() - for status in status_qs: - status["elapsed_time"] = calculate_elapsed_time(status["started_time"], status["archived_time"]) - status_map[status["id"]] = status - - relationships = [(s.ancestor_id, s.descendant_id) for s in rel_qs] - for (parent_id, child_id) in relationships: - if parent_id not in status_map: - return - - parent_status = status_map[parent_id] - child_status = 
status_map[child_id] - child_status.setdefault("children", {}) - - parent_status.setdefault("children", {}).setdefault(child_id, child_status) - - return status_map[node_id] - - -@_worker_check -@_frozen_check -def activity_callback(activity_id, callback_data): - """ - callback a schedule node - :param activity_id: - :param callback_data: - :return: - """ - - version = Status.objects.version_for(activity_id) - times = 0 - - # it's possible that ScheduleService is not be set when callback make - while times < 3: - try: - service = ScheduleService.objects.schedule_for(activity_id, version) - break - except ScheduleService.DoesNotExist as e: - times += 1 - time.sleep(times) - if times >= 3: - raise e - - try: - process_id = PipelineProcess.objects.get(current_node_id=activity_id).id - except PipelineProcess.DoesNotExist: - return ActionResult( - result=False, message="invalid operation, this node is finished or pipeline have been revoked" - ) - - if service.is_finished: - raise exceptions.InvalidOperationException("activity(%s) callback already finished" % activity_id) - service.callback(callback_data, process_id) - return ActionResult(result=True, message="success") - - -def get_inputs(node_id): - """ - get inputs data for a node - :param node_id: - :return: - """ - return Data.objects.get(id=node_id).inputs - - -def get_outputs(node_id): - """ - get outputs data for a node - :param node_id: - :return: - """ - data = Data.objects.get(id=node_id) - return {"outputs": data.outputs, "ex_data": data.ex_data} - - -def get_batch_outputs(node_ids): - """ - get outputs data for a batch of nodes - :param node_ids: a list of node_id - :return: - """ - nodes_data = Data.objects.filter(id__in=node_ids) - return {node_data.id: {"outputs": node_data.outputs, "ex_data": node_data.ex_data} for node_data in nodes_data} - - -def get_activity_histories(node_id, loop=None): - """ - get get_activity_histories data for a node - :param node_id: 节点 ID - :param loop: 循环序号 - :return: - """ - 
return History.objects.get_histories(node_id, loop) - - -@_frozen_check -@_node_existence_check -def forced_fail(node_id, kill=False, ex_data=""): - """ - forced fail a node - :param node_id: - :param kill: - :param ex_data: - :return: - """ - - try: - process = PipelineProcess.objects.get(current_node_id=node_id) - except PipelineProcess.DoesNotExist: - return ActionResult( - result=False, message="invalid operation, this node is finished or pipeline have been revoked" - ) - - node = process.top_pipeline.node(node_id) - if not isinstance(node, ServiceActivity): - return ActionResult(result=False, message="can't not forced fail this type of node") - - action_result = Status.objects.transit(node_id, to_state=states.FAILED) - if not action_result.result: - return action_result - - try: - node.failure_handler(process.root_pipeline.data) - except Exception: - pass - - with transaction.atomic(): - s = Status.objects.get(id=node.id) - ScheduleService.objects.delete_schedule(s.id, s.version) - Data.objects.forced_fail(node_id, ex_data) - ProcessCeleryTask.objects.revoke(process.id, kill) - process.adjust_status() - process.is_sleep = True - process.save() - s.version = uniqid.uniqid() - s.save() - - return ActionResult(result=True, message="success") - - -def _get_process_to_be_waked(process, to_be_waked): - if process.can_be_waked(): - to_be_waked.append(process.id) - elif process.children: - for child_id in process.children: - child = PipelineProcess.objects.get(id=child_id) - _get_process_to_be_waked(child, to_be_waked) diff --git a/lib/pipeline/engine/apps.py b/lib/pipeline/engine/apps.py deleted file mode 100644 index 9b8a4ae..0000000 --- a/lib/pipeline/engine/apps.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.apps import AppConfig - - -class EngineConfig(AppConfig): - name = "pipeline.engine" - verbose_name = "PipelineEngine" - - def ready(self): - from pipeline.engine.signals import dispatch - - dispatch.dispatch() - - from pipeline.django_signal_valve import valve - from pipeline.engine.models import FunctionSwitch - - valve.set_valve_function(FunctionSwitch.objects.is_frozen) - FunctionSwitch.objects.init_db() diff --git a/lib/pipeline/engine/conf/__init__.py b/lib/pipeline/engine/conf/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/engine/conf/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" diff --git a/lib/pipeline/engine/conf/function_switch.py b/lib/pipeline/engine/conf/function_switch.py deleted file mode 100644 index 1de5435..0000000 --- a/lib/pipeline/engine/conf/function_switch.py +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.utils.translation import ugettext_lazy as _ - -FREEZE_ENGINE = "FREEZE_ENGINE" - -switch_list = [ - {"name": FREEZE_ENGINE, "description": _("用于冻结引擎, 冻结期间会屏蔽所有内部信号及暂停所有进程,同时拒绝所有流程控制请求"), "is_active": False} -] diff --git a/lib/pipeline/engine/core/__init__.py b/lib/pipeline/engine/core/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/engine/core/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/engine/core/api.py b/lib/pipeline/engine/core/api.py deleted file mode 100644 index 9e602fe..0000000 --- a/lib/pipeline/engine/core/api.py +++ /dev/null @@ -1,118 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging -import socket - -import kombu -from celery import current_app -from django.conf import settings as django_settings -from redis.exceptions import ConnectionError - -from pipeline.celery.settings import CELERY_QUEUES -from pipeline.conf import settings -from pipeline.django_signal_valve import valve -from pipeline.engine import signals -from pipeline.engine.core import data -from pipeline.engine.exceptions import RabbitMQConnectionError -from pipeline.engine.models import FunctionSwitch, PipelineProcess - -logger = logging.getLogger("root") -WORKER_PING_TIMES = 2 - - -def freeze(): - # turn on switch - FunctionSwitch.objects.freeze_engine() - - -def unfreeze(): - # turn off switch - FunctionSwitch.objects.unfreeze_engine() - - # resend signal - valve.open_valve(signals) - - # unfreeze process - frozen_process_list = PipelineProcess.objects.filter(is_frozen=True) - for process in frozen_process_list: - process.unfreeze() - - -def workers(connection=None): - try: - worker_list = data.cache_for("__pipeline__workers__") - except ConnectionError as e: - logger.exception("pipeline cache_for __pipeline__workers__ raise error: %s" % e) - raise e - - if not worker_list: - tries = 0 - while tries < WORKER_PING_TIMES: - kwargs = {"timeout": tries + 1} - if connection is not None: - kwargs["connection"] = connection - try: - worker_list = current_app.control.ping(**kwargs) - except socket.error as err: - logger.exception("pipeline current_app.control.ping raise error: %s" % err) - # raise error at last loop - if tries >= WORKER_PING_TIMES - 1: - raise RabbitMQConnectionError(err) - - if worker_list: - break - - tries += 1 - - if worker_list: - data.expire_cache("__pipeline__workers__", worker_list, settings.PIPELINE_WORKER_STATUS_CACHE_EXPIRES) - - return worker_list - - -def stats(): - inspect = current_app.control.inspect() - - stats = {"workers": {}, "queues": {}} - - worker_stats = inspect.stats() - active_queues = inspect.active_queues() - - if 
worker_stats: - - for name, stat in worker_stats.items(): - stats["workers"].setdefault(name, {"stat": {}, "queues": {}})["stat"] = stat - - if active_queues: - - for name, queues in active_queues.items(): - stats["workers"].setdefault(name, {"stat": {}, "queues": {}})["queues"] = queues - - if not hasattr(django_settings, "BROKER_VHOST"): - stats["queues"] = "can not find BROKER_VHOST in django settings" - - return stats - - with kombu.Connection(django_settings.BROKER_URL) as conn: - client = conn.get_manager() - - if not hasattr(client, "get_queue"): - stats["queues"] = "broker does not support queues info query" - - return stats - - for queue in CELERY_QUEUES: - stats["queues"][queue.name] = client.get_queue(django_settings.BROKER_VHOST, queue.name) - - return stats diff --git a/lib/pipeline/engine/core/context.py b/lib/pipeline/engine/core/context.py deleted file mode 100644 index 329cd47..0000000 --- a/lib/pipeline/engine/core/context.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging -from werkzeug.local import Local - -logger = logging.getLogger("celery") -local = Local() - - -def set_node_id(node_id): - try: - local.currnet_node_id = node_id - except Exception: - logger.exception("[engine context] set current_node_id for node({}) err.".format(node_id)) - - -def get_node_id(): - return getattr(local, "currnet_node_id", None) diff --git a/lib/pipeline/engine/core/data/__init__.py b/lib/pipeline/engine/core/data/__init__.py deleted file mode 100644 index 34ebae8..0000000 --- a/lib/pipeline/engine/core/data/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from pipeline.engine.core.data.api import * # noqa diff --git a/lib/pipeline/engine/core/data/api.py b/lib/pipeline/engine/core/data/api.py deleted file mode 100644 index 9f72086..0000000 --- a/lib/pipeline/engine/core/data/api.py +++ /dev/null @@ -1,141 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import contextlib -import logging -import traceback - -from django.utils.module_loading import import_string - -from pipeline.conf import settings -from pipeline.engine.exceptions import InvalidDataBackendError - -logger = logging.getLogger("celery") - -_backend = None -_candidate_backend = None - - -def _import_backend(backend_cls_path): - try: - backend_cls = import_string(backend_cls_path) - return backend_cls() - except ImportError: - raise InvalidDataBackendError( - "data backend({}) import error with exception: {}".format( - settings.PIPELINE_DATA_BACKEND, traceback.format_exc() - ) - ) - - -@contextlib.contextmanager -def _candidate_exc_ensure(propagate): - try: - yield - except Exception: - logger.error("candidate data backend operate error: {}".format(traceback.format_exc())) - - if propagate: - raise - - -if not _backend: - _backend = _import_backend(settings.PIPELINE_DATA_BACKEND) - -if not _candidate_backend and settings.PIPELINE_DATA_CANDIDATE_BACKEND: - _candidate_backend = _import_backend(settings.PIPELINE_DATA_CANDIDATE_BACKEND) - - -if settings.PIPELINE_DATA_BACKEND_AUTO_EXPIRE and not (_backend and _candidate_backend): - raise RuntimeError( - "PIPELINE_DATA_BACKEND and PIPELINE_DATA_CANDIDATE_BACKEND can't both be empty when PIPELINE_DATA_BACKEND_AUTO_EXPIRE is set." 
# noqa - ) - - -def _write_operation(method, *args, **kwargs): - propagate = False - - try: - - if settings.PIPELINE_DATA_BACKEND_AUTO_EXPIRE and method == "set_object": - # change set_object to expire_cache - getattr(_backend, "expire_cache")( - *args, **kwargs, expires=settings.PIPELINE_DATA_BACKEND_AUTO_EXPIRE_SECONDS - ) - else: - getattr(_backend, method)(*args, **kwargs) - - except Exception: - logger.error("data backend operate error: {}".format(traceback.format_exc())) - - if not _candidate_backend: - raise - - propagate = True - - if _candidate_backend: - with _candidate_exc_ensure(propagate): - getattr(_candidate_backend, method)(*args, **kwargs) - - -def _read_operation(method, *args, **kwargs): - result = None - propagate = False - - try: - result = getattr(_backend, method)(*args, **kwargs) - except Exception: - logger.error("data backend operate error: {}".format(traceback.format_exc())) - - if not _candidate_backend: - raise - - propagate = True - - if result is None and _candidate_backend: - with _candidate_exc_ensure(propagate): - result = getattr(_candidate_backend, method)(*args, **kwargs) - - return result - - -def set_object(key, obj): - _write_operation("set_object", key, obj) - - -def del_object(key): - _write_operation("del_object", key) - - -def expire_cache(key, obj, expires): - _write_operation("expire_cache", key, obj, expires) - - -def get_object(key): - return _read_operation("get_object", key) - - -def cache_for(key): - return _read_operation("cache_for", key) - - -def set_schedule_data(schedule_id, parent_data): - return set_object("%s_schedule_parent_data" % schedule_id, parent_data) - - -def get_schedule_parent_data(schedule_id): - return get_object("%s_schedule_parent_data" % schedule_id) - - -def delete_parent_data(schedule_id): - return del_object("%s_schedule_parent_data" % schedule_id) diff --git a/lib/pipeline/engine/core/data/base_backend.py b/lib/pipeline/engine/core/data/base_backend.py deleted file mode 100644 index 
143b6d9..0000000 --- a/lib/pipeline/engine/core/data/base_backend.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from abc import abstractmethod, ABCMeta - - -class BaseDataBackend(object, metaclass=ABCMeta): - @abstractmethod - def set_object(self, key, obj): - raise NotImplementedError() - - @abstractmethod - def get_object(self, key): - raise NotImplementedError() - - @abstractmethod - def del_object(self, key): - raise NotImplementedError() - - @abstractmethod - def expire_cache(self, key, value, expires): - raise NotImplementedError() - - @abstractmethod - def cache_for(self, key): - raise NotImplementedError() diff --git a/lib/pipeline/engine/core/data/mysql_backend.py b/lib/pipeline/engine/core/data/mysql_backend.py deleted file mode 100644 index 64d894c..0000000 --- a/lib/pipeline/engine/core/data/mysql_backend.py +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.core.cache import cache - -from pipeline.engine.core.data.base_backend import BaseDataBackend -from pipeline.engine.models.data import DataSnapshot - - -class MySQLDataBackend(BaseDataBackend): - def set_object(self, key, obj): - return DataSnapshot.objects.set_object(key, obj) - - def get_object(self, key): - return DataSnapshot.objects.get_object(key) - - def del_object(self, key): - return DataSnapshot.objects.del_object(key) - - def expire_cache(self, key, value, expires): - return cache.set(key, value, expires) - - def cache_for(self, key): - return cache.get(key) diff --git a/lib/pipeline/engine/core/data/redis_backend.py b/lib/pipeline/engine/core/data/redis_backend.py deleted file mode 100644 index d94c1fc..0000000 --- a/lib/pipeline/engine/core/data/redis_backend.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import pickle -import logging - -from pipeline.conf import settings -from pipeline.engine.core.data.base_backend import BaseDataBackend - -logger = logging.getLogger("celery") - - -class RedisDataBackend(BaseDataBackend): - def set_object(self, key, obj): - return settings.REDIS_INST.set(key, pickle.dumps(obj)) - - def get_object(self, key): - pickle_str = settings.REDIS_INST.get(key) - if not pickle_str: - logger.warning( - "[RedisDataBackend.get_object]redis(%s) get %s return %s" % (str(settings.REDIS_INST), key, pickle_str) - ) - return None - return pickle.loads(pickle_str) - - def del_object(self, key): - return settings.REDIS_INST.delete(key) - - def expire_cache(self, key, value, expires): - settings.REDIS_INST.set(key, pickle.dumps(value)) - settings.REDIS_INST.expire(key, expires) - return True - - def cache_for(self, key): - cache = settings.REDIS_INST.get(key) - return pickle.loads(cache) if cache else cache diff --git a/lib/pipeline/engine/core/handlers/__init__.py b/lib/pipeline/engine/core/handlers/__init__.py deleted file mode 100644 index 62eb73f..0000000 --- a/lib/pipeline/engine/core/handlers/__init__.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from .conditional_parallel import ConditionalParallelGatewayHandler -from .converge_gateway import ConvergeGatewayHandler -from .empty_start_event import EmptyStartEventHandler -from .endevent import EmptyEndEventHandler, ExecutableEndEventHandler -from .exclusive_gateway import ExclusiveGatewayHandler -from .parallel_gateway import ParallelGatewayHandler -from .service_activity import ServiceActivityHandler -from .subprocess import SubprocessHandler - - -class HandlersFactory(object): - _handlers = { - EmptyStartEventHandler.element_cls(): EmptyStartEventHandler(), - EmptyEndEventHandler.element_cls(): EmptyEndEventHandler(), - ServiceActivityHandler.element_cls(): ServiceActivityHandler(), - SubprocessHandler.element_cls(): SubprocessHandler(), - ExclusiveGatewayHandler.element_cls(): ExclusiveGatewayHandler(), - ParallelGatewayHandler.element_cls(): ParallelGatewayHandler(), - ConditionalParallelGatewayHandler.element_cls(): ConditionalParallelGatewayHandler(), - ConvergeGatewayHandler.element_cls(): ConvergeGatewayHandler(), - ExecutableEndEventHandler.element_cls(): ExecutableEndEventHandler(), - } - - _cluster_roots = [ExecutableEndEventHandler.element_cls()] - - @classmethod - def find_cluster_root_cls(cls, element): - for root in cls._cluster_roots: - if issubclass(type(element), root): - return root - - return type(element) - - @classmethod - def handlers_for(cls, element): - handler = cls._handlers.get(cls.find_cluster_root_cls(element)) - if not handler: - raise KeyError("handler for element({element}) not found.".format(element=element)) - - return handler diff --git a/lib/pipeline/engine/core/handlers/base.py b/lib/pipeline/engine/core/handlers/base.py deleted file mode 100644 index 944b972..0000000 --- a/lib/pipeline/engine/core/handlers/base.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. 
-Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from abc import abstractmethod - - -class FlowElementHandler(object): - class HandleResult(object): - def __init__(self, next_node, should_return, should_sleep, after_sleep_call=None, args=[], kwargs={}): - self.next_node = next_node - self.should_return = should_return - self.should_sleep = should_sleep - self.after_sleep_call = after_sleep_call - self.args = args - self.kwargs = kwargs - - @staticmethod - @abstractmethod - def element_cls(): - raise NotImplementedError() - - @abstractmethod - def handle(self, process, element, status): - raise NotImplementedError() - - def __call__(self, *args, **kwargs): - return self.handle(*args, **kwargs) diff --git a/lib/pipeline/engine/core/handlers/conditional_parallel.py b/lib/pipeline/engine/core/handlers/conditional_parallel.py deleted file mode 100644 index f28897a..0000000 --- a/lib/pipeline/engine/core/handlers/conditional_parallel.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging -import traceback - -from pipeline.core.data.hydration import hydrate_data -from pipeline.core.flow.gateway import ConditionalParallelGateway -from pipeline.engine.models import PipelineProcess, Status -from pipeline.exceptions import PipelineException - -from .base import FlowElementHandler - -logger = logging.getLogger("pipeline_engine") - -__all__ = ["ConditionalParallelGatewayHandler"] - - -class ConditionalParallelGatewayHandler(FlowElementHandler): - @staticmethod - def element_cls(): - return ConditionalParallelGateway - - def handle(self, process, element, status): - if status.loop > 1: - process.top_pipeline.context.recover_variable() - - try: - hydrate_context = hydrate_data(process.top_pipeline.context.variables) - targets = element.targets_meet_condition(hydrate_context) - except PipelineException as e: - logger.error(traceback.format_exc()) - Status.objects.fail(element, ex_data=str(e)) - return self.HandleResult(next_node=None, should_return=True, should_sleep=True) - - children = [] - - for target in targets: - try: - child = PipelineProcess.objects.fork_child( - parent=process, current_node_id=target.id, destination_id=element.converge_gateway_id - ) - except PipelineException as e: - logger.error(traceback.format_exc()) - Status.objects.fail(element, ex_data=str(e)) - return self.HandleResult(next_node=None, should_return=True, should_sleep=True) - - children.append(child) - - process.join(children) - - Status.objects.finish(element) - - return self.HandleResult(next_node=None, should_return=True, should_sleep=True) diff --git 
a/lib/pipeline/engine/core/handlers/converge_gateway.py b/lib/pipeline/engine/core/handlers/converge_gateway.py deleted file mode 100644 index ea59889..0000000 --- a/lib/pipeline/engine/core/handlers/converge_gateway.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging -import traceback - -from pipeline.core.flow.gateway import ConvergeGateway -from pipeline.engine import exceptions -from pipeline.engine.models import Status - -from .base import FlowElementHandler - -logger = logging.getLogger("pipeline_engine") - -__all__ = ["ConvergeGatewayHandler"] - - -class ConvergeGatewayHandler(FlowElementHandler): - @staticmethod - def element_cls(): - return ConvergeGateway - - def handle(self, process, element, status): - # try to sync data if current process has children - if process.children: - try: - process.sync_with_children() - except exceptions.ChildDataSyncError: - logger.error(traceback.format_exc()) - # clean children and update current_node to prevent re execute child process - process.clean_children() - Status.objects.fail(element, ex_data="Sync branch context error, check data backend status please.") - return self.HandleResult(next_node=None, should_return=True, should_sleep=True) - - Status.objects.finish(element) - return 
self.HandleResult(next_node=element.next(), should_return=False, should_sleep=False) diff --git a/lib/pipeline/engine/core/handlers/empty_start_event.py b/lib/pipeline/engine/core/handlers/empty_start_event.py deleted file mode 100644 index 4f4e936..0000000 --- a/lib/pipeline/engine/core/handlers/empty_start_event.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging - -from pipeline.core.data import var -from pipeline.core.flow.event import EmptyStartEvent -from pipeline.engine.models import Status - -from .base import FlowElementHandler - -logger = logging.getLogger("pipeline_engine") - -__all__ = ["EmptyStartEventHandler"] - - -class EmptyStartEventHandler(FlowElementHandler): - @staticmethod - def element_cls(): - return EmptyStartEvent - - @staticmethod - def _hydrate(value): - return value.get() if issubclass(value.__class__, var.Variable) else value - - def handle(self, process, element, status): - # 进行变量预渲染 - if hasattr(element.data, "inputs"): - for pre_render_key in element.data.inputs.get("pre_render_keys", []): - context_variables = process.top_pipeline.context.variables - if pre_render_key in context_variables: - context_variables[pre_render_key] = self._hydrate(context_variables[pre_render_key]) - - Status.objects.finish(element) - return self.HandleResult(next_node=element.next(), should_return=False, should_sleep=False) diff --git a/lib/pipeline/engine/core/handlers/endevent/__init__.py b/lib/pipeline/engine/core/handlers/endevent/__init__.py deleted file mode 100644 index 9111c44..0000000 --- a/lib/pipeline/engine/core/handlers/endevent/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations under the License. -""" - -from .empty_end_event import EmptyEndEventHandler # noqa -from .executable_end_event import ExecutableEndEventHandler # noqa diff --git a/lib/pipeline/engine/core/handlers/endevent/base.py b/lib/pipeline/engine/core/handlers/endevent/base.py deleted file mode 100644 index 30644fb..0000000 --- a/lib/pipeline/engine/core/handlers/endevent/base.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging - -from pipeline.core.flow import activity -from pipeline.engine import states -from pipeline.engine.models import Data, Status - -from ..base import FlowElementHandler - -logger = logging.getLogger("celery") - - -class EndEventHandler(FlowElementHandler): - @staticmethod - def element_cls(): - raise NotImplementedError() - - def handle(self, process, element, status): - pipeline = process.pop_pipeline() - if process.pipeline_stack: - # pop subprocess and return to top of stack - pipeline.context.write_output(pipeline) - Status.objects.finish(element) - sub_process_node = process.top_pipeline.node(pipeline.id) - Status.objects.finish(sub_process_node) - # extract subprocess output - process.top_pipeline.context.extract_output(sub_process_node) - return self.HandleResult(next_node=sub_process_node.next(), should_return=False, should_sleep=False) - else: - with Status.objects.lock(pipeline.id): - # save data and destroy process - pipeline.context.write_output(pipeline) - Data.objects.write_node_data(pipeline) - Status.objects.finish(element) - - Status.objects.transit(pipeline.id, to_state=states.FINISHED, is_pipeline=True) - # PipelineInstance.objects.set_finished(process.root_pipeline.id) - element.pipeline_finish(process.root_pipeline.id) - for act in pipeline.spec.activities: - if isinstance(act, activity.SubProcess): - act.pipeline.context.clear() - pipeline.context.clear() - process.destroy() - return self.HandleResult(next_node=None, should_return=True, should_sleep=False) diff --git a/lib/pipeline/engine/core/handlers/endevent/empty_end_event.py b/lib/pipeline/engine/core/handlers/endevent/empty_end_event.py deleted file mode 100644 index 959d7a6..0000000 --- a/lib/pipeline/engine/core/handlers/endevent/empty_end_event.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. 
-Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from pipeline.core.flow.event import EmptyEndEvent - -from .base import EndEventHandler - - -class EmptyEndEventHandler(EndEventHandler): - @staticmethod - def element_cls(): - return EmptyEndEvent diff --git a/lib/pipeline/engine/core/handlers/endevent/executable_end_event.py b/lib/pipeline/engine/core/handlers/endevent/executable_end_event.py deleted file mode 100644 index bd684b9..0000000 --- a/lib/pipeline/engine/core/handlers/endevent/executable_end_event.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging -import traceback - -from pipeline.core.flow.event import ExecutableEndEvent -from pipeline.engine.models import Status - -from .base import EndEventHandler - -logger = logging.getLogger("celery") - - -class ExecutableEndEventHandler(EndEventHandler): - @staticmethod - def element_cls(): - return ExecutableEndEvent - - def handle(self, process, element, status): - try: - element.execute( - in_subprocess=process.in_subprocess, - root_pipeline_id=process.root_pipeline.id, - current_pipeline_id=process.top_pipeline.id, - ) - except Exception: - ex_data = traceback.format_exc() - element.data.outputs.ex_data = ex_data - logger.error(ex_data) - - Status.objects.fail(element, ex_data) - return self.HandleResult(next_node=None, should_return=False, should_sleep=True) - - return super(ExecutableEndEventHandler, self).handle(process, element, status) diff --git a/lib/pipeline/engine/core/handlers/exclusive_gateway.py b/lib/pipeline/engine/core/handlers/exclusive_gateway.py deleted file mode 100644 index 0e66df1..0000000 --- a/lib/pipeline/engine/core/handlers/exclusive_gateway.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging -import traceback -from copy import deepcopy - -from pipeline.core.data.hydration import hydrate_data -from pipeline.core.flow.gateway import ExclusiveGateway -from pipeline.engine.models import Status -from pipeline.exceptions import PipelineException - -from .base import FlowElementHandler - -logger = logging.getLogger("pipeline_engine") - -__all__ = ["ExclusiveGatewayHandler"] - - -class ExclusiveGatewayHandler(FlowElementHandler): - @staticmethod - def element_cls(): - return ExclusiveGateway - - def handle(self, process, element, status): - if status.loop > 1: - process.top_pipeline.context.recover_variable() - try: - # use temp variables instead of real variables to prevent output pre extract error - temp_variables = deepcopy(process.top_pipeline.context.variables) - hydrate_context = hydrate_data(temp_variables) - logger.info("[{}] hydrate_context: {}".format(element.id, hydrate_context)) - next_node = element.next(hydrate_context) - except PipelineException as e: - logger.error(traceback.format_exc()) - Status.objects.fail(element, ex_data=str(e)) - return self.HandleResult(next_node=None, should_return=True, should_sleep=True) - Status.objects.finish(element) - return self.HandleResult(next_node=next_node, should_return=False, should_sleep=False) diff --git a/lib/pipeline/engine/core/handlers/parallel_gateway.py b/lib/pipeline/engine/core/handlers/parallel_gateway.py deleted file mode 100644 index 0ca88e6..0000000 --- a/lib/pipeline/engine/core/handlers/parallel_gateway.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging -import traceback - -from pipeline.core.flow.gateway import ParallelGateway -from pipeline.engine.models import PipelineProcess, Status -from pipeline.exceptions import PipelineException - -from .base import FlowElementHandler - -logger = logging.getLogger("pipeline_engine") - -__all__ = ["ParallelGatewayHandler"] - - -class ParallelGatewayHandler(FlowElementHandler): - @staticmethod - def element_cls(): - return ParallelGateway - - def handle(self, process, element, status): - targets = element.outgoing.all_target_node() - children = [] - - for target in targets: - try: - child = PipelineProcess.objects.fork_child( - parent=process, current_node_id=target.id, destination_id=element.converge_gateway_id - ) - except PipelineException as e: - logger.error(traceback.format_exc()) - Status.objects.fail(element, str(e)) - return self.HandleResult(next_node=None, should_return=True, should_sleep=True) - - children.append(child) - - process.join(children) - - Status.objects.finish(element) - - return self.HandleResult(next_node=None, should_return=True, should_sleep=True) diff --git a/lib/pipeline/engine/core/handlers/service_activity.py b/lib/pipeline/engine/core/handlers/service_activity.py deleted file mode 100644 index c711fe2..0000000 --- a/lib/pipeline/engine/core/handlers/service_activity.py +++ /dev/null @@ -1,151 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging -import traceback - -from pipeline.conf import default_settings -from pipeline.core.data.hydration import hydrate_node_data -from pipeline.core.flow.activity import ServiceActivity -from pipeline.django_signal_valve import valve -from pipeline.engine import signals -from pipeline.engine.models import Data, ScheduleService, Status - -from .base import FlowElementHandler - -logger = logging.getLogger("pipeline_engine") - -__all__ = ["ServiceActivityHandler"] - - -class ServiceActivityHandler(FlowElementHandler): - @staticmethod - def element_cls(): - return ServiceActivity - - def handle(self, process, element, status): - pre_execute_success = False - success = False - exception_occurred = False - monitoring = False - version = status.version - root_pipeline = process.root_pipeline - - # rerun mode - if status.loop > 1 and not element.on_retry(): - element.prepare_rerun_data() - process.top_pipeline.context.recover_variable() - - elif element.on_retry(): - element.retry_at_current_exec() - - # set loop to data - element.data.inputs._loop = status.loop + default_settings.PIPELINE_RERUN_INDEX_OFFSET - element.data.outputs._loop = status.loop + default_settings.PIPELINE_RERUN_INDEX_OFFSET - - # pre output extract - process.top_pipeline.context.extract_output(element, set_miss=False) - - # hydrate inputs - hydrate_node_data(element) - - if element.timeout: - logger.info("node {} {} start timeout monitor, timeout: {}".format(element.id, version, 
element.timeout)) - signals.service_activity_timeout_monitor_start.send( - sender=element.__class__, - node_id=element.id, - version=version, - root_pipeline_id=root_pipeline.id, - countdown=element.timeout, - ) - monitoring = True - - element.setup_runtime_attrs( - id=element.id, root_pipeline_id=root_pipeline.id, - ) - - # pre_process inputs and execute service - try: - pre_execute_success = element.execute_pre_process(root_pipeline.data) - if pre_execute_success: - success = element.execute(root_pipeline.data) - except Exception: - if element.error_ignorable: - # ignore exception - pre_execute_success = True - success = True - exception_occurred = True - element.ignore_error() - ex_data = traceback.format_exc() - element.data.outputs.ex_data = ex_data - logger.error(ex_data) - - # process result - if pre_execute_success is False or success is False: - ex_data = element.data.get_one_of_outputs("ex_data") - Status.objects.fail(element, ex_data) - try: - element.failure_handler(root_pipeline.data) - except Exception: - logger.error("failure_handler({}) failed: {}".format(element.id, traceback.format_exc())) - - if monitoring: - signals.service_activity_timeout_monitor_end.send( - sender=element.__class__, node_id=element.id, version=version - ) - logger.info("node {} {} timeout monitor revoke".format(element.id, version)) - - # send activity error signal - valve.send( - signals, - "activity_failed", - sender=root_pipeline, - pipeline_id=root_pipeline.id, - pipeline_activity_id=element.id, - subprocess_id_stack=process.subprocess_stack, - ) - - return self.HandleResult(next_node=None, should_return=False, should_sleep=True) - else: - is_error_ignored = element.error_ignorable and not element.get_result_bit() - if element.need_schedule() and not exception_occurred and not is_error_ignored: - # write data before schedule - Data.objects.write_node_data(element) - return self.HandleResult( - next_node=None, - should_return=True, - should_sleep=True, - 
after_sleep_call=ScheduleService.objects.set_schedule, - args=[], - kwargs=dict( - activity_id=element.id, - service_act=element.shell(), - process_id=process.id, - version=version, - parent_data=process.top_pipeline.data, - ), - ) - - process.top_pipeline.context.extract_output(element) - error_ignorable = not element.get_result_bit() - - if monitoring: - signals.service_activity_timeout_monitor_end.send( - sender=element.__class__, node_id=element.id, version=version - ) - logger.info("node {} {} timeout monitor revoke".format(element.id, version)) - - if not Status.objects.finish(element, error_ignorable): - # has been forced failed - return self.HandleResult(next_node=None, should_return=False, should_sleep=True) - return self.HandleResult(next_node=element.next(), should_return=False, should_sleep=False) diff --git a/lib/pipeline/engine/core/handlers/subprocess.py b/lib/pipeline/engine/core/handlers/subprocess.py deleted file mode 100644 index 4f14921..0000000 --- a/lib/pipeline/engine/core/handlers/subprocess.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging - -from pipeline.conf import default_settings -from pipeline.core.data.hydration import hydrate_node_data -from pipeline.core.flow.activity import SubProcess - -from .base import FlowElementHandler - -logger = logging.getLogger("pipeline_engine") - -__all__ = ["SubprocessHandler"] - - -class SubprocessHandler(FlowElementHandler): - @staticmethod - def element_cls(): - return SubProcess - - def handle(self, process, element, status): - # rerun mode - if status.loop > 1: - element.prepare_rerun_data() - element.pipeline.context.recover_variable() - process.top_pipeline.context.recover_variable() - - # set loop count - element.data.outputs._loop = status.loop + default_settings.PIPELINE_RERUN_INDEX_OFFSET - - # pre output extract - process.top_pipeline.context.extract_output(element, set_miss=False) - - # hydrate data - hydrate_node_data(element) - - # context injection - data = element.pipeline.data - context = element.pipeline.context - for k, v in list(data.get_inputs().items()): - context.set_global_var(k, v) - - sub_pipeline = element.pipeline - process.push_pipeline(sub_pipeline, is_subprocess=True) - process.take_snapshot() - return self.HandleResult(next_node=sub_pipeline.start_event, should_return=False, should_sleep=False) diff --git a/lib/pipeline/engine/core/runtime.py b/lib/pipeline/engine/core/runtime.py deleted file mode 100644 index d7e8a65..0000000 --- a/lib/pipeline/engine/core/runtime.py +++ /dev/null @@ -1,140 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import contextlib -import logging -import traceback - -from pipeline.conf import settings as pipeline_settings -from pipeline.core.flow.activity import SubProcess -from pipeline.engine import states -from pipeline.engine.core import context -from pipeline.engine.core.handlers import HandlersFactory -from pipeline.engine.models import NAME_MAX_LENGTH, FunctionSwitch, NodeRelationship, Status - -logger = logging.getLogger("pipeline_engine") -celery_logger = logging.getLogger("celery") - -RERUN_MAX_LIMIT = pipeline_settings.PIPELINE_RERUN_MAX_TIMES - - -@contextlib.contextmanager -def runtime_exception_handler(process): - try: - yield - except Exception as e: - logger.error(traceback.format_exc()) - process.exit_gracefully(e) - - -def run_loop(process): - """ - pipeline 推进主循环 - :param process: 当前进程 - :return: - """ - with runtime_exception_handler(process): - while True: - - current_node = process.top_pipeline.node(process.current_node_id) - celery_logger.info( - "[pipeline-trace](root_pipeline: %s) execute node %s" % (process.root_pipeline_id, current_node.id) - ) - - # check child process destination - if process.destination_id == current_node.id: - try: - process.destroy_and_wake_up_parent(current_node.id) - except Exception: - logger.error(traceback.format_exc()) - logger.info("child process(%s) finish." % process.id) - return - - # check root pipeline status - need_sleep, pipeline_state = process.root_sleep_check() - if need_sleep: - logger.info("pipeline(%s) turn to sleep." 
% process.root_pipeline.id) - process.sleep(do_not_save=(pipeline_state == states.REVOKED)) - return - - # check subprocess status - need_sleep, subproc_above = process.subproc_sleep_check() - if need_sleep: - logger.info("process(%s) turn to sleep." % process.root_pipeline.id) - process.sleep(adjust_status=True, adjust_scope=subproc_above) - return - - # check engine status - if FunctionSwitch.objects.is_frozen(): - logger.info("pipeline(%s) have been frozen." % process.id) - process.freeze() - return - - # try to transit current node to running state - name = (current_node.name or str(current_node.__class__))[:NAME_MAX_LENGTH] - action = Status.objects.transit(id=current_node.id, to_state=states.RUNNING, start=True, name=name) - - # check rerun limit - if ( - not isinstance(current_node, SubProcess) - and RERUN_MAX_LIMIT != 0 - and action.extra.loop > RERUN_MAX_LIMIT - ): - logger.info( - "node({nid}) rerun times exceed max limit: {limit}".format( - nid=current_node.id, limit=RERUN_MAX_LIMIT - ) - ) - - # fail - action = Status.objects.fail( - current_node, "rerun times exceed max limit: {limit}".format(limit=RERUN_MAX_LIMIT) - ) - - if not action.result: - logger.warning( - "can not transit node({}) to running, pipeline({}) turn to sleep. " - "message: {}".format(current_node.id, process.root_pipeline.id, action.message) - ) - - process.sleep(adjust_status=True) - return - - if not action.result: - logger.warning( - "can not transit node({}) to running, pipeline({}) turn to sleep. 
message: {}".format( - current_node.id, process.root_pipeline.id, action.message - ) - ) - process.sleep(adjust_status=True) - return - - # refresh current node - process.refresh_current_node(current_node.id) - - # build relationship - NodeRelationship.objects.build_relationship(process.top_pipeline.id, current_node.id) - # set up context - context.set_node_id(current_node.id) - - result = HandlersFactory.handlers_for(current_node)(process, current_node, action.extra) - - if result.should_return or result.should_sleep: - if result.should_sleep: - process.sleep(adjust_status=True) - if result.after_sleep_call: - result.after_sleep_call(*result.args, **result.kwargs) - return - - # store current node id - process.current_node_id = result.next_node.id diff --git a/lib/pipeline/engine/core/schedule.py b/lib/pipeline/engine/core/schedule.py deleted file mode 100644 index 13946b2..0000000 --- a/lib/pipeline/engine/core/schedule.py +++ /dev/null @@ -1,262 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import contextlib -import logging -import traceback - -from django.db import transaction - -from pipeline.django_signal_valve import valve -from pipeline.engine.core import context -from pipeline.engine import exceptions, signals, states -from pipeline.engine.core.data import delete_parent_data, get_schedule_parent_data, set_schedule_data -from pipeline.engine.models import Data, MultiCallbackData, PipelineProcess, ScheduleService, Status - -logger = logging.getLogger("pipeline_engine") -celery_logger = logging.getLogger("celery") - - -@contextlib.contextmanager -def schedule_exception_handler(process_id, schedule_id): - try: - yield - except Exception as e: - activity_id = schedule_id[: ScheduleService.SCHEDULE_ID_SPLIT_DIVISION] - version = schedule_id[ScheduleService.SCHEDULE_ID_SPLIT_DIVISION :] - if Status.objects.filter(id=activity_id, version=version).exists(): - logger.error(traceback.format_exc()) - process = PipelineProcess.objects.get(id=process_id) - process.exit_gracefully(e) - else: - logger.warning("schedule({} - {}) forced exit.".format(activity_id, version)) - - delete_parent_data(schedule_id) - - -@contextlib.contextmanager -def auto_release_schedule_lock(schedule_id): - yield - # release schedule lock before exit schedule - ScheduleService.objects.filter(id=schedule_id, is_scheduling=True).update(is_scheduling=False) - logger.warning("schedule({}) unlock success.".format(schedule_id)) - - -def schedule(process_id, schedule_id, data_id=None): - """ - 调度服务主函数 - :param process_id: 被调度的节点所属的 PipelineProcess - :param schedule_id: 调度 ID - :param data_id: 回调数据ID - :return: - """ - with schedule_exception_handler(process_id, schedule_id): - # set up context - context.set_node_id(schedule_id[: ScheduleService.SCHEDULE_ID_SPLIT_DIVISION]) - - # schedule maybe destroyed by other schedule - try: - sched_service = ScheduleService.objects.get(id=schedule_id) - # stop if schedule status finished - if sched_service.is_finished: - logger.warning("schedule 
already finished, give up, sched_id: {}".format(schedule_id)) - return - except ScheduleService.DoesNotExist: - logger.warning("schedule not exist, give up, sched_id: {}".format(schedule_id)) - return - - # check whether the node is in a state waiting for scheduling - service_act = sched_service.service_act - act_id = sched_service.activity_id - version = sched_service.version - - if not Status.objects.filter(id=act_id, version=version, state=states.RUNNING).exists(): - # forced failed - logger.warning( - "schedule service failed, schedule({} - {}) node state is not running or version do not match.".format( - act_id, version - ) - ) - sched_service.destroy() - return - - # try update lock schedule - is_updated = ScheduleService.objects.filter(id=schedule_id, is_scheduling=False).update(is_scheduling=True) - - # lock failed, other worker may locking - if is_updated == 0: - # only retry at multi calback enabled case - if not sched_service.multi_callback_enabled: - logger.warning( - "invalid schedule request, schedule({} - {}) node state is not multi callback enabled type.".format( - act_id, version - ) - ) - return - - # retry lock after seconds - logger.warning("schedule service lock-{} failed, retry after seconds".format(schedule_id)) - valve.send( - signals, - "schedule_ready", - sender=ScheduleService, - process_id=process_id, - schedule_id=schedule_id, - data_id=data_id, - countdown=2, - ) - return - - celery_logger.info("[pipeline-trace] schedule node %s with version %s" % (act_id, version)) - with auto_release_schedule_lock(schedule_id): - # get data - parent_data = get_schedule_parent_data(sched_service.id) - if parent_data is None: - raise exceptions.DataRetrieveError( - "child process({}) retrieve parent_data error, sched_id: {}".format(process_id, schedule_id) - ) - - # get schedule data - if sched_service.multi_callback_enabled and data_id: - try: - callback_data = MultiCallbackData.objects.get(id=data_id) - schedule_data = callback_data.data - except 
MultiCallbackData.DoesNotExist: - logger.warning( - "schedule get callback_data failed, give up schedule, sched_id: {}".format(schedule_id) - ) - return - else: - schedule_data = sched_service.callback_data - - # schedule - ex_data, success = None, False - try: - success = service_act.schedule(parent_data, schedule_data) - if success is None: - success = True - except Exception: - if service_act.error_ignorable: - success = True - service_act.ignore_error() - service_act.finish_schedule() - - ex_data = traceback.format_exc() - logging.error(ex_data) - - sched_service.schedule_times += 1 - set_schedule_data(sched_service.id, parent_data) - - # schedule failed - if not success: - if not Status.objects.transit(id=act_id, version=version, to_state=states.FAILED).result: - # forced failed - logger.warning( - "FAILED transit failed, schedule({} - {}) had been forced exit.".format(act_id, version) - ) - sched_service.destroy() - return - - if service_act.timeout: - signals.service_activity_timeout_monitor_end.send( - sender=service_act.__class__, node_id=service_act.id, version=version - ) - logger.info("node {} {} timeout monitor revoke".format(service_act.id, version)) - - Data.objects.write_node_data(service_act, ex_data=ex_data) - - with transaction.atomic(): - process = PipelineProcess.objects.select_for_update().get(id=sched_service.process_id) - if not process.is_alive: - logger.info("pipeline %s has been revoked, status adjust failed." 
% process.root_pipeline_id) - return - - process.adjust_status() - - # send activity error signal - try: - service_act.schedule_fail() - except Exception: - logger.error("schedule_fail handler fail: %s" % traceback.format_exc()) - - signals.service_schedule_fail.send( - sender=ScheduleService, activity_shell=service_act, schedule_service=sched_service, ex_data=ex_data - ) - - valve.send( - signals, - "activity_failed", - sender=process.root_pipeline, - pipeline_id=process.root_pipeline_id, - pipeline_activity_id=service_act.id, - subprocess_id_stack=process.subprocess_stack, - ) - return - - # schedule execute finished or one time callback finished - if service_act.is_schedule_done() or sched_service.is_one_time_callback(): - error_ignorable = not service_act.get_result_bit() - if not Status.objects.transit(id=act_id, version=version, to_state=states.FINISHED).result: - # forced failed - logger.warning( - "FINISHED transit failed, schedule({} - {}) had been forced exit.".format(act_id, version) - ) - sched_service.destroy() - return - - if service_act.timeout: - signals.service_activity_timeout_monitor_end.send( - sender=service_act.__class__, node_id=service_act.id, version=version - ) - logger.info("node {} {} timeout monitor revoke".format(service_act.id, version)) - - Data.objects.write_node_data(service_act) - if error_ignorable: - s = Status.objects.get(id=act_id) - s.error_ignorable = True - s.save() - - # sync parent data - process = PipelineProcess.objects.get(id=sched_service.process_id) - if not process.is_alive: - logger.warning("schedule({} - {}) revoked.".format(act_id, version)) - sched_service.destroy() - return - - process.top_pipeline.data.update_outputs(parent_data.get_outputs()) - # extract outputs - process.top_pipeline.context.extract_output(service_act) - process.save(save_snapshot=True) - - # clear temp data - delete_parent_data(sched_service.id) - # save schedule service - sched_service.finish() - - signals.service_schedule_success.send( - 
sender=ScheduleService, activity_shell=service_act, schedule_service=sched_service - ) - - valve.send( - signals, - "wake_from_schedule", - sender=ScheduleService, - process_id=sched_service.process_id, - activity_id=sched_service.activity_id, - ) - else: - Data.objects.write_node_data(service_act) - if sched_service.multi_callback_enabled: - sched_service.save() - else: - sched_service.set_next_schedule() diff --git a/lib/pipeline/engine/exceptions.py b/lib/pipeline/engine/exceptions.py deleted file mode 100644 index 10fc35d..0000000 --- a/lib/pipeline/engine/exceptions.py +++ /dev/null @@ -1,51 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from pipeline.exceptions import PipelineException - - -class PipelineEngineException(PipelineException): - pass - - -class NodeNotExistException(PipelineEngineException): - pass - - -class InvalidOperationException(PipelineEngineException): - pass - - -class RabbitMQConnectionError(PipelineEngineException): - pass - - -class ChildDataSyncError(PipelineEngineException): - pass - - -class DataRetrieveError(PipelineEngineException): - pass - - -class InvalidDataBackendError(PipelineEngineException): - pass - - -class InvalidPipelineEndHandleError(PipelineEngineException): - pass - - -class CeleryFailedTaskCatchException(PipelineEngineException): - def __init__(self, task_name): - self.task_name = task_name diff --git a/lib/pipeline/engine/health/__init__.py b/lib/pipeline/engine/health/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/engine/health/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" diff --git a/lib/pipeline/engine/health/zombie/__init__.py b/lib/pipeline/engine/health/zombie/__init__.py deleted file mode 100644 index 15fe627..0000000 --- a/lib/pipeline/engine/health/zombie/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from pipeline.engine.health.zombie.heal import get_healer # noqa diff --git a/lib/pipeline/engine/health/zombie/doctors.py b/lib/pipeline/engine/health/zombie/doctors.py deleted file mode 100644 index 9306884..0000000 --- a/lib/pipeline/engine/health/zombie/doctors.py +++ /dev/null @@ -1,134 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations under the License. -""" -import abc -import logging - -from django.utils import timezone - -from pipeline.core.pipeline import Pipeline -from pipeline.engine import signals, states -from pipeline.engine.models import ProcessCeleryTask, ScheduleService, Status -from pipeline.utils import uniqid - -logger = logging.getLogger("celery") - - -class ZombieProcDoctor(metaclass=abc.ABCMeta): - @abc.abstractmethod - def confirm(self, proc): - raise NotImplementedError() - - @abc.abstractmethod - def cure(self, proc): - raise NotImplementedError() - - -class RunningNodeZombieDoctor(ZombieProcDoctor): - def __init__(self, max_stuck_time: float, detect_wait_callback_proc: bool = False): - """ - :param max_stuck_time: 最大卡住时间 - :param detect_wait_callback_proc: 是否检测等待回调的进程 - """ - self.max_stuck_time = max_stuck_time - self.detect_wait_callback_proc = detect_wait_callback_proc - - def confirm(self, proc): - - # do not process none current node - if not proc.current_node_id: - logger.warning("Process({}) with current_node({}), skip".format(proc.id, proc.current_node_id)) - return False - - # do not process node status not exist - try: - status = Status.objects.get(id=proc.current_node_id) - except Status.DoesNotExist: - logger.warning("Process({})'s current_node({}) not exist, skip".format(proc.id, proc.current_node_id)) - return False - - # do not process legacy status data - if not status.state_refresh_at: - logger.warning( - "Process({})'s current_node({}) state_fresh_at({}) is invalid, skip".format( - proc.id, proc.current_node_id, status.state_refresh_at - ) - ) - return False - - # only process RUNNING node - if status.state != states.RUNNING: - return False - - try: - schedule = ScheduleService.objects.schedule_for(status.id, status.version) - except ScheduleService.DoesNotExist: - pass - else: - if schedule.wait_callback and not self.detect_wait_callback_proc: - return False - - stuck_time = 
(timezone.now() - status.state_refresh_at).total_seconds() - if float(stuck_time) > float(self.max_stuck_time): - logger.info( - "Process({}) with current_node({}) stuck_time({}) exceed max_stuck_time({}), " - "mark as zombie".format(proc.id, proc.current_node_id, stuck_time, self.max_stuck_time) - ) - return True - - return False - - def cure(self, proc): - - current_node_id = proc.current_node_id - - # try to transit current node to FAILURE - try: - result = Status.objects.raw_fail( - node_id=current_node_id, - ex_data="This node had been failed because the process diagnode as zombie process", - ) - except Exception: - logger.exception( - "An error occurred when transit node({}) for zombie process({}).".format(current_node_id, proc.id) - ) - else: - if not result.result: - logger.error( - "can't not transit node({}) for zombie process({}), message: {}".format( - current_node_id, proc.id, result.message - ) - ) - else: - status = result.extra - status.version = uniqid.uniqid() - status.save() - ProcessCeleryTask.objects.revoke(proc.id, kill=True) - - # adjust pipeline state - proc.adjust_status() - proc.is_sleep = True - proc.save() - logger.info( - "Zombie process({}) with node({}) had been cured by {}".format( - proc.id, current_node_id, self.__class__.__name__ - ) - ) - try: - signals.activity_failed.send( - sender=Pipeline, pipeline_id=proc.root_pipeline_id, pipeline_activity_id=current_node_id - ) - except Exception as e: - logger.exception( - "An error({}) occurred when send activity_failed signals node({}) " - "for zombie process({}).".format(e, current_node_id, proc.id) - ) diff --git a/lib/pipeline/engine/health/zombie/heal.py b/lib/pipeline/engine/health/zombie/heal.py deleted file mode 100644 index 75af7d5..0000000 --- a/lib/pipeline/engine/health/zombie/heal.py +++ /dev/null @@ -1,74 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. 
-Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging - -from django.utils.module_loading import import_string - -from pipeline.conf import default_settings -from pipeline.engine.models import PipelineProcess - -logger = logging.getLogger("celery") - - -def get_healer(): - if not default_settings.ENGINE_ZOMBIE_PROCESS_DOCTORS: - logger.info("ENGINE_ZOMBIE_PROCESS_DOCTORS settings is empty, use dummy healer") - return DummyZombieProcHealer() - - doctors = [] - - for dr_setting in default_settings.ENGINE_ZOMBIE_PROCESS_DOCTORS: - try: - doctors.append(import_string(dr_setting["class"])(**dr_setting["config"])) - except Exception: - logger.exception("Error occurred when init doctor({}), skip".format(dr_setting)) - - if not doctors: - logger.info("All doctor init failed, use dummy healer") - return DummyZombieProcHealer() - - return ZombieProcHealer(doctors=doctors) - - -class DummyZombieProcHealer(object): - def heal(self): - pass - - -class ZombieProcHealer(object): - def __init__(self, doctors): - self.doctors = doctors - - def heal(self): - - if not self.doctors: - return - - proc_ids = self._get_process_ids() - - for proc_id in proc_ids: - - # get proc every time for latest state - proc = PipelineProcess.objects.get(id=proc_id) - - if not proc.is_alive or proc.is_frozen: - continue - - for dr in self.doctors: - if dr.confirm(proc): - dr.cure(proc) - break - - def _get_process_ids(self): - return 
PipelineProcess.objects.filter(is_alive=True, is_frozen=False).values_list("id", flat=True) diff --git a/lib/pipeline/engine/logging.py b/lib/pipeline/engine/logging.py deleted file mode 100644 index e845852..0000000 --- a/lib/pipeline/engine/logging.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging - - -def get_logger(): - return logging.getLogger(__name__) - - -logger = get_logger() diff --git a/lib/pipeline/engine/migrations/0001_initial.py b/lib/pipeline/engine/migrations/0001_initial.py deleted file mode 100644 index 8fbaab4..0000000 --- a/lib/pipeline/engine/migrations/0001_initial.py +++ /dev/null @@ -1,230 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" -# Generated by Django 1.11.2 on 2017-11-24 10:43 - - -from django.db import migrations, models -import django.db.models.deletion -import pipeline.engine.models - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [] - - operations = [ - migrations.CreateModel( - name="Data", - fields=[ - ( - "id", - models.CharField( - max_length=32, primary_key=True, serialize=False, unique=True, verbose_name="\u8282\u70b9 ID" - ), - ), - ("inputs", pipeline.engine.models.IOField(verbose_name="\u8f93\u5165\u6570\u636e")), - ("outputs", pipeline.engine.models.IOField(verbose_name="\u8f93\u51fa\u6570\u636e")), - ("ex_data", pipeline.engine.models.IOField(verbose_name="\u5f02\u5e38\u6570\u636e")), - ], - ), - migrations.CreateModel( - name="History", - fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("identifier", models.CharField(db_index=True, max_length=32, verbose_name="\u8282\u70b9 id")), - ("started_time", models.DateTimeField(verbose_name="\u5f00\u59cb\u65f6\u95f4")), - ("archived_time", models.DateTimeField(verbose_name="\u7ed3\u675f\u65f6\u95f4")), - ], - ), - migrations.CreateModel( - name="HistoryData", - fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("inputs", pipeline.engine.models.IOField(verbose_name="\u8f93\u5165\u6570\u636e")), - ("outputs", pipeline.engine.models.IOField(verbose_name="\u8f93\u51fa\u6570\u636e")), - ("ex_data", pipeline.engine.models.IOField(verbose_name="\u5f02\u5e38\u6570\u636e")), - ], - ), - 
migrations.CreateModel( - name="NodeRelationship", - fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("ancestor_id", models.CharField(db_index=True, max_length=32, verbose_name="\u7956\u5148 ID")), - ("descendant_id", models.CharField(db_index=True, max_length=32, verbose_name="\u540e\u4ee3 ID")), - ("distance", models.IntegerField(verbose_name="\u8ddd\u79bb")), - ], - ), - migrations.CreateModel( - name="PipelineModel", - fields=[ - ( - "id", - models.CharField( - max_length=32, primary_key=True, serialize=False, unique=True, verbose_name="pipeline ID" - ), - ), - ], - ), - migrations.CreateModel( - name="PipelineProcess", - fields=[ - ( - "id", - models.CharField( - max_length=32, primary_key=True, serialize=False, unique=True, verbose_name="Process ID" - ), - ), - ("root_pipeline_id", models.CharField(max_length=32, verbose_name="\u6839 pipeline \u7684 ID")), - ( - "current_node_id", - models.CharField( - db_index=True, - default=b"", - max_length=32, - verbose_name="\u5f53\u524d\u63a8\u8fdb\u5230\u7684\u8282\u70b9\u7684 ID", - ), - ), - ( - "destination_id", - models.CharField( - default=b"", - max_length=32, - verbose_name="\u9047\u5230\u8be5 ID \u7684\u8282\u70b9\u5c31\u505c\u6b62\u63a8\u8fdb", - ), - ), - ("parent_id", models.CharField(default=b"", max_length=32, verbose_name="\u7236 process \u7684 ID")), - ( - "ack_num", - models.IntegerField( - default=0, verbose_name="\u6536\u5230\u5b50\u8282\u70b9 ACK \u7684\u6570\u91cf" - ), - ), - ( - "need_ack", - models.IntegerField( - default=-1, - verbose_name="\u9700\u8981\u6536\u5230\u7684\u5b50\u8282\u70b9 ACK \u7684\u6570\u91cf", - ), - ), - ( - "is_alive", - models.BooleanField(default=True, verbose_name="\u8be5 process \u662f\u5426\u8fd8\u6709\u6548"), - ), - ( - "is_sleep", - models.BooleanField( - default=False, verbose_name="\u8be5 process \u662f\u5426\u6b63\u5728\u4f11\u7720" - ), - ), - ], - ), - migrations.CreateModel( - 
name="ProcessCeleryTask", - fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ( - "process_id", - models.CharField( - db_index=True, max_length=32, unique=True, verbose_name="pipeline \u8fdb\u7a0b ID" - ), - ), - ("celery_task_id", models.CharField(default=b"", max_length=40, verbose_name="celery \u4efb\u52a1 ID")), - ], - ), - migrations.CreateModel( - name="ProcessSnapshot", - fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("data", pipeline.engine.models.IOField(verbose_name="\u5b50 process ID \u4e0e pipeline_stack")), - ], - ), - migrations.CreateModel( - name="ScheduleService", - fields=[ - ( - "id", - models.CharField( - max_length=64, - primary_key=True, - serialize=False, - unique=True, - verbose_name="ID \u8282\u70b9ID+version", - ), - ), - ("activity_id", models.CharField(db_index=True, max_length=32, verbose_name="\u8282\u70b9 ID")), - ("process_id", models.CharField(max_length=32, verbose_name="Pipeline \u8fdb\u7a0b ID")), - ("schedule_times", models.IntegerField(default=0, verbose_name="\u88ab\u8c03\u5ea6\u6b21\u6570")), - ( - "wait_callback", - models.BooleanField(default=False, verbose_name="\u662f\u5426\u662f\u56de\u8c03\u578b\u8c03\u5ea6"), - ), - ( - "callback_data", - pipeline.engine.models.IOField(default=None, verbose_name="\u56de\u8c03\u6570\u636e"), - ), - ("service_act", pipeline.engine.models.IOField(verbose_name="\u5f85\u8c03\u5ea6\u670d\u52a1")), - ("is_finished", models.BooleanField(default=False, verbose_name="\u662f\u5426\u5df2\u5b8c\u6210")), - ("version", models.CharField(db_index=True, max_length=32, verbose_name="Activity \u7684\u7248\u672c")), - ], - ), - migrations.CreateModel( - name="Status", - fields=[ - ( - "id", - models.CharField( - max_length=32, primary_key=True, serialize=False, unique=True, verbose_name="\u8282\u70b9 ID" - ), - ), - ("state", models.CharField(max_length=10, 
verbose_name="\u72b6\u6001")), - ("name", models.CharField(default=b"", max_length=64, verbose_name="\u8282\u70b9\u540d\u79f0")), - ("retry", models.IntegerField(default=0, verbose_name="\u91cd\u8bd5\u6b21\u6570")), - ("loop", models.IntegerField(default=1, verbose_name="\u5faa\u73af\u6b21\u6570")), - ("skip", models.BooleanField(default=False, verbose_name="\u662f\u5426\u8df3\u8fc7")), - ("created_time", models.DateTimeField(auto_now_add=True, verbose_name="\u521b\u5efa\u65f6\u95f4")), - ("started_time", models.DateTimeField(null=True, verbose_name="\u5f00\u59cb\u65f6\u95f4")), - ("archived_time", models.DateTimeField(null=True, verbose_name="\u5f52\u6863\u65f6\u95f4")), - ("version", models.CharField(max_length=32, verbose_name="\u7248\u672c")), - ], - options={"ordering": ["-created_time"]}, - ), - migrations.CreateModel( - name="SubProcessRelationship", - fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("subprocess_id", models.CharField(db_index=True, max_length=32, verbose_name="\u5b50\u6d41\u7a0b ID")), - ("process_id", models.CharField(max_length=32, verbose_name="\u5bf9\u5e94\u7684\u8fdb\u7a0b ID")), - ], - ), - migrations.AddField( - model_name="pipelineprocess", - name="snapshot", - field=models.ForeignKey( - null=True, on_delete=django.db.models.deletion.CASCADE, to="engine.ProcessSnapshot" - ), - ), - migrations.AddField( - model_name="pipelinemodel", - name="process", - field=models.ForeignKey( - null=True, on_delete=django.db.models.deletion.SET_NULL, to="engine.PipelineProcess" - ), - ), - migrations.AddField( - model_name="history", - name="data", - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="engine.HistoryData"), - ), - ] diff --git a/lib/pipeline/engine/migrations/0002_auto_20180109_1825.py b/lib/pipeline/engine/migrations/0002_auto_20180109_1825.py deleted file mode 100644 index d42e6f6..0000000 --- 
a/lib/pipeline/engine/migrations/0002_auto_20180109_1825.py +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" -# Generated by Django 1.11.2 on 2018-01-09 18:25 - - -from django.db import migrations -import pipeline.engine.models - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0001_initial"), - ] - - operations = [ - migrations.AlterField( - model_name="processsnapshot", - name="data", - field=pipeline.engine.models.IOField(verbose_name="pipeline \u8fd0\u884c\u65f6\u6570\u636e"), - ), - ] diff --git a/lib/pipeline/engine/migrations/0003_auto_20180717_1148.py b/lib/pipeline/engine/migrations/0003_auto_20180717_1148.py deleted file mode 100644 index 9b95c16..0000000 --- a/lib/pipeline/engine/migrations/0003_auto_20180717_1148.py +++ /dev/null @@ -1,65 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("engine", "0002_auto_20180109_1825"), - ] - - operations = [ - migrations.CreateModel( - name="FunctionSwitch", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), - ("name", models.CharField(max_length=32, verbose_name="\u529f\u80fd\u540d\u79f0")), - ("description", models.TextField(default=b"", verbose_name="\u529f\u80fd\u63cf\u8ff0")), - ("is_active", models.BooleanField(default=False, verbose_name="\u662f\u5426\u6fc0\u6d3b")), - ], - ), - migrations.AddField( - model_name="pipelineprocess", - name="is_froze", - field=models.BooleanField(default=False, verbose_name="\u8be5 process \u662f\u5426\u88ab\u51bb\u7ed3"), - ), - migrations.AddField( - model_name="scheduleservice", - name="celery_id", - field=models.CharField(default=b"", max_length=36, verbose_name="celery \u4efb\u52a1ID"), - ), - migrations.AddField( - model_name="scheduleservice", - name="celery_info_lock", - field=models.IntegerField(default=0, verbose_name="celery \u4fe1\u606f\u66f4\u65b0\u9501"), - ), - migrations.AddField( - model_name="scheduleservice", - name="is_frozen", - field=models.BooleanField(default=False, verbose_name="\u662f\u5426\u88ab\u51bb\u7ed3"), - ), - migrations.AddField( - model_name="scheduleservice", - name="is_scheduling", - field=models.BooleanField(default=False, verbose_name="\u662f\u5426\u6b63\u5728\u88ab\u8c03\u5ea6"), - ), - migrations.AddField( - model_name="scheduleservice", - name="schedule_date", - 
field=models.DateTimeField( - null=True, verbose_name="\u4e0b\u4e00\u6b21\u88ab\u8c03\u5ea6\u7684\u65f6\u95f4" - ), - ), - ] diff --git a/lib/pipeline/engine/migrations/0004_auto_20180717_1411.py b/lib/pipeline/engine/migrations/0004_auto_20180717_1411.py deleted file mode 100644 index 1903fc8..0000000 --- a/lib/pipeline/engine/migrations/0004_auto_20180717_1411.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0003_auto_20180717_1148"), - ] - - operations = [ - migrations.RenameField(model_name="pipelineprocess", old_name="is_froze", new_name="is_frozen",), - ] diff --git a/lib/pipeline/engine/migrations/0005_auto_20180717_1433.py b/lib/pipeline/engine/migrations/0005_auto_20180717_1433.py deleted file mode 100644 index 6ea1378..0000000 --- a/lib/pipeline/engine/migrations/0005_auto_20180717_1433.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0004_auto_20180717_1411"), - ] - - operations = [ - migrations.AlterField( - model_name="functionswitch", - name="name", - field=models.CharField(unique=True, max_length=32, verbose_name="\u529f\u80fd\u540d\u79f0"), - ), - ] diff --git a/lib/pipeline/engine/migrations/0006_auto_20180717_1543.py b/lib/pipeline/engine/migrations/0006_auto_20180717_1543.py deleted file mode 100644 index 6df40ec..0000000 --- a/lib/pipeline/engine/migrations/0006_auto_20180717_1543.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0005_auto_20180717_1433"), - ] - - operations = [ - migrations.AlterField( - model_name="scheduleservice", - name="celery_id", - field=models.CharField(max_length=36, null=True, verbose_name="celery \u4efb\u52a1ID"), - ), - ] diff --git a/lib/pipeline/engine/migrations/0007_auto_20180717_2022.py b/lib/pipeline/engine/migrations/0007_auto_20180717_2022.py deleted file mode 100644 index e6f859f..0000000 --- a/lib/pipeline/engine/migrations/0007_auto_20180717_2022.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0006_auto_20180717_1543"), - ] - - operations = [ - migrations.RemoveField(model_name="scheduleservice", name="celery_id",), - migrations.RemoveField(model_name="scheduleservice", name="celery_info_lock",), - migrations.RemoveField(model_name="scheduleservice", name="is_frozen",), - migrations.RemoveField(model_name="scheduleservice", name="schedule_date",), - ] diff --git a/lib/pipeline/engine/migrations/0008_schedulecelerytask.py b/lib/pipeline/engine/migrations/0008_schedulecelerytask.py deleted file mode 100644 index f9d2d80..0000000 --- a/lib/pipeline/engine/migrations/0008_schedulecelerytask.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0007_auto_20180717_2022"), - ] - - operations = [ - migrations.CreateModel( - name="ScheduleCeleryTask", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), - ( - "schedule_id", - models.CharField(unique=True, max_length=64, verbose_name="schedule ID", db_index=True), - ), - ("celery_task_id", models.CharField(default=b"", max_length=40, verbose_name="celery \u4efb\u52a1 ID")), - ], - ), - ] diff --git a/lib/pipeline/engine/migrations/0009_status_error_ignorable.py b/lib/pipeline/engine/migrations/0009_status_error_ignorable.py deleted file mode 100644 index 7f7aa5d..0000000 --- a/lib/pipeline/engine/migrations/0009_status_error_ignorable.py +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0008_schedulecelerytask"), - ] - - operations = [ - migrations.AddField( - model_name="status", - name="error_ignorable", - field=models.BooleanField( - default=False, verbose_name="\u662f\u5426\u51fa\u9519\u540e\u81ea\u52a8\u5ffd\u7565" - ), - ), - ] diff --git a/lib/pipeline/engine/migrations/0010_auto_20180830_1203.py b/lib/pipeline/engine/migrations/0010_auto_20180830_1203.py deleted file mode 100644 index 083f290..0000000 --- a/lib/pipeline/engine/migrations/0010_auto_20180830_1203.py +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0009_status_error_ignorable"), - ] - - operations = [ - migrations.AlterField( - model_name="scheduleservice", - name="id", - field=models.CharField( - max_length=96, unique=True, serialize=False, verbose_name="ID \u8282\u70b9ID+version", primary_key=True - ), - ), - ] diff --git a/lib/pipeline/engine/migrations/0010_nodecelerytask.py b/lib/pipeline/engine/migrations/0010_nodecelerytask.py deleted file mode 100644 index fe9044c..0000000 --- a/lib/pipeline/engine/migrations/0010_nodecelerytask.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0010_auto_20180830_1203"), - ] - - operations = [ - migrations.CreateModel( - name="NodeCeleryTask", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), - ( - "node_id", - models.CharField(unique=True, max_length=32, verbose_name="\u8282\u70b9 ID", db_index=True), - ), - ("celery_task_id", models.CharField(default=b"", max_length=40, verbose_name="celery \u4efb\u52a1 ID")), - ], - ), - ] diff --git a/lib/pipeline/engine/migrations/0011_auto_20180830_1205.py b/lib/pipeline/engine/migrations/0011_auto_20180830_1205.py deleted file mode 100644 index 305052f..0000000 --- a/lib/pipeline/engine/migrations/0011_auto_20180830_1205.py +++ /dev/null @@ -1,32 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0010_nodecelerytask"), - ] - - operations = [ - migrations.AlterField( - model_name="scheduleservice", - name="id", - field=models.CharField( - max_length=64, unique=True, serialize=False, verbose_name="ID \u8282\u70b9ID+version", primary_key=True - ), - ), - ] diff --git a/lib/pipeline/engine/migrations/0015_datasnapshot.py b/lib/pipeline/engine/migrations/0015_datasnapshot.py deleted file mode 100644 index 48bd770..0000000 --- a/lib/pipeline/engine/migrations/0015_datasnapshot.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models -import pipeline.engine.models.fields - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0011_auto_20180830_1205"), - ] - - operations = [ - migrations.CreateModel( - name="DataSnapshot", - fields=[ - ( - "key", - models.CharField( - max_length=255, serialize=False, verbose_name="\u5bf9\u8c61\u552f\u4e00\u952e", primary_key=True - ), - ), - ("obj", pipeline.engine.models.fields.IOField(verbose_name="\u5bf9\u8c61\u5b58\u50a8\u5b57\u6bb5")), - ], - ), - ] diff --git a/lib/pipeline/engine/migrations/0016_auto_20181228_0345.py b/lib/pipeline/engine/migrations/0016_auto_20181228_0345.py deleted file mode 100644 index 3fcaa00..0000000 --- a/lib/pipeline/engine/migrations/0016_auto_20181228_0345.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0015_datasnapshot"), - ] - - operations = [ - migrations.AddField( - model_name="history", - name="loop", - field=models.IntegerField(default=1, verbose_name="\u5faa\u73af\u6b21\u6570"), - ), - migrations.AddField( - model_name="history", - name="skip", - field=models.BooleanField(default=False, verbose_name="\u662f\u5426\u8df3\u8fc7"), - ), - ] diff --git a/lib/pipeline/engine/migrations/0017_auto_20190719_1010.py b/lib/pipeline/engine/migrations/0017_auto_20190719_1010.py deleted file mode 100644 index b8589b4..0000000 --- a/lib/pipeline/engine/migrations/0017_auto_20190719_1010.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0016_auto_20181228_0345"), - ] - - operations = [ - migrations.AlterField( - model_name="status", - name="created_time", - field=models.DateTimeField(auto_now_add=True, db_index=True, verbose_name="\u521b\u5efa\u65f6\u95f4"), - ), - ] diff --git a/lib/pipeline/engine/migrations/0018_auto_20190729_1041.py b/lib/pipeline/engine/migrations/0018_auto_20190729_1041.py deleted file mode 100644 index 757dad7..0000000 --- a/lib/pipeline/engine/migrations/0018_auto_20190729_1041.py +++ /dev/null @@ -1,80 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0017_auto_20190719_1010"), - ] - - operations = [ - migrations.AlterField( - model_name="history", - name="id", - field=models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID"), - ), - migrations.AlterField( - model_name="history", - name="data", - field=models.ForeignKey( - db_constraint=False, on_delete=django.db.models.deletion.CASCADE, to="engine.HistoryData" - ), - ), - migrations.AlterField( - model_name="historydata", - name="id", - field=models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID"), - ), - migrations.AlterField( - model_name="nodecelerytask", - name="id", - field=models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID"), - ), - migrations.AlterField( - model_name="noderelationship", - name="id", - field=models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID"), - ), - migrations.AlterField( - model_name="processcelerytask", - name="id", - field=models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID"), - ), - migrations.AlterField( - model_name="pipelineprocess", - name="snapshot", - field=models.ForeignKey( - db_constraint=False, null=True, on_delete=django.db.models.deletion.CASCADE, to="engine.ProcessSnapshot" - ), - ), - migrations.AlterField( - model_name="processsnapshot", - name="id", - field=models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID"), - ), - migrations.AlterField( - model_name="schedulecelerytask", - name="id", - field=models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID"), - ), - migrations.AlterField( - model_name="subprocessrelationship", - name="id", - field=models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID"), - ), - ] diff --git a/lib/pipeline/engine/migrations/0019_auto_20190729_1044.py 
b/lib/pipeline/engine/migrations/0019_auto_20190729_1044.py deleted file mode 100644 index 265dcb6..0000000 --- a/lib/pipeline/engine/migrations/0019_auto_20190729_1044.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0018_auto_20190729_1041"), - ] - - operations = [ - migrations.AlterField( - model_name="history", - name="data", - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="engine.HistoryData"), - ), - migrations.AlterField( - model_name="pipelineprocess", - name="snapshot", - field=models.ForeignKey( - null=True, on_delete=django.db.models.deletion.CASCADE, to="engine.ProcessSnapshot" - ), - ), - ] diff --git a/lib/pipeline/engine/migrations/0020_pipelinemodel_priority.py b/lib/pipeline/engine/migrations/0020_pipelinemodel_priority.py deleted file mode 100644 index 0e3a012..0000000 --- a/lib/pipeline/engine/migrations/0020_pipelinemodel_priority.py +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.11 on 2019-09-12 07:31 - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - 
("engine", "0019_auto_20190729_1044"), - ] - - operations = [ - migrations.AddField( - model_name="pipelinemodel", - name="priority", - field=models.IntegerField(default=100, verbose_name="\u6d41\u7a0b\u4f18\u5148\u7ea7"), - ), - ] diff --git a/lib/pipeline/engine/migrations/0021_auto_20191213_0725.py b/lib/pipeline/engine/migrations/0021_auto_20191213_0725.py deleted file mode 100644 index f3c08be..0000000 --- a/lib/pipeline/engine/migrations/0021_auto_20191213_0725.py +++ /dev/null @@ -1,69 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.23 on 2019-12-13 07:25 -from __future__ import unicode_literals - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0020_pipelinemodel_priority"), - ] - - operations = [ - migrations.AddField( - model_name="pipelinemodel", - name="queue", - field=models.CharField(default="", max_length=512, verbose_name="流程使用的队列名"), - ), - migrations.AlterField( - model_name="functionswitch", name="description", field=models.TextField(default="", verbose_name="功能描述"), - ), - migrations.AlterField( - model_name="history", - name="data", - field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to="engine.HistoryData"), - ), - migrations.AlterField( - model_name="nodecelerytask", - name="celery_task_id", - field=models.CharField(default="", max_length=40, verbose_name="celery 任务 ID"), - ), - migrations.AlterField( - model_name="pipelineprocess", - name="current_node_id", - field=models.CharField(db_index=True, default="", max_length=32, verbose_name="当前推进到的节点的 ID"), - ), - migrations.AlterField( - model_name="pipelineprocess", - name="destination_id", - field=models.CharField(default="", max_length=32, verbose_name="遇到该 ID 的节点就停止推进"), - ), - migrations.AlterField( - model_name="pipelineprocess", - name="parent_id", - field=models.CharField(default="", max_length=32, verbose_name="父 process 的 ID"), - ), - 
migrations.AlterField( - model_name="pipelineprocess", - name="snapshot", - field=models.ForeignKey( - null=True, on_delete=django.db.models.deletion.SET_NULL, to="engine.ProcessSnapshot" - ), - ), - migrations.AlterField( - model_name="processcelerytask", - name="celery_task_id", - field=models.CharField(default="", max_length=40, verbose_name="celery 任务 ID"), - ), - migrations.AlterField( - model_name="schedulecelerytask", - name="celery_task_id", - field=models.CharField(default="", max_length=40, verbose_name="celery 任务 ID"), - ), - migrations.AlterField( - model_name="status", name="name", field=models.CharField(default="", max_length=64, verbose_name="节点名称"), - ), - ] diff --git a/lib/pipeline/engine/migrations/0022_scheduleservice_multi_callback_enabled.py b/lib/pipeline/engine/migrations/0022_scheduleservice_multi_callback_enabled.py deleted file mode 100644 index 314ccd4..0000000 --- a/lib/pipeline/engine/migrations/0022_scheduleservice_multi_callback_enabled.py +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.23 on 2020-01-15 02:55 -from __future__ import unicode_literals - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0021_auto_20191213_0725"), - ] - - operations = [ - migrations.AddField( - model_name="scheduleservice", - name="multi_callback_enabled", - field=models.BooleanField(default=False, verbose_name="是否支持多次回调"), - ), - ] diff --git a/lib/pipeline/engine/migrations/0023_status_state_refresh_at.py b/lib/pipeline/engine/migrations/0023_status_state_refresh_at.py deleted file mode 100644 index 1e47265..0000000 --- a/lib/pipeline/engine/migrations/0023_status_state_refresh_at.py +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.23 on 2020-02-13 07:38 -from __future__ import unicode_literals - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - 
("engine", "0022_scheduleservice_multi_callback_enabled"), - ] - - operations = [ - migrations.AddField( - model_name="status", - name="state_refresh_at", - field=models.DateTimeField(null=True, verbose_name="上次状态更新的时间"), - ), - ] diff --git a/lib/pipeline/engine/migrations/0024_auto_20200224_0308.py b/lib/pipeline/engine/migrations/0024_auto_20200224_0308.py deleted file mode 100644 index fff49fe..0000000 --- a/lib/pipeline/engine/migrations/0024_auto_20200224_0308.py +++ /dev/null @@ -1,47 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.23 on 2020-02-24 03:08 -from __future__ import unicode_literals - -from django.db import migrations - -import pipeline.engine.models.fields - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0023_status_state_refresh_at"), - ] - - operations = [ - migrations.AlterField( - model_name="data", - name="ex_data", - field=pipeline.engine.models.fields.IOField(default=None, verbose_name="异常数据"), - ), - migrations.AlterField( - model_name="data", - name="inputs", - field=pipeline.engine.models.fields.IOField(default=None, verbose_name="输入数据"), - ), - migrations.AlterField( - model_name="data", - name="outputs", - field=pipeline.engine.models.fields.IOField(default=None, verbose_name="输出数据"), - ), - migrations.AlterField( - model_name="historydata", - name="ex_data", - field=pipeline.engine.models.fields.IOField(default=None, verbose_name="异常数据"), - ), - migrations.AlterField( - model_name="historydata", - name="inputs", - field=pipeline.engine.models.fields.IOField(default=None, verbose_name="输入数据"), - ), - migrations.AlterField( - model_name="historydata", - name="outputs", - field=pipeline.engine.models.fields.IOField(default=None, verbose_name="输出数据"), - ), - ] diff --git a/lib/pipeline/engine/migrations/0025_multicallbackdata.py b/lib/pipeline/engine/migrations/0025_multicallbackdata.py deleted file mode 100644 index baef270..0000000 --- 
a/lib/pipeline/engine/migrations/0025_multicallbackdata.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.23 on 2020-03-03 07:30 -from __future__ import unicode_literals - -from django.db import migrations, models -import pipeline.engine.models.fields - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0024_auto_20200224_0308"), - ] - - operations = [ - migrations.CreateModel( - name="MultiCallbackData", - fields=[ - ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="自增ID")), - ("schedule_id", models.CharField(max_length=64, verbose_name="回调服务ID")), - ("data", pipeline.engine.models.fields.IOField(verbose_name="回调数据")), - ], - ), - ] diff --git a/lib/pipeline/engine/migrations/0026_auto_20200610_1442.py b/lib/pipeline/engine/migrations/0026_auto_20200610_1442.py deleted file mode 100644 index dd55f12..0000000 --- a/lib/pipeline/engine/migrations/0026_auto_20200610_1442.py +++ /dev/null @@ -1,43 +0,0 @@ -# Generated by Django 2.2.8 on 2020-06-10 06:42 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('engine', '0025_multicallbackdata'), - ] - - operations = [ - migrations.AlterField( - model_name='pipelineprocess', - name='is_alive', - field=models.BooleanField(db_index=True, default=True, verbose_name='该 process 是否还有效'), - ), - migrations.AlterField( - model_name='pipelineprocess', - name='is_frozen', - field=models.BooleanField(db_index=True, default=False, verbose_name='该 process 是否被冻结'), - ), - migrations.AlterField( - model_name='pipelineprocess', - name='is_sleep', - field=models.BooleanField(db_index=True, default=False, verbose_name='该 process 是否正在休眠'), - ), - migrations.AlterField( - model_name='pipelineprocess', - name='root_pipeline_id', - field=models.CharField(db_index=True, max_length=32, verbose_name='根 pipeline 的 ID'), - ), - migrations.AlterField( - model_name='scheduleservice', - 
name='is_scheduling', - field=models.BooleanField(db_index=True, default=False, verbose_name='是否正在被调度'), - ), - migrations.AlterField( - model_name='noderelationship', - name='distance', - field=models.IntegerField(db_index=True, verbose_name='距离'), - ), - ] diff --git a/lib/pipeline/engine/migrations/0027_sendfailedcelerytask.py b/lib/pipeline/engine/migrations/0027_sendfailedcelerytask.py deleted file mode 100644 index 917a848..0000000 --- a/lib/pipeline/engine/migrations/0027_sendfailedcelerytask.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.28 on 2020-07-01 11:54 -from __future__ import unicode_literals - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0026_auto_20200610_1442"), - ] - - operations = [ - migrations.CreateModel( - name="SendFailedCeleryTask", - fields=[ - ( - "id", - models.BigAutoField( - primary_key=True, serialize=False, verbose_name="ID" - ), - ), - ("name", models.CharField(max_length=1024, verbose_name="任务名")), - ("kwargs", models.TextField(verbose_name="任务参数")), - ( - "type", - models.IntegerField( - choices=[ - (0, "empty"), - (1, "process"), - (2, "node"), - (3, "schedule"), - ], - verbose_name="任务类型", - ), - ), - ("extra_kwargs", models.TextField(verbose_name="额外参数")), - ("exec_trace", models.TextField(verbose_name="错误信息")), - ( - "created_at", - models.DateTimeField(auto_now_add=True, verbose_name="创建时间"), - ), - ], - ), - ] diff --git a/lib/pipeline/engine/migrations/0028_auto_20210812_0906.py b/lib/pipeline/engine/migrations/0028_auto_20210812_0906.py deleted file mode 100644 index f68c2df..0000000 --- a/lib/pipeline/engine/migrations/0028_auto_20210812_0906.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 2.2.19 on 2021-08-12 09:06 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("engine", "0027_sendfailedcelerytask"), - ] - - operations = [ - 
migrations.AlterField( - model_name="pipelineprocess", - name="parent_id", - field=models.CharField(db_index=True, default="", max_length=32, verbose_name="父 process 的 ID"), - ), - ] diff --git a/lib/pipeline/engine/migrations/__init__.py b/lib/pipeline/engine/migrations/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/engine/migrations/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/engine/models/__init__.py b/lib/pipeline/engine/models/__init__.py deleted file mode 100644 index 7d0a360..0000000 --- a/lib/pipeline/engine/models/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from pipeline.engine.models.core import * # noqa -from pipeline.engine.models.function import * # noqa -from pipeline.engine.models.data import * # noqa diff --git a/lib/pipeline/engine/models/core.py b/lib/pipeline/engine/models/core.py deleted file mode 100644 index 3c1c65f..0000000 --- a/lib/pipeline/engine/models/core.py +++ /dev/null @@ -1,1372 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import ujson as json -import contextlib -import logging -import traceback - -from celery import current_app -from celery.task.control import revoke -from django.db import models, transaction -from django.utils import timezone -from django.utils.translation import ugettext_lazy as _ - -from pipeline.conf import settings as pipeline_settings -from pipeline.constants import PIPELINE_DEFAULT_PRIORITY -from pipeline.core.data.base import DataObject -from pipeline.core.pipeline import Pipeline -from pipeline.django_signal_valve import valve -from pipeline.engine import exceptions, signals, states, utils -from pipeline.engine.core import data as data_service -from pipeline.engine.models.fields import IOField -from pipeline.engine.utils import ActionResult, Stack, calculate_elapsed_time -from pipeline.log.models import LogEntry -from pipeline.utils.uniqid import node_uniqid, uniqid - -logger = logging.getLogger("celery") - -RERUN_MAX_LIMIT = pipeline_settings.PIPELINE_RERUN_MAX_TIMES -NAME_MAX_LENGTH = 64 - - -class ProcessSnapshotManager(models.Manager): - def create_snapshot(self, pipeline_stack, children, root_pipeline, subprocess_stack): - data = { - "_pipeline_stack": pipeline_stack, - "_subprocess_stack": subprocess_stack, - "_children": children, - "_root_pipeline": root_pipeline, - } - return self.create(data=data) - - -class ProcessSnapshot(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - data = IOField(verbose_name=_("pipeline 运行时数据")) - - objects = ProcessSnapshotManager() - - @property - def pipeline_stack(self): - return self.data["_pipeline_stack"] - - @property - def children(self): - return self.data["_children"] - - @property - def root_pipeline(self): - return self.data["_root_pipeline"] - - @property - def subprocess_stack(self): - return self.data["_subprocess_stack"] - - def clean_children(self): - self.data["_children"] = [] - - def prune_top_pipeline(self, keep_from, keep_to): - 
self.data["_pipeline_stack"].top().prune(keep_from, keep_to) - - -class ProcessManager(models.Manager): - def prepare_for_pipeline(self, pipeline): - """ - 为 pipeline 创建相应的 process 并进行一系列初始化 - :param pipeline: - :return: - """ - # init runtime info - snapshot = ProcessSnapshot.objects.create_snapshot( - pipeline_stack=utils.Stack(), children=[], root_pipeline=pipeline, subprocess_stack=utils.Stack(), - ) - process = self.create( - id=node_uniqid(), root_pipeline_id=pipeline.id, current_node_id=pipeline.start_event.id, snapshot=snapshot, - ) - process.push_pipeline(pipeline) - process.save() - return process - - def fork_child(self, parent, current_node_id, destination_id): - """ - 创建一个上下文信息与当前 parent 一致的 child process - :param parent: - :param current_node_id: - :param destination_id: - :return: - """ - # init runtime info - pipeline_stack = Stack([parent.top_pipeline]) - root_pipeline_shell = parent.root_pipeline.shell() - snapshot = ProcessSnapshot.objects.create_snapshot( - pipeline_stack=pipeline_stack, - children=[], - root_pipeline=root_pipeline_shell, - subprocess_stack=parent.subprocess_stack, - ) - # refresh first, avoid keep the same ref to parent.top_pipeline - snapshot.refresh_from_db() - snapshot.prune_top_pipeline(current_node_id, destination_id) - snapshot.save() - # clear parent's change - snapshot.pipeline_stack.top().context.clear_change_keys() - - child = self.create( - id=node_uniqid(), - root_pipeline_id=parent.root_pipeline.id, - current_node_id=current_node_id, - destination_id=destination_id, - parent_id=parent.id, - snapshot=snapshot, - ) - for subproc_id in parent.subprocess_stack: - SubProcessRelationship.objects.add_relation(subproc_id, child.id) - - return child - - def process_ready(self, process_id, current_node_id=None, call_from_child=False): - """ - 发送一个进程已经准备好被调度的信号 - :param process_id: 已经准备好的进程 ID - :param current_node_id: 下一个执行的节点的 ID(可用于失败跳过) - :param call_from_child: 该信号是否由子进程发出 - :return: - """ - valve.send( - signals, - 
"process_ready", - sender=PipelineProcess, - process_id=process_id, - current_node_id=current_node_id, - call_from_child=call_from_child, - ) - - def batch_process_ready(self, process_id_list, pipeline_id): - """ - 发送批量唤醒进程的信号 - :param process_id_list: 需要被唤醒的进程 ID 列表 - :param pipeline_id: 这些进程相关的 root pipeline - :return: - """ - valve.send( - signals, - "batch_process_ready", - sender=PipelineProcess, - process_id_list=process_id_list, - pipeline_id=pipeline_id, - ) - - def child_process_ready(self, child_id): - """ - 发送子进程已经准备好被调度的信号 - :param child_id: 子进程 ID - :return: - """ - valve.send(signals, "child_process_ready", sender=PipelineProcess, child_id=child_id) - - def priority_for_process(self, process_id): - """ - 查询进程对应的 pipeline 的优先级 - :param process_id: 进程 ID - :return: - """ - return PipelineModel.objects.get(id=self.get(id=process_id).root_pipeline_id).priority - - def queue_for_process(self, process_id): - """ - 查询进程对应的 pipeline 所使用的队列 - :param process_id: 进程 ID - :return: - """ - return PipelineModel.objects.get(id=self.get(id=process_id).root_pipeline_id).queue - - def task_args_for_process(self, process_id): - pipeline_model = PipelineModel.objects.get(id=self.get(id=process_id).root_pipeline_id) - - return {"priority": pipeline_model.priority, "queue": pipeline_model.queue} - - -class PipelineProcess(models.Model): - """ - @relationship with other models - 1. 
PipelineInstance - process = PipelineProcess.objects.get(root_pipeline_id=pipeline_inst.instance_id) - pipeline_inst = PipelineInstance.objects.get(instance_id=process.root_pipeline_id) - """ - - id = models.CharField(_("Process ID"), unique=True, primary_key=True, max_length=32) - root_pipeline_id = models.CharField(_("根 pipeline 的 ID"), max_length=32, db_index=True) - current_node_id = models.CharField(_("当前推进到的节点的 ID"), max_length=32, default="", db_index=True) - destination_id = models.CharField(_("遇到该 ID 的节点就停止推进"), max_length=32, default="") - parent_id = models.CharField(_("父 process 的 ID"), max_length=32, default="", db_index=True) - ack_num = models.IntegerField(_("收到子节点 ACK 的数量"), default=0) - need_ack = models.IntegerField(_("需要收到的子节点 ACK 的数量"), default=-1) - is_alive = models.BooleanField(_("该 process 是否还有效"), default=True, db_index=True) - is_sleep = models.BooleanField(_("该 process 是否正在休眠"), default=False, db_index=True) - is_frozen = models.BooleanField(_("该 process 是否被冻结"), default=False, db_index=True) - snapshot = models.ForeignKey(ProcessSnapshot, null=True, on_delete=models.SET_NULL) - - objects = ProcessManager() - - @property - def pipeline_stack(self): - return self.snapshot.pipeline_stack if self.snapshot else None - - @property - def children(self): - return self.snapshot.children if self.snapshot else None - - @property - def root_pipeline(self): - return self.snapshot.root_pipeline if self.snapshot else None - - @property - def top_pipeline(self): - return self.pipeline_stack.top() - - @property - def subprocess_stack(self): - return self.snapshot.subprocess_stack if self.snapshot else None - - @property - def in_subprocess(self): - return len(self.snapshot.pipeline_stack) > 1 if self.snapshot else False - - def push_pipeline(self, pipeline, is_subprocess=False): - """ - 将 pipeline 压入运行时栈中 - :param pipeline: 需要被压入栈中的 pipeline 对象 - :param is_subprocess: 该 pipeline 是否是子流程 - :return: - """ - self.pipeline_stack.push(pipeline) - if 
is_subprocess: - self.subprocess_stack.push(pipeline.id) - SubProcessRelationship.objects.add_relation(pipeline.id, self.id) - - def pop_pipeline(self): - """ - 从运行时栈中弹出一个 pipeline - :return: - """ - pipeline = self.pipeline_stack.pop() - if self.subprocess_stack: - subproc_id = self.subprocess_stack.pop() - SubProcessRelationship.objects.delete_relation(subproc_id, self.id) - return pipeline - - def join(self, children): - """ - 令父进程等待子进程 - :param children: 需要等待的子进程列表 - :return: - """ - self.need_ack = len(children) - for child in children: - self.children.append(child.id) - self.save() - - def root_sleep_check(self): - """ - 检测 root pipeline 的状态判断当前进程是否需要休眠 - :return: - """ - root_state = Status.objects.state_for(self.root_pipeline.id) - if root_state in states.SLEEP_STATES: - return True, root_state - if root_state == states.BLOCKED: - # 子进程不会因为父进程进入了 BLOCKED 状态就进入睡眠 - return not self.parent_id, root_state - return False, root_state - - def subproc_sleep_check(self): - """ - 检测当前子流程栈中所有子流程的状态判断当前进程是否需要休眠 - :return: - """ - status = Status.objects.filter(id__in=self.subprocess_stack) - status_map = {s.id: s.state for s in status} - # 记录第一个处于暂停状态之前的所有子流程,用于子流程状态的修改 - before_suspended = [] - for subproc_id in self.subprocess_stack: - if status_map[subproc_id] == states.SUSPENDED: - return True, before_suspended - else: - before_suspended.append(subproc_id) - return False, before_suspended - - def freeze(self): - """ - 冻结当前进程 - :return: - """ - with transaction.atomic(): - self.__class__.objects.select_for_update().get(id=self.id) - self.is_frozen = True - self.save() - ProcessCeleryTask.objects.unbind(self.id) - - def unfreeze(self): - """ - 解冻当前进程 - :return: - """ - with transaction.atomic(): - self.__class__.objects.select_for_update().get(id=self.id) - self.is_frozen = False - self.save(save_snapshot=False) - valve.send(signals, "process_unfreeze", sender=PipelineProcess, process_id=self.id) - - def sleep(self, do_not_save=False, adjust_status=False, 
adjust_scope=None): - """ - 休眠当前进程 - :param do_not_save: 是否需要保存进程信息 - :param adjust_status: 是否需要调整 pipeline 中当前节点父级节点的状态 - :param adjust_scope: 状态调整的范围 - :return: - """ - if adjust_status: - self.adjust_status(adjust_scope) - if do_not_save: - return - with transaction.atomic(): - self.__class__.objects.select_for_update().get(id=self.id) - self.is_sleep = True - self.save() - ProcessCeleryTask.objects.unbind(self.id) - # dispatch children - for child_id in self.children: - PipelineProcess.objects.child_process_ready(child_id) - - def adjust_status(self, adjust_scope=None): - """ - 根据当前节点和子流程的状态来调整父级节点的状态 - :param adjust_scope: 子流程状态调整范围 - :return: - """ - node_state = Status.objects.state_for(self.current_node_id, may_not_exist=True) - pipeline_state = Status.objects.state_for(self.root_pipeline.id, may_not_exist=True) - subproc_states = Status.objects.states_for(self.subprocess_stack) - - if node_state in {states.FAILED, states.SUSPENDED}: - # if current node failed or suspended - Status.objects.batch_transit( - id_list=self.subprocess_stack, state=states.BLOCKED, from_state=states.RUNNING, - ) - Status.objects.transit(self.root_pipeline.id, to_state=states.BLOCKED, is_pipeline=True) - elif states.SUSPENDED in set(subproc_states): - # if any subprocess suspended - Status.objects.batch_transit(id_list=adjust_scope, state=states.BLOCKED, from_state=states.RUNNING) - Status.objects.transit(self.root_pipeline.id, to_state=states.BLOCKED, is_pipeline=True) - elif pipeline_state == states.SUSPENDED: - # if root pipeline suspended - Status.objects.batch_transit( - id_list=self.subprocess_stack, state=pipeline_state, from_state=states.RUNNING, - ) - - def wake_up(self): - """ - 唤醒当前进程 - :return: - """ - with transaction.atomic(): - self.__class__.objects.select_for_update().get(id=self.id) - self.is_sleep = False - self.save(save_snapshot=False) - - def destroy(self): - """ - 销毁当前进程及其上下文数据 - :return: - """ - self.is_alive = False - self.current_node_id = "" - snapshot = 
self.snapshot - self.snapshot = None - - self.save() - snapshot.delete() - ProcessCeleryTask.objects.destroy(self.id) - - def destroy_all(self): - """ - 销毁当前进程并递归销毁其所有子进程 - :return: - """ - _destroy_recursively(self) - - def save(self, save_snapshot=True, **kwargs): - if save_snapshot and self.snapshot: - self.snapshot.save() - return super(PipelineProcess, self).save(**kwargs) - - def blocked_by_failure_or_suspended(self): - """ - 检测当前进程是否因为节点失败而休眠 - :return: - """ - if not self.is_sleep: - return False - if Status.objects.state_for(self.current_node_id, may_not_exist=True) in { - states.FAILED, - states.SUSPENDED, - }: - return True - if not self.children: - return False - children = self.__class__.objects.filter(id__in=self.children) - result = [] - for child in children: - result.append(child.blocked_by_failure_or_suspended()) - return True in result - - def sync_with_children(self): - """ - 与子进程同步数据 - :return: - """ - for child_id in self.children: - context = data_service.get_object(self._context_key(child_id)) - parent_data = data_service.get_object(self._data_key(child_id)) - if context is None or parent_data is None: - raise exceptions.ChildDataSyncError( - "sync data with children {} failed, context({}) or parent_data({}) is None".format( - child_id, context, parent_data - ) - ) - self.top_pipeline.context.sync_change(context) - # self.top_pipeline.context.update_global_var(context.variables) - self.top_pipeline.data.update_outputs(parent_data.get_outputs()) - self.clean_children() # remove all children - - def destroy_and_wake_up_parent(self, destination_id): - """ - 销毁当前进程并尝试唤醒父进程 - :param destination_id: 当前进程终点节点 ID - :return: - """ - # save sync data - data_service.set_object(self._context_key(), self.top_pipeline.context) - data_service.set_object(self._data_key(), self.top_pipeline.data) - - self.__class__.objects.filter(id=self.parent_id).update(ack_num=models.F("ack_num") + 1) - can_wake_up = False - - with transaction.atomic(): - parent = 
self.__class__.objects.select_for_update().get(id=self.parent_id) - - if parent.need_ack != -1: - if parent.ack_num == parent.need_ack: - # try to wake up parent - parent.need_ack = -1 - parent.ack_num = 0 - can_wake_up = True - else: - if parent.blocked_by_failure_or_suspended(): - Status.objects.batch_transit( - id_list=self.subprocess_stack, state=states.BLOCKED, from_state=states.RUNNING, - ) - Status.objects.transit( - id=self.root_pipeline.id, to_state=states.BLOCKED, is_pipeline=True, - ) - - parent.save(save_snapshot=False) - - if can_wake_up: - self.__class__.objects.process_ready(parent.id, current_node_id=destination_id, call_from_child=True) - - SubProcessRelationship.objects.delete_relation(None, self.id) - self.destroy() - - def _context_key(self, process_id=None): - return "%s_context" % (process_id if process_id else self.id) - - def _data_key(self, process_id=None): - return "%s_data" % (process_id if process_id else self.id) - - def can_be_waked(self): - """ - 检测当前进程是否能够被唤醒 - :return: - """ - if not self.is_sleep or not self.is_alive: - return False - if self.need_ack != -1 and self.need_ack != self.ack_num: - return False - return True - - def clean_children(self): - """ - 清空当前进程的 children - :return: - """ - for child_id in self.children: - # 删除子进程的数据 - data_service.del_object(self._context_key(child_id)) - data_service.del_object(self._data_key(child_id)) - - self.snapshot.clean_children() - self.snapshot.save() - - def exit_gracefully(self, e): - """ - 在遇到无法处理的异常时优雅的退出当前进程 - :param e: - :return: - """ - ex_data = traceback.format_exc() - try: - current_node = self.top_pipeline.node(self.current_node_id) - except IndexError: - current_node = self.root_pipeline.node(self.current_node_id) - - # current_node may be a end_event in pipeline which had been popped - if current_node is not None: - result = Status.objects.fail(current_node, ex_data=ex_data) - else: - result = Status.objects.raw_fail(self.current_node_id, ex_data=ex_data) - - if not 
result.result: - logger.error( - "process({process_id}) exit_gracefully status transit failed, current_node :{node_id}".format( - process_id=self.id, node_id=current_node.id if current_node else self.current_node_id, - ) - ) - self.sleep(adjust_status=True) - - def refresh_current_node(self, current_node_id): - """ - 刷新当前节点的 ID - :param current_node_id: - :return: - """ - self.__class__.objects.filter(id=self.id).update(current_node_id=current_node_id) - - def revoke_subprocess(self): - if self.subprocess_stack: - Status.objects.batch_transit(id_list=list(self.subprocess_stack), state=states.REVOKED) - - if self.children: - for child_id in self.children: - PipelineProcess.objects.get(id=child_id).revoke_subprocess() - - def take_snapshot(self): - """ - 保存当前进程的快照对象 - :return: - """ - self.snapshot.save() - - -def _destroy_recursively(process): - if not process.is_alive: - return - if process.children: - for child_id in process.children: - child = PipelineProcess.objects.get(id=child_id) - _destroy_recursively(child) - process.destroy() - else: - process.destroy() - - -class PipelineModelManager(models.Manager): - def prepare_for_pipeline(self, pipeline, process, priority, queue=""): - return self.create(id=pipeline.id, process=process, priority=priority, queue=queue) - - def pipeline_ready(self, process_id): - valve.send(signals, "pipeline_ready", sender=Pipeline, process_id=process_id) - - def priority_for_pipeline(self, pipeline_id): - return self.get(id=pipeline_id).priority - - def task_args_for_pipeline(self, pipeline_id): - model = self.get(id=pipeline_id) - - return {"priority": model.priority, "queue": model.queue} - - -class PipelineModel(models.Model): - id = models.CharField("pipeline ID", unique=True, primary_key=True, max_length=32) - process = models.ForeignKey(PipelineProcess, null=True, on_delete=models.SET_NULL) - priority = models.IntegerField(_("流程优先级"), default=PIPELINE_DEFAULT_PRIORITY) - queue = models.CharField(_("流程使用的队列名"), max_length=512, 
default="") - - objects = PipelineModelManager() - - -class RelationshipManager(models.Manager): - def build_relationship(self, ancestor_id, descendant_id): - if self.filter(ancestor_id=ancestor_id, descendant_id=descendant_id).exists(): - # already build - return - ancestors = self.filter(descendant_id=ancestor_id) - relationships = [NodeRelationship(ancestor_id=descendant_id, descendant_id=descendant_id, distance=0)] - for ancestor in ancestors: - rel = NodeRelationship( - ancestor_id=ancestor.ancestor_id, descendant_id=descendant_id, distance=ancestor.distance + 1, - ) - relationships.append(rel) - self.bulk_create(relationships) - - -class NodeRelationship(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - ancestor_id = models.CharField(_("祖先 ID"), max_length=32, db_index=True) - descendant_id = models.CharField(_("后代 ID"), max_length=32, db_index=True) - distance = models.IntegerField(_("距离"), db_index=True) - - objects = RelationshipManager() - - def __unicode__(self): - return str("#{} -({})-> #{}".format(self.ancestor_id, self.distance, self.descendant_id,)) - - -class StatusManager(models.Manager): - def transit( - self, id, to_state, is_pipeline=False, appoint=False, start=False, name="", version=None, unchanged_pass=False, - ): - """ - 尝试改变某个节点的状态 - :param id: 节点 ID - :param to_state: 目标状态 - :param is_pipeline: 该节点是否是 pipeline - :param appoint: 该动作是否由用户发起(非引擎内部操作) - :param start: 是否刷新其开始时间 - :param name: 节点名称 - :param version: 节点版本 - :param unchanged_pass: 当 to_state 与当前节点状态相同时则视为操作成功 - :return: - """ - defaults = { - "name": name, - "state": to_state, - "version": uniqid(), - } - if start: - now = timezone.now() - defaults["started_time"] = now - defaults["state_refresh_at"] = now - status, created = self.get_or_create(id=id, defaults=defaults) - - # reservation or first creation - if created: - return ActionResult(result=True, message="success", extra=status) - - with transaction.atomic(): - kwargs = {"id": id} - if version: - 
kwargs["version"] = version - - try: - status = self.select_for_update().get(**kwargs) - - except Status.DoesNotExist: - return ActionResult(result=False, message="node not exists or not be executed yet") - - if unchanged_pass and status.state == to_state: - return ActionResult(result=True, message="success", extra=status) - - if states.can_transit( - from_state=status.state, to_state=to_state, is_pipeline=is_pipeline, appoint=appoint, - ): - - # 在冻结状态下不能改变 pipeline 的状态 - if is_pipeline: - subprocess_rel = SubProcessRelationship.objects.filter(subprocess_id=id) - if subprocess_rel: - process = PipelineProcess.objects.get(id=subprocess_rel[0].process_id) - if process.is_frozen: - return ActionResult(result=False, message="engine is frozen, can not perform operation",) - - processes = PipelineProcess.objects.filter(root_pipeline_id=id) - if processes and processes[0].is_frozen: - return ActionResult(result=False, message="engine is frozen, can not perform operation",) - - if name: - status.name = name - if to_state in states.ARCHIVED_STATES: - status.archived_time = timezone.now() - - # from FINISHED to RUNNING - if states.is_rerunning(from_state=status.state, to_state=to_state): - history = History.objects.record(status, is_rerunning=True) - if history: - LogEntry.objects.link_history(node_id=status.id, history_id=history.id) - status.loop += 1 - status.skip = False - status.version = uniqid() - - # reset started_time after record last status - if start: - status.started_time = timezone.now() - status.state = to_state - status.state_refresh_at = timezone.now() - status.save() - return ActionResult(result=True, message="success", extra=status) - else: - return ActionResult( - result=False, - message="can't transit state({}) from {} to {}".format(id, status.state, to_state), - extra=status, - ) - - def batch_transit(self, id_list, state, from_state=None, exclude=None): - """ - 批量改变节点状态,仅用于子流程的状态修改 - :param id_list: 子流程 ID 列表 - :param state: 目标状态 - :param from_state: 
起始状态 - :param exclude: 不需要改变状态的子流程 ID 列表 - :return: - """ - if not id_list: - return - if not exclude: - exclude = [] - - id_list = set(id_list) - exclude = set(exclude) - kwargs = {"id__in": [i for i in id_list if i not in exclude]} - if from_state: - kwargs["state"] = from_state - with transaction.atomic(): - self.select_for_update().filter(**kwargs).update(state=state) - - def state_for(self, id, may_not_exist=False, version=None): - """ - 获取某个节点的状态 - :param id: 节点 ID - :param may_not_exist: 该节点是否可能不存在(未执行到) - :param version: 节点版本 - :return: - """ - kwargs = {"id": id} - if version: - kwargs["version"] = version - if may_not_exist: - try: - return self.get(**kwargs).state - except Status.DoesNotExist: - return None - return self.get(**kwargs).state - - def version_for(self, id): - return self.get(id=id).version - - def states_for(self, id_list): - return [s.state for s in self.filter(id__in=id_list)] - - def prepare_for_pipeline(self, pipeline): - cls_str = str(pipeline.__class__) - cls_name = pipeline.__class__.__name__[:NAME_MAX_LENGTH] - self.create( - id=pipeline.id, state=states.READY, name=cls_str if len(cls_str) <= NAME_MAX_LENGTH else cls_name, - ) - - def fail(self, node, ex_data): - action_res = self.transit(node.id, states.FAILED) - - if not action_res.result: - return action_res - - Data.objects.write_node_data(node, ex_data) - return action_res - - def raw_fail(self, node_id, ex_data): - action_res = self.transit(node_id, states.FAILED) - - if not action_res.result: - return action_res - - Data.objects.write_ex_data(node_id, ex_data) - return action_res - - def finish(self, node, error_ignorable=False): - action_res = self.transit(node.id, states.FINISHED) - - if not action_res.result: - return action_res - - Data.objects.write_node_data(node) - if error_ignorable: - s = Status.objects.get(id=node.id) - s.error_ignorable = True - s.save() - - return action_res - - def skip(self, process, node): - s = Status.objects.get(id=node.id) # 一定要先取出来,不然 
archive time 会被覆盖 - if RERUN_MAX_LIMIT != 0 and s.loop > RERUN_MAX_LIMIT: - return ActionResult( - result=False, - message="rerun times exceed max limit: {limit}, can not skip".format(limit=RERUN_MAX_LIMIT), - ) - - action_res = self.transit(id=node.id, to_state=states.FINISHED, appoint=True) - if not action_res.result: - return action_res - - history = History.objects.record(s) - LogEntry.objects.link_history(node_id=node.id, history_id=history.id) - - s.refresh_from_db() - s.started_time = s.archived_time - - s.skip = True - s.save() - - # 由于节点执行失败,数据可能尚未写入当前的 Data 对象,所以要在这里做一次写入操作 - node.skip() - Data.objects.write_node_data(node) - - self.recover_from_block(process.root_pipeline.id, process.subprocess_stack) - signals.node_skip_call.send(sender=Status, process=process, node=node) - - return action_res - - def retry(self, process, node, inputs): - if RERUN_MAX_LIMIT != 0 and self.get(id=node.id).loop > RERUN_MAX_LIMIT: - return ActionResult( - result=False, - message="rerun times exceed max limit: {limit}, can not retry".format(limit=RERUN_MAX_LIMIT), - ) - - action_res = self.transit(id=node.id, to_state=states.READY, appoint=True) - if not action_res.result: - return action_res - - # add retry times - s = Status.objects.get(id=node.id) - s.version = uniqid() - history = History.objects.record(s) - LogEntry.objects.link_history(node_id=node.id, history_id=history.id) - s.retry += 1 - s.started_time = None - s.archived_time = None - s.save() - - # update inputs - if inputs: - new_data = DataObject(inputs=inputs, outputs={}) - node.data = new_data - Data.objects.write_node_data(node) - - # mark - node.next_exec_is_retry() - - self.recover_from_block(process.root_pipeline.id, process.subprocess_stack) - signals.node_retry_ready.send(sender=Status, process=process, node=node) - - # because node may be updated - process.save() - - return action_res - - def recover_from_block(self, root_pipeline_id, subprocess_stack): - 
Status.objects.batch_transit(id_list=subprocess_stack, state=states.RUNNING, from_state=states.BLOCKED) - Status.objects.transit(id=root_pipeline_id, to_state=states.READY, is_pipeline=True) - - @contextlib.contextmanager - def lock(self, id): - with transaction.atomic(): - self.select_for_update().get(id=id) - yield - - -class Status(models.Model): - id = models.CharField(_("节点 ID"), unique=True, primary_key=True, max_length=32) - state = models.CharField(_("状态"), max_length=10) - name = models.CharField(_("节点名称"), max_length=NAME_MAX_LENGTH, default="") - retry = models.IntegerField(_("重试次数"), default=0) - loop = models.IntegerField(_("循环次数"), default=1) - skip = models.BooleanField(_("是否跳过"), default=False) - error_ignorable = models.BooleanField(_("是否出错后自动忽略"), default=False) - created_time = models.DateTimeField(_("创建时间"), auto_now_add=True, db_index=True) - started_time = models.DateTimeField(_("开始时间"), null=True) - archived_time = models.DateTimeField(_("归档时间"), null=True) - version = models.CharField(_("版本"), max_length=32) - state_refresh_at = models.DateTimeField(_("上次状态更新的时间"), null=True) - - objects = StatusManager() - - class Meta: - ordering = ["-created_time"] - - def is_state_for_subproc(self): - return self.name.endswith("SubProcess") - - -class DataManager(models.Manager): - def write_node_data(self, node, ex_data=None): - data, created = self.get_or_create(id=node.id) - if hasattr(node, "data") and node.data: - data.inputs = node.data.get_inputs() - outputs = node.data.get_outputs() - ex_data = outputs.pop("ex_data", ex_data) - data.outputs = outputs - data.ex_data = ex_data - data.save() - - def write_ex_data(self, node_id, ex_data=None): - data, created = self.get_or_create(id=node_id) - data.ex_data = ex_data - data.save() - - def forced_fail(self, node_id, ex_data=""): - data, created = self.get_or_create(id=node_id) - data.outputs = { - "_forced_failed": True, - } - data.ex_data = ex_data - data.save() - - -class Data(models.Model): - id = 
models.CharField(_("节点 ID"), unique=True, primary_key=True, max_length=32) - inputs = IOField(verbose_name=_("输入数据"), default=None) - outputs = IOField(verbose_name=_("输出数据"), default=None) - ex_data = IOField(verbose_name=_("异常数据"), default=None) - - objects = DataManager() - - -class HistoryData(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - inputs = IOField(verbose_name=_("输入数据"), default=None) - outputs = IOField(verbose_name=_("输出数据"), default=None) - ex_data = IOField(verbose_name=_("异常数据"), default=None) - - objects = DataManager() - - -class MultiCallbackData(models.Model): - id = models.BigAutoField(_("自增ID"), primary_key=True) - schedule_id = models.CharField(_("回调服务ID"), max_length=NAME_MAX_LENGTH) - data = IOField(verbose_name=_("回调数据")) - - -DO_NOT_RECORD_WHEN_RERUN = frozenset({""}) - - -class HistoryManager(models.Manager): - def record(self, status, is_rerunning=False): - if is_rerunning and status.name in DO_NOT_RECORD_WHEN_RERUN: - return None - - data = Data.objects.get(id=status.id) - history_data = HistoryData.objects.create(inputs=data.inputs, outputs=data.outputs, ex_data=data.ex_data) - return self.create( - identifier=status.id, - started_time=status.started_time, - archived_time=status.archived_time, - data=history_data, - loop=status.loop, - skip=status.skip, - ) - - def get_histories(self, identifier, loop=None): - filters = {"identifier": identifier} - if loop is not None: - filters["loop"] = loop - histories = self.filter(**filters).order_by("started_time") - data = [ - { - "history_id": item.id, - "started_time": item.started_time, - "archived_time": item.archived_time, - "elapsed_time": calculate_elapsed_time(item.started_time, item.archived_time), - "inputs": item.data.inputs, - "outputs": item.data.outputs, - "ex_data": item.data.ex_data, - "loop": item.loop, - "skip": item.skip, - } - for item in histories - ] - return data - - -class History(models.Model): - id = models.BigAutoField(_("ID"), 
primary_key=True) - identifier = models.CharField(_("节点 id"), max_length=32, db_index=True) - started_time = models.DateTimeField(_("开始时间")) - archived_time = models.DateTimeField(_("结束时间")) - loop = models.IntegerField(_("循环次数"), default=1) - skip = models.BooleanField(_("是否跳过"), default=False) - - data = models.ForeignKey(HistoryData, null=True, on_delete=models.SET_NULL) - - objects = HistoryManager() - - -class ScheduleServiceManager(models.Manager): - def set_schedule(self, activity_id, service_act, process_id, version, parent_data): - wait_callback = service_act.service.interval is None - multi_callback_enabled = service_act.service.multi_callback_enabled() - schedule = self.create( - id="{}{}".format(activity_id, version), - activity_id=activity_id, - service_act=service_act, - process_id=process_id, - wait_callback=wait_callback, - multi_callback_enabled=multi_callback_enabled, - version=version, - ) - data_service.set_schedule_data(schedule.id, parent_data) - - if not wait_callback: - count_down = service_act.service.interval.next() - valve.send( - signals, - "schedule_ready", - sender=ScheduleService, - process_id=process_id, - schedule_id=schedule.id, - countdown=count_down, - ) - - return schedule - - def schedule_for(self, activity_id, version): - return self.get(id="{}{}".format(activity_id, version)) - - def delete_schedule(self, activity_id, version): - return self.filter(activity_id=activity_id, version=version).delete() - - def update_celery_info(self, id, lock, celery_id, schedule_date, is_scheduling=False): - return self.filter(id=id, celery_info_lock=lock).update( - celery_info_lock=models.F("celery_info_lock") + 1, - celery_id=celery_id, - schedule_date=schedule_date, - is_scheduling=is_scheduling, - ) - - -class ScheduleService(models.Model): - SCHEDULE_ID_SPLIT_DIVISION = 32 - - id = models.CharField(_("ID 节点ID+version"), max_length=NAME_MAX_LENGTH, unique=True, primary_key=True) - activity_id = models.CharField(_("节点 ID"), max_length=32, 
db_index=True) - process_id = models.CharField(_("Pipeline 进程 ID"), max_length=32) - schedule_times = models.IntegerField(_("被调度次数"), default=0) - wait_callback = models.BooleanField(_("是否是回调型调度"), default=False) - multi_callback_enabled = models.BooleanField(_("是否支持多次回调"), default=False) - callback_data = IOField(verbose_name=_("回调数据"), default=None) - service_act = IOField(verbose_name=_("待调度服务")) - is_finished = models.BooleanField(_("是否已完成"), default=False) - version = models.CharField(_("Activity 的版本"), max_length=32, db_index=True) - is_scheduling = models.BooleanField(_("是否正在被调度"), default=False, db_index=True) - - objects = ScheduleServiceManager() - - def set_next_schedule(self): - if self.wait_callback: - raise exceptions.InvalidOperationException("can't set next schedule on callback schedule.") - count_down = self.service_act.service.interval.next() - self.is_scheduling = False - self.save() - ScheduleCeleryTask.objects.unbind(self.id) - - valve.send( - signals, - "schedule_ready", - sender=ScheduleService, - process_id=self.process_id, - schedule_id=self.id, - countdown=count_down, - ) - - def destroy(self): - schedule_id = self.id - self.delete() - data_service.delete_parent_data(schedule_id) - ScheduleCeleryTask.objects.destroy(schedule_id) - - def finish(self): - self.is_finished = True - self.service_act = None - self.is_scheduling = False - self.save() - ScheduleCeleryTask.objects.destroy(self.id) - - def callback(self, callback_data, process_id): - if not self.wait_callback: - raise exceptions.InvalidOperationException("can't callback a poll schedule.") - - if self.multi_callback_enabled: - callback_data = MultiCallbackData.objects.create(schedule_id=self.id, data=callback_data) - valve.send( - signals, - "schedule_ready", - sender=ScheduleService, - process_id=process_id, - schedule_id=self.id, - data_id=callback_data.id, - countdown=0, - ) - else: - self.callback_data = callback_data - self.save() - valve.send( - signals, - "schedule_ready", - 
sender=ScheduleService, - process_id=process_id, - schedule_id=self.id, - countdown=0, - ) - - def is_one_time_callback(self): - return self.wait_callback and not self.multi_callback_enabled - - -class SubProcessRelationshipManager(models.Manager): - def add_relation(self, subprocess_id, process_id): - return self.create(subprocess_id=subprocess_id, process_id=process_id) - - def delete_relation(self, subprocess_id, process_id): - kwargs = {} - if subprocess_id: - kwargs["subprocess_id"] = subprocess_id - if process_id: - kwargs["process_id"] = process_id - self.filter(**kwargs).delete() - - def get_relate_process(self, subprocess_id): - qs = self.filter(subprocess_id=subprocess_id) - proc_ids = [i.process_id for i in qs] - return PipelineProcess.objects.filter(id__in=proc_ids) - - -class SubProcessRelationship(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - subprocess_id = models.CharField(_("子流程 ID"), max_length=32, db_index=True) - process_id = models.CharField(_("对应的进程 ID"), max_length=32) - - objects = SubProcessRelationshipManager() - - -class ProcessCeleryTaskManager(models.Manager): - def bind(self, process_id, celery_task_id): - rel, created = self.get_or_create(process_id=process_id, defaults={"celery_task_id": celery_task_id}) - if not created: - rel.celery_task_id = celery_task_id - rel.save() - - def unbind(self, process_id): - self.filter(process_id=process_id).update(celery_task_id="") - - def destroy(self, process_id): - self.filter(process_id=process_id).delete() - - def start_task(self, process_id, task, kwargs, record_error=True): - if record_error: - with SendFailedCeleryTask.watch( - name=task.name, - kwargs=kwargs, - type=SendFailedCeleryTask.TASK_TYPE_PROCESS, - extra_kwargs={"process_id": process_id}, - ): - task_id = task.apply_async(**kwargs) - else: - task_id = task.apply_async(**kwargs) - self.bind(process_id, task_id) - - def revoke(self, process_id, kill=False): - task = self.get(process_id=process_id) - kwargs = 
{} if not kill else {"signal": "SIGKILL"} - revoke(task.celery_task_id, terminate=True, **kwargs) - self.destroy(process_id) - - -class ProcessCeleryTask(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - process_id = models.CharField(_("pipeline 进程 ID"), max_length=32, unique=True, db_index=True) - celery_task_id = models.CharField(_("celery 任务 ID"), max_length=40, default="") - - objects = ProcessCeleryTaskManager() - - -class ScheduleCeleryTaskManager(models.Manager): - def bind(self, schedule_id, celery_task_id): - rel, created = self.get_or_create(schedule_id=schedule_id, defaults={"celery_task_id": celery_task_id}) - if not created: - rel.celery_task_id = celery_task_id - rel.save() - - def unbind(self, schedule_id): - self.filter(schedule_id=schedule_id).update(celery_task_id="") - - def destroy(self, schedule_id): - self.filter(schedule_id=schedule_id).delete() - - def start_task(self, schedule_id, task, kwargs, record_error=True): - if record_error: - with SendFailedCeleryTask.watch( - name=task.name, - kwargs=kwargs, - type=SendFailedCeleryTask.TASK_TYPE_SCHEDULE, - extra_kwargs={"schedule_id": schedule_id}, - ): - task_id = task.apply_async(**kwargs) - else: - task_id = task.apply_async(**kwargs) - self.bind(schedule_id, task_id) - - -class ScheduleCeleryTask(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - schedule_id = models.CharField(_("schedule ID"), max_length=NAME_MAX_LENGTH, unique=True, db_index=True) - celery_task_id = models.CharField(_("celery 任务 ID"), max_length=40, default="") - - objects = ScheduleCeleryTaskManager() - - -class NodeCeleryTaskManager(models.Manager): - def bind(self, node_id, celery_task_id): - rel, created = self.update_or_create(node_id=node_id, defaults={"celery_task_id": celery_task_id}) - if not created: - rel.celery_task_id = celery_task_id - rel.save() - - def unbind(self, node_id): - self.filter(node_id=node_id).update(celery_task_id="") - - def destroy(self, node_id): - 
self.filter(node_id=node_id).delete() - - def start_task(self, node_id, task, kwargs, record_error=True): - if record_error: - with SendFailedCeleryTask.watch( - name=task.name, - kwargs=kwargs, - type=SendFailedCeleryTask.TASK_TYPE_NODE, - extra_kwargs={"node_id": node_id}, - ): - task_id = task.apply_async(**kwargs) - else: - task_id = task.apply_async(**kwargs) - self.bind(node_id, task_id) - - def revoke(self, node_id): - task = self.get(node_id=node_id) - revoke(task.celery_task_id) - self.destroy(node_id) - - -class NodeCeleryTask(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - node_id = models.CharField(_("节点 ID"), max_length=32, unique=True, db_index=True) - celery_task_id = models.CharField(_("celery 任务 ID"), max_length=40, default="") - - objects = NodeCeleryTaskManager() - - -class SendFailedCeleryTaskManager(models.Manager): - def record(self, name, kwargs, type, extra_kwargs, exec_trace): - save_extra_kwargs = extra_kwargs - save_kwargs = kwargs - - if not isinstance(save_extra_kwargs, str): - save_extra_kwargs = json.dumps(save_extra_kwargs) - - if not isinstance(save_kwargs, str): - save_kwargs = json.dumps(save_kwargs) - - return self.create( - name=name, kwargs=save_kwargs, type=type, extra_kwargs=save_extra_kwargs, exec_trace=exec_trace, - ) - - def resend(self, id): - self.get(id=id).resend() - - -class SendFailedCeleryTask(models.Model): - - TASK_TYPE_EMPTY = 0 - TASK_TYPE_PROCESS = 1 - TASK_TYPE_NODE = 2 - TASK_TYPE_SCHEDULE = 3 - - TASK_TYPE_CHOICES = ( - (TASK_TYPE_EMPTY, "empty"), - (TASK_TYPE_PROCESS, "process"), - (TASK_TYPE_NODE, "node"), - (TASK_TYPE_SCHEDULE, "schedule"), - ) - - id = models.BigAutoField(_("ID"), primary_key=True) - name = models.CharField(_("任务名"), max_length=1024) - kwargs = models.TextField(_("任务参数")) - type = models.IntegerField(_("任务类型"), choices=TASK_TYPE_CHOICES) - extra_kwargs = models.TextField(_("额外参数")) - exec_trace = models.TextField(_("错误信息")) - created_at = 
models.DateTimeField(_("创建时间"), auto_now_add=True) - - objects = SendFailedCeleryTaskManager() - - @property - def kwargs_dict(self): - return json.loads(self.kwargs) - - @property - def extra_kwargs_dict(self): - return json.loads(self.extra_kwargs) - - def resend(self): - try: - task = current_app.tasks[self.name] - - if self.type == self.TASK_TYPE_EMPTY: - task.apply_async(**self.kwargs_dict) - elif self.type == self.TASK_TYPE_PROCESS: - ProcessCeleryTask.objects.start_task( - process_id=self.extra_kwargs_dict["process_id"], - task=task, - kwargs=self.kwargs_dict, - record_error=False, - ) - elif self.type == self.TASK_TYPE_NODE: - NodeCeleryTask.objects.start_task( - node_id=self.extra_kwargs_dict["node_id"], task=task, kwargs=self.kwargs_dict, record_error=False, - ) - elif self.type == self.TASK_TYPE_SCHEDULE: - ScheduleCeleryTask.objects.start_task( - schedule_id=self.extra_kwargs_dict["schedule_id"], - task=task, - kwargs=self.kwargs_dict, - record_error=False, - ) - else: - raise TypeError("unsupport type: {}.".format(self.type)) - except Exception as e: - logger.exception("fail task send replay error.") - raise e - else: - self.delete() - - @classmethod - @contextlib.contextmanager - def watch(cls, name, kwargs, type, extra_kwargs): - try: - yield - except Exception: - logger.exception("celery task({}) watcher catch error.".format(name)) - cls.objects.record( - name=name, kwargs=kwargs, type=type, extra_kwargs=extra_kwargs, exec_trace=traceback.format_exc(), - ) - # raise specific exception to indicate that send fail task have been catched - raise exceptions.CeleryFailedTaskCatchException(name) diff --git a/lib/pipeline/engine/models/data.py b/lib/pipeline/engine/models/data.py deleted file mode 100644 index 13f1b41..0000000 --- a/lib/pipeline/engine/models/data.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. 
class DataSnapshotManager(models.Manager):
    """Simple key -> pickled-object store backed by the DataSnapshot table."""

    def set_object(self, key, obj):
        """Create or overwrite the object stored under ``key``.

        :param key: unique string key (DataSnapshot primary key)
        :param obj: arbitrary picklable object to store
        :return: True on success
        """
        # do not use update_or_create, prevent of deadlock
        with transaction.atomic():
            # Fix: decide create-vs-update by row existence, not by the
            # truthiness of the stored object. The previous check
            # `if self.get_object(key)` took the create path again whenever
            # the stored object was falsy (None, {}, "", 0, ...), which
            # violates the primary key on the second write.
            if self.filter(key=key).exists():
                self.filter(key=key).update(obj=obj)
            else:
                self.create(key=key, obj=obj)
            return True

    def get_object(self, key):
        """Return the object stored under ``key``, or None if absent."""
        try:
            return self.get(key=key).obj
        except DataSnapshot.DoesNotExist:
            return None

    def del_object(self, key):
        """Delete the entry for ``key``; return True if it existed."""
        try:
            self.get(key=key).delete()
            return True
        except DataSnapshot.DoesNotExist:
            return False


class DataSnapshot(models.Model):
    # Unique string key identifying the stored object.
    key = models.CharField(_("对象唯一键"), max_length=255, primary_key=True)
    # Pickled + compressed payload (see IOField).
    obj = IOField(verbose_name=_("对象存储字段"))

    objects = DataSnapshotManager()
class IOField(models.BinaryField):
    """BinaryField storing arbitrary Python objects as zlib-compressed pickles."""

    def __init__(self, compress_level=6, *args, **kwargs):
        # compress_level: zlib level 0-9; 6 matches zlib's default trade-off.
        super(IOField, self).__init__(*args, **kwargs)
        self.compress_level = compress_level

    def get_prep_value(self, value):
        """Serialize ``value`` for storage: pickle, then zlib-compress."""
        value = super(IOField, self).get_prep_value(value)
        try:
            serialized = zlib.compress(pickle.dumps(value), self.compress_level)
        except RecursionError:
            # Deeply nested objects blow the recursion limit of the standard
            # pickler; fall back to the project's non-recursive pickler.
            serialized = zlib.compress(nr_pickle.dumps(value), self.compress_level)
        return serialized

    def to_python(self, value):
        """Deserialize a stored value back into a Python object.

        Never raises: on any failure the traceback text is returned as the
        value so a corrupt row does not break ORM access to the whole record.
        """
        try:
            value = super(IOField, self).to_python(value)
            return pickle.loads(zlib.decompress(value))
        except UnicodeDecodeError:
            # py2 pickle data process
            # (legacy rows pickled under Python 2; decode as bytes and
            # convert bytes back to str recursively)
            return convert_bytes_to_str(pickle.loads(zlib.decompress(value), encoding="bytes"))
        except Exception:
            return "IOField to_python raise error: {}".format(traceback.format_exc())

    def from_db_value(self, value, expression, connection, context=None):
        # `context` kept optional: Django < 2.0 passed it, newer versions do not.
        return self.to_python(value)
class FunctionSwitchManager(models.Manager):
    """Query helpers for engine feature switches."""

    def init_db(self):
        """Sync the switch table with the configured switch list.

        Switches not yet present are bulk-inserted; existing ones only get
        their description refreshed. Best-effort: failures are logged, never
        raised.
        """
        try:
            existing = {record.name for record in self.all()}
            pending = []
            for cfg in function_switch.switch_list:
                if cfg["name"] in existing:
                    self.filter(name=cfg["name"]).update(description=cfg["description"])
                else:
                    pending.append(
                        FunctionSwitch(
                            name=cfg["name"], description=cfg["description"], is_active=cfg["is_active"]
                        )
                    )
            self.bulk_create(pending)
        except Exception:
            logger.error("function switch init failed: %s" % traceback.format_exc())

    def is_frozen(self):
        """Return True if the engine-freeze switch is active."""
        return self.get(name=function_switch.FREEZE_ENGINE).is_active

    def freeze_engine(self):
        """Activate the engine-freeze switch."""
        self.filter(name=function_switch.FREEZE_ENGINE).update(is_active=True)

    def unfreeze_engine(self):
        """Deactivate the engine-freeze switch."""
        self.filter(name=function_switch.FREEZE_ENGINE).update(is_active=False)


class FunctionSwitch(models.Model):
    name = models.CharField(_("功能名称"), max_length=32, null=False, unique=True)
    description = models.TextField(_("功能描述"), default="")
    is_active = models.BooleanField(_("是否激活"), default=False)

    objects = FunctionSwitchManager()
a/lib/pipeline/engine/models/nr_pickle.py b/lib/pipeline/engine/models/nr_pickle.py deleted file mode 100644 index 0ab1cd4..0000000 --- a/lib/pipeline/engine/models/nr_pickle.py +++ /dev/null @@ -1,1681 +0,0 @@ -"""Create portable serialized representations of Python objects. - -See module copyreg for a mechanism for registering custom picklers. -See module pickletools source for extensive comments. - -Classes: - - Pickler - Unpickler - -Functions: - - dump(object, file) - dumps(object) -> string - load(file) -> object - loads(string) -> object - -Misc variables: - - __version__ - format_version - compatible_formats - -""" - -from types import FunctionType -from copyreg import dispatch_table -from copyreg import _extension_registry, _inverted_registry, _extension_cache -from itertools import islice -from functools import partial -import sys -from sys import maxsize -from struct import pack, unpack -import re -import io -import codecs -import _compat_pickle - -__all__ = ["PickleError", "PicklingError", "UnpicklingError", "Pickler", "Unpickler", "dump", "dumps", "load", "loads"] - -# Shortcut for use in isinstance testing -bytes_types = (bytes, bytearray) - -# These are purely informational; no code uses these. -format_version = "4.0" # File format version we write -compatible_formats = [ - "1.0", # Original protocol 0 - "1.1", # Protocol 0 with INST added - "1.2", # Original protocol 1 - "1.3", # Protocol 1 with BINFLOAT added - "2.0", # Protocol 2 - "3.0", # Protocol 3 - "4.0", # Protocol 4 -] # Old format versions we can read - -# This is the highest protocol number we know how to read. -HIGHEST_PROTOCOL = 4 - -# The protocol we write by default. May be less than HIGHEST_PROTOCOL. -# We intentionally write a protocol that Python 2.x cannot read; -# there are too many issues with that. 
-DEFAULT_PROTOCOL = 3 - - -class PickleError(Exception): - """A common base class for the other pickling exceptions.""" - - pass - - -class PicklingError(PickleError): - """This exception is raised when an unpicklable object is passed to the - dump() method. - - """ - - pass - - -class UnpicklingError(PickleError): - """This exception is raised when there is a problem unpickling an object, - such as a security violation. - - Note that other exceptions may also be raised during unpickling, including - (but not necessarily limited to) AttributeError, EOFError, ImportError, - and IndexError. - - """ - - pass - - -# An instance of _Stop is raised by Unpickler.load_stop() in response to -# the STOP opcode, passing the object that is the result of unpickling. - - -class _Stop(Exception): - def __init__(self, value): - self.value = value - - -# Jython has PyStringMap; it's a dict subclass with string keys -try: - from org.python.core import PyStringMap -except ImportError: - PyStringMap = None - -# Pickle opcodes. See pickletools.py for extensive docs. The listing -# here is in kind-of alphabetical order of 1-character pickle code. -# pickletools groups them by purpose. - -MARK = b"(" # push special markobject on stack -STOP = b"." 
# every pickle ends with STOP -POP = b"0" # discard topmost stack item -POP_MARK = b"1" # discard stack top through topmost markobject -DUP = b"2" # duplicate top stack item -FLOAT = b"F" # push float object; decimal string argument -INT = b"I" # push integer or bool; decimal string argument -BININT = b"J" # push four-byte signed int -BININT1 = b"K" # push 1-byte unsigned int -LONG = b"L" # push long; decimal string argument -BININT2 = b"M" # push 2-byte unsigned int -NONE = b"N" # push None -PERSID = b"P" # push persistent object; id is taken from string arg -BINPERSID = b"Q" # " " " ; " " " " stack -REDUCE = b"R" # apply callable to argtuple, both on stack -STRING = b"S" # push string; NL-terminated string argument -BINSTRING = b"T" # push string; counted binary string argument -SHORT_BINSTRING = b"U" # " " ; " " " " < 256 bytes -UNICODE = b"V" # push Unicode string; raw-unicode-escaped'd argument -BINUNICODE = b"X" # " " " ; counted UTF-8 string argument -APPEND = b"a" # append stack top to list below it -BUILD = b"b" # call __setstate__ or __dict__.update() -GLOBAL = b"c" # push self.find_class(modname, name); 2 string args -DICT = b"d" # build a dict from stack items -EMPTY_DICT = b"}" # push empty dict -APPENDS = b"e" # extend list on stack by topmost stack slice -GET = b"g" # push item from memo on stack; index is string arg -BINGET = b"h" # " " " " " " ; " " 1-byte arg -INST = b"i" # build & push class instance -LONG_BINGET = b"j" # push item from memo on stack; index is 4-byte arg -LIST = b"l" # build list from topmost stack items -EMPTY_LIST = b"]" # push empty list -OBJ = b"o" # build & push class instance -PUT = b"p" # store stack top in memo; index is string arg -BINPUT = b"q" # " " " " " ; " " 1-byte arg -LONG_BINPUT = b"r" # " " " " " ; " " 4-byte arg -SETITEM = b"s" # add key+value pair to dict -TUPLE = b"t" # build tuple from topmost stack items -EMPTY_TUPLE = b")" # push empty tuple -SETITEMS = b"u" # modify dict by adding topmost key+value pairs 
-BINFLOAT = b"G" # push float; arg is 8-byte float encoding - -TRUE = b"I01\n" # not an opcode; see INT docs in pickletools.py -FALSE = b"I00\n" # not an opcode; see INT docs in pickletools.py - -# Protocol 2 - -PROTO = b"\x80" # identify pickle protocol -NEWOBJ = b"\x81" # build object by applying cls.__new__ to argtuple -EXT1 = b"\x82" # push object from extension registry; 1-byte index -EXT2 = b"\x83" # ditto, but 2-byte index -EXT4 = b"\x84" # ditto, but 4-byte index -TUPLE1 = b"\x85" # build 1-tuple from stack top -TUPLE2 = b"\x86" # build 2-tuple from two topmost stack items -TUPLE3 = b"\x87" # build 3-tuple from three topmost stack items -NEWTRUE = b"\x88" # push True -NEWFALSE = b"\x89" # push False -LONG1 = b"\x8a" # push long from < 256 bytes -LONG4 = b"\x8b" # push really big long - -_tuplesize2code = [EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3] - -# Protocol 3 (Python 3.x) - -BINBYTES = b"B" # push bytes; counted binary string argument -SHORT_BINBYTES = b"C" # " " ; " " " " < 256 bytes - -# Protocol 4 -SHORT_BINUNICODE = b"\x8c" # push short string; UTF-8 length < 256 bytes -BINUNICODE8 = b"\x8d" # push very long string -BINBYTES8 = b"\x8e" # push very long bytes string -EMPTY_SET = b"\x8f" # push empty set on the stack -ADDITEMS = b"\x90" # modify set by adding topmost stack items -FROZENSET = b"\x91" # build frozenset from topmost stack items -NEWOBJ_EX = b"\x92" # like NEWOBJ but work with keyword only arguments -STACK_GLOBAL = b"\x93" # same as GLOBAL but using names on the stacks -MEMOIZE = b"\x94" # store top of the stack in memo -FRAME = b"\x95" # indicate the beginning of a new frame - -__all__.extend([x for x in dir() if re.match("[A-Z][A-Z0-9_]+$", x)]) - - -class _Framer: - - _FRAME_SIZE_TARGET = 64 * 1024 - - def __init__(self, file_write): - self.file_write = file_write - self.current_frame = None - - def start_framing(self): - self.current_frame = io.BytesIO() - - def end_framing(self): - if self.current_frame and self.current_frame.tell() > 0: - 
self.commit_frame(force=True) - self.current_frame = None - - def commit_frame(self, force=False): - if self.current_frame: - f = self.current_frame - if f.tell() >= self._FRAME_SIZE_TARGET or force: - with f.getbuffer() as data: - n = len(data) - write = self.file_write - write(FRAME) - write(pack("": - raise AttributeError("Can't get local attribute {!r} on {!r}".format(name, obj)) - try: - parent = obj - obj = getattr(obj, subpath) - except AttributeError: - raise AttributeError("Can't get attribute {!r} on {!r}".format(name, obj)) - return obj, parent - - -def whichmodule(obj, name): - """Find the module an object belong to.""" - module_name = getattr(obj, "__module__", None) - if module_name is not None: - return module_name - # Protect the iteration by using a list copy of sys.modules against dynamic - # modules that trigger imports of other modules upon calls to getattr. - for module_name, module in list(sys.modules.items()): - if module_name == "__main__" or module is None: - continue - try: - if _getattribute(module, name)[0] is obj: - return module_name - except AttributeError: - pass - return "__main__" - - -def encode_long(x): - r"""Encode a long to a two's complement little-endian binary string. - Note that 0 is a special case, returning an empty string, to save a - byte in the LONG1 pickling context. - - >>> encode_long(0) - b'' - >>> encode_long(255) - b'\xff\x00' - >>> encode_long(32767) - b'\xff\x7f' - >>> encode_long(-256) - b'\x00\xff' - >>> encode_long(-32768) - b'\x00\x80' - >>> encode_long(-128) - b'\x80' - >>> encode_long(127) - b'\x7f' - >>> - """ - if x == 0: - return b"" - nbytes = (x.bit_length() >> 3) + 1 - result = x.to_bytes(nbytes, byteorder="little", signed=True) - if x < 0 and nbytes > 1: - if result[-1] == 0xFF and (result[-2] & 0x80) != 0: - result = result[:-1] - return result - - -def decode_long(data): - r"""Decode a long from a two's complement little-endian binary string. 
- - >>> decode_long(b'') - 0 - >>> decode_long(b"\xff\x00") - 255 - >>> decode_long(b"\xff\x7f") - 32767 - >>> decode_long(b"\x00\xff") - -256 - >>> decode_long(b"\x00\x80") - -32768 - >>> decode_long(b"\x80") - -128 - >>> decode_long(b"\x7f") - 127 - """ - return int.from_bytes(data, byteorder="little", signed=True) - - -# Pickling machinery - - -class _Pickler: - def __init__(self, file, protocol=None, *, fix_imports=True): - """This takes a binary file for writing a pickle data stream. - - The optional *protocol* argument tells the pickler to use the - given protocol; supported protocols are 0, 1, 2, 3 and 4. The - default protocol is 3; a backward-incompatible protocol designed - for Python 3. - - Specifying a negative protocol version selects the highest - protocol version supported. The higher the protocol used, the - more recent the version of Python needed to read the pickle - produced. - - The *file* argument must have a write() method that accepts a - single bytes argument. It can thus be a file object opened for - binary writing, an io.BytesIO instance, or any other custom - object that meets this interface. - - If *fix_imports* is True and *protocol* is less than 3, pickle - will try to map the new Python 3 names to the old module names - used in Python 2, so that the pickle data stream is readable - with Python 2. - """ - if protocol is None: - protocol = DEFAULT_PROTOCOL - if protocol < 0: - protocol = HIGHEST_PROTOCOL - elif not 0 <= protocol <= HIGHEST_PROTOCOL: - raise ValueError("pickle protocol must be <= %d" % HIGHEST_PROTOCOL) - try: - self._file_write = file.write - except AttributeError: - raise TypeError("file must have a 'write' attribute") - self.framer = _Framer(self._file_write) - self.write = self.framer.write - self.memo = {} - self.proto = int(protocol) - self.bin = protocol >= 1 - self.fast = 0 - self.fix_imports = fix_imports and protocol < 3 - - def clear_memo(self): - """Clears the pickler's "memo". 
- - The memo is the data structure that remembers which objects the - pickler has already seen, so that shared or recursive objects - are pickled by reference and not by value. This method is - useful when re-using picklers. - """ - self.memo.clear() - - def dump(self, obj): - """Write a pickled representation of obj to the open file.""" - # Check whether Pickler was initialized correctly. This is - # only needed to mimic the behavior of _pickle.Pickler.dump(). - if not hasattr(self, "_file_write"): - raise PicklingError("Pickler.__init__() was not called by " "%s.__init__()" % (self.__class__.__name__,)) - if self.proto >= 2: - self.write(PROTO + pack("= 4: - self.framer.start_framing() - - # By faking recursion using generators, pickle is no longer dependent - # on python's recursion limit. This means that hugely recursive data - # structures can be pickled without a problem! It's also still just - # about as fast as it was for simple structures, albeit slower for - # large structures. - callstack = [self.save(obj)] - while callstack: - try: - result = next(callstack[-1]) - except StopIteration: - callstack.pop() - else: - if result is not None: - callstack.append(result) - - self.write(STOP) - self.framer.end_framing() - - def memoize(self, obj): - """Store an object in the memo.""" - - # The Pickler memo is a dictionary mapping object ids to 2-tuples - # that contain the Unpickler memo key and the object being memoized. - # The memo key is written to the pickle and will become - # the key in the Unpickler's memo. The object is stored in the - # Pickler memo so that transient objects are kept alive during - # pickling. - - # The use of the Unpickler memo length as the memo key is just a - # convention. The only requirement is that the memo values be unique. - # But there appears no advantage to any other scheme, and this - # scheme allows the Unpickler memo to be implemented as a plain (but - # growable) array, indexed by memo key. 
- if self.fast: - return - assert id(obj) not in self.memo - idx = len(self.memo) - self.write(self.put(idx)) - self.memo[id(obj)] = idx, obj - - # Return a PUT (BINPUT, LONG_BINPUT) opcode string, with argument i. - def put(self, idx): - if self.proto >= 4: - return MEMOIZE - elif self.bin: - if idx < 256: - return BINPUT + pack("= 2 and func_name == "__newobj_ex__": - cls, args, kwargs = args - if not hasattr(cls, "__new__"): - raise PicklingError("args[0] from {} args has no __new__".format(func_name)) - if obj is not None and cls is not obj.__class__: - raise PicklingError("args[0] from {} args has the wrong class".format(func_name)) - if self.proto >= 4: - yield save(cls) - yield save(args) - yield save(kwargs) - write(NEWOBJ_EX) - else: - func = partial(cls.__new__, cls, *args, **kwargs) - yield save(func) - yield save(()) - write(REDUCE) - elif self.proto >= 2 and func_name == "__newobj__": - # A __reduce__ implementation can direct protocol 2 or newer to - # use the more efficient NEWOBJ opcode, while still - # allowing protocol 0 and 1 to work normally. For this to - # work, the function returned by __reduce__ should be - # called __newobj__, and its first argument should be a - # class. The implementation for __newobj__ - # should be as follows, although pickle has no way to - # verify this: - # - # def __newobj__(cls, *args): - # return cls.__new__(cls, *args) - # - # Protocols 0 and 1 will pickle a reference to __newobj__, - # while protocol 2 (and above) will pickle a reference to - # cls, the remaining args tuple, and the NEWOBJ code, - # which calls cls.__new__(cls, *args) at unpickling time - # (see load_newobj below). If __reduce__ returns a - # three-tuple, the state from the third tuple item will be - # pickled regardless of the protocol, calling __setstate__ - # at unpickling time (see load_build below). - # - # Note that no standard __newobj__ implementation exists; - # you have to provide your own. 
This is to enforce - # compatibility with Python 2.2 (pickles written using - # protocol 0 or 1 in Python 2.3 should be unpicklable by - # Python 2.2). - cls = args[0] - if not hasattr(cls, "__new__"): - raise PicklingError("args[0] from __newobj__ args has no __new__") - if obj is not None and cls is not obj.__class__: - raise PicklingError("args[0] from __newobj__ args has the wrong class") - args = args[1:] - yield save(cls) - yield save(args) - write(NEWOBJ) - else: - yield save(func) - yield save(args) - write(REDUCE) - - if obj is not None: - # If the object is already in the memo, this means it is - # recursive. In this case, throw away everything we put on the - # stack, and fetch the object back from the memo. - if id(obj) in self.memo: - write(POP + self.get(self.memo[id(obj)][0])) - else: - self.memoize(obj) - - # More new special cases (that work with older protocols as - # well): when __reduce__ returns a tuple with 4 or 5 items, - # the 4th and 5th item should be iterators that provide list - # items and dict items (as (key, value) tuples), or None. - - if listitems is not None: - yield self._batch_appends(listitems) - - if dictitems is not None: - yield self._batch_setitems(dictitems) - - if state is not None: - yield save(state) - write(BUILD) - - # Methods below this point are dispatched through the dispatch table - - dispatch = {} - - def save_none(self, obj): - self.write(NONE) - - dispatch[type(None)] = save_none - - def save_bool(self, obj): - if self.proto >= 2: - self.write(NEWTRUE if obj else NEWFALSE) - else: - self.write(TRUE if obj else FALSE) - - dispatch[bool] = save_bool - - def save_long(self, obj): - if self.bin: - # If the int is small enough to fit in a signed 4-byte 2's-comp - # format, we can store it more efficiently than the general - # case. 
- # First one- and two-byte unsigned ints: - if obj >= 0: - if obj <= 0xFF: - self.write(BININT1 + pack("= 2: - encoded = encode_long(obj) - n = len(encoded) - if n < 256: - self.write(LONG1 + pack("d", obj)) - else: - self.write(FLOAT + repr(obj).encode("ascii") + b"\n") - - dispatch[float] = save_float - - def save_bytes(self, obj): - if self.proto < 3: - if not obj: # bytes object is empty - yield self.save_reduce(bytes, (), obj=obj) - else: - yield self.save_reduce(codecs.encode, (str(obj, "latin1"), "latin1"), obj=obj) - return - n = len(obj) - if n <= 0xFF: - self.write(SHORT_BINBYTES + pack(" 0xFFFFFFFF and self.proto >= 4: - self.write(BINBYTES8 + pack("= 4: - self.write(SHORT_BINUNICODE + pack(" 0xFFFFFFFF and self.proto >= 4: - self.write(BINUNICODE8 + pack("= 2: - for element in obj: - yield save(element) - # Subtle. Same as in the big comment below. - if id(obj) in memo: - get = self.get(memo[id(obj)][0]) - self.write(POP * n + get) - else: - self.write(_tuplesize2code[n]) - self.memoize(obj) - return - - # proto 0 or proto 1 and tuple isn't empty, or proto > 1 and tuple - # has more than 3 elements. - write = self.write - write(MARK) - for element in obj: - yield save(element) - - if id(obj) in memo: - # Subtle. d was not in memo when we entered save_tuple(), so - # the process of saving the tuple's elements must have saved - # the tuple itself: the tuple is recursive. The proper action - # now is to throw away everything we put on the stack, and - # simply GET the tuple (it's already constructed). This check - # could have been done in the "for element" loop instead, but - # recursive tuples are a rare thing. - get = self.get(memo[id(obj)][0]) - if self.bin: - write(POP_MARK + get) - else: # proto 0 -- POP_MARK not available - write(POP * (n + 1) + get) - return - - # No recursion. 
- write(TUPLE) - self.memoize(obj) - - dispatch[tuple] = save_tuple - - def save_list(self, obj): - if self.bin: - self.write(EMPTY_LIST) - else: # proto 0 -- can't use EMPTY_LIST - self.write(MARK + LIST) - - self.memoize(obj) - yield self._batch_appends(obj) - - dispatch[list] = save_list - - _BATCHSIZE = 1000 - - def _batch_appends(self, items): - # Helper to batch up APPENDS sequences - save = self.save - write = self.write - - if not self.bin: - for x in items: - yield save(x) - write(APPEND) - return - - it = iter(items) - while True: - tmp = list(islice(it, self._BATCHSIZE)) - n = len(tmp) - if n > 1: - write(MARK) - for x in tmp: - yield save(x) - write(APPENDS) - elif n: - yield save(tmp[0]) - write(APPEND) - # else tmp is empty, and we're done - if n < self._BATCHSIZE: - return - - def save_dict(self, obj): - if self.bin: - self.write(EMPTY_DICT) - else: # proto 0 -- can't use EMPTY_DICT - self.write(MARK + DICT) - - self.memoize(obj) - yield self._batch_setitems(obj.items()) - - dispatch[dict] = save_dict - if PyStringMap is not None: - dispatch[PyStringMap] = save_dict - - def _batch_setitems(self, items): - # Helper to batch up SETITEMS sequences; proto >= 1 only - save = self.save - write = self.write - - if not self.bin: - for k, v in items: - yield save(k) - yield save(v) - write(SETITEM) - return - - it = iter(items) - while True: - tmp = list(islice(it, self._BATCHSIZE)) - n = len(tmp) - if n > 1: - write(MARK) - for k, v in tmp: - yield save(k) - yield save(v) - write(SETITEMS) - elif n: - k, v = tmp[0] - yield save(k) - yield save(v) - write(SETITEM) - # else tmp is empty, and we're done - if n < self._BATCHSIZE: - return - - def save_set(self, obj): - save = self.save - write = self.write - - if self.proto < 4: - self.save_reduce(set, (list(obj),), obj=obj) - return - - write(EMPTY_SET) - self.memoize(obj) - - it = iter(obj) - while True: - batch = list(islice(it, self._BATCHSIZE)) - n = len(batch) - if n > 0: - write(MARK) - for item in batch: 
- yield save(item) - write(ADDITEMS) - if n < self._BATCHSIZE: - return - - dispatch[set] = save_set - - def save_frozenset(self, obj): - save = self.save - write = self.write - - if self.proto < 4: - self.save_reduce(frozenset, (list(obj),), obj=obj) - return - - write(MARK) - for item in obj: - yield save(item) - - if id(obj) in self.memo: - # If the object is already in the memo, this means it is - # recursive. In this case, throw away everything we put on the - # stack, and fetch the object back from the memo. - write(POP_MARK + self.get(self.memo[id(obj)][0])) - return - - write(FROZENSET) - self.memoize(obj) - - dispatch[frozenset] = save_frozenset - - def save_global(self, obj, name=None): - write = self.write - memo = self.memo # noqa - - if name is None: - name = getattr(obj, "__qualname__", None) - if name is None: - name = obj.__name__ - - module_name = whichmodule(obj, name) - try: - __import__(module_name, level=0) - module = sys.modules[module_name] - obj2, parent = _getattribute(module, name) - except (ImportError, KeyError, AttributeError): - raise PicklingError("Can't pickle %r: it's not found as %s.%s" % (obj, module_name, name)) - else: - if obj2 is not obj: - raise PicklingError("Can't pickle %r: it's not the same object as %s.%s" % (obj, module_name, name)) - - if self.proto >= 2: - code = _extension_registry.get((module_name, name)) - if code: - assert code > 0 - if code <= 0xFF: - write(EXT1 + pack("= 3. 
- if self.proto >= 4: - self.save(module_name) - self.save(name) - write(STACK_GLOBAL) - elif parent is not module: - self.save_reduce(getattr, (parent, lastname)) - elif self.proto >= 3: - write(GLOBAL + bytes(module_name, "utf-8") + b"\n" + bytes(name, "utf-8") + b"\n") - else: - if self.fix_imports: - r_name_mapping = _compat_pickle.REVERSE_NAME_MAPPING - r_import_mapping = _compat_pickle.REVERSE_IMPORT_MAPPING - if (module_name, name) in r_name_mapping: - module_name, name = r_name_mapping[(module_name, name)] - elif module_name in r_import_mapping: - module_name = r_import_mapping[module_name] - try: - write(GLOBAL + bytes(module_name, "ascii") + b"\n" + bytes(name, "ascii") + b"\n") - except UnicodeEncodeError: - raise PicklingError( - "can't pickle global identifier '%s.%s' using " "pickle protocol %i" % (module, name, self.proto) - ) - - self.memoize(obj) - - def save_type(self, obj): - if obj is type(None): # noqa - return self.save_reduce(type, (None,), obj=obj) - elif obj is type(NotImplemented): - return self.save_reduce(type, (NotImplemented,), obj=obj) - elif obj is type(...): # noqa - return self.save_reduce(type, (...,), obj=obj) - return self.save_global(obj) - - dispatch[FunctionType] = save_global - dispatch[type] = save_type - - -# Unpickling machinery - - -class _Unpickler: - def __init__(self, file, *, fix_imports=True, encoding="ASCII", errors="strict"): - """This takes a binary file for reading a pickle data stream. - - The protocol version of the pickle is detected automatically, so - no proto argument is needed. - - The argument *file* must have two methods, a read() method that - takes an integer argument, and a readline() method that requires - no arguments. Both methods should return bytes. Thus *file* - can be a binary file object opened for reading, an io.BytesIO - object, or any other custom object that meets this interface. 
- - The file-like object must have two methods, a read() method - that takes an integer argument, and a readline() method that - requires no arguments. Both methods should return bytes. - Thus file-like object can be a binary file object opened for - reading, a BytesIO object, or any other custom object that - meets this interface. - - Optional keyword arguments are *fix_imports*, *encoding* and - *errors*, which are used to control compatibility support for - pickle stream generated by Python 2. If *fix_imports* is True, - pickle will try to map the old Python 2 names to the new names - used in Python 3. The *encoding* and *errors* tell pickle how - to decode 8-bit string instances pickled by Python 2; these - default to 'ASCII' and 'strict', respectively. *encoding* can be - 'bytes' to read theses 8-bit string instances as bytes objects. - """ - self._file_readline = file.readline - self._file_read = file.read - self.memo = {} - self.encoding = encoding - self.errors = errors - self.proto = 0 - self.fix_imports = fix_imports - - def load(self): - """Read a pickled object representation from the open file. - - Return the reconstituted object hierarchy specified in the file. - """ - # Check whether Unpickler was initialized correctly. This is - # only needed to mimic the behavior of _pickle.Unpickler.dump(). 
- if not hasattr(self, "_file_read"): - raise UnpicklingError( - "Unpickler.__init__() was not called by " "%s.__init__()" % (self.__class__.__name__,) - ) - self._unframer = _Unframer(self._file_read, self._file_readline) - self.read = self._unframer.read - self.readline = self._unframer.readline - self.metastack = [] - self.stack = [] - self.append = self.stack.append - self.proto = 0 - read = self.read - dispatch = self.dispatch - try: - while True: - key = read(1) - if not key: - raise EOFError - assert isinstance(key, bytes_types) - dispatch[key[0]](self) - except _Stop as stopinst: - return stopinst.value - - # Return a list of items pushed in the stack after last MARK instruction. - def pop_mark(self): - items = self.stack - self.stack = self.metastack.pop() - self.append = self.stack.append - return items - - def persistent_load(self, pid): - raise UnpicklingError("unsupported persistent id encountered") - - dispatch = {} - - def load_proto(self): - proto = self.read(1)[0] - if not 0 <= proto <= HIGHEST_PROTOCOL: - raise ValueError("unsupported pickle protocol: %d" % proto) - self.proto = proto - - dispatch[PROTO[0]] = load_proto - - def load_frame(self): - (frame_size,) = unpack(" sys.maxsize: - raise ValueError("frame size > sys.maxsize: %d" % frame_size) - self._unframer.load_frame(frame_size) - - dispatch[FRAME[0]] = load_frame - - def load_persid(self): - try: - pid = self.readline()[:-1].decode("ascii") - except UnicodeDecodeError: - raise UnpicklingError("persistent IDs in protocol 0 must be ASCII strings") - self.append(self.persistent_load(pid)) - - dispatch[PERSID[0]] = load_persid - - def load_binpersid(self): - pid = self.stack.pop() - self.append(self.persistent_load(pid)) - - dispatch[BINPERSID[0]] = load_binpersid - - def load_none(self): - self.append(None) - - dispatch[NONE[0]] = load_none - - def load_false(self): - self.append(False) - - dispatch[NEWFALSE[0]] = load_false - - def load_true(self): - self.append(True) - - 
dispatch[NEWTRUE[0]] = load_true - - def load_int(self): - data = self.readline() - if data == FALSE[1:]: - val = False - elif data == TRUE[1:]: - val = True - else: - val = int(data, 0) - self.append(val) - - dispatch[INT[0]] = load_int - - def load_binint(self): - self.append(unpack("d", self.read(8))[0]) - - dispatch[BINFLOAT[0]] = load_binfloat - - def _decode_string(self, value): - # Used to allow strings from Python 2 to be decoded either as - # bytes or Unicode strings. This should be used only with the - # STRING, BINSTRING and SHORT_BINSTRING opcodes. - if self.encoding == "bytes": - return value - else: - return value.decode(self.encoding, self.errors) - - def load_string(self): - data = self.readline()[:-1] - # Strip outermost quotes - if len(data) >= 2 and data[0] == data[-1] and data[0] in b"\"'": - data = data[1:-1] - else: - raise UnpicklingError("the STRING opcode argument must be quoted") - self.append(self._decode_string(codecs.escape_decode(data)[0])) - - dispatch[STRING[0]] = load_string - - def load_binstring(self): - # Deprecated BINSTRING uses signed 32-bit length - (len,) = unpack(" maxsize: - raise UnpicklingError("BINBYTES exceeds system's maximum size " "of %d bytes" % maxsize) - self.append(self.read(len)) - - dispatch[BINBYTES[0]] = load_binbytes - - def load_unicode(self): - self.append(str(self.readline()[:-1], "raw-unicode-escape")) - - dispatch[UNICODE[0]] = load_unicode - - def load_binunicode(self): - (len,) = unpack(" maxsize: - raise UnpicklingError("BINUNICODE exceeds system's maximum size " "of %d bytes" % maxsize) - self.append(str(self.read(len), "utf-8", "surrogatepass")) - - dispatch[BINUNICODE[0]] = load_binunicode - - def load_binunicode8(self): - (len,) = unpack(" maxsize: - raise UnpicklingError("BINUNICODE8 exceeds system's maximum size " "of %d bytes" % maxsize) - self.append(str(self.read(len), "utf-8", "surrogatepass")) - - dispatch[BINUNICODE8[0]] = load_binunicode8 - - def load_binbytes8(self): - (len,) = 
unpack(" maxsize: - raise UnpicklingError("BINBYTES8 exceeds system's maximum size " "of %d bytes" % maxsize) - self.append(self.read(len)) - - dispatch[BINBYTES8[0]] = load_binbytes8 - - def load_short_binstring(self): - len = self.read(1)[0] - data = self.read(len) - self.append(self._decode_string(data)) - - dispatch[SHORT_BINSTRING[0]] = load_short_binstring - - def load_short_binbytes(self): - len = self.read(1)[0] - self.append(self.read(len)) - - dispatch[SHORT_BINBYTES[0]] = load_short_binbytes - - def load_short_binunicode(self): - len = self.read(1)[0] - self.append(str(self.read(len), "utf-8", "surrogatepass")) - - dispatch[SHORT_BINUNICODE[0]] = load_short_binunicode - - def load_tuple(self): - items = self.pop_mark() - self.append(tuple(items)) - - dispatch[TUPLE[0]] = load_tuple - - def load_empty_tuple(self): - self.append(()) - - dispatch[EMPTY_TUPLE[0]] = load_empty_tuple - - def load_tuple1(self): - self.stack[-1] = (self.stack[-1],) - - dispatch[TUPLE1[0]] = load_tuple1 - - def load_tuple2(self): - self.stack[-2:] = [(self.stack[-2], self.stack[-1])] - - dispatch[TUPLE2[0]] = load_tuple2 - - def load_tuple3(self): - self.stack[-3:] = [(self.stack[-3], self.stack[-2], self.stack[-1])] - - dispatch[TUPLE3[0]] = load_tuple3 - - def load_empty_list(self): - self.append([]) - - dispatch[EMPTY_LIST[0]] = load_empty_list - - def load_empty_dictionary(self): - self.append({}) - - dispatch[EMPTY_DICT[0]] = load_empty_dictionary - - def load_empty_set(self): - self.append(set()) - - dispatch[EMPTY_SET[0]] = load_empty_set - - def load_frozenset(self): - items = self.pop_mark() - self.append(frozenset(items)) - - dispatch[FROZENSET[0]] = load_frozenset - - def load_list(self): - items = self.pop_mark() - self.append(items) - - dispatch[LIST[0]] = load_list - - def load_dict(self): - items = self.pop_mark() - d = {items[i]: items[i + 1] for i in range(0, len(items), 2)} - self.append(d) - - dispatch[DICT[0]] = load_dict - - # INST and OBJ differ only in how 
they get a class object. It's not - # only sensible to do the rest in a common routine, the two routines - # previously diverged and grew different bugs. - # klass is the class to instantiate, and k points to the topmost mark - # object, following which are the arguments for klass.__init__. - def _instantiate(self, klass, args): - if args or not isinstance(klass, type) or hasattr(klass, "__getinitargs__"): - try: - value = klass(*args) - except TypeError as err: - raise TypeError("in constructor for %s: %s" % (klass.__name__, str(err)), sys.exc_info()[2]) - else: - value = klass.__new__(klass) - self.append(value) - - def load_inst(self): - module = self.readline()[:-1].decode("ascii") - name = self.readline()[:-1].decode("ascii") - klass = self.find_class(module, name) - self._instantiate(klass, self.pop_mark()) - - dispatch[INST[0]] = load_inst - - def load_obj(self): - # Stack is ... markobject classobject arg1 arg2 ... - args = self.pop_mark() - cls = args.pop(0) - self._instantiate(cls, args) - - dispatch[OBJ[0]] = load_obj - - def load_newobj(self): - args = self.stack.pop() - cls = self.stack.pop() - obj = cls.__new__(cls, *args) - self.append(obj) - - dispatch[NEWOBJ[0]] = load_newobj - - def load_newobj_ex(self): - kwargs = self.stack.pop() - args = self.stack.pop() - cls = self.stack.pop() - obj = cls.__new__(cls, *args, **kwargs) - self.append(obj) - - dispatch[NEWOBJ_EX[0]] = load_newobj_ex - - def load_global(self): - module = self.readline()[:-1].decode("utf-8") - name = self.readline()[:-1].decode("utf-8") - klass = self.find_class(module, name) - self.append(klass) - - dispatch[GLOBAL[0]] = load_global - - def load_stack_global(self): - name = self.stack.pop() - module = self.stack.pop() - if type(name) is not str or type(module) is not str: - raise UnpicklingError("STACK_GLOBAL requires str") - self.append(self.find_class(module, name)) - - dispatch[STACK_GLOBAL[0]] = load_stack_global - - def load_ext1(self): - code = self.read(1)[0] - 
self.get_extension(code) - - dispatch[EXT1[0]] = load_ext1 - - def load_ext2(self): - (code,) = unpack("= 4: - return _getattribute(sys.modules[module], name)[0] - else: - return getattr(sys.modules[module], name) - - def load_reduce(self): - stack = self.stack - args = stack.pop() - func = stack[-1] - stack[-1] = func(*args) - - dispatch[REDUCE[0]] = load_reduce - - def load_pop(self): - if self.stack: - del self.stack[-1] - else: - self.pop_mark() - - dispatch[POP[0]] = load_pop - - def load_pop_mark(self): - self.pop_mark() - - dispatch[POP_MARK[0]] = load_pop_mark - - def load_dup(self): - self.append(self.stack[-1]) - - dispatch[DUP[0]] = load_dup - - def load_get(self): - i = int(self.readline()[:-1]) - self.append(self.memo[i]) - - dispatch[GET[0]] = load_get - - def load_binget(self): - i = self.read(1)[0] - self.append(self.memo[i]) - - dispatch[BINGET[0]] = load_binget - - def load_long_binget(self): - (i,) = unpack(" maxsize: - raise ValueError("negative LONG_BINPUT argument") - self.memo[i] = self.stack[-1] - - dispatch[LONG_BINPUT[0]] = load_long_binput - - def load_memoize(self): - memo = self.memo - memo[len(memo)] = self.stack[-1] - - dispatch[MEMOIZE[0]] = load_memoize - - def load_append(self): - stack = self.stack - value = stack.pop() - list = stack[-1] - list.append(value) - - dispatch[APPEND[0]] = load_append - - def load_appends(self): - items = self.pop_mark() - list_obj = self.stack[-1] - if isinstance(list_obj, list): - list_obj.extend(items) - else: - append = list_obj.append - for item in items: - append(item) - - dispatch[APPENDS[0]] = load_appends - - def load_setitem(self): - stack = self.stack - value = stack.pop() - key = stack.pop() - dict = stack[-1] - dict[key] = value - - dispatch[SETITEM[0]] = load_setitem - - def load_setitems(self): - items = self.pop_mark() - dict = self.stack[-1] - for i in range(0, len(items), 2): - dict[items[i]] = items[i + 1] - - dispatch[SETITEMS[0]] = load_setitems - - def load_additems(self): - items 
= self.pop_mark() - set_obj = self.stack[-1] - if isinstance(set_obj, set): - set_obj.update(items) - else: - add = set_obj.add - for item in items: - add(item) - - dispatch[ADDITEMS[0]] = load_additems - - def load_build(self): - stack = self.stack - state = stack.pop() - inst = stack[-1] - setstate = getattr(inst, "__setstate__", None) - if setstate is not None: - setstate(state) - return - slotstate = None - if isinstance(state, tuple) and len(state) == 2: - state, slotstate = state - if state: - inst_dict = inst.__dict__ - intern = sys.intern - for k, v in state.items(): - if type(k) is str: - inst_dict[intern(k)] = v - else: - inst_dict[k] = v - if slotstate: - for k, v in slotstate.items(): - setattr(inst, k, v) - - dispatch[BUILD[0]] = load_build - - def load_mark(self): - self.metastack.append(self.stack) - self.stack = [] - self.append = self.stack.append - - dispatch[MARK[0]] = load_mark - - def load_stop(self): - value = self.stack.pop() - raise _Stop(value) - - dispatch[STOP[0]] = load_stop - - -# Shorthands - - -def _dump(obj, file, protocol=None, *, fix_imports=True): - _Pickler(file, protocol, fix_imports=fix_imports).dump(obj) - - -def _dumps(obj, protocol=None, *, fix_imports=True): - f = io.BytesIO() - _Pickler(f, protocol, fix_imports=fix_imports).dump(obj) - res = f.getvalue() - assert isinstance(res, bytes_types) - return res - - -def _load(file, *, fix_imports=True, encoding="ASCII", errors="strict"): - return _Unpickler(file, fix_imports=fix_imports, encoding=encoding, errors=errors).load() - - -def _loads(s, *, fix_imports=True, encoding="ASCII", errors="strict"): - if isinstance(s, str): - raise TypeError("Can't load pickle from unicode string") - file = io.BytesIO(s) - return _Unpickler(file, fix_imports=fix_imports, encoding=encoding, errors=errors).load() - - -# # Use the faster _pickle if possible -# try: -# from _pickle import ( -# PickleError, -# PicklingError, -# UnpicklingError, -# Pickler, -# Unpickler, -# dump, -# dumps, -# load, 
-# loads -# ) -# except ImportError: -# Pickler, Unpickler = _Pickler, _Unpickler -# dump, dumps, load, loads = _dump, _dumps, _load, _loads - -Pickler, Unpickler = _Pickler, _Unpickler -dump, dumps, load, loads = _dump, _dumps, _load, _loads - -# Doctest - - -def _test(): - import doctest - - return doctest.testmod() - - -if __name__ == "__main__": - import argparse - - parser = argparse.ArgumentParser(description="display contents of the pickle files") - parser.add_argument("pickle_file", type=argparse.FileType("br"), nargs="*", help="the pickle file") - parser.add_argument("-t", "--test", action="store_true", help="run self-test suite") - parser.add_argument("-v", action="store_true", help="run verbosely; only affects self-test run") - args = parser.parse_args() - if args.test: - _test() - else: - if not args.pickle_file: - parser.print_help() - else: - import pprint - - for f in args.pickle_file: - obj = load(f) - pprint.pprint(obj) diff --git a/lib/pipeline/engine/signals/__init__.py b/lib/pipeline/engine/signals/__init__.py deleted file mode 100644 index 6d1b876..0000000 --- a/lib/pipeline/engine/signals/__init__.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from django.dispatch import Signal - -pipeline_ready = Signal(providing_args=["process_id"]) -pipeline_end = Signal(providing_args=["root_pipeline_id"]) -pipeline_revoke = Signal(providing_args=["root_pipeline_id"]) -child_process_ready = Signal(providing_args=["child_id"]) -process_ready = Signal(providing_args=["parent_id", "current_node_id", "call_from_child"]) -batch_process_ready = Signal(providing_args=["process_id_list", "pipeline_id"]) -wake_from_schedule = Signal(providing_args=["process_id, activity_id"]) -schedule_ready = Signal(providing_args=["schedule_id", "countdown", "process_id", "data_id"]) -process_unfreeze = Signal(providing_args=["process_id"]) -# activity failed signal -activity_failed = Signal(providing_args=["pipeline_id", "pipeline_activity_id", "subprocess_id_stack"]) - -# signal for developer (do not use valve to pass them!) -service_schedule_fail = Signal(providing_args=["activity_shell", "schedule_service", "ex_data"]) -service_schedule_success = Signal(providing_args=["activity_shell", "schedule_service"]) -node_skip_call = Signal(providing_args=["process", "node"]) -node_retry_ready = Signal(providing_args=["process", "node"]) - -service_activity_timeout_monitor_start = Signal(providing_args=["node_id", "version", "root_pipeline_id", "countdown"]) -service_activity_timeout_monitor_end = Signal(providing_args=["node_id", "version"]) diff --git a/lib/pipeline/engine/signals/dispatch.py b/lib/pipeline/engine/signals/dispatch.py deleted file mode 100644 index 61091d2..0000000 --- a/lib/pipeline/engine/signals/dispatch.py +++ /dev/null @@ -1,108 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import traceback - -from django.utils.module_loading import import_string - -from pipeline.conf import settings -from pipeline.core.flow.activity import ServiceActivity -from pipeline.core.pipeline import Pipeline -from pipeline.engine import models, signals -from pipeline.engine.exceptions import InvalidPipelineEndHandleError -from pipeline.engine.signals import handlers - -try: - end_handler = import_string(settings.PIPELINE_END_HANDLER) -except ImportError: - raise InvalidPipelineEndHandleError( - "pipeline end handler ({}) import error with exception: {}".format( - settings.PIPELINE_END_HANDLER, traceback.format_exc() - ) - ) - - -# DISPATCH_UID = __name__.replace('.', '_') - - -def dispatch_pipeline_ready(): - signals.pipeline_ready.connect(handlers.pipeline_ready_handler, sender=Pipeline, dispatch_uid="_pipeline_ready") - - -def dispatch_pipeline_end(): - signals.pipeline_end.connect(end_handler, sender=Pipeline, dispatch_uid="_pipeline_end") - - -def dispatch_child_process_ready(): - signals.child_process_ready.connect( - handlers.child_process_ready_handler, sender=models.PipelineProcess, dispatch_uid="_child_process_ready" - ) - - -def dispatch_process_ready(): - signals.process_ready.connect( - handlers.process_ready_handler, sender=models.PipelineProcess, dispatch_uid="_process_ready" - ) - - -def dispatch_batch_process_ready(): - signals.batch_process_ready.connect( - handlers.batch_process_ready_handler, sender=models.PipelineProcess, dispatch_uid="_batch_process_ready" - ) - - -def dispatch_wake_from_schedule(): - signals.wake_from_schedule.connect( - 
handlers.wake_from_schedule_handler, sender=models.ScheduleService, dispatch_uid="_wake_from_schedule" - ) - - -def dispatch_schedule_ready(): - signals.schedule_ready.connect( - handlers.schedule_ready_handler, sender=models.ScheduleService, dispatch_uid="_schedule_ready" - ) - - -def dispatch_process_unfreeze(): - signals.process_unfreeze.connect( - handlers.process_unfreeze_handler, sender=models.PipelineProcess, dispatch_uid="_process_unfreeze" - ) - - -def dispatch_service_activity_timeout_monitor_start(): - signals.service_activity_timeout_monitor_start.connect( - handlers.service_activity_timeout_monitor_start_handler, - sender=ServiceActivity, - dispatch_uid="_service_activity_timeout_monitor_start", - ) - - -def dispatch_service_activity_timeout_monitor_end(): - signals.service_activity_timeout_monitor_end.connect( - handlers.service_activity_timeout_monitor_end_handler, - sender=ServiceActivity, - dispatch_uid="__service_activity_timeout_monitor_end", - ) - - -def dispatch(): - dispatch_pipeline_ready() - dispatch_pipeline_end() - dispatch_child_process_ready() - dispatch_process_ready() - dispatch_batch_process_ready() - dispatch_wake_from_schedule() - dispatch_schedule_ready() - dispatch_process_unfreeze() - dispatch_service_activity_timeout_monitor_start() - dispatch_service_activity_timeout_monitor_end() diff --git a/lib/pipeline/engine/signals/handlers.py b/lib/pipeline/engine/signals/handlers.py deleted file mode 100644 index 6daab8b..0000000 --- a/lib/pipeline/engine/signals/handlers.py +++ /dev/null @@ -1,172 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging -import contextlib - -from pipeline.celery.settings import QueueResolver -from pipeline.engine import tasks, exceptions -from pipeline.engine.models import ( - NodeCeleryTask, - PipelineModel, - PipelineProcess, - ProcessCeleryTask, - ScheduleCeleryTask, - SendFailedCeleryTask, -) - -logger = logging.getLogger("root") - - -@contextlib.contextmanager -def celery_task_send_fail_pass(): - try: - yield - except exceptions.CeleryFailedTaskCatchException as e: - # we catch CeleryFailedTaskCatchException here and ignore it. - # so we can process the fail task in SendFailedCeleryTask - logger.exception("{} task send error.".format(e.task_name)) - - -class CeleryTaskArgsResolver(object): - def __init__(self, process_id): - self.process_id = process_id - - def resolve_args(self, task): - args = {} - task_args = PipelineProcess.objects.task_args_for_process(self.process_id) - - queue = task_args["queue"] - priority = task_args["priority"] - - args["priority"] = priority - - if queue: - queue_resolver = QueueResolver(queue) - args["routing_key"] = queue_resolver.resolve_task_routing_key(task) - args["queue"] = queue_resolver.resolve_task_queue_name(task) - - return args - - -def pipeline_ready_handler(sender, process_id, **kwargs): - task = tasks.start - args_resolver = CeleryTaskArgsResolver(process_id) - - with celery_task_send_fail_pass(): - ProcessCeleryTask.objects.start_task( - process_id=process_id, task=task, kwargs={"args": [process_id], **args_resolver.resolve_args(task)}, - ) - - -def pipeline_end_handler(sender, root_pipeline_id, **kwargs): - pass - - -def 
child_process_ready_handler(sender, child_id, **kwargs): - task = tasks.dispatch - args_resolver = CeleryTaskArgsResolver(child_id) - - with celery_task_send_fail_pass(): - ProcessCeleryTask.objects.start_task( - process_id=child_id, task=task, kwargs={"args": [child_id], **args_resolver.resolve_args(task)}, - ) - - -def process_ready_handler(sender, process_id, current_node_id=None, call_from_child=False, **kwargs): - - task = tasks.process_wake_up - args_resolver = CeleryTaskArgsResolver(process_id) - - with celery_task_send_fail_pass(): - ProcessCeleryTask.objects.start_task( - process_id=process_id, - task=task, - kwargs={"args": [process_id, current_node_id, call_from_child], **args_resolver.resolve_args(task)}, - ) - - -def batch_process_ready_handler(sender, process_id_list, pipeline_id, **kwargs): - - task = tasks.batch_wake_up - task_args = PipelineModel.objects.task_args_for_pipeline(pipeline_id) - priority = task_args["priority"] - queue = task_args["queue"] - - kwargs = { - "args": [process_id_list, pipeline_id], - "priority": priority, - } - if queue: - kwargs["routing_key"] = QueueResolver(queue).resolve_task_routing_key(task) - - with celery_task_send_fail_pass(): - with SendFailedCeleryTask.watch( - name=task.name, kwargs=kwargs, type=SendFailedCeleryTask.TASK_TYPE_EMPTY, extra_kwargs={}, - ): - task.apply_async(**kwargs) - - -def wake_from_schedule_handler(sender, process_id, activity_id, **kwargs): - - task = tasks.wake_from_schedule - args_resolver = CeleryTaskArgsResolver(process_id) - - with celery_task_send_fail_pass(): - ProcessCeleryTask.objects.start_task( - process_id=process_id, - task=task, - kwargs={"args": [process_id, activity_id], **args_resolver.resolve_args(task)}, - ) - - -def process_unfreeze_handler(sender, process_id, **kwargs): - task = tasks.process_unfreeze - args_resolver = CeleryTaskArgsResolver(process_id) - - with celery_task_send_fail_pass(): - ProcessCeleryTask.objects.start_task( - process_id=process_id, task=task, 
kwargs={"args": [process_id], **args_resolver.resolve_args(task)}, - ) - - -def schedule_ready_handler(sender, process_id, schedule_id, countdown, data_id=None, **kwargs): - task = tasks.service_schedule - args_resolver = CeleryTaskArgsResolver(process_id) - - with celery_task_send_fail_pass(): - ScheduleCeleryTask.objects.start_task( - schedule_id=schedule_id, - task=task, - kwargs={ - "args": [process_id, schedule_id, data_id], - "countdown": countdown, - **args_resolver.resolve_args(task), - }, - ) - - -def service_activity_timeout_monitor_start_handler(sender, node_id, version, root_pipeline_id, countdown, **kwargs): - NodeCeleryTask.objects.start_task( - node_id=node_id, - task=tasks.node_timeout_check, - kwargs={ - "args": [node_id, version, root_pipeline_id], - "countdown": countdown, - "priority": PipelineModel.objects.priority_for_pipeline(root_pipeline_id), - }, - ) - - -def service_activity_timeout_monitor_end_handler(sender, node_id, version, **kwargs): - NodeCeleryTask.objects.revoke(node_id) diff --git a/lib/pipeline/engine/states.py b/lib/pipeline/engine/states.py deleted file mode 100644 index 5d6d1ca..0000000 --- a/lib/pipeline/engine/states.py +++ /dev/null @@ -1,101 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from pipeline.engine.utils import ConstantDict - -CREATED = "CREATED" -READY = "READY" -RUNNING = "RUNNING" -SUSPENDED = "SUSPENDED" -BLOCKED = "BLOCKED" -FINISHED = "FINISHED" -FAILED = "FAILED" -REVOKED = "REVOKED" -EXPIRED = "EXPIRED" - -ALL_STATES = frozenset([READY, RUNNING, SUSPENDED, BLOCKED, FINISHED, FAILED, REVOKED]) - -ARCHIVED_STATES = frozenset([FINISHED, FAILED, REVOKED]) -SLEEP_STATES = frozenset([SUSPENDED, REVOKED]) -CHILDREN_IGNORE_STATES = frozenset([BLOCKED]) - -_NODE_TRANSITION = ConstantDict( - { - READY: frozenset([RUNNING, SUSPENDED]), - RUNNING: frozenset([FINISHED, FAILED]), - SUSPENDED: frozenset([READY, REVOKED]), - BLOCKED: frozenset([]), - FINISHED: frozenset([RUNNING, FAILED]), - FAILED: frozenset([]), - REVOKED: frozenset([]), - } -) - -_PIPELINE_TRANSITION = ConstantDict( - { - READY: frozenset([RUNNING, SUSPENDED, BLOCKED]), - RUNNING: frozenset([SUSPENDED, BLOCKED, FINISHED, FAILED]), - SUSPENDED: frozenset([READY, REVOKED, BLOCKED]), - BLOCKED: frozenset([READY, REVOKED]), - FINISHED: frozenset([RUNNING]), - FAILED: frozenset([]), - REVOKED: frozenset([]), - } -) - -_APPOINT_PIPELINE_TRANSITION = ConstantDict( - { - READY: frozenset([SUSPENDED, REVOKED]), - RUNNING: frozenset([SUSPENDED, REVOKED]), - SUSPENDED: frozenset([READY, REVOKED, RUNNING]), - BLOCKED: frozenset([REVOKED]), - FINISHED: frozenset([]), - FAILED: frozenset([REVOKED]), - REVOKED: frozenset([]), - } -) - -_APPOINT_NODE_TRANSITION = ConstantDict( - { - READY: frozenset([SUSPENDED]), - RUNNING: frozenset([]), - SUSPENDED: frozenset([READY]), - BLOCKED: frozenset([]), - FINISHED: frozenset([]), - FAILED: frozenset([READY, FINISHED]), - REVOKED: frozenset([]), - } -) - -TRANSITION_MAP = { - # first level: is_pipeline - True: { - # second level: appoint - True: _APPOINT_PIPELINE_TRANSITION, - False: _PIPELINE_TRANSITION, - }, - False: {True: _APPOINT_NODE_TRANSITION, False: _NODE_TRANSITION}, -} - - -def can_transit(from_state, to_state, is_pipeline=False, 
appoint=False): - transition = TRANSITION_MAP[is_pipeline][appoint] - - if from_state in transition: - if to_state in transition[from_state]: - return True - return False - - -def is_rerunning(from_state, to_state): - return from_state == FINISHED and to_state == RUNNING diff --git a/lib/pipeline/engine/tasks.py b/lib/pipeline/engine/tasks.py deleted file mode 100644 index 956350f..0000000 --- a/lib/pipeline/engine/tasks.py +++ /dev/null @@ -1,306 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging -import datetime -from dateutil.relativedelta import relativedelta -from celery import task -from celery.schedules import crontab -from celery.task import periodic_task -from django.db import transaction, connection - -from pipeline.conf import default_settings -from pipeline.core.pipeline import Pipeline -from pipeline.engine import api, signals, states -from pipeline.engine.core import runtime, schedule -from pipeline.engine.health import zombie -from pipeline.engine.models import ( - NodeCeleryTask, - NodeRelationship, - PipelineProcess, - ProcessCeleryTask, - Status, - ScheduleService, - History, -) -from pipeline.models import PipelineInstance - -logger = logging.getLogger("celery") - - -@task(ignore_result=True) -def process_unfreeze(process_id): - process = PipelineProcess.objects.get(id=process_id) - if not process.is_alive: - logger.warning("process(%s) is not alive, mission cancel." % process_id) - return - - runtime.run_loop(process) - - -@task(ignore_result=True) -def start(process_id): - process = PipelineProcess.objects.get(id=process_id) - if not process.is_alive: - logger.warning("process(%s) is not alive, mission cancel." % process_id) - return - - pipeline_id = process.root_pipeline.id - # try to run - action_result = Status.objects.transit(pipeline_id, states.RUNNING, is_pipeline=True, start=True) - if not action_result.result: - logger.warning("can not start pipeline({}), message: {}".format(pipeline_id, action_result.message)) - return - - NodeRelationship.objects.build_relationship(pipeline_id, pipeline_id) - - runtime.run_loop(process) - - -@task(ignore_result=True) -def dispatch(child_id): - process = PipelineProcess.objects.get(id=child_id) - if not process.is_alive: - logger.info("process(%s) is not alive, mission cancel." 
% child_id) - return - - runtime.run_loop(process) - - -@task(ignore_result=True) -def process_wake_up(process_id, current_node_id=None, call_from_child=False): - process = PipelineProcess.objects.get(id=process_id) - if not process.is_alive: - logger.warning("process(%s) is not alive, mission cancel." % process_id) - return - - pipeline_id = process.root_pipeline.id - if not call_from_child: - # success_when_unchanged to deal with parallel wake up - action_result = Status.objects.transit( - pipeline_id, to_state=states.RUNNING, is_pipeline=True, unchanged_pass=True - ) - if not action_result.result: - # BLOCKED is a tolerant running state - if action_result.extra.state != states.BLOCKED: - logger.warning("can not start pipeline({}), message: {}".format(pipeline_id, action_result.message)) - return - - process.wake_up() - if current_node_id: - process.current_node_id = current_node_id - - runtime.run_loop(process) - - -@task(ignore_result=True) -def wake_up(process_id): - process = PipelineProcess.objects.get(id=process_id) - if not process.is_alive: - logger.warning("process(%s) is not alive, mission cancel." 
% process_id) - return - - process.wake_up() - runtime.run_loop(process) - - -@task(ignore_result=True) -def batch_wake_up(process_id_list, pipeline_id): - # success_when_unchanged to deal with parallel gateway subprocess wake up - action_result = Status.objects.transit(pipeline_id, to_state=states.RUNNING, is_pipeline=True, unchanged_pass=True) - if not action_result.result: - logger.warning("can not start pipeline({}), message: {}".format(pipeline_id, action_result.message)) - return - for process_id in process_id_list: - task_id = wake_up.apply_async(args=[process_id]).id - ProcessCeleryTask.objects.bind(process_id, task_id) - - -@task(ignore_result=True) -def wake_from_schedule(process_id, service_act_id): - process = PipelineProcess.objects.get(id=process_id) - process.wake_up() - - service_act = process.top_pipeline.node(service_act_id) - process.current_node_id = service_act.next().id - runtime.run_loop(process) - - -@task(ignore_result=True) -def service_schedule(process_id, schedule_id, data_id=None): - schedule.schedule(process_id, schedule_id, data_id) - - -@task(ignore_result=True) -def node_timeout_check(node_id, version, root_pipeline_id): - NodeCeleryTask.objects.destroy(node_id) - state = Status.objects.state_for(node_id, version=version, may_not_exist=True) - if not state or state != states.RUNNING: - logger.warning("node {} {} timeout kill failed, node not exist or not in running".format(node_id, version)) - return - - action_result = api.forced_fail(node_id, kill=True, ex_data="node execution timeout") - if action_result.result: - signals.activity_failed.send(sender=Pipeline, pipeline_id=root_pipeline_id, pipeline_activity_id=node_id) - else: - logger.warning("node {} - {} timeout kill failed".format(node_id, version)) - - -@periodic_task(run_every=(crontab(**default_settings.ENGINE_ZOMBIE_PROCESS_HEAL_CRON)), ignore_result=True) -def heal_zombie_process(): - logger.info("Zombie process heal start") - - healer = zombie.get_healer() - - try: - 
healer.heal() - except Exception: - logger.exception("An error occurred when healing zombies") - - logger.info("Zombie process heal finish") - - -@periodic_task(run_every=(crontab(**default_settings.EXPIRED_TASK_CLEAN_CRON)), ignore_result=True) -def expired_tasks_clean(): - if not default_settings.EXPIRED_TASK_CLEAN: - logger.info("EXPIRED_TASK_CLEAN switch off, won't clean expired tasks.") - return - timestamp = datetime.datetime.now().timestamp() - logger.info("Expired tasks clean start, timestamp: {}".format(timestamp)) - - expired_create_time = datetime.date.today() - relativedelta(months=default_settings.TASK_EXPIRED_MONTH) - pipeline_instance_ids = list( - PipelineInstance.objects.filter( - create_time__lte=expired_create_time, is_finished=True, is_revoked=False, is_expired=False - ) - .order_by("create_time") - .values_list("instance_id", flat=True)[: default_settings.EXPIRED_TASK_CLEAN_NUM_LIMIT] - ) - logger.info( - "Clean expired tasks before {} with tasks number: {}, instance ids: {}, timestamp: {}".format( - expired_create_time, len(pipeline_instance_ids), ",".join(pipeline_instance_ids), timestamp - ) - ) - - for instance_id in pipeline_instance_ids: - try: - logger.info("Clean expired task: {}, timestamp: {}".format(instance_id, timestamp)) - _clean_pipeline_instance_data(instance_id, timestamp) - except Exception as e: - logger.exception( - "An error occurred when clean expired task instance {}: {}, {}".format(instance_id, e, timestamp) - ) - - logger.info("Expired tasks clean finish, timestamp: {}".format(timestamp)) - - -def _clean_pipeline_instance_data(instance_id, timestamp): - """ - 根据instance_id删除对应的任务数据 - """ - process_nodes = list( - set(NodeRelationship.objects.filter(ancestor_id=instance_id).values_list("descendant_id", flat=True)) - ) - process_nodes = [process_node for process_node in process_nodes if process_node] - process_nodes_regex = "^" + "|^".join(process_nodes) if process_nodes else "" - pipeline_processes = 
PipelineProcess.objects.filter(root_pipeline_id=instance_id).values_list("id", "snapshot__id") - pipeline_process_ids, process_snapshot_ids = [], [] - for process_id, snapshot_id in pipeline_processes: - if process_id: - pipeline_process_ids.append(process_id) - if snapshot_id: - process_snapshot_ids.append(snapshot_id) - - delete_subprocess_relationship = ( - "DELETE FROM `engine_subprocessrelationship` WHERE `engine_subprocessrelationship`.`process_id` IN (%s)" - ) - delete_process_snapshot = "DELETE FROM `engine_processsnapshot` WHERE `engine_processsnapshot`.`id` IN (%s)" - delete_pipeline_model = "DELETE FROM `engine_pipelinemodel` WHERE `engine_pipelinemodel`.`process_id` IN (%s)" - delete_process_celery_task = ( - "DELETE FROM `engine_processcelerytask` WHERE `engine_processcelerytask`.`process_id` IN (%s)" - ) - schedule_service_ids = list( - ScheduleService.objects.filter(process_id__in=pipeline_process_ids).values_list("id", flat=True) - ) - schedule_service_ids = [schedule_service_id for schedule_service_id in schedule_service_ids if schedule_service_id] - delete_schedule_service = "DELETE FROM `engine_scheduleservice` WHERE `engine_scheduleservice`.`process_id` IN (%s)" - delete_multi_callback_data = ( - "DELETE FROM `engine_multicallbackdata` WHERE `engine_multicallbackdata`.`schedule_id` IN (%s)" - ) - delete_node_relationship = ( - "DELETE FROM `engine_noderelationship` " - "WHERE (`engine_noderelationship`.`ancestor_id` IN (%s) " - "OR `engine_noderelationship`.`descendant_id` IN (%s)) " - ) - delete_node_celery_tasks = "DELETE FROM `engine_nodecelerytask` " "WHERE `engine_nodecelerytask`.`node_id` IN (%s)" - delete_status = "DELETE FROM `engine_status` WHERE `engine_status`.`id` IN (%s)" - delete_data = "DELETE FROM `engine_data` WHERE `engine_data`.`id` IN (%s)" - delete_datasnapshot = "DELETE FROM `engine_datasnapshot` WHERE `engine_datasnapshot`.`key` REGEXP %s" - delete_schedule_celery_task = ( - "DELETE FROM `engine_schedulecelerytask`" "WHERE 
`engine_schedulecelerytask`.`schedule_id` REGEXP %s" - ) - history_data_ids = list( - History.objects.filter(identifier__in=process_nodes).only("data").values_list("data__id", flat=True) - ) - delete_history = "DELETE FROM `engine_history` WHERE `engine_history`.`identifier` IN (%s)" - delete_history_data = "DELETE FROM `engine_historydata` WHERE `engine_historydata`.`id` IN (%s)" - delete_pipeline_process = ( - "DELETE FROM `engine_pipelineprocess` " "WHERE `engine_pipelineprocess`.`root_pipeline_id` = %s" - ) - with transaction.atomic(): - with connection.cursor() as cursor: - if pipeline_process_ids: - process_fs = _sql_format_strings(pipeline_process_ids) - _raw_sql_execute(cursor, delete_subprocess_relationship % process_fs, pipeline_process_ids, timestamp) - _raw_sql_execute(cursor, delete_pipeline_model % process_fs, pipeline_process_ids, timestamp) - _raw_sql_execute(cursor, delete_process_celery_task % process_fs, pipeline_process_ids, timestamp) - _raw_sql_execute(cursor, delete_schedule_service % process_fs, pipeline_process_ids, timestamp) - if process_snapshot_ids: - snapshot_fd = _sql_format_strings(process_snapshot_ids) - _raw_sql_execute(cursor, delete_process_snapshot % snapshot_fd, process_snapshot_ids, timestamp) - if schedule_service_ids: - service_fd = _sql_format_strings(schedule_service_ids) - _raw_sql_execute(cursor, delete_multi_callback_data % service_fd, schedule_service_ids, timestamp) - if process_nodes: - node_fs = _sql_format_strings(process_nodes) - _raw_sql_execute( - cursor, delete_node_relationship % (node_fs, node_fs), process_nodes + process_nodes, timestamp - ) - _raw_sql_execute(cursor, delete_node_celery_tasks % node_fs, process_nodes, timestamp) - _raw_sql_execute(cursor, delete_status % node_fs, process_nodes, timestamp) - _raw_sql_execute(cursor, delete_data % node_fs, process_nodes, timestamp) - _raw_sql_execute(cursor, delete_history % node_fs, process_nodes, timestamp) - if process_nodes_regex: - 
_raw_sql_execute(cursor, delete_datasnapshot, [process_nodes_regex], timestamp) - _raw_sql_execute(cursor, delete_schedule_celery_task, [process_nodes_regex], timestamp) - if history_data_ids: - history_fs = _sql_format_strings(history_data_ids) - _raw_sql_execute(cursor, delete_history_data % history_fs, history_data_ids, timestamp) - _raw_sql_execute(cursor, delete_pipeline_process, [instance_id], timestamp) - PipelineInstance.objects.filter(instance_id=instance_id).update(is_expired=True) - - -def _sql_log(sql, params, timestamp): - if isinstance(params, list): - logger.info("[execute raw sql]: {}, timestamp: {}".format(sql % tuple(params), timestamp)) - else: - logger.info("[execute raw sql]: {}, timestamp: {}".format(sql % params, timestamp)) - - -def _sql_format_strings(list_data): - return ",".join(["%s"] * len(list_data)) - - -def _raw_sql_execute(cursor, sql, params, timestamp): - _sql_log(sql, params, timestamp) - cursor.execute(sql, params) diff --git a/lib/pipeline/engine/utils.py b/lib/pipeline/engine/utils.py deleted file mode 100644 index daec04f..0000000 --- a/lib/pipeline/engine/utils.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from django.utils import timezone - - -class Stack(list): - def top(self): - return self[len(self) - 1] - - def push(self, item): - self.append(item) - - -class ConstantDict(dict): - """ConstantDict is a subclass of :class:`dict`, implementing __setitem__ - method to avoid item assignment:: - - >>> d = ConstantDict({'key': 'value'}) - >>> d['key'] = 'value' - Traceback (most recent call last): - ... - TypeError: 'ConstantDict' object does not support item assignment - """ - - def __setitem__(self, key, value): - raise TypeError("'%s' object does not support item assignment" % self.__class__.__name__) - - -def calculate_elapsed_time(started_time, archived_time): - """ - @summary: 计算节点耗时 - @param started_time: 执行开始时间 - @param archived_time: 执行结束时间 - @return: - """ - if archived_time and started_time: - elapsed_time = (archived_time - started_time).total_seconds() - elif started_time: - elapsed_time = (timezone.now() - started_time).total_seconds() - else: - elapsed_time = 0 - return round(elapsed_time) - - -class ActionResult(object): - def __init__(self, result, message, extra=None): - self.result = result - self.message = message - self.extra = extra diff --git a/lib/pipeline/eri/__init__.py b/lib/pipeline/eri/__init__.py deleted file mode 100644 index a95c0e4..0000000 --- a/lib/pipeline/eri/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" -default_app_config = "pipeline.eri.apps.ERIConfig" diff --git a/lib/pipeline/eri/admin.py b/lib/pipeline/eri/admin.py deleted file mode 100644 index e2fac9c..0000000 --- a/lib/pipeline/eri/admin.py +++ /dev/null @@ -1,104 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from django.contrib import admin - -from pipeline.eri import models - - -@admin.register(models.Process) -class ProcessAdmin(admin.ModelAdmin): - list_display = [ - "id", - "parent_id", - "ack_num", - "need_ack", - "asleep", - "suspended", - "frozen", - "dead", - "root_pipeline_id", - "current_node_id", - ] - search_fields = ["root_pipeline_id__exact", "current_node_id__exact", "suspended_by__exact"] - - -@admin.register(models.Node) -class NodeAdmin(admin.ModelAdmin): - list_display = ["id", "node_id", "detail"] - search_fields = ["node_id__exact"] - - -@admin.register(models.State) -class StateAdmin(admin.ModelAdmin): - list_display = [ - "id", - "node_id", - "root_id", - "parent_id", - "name", - "version", - "loop", - "created_time", - "started_time", - "archived_time", - ] - search_fields = ["node_id__exact", "root_id__exact", "parent_id__exact"] - - -@admin.register(models.Schedule) -class ScheduleAdmin(admin.ModelAdmin): - list_display = ["id", "type", "process_id", "node_id", "finished", "expired", "version", "schedule_times"] - search_fields = ["node_id__exact"] - - -@admin.register(models.Data) -class DataAdmin(admin.ModelAdmin): - list_display = ["id", "node_id", "inputs", "outputs"] - search_fields = ["node_id__exact"] - - -@admin.register(models.ExecutionData) -class ExecutionDataAdmin(admin.ModelAdmin): - list_display = ["id", "node_id", "inputs", "outputs"] - search_fields = ["node_id__exact"] - - -@admin.register(models.CallbackData) -class CallbackDataAdmin(admin.ModelAdmin): - list_display = ["id", "node_id", "version", "data"] - search_fields = ["id__exact"] - - -@admin.register(models.ContextValue) -class ContextValueAdmin(admin.ModelAdmin): - list_display = ["id", "pipeline_id", "key", "type", "serializer", "value"] - search_fields = ["pipeline_id__exact"] - - -@admin.register(models.ContextOutputs) -class ContextOutputsAdmin(admin.ModelAdmin): - list_display = ["id", "pipeline_id", "outputs"] - search_fields = ["pipeline_id__exact"] - - 
-@admin.register(models.ExecutionHistory) -class ExecutionHistoryAdmin(admin.ModelAdmin): - list_display = ["id", "node_id", "loop", "started_time", "archived_time"] - search_fields = ["node_id__exact"] - - -@admin.register(models.LogEntry) -class LogEntryAdmin(admin.ModelAdmin): - list_display = ["id", "node_id", "version", "level_name", "message", "logged_at"] - search_fields = ["node_id__exact"] diff --git a/lib/pipeline/eri/apps.py b/lib/pipeline/eri/apps.py deleted file mode 100644 index fa2c2a3..0000000 --- a/lib/pipeline/eri/apps.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.apps import AppConfig - -from bamboo_engine.handlers import register - - -class ERIConfig(AppConfig): - name = "pipeline.eri" - verbose_name = "PipelineEngineRuntimeInterface" - - def ready(self): - from .celery.tasks import execute, schedule, timeout_check # noqa - - register() diff --git a/lib/pipeline/eri/celery/__init__.py b/lib/pipeline/eri/celery/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/eri/celery/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. 
-Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/eri/celery/queues.py b/lib/pipeline/eri/celery/queues.py deleted file mode 100644 index 261a4bf..0000000 --- a/lib/pipeline/eri/celery/queues.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from typing import Any, List - -from kombu import Exchange, Queue - - -class QueueResolver: - def __init__(self, queue: str): - self.queue = queue - - def resolve_task_queue_and_routing_key(self, task: Any) -> (str, str): - task_name = task - if not isinstance(task_name, str): - task_name = task.name - - queue_config = self.routes_config() - return queue_config[task_name]["queue"], queue_config[task_name]["routing_key"] - - def routes_config(self) -> dict: - suffix = "_%s" % self.queue if self.queue else "" - return { - "pipeline.eri.celery.tasks.execute": { - "queue": "er_execute%s" % suffix, - "routing_key": "er_execute%s" % suffix, - }, - "pipeline.eri.celery.tasks.schedule": { - "queue": "er_schedule%s" % suffix, - "routing_key": "er_schedule%s" % suffix, - }, - "pipeline.eri.celery.tasks.timeout_check": { - "queue": "er_timeout%s" % suffix, - "routing_key": "er_timeout%s" % suffix, - }, - } - - def queues(self) -> List[Queue]: - exchange = Exchange("default", type="direct") - return [ - Queue(queue_config["queue"], exchange, routing_key=queue_config["routing_key"], max_priority=255) - for queue_config in self.routes_config().values() - ] - - -CELERY_QUEUES = QueueResolver("").queues() diff --git a/lib/pipeline/eri/celery/step.py b/lib/pipeline/eri/celery/step.py deleted file mode 100644 index 976949b..0000000 --- a/lib/pipeline/eri/celery/step.py +++ /dev/null @@ -1,23 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from celery.bootsteps import StartStopStep -from prometheus_client import start_http_server - - -class PromServerStep(StartStopStep): - requires = {"celery.worker.components:Timer"} - port = 8001 # default port - - def start(self, worker): - start_http_server(self.port) diff --git a/lib/pipeline/eri/celery/tasks.py b/lib/pipeline/eri/celery/tasks.py deleted file mode 100644 index ba001e7..0000000 --- a/lib/pipeline/eri/celery/tasks.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" -from typing import Optional - -from celery import task - -from bamboo_engine import states -from bamboo_engine.engine import Engine - -from pipeline.eri.runtime import BambooDjangoRuntime - - -@task(ignore_result=True) -def execute(process_id: int, node_id: str, root_pipeline_id: str = None, parent_pipeline_id: str = None): - runtime = BambooDjangoRuntime() - Engine(runtime).execute( - process_id=process_id, node_id=node_id, root_pipeline_id=root_pipeline_id, parent_pipeline_id=parent_pipeline_id - ) - - -@task(ignore_result=True) -def schedule(process_id: int, node_id: str, schedule_id: str, callback_data_id: Optional[int]): - runtime = BambooDjangoRuntime() - Engine(runtime).schedule( - process_id=process_id, node_id=node_id, schedule_id=schedule_id, callback_data_id=callback_data_id - ) - - -@task(ignore_result=True) -def timeout_check(self, process_id: int, node_id: str, version: str): - runtime = BambooDjangoRuntime() - state = runtime.get_state(node_id=node_id) - if state.name == states.RUNNING and state.version == version: - Engine(runtime).forced_fail_activity(node_id=node_id, ex_data="timeout kill") diff --git a/lib/pipeline/eri/codec.py b/lib/pipeline/eri/codec.py deleted file mode 100644 index ebb87a4..0000000 --- a/lib/pipeline/eri/codec.py +++ /dev/null @@ -1,55 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations under the License. -""" - -import json - -from typing import Any - -from django.utils.module_loading import import_string - -DATA_JSON_ENCODER_PATH = None -DATA_JSON_OBJECT_HOOK_PATH = None - -_LOCAL = {} - - -def _get_local(key, path): - if key in _LOCAL: - return _LOCAL[key] - - try: - _LOCAL[key] = import_string(path) - except ImportError: - _LOCAL[key] = None - - return _LOCAL[key] - - -def _get_data_json_encoder(): - if not DATA_JSON_ENCODER_PATH: - return None - return _get_local("data_json_encoder", DATA_JSON_ENCODER_PATH) - - -def _get_data_json_object_hook(): - if not DATA_JSON_OBJECT_HOOK_PATH: - return None - return _get_local("data_json_object_hook", DATA_JSON_OBJECT_HOOK_PATH) - - -def data_json_loads(data: str) -> Any: - return json.loads(data, object_hook=_get_data_json_object_hook()) - - -def data_json_dumps(data: Any) -> str: - return json.dumps(data, cls=_get_data_json_encoder()) diff --git a/lib/pipeline/eri/imp/__init__.py b/lib/pipeline/eri/imp/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/eri/imp/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" diff --git a/lib/pipeline/eri/imp/context.py b/lib/pipeline/eri/imp/context.py deleted file mode 100644 index c13a687..0000000 --- a/lib/pipeline/eri/imp/context.py +++ /dev/null @@ -1,150 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import json -from typing import Dict, List, Set - -from django.db import transaction - -from bamboo_engine import metrics -from bamboo_engine.eri import ContextValue, ContextValueType - -from pipeline.eri.models import ContextValue as DBContextValue -from pipeline.eri.models import ContextOutputs -from pipeline.eri.imp.serializer import SerializerMixin - - -class ContextMixin(SerializerMixin): - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_CONTEXT_VALUE_READ_TIME) - def get_context_values(self, pipeline_id: str, keys: set) -> List[ContextValue]: - """ - 获取某个流程上下文中的 keys 所指定的键对应变量的值 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - :param keys: 变量键 - :type keys: set - :return: 变量值信息 - :rtype: List[ContextValue] - """ - qs = DBContextValue.objects.filter(pipeline_id=pipeline_id, key__in=keys).only( - "key", "type", "serializer", "value", "code" - ) - - return [ - ContextValue( - key=cv_model.key, - type=ContextValueType(cv_model.type), - value=self._deserialize(cv_model.value, cv_model.serializer), - code=cv_model.code or 
None, - ) - for cv_model in qs - ] - - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_CONTEXT_REF_READ_TIME) - def get_context_key_references(self, pipeline_id: str, keys: set) -> set: - """ - 获取某个流程上下文中 keys 所指定的变量直接和间接引用的其他所有变量的键 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - :param keys: 变量 key 列表 - :type keys: set - :return: keys 所指定的变量直接和简介引用的其他所有变量的键 - :rtype: set - """ - qs = DBContextValue.objects.filter(pipeline_id=pipeline_id, key__in=keys).only("references") - - references = [] - for cv_model in qs: - references.extend(json.loads(cv_model.references)) - - return set(references) - - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_CONTEXT_VALUE_UPSERT_TIME) - @transaction.atomic - def upsert_plain_context_values(self, pipeline_id: str, update: Dict[str, ContextValue]): - """ - 更新或创建新的普通上下文数据 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - :param update: 更新数据 - :type update: Dict[str, ContextValue] - """ - exist_keys = DBContextValue.objects.filter(pipeline_id=pipeline_id).values_list("key", flat=True) - update_keys = set(update.keys()).intersection(exist_keys) - - # update - for k in update_keys: - context_value = update[k] - value, serializer = self._serialize(context_value.value) - - DBContextValue.objects.filter(pipeline_id=pipeline_id, key=k).update( - type=ContextValueType.PLAIN.value, value=value, serializer=serializer, code="", references="[]", - ) - - # insert - insert_keys = set(update.keys()).difference(exist_keys) - context_value_models = [] - for k in insert_keys: - context_value = update[k] - value, serializer = self._serialize(context_value.value) - - context_value_models.append( - DBContextValue( - pipeline_id=pipeline_id, - key=context_value.key, - type=ContextValueType.PLAIN.value, - serializer=serializer, - value=value, - code="", - references="[]", - ) - ) - - DBContextValue.objects.bulk_create(context_value_models, batch_size=500) - - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_CONTEXT_VALUE_READ_TIME) - def 
get_context(self, pipeline_id: str) -> List[ContextValue]: - """ - 获取某个流程的所有上下文数据 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - :return: [description] - :rtype: List[ContextValue] - """ - qs = DBContextValue.objects.filter(pipeline_id=pipeline_id).only("key", "type", "serializer", "value", "code") - - return [ - ContextValue( - key=cv_model.key, - type=ContextValueType(cv_model.type), - value=self._deserialize(cv_model.value, cv_model.serializer), - code=cv_model.code or None, - ) - for cv_model in qs - ] - - def get_context_outputs(self, pipeline_id: str) -> Set[str]: - """ - 获取流程上下文需要输出的数据 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - :return: 输出数据 key - :rtype: Set[str] - """ - co_model = ContextOutputs.objects.get(pipeline_id=pipeline_id) - return set(json.loads(co_model.outputs)) diff --git a/lib/pipeline/eri/imp/data.py b/lib/pipeline/eri/imp/data.py deleted file mode 100644 index a3da59b..0000000 --- a/lib/pipeline/eri/imp/data.py +++ /dev/null @@ -1,254 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import json -from typing import Dict - -from bamboo_engine import metrics, exceptions -from bamboo_engine.eri import Data, DataInput, ExecutionData, CallbackData - -from pipeline.eri import codec -from pipeline.eri.models import Data as DBData -from pipeline.eri.models import ExecutionData as DBExecutionData -from pipeline.eri.models import CallbackData as DBCallbackData -from pipeline.eri.imp.serializer import SerializerMixin - - -class DataMixin(SerializerMixin): - def _get_data_inputs(self, inputs: dict): - return {k: DataInput(need_render=v["need_render"], value=v["value"]) for k, v in inputs.items()} - - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_DATA_READ_TIME) - def get_data(self, node_id: str) -> Data: - """ - 获取某个节点的数据对象 - - :param node_id: 节点 ID - :type node_id: str - :return: 数据对象实例 - :rtype: Data - """ - try: - data_model = DBData.objects.get(node_id=node_id) - except DBData.DoesNotExist: - raise exceptions.NotFoundError - return Data( - inputs=self._get_data_inputs(codec.data_json_loads(data_model.inputs)), - outputs=json.loads(data_model.outputs), - ) - - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_DATA_INPUTS_READ_TIME) - def get_data_inputs(self, node_id: str) -> Dict[str, DataInput]: - """ - 获取某个节点的输入数据 - - :param node_id: 节点 ID - :type node_id: str - :return: 输入数据字典 - :rtype: dict - """ - qs = DBData.objects.filter(node_id=node_id).only("inputs") - - if not qs: - raise exceptions.NotFoundError - - return self._get_data_inputs(codec.data_json_loads(qs[0].inputs)) - - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_DATA_OUTPUTS_READ_TIME) - def get_data_outputs(self, node_id: str) -> dict: - """ - 获取某个节点的输出数据 - - :param node_id: 节点 ID - :type node_id: str - :return: 输入数据字典 - :rtype: dict - """ - qs = DBData.objects.filter(node_id=node_id).only("outputs") - - if not qs: - raise exceptions.NotFoundError - - return json.loads(qs[0].outputs) - - def set_data_inputs(self, node_id: str, data: Dict[str, DataInput]): - """ - 将节点数据对象的 inputs 
设置为 data - - : param node_id: 节点 ID - : type node_id: str - : param data: 目标数据 - : type data: dict - """ - inputs = codec.data_json_dumps({k: {"need_render": v.need_render, "value": v.value} for k, v in data.items()}) - if DBData.objects.filter(node_id=node_id).exists(): - DBData.objects.filter(node_id=node_id).update(inputs=inputs) - else: - DBData.objects.create(node_id=node_id, inputs=inputs, outputs="{}") - - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_EXEC_DATA_READ_TIME) - def get_execution_data(self, node_id: str) -> ExecutionData: - """ - 获取某个节点的执行数据 - - : param node_id: 节点 ID - : type node_id: str - : return: 执行数据实例 - : rtype: ExecutionData - """ - try: - data_model = DBExecutionData.objects.get(node_id=node_id) - except DBExecutionData.DoesNotExist: - raise exceptions.NotFoundError - return ExecutionData( - inputs=self._deserialize(data_model.inputs, data_model.inputs_serializer), - outputs=self._deserialize(data_model.outputs, data_model.outputs_serializer), - ) - - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_EXEC_DATA_INPUTS_READ_TIME) - def get_execution_data_inputs(self, node_id: str) -> dict: - """ - 获取某个节点的执行数据输入 - - :param node_id: 节点 ID - :type node_id: str - :return: 执行数据输入 - :rtype: dict - """ - qs = DBExecutionData.objects.filter(node_id=node_id).only("inputs_serializer", "inputs") - - if not qs: - return {} - - return self._deserialize(qs[0].inputs, qs[0].inputs_serializer) - - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_EXEC_DATA_OUTPUTS_READ_TIME) - def get_execution_data_outputs(self, node_id: str) -> dict: - """ - 获取某个节点的执行数据输出 - - :param node_id: 节点 ID - :type node_id: str - :return: 执行数据输出 - :rtype: dict - """ - qs = DBExecutionData.objects.filter(node_id=node_id).only("outputs_serializer", "outputs") - - if not qs: - return {} - - return self._deserialize(qs[0].outputs, qs[0].outputs_serializer) - - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_EXEC_DATA_WRITE_TIME) - def set_execution_data(self, node_id: str, data: 
ExecutionData): - """ - 设置某个节点的执行数据 - - :param node_id: 节点 ID - :type node_id: str - :param data: 执行数据实例 - :type data: ExecutionData - """ - inputs, inputs_serializer = self._serialize(data.inputs) - outputs, outputs_serializer = self._serialize(data.outputs) - if DBExecutionData.objects.filter(node_id=node_id).exists(): - DBExecutionData.objects.filter(node_id=node_id).update( - inputs=inputs, - inputs_serializer=inputs_serializer, - outputs=outputs, - outputs_serializer=outputs_serializer, - ) - else: - DBExecutionData.objects.create( - node_id=node_id, - inputs=inputs, - inputs_serializer=inputs_serializer, - outputs=outputs, - outputs_serializer=outputs_serializer, - ) - - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_EXEC_DATA_INPUTS_WRITE_TIME) - def set_execution_data_inputs(self, node_id: str, inputs: dict): - """ - 设置某个节点的执行数据输入 - - :param node_id: 节点 ID - :type node_id: str - :param outputs: 输出数据 - :type outputs: dict - """ - inputs, inputs_serializer = self._serialize(inputs) - if DBExecutionData.objects.filter(node_id=node_id).exists(): - DBExecutionData.objects.filter(node_id=node_id).update(inputs=inputs, inputs_serializer=inputs_serializer) - else: - DBExecutionData.objects.create( - node_id=node_id, - inputs=inputs, - inputs_serializer=inputs_serializer, - outputs="{}", - outputs_serializer=self.JSON_SERIALIZER, - ) - - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_EXEC_DATA_OUTPUTS_WRITE_TIME) - def set_execution_data_outputs(self, node_id: str, outputs: dict): - """ - 设置某个节点的执行数据输出 - - :param node_id: 节点 ID - :type node_id: str - :param outputs: 输出数据 - :type outputs: dict - """ - outputs, outputs_serializer = self._serialize(outputs) - if DBExecutionData.objects.filter(node_id=node_id).exists(): - DBExecutionData.objects.filter(node_id=node_id).update( - outputs=outputs, outputs_serializer=outputs_serializer - ) - else: - DBExecutionData.objects.create( - node_id=node_id, - inputs="{}", - inputs_serializer=self.JSON_SERIALIZER, - 
outputs=outputs, - outputs_serializer=outputs_serializer, - ) - - def set_callback_data(self, node_id: str, version: str, data: dict) -> int: - """ - 设置某个节点执行数据的回调数据 - - :param node_id: 节点 ID - :type node_id: str - :param version: 节点执行版本 - :type version: str - :param data: 回调数据 - :type data: dict - :return: 回调数据 ID - :rtype: int - """ - return DBCallbackData.objects.create(node_id=node_id, version=version, data=json.dumps(data)).id - - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_CALLBACK_DATA_READ_TIME) - def get_callback_data(self, data_id: int) -> CallbackData: - """ - 获取回调数据 - - :param data_id: Data ID - :type data_id: int - :return: 回调数据实例 - :rtype: CallbackData - """ - data_model = DBCallbackData.objects.get(id=data_id) - return CallbackData( - id=data_model.id, node_id=data_model.node_id, version=data_model.version, data=json.loads(data_model.data) - ) diff --git a/lib/pipeline/eri/imp/executable_event.py b/lib/pipeline/eri/imp/executable_event.py deleted file mode 100644 index b39411f..0000000 --- a/lib/pipeline/eri/imp/executable_event.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from typing import List - -from bamboo_engine.eri import ExecutableEvent - -from pipeline.core.flow.event import ExecutableEndEvent - - -class ExecutableEndEventWrapper(ExecutableEvent): - def __init__(self, end_event: ExecutableEndEvent): - self.end_event = end_event - - def execute(self, pipeline_stack: List[str], root_pipeline_id: str): - """ - execute 逻辑 - - :param pipeline_stack: 流程栈 - :type pipeline_stack: List[str] - :param root_pipeline_id: 根流程 ID - :type root_pipeline_id: str - """ - in_subprocess = len(pipeline_stack) > 1 - current_pipeline_id = pipeline_stack[-1] - - return self.end_event.execute(in_subprocess, root_pipeline_id, current_pipeline_id) diff --git a/lib/pipeline/eri/imp/execution_history.py b/lib/pipeline/eri/imp/execution_history.py deleted file mode 100644 index 12988de..0000000 --- a/lib/pipeline/eri/imp/execution_history.py +++ /dev/null @@ -1,136 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from typing import List -from datetime import datetime - -from bamboo_engine.eri import ExecutionHistory, ExecutionShortHistory - -from pipeline.eri.models import ExecutionHistory as DBExecutionHistory -from pipeline.eri.imp.serializer import SerializerMixin - - -class ExecutionHistoryMixin(SerializerMixin): - def add_history( - self, - node_id: str, - started_time: datetime, - archived_time: datetime, - loop: int, - skip: bool, - retry: int, - version: str, - inputs: dict, - outputs: dict, - ) -> int: - """ - 为某个节点记录一次执行历史 - - : param node_id: 节点 ID - : type node_id: str - : param started_time: 开始时间 - : type started_time: datetime - : param archived_time: 归档时间 - : type archived_time: datetime - : param loop: 重入计数 - : type loop: int - : param skip: 是否跳过 - : type skip: bool - : param retry: 重试次数 - : type retry: int - : param version: 节点执行版本号 - : type version: str - : param inputs: 输入数据 - : type inputs: dict - : param outputs: 输出数据 - : type outputs: dict - """ - inputs, inputs_serializer = self._serialize(inputs) - outputs, outputs_serializer = self._serialize(outputs) - return DBExecutionHistory.objects.create( - node_id=node_id, - loop=loop, - retry=retry, - skip=skip, - version=version, - started_time=started_time, - archived_time=archived_time, - inputs=inputs, - inputs_serializer=inputs_serializer, - outputs=outputs, - outputs_serializer=outputs_serializer, - ).id - - def get_histories(self, node_id: str, loop: int = -1) -> List[ExecutionHistory]: - """ - 返回某个节点的历史记录 - - :param node_id: 节点 ID - :type node_id: str - :param loop: 重入次数, -1 表示不过滤重入次数 - :type loop: int, optional - :return: 历史记录列表 - :rtype: List[History] - """ - fields = {"node_id": node_id} - if loop != -1: - fields["loop"] = loop - qs = DBExecutionHistory.objects.filter(**fields) - - return [ - ExecutionHistory( - id=model.id, - node_id=model.node_id, - started_time=model.started_time, - archived_time=model.archived_time, - loop=model.loop, - skip=model.skip, - retry=model.retry, - 
version=model.version, - inputs=self._deserialize(model.inputs, model.inputs_serializer), - outputs=self._deserialize(model.outputs, model.outputs_serializer), - ) - for model in qs - ] - - def get_short_histories(self, node_id: str, loop: int = -1) -> List[ExecutionShortHistory]: - """ - 返回某个节点的简要历史记录 - - :param node_id: 节点 ID - :type node_id: str - :param loop: 重入次数, -1 表示不过滤重入次数 - :type loop: int, optional - :return: 历史记录列表 - :rtype: List[ExecutionShortHistory] - """ - fields = {"node_id": node_id} - if loop != -1: - fields["loop"] = loop - qs = DBExecutionHistory.objects.filter(**fields).defer( - "inputs", "inputs_serializer", "outputs", "outputs_serializer" - ) - - return [ - ExecutionShortHistory( - id=model.id, - node_id=model.node_id, - started_time=model.started_time, - archived_time=model.archived_time, - loop=model.loop, - skip=model.skip, - retry=model.retry, - version=model.version, - ) - for model in qs - ] diff --git a/lib/pipeline/eri/imp/hooks.py b/lib/pipeline/eri/imp/hooks.py deleted file mode 100644 index a81fe6d..0000000 --- a/lib/pipeline/eri/imp/hooks.py +++ /dev/null @@ -1,276 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from typing import Optional - -from pipeline.eri.models import LogEntry - - -class HooksMixin: - def pre_prepare_run_pipeline( - self, pipeline: dict, root_pipeline_data: dict, root_pipeline_context: dict, subprocess_context: dict, **options - ): - """ - 调用 pre_prepare_run_pipeline 前执行的钩子 - - :param pipeline: 流程描述对象 - :type pipeline: dict - :param root_pipeline_data 根流程数据 - :type root_pipeline_data: dict - :param root_pipeline_context 根流程上下文 - :type root_pipeline_context: dict - :param subprocess_context 子流程预置流程上下文 - :type subprocess_context: dict - """ - - def post_prepare_run_pipeline( - self, pipeline: dict, root_pipeline_data: dict, root_pipeline_context: dict, subprocess_context: dict, **options - ): - """ - 调用 pre_prepare_run_pipeline 后执行的钩子 - - :param pipeline: 流程描述对象 - :type pipeline: dict - :param root_pipeline_data 根流程数据 - :type root_pipeline_data: dict - :param root_pipeline_context 根流程上下文 - :type root_pipeline_context: dict - :param subprocess_context 子流程预置流程上下文 - :type subprocess_context: dict - """ - - def pre_pause_pipeline(self, pipeline_id: str): - """ - 暂停 pipeline 前执行的钩子 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - """ - - def post_pause_pipeline(self, pipeline_id: str): - """ - 暂停 pipeline 后执行的钩子 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - """ - - def pre_revoke_pipeline(self, pipeline_id: str): - """ - 撤销 pipeline 前执行的钩子 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - """ - - def post_revoke_pipeline(self, pipeline_id: str): - """ - 撤销 pipeline 后执行的钩子 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - """ - - def pre_resume_pipeline(self, pipeline_id: str): - """ - 继续 pipeline 前执行的钩子 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - """ - - def post_resume_pipeline(self, pipeline_id: str): - """ - 继续 pipeline 后执行的钩子 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - """ - - def pre_resume_node(self, node_id: str): - """ - 继续节点后执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - """ 
- - def post_resume_node(self, node_id: str): - """ - 继续节点后执行的钩子 - - :param node_id: [description]节点 ID - :type node_id: str - """ - - def pre_pause_node(self, node_id: str): - """ - 暂停节点前执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - """ - - def post_pause_node(self, node_id: str): - """ - 暂停节点后执行的钩子 - - :param node_id: [description]节点 ID - :type node_id: str - """ - - def pre_retry_node(self, node_id: str, data: Optional[dict]): - """ - 重试节点前执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param data: 重试时使用的节点执行输入 - :type data: Optional[dict] - """ - - def post_retry_node(self, node_id: str, data: Optional[dict]): - """ - 重试节点后执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param data: 重试时使用的节点执行输入 - :type data: Optional[dict] - """ - - def pre_skip_node(self, node_id: str): - """ - 跳过节点前执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - """ - - def post_skip_node(self, node_id: str): - """ - 跳过节点后执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - """ - - def pre_skip_exclusive_gateway(self, node_id: str, flow_id: str): - """ - 跳过分支网关前执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param flow_id: 跳过后选择的目标流 ID - :type flow_id: str - """ - - def post_skip_exclusive_gateway(self, node_id: str, flow_id: str): - """ - 跳过分支网关后执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param flow_id: 跳过后选择的目标流 ID - :type flow_id: str - """ - - def pre_skip_conditional_parallel_gateway(self, node_id: str, flow_ids: list, converge_gateway_id: str): - """ - 跳过条件并行网关前执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param flow_ids: 跳过后选择的目标流 ID 列表 - :type flow_ids: list - :param converge_gateway_id: 目标汇聚网关 ID - :type converge_gateway_id: str - """ - - def post_skip_conditional_parallel_gateway(self, node_id: str, flow_ids: list, converge_gateway_id: str): - """ - 跳过条件并行网关后执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param flow_ids: 跳过后选择的目标流 ID 列表 - :type flow_ids: list - :param converge_gateway_id: 目标汇聚网关 ID - :type 
converge_gateway_id: str - """ - - def pre_forced_fail_activity(self, node_id: str, ex_data: str): - """ - 强制失败节点前执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param ex_data: 写入节点执行数据的失败信息 - :type ex_data: str - """ - - def post_forced_fail_activity(self, node_id: str, ex_data: str, old_version: str, new_version: str): - """ - 强制失败节点后执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param ex_data: 写入节点执行数据的失败信息 - :type ex_data: str - :param old_version: 强制失败前的节点版本 - :type old_version: str - :param new_version: 强制失败后的节点版本 - :type new_version: str - """ - # 在强制失败刷新版本后更新已经记录的日志的版本 - LogEntry.objects.filter(node_id=node_id, version=old_version).update(version=new_version) - - def pre_callback(self, node_id: str, version: str, data: str): - """ - 回调节点前执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param version: 节点执行版本 - :type version: str - :param data: 回调数据 - :type data: str - """ - - def post_callback(self, node_id: str, version: str, data: str): - """ - 回调节点后执行的钩子 - - :param node_id: 节点 ID - :type node_id: str - :param version: 节点执行版本 - :type version: str - :param data: 回调数据 - :type data: str - """ - - def pre_retry_subprocess(self, node_id: str): - """ - 子流程重试前执行的钩子 - - :param node_id: 子流程节点 ID - :type node_id: str - """ - - def post_retry_subprocess(self, node_id: str): - """ - 子流程重试后执行的钩子 - - :param node_id: 子流程节点 ID - :type node_id: str - """ diff --git a/lib/pipeline/eri/imp/node.py b/lib/pipeline/eri/imp/node.py deleted file mode 100644 index 8d2b5fb..0000000 --- a/lib/pipeline/eri/imp/node.py +++ /dev/null @@ -1,110 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import json - -from bamboo_engine import metrics -from bamboo_engine.eri import ( - Node, - NodeType, - ServiceActivity, - SubProcess, - ExclusiveGateway, - ParallelGateway, - ConditionalParallelGateway, - ConvergeGateway, - EmptyStartEvent, - EmptyEndEvent, - ExecutableEndEvent, - Condition, -) - -from pipeline.eri.models import Node as DBNode - - -class NodeMixin: - def _get_node(self, node: DBNode): - node_detail = json.loads(node.detail) - node_type = node_detail["type"] - targets = node_detail["targets"] - common_args = dict( - id=node.node_id, - target_flows=list(targets.keys()), - target_nodes=list(targets.values()), - targets=node_detail["targets"], - root_pipeline_id=node_detail["root_pipeline_id"], - parent_pipeline_id=node_detail["parent_pipeline_id"], - can_skip=node_detail["can_skip"], - can_retry=node_detail["can_retry"], - ) - - if node_type == NodeType.ServiceActivity.value: - return ServiceActivity( - type=NodeType.ServiceActivity, - code=node_detail["code"], - version=node_detail["version"], - timeout=node_detail["timeout"], - error_ignorable=node_detail["error_ignorable"], - **common_args - ) - - elif node_type == NodeType.SubProcess.value: - return SubProcess(type=NodeType.SubProcess, start_event_id=node_detail["start_event_id"], **common_args) - - elif node_type == NodeType.ExclusiveGateway.value: - return ExclusiveGateway( - type=NodeType.ExclusiveGateway, - conditions=[Condition(**c) for c in node_detail["conditions"]], - **common_args - ) - - elif node_type == NodeType.ParallelGateway.value: - return ParallelGateway( - type=NodeType.ParallelGateway, 
converge_gateway_id=node_detail["converge_gateway_id"], **common_args - ) - - elif node_type == NodeType.ConditionalParallelGateway.value: - return ConditionalParallelGateway( - type=NodeType.ConditionalParallelGateway, - converge_gateway_id=node_detail["converge_gateway_id"], - conditions=[Condition(**c) for c in node_detail["conditions"]], - **common_args - ) - - elif node_type == NodeType.ConvergeGateway.value: - return ConvergeGateway(type=NodeType.ConvergeGateway, **common_args) - - elif node_type == NodeType.EmptyStartEvent.value: - return EmptyStartEvent(type=NodeType.EmptyStartEvent, **common_args) - - elif node_type == NodeType.EmptyEndEvent.value: - return EmptyEndEvent(type=NodeType.EmptyEndEvent, **common_args) - - elif node_type == NodeType.ExecutableEndEvent.value: - return ExecutableEndEvent(type=NodeType.ExecutableEndEvent, code=node_detail["code"], **common_args) - - else: - raise ValueError("unknown node type: {}".format(node_type)) - - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_NODE_READ_TIME) - def get_node(self, node_id: str) -> Node: - """ - 获取某个节点的详细信息 - - :param node_id: 节点 ID - :type node_id: str - :return: Node 实例 - :rtype: Node - """ - node = DBNode.objects.get(node_id=node_id) - return self._get_node(node) diff --git a/lib/pipeline/eri/imp/plugin_manager.py b/lib/pipeline/eri/imp/plugin_manager.py deleted file mode 100644 index 0114691..0000000 --- a/lib/pipeline/eri/imp/plugin_manager.py +++ /dev/null @@ -1,70 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from bamboo_engine.eri import Service, ExecutableEvent, Variable - -from pipeline.component_framework.library import ComponentLibrary -from pipeline.core.flow import FlowNodeClsFactory -from pipeline.core.data.library import VariableLibrary - -from pipeline.eri.imp.service import ServiceWrapper -from pipeline.eri.imp.executable_event import ExecutableEndEventWrapper -from pipeline.eri.imp.variable import VariableWrapper - - -class PipelinePluginManagerMixin: - def get_service(self, code: str, version: str) -> Service: - """ - 根据代号与版本获取特定服务对象实例 - - :param code: 服务唯一代号 - :type code: str - :param version: 服务版本 - :type version: str - :return: 服务对象实例 - :rtype: Service - """ - comp_cls = ComponentLibrary.get_component_class(code, version) - service = comp_cls.bound_service() - return ServiceWrapper(service) - - def get_executable_end_event(self, code: str) -> ExecutableEvent: - """ - 根据代号获取特定可执行结束事件实例 - - :param code: 可执行结束事件唯一代号 - :type code: str - :return: 可执行结束事件实例 - :rtype: ExecutableEvent: - """ - event_cls = FlowNodeClsFactory.get_node_cls(code) - event = event_cls(id=None) - return ExecutableEndEventWrapper(event) - - def get_compute_variable(self, code: str, key: str, value: Variable, additional_data: dict) -> Variable: - """ - 根据代号获取变量实例 - - :param code: 唯一代号 - :type code: str - :param key: 变量 key - :type key: str - :param value: 变量配置 - :type value: Any - :param additional_data: 额外数据字典 - :type additional_data: dict - :return: 变量实例 - :rtype: Variable - """ - var_cls = VariableLibrary.get_var_class(code=code) - return VariableWrapper(original_value=value, var_cls=var_cls, 
additional_data=additional_data) diff --git a/lib/pipeline/eri/imp/process.py b/lib/pipeline/eri/imp/process.py deleted file mode 100644 index 8a61062..0000000 --- a/lib/pipeline/eri/imp/process.py +++ /dev/null @@ -1,347 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import json -from typing import List, Optional, Dict - -from django.utils import timezone -from django.db.models import F - -from bamboo_engine import metrics -from bamboo_engine.eri import ProcessInfo, SuspendedProcessInfo, DispatchProcess - -from pipeline.eri.models import Process - - -class ProcessMixin: - def beat(self, process_id: int): - """ - 进程心跳 - - :param process_id: 进程 ID - :type process_id: int - """ - Process.objects.filter(id=process_id).update(last_heartbeat=timezone.now()) - - def wake_up(self, process_id: int): - """ - 将当前进程标记为唤醒状态 - - :param process_id: 进程 ID - :type process_id: int - """ - Process.objects.filter(id=process_id).update(asleep=False) - - def sleep(self, process_id: int): - """ - 将当前进程标记为睡眠状态 - - :param process_id: 进程 ID - :type process_id: int - """ - Process.objects.filter(id=process_id).update(asleep=True) - - def suspend(self, process_id: int, by: str): - """ - 将当前进程标记为阻塞状态 - - :param process_id: 进程 ID - :type process_id: int - :param by: 造成阻塞的节点信息 - :type by: str - """ - 
Process.objects.filter(id=process_id).update(suspended=True, suspended_by=by) - - def resume(self, process_id: int): - """ - 将进程标记为非阻塞状态 - - :param process_id: 进程 ID - :type process_id: int - """ - Process.objects.filter(id=process_id).update(suspended=False, suspended_by="") - - def batch_resume(self, process_id_list: List[int]): - """ - 批量将进程标记为非阻塞状态 - - :param process_id_list: 进程 ID 列表 - :type process_id_list: List[int] - """ - Process.objects.filter(id__in=process_id_list).update(suspended=False, suspended_by="") - - def die(self, process_id: int): - """ - 将当前进程标记为非存活状态 - - :param process_id: 进程 ID - :type process_id: int - """ - Process.objects.filter(id=process_id).update(dead=True) - - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_PROCESS_READ_TIME) - def get_process_info(self, process_id: int) -> ProcessInfo: - """ - 获取某个进程的基本信息 - - :param process_id: 进程 ID - :type process_id: int - :return: 进程基本信息 - :rtype: ProcessInfo - """ - qs = Process.objects.filter(id=process_id).only( - "id", "destination_id", "root_pipeline_id", "pipeline_stack", "parent_id" - ) - - if len(qs) != 1: - raise Process.DoesNotExist("Process with id({}) does not exist".format(process_id)) - - process = qs[0] - return ProcessInfo( - process_id=process.id, - destination_id=process.destination_id, - root_pipeline_id=process.root_pipeline_id, - pipeline_stack=json.loads(process.pipeline_stack), - parent_id=process.parent_id, - ) - - def kill(self, process_id: int): - """ - 强制结束某个进程正在进行的活动,并将其标志为睡眠状态 - - :param process_id: 进程 ID - :type process_id: int - """ - Process.objects.filter(id=process_id).update(asleep=True) - - def get_suspended_process_info(self, suspended_by: str) -> List[SuspendedProcessInfo]: - """ - 获取由于 pipeline 暂停而被暂停执行的进程信息 - - : param suspended_by: 进程 ID - : type suspended_by: str - : return: 暂停的进程信息 - : rtype: SuspendedProcessInfo - """ - qs = Process.objects.filter(suspended_by=suspended_by).only( - "id", "current_node_id", "root_pipeline_id", "pipeline_stack" - ) - - 
return [ - SuspendedProcessInfo( - process_id=p.id, - current_node=p.current_node_id, - root_pipeline_id=p.root_pipeline_id, - pipeline_stack=json.loads(p.pipeline_stack), - ) - for p in qs - ] - - def get_sleep_process_info_with_current_node_id(self, node_id: str) -> Optional[ProcessInfo]: - """ - 获取由于处于睡眠状态且当前节点 ID 为 node_id 的进程 ID - - : param node_id: 节点 ID - : type node_id: str - : return: 进程 ID - : rtype: str - """ - qs = Process.objects.filter(asleep=True, current_node_id=node_id).only( - "id", "destination_id", "root_pipeline_id", "pipeline_stack", "parent_id" - ) - - if len(qs) == 0: - return None - - if len(qs) != 1: - raise ValueError("found multiple sleep process({}) with current_node_id({})".format(qs, node_id)) - - return ProcessInfo( - process_id=qs[0].id, - destination_id=qs[0].destination_id, - root_pipeline_id=qs[0].root_pipeline_id, - pipeline_stack=json.loads(qs[0].pipeline_stack), - parent_id=qs[0].parent_id, - ) - - def get_process_id_with_current_node_id(self, node_id: str) -> Optional[str]: - """ - 获取当前节点 ID 为 node_id 且存活的进程 ID - - : param node_id: 节点 ID - : type node_id: str - : return: 进程 ID - : rtype: str - """ - qs = Process.objects.filter(dead=False, current_node_id=node_id).only("id") - - if len(qs) == 0: - return None - - if len(qs) != 1: - raise ValueError("found multiple process({}) with current_node_id({})".format(qs, node_id)) - - return qs[0].id - - def set_current_node(self, process_id: int, node_id: str): - """ - 将进程当前处理节点标记为 node - - :param process_id: 进程 ID - :type process_id: int - :param node_id: 节点 ID - :type node_id: str - """ - Process.objects.filter(id=process_id).update(current_node_id=node_id) - - def child_process_finish(self, parent_id: int, process_id: int) -> bool: - """ - 标记某个进程的子进程执行完成,并返回是否能够唤醒父进程继续执行的标志位 - - :param parent_id: 父进程 ID - :type parent_id: int - :param process_id: 子进程 ID - :type process_id: int - :return: 是否能够唤醒父进程继续执行 - :rtype: bool - """ - Process.objects.filter(id=process_id).update(dead=True) - 
- Process.objects.filter(id=parent_id).update(ack_num=F("ack_num") + 1) - - # compare(where) and set(update) - row = Process.objects.filter(id=parent_id, ack_num=F("need_ack")).update(ack_num=0, need_ack=-1) - - return row != 0 - - def is_frozen(self, process_id: int) -> bool: - """ - 检测当前进程是否需要被冻结 - - :param process_id: 进程 ID - :type process_id: int - :return: 是否需要被冻结 - :rtype: bool - """ - return Process.objects.filter(id=process_id, frozen=True).exists() - - def freeze(self, process_id: int): - """ - 冻结当前进程 - - :param process_id: 进程 ID - :type process_id: int - """ - Process.objects.filter(id=process_id).update(frozen=True) - - def fork( - self, - parent_id: str, - root_pipeline_id: str, - pipeline_stack: List[str], - from_to: Dict[str, str], - ) -> List[DispatchProcess]: - """ - 根据当前进程 fork 出多个子进程 - - :param parent_id: 父进程 ID - :type parent_id: str - :param root_pipeline_id: 根流程 ID - :type root_pipeline_id: str - :param pipeline_stack: 子流程栈 - :type pipeline_stack: List[str] - :param from_to: 子进程的执行开始节点和目标节点 - :type from_to: Dict[str, str] - :return: 待调度进程信息列表 - :rtype: List[DispatchProcess] - """ - qs = Process.objects.filter(id=parent_id).only("priority", "queue") - stack_json = json.dumps(pipeline_stack) - - if not qs: - raise Process.DoesNotExist("Process with id({}) does not exist".format(parent_id)) - - children = [ - Process( - parent_id=parent_id, - asleep=True, - destination_id=destination, - current_node_id=current_node, - root_pipeline_id=root_pipeline_id, - pipeline_stack=stack_json, - priority=qs[0].priority, - queue=qs[0].queue, - ) - for current_node, destination in from_to.items() - ] - - Process.objects.bulk_create(children, batch_size=500) - - qs = Process.objects.filter(parent_id=parent_id, dead=False).only("id", "current_node_id") - - children_count = len(qs) - expect = len(from_to) - if children_count != expect: - raise ValueError( - "process({}) fork failed, children count({}) does not match expect({})".format( - parent_id, children_count, 
expect - ) - ) - - return [DispatchProcess(process_id=p.id, node_id=p.current_node_id) for p in qs] - - def join(self, process_id: int, children_id: List[str]): - """ - 让父进程等待子进程 - - :param process_id: 父进程 ID - :type process_id: int - :param children_id: 子进程 ID 列表 - :type children_id: List[str] - """ - Process.objects.filter(id=process_id).update(ack_num=0, need_ack=len(children_id)) - - def set_pipeline_stack(self, process_id: int, stack: List[str]): - """ - 设置进程的流程栈 - - :param process_id: 进程 ID - :type process_id: int - :param stack: 流程栈 - :type stack: List[str] - """ - Process.objects.filter(id=process_id).update(pipeline_stack=json.dumps(stack)) - - def get_process_info_with_root_pipeline(self, pipeline_id: str) -> List[ProcessInfo]: - """ - 根据根流程 ID 获取一批进程的信息 - - :param pipeline_id: 流程 ID - :type pipeline_id: str - :return: 进程基本信息 - :rtype: List[ProcessInfo] - """ - qs = Process.objects.filter(root_pipeline_id=pipeline_id).only( - "id", "destination_id", "root_pipeline_id", "pipeline_stack", "parent_id" - ) - - return [ - ProcessInfo( - process_id=process.id, - destination_id=process.destination_id, - root_pipeline_id=process.root_pipeline_id, - pipeline_stack=json.loads(process.pipeline_stack), - parent_id=process.parent_id, - ) - for process in qs - ] diff --git a/lib/pipeline/eri/imp/schedule.py b/lib/pipeline/eri/imp/schedule.py deleted file mode 100644 index af88230..0000000 --- a/lib/pipeline/eri/imp/schedule.py +++ /dev/null @@ -1,145 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.db.models import F - -from bamboo_engine import metrics -from bamboo_engine.eri import Schedule, ScheduleType - -from pipeline.eri.models import Schedule as DBSchedule - - -class ScheduleMixin: - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_SCHEDULE_WRITE_TIME) - def set_schedule(self, process_id: int, node_id: str, version: str, schedule_type: ScheduleType) -> Schedule: - """ - 设置 schedule 对象 - - :param process_id: 进程 ID - :type process_id: int - :param node_id: 节点 ID - :type node_id: str - :param version: 执行版本 - :type version: str - :param schedule_type: 调度类型 - :type schedule_type: ScheduleType - :return: 调度对象实例 - :rtype: Schedule - """ - schedule_model = DBSchedule.objects.create( - process_id=process_id, node_id=node_id, type=schedule_type.value, version=version - ) - return Schedule( - id=schedule_model.id, - type=schedule_type, - process_id=schedule_model.process_id, - node_id=schedule_model.node_id, - finished=schedule_model.finished, - expired=schedule_model.expired, - version=schedule_model.version, - times=schedule_model.schedule_times, - ) - - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_SCHEDULE_READ_TIME) - def get_schedule(self, schedule_id: str) -> Schedule: - """ - 获取 Schedule 对象 - - :param schedule_id: 调度实例 ID - :type schedule_id: str - :return: Schedule 对象实例 - :rtype: Schedule - """ - schedule_model = DBSchedule.objects.get(id=schedule_id) - - return Schedule( - id=schedule_model.id, - type=ScheduleType(schedule_model.type), - process_id=schedule_model.process_id, - node_id=schedule_model.node_id, - finished=schedule_model.finished, - 
expired=schedule_model.expired, - version=schedule_model.version, - times=schedule_model.schedule_times, - ) - - def get_schedule_with_node_and_version(self, node_id: str, version: str) -> Schedule: - """ - 通过节点 ID 和执行版本来获取 Scheudle 对象 - - :param node_id: 节点 ID - :type node_id: str - :param version: 执行版本 - :type version: str - :return: Schedule 对象 - :rtype: Schedule - """ - schedule_model = DBSchedule.objects.get(node_id=node_id, version=version) - - return Schedule( - id=schedule_model.id, - type=ScheduleType(schedule_model.type), - process_id=schedule_model.process_id, - node_id=schedule_model.node_id, - finished=schedule_model.finished, - expired=schedule_model.expired, - version=schedule_model.version, - times=schedule_model.schedule_times, - ) - - def apply_schedule_lock(self, schedule_id: str) -> bool: - """ - 获取 Schedule 对象的调度锁,返回是否成功获取锁 - - :param schedule_id: 调度实例 ID - :type schedule_id: str - :return: 是否成功获取锁 - :rtype: bool - """ - return DBSchedule.objects.filter(id=schedule_id, scheduling=False).update(scheduling=True) == 1 - - def release_schedule_lock(self, schedule_id: int): - """ - 释放指定 Schedule 的调度锁 - - :param schedule_id: Schedule ID - :type schedule_id: int - """ - DBSchedule.objects.filter(id=schedule_id, scheduling=True).update(scheduling=False) - - def expire_schedule(self, schedule_id: int): - """ - 将某个 Schedule 对象标记为已过期 - - :param schedule_id: 调度实例 ID - :type schedule_id: int - """ - DBSchedule.objects.filter(id=schedule_id).update(expired=True) - - def finish_schedule(self, schedule_id: int): - """ - 将某个 Schedule 对象标记为已完成 - - :param schedule_id: 调度实例 ID - :type schedule_id: int - """ - DBSchedule.objects.filter(id=schedule_id).update(finished=True) - - def add_schedule_times(self, schedule_id: int): - """ - 将某个 Schedule 对象的调度次数 +1 - - :param schedule_id: 调度实例 ID - :type schedule_id: int - """ - DBSchedule.objects.filter(id=schedule_id).update(schedule_times=F("schedule_times") + 1) diff --git a/lib/pipeline/eri/imp/serializer.py 
b/lib/pipeline/eri/imp/serializer.py deleted file mode 100644 index e5c51c6..0000000 --- a/lib/pipeline/eri/imp/serializer.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import json -import pickle -import codecs -from typing import Any - - -class SerializerMixin: - JSON_SERIALIZER = "json" - PICKLE_SERIALIZER = "pickle" - - def _deserialize(self, data: str, serializer: str) -> Any: - if serializer == self.JSON_SERIALIZER: - return json.loads(data) - elif serializer == self.PICKLE_SERIALIZER: - return pickle.loads(codecs.decode(data.encode(), "base64")) - else: - raise ValueError("unsupport serializer type: {}".format(serializer)) - - def _serialize(self, data: Any) -> (str, str): - try: - return json.dumps(data), self.JSON_SERIALIZER - except TypeError: - return codecs.encode(pickle.dumps(data), "base64").decode(), self.PICKLE_SERIALIZER diff --git a/lib/pipeline/eri/imp/service.py b/lib/pipeline/eri/imp/service.py deleted file mode 100644 index a474cff..0000000 --- a/lib/pipeline/eri/imp/service.py +++ /dev/null @@ -1,178 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. 
All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from typing import Optional - -from bamboo_engine.eri import Service as ServiceInterface -from bamboo_engine.eri import Schedule, ExecutionData, CallbackData, ScheduleType - -from pipeline.core.flow.activity import Service -from pipeline.core.data.base import DataObject -from pipeline.eri.log import get_logger - - -class ServiceWrapper(ServiceInterface): - def __init__(self, service: Service): - self.service = service - - def pre_execute(self, data: ExecutionData, root_pipeline_data: ExecutionData): - """ - execute 执行前执行的逻辑 - - :param data: 节点执行数据 - :type data: ExecutionData - :param root_pipeline_data: 根流程执行数据 - :type root_pipeline_data: ExecutionData - """ - pre_execute = getattr(self.service, "pre_execute", None) - if callable(pre_execute): - return pre_execute(DataObject(inputs=data.inputs, outputs=data.outputs)) - - def execute(self, data: ExecutionData, root_pipeline_data: ExecutionData) -> bool: - """ - execute 逻辑 - - :param data: 节点执行数据 - :type data: ExecutionData - :param root_pipeline_data: 根流程执行数据 - :type root_pipeline_data: ExecutionData - :return: 是否执行成功 - :rtype: bool - """ - data_obj = DataObject(inputs=data.inputs, outputs=data.outputs) - parent_data_obj = DataObject(inputs=root_pipeline_data.inputs, outputs=root_pipeline_data.outputs) - - try: - execute_res = self.service.execute(data_obj, parent_data_obj) - finally: - # sync data object modification to execution data - data.inputs = data_obj.inputs - data.outputs = 
data_obj.outputs - - if execute_res is None: - execute_res = True - - return execute_res - - def schedule( - self, - schedule: Schedule, - data: ExecutionData, - root_pipeline_data: ExecutionData, - callback_data: Optional[CallbackData] = None, - ) -> bool: - """ - schedule 逻辑 - - :param schedule: Schedule 对象 - :type schedule: Schedule - :param data: 节点执行数据 - :type data: ExecutionData - :param root_pipeline_data: 根流程执行数据 - :type root_pipeline_data: ExecutionData - :param callback_data: 回调数据, defaults to None - :type callback_data: Optional[CallbackData], optional - :return: [description] - :rtype: bool - """ - data_obj = DataObject(inputs=data.inputs, outputs=data.outputs) - parent_data_obj = DataObject(inputs=root_pipeline_data.inputs, outputs=root_pipeline_data.outputs) - - try: - schedule_res = self.service.schedule( - data_obj, parent_data_obj, callback_data.data if callback_data else None - ) - except Exception as e: - raise e - finally: - # sync data object modification to execution data - data.inputs = data_obj.inputs - data.outputs = data_obj.outputs - - if schedule_res is None: - schedule_res = True - - return schedule_res - - def need_schedule(self) -> bool: - """ - 服务是否需要调度 - - :return: 是否需要调度 - :rtype: bool - """ - return self.service.need_schedule() - - def schedule_type(self) -> Optional[ScheduleType]: - """ - 服务调度类型 - - :return: 调度类型 - :rtype: Optional[ScheduleType] - """ - if not self.service.need_schedule(): - return None - - if self.service.interval: - return ScheduleType.POLL - - if not self.service.multi_callback_enabled(): - return ScheduleType.CALLBACK - - return ScheduleType.MULTIPLE_CALLBACK - - def is_schedule_done(self) -> bool: - """ - 调度是否完成 - - :return: 调度是否完成 - :rtype: bool - """ - return self.service.is_schedule_finished() - - def schedule_after( - self, schedule: Optional[Schedule], data: ExecutionData, root_pipeline_data: ExecutionData - ) -> int: - """ - 计算下一次调度间隔 - - :param schedule: 调度对象,未进行调度时传入为空 - :type schedule: 
Optional[Schedule] - :param data: 节点执行数据 - :type data: ExecutionData - :param root_pipeline_data: 根流程执行数据 - :type root_pipeline_data: ExecutionData - :return: 调度间隔,单位为秒 - :rtype: int - """ - if self.service.interval is None: - return -1 - - if schedule is None: - return self.service.interval.next() - - # count will add in next, so minus 1 at here - self.service.interval.count = schedule.times - 1 - - return self.service.interval.next() - - def setup_runtime_attributes(self, **attrs): - """ - 装载运行时属性 - - :param attrs: 运行时属性 - :type attrs: Dict[str, Any] - """ - - attrs["logger"] = get_logger(node_id=attrs["id"], loop=attrs["loop"], version=attrs["version"]) - self.service.setup_runtime_attrs(**attrs) diff --git a/lib/pipeline/eri/imp/state.py b/lib/pipeline/eri/imp/state.py deleted file mode 100644 index 953af62..0000000 --- a/lib/pipeline/eri/imp/state.py +++ /dev/null @@ -1,333 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from typing import Optional, Dict, List - -from django.utils import timezone -from bamboo_engine.eri import State -from bamboo_engine import states, metrics -from bamboo_engine.utils.string import unique_id -from bamboo_engine.exceptions import StateVersionNotMatchError - -from pipeline.eri.signals import post_set_state -from pipeline.eri.models import State as DBState - - -class StateMixin: - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_STATE_READ_TIME) - def get_state(self, node_id: str) -> State: - """ - 获取某个节点的状态对象 - - : param node_id: 节点 ID - : type node_id: str - : return: State 实例 - : rtype: State - """ - state = DBState.objects.get(node_id=node_id) - - return State( - node_id=state.node_id, - root_id=state.root_id, - parent_id=state.parent_id, - name=state.name, - version=state.version, - loop=state.loop, - inner_loop=state.inner_loop, - retry=state.retry, - skip=state.skip, - error_ignored=state.error_ignored, - created_time=state.created_time, - started_time=state.started_time, - archived_time=state.archived_time, - ) - - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_STATE_READ_TIME) - def get_state_or_none(self, node_id: str) -> Optional[State]: - """ - 获取某个节点的状态对象,如果不存在则返回 None - - : param node_id: 节点 ID - : type node_id: str - : return: State 实例 - : rtype: State - """ - try: - return self.get_state(node_id) - except DBState.DoesNotExist: - return None - - def get_state_by_root(self, root_id: str) -> List[State]: - """ - 根据根节点 ID 获取一批节点状态 - - :param root_id: 根节点 ID - :type root_id: str - :return: 节点状态列表 - :rtype: List[State] - """ - qs = DBState.objects.filter(root_id=root_id) - - return [ - State( - node_id=state.node_id, - root_id=state.root_id, - parent_id=state.parent_id, - name=state.name, - version=state.version, - loop=state.loop, - inner_loop=state.inner_loop, - retry=state.retry, - skip=state.skip, - error_ignored=state.error_ignored, - created_time=state.created_time, - started_time=state.started_time, - 
archived_time=state.archived_time, - ) - for state in qs - ] - - def get_state_by_parent(self, parent_id: str) -> List[State]: - """ - 根据父节点 ID 获取一批节点状态 - - :param parent_id: 父节点 ID - :type parent_id: str - :return: 节点状态列表 - :rtype: List[State] - """ - qs = DBState.objects.filter(parent_id=parent_id) - - return [ - State( - node_id=state.node_id, - root_id=state.root_id, - parent_id=state.parent_id, - name=state.name, - version=state.version, - loop=state.loop, - inner_loop=state.inner_loop, - retry=state.retry, - skip=state.skip, - error_ignored=state.error_ignored, - created_time=state.created_time, - started_time=state.started_time, - archived_time=state.archived_time, - ) - for state in qs - ] - - def batch_get_state_name(self, node_id_list: List[str]) -> Dict[str, str]: - """ - 批量获取一批节点的状态 - - :param node_id_list: 节点 ID 列表 - :type node_id_list: List[str] - :return: 节点ID -> 状态名称 - :rtype: Dict[str, str] - """ - qs = DBState.objects.filter(node_id__in=node_id_list).only("node_id", "name") - return {state.node_id: state.name for state in qs} - - def has_state(self, node_id: str) -> bool: - """ - 是否存在某个节点的的状态 - - :param node_id: 节点 ID - :type node_id: str - :return: 该节点状态是否存在 - :rtype: bool - """ - return DBState.objects.filter(node_id=node_id).exists() - - def reset_state_inner_loop(self, node_id: str) -> int: - """ - 设置节点的当前流程重入次数 - - :param node_id: 节点 ID - :type node_id: str - :return: 更新状态行数 - :rtype: int - """ - return DBState.objects.filter(node_id=node_id).update(inner_loop=0) - - def reset_children_state_inner_loop(self, node_id: str) -> int: - """ - 批量设置子流程节点的所有子节点inner_loop次数 - - :param node_id: 子流程节点 ID - :type node_id: str - :return: 更新状态行数 - :rtype: int - """ - return DBState.objects.filter(parent_id=node_id).update(inner_loop=0) - - def set_state_root_and_parent(self, node_id: str, root_id: str, parent_id: str): - """ - 设置节点的根流程和父流程 ID - - :param node_id: 节点 ID - :type node_id: str - :param root_id: 根流程 ID - :type root_id: str - :param parent_id: 
父流程 ID - :type parent_id: str - """ - DBState.objects.filter(node_id=node_id).update(root_id=root_id, parent_id=parent_id) - - @metrics.setup_histogram(metrics.ENGINE_RUNTIME_STATE_WRITE_TIME) - def set_state( - self, - node_id: str, - to_state: str, - version: str = None, - loop: int = -1, - inner_loop: int = -1, - root_id: Optional[str] = None, - parent_id: Optional[str] = None, - is_retry: bool = False, - is_skip: bool = False, - reset_retry: bool = False, - reset_skip: bool = False, - error_ignored: bool = False, - reset_error_ignored: bool = False, - refresh_version: bool = False, - clear_started_time: bool = False, - set_started_time: bool = False, - clear_archived_time: bool = False, - set_archive_time: bool = False, - ) -> str: - """ - 设置节点的状态,如果节点存在,进行状态转换时需要满足状态转换状态机 - - :param node_id: 节点 ID - :type node_id: str - :param to_state: 目标状态 - :type to_state: str - :param loop: 循环次数, 为 -1 时表示不设置 - :type loop: int, optional - :param inner_loop: 当前流程循环次数, 为 -1 时表示不设置 - :type inner_loop: int, optional - :param version: 目标状态版本,为空时表示不做版本校验 - :type version: Optional[str], optional - :param root_id: 根节点 ID,为空时表示不设置 - :type root_id: Optional[str], optional - :param parent_id: 父节点 ID,为空时表示不设置 - :type parent_id: Optional[str], optional - :param is_retry: 是否增加重试次数 - :type is_retry: bool, optional - :param is_skip: 是否将跳过设置为 True - :type is_skip: bool, optional - :param reset_retry: 是否重置重试次数 - :type reset_retry: bool, optional - :param reset_skip: 是否重置跳过标志 - :type reset_skip: bool, optional - :param error_ignored: 是否为忽略错误跳过 - :type error_ignored: bool, optional - :param reset_error_ignored: 是否重置忽略错误标志 - :type reset_error_ignored: bool, optional - :param refresh_version: 是否刷新版本号 - :type refresh_version: bool, optional - :param clear_started_time: 是否清空开始时间 - :type clear_started_time: bool, optional - :param set_started_time: 是否设置开始时间 - :type set_started_time: bool, optional - :param clear_archived_time: 是否清空归档时间 - :type clear_archived_time: bool, optional - :param 
set_archive_time: 是否设置归档时间 - :type set_archive_time: bool, optional - :return: 该节点最新版本 - :rtype: str - """ - state = self.get_state_or_none(node_id) - ret_version = "" - - if state and version and state.version != version: - raise StateVersionNotMatchError("state version({}) not match {}".format(state.version, version)) - - fields = {} - - if loop != -1: - fields["loop"] = loop - - if inner_loop != -1: - fields["inner_loop"] = inner_loop - - if root_id: - fields["root_id"] = root_id - - if parent_id: - fields["parent_id"] = parent_id - - if is_retry and state: - fields["retry"] = state.retry + 1 - - if is_skip and state: - fields["skip"] = True - - if reset_retry and state: - fields["retry"] = 0 - - if reset_skip and state: - fields["skip"] = False - - if reset_error_ignored and state: - fields["error_ignored"] = False - - if error_ignored and state: - fields["error_ignored"] = True - - if refresh_version or state is None: - fields["version"] = unique_id("v") - - if clear_started_time and state: - fields["started_time"] = None - - if set_started_time: - fields["started_time"] = timezone.now() - - if clear_archived_time and state: - fields["archived_time"] = timezone.now() - - if set_archive_time: - fields["archived_time"] = timezone.now() - - if state: - if not states.can_transit(from_state=state.name, to_state=to_state): - raise RuntimeError( - "can't not transit node({}) state from {} to {}".format(node_id, state.name, to_state) - ) - - filters = {"node_id": node_id} - if version: - filters["version"] = version - - rows = DBState.objects.filter(**filters).update(name=to_state, **fields) - - if rows != 1: - raise StateVersionNotMatchError("state with version({}) not exist".format(version)) - ret_version = fields.get("version", state.version) - else: - state = DBState.objects.create(node_id=node_id, name=to_state, **fields) - ret_version = fields["version"] - - post_set_state.send( - sender=DBState, - node_id=node_id, - to_state=to_state, - version=ret_version, - 
root_id=state.root_id, - parent_id=state.parent_id, - loop=loop, - ) - return ret_version diff --git a/lib/pipeline/eri/imp/task.py b/lib/pipeline/eri/imp/task.py deleted file mode 100644 index 9dfd2b4..0000000 --- a/lib/pipeline/eri/imp/task.py +++ /dev/null @@ -1,175 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from typing import Optional - -from celery import current_app - -from pipeline.eri.celery.queues import QueueResolver - -from pipeline.eri.models import Process - - -def _retry_once(action: callable): - try: - action() - except Exception: - try: - action() - except Exception as e: - raise e - - -class TaskMixin: - def _get_task_route_params(self, task_name: str, process_id: int): - process = Process.objects.filter(id=process_id).only("priority", "queue").first() - resolver = QueueResolver(process.queue) - queue, routing_key = resolver.resolve_task_queue_and_routing_key(task_name) - return { - "queue": queue, - "priority": process.priority, - "routing_key": routing_key, - } - - def execute(self, process_id: int, node_id: str, root_pipeline_id: str, parent_pipeline_id: str): - """ - 派发执行任务,执行任务被拉起执行时应该调用 Engine 实例的 execute 方法 - - :param process_id: 进程 ID - :type process_id: int - :param node_id: 节点 ID - :type node_id: str - """ - task_name = "pipeline.eri.celery.tasks.execute" - route_params = self._get_task_route_params(task_name, process_id) - - def action(): - current_app.tasks[task_name].apply_async( - kwargs={ - "process_id": process_id, - "node_id": node_id, - "root_pipeline_id": root_pipeline_id, - "parent_pipeline_id": parent_pipeline_id, - }, - **route_params, - ) - - _retry_once(action=action) - - def schedule( - self, - process_id: int, - node_id: str, - schedule_id: str, - callback_data_id: Optional[int] = None, - ): - """ - 派发调度任务,调度任务被拉起执行时应该调用 Engine 实例的 schedule 方法 - - :param process_id: 进程 ID - :type process_id: int - :param node_id: 节点 ID - :type node_id: str - :param schedule_id: 调度 ID - :type schedule_id: str - """ - task_name = "pipeline.eri.celery.tasks.schedule" - route_params = self._get_task_route_params(task_name, process_id) - - def action(): - current_app.tasks[task_name].apply_async( - kwargs={ - "process_id": process_id, - "node_id": node_id, - "schedule_id": schedule_id, - "callback_data_id": callback_data_id, - }, - 
**route_params, - ) - - _retry_once(action=action) - - def set_next_schedule( - self, - process_id: int, - node_id: str, - schedule_id: str, - schedule_after: int, - callback_data_id: Optional[int] = None, - ): - """ - 设置下次调度时间,调度倒数归零后应该执行 Engine 实例的 schedule 方法 - - :param process_id: 进程 ID - :type process_id: int - :param node_id: 节点 ID - :type node_id: str - :param schedule_id: 调度 ID - :type schedule_id: str - :param schedule_after: 调度倒数 - :type schedule_after: int - """ - task_name = "pipeline.eri.celery.tasks.schedule" - route_params = self._get_task_route_params(task_name, process_id) - - def action(): - current_app.tasks[task_name].apply_async( - kwargs={ - "process_id": process_id, - "node_id": node_id, - "schedule_id": schedule_id, - "callback_data_id": callback_data_id, - }, - countdown=schedule_after, - **route_params, - ) - - _retry_once(action=action) - - def start_timeout_monitor(self, process_id: int, node_id: str, version: str, timeout: int): - """ - 开始对某个节点执行的超时监控,若超时时间归零后节点未进入归档状态,则强制失败该节点 - - :param process_id: 进程 ID - :type process_id: int - :param node_id: 节点 ID - :type node_id: str - :param version: 执行版本 - :type version: str - :param timeout: 超时时间,单位为秒 - :type timeout: int - """ - task_name = "pipeline.eri.celery.tasks.timeout_check" - route_params = self._get_task_route_params(task_name, process_id) - - current_app.tasks[task_name].apply_async( - kwargs={"process_id": process_id, "node_id": node_id, "version": version}, countdown=timeout, **route_params - ) - - def stop_timeout_monitor( - self, - process_id: int, - node_id: str, - version: str, - ): - """ - 停止对某个节点的超时监控 - - :param process_id: 进程 ID - :type process_id: int - :param node_id: 节点 ID - :type node_id: str - :param version: 执行版本 - :type version: str - """ - return diff --git a/lib/pipeline/eri/imp/variable.py b/lib/pipeline/eri/imp/variable.py deleted file mode 100644 index 5022a07..0000000 --- a/lib/pipeline/eri/imp/variable.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- 
-""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import inspect -from typing import Any, Type - -from bamboo_engine.eri import Variable as VariableInterface -from pipeline.core.data.var import Variable - - -class VariableProxy: - def __init__(self, original_value: Variable, var_cls: Type, pipeline_data: dict): - self.get_value = getattr(var_cls, "get_value") - self.original_value = original_value - self.pipeline_data = pipeline_data - for name, value in inspect.getmembers(var_cls): - if not name.startswith("__") and not hasattr(self, name) and inspect.isfunction(value): - setattr(self, name, value) - - def get(self) -> Any: - self.value = self.original_value.get() - return self.get_value(self) - - -class VariableWrapper(VariableInterface): - def __init__(self, original_value: Variable, var_cls: Type, additional_data: dict): - self.var = VariableProxy(original_value=original_value, var_cls=var_cls, pipeline_data=additional_data) - - def get(self) -> Any: - return self.var.get() diff --git a/lib/pipeline/eri/log.py b/lib/pipeline/eri/log.py deleted file mode 100644 index 8de8fe9..0000000 --- a/lib/pipeline/eri/log.py +++ /dev/null @@ -1,59 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. 
-Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging -from logging import LogRecord, LoggerAdapter - -from django.core.exceptions import AppRegistryNotReady -from bamboo_engine import local - -logger = logging.getLogger("pipeline.eri.log") - - -def get_logger(node_id: str, loop: int, version: str): - return LoggerAdapter(logger=logger, extra={"node_id": node_id, "loop": loop, "version": version}) - - -class ERINodeLogHandler(logging.Handler): - def emit(self, record: LogRecord): - from pipeline.eri.models import LogEntry - - LogEntry.objects.create( - node_id=record.node_id, - loop=record.loop, - version=record.version, - logger_name=record.name, - level_name=record.levelname, - message=self.format(record), - ) - - -class EngineContextLogHandler(logging.Handler): - def emit(self, record): - try: - from pipeline.eri.models import LogEntry - except AppRegistryNotReady: - return - - node_info = local.get_node_info() - if not node_info: - return - - LogEntry.objects.create( - node_id=node_info.node_id, - version=node_info.version, - loop=node_info.loop, - logger_name=record.name, - level_name=record.levelname, - message=self.format(record), - ) diff --git a/lib/pipeline/eri/migrations/0001_initial.py b/lib/pipeline/eri/migrations/0001_initial.py deleted file mode 100644 index 9b55e15..0000000 --- a/lib/pipeline/eri/migrations/0001_initial.py +++ /dev/null @@ -1,160 +0,0 @@ -# Generated by Django 2.2.19 on 2021-03-09 03:41 - 
-from django.db import migrations, models - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [] - - operations = [ - migrations.CreateModel( - name="CallbackData", - fields=[ - ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), - ("node_id", models.CharField(max_length=33, verbose_name="节点 ID")), - ("version", models.CharField(max_length=33, verbose_name="状态版本")), - ("data", models.TextField(verbose_name="回调数据")), - ], - ), - migrations.CreateModel( - name="ContextOutputs", - fields=[ - ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), - ("pipeline_id", models.CharField(max_length=33, unique=True, verbose_name="流程 ID")), - ("outputs", models.TextField(verbose_name="输出配置")), - ], - ), - migrations.CreateModel( - name="Data", - fields=[ - ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), - ("node_id", models.CharField(db_index=True, max_length=33, unique=True, verbose_name="节点 ID")), - ("inputs", models.TextField(verbose_name="原始输入数据")), - ("outputs", models.TextField(verbose_name="原始输出数据")), - ], - ), - migrations.CreateModel( - name="ExecutionData", - fields=[ - ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), - ("node_id", models.CharField(db_index=True, max_length=33, unique=True, verbose_name="节点 ID")), - ("inputs_serializer", models.CharField(max_length=32, verbose_name="输入序列化器")), - ("outputs_serializer", models.CharField(max_length=32, verbose_name="输出序列化器")), - ("inputs", models.TextField(verbose_name="节点执行输入数据")), - ("outputs", models.TextField(verbose_name="节点执行输出数据")), - ], - ), - migrations.CreateModel( - name="Node", - fields=[ - ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), - ("node_id", models.CharField(db_index=True, max_length=33, verbose_name="节点 ID")), - ("detail", models.TextField(verbose_name="节点详情")), - ], - ), - migrations.CreateModel( - 
name="Process", - fields=[ - ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), - ("parent_id", models.BigIntegerField(db_index=True, default=-1, verbose_name="父进程 ID")), - ("ack_num", models.IntegerField(default=0, verbose_name="收到子进程 ACK 数量")), - ("need_ack", models.IntegerField(default=-1, verbose_name="需要收到的子进程 ACK 数量")), - ("asleep", models.BooleanField(default=True, verbose_name="是否处于休眠状态")), - ("suspended", models.BooleanField(default=False, verbose_name="是否处于暂停状态")), - ("frozen", models.BooleanField(default=False, verbose_name="是否处于冻结状态")), - ("dead", models.BooleanField(default=False, verbose_name="是否已经死亡")), - ("last_heartbeat", models.DateTimeField(auto_now_add=True, db_index=True, verbose_name="上次心跳时间")), - ("destination_id", models.CharField(default="", max_length=33, verbose_name="执行终点 ID")), - ("current_node_id", models.CharField(db_index=True, default="", max_length=33, verbose_name="当前节点 ID")), - ("root_pipeline_id", models.CharField(max_length=33, verbose_name="根流程 ID")), - ( - "suspended_by", - models.CharField(db_index=True, default="", max_length=33, verbose_name="导致进程暂停的节点 ID"), - ), - ("priority", models.IntegerField(verbose_name="优先级")), - ("queue", models.CharField(default="", max_length=128, verbose_name="所属队列")), - ("pipeline_stack", models.TextField(default="[]", verbose_name="流程栈")), - ], - ), - migrations.CreateModel( - name="State", - fields=[ - ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), - ("node_id", models.CharField(max_length=33, unique=True, verbose_name="节点 ID")), - ("root_id", models.CharField(db_index=True, default="", max_length=33, verbose_name="根节点 ID")), - ("parent_id", models.CharField(db_index=True, default="", max_length=33, verbose_name="父节点 ID")), - ("name", models.CharField(max_length=64, verbose_name="状态名")), - ("version", models.CharField(max_length=33, verbose_name="状态版本")), - ("loop", models.IntegerField(default=1, verbose_name="循环次数")), - 
("retry", models.IntegerField(default=0, verbose_name="重试次数")), - ("skip", models.BooleanField(default=False, verbose_name="是否跳过")), - ("error_ignored", models.BooleanField(default=False, verbose_name="是否出错后自动忽略")), - ("created_time", models.DateTimeField(auto_now_add=True, verbose_name="创建时间")), - ("started_time", models.DateTimeField(null=True, verbose_name="开始时间")), - ("archived_time", models.DateTimeField(null=True, verbose_name="归档时间")), - ], - ), - migrations.CreateModel( - name="Schedule", - fields=[ - ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), - ("type", models.IntegerField(verbose_name="调度类型")), - ("process_id", models.BigIntegerField(default=-1, verbose_name="进程 ID")), - ("node_id", models.CharField(max_length=33, verbose_name="节点 ID")), - ("finished", models.BooleanField(default=False, verbose_name="是否已完成")), - ("expired", models.BooleanField(default=False, verbose_name="是否已过期")), - ("scheduling", models.BooleanField(default=False, verbose_name="是否正在调度")), - ("version", models.CharField(max_length=33, verbose_name="状态版本")), - ("schedule_times", models.IntegerField(default=0, verbose_name="被调度次数")), - ], - options={"unique_together": {("node_id", "version")},}, - ), - migrations.CreateModel( - name="LogEntry", - fields=[ - ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), - ("node_id", models.CharField(max_length=33, verbose_name="节点 ID")), - ("loop", models.IntegerField(default=1, verbose_name="循环次数")), - ("logger_name", models.CharField(max_length=128, verbose_name="logger 名称")), - ("level_name", models.CharField(max_length=32, verbose_name="日志等级")), - ("message", models.TextField(null=True, verbose_name="日志内容")), - ("logged_at", models.DateTimeField(auto_now_add=True, db_index=True, verbose_name="输出时间")), - ], - options={"index_together": {("node_id", "loop")},}, - ), - migrations.CreateModel( - name="ExecutionHistory", - fields=[ - ("id", models.BigAutoField(primary_key=True, 
serialize=False, verbose_name="ID")), - ("node_id", models.CharField(max_length=33, verbose_name="节点 ID")), - ("loop", models.IntegerField(default=1, verbose_name="循环次数")), - ("retry", models.IntegerField(default=0, verbose_name="重试次数")), - ("skip", models.BooleanField(default=False, verbose_name="是否跳过")), - ("version", models.CharField(max_length=33, verbose_name="状态版本")), - ("started_time", models.DateTimeField(verbose_name="开始时间")), - ("archived_time", models.DateTimeField(verbose_name="归档时间")), - ("inputs_serializer", models.CharField(max_length=32, verbose_name="输入序列化器")), - ("outputs_serializer", models.CharField(max_length=32, verbose_name="输出序列化器")), - ("inputs", models.TextField(verbose_name="节点执行输入数据")), - ("outputs", models.TextField(verbose_name="节点执行输出数据")), - ], - options={"index_together": {("node_id", "loop")},}, - ), - migrations.CreateModel( - name="ContextValue", - fields=[ - ("id", models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID")), - ("pipeline_id", models.CharField(max_length=33, verbose_name="流程 ID")), - ("key", models.CharField(max_length=128, verbose_name="变量 key")), - ("type", models.IntegerField(verbose_name="变量类型")), - ("serializer", models.CharField(max_length=32, verbose_name="序列化器")), - ("code", models.CharField(default="", max_length=128, verbose_name="计算型变量类型唯一标志")), - ("value", models.TextField(verbose_name="变量值")), - ("references", models.TextField(verbose_name="所有对其他变量直接或间接的引用")), - ], - options={"unique_together": {("pipeline_id", "key")},}, - ), - ] diff --git a/lib/pipeline/eri/migrations/0002_auto_20210322_0233.py b/lib/pipeline/eri/migrations/0002_auto_20210322_0233.py deleted file mode 100644 index 73e47cb..0000000 --- a/lib/pipeline/eri/migrations/0002_auto_20210322_0233.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 2.2.19 on 2021-03-22 02:33 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("eri", "0001_initial"), - ] - - 
operations = [ - migrations.AlterField( - model_name="process", - name="root_pipeline_id", - field=models.CharField(db_index=True, max_length=33, verbose_name="根流程 ID"), - ), - ] diff --git a/lib/pipeline/eri/migrations/0003_logentry_version.py b/lib/pipeline/eri/migrations/0003_logentry_version.py deleted file mode 100644 index aaa18f6..0000000 --- a/lib/pipeline/eri/migrations/0003_logentry_version.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 2.2.19 on 2021-03-29 12:15 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("eri", "0002_auto_20210322_0233"), - ] - - operations = [ - migrations.AddField( - model_name="logentry", - name="version", - field=models.CharField(default="", max_length=33, verbose_name="状态版本"), - ), - ] diff --git a/lib/pipeline/eri/migrations/0004_state_inner_loop_.py b/lib/pipeline/eri/migrations/0004_state_inner_loop_.py deleted file mode 100644 index 846ca99..0000000 --- a/lib/pipeline/eri/migrations/0004_state_inner_loop_.py +++ /dev/null @@ -1,16 +0,0 @@ -# Generated by Django 2.2.16 on 2021-07-12 12:27 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("eri", "0003_logentry_version"), - ] - - operations = [ - migrations.AddField( - model_name="state", name="inner_loop", field=models.IntegerField(default=1, verbose_name="子流程内部循环次数"), - ), - ] diff --git a/lib/pipeline/eri/migrations/__init__.py b/lib/pipeline/eri/migrations/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/lib/pipeline/eri/models.py b/lib/pipeline/eri/models.py deleted file mode 100644 index 5f53b7f..0000000 --- a/lib/pipeline/eri/models.py +++ /dev/null @@ -1,147 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.db import models -from django.utils.translation import ugettext_lazy as _ - - -class Process(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - parent_id = models.BigIntegerField(_("父进程 ID"), default=-1, db_index=True) - ack_num = models.IntegerField(_("收到子进程 ACK 数量"), default=0) - need_ack = models.IntegerField(_("需要收到的子进程 ACK 数量"), default=-1) - asleep = models.BooleanField(_("是否处于休眠状态"), default=True) - suspended = models.BooleanField(_("是否处于暂停状态"), default=False) - frozen = models.BooleanField(_("是否处于冻结状态"), default=False) - dead = models.BooleanField(_("是否已经死亡"), default=False) - last_heartbeat = models.DateTimeField(_("上次心跳时间"), auto_now_add=True, db_index=True) - destination_id = models.CharField(_("执行终点 ID"), default="", max_length=33) - current_node_id = models.CharField(_("当前节点 ID"), default="", max_length=33, db_index=True) - root_pipeline_id = models.CharField(_("根流程 ID"), null=False, max_length=33, db_index=True) - suspended_by = models.CharField(_("导致进程暂停的节点 ID"), default="", max_length=33, db_index=True) - priority = models.IntegerField(_("优先级")) - queue = models.CharField(_("所属队列"), default="", max_length=128) - pipeline_stack = models.TextField(_("流程栈"), default="[]", null=False) - - -class Node(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - node_id = models.CharField(_("节点 ID"), null=False, max_length=33, db_index=True) - detail = models.TextField(_("节点详情"), null=False) - - -class 
State(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - node_id = models.CharField(_("节点 ID"), null=False, max_length=33, unique=True) - root_id = models.CharField(_("根节点 ID"), null=False, default="", max_length=33, db_index=True) - parent_id = models.CharField(_("父节点 ID"), null=False, default="", max_length=33, db_index=True) - name = models.CharField(_("状态名"), null=False, max_length=64) - version = models.CharField(_("状态版本"), null=False, max_length=33) - loop = models.IntegerField(_("循环次数"), default=1) - inner_loop = models.IntegerField(_("子流程内部循环次数"), default=1) - retry = models.IntegerField(_("重试次数"), default=0) - skip = models.BooleanField(_("是否跳过"), default=False) - error_ignored = models.BooleanField(_("是否出错后自动忽略"), default=False) - created_time = models.DateTimeField(_("创建时间"), auto_now_add=True) - started_time = models.DateTimeField(_("开始时间"), null=True) - archived_time = models.DateTimeField(_("归档时间"), null=True) - - -class Schedule(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - type = models.IntegerField(_("调度类型")) - process_id = models.BigIntegerField(_("进程 ID"), default=-1) - node_id = models.CharField(_("节点 ID"), null=False, max_length=33) - finished = models.BooleanField(_("是否已完成"), default=False) - expired = models.BooleanField(_("是否已过期"), default=False) - scheduling = models.BooleanField(_("是否正在调度"), default=False) - version = models.CharField(_("状态版本"), null=False, max_length=33) - schedule_times = models.IntegerField(_("被调度次数"), default=0) - - class Meta: - unique_together = ["node_id", "version"] - - -class Data(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - node_id = models.CharField(_("节点 ID"), null=False, max_length=33, db_index=True, unique=True) - inputs = models.TextField(_("原始输入数据")) - outputs = models.TextField(_("原始输出数据")) - - -class ExecutionData(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - node_id = models.CharField(_("节点 ID"), null=False, 
max_length=33, db_index=True, unique=True) - inputs_serializer = models.CharField(_("输入序列化器"), null=False, max_length=32) - outputs_serializer = models.CharField(_("输出序列化器"), null=False, max_length=32) - inputs = models.TextField(_("节点执行输入数据")) - outputs = models.TextField(_("节点执行输出数据")) - - -class CallbackData(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - node_id = models.CharField(_("节点 ID"), null=False, max_length=33) - version = models.CharField(_("状态版本"), null=False, max_length=33) - data = models.TextField(_("回调数据")) - - -class ContextValue(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - pipeline_id = models.CharField(_("流程 ID"), null=False, max_length=33) - key = models.CharField(_("变量 key"), null=False, max_length=128) - type = models.IntegerField(_("变量类型")) - serializer = models.CharField(_("序列化器"), null=False, max_length=32) - code = models.CharField(_("计算型变量类型唯一标志"), default="", max_length=128) - value = models.TextField(_("变量值")) - references = models.TextField(_("所有对其他变量直接或间接的引用")) - - class Meta: - unique_together = ["pipeline_id", "key"] - - -class ContextOutputs(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - pipeline_id = models.CharField(_("流程 ID"), null=False, max_length=33, unique=True) - outputs = models.TextField(_("输出配置")) - - -class ExecutionHistory(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - node_id = models.CharField(_("节点 ID"), null=False, max_length=33) - loop = models.IntegerField(_("循环次数"), default=1) - retry = models.IntegerField(_("重试次数"), default=0) - skip = models.BooleanField(_("是否跳过"), default=False) - version = models.CharField(_("状态版本"), null=False, max_length=33) - started_time = models.DateTimeField(_("开始时间"), null=False) - archived_time = models.DateTimeField(_("归档时间"), null=False) - inputs_serializer = models.CharField(_("输入序列化器"), null=False, max_length=32) - outputs_serializer = models.CharField(_("输出序列化器"), null=False, 
max_length=32) - inputs = models.TextField(_("节点执行输入数据")) - outputs = models.TextField(_("节点执行输出数据")) - - class Meta: - index_together = ["node_id", "loop"] - - -class LogEntry(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - node_id = models.CharField(_("节点 ID"), max_length=33) - version = models.CharField(_("状态版本"), default="", max_length=33) - loop = models.IntegerField(_("循环次数"), default=1) - logger_name = models.CharField(_("logger 名称"), max_length=128) - level_name = models.CharField(_("日志等级"), max_length=32) - message = models.TextField(_("日志内容"), null=True) - logged_at = models.DateTimeField(_("输出时间"), auto_now_add=True, db_index=True) - - class Meta: - index_together = ["node_id", "loop"] diff --git a/lib/pipeline/eri/runtime.py b/lib/pipeline/eri/runtime.py deleted file mode 100644 index 4b4d885..0000000 --- a/lib/pipeline/eri/runtime.py +++ /dev/null @@ -1,538 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import json -from typing import Optional, List - -from django.conf import settings -from django.db import transaction - -from kombu import Exchange, Queue, Connection - -from bamboo_engine import states -from bamboo_engine.template import Template -from bamboo_engine.eri import interfaces -from bamboo_engine.eri import EngineRuntimeInterface, NodeType, ContextValueType - -from pipeline.eri import codec -from pipeline.eri.imp.plugin_manager import PipelinePluginManagerMixin -from pipeline.eri.imp.hooks import HooksMixin -from pipeline.eri.imp.process import ProcessMixin -from pipeline.eri.imp.node import NodeMixin -from pipeline.eri.imp.state import StateMixin -from pipeline.eri.imp.schedule import ScheduleMixin -from pipeline.eri.imp.data import DataMixin -from pipeline.eri.imp.context import ContextMixin -from pipeline.eri.imp.execution_history import ExecutionHistoryMixin -from pipeline.eri.imp.task import TaskMixin -from pipeline.eri.celery.queues import QueueResolver - -from pipeline.eri.models import Node, Data, ContextValue, Process, ContextOutputs, LogEntry, ExecutionHistory, State - - -class BambooDjangoRuntime( - TaskMixin, - ExecutionHistoryMixin, - ContextMixin, - DataMixin, - ScheduleMixin, - StateMixin, - NodeMixin, - ProcessMixin, - PipelinePluginManagerMixin, - HooksMixin, - EngineRuntimeInterface, -): - CONTEXT_VALUE_TYPE_MAP = { - "plain": ContextValueType.PLAIN.value, - "splice": ContextValueType.SPLICE.value, - "lazy": ContextValueType.COMPUTE.value, - } - - ERI_SUPPORT_VERSION = 5 - - def __init__(self): - try: - eri_version = interfaces.version() - except AttributeError: - raise RuntimeError( - "bamboo_engine eri do not support version fetch, please make sure bamboo_engine version >= 1.1.6" - ) - - major_version = int(eri_version.split(".")[0]) - if major_version > self.ERI_SUPPORT_VERSION: - raise RuntimeError( - "unsupported bamboo_engine eri version: %s, expect version: <= %s.x.x" - % (eri_version, self.ERI_SUPPORT_VERSION) - ) - - 
def _data_inputs_assemble(self, pipeline_id: str, node_id: str, node_inputs: dict) -> (dict, List[ContextValue]): - inputs = {} - context_values = [] - for k, v in node_inputs.items(): - if v["type"] == "lazy": - if k.startswith("${") and k.endswith("}"): - cv_key = "${%s_%s}" % (k[2:-1], node_id) - else: - cv_key = "${%s_%s}" % (k, node_id) - if len(cv_key) > 128: - raise ValueError("var key %s length exceeds 128" % cv_key) - context_values.append( - ContextValue( - pipeline_id=pipeline_id, - key=cv_key, - type=ContextValueType.COMPUTE.value, - serializer=self.JSON_SERIALIZER, - value=json.dumps(v["value"]), - code=v.get("custom_type", ""), - ) - ) - inputs[k] = {"need_render": True, "value": cv_key} - else: - inputs[k] = {"need_render": v["type"] == "splice", "value": v["value"]} - # inject need_render from node_inputs[item].need_render - if not v.get("need_render", True): - inputs[k]["need_render"] = False - return inputs, context_values - - def _gen_executable_end_event_node(self, event: dict, pipeline: dict, root_id: str, parent_id: str) -> Node: - return Node( - node_id=event["id"], - detail=json.dumps( - { - "id": event["id"], - "type": NodeType.ExecutableEndEvent.value, - "targets": {}, - "root_pipeline_id": root_id, - "parent_pipeline_id": parent_id, - "can_skip": False, - "can_retry": True, - "code": event["type"], - } - ), - ) - - def _gen_event_node(self, event: dict, pipeline: dict, root_id: str, parent_id: str) -> Node: - return Node( - node_id=event["id"], - detail=json.dumps( - { - "id": event["id"], - "type": event["type"], - "targets": {event["outgoing"]: pipeline["flows"][event["outgoing"]]["target"]} - if event["type"] == NodeType.EmptyStartEvent.value - else {}, - "root_pipeline_id": root_id, - "parent_pipeline_id": parent_id, - "can_skip": event["type"] == NodeType.EmptyStartEvent.value, - "can_retry": True, - } - ), - ) - - def _gen_gateway_node(self, gateway: dict, pipeline: dict, root_id: str, parent_id: str) -> Node: - if gateway["type"] 
!= NodeType.ConvergeGateway.value: - targets = {flow_id: pipeline["flows"][flow_id]["target"] for flow_id in gateway["outgoing"]} - else: - targets = {gateway["outgoing"]: pipeline["flows"][gateway["outgoing"]]["target"]} - - detail = { - "id": gateway["id"], - "type": gateway["type"], - "targets": targets, - "root_pipeline_id": root_id, - "parent_pipeline_id": parent_id, - "can_retry": True, - "can_skip": False, - } - - if gateway["type"] == NodeType.ExclusiveGateway.value: - detail["can_skip"] = True - detail["conditions"] = [ - { - "name": flow_id, - "evaluation": cond["evaluate"], - "target_id": pipeline["flows"][flow_id]["target"], - "flow_id": flow_id, - } - for flow_id, cond in gateway["conditions"].items() - ] - elif gateway["type"] == NodeType.ParallelGateway.value: - detail["converge_gateway_id"] = gateway["converge_gateway_id"] - - elif gateway["type"] == NodeType.ConditionalParallelGateway.value: - detail["conditions"] = [ - { - "name": flow_id, - "evaluation": cond["evaluate"], - "target_id": pipeline["flows"][flow_id]["target"], - "flow_id": flow_id, - } - for flow_id, cond in gateway["conditions"].items() - ] - detail["converge_gateway_id"] = gateway["converge_gateway_id"] - elif gateway["type"] == NodeType.ConvergeGateway.value: - pass - else: - raise ValueError("unsupport gateway type {}: {}".format(gateway["type"], gateway)) - - return Node(node_id=gateway["id"], detail=json.dumps(detail)) - - def _gen_activity_node(self, act: dict, pipeline: dict, root_id: str, parent_id: str) -> Node: - return Node( - node_id=act["id"], - detail=json.dumps( - { - "id": act["id"], - "type": NodeType.ServiceActivity.value, - "targets": {act["outgoing"]: pipeline["flows"][act["outgoing"]]["target"]}, - "root_pipeline_id": root_id, - "parent_pipeline_id": parent_id, - "can_skip": act["skippable"], - "code": act["component"]["code"], - "version": act["component"].get("version", "legacy"), - "timeout": act.get("timeout"), - "error_ignorable": act["error_ignorable"], - 
"can_retry": act["retryable"], - } - ), - ) - - def _gen_subproc_node(self, subproc: dict, pipeline: dict, root_id: str, parent_id: str) -> Node: - return Node( - node_id=subproc["id"], - detail=json.dumps( - { - "id": subproc["id"], - "type": NodeType.SubProcess.value, - "targets": {subproc["outgoing"]: pipeline["flows"][subproc["outgoing"]]["target"]}, - "root_pipeline_id": root_id, - "parent_pipeline_id": parent_id, - "can_skip": False, - "can_retry": True, - "start_event_id": subproc["pipeline"]["start_event"]["id"], - } - ), - ) - - def _prepare( - self, pipeline: dict, root_id: str, subprocess_context: dict, parent_id: Optional[str] = None - ) -> (List[Node], List[Data], List[ContextValue], List[ContextOutputs]): - - parent_id = parent_id or root_id - - nodes = [] - datas = [] - context_values = [] - context_outputs = [] - - node_outputs = {} - context_var_references = {} - final_references = {} - - # collect all node outputs and initial reference - for key, input_data in pipeline["data"]["inputs"].items(): - source_act = input_data.get("source_act") - source_key = input_data.get("source_key") - if not source_act: - context_var_references[key] = Template(input_data["value"]).get_reference() - final_references[key] = set() - context_values.append( - ContextValue( - pipeline_id=pipeline["id"], - key=key, - type=self.CONTEXT_VALUE_TYPE_MAP[input_data["type"]], - serializer=self.JSON_SERIALIZER, - value=json.dumps(input_data["value"]), - code=input_data.get("custom_type", ""), - ) - ) - else: - if isinstance(source_act, list): - for sa in source_act: - node_outputs.setdefault(sa["source_act"], {})[sa["source_key"]] = key - else: - node_outputs.setdefault(source_act, {})[source_key] = key - - # pre_render_keys in start_event - if "pre_render_keys" in pipeline["data"] and pipeline["data"]["pre_render_keys"]: - datas.append( - Data( - node_id=pipeline["start_event"]["id"], - inputs=codec.data_json_dumps( - {"pre_render_keys": {"need_render": False, "value": 
pipeline["data"]["pre_render_keys"]}} - ), - outputs={}, - ) - ) - - # process activities - for act in pipeline["activities"].values(): - if act["type"] == NodeType.ServiceActivity.value: - # node - nodes.append(self._gen_activity_node(act=act, pipeline=pipeline, root_id=root_id, parent_id=parent_id)) - # data - data_inputs, compute_cvs = self._data_inputs_assemble(parent_id, act["id"], act["component"]["inputs"]) - datas.append( - Data( - node_id=act["id"], - inputs=codec.data_json_dumps(data_inputs), - outputs=json.dumps(node_outputs.get(act["id"], {})), - ) - ) - # compute context values - for cv in compute_cvs: - context_values.append(cv) - final_references[cv.key] = set() - context_var_references[cv.key] = Template(cv.value).get_reference() - - elif act["type"] == NodeType.SubProcess.value: - # node - nodes.append( - self._gen_subproc_node(subproc=act, pipeline=pipeline, root_id=root_id, parent_id=parent_id) - ) - # data - data_inputs, compute_cvs = self._data_inputs_assemble(parent_id, act["id"], act["params"]) - datas.append( - Data( - node_id=act["id"], - inputs=codec.data_json_dumps(data_inputs), - outputs=json.dumps(node_outputs.get(act["id"], {})), - ) - ) - # compute context values - for cv in compute_cvs: - context_values.append(cv) - final_references[cv.key] = set() - context_var_references[cv.key] = Template(cv.value).get_reference() - - # subprocess output - context_outputs.append( - ContextOutputs(pipeline_id=act["id"], outputs=json.dumps(act["pipeline"]["data"]["outputs"])) - ) - - # subprocess preset context - for key, value in subprocess_context.items(): - serialized, serializer = self._serialize(value) - context_values.append( - ContextValue( - pipeline_id=act["id"], - key=key, - type=self.CONTEXT_VALUE_TYPE_MAP["plain"], - serializer=serializer, - value=serialized, - references="[]", - ) - ) - - sub_nodes, sub_datas, sub_ctx_values, sub_ctx_outputs = self._prepare( - pipeline=act["pipeline"], - root_id=root_id, - 
subprocess_context=subprocess_context, - parent_id=act["id"], - ) - - nodes.extend(sub_nodes) - datas.extend(sub_datas) - context_values.extend(sub_ctx_values) - context_outputs.extend(sub_ctx_outputs) - else: - raise ValueError("unsupport act type {}: {}".format(act["type"], act["id"])) - - # process events - nodes.append( - self._gen_event_node(event=pipeline["start_event"], pipeline=pipeline, root_id=root_id, parent_id=parent_id) - ) - if pipeline["end_event"]["type"] == NodeType.EmptyEndEvent.value: - nodes.append( - self._gen_event_node( - event=pipeline["end_event"], pipeline=pipeline, root_id=root_id, parent_id=parent_id - ) - ) - else: - nodes.append( - self._gen_executable_end_event_node( - event=pipeline["end_event"], pipeline=pipeline, root_id=root_id, parent_id=parent_id - ) - ) - - # process gateways - for gateway in pipeline["gateways"].values(): - nodes.append( - self._gen_gateway_node(gateway=gateway, pipeline=pipeline, root_id=root_id, parent_id=parent_id) - ) - - # resolve final references (BFS) - # convert a:b, b:c,d -> a:b,c,d b:c,d - for key, references in context_var_references.items(): - queue = [] - queue.extend(references) - - while queue: - r = queue.pop() - - # processed - if r in final_references[key]: - continue - - final_references[key].add(r) - if r in context_var_references: - queue.extend(context_var_references[r]) - - for cv in context_values: - if cv.pipeline_id != parent_id: - continue - fr = final_references.get(cv.key) - cv.references = json.dumps(list(fr)) if fr else "[]" - - if parent_id == root_id: - context_outputs.append(ContextOutputs(pipeline_id=root_id, outputs=json.dumps(pipeline["data"]["outputs"]))) - - return nodes, datas, context_values, context_outputs - - def prepare_run_pipeline( - self, pipeline: dict, root_pipeline_data: dict, root_pipeline_context: dict, subprocess_context: dict, **options - ) -> int: - """ - 进行 pipeline 执行前的准备工作,并返回 进程 ID,该函数执行完成后即代表 - pipeline 是随时可以通过 execute(process_id, start_event_id) 
启动执行的 - 一般来说,应该完成以下工作: - - 准备好进程模型 - - 准备好流程中每个节点的信息 - - 准备好流程中每个节点数据对象的信息 - - :param pipeline: pipeline 描述对象 - :type pipeline: dict - :param root_pipeline_data 根流程数据 - :type root_pipeline_data: dict - :param root_pipeline_context 根流程上下文 - :type root_pipeline_context: dict - :param subprocess_context 子流程预置流程上下文 - :type subprocess_context: dict - :return: 进程 ID - :rtype: str - """ - - queue = options.get("queue", "") - priority = options.get("priority", 100) - pipeline_id = pipeline["id"] - - nodes, datas, context_values, context_outputs = self._prepare( - pipeline=pipeline, root_id=pipeline["id"], subprocess_context=subprocess_context - ) - datas.append( - Data( - node_id=pipeline_id, - inputs=codec.data_json_dumps( - {k: {"need_render": False, "value": v} for k, v in root_pipeline_data.items()} - ), - outputs="{}", - ) - ) - for key, value in root_pipeline_context.items(): - serialized, serializer = self._serialize(value) - context_values.append( - ContextValue( - pipeline_id=pipeline_id, - key=key, - type=self.CONTEXT_VALUE_TYPE_MAP["plain"], - serializer=serializer, - value=serialized, - references="[]", - ) - ) - batch_size = getattr(settings, "BAMBOO_DJANGO_ERI_PREPARE_BATCH_SIZE", 500) - - with transaction.atomic(): - pid = Process.objects.create( - root_pipeline_id=pipeline_id, - queue=queue, - priority=priority, - pipeline_stack='["{}"]'.format(pipeline_id), - ).id - self.set_state( - node_id=pipeline_id, - to_state=states.RUNNING, - root_id=pipeline_id, - parent_id="", - set_started_time=True, - ) - - Node.objects.bulk_create(nodes, batch_size=batch_size) - Data.objects.bulk_create(datas, batch_size=batch_size) - ContextValue.objects.bulk_create(context_values, batch_size=batch_size) - ContextOutputs.objects.bulk_create(context_outputs, batch_size=batch_size) - - return pid - - def node_rerun_limit(self, root_pipeline_id: str, node_id: str) -> int: - """ - 返回节点最大重入次数 - - :param root_pipeline_id: 根流程 ID - :type root_pipeline_id: str - :param node_id: 节点 ID 
- :type node_id: str - :return: 节点最大重入次数 - :rtype: int - """ - return int(getattr(settings, "BAMBOO_DJANGO_ERI_NODE_RERUN_LIMIT", 100)) - - def add_queue(self, name: str, routing_key: Optional[str] = ""): - """ - 在 Broker 中新增用户自定义队列,注意配合 CELERY_CREATE_MISSING_QUEUES 选项使用 - - :param name: 队列名 - :type name: str - :param routing_key: routing key - :type routing_key: str - """ - queue_resolver = QueueResolver(name) - - exchange = Exchange("default", type="direct") - with Connection(settings.BROKER_URL) as conn: - with conn.channel() as channel: - for queue_config in queue_resolver.routes_config().values(): - queue = Queue( - queue_config["queue"], exchange, routing_key=queue_config["routing_key"], max_priority=255 - ) - queue.declare(channel=channel) - - def get_plain_log_for_node(self, node_id: str, history_id: int = -1, version: str = None) -> str: - """ - 读取某个节点某一次执行的日志 - - :param node_id: 节点 ID - :type node_id: str - :param history_id: 执行历史 ID, -1 表示获取最新日志 - :type history_id: int, optional - :param version: 节点执行版本,当该参数与执行历史 ID 同时存在时,以版本为准 - :return: 节点日志 - :rtype: str - """ - if not version: - if history_id != -1: - qs = ExecutionHistory.objects.filter(id=history_id).only("version") - else: - qs = State.objects.filter(node_id=node_id).only("version") - - if not qs: - return "" - version = qs.first().version - - return "\n".join( - [ - e.message - for e in LogEntry.objects.order_by("id").filter(node_id=node_id, version=version).only("message") - ] - ) diff --git a/lib/pipeline/eri/signals.py b/lib/pipeline/eri/signals.py deleted file mode 100644 index 4b4d2cb..0000000 --- a/lib/pipeline/eri/signals.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.dispatch import Signal - -post_set_state = Signal(providing_args=["node_id", "to_state", "version", "root_id", "parent_id", "loop"]) diff --git a/lib/pipeline/exceptions.py b/lib/pipeline/exceptions.py deleted file mode 100644 index 2b09162..0000000 --- a/lib/pipeline/exceptions.py +++ /dev/null @@ -1,195 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -class PipelineException(Exception): - pass - - -class FlowTypeError(PipelineException): - pass - - -class InvalidOperationException(PipelineException): - pass - - -class ConditionExhaustedException(PipelineException): - pass - - -class EvaluationException(PipelineException): - pass - - -class NodeNotExistException(PipelineException): - pass - - -class SourceKeyException(NodeNotExistException): - pass - - -class VariableHydrateException(PipelineException): - pass - - -class ParserException(PipelineException): - pass - - -class SubprocessRefError(PipelineException): - pass - - -class TemplateImportError(PipelineException): - pass - - -class SubprocessExpiredError(PipelineException): - pass - - -# -# data exception -# - - -class DataException(PipelineException): - pass - - -class DataInitException(DataException): - pass - - -class DataAttrException(DataException): - pass - - -class DataTypeErrorException(DataException): - pass - - -class CycleErrorException(DataException): - pass - - -class ConnectionValidateError(DataException): - def __init__(self, failed_nodes, detail, *args): - self.failed_nodes = failed_nodes - self.detail = detail - super(ConnectionValidateError, self).__init__(*args) - - -class ConvergeMatchError(DataException): - def __init__(self, gateway_id, *args): - self.gateway_id = gateway_id - super(ConvergeMatchError, self).__init__(*args) - - -class StreamValidateError(DataException): - def __init__(self, node_id, *args): - self.node_id = node_id - super(StreamValidateError, self).__init__(*args) - - -class IsolateNodeError(DataException): - pass - - -# -# component exception -# - - -class ComponentException(PipelineException): - pass - - -class ComponentDataFormatException(ComponentException): - pass - - -class ComponentNotExistException(ComponentException): - pass - - -class ComponentDataLackException(ComponentDataFormatException): - pass - - -# -# tag exception -# - - -class PipelineError(Exception): - pass - - -class 
TagError(PipelineError): - pass - - -class AttributeMissingError(TagError): - pass - - -class AttributeValidationError(TagError): - pass - - -# -# constant exception -# -class ConstantException(PipelineException): - pass - - -class ConstantNotExistException(ConstantException): - pass - - -class ConstantReferenceException(ConstantException): - pass - - -class ConstantTypeException(ConstantException): - pass - - -class ConstantSyntaxException(ConstantException): - pass - - -# -# context exception -# -class ContextError(PipelineError): - pass - - -class ReferenceNotExistError(ContextError): - pass - - -class InsufficientVariableError(ContextError): - pass - - -# -# periodic task exception -# -class InvalidCrontabException(PipelineException): - pass diff --git a/lib/pipeline/log/__init__.py b/lib/pipeline/log/__init__.py deleted file mode 100644 index 8eca6ab..0000000 --- a/lib/pipeline/log/__init__.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging - - -def setup(level=None): - from pipeline.logging import pipeline_logger as logger - from pipeline.log.handlers import EngineLogHandler - - if level in set(logging._levelToName.values()): - logger.setLevel(level) - - logging._acquireLock() - try: - for hdl in logger.handlers: - if isinstance(hdl, EngineLogHandler): - break - else: - hdl = EngineLogHandler() - hdl.setLevel(logger.level) - logger.addHandler(hdl) - finally: - logging._releaseLock() - - -default_app_config = "pipeline.log.apps.LogConfig" diff --git a/lib/pipeline/log/admin.py b/lib/pipeline/log/admin.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/log/admin.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/log/apps.py b/lib/pipeline/log/apps.py deleted file mode 100644 index c2e476d..0000000 --- a/lib/pipeline/log/apps.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.apps import AppConfig - -from pipeline.conf import default_settings - - -class LogConfig(AppConfig): - name = "pipeline.log" - verbose_name = "Database Logging" - - def ready(self): - from pipeline.log import setup - - setup(level=default_settings.PIPELINE_LOG_LEVEL) diff --git a/lib/pipeline/log/handlers.py b/lib/pipeline/log/handlers.py deleted file mode 100644 index 64662b5..0000000 --- a/lib/pipeline/log/handlers.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging - -from django.core.exceptions import AppRegistryNotReady - -from pipeline.engine.core import context - - -class EngineLogHandler(logging.Handler): - def emit(self, record): - try: - from . 
import models - except AppRegistryNotReady: - return - - models.LogEntry.objects.create( - logger_name=record.name, - level_name=record.levelname, - message=self.format(record), - exception=record.exc_text, - node_id=record._id, - ) - - -class EngineContextLogHandler(logging.Handler): - def emit(self, record): - try: - from . import models - except AppRegistryNotReady: - return - - node_id = context.get_node_id() - if not node_id: - return - - models.LogEntry.objects.create( - logger_name=record.name, - level_name=record.levelname, - message=self.format(record), - exception=record.exc_text, - node_id=node_id, - ) diff --git a/lib/pipeline/log/migrations/0001_initial.py b/lib/pipeline/log/migrations/0001_initial.py deleted file mode 100644 index 619d8a2..0000000 --- a/lib/pipeline/log/migrations/0001_initial.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [] - - operations = [ - migrations.CreateModel( - name="LogEntry", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), - ("logger_name", models.SlugField(max_length=128)), - ("level_name", models.SlugField(max_length=32)), - ("message", models.TextField()), - ("exception", models.TextField()), - ("logged_at", models.DateTimeField(auto_now_add=True)), - ("node_id", models.CharField(max_length=32, db_index=True)), - ], - ), - ] diff --git a/lib/pipeline/log/migrations/0002_auto_20180810_1054.py b/lib/pipeline/log/migrations/0002_auto_20180810_1054.py deleted file mode 100644 index a77441e..0000000 --- a/lib/pipeline/log/migrations/0002_auto_20180810_1054.py +++ /dev/null @@ -1,27 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("log", "0001_initial"), - ] - - operations = [ - migrations.AlterField(model_name="logentry", name="exception", field=models.TextField(null=True),), - migrations.AlterField(model_name="logentry", name="message", field=models.TextField(null=True),), - ] diff --git a/lib/pipeline/log/migrations/0003_logentry_history_id.py b/lib/pipeline/log/migrations/0003_logentry_history_id.py deleted file mode 100644 index f67290b..0000000 --- a/lib/pipeline/log/migrations/0003_logentry_history_id.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("log", "0002_auto_20180810_1054"), - ] - - operations = [ - migrations.AddField(model_name="logentry", name="history_id", field=models.IntegerField(default=-1),), - ] diff --git a/lib/pipeline/log/migrations/0004_auto_20180814_1555.py b/lib/pipeline/log/migrations/0004_auto_20180814_1555.py deleted file mode 100644 index aad49e6..0000000 --- a/lib/pipeline/log/migrations/0004_auto_20180814_1555.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("log", "0003_logentry_history_id"), - ] - - operations = [ - migrations.AlterField( - model_name="logentry", - name="exception", - field=models.TextField(null=True, verbose_name="\u5f02\u5e38\u4fe1\u606f"), - ), - migrations.AlterField( - model_name="logentry", - name="history_id", - field=models.IntegerField(default=-1, verbose_name="\u8282\u70b9\u6267\u884c\u5386\u53f2 ID"), - ), - migrations.AlterField( - model_name="logentry", - name="level_name", - field=models.SlugField(max_length=32, verbose_name="\u65e5\u5fd7\u7b49\u7ea7"), - ), - migrations.AlterField( - model_name="logentry", - name="logged_at", - field=models.DateTimeField(auto_now_add=True, verbose_name="\u8f93\u51fa\u65f6\u95f4"), - ), - migrations.AlterField( - model_name="logentry", - name="logger_name", - field=models.SlugField(max_length=128, verbose_name="logger \u540d\u79f0"), - ), - migrations.AlterField( - model_name="logentry", - name="message", - field=models.TextField(null=True, verbose_name="\u65e5\u5fd7\u5185\u5bb9"), - ), - migrations.AlterField( - model_name="logentry", - name="node_id", - field=models.CharField(max_length=32, verbose_name="\u8282\u70b9 ID", db_index=True), - ), - ] diff --git a/lib/pipeline/log/migrations/0005_auto_20190729_1041.py b/lib/pipeline/log/migrations/0005_auto_20190729_1041.py deleted file mode 100644 index 7becf5b..0000000 --- a/lib/pipeline/log/migrations/0005_auto_20190729_1041.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("log", "0004_auto_20180814_1555"), - ] - - operations = [ - migrations.AlterField( - model_name="logentry", - name="id", - field=models.BigAutoField(primary_key=True, serialize=False, verbose_name="ID"), - ), - ] diff --git a/lib/pipeline/log/migrations/0006_auto_20201201_1638.py b/lib/pipeline/log/migrations/0006_auto_20201201_1638.py deleted file mode 100644 index 5b8ecd6..0000000 --- a/lib/pipeline/log/migrations/0006_auto_20201201_1638.py +++ /dev/null @@ -1,31 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from __future__ import unicode_literals - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("log", "0005_auto_20190729_1041"), - ] - - operations = [ - migrations.AlterField( - model_name="logentry", - name="logged_at", - field=models.DateTimeField(auto_now_add=True, db_index=True, verbose_name="输出时间"), - ), - ] diff --git a/lib/pipeline/log/migrations/__init__.py b/lib/pipeline/log/migrations/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/log/migrations/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/log/models.py b/lib/pipeline/log/models.py deleted file mode 100644 index 5fa33fb..0000000 --- a/lib/pipeline/log/models.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.db import models -from django.utils import timezone -from django.utils.translation import ugettext_lazy as _ - - -class LogEntryManager(models.Manager): - def link_history(self, node_id, history_id): - self.filter(node_id=node_id, history_id=-1).update(history_id=history_id) - - def plain_log_for_node(self, node_id, history_id): - entries = self.order_by("id").filter(node_id=node_id, history_id=history_id) - plain_entries = [] - for entry in entries: - plain_entries.append( - "[%s %s] %s, exception: %s" - % (entry.logged_at.strftime("%Y-%m-%d %H:%M:%S"), entry.level_name, entry.message, entry.exception) - ) - return "\n".join(plain_entries) - - def delete_expired_log(self, interval): - expired_date = timezone.now() + timezone.timedelta(days=(-interval)) - to_be_deleted = self.filter(logged_at__lt=expired_date) - count = to_be_deleted.count() - to_be_deleted.delete() - return count - - -class LogEntry(models.Model): - id = models.BigAutoField(_("ID"), primary_key=True) - logger_name = models.SlugField(_("logger 名称"), max_length=128) - level_name = models.SlugField(_("日志等级"), max_length=32) - message = models.TextField(_("日志内容"), null=True) - exception = models.TextField(_("异常信息"), null=True) - logged_at = models.DateTimeField(_("输出时间"), auto_now_add=True, db_index=True) - - node_id = models.CharField(_("节点 ID"), max_length=32, db_index=True) - history_id = models.IntegerField(_("节点执行历史 ID"), default=-1) - - objects = LogEntryManager() diff --git a/lib/pipeline/log/tasks.py b/lib/pipeline/log/tasks.py deleted file mode 100644 index be5cec1..0000000 --- 
a/lib/pipeline/log/tasks.py +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging - -from celery.decorators import periodic_task -from celery.schedules import crontab -from django.conf import settings - -from pipeline.log.models import LogEntry - -logger = logging.getLogger(__name__) - - -@periodic_task(run_every=(crontab(minute=0, hour=0)), ignore_result=True) -def clean_expired_log(): - expired_interval = getattr(settings, "LOG_PERSISTENT_DAYS", None) - - if expired_interval is None: - expired_interval = 30 - logger.warning("LOG_PERSISTENT_DAYS are not found in settings, use default value: 30") - - del_num = LogEntry.objects.delete_expired_log(expired_interval) - logger.info("%s log entry are deleted" % del_num) diff --git a/lib/pipeline/log/views.py b/lib/pipeline/log/views.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/log/views.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/logging.py b/lib/pipeline/logging.py deleted file mode 100644 index 4b04e89..0000000 --- a/lib/pipeline/logging.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import logging - - -def get_pipeline_logger(): - return logging.getLogger(__name__) - - -pipeline_logger = get_pipeline_logger() diff --git a/lib/pipeline/management/__init__.py b/lib/pipeline/management/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/management/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/management/commands/__init__.py b/lib/pipeline/management/commands/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/management/commands/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/management/commands/app.py b/lib/pipeline/management/commands/app.py deleted file mode 100644 index 5e21f97..0000000 --- a/lib/pipeline/management/commands/app.py +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" -from __future__ import absolute_import, unicode_literals - -from celery import current_app - - -#: The Django-Celery app instance. -app = current_app._get_current_object() diff --git a/lib/pipeline/management/commands/base.py b/lib/pipeline/management/commands/base.py deleted file mode 100644 index 45dc748..0000000 --- a/lib/pipeline/management/commands/base.py +++ /dev/null @@ -1,158 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" -from __future__ import absolute_import, unicode_literals - -import os -import sys - -import celery - -try: - import django_celery_beat -except ImportError: - import djcelery - -from kombu.utils.encoding import str_to_bytes -from django.core.management.base import BaseCommand - -DB_SHARED_THREAD = """\ -DatabaseWrapper objects created in a thread can only \ -be used in that same thread. 
The object with alias '{0}' \ -was created in thread id {1} and this is thread id {2}.\ -""" - - -def setenv(k, v): # noqa - os.environ[str_to_bytes(k)] = str_to_bytes(v) - - -def patch_thread_ident(): - # monkey patch django. - # This patch make sure that we use real threads to get the ident which - # is going to happen if we are using gevent or eventlet. - # -- patch taken from gunicorn - if getattr(patch_thread_ident, "called", False): - return - try: - from django.db.backends.base.base import BaseDatabaseWrapper, DatabaseError - - if "validate_thread_sharing" in BaseDatabaseWrapper.__dict__: - import threading - - _get_ident = threading.get_ident - - __old__init__ = BaseDatabaseWrapper.__init__ - - def _init(self, *args, **kwargs): - __old__init__(self, *args, **kwargs) - self._thread_ident = _get_ident() - - def _validate_thread_sharing(self): - if not self.allow_thread_sharing and self._thread_ident != _get_ident(): - raise DatabaseError(DB_SHARED_THREAD % (self.alias, self._thread_ident, _get_ident()),) - - BaseDatabaseWrapper.__init__ = _init - BaseDatabaseWrapper.validate_thread_sharing = _validate_thread_sharing - - patch_thread_ident.called = True - except ImportError: - pass - - -patch_thread_ident() - - -class CeleryCommand(BaseCommand): - options = () - if hasattr(BaseCommand, "option_list"): - options = BaseCommand.option_list - else: - - def add_arguments(self, parser): - option_typemap = {"string": str, "int": int, "float": float} - for opt in self.option_list: - option = {k: v for k, v in opt.__dict__.items() if v is not None} - flags = option.get("_long_opts", []) + option.get("_short_opts", []) - if option.get("default") == ("NO", "DEFAULT"): - option["default"] = None - if option.get("nargs") == 1: - del option["nargs"] - del option["_long_opts"] - del option["_short_opts"] - if "type" in option: - opttype = option["type"] - option["type"] = option_typemap.get(opttype, opttype) - parser.add_argument(*flags, **option) - - skip_opts = ["--app", 
"--loader", "--config", "--no-color"] - requires_system_checks = False - keep_base_opts = False - stdout, stderr = sys.stdout, sys.stderr - - def get_version(self): - def get_version(self): - try: - version = "celery {c.__version__}\ndjango-celery-beat {d.__version__}".format( - c=celery, d=django_celery_beat, - ) - except ImportError: - version = "celery {c.__version__}\ndjango-celery {d.__version__}".format(c=celery, d=djcelery,) - return version - - def execute(self, *args, **options): - broker = options.get("broker") - if broker: - self.set_broker(broker) - super(CeleryCommand, self).execute(*args, **options) - - def set_broker(self, broker): - setenv("CELERY_BROKER_URL", broker) - - def run_from_argv(self, argv): - self.handle_default_options(argv[2:]) - return super(CeleryCommand, self).run_from_argv(argv) - - def handle_default_options(self, argv): - acc = [] - broker = None - for i, arg in enumerate(argv): - # --settings and --pythonpath are also handled - # by BaseCommand.handle_default_options, but that is - # called with the resulting options parsed by optparse. 
- if "--settings=" in arg: - _, settings_module = arg.split("=") - setenv("DJANGO_SETTINGS_MODULE", settings_module) - elif "--pythonpath=" in arg: - _, pythonpath = arg.split("=") - sys.path.insert(0, pythonpath) - elif "--broker=" in arg: - _, broker = arg.split("=") - elif arg == "-b": - broker = argv[i + 1] - else: - acc.append(arg) - if broker: - self.set_broker(broker) - return argv if self.keep_base_opts else acc - - def die(self, msg): - sys.stderr.write(msg) - sys.stderr.write("\n") - sys.exit() - - def _is_unwanted_option(self, option): - return option._long_opts and option._long_opts[0] in self.skip_opts - - @property - def option_list(self): - return [x for x in self.options if not self._is_unwanted_option(x)] diff --git a/lib/pipeline/management/commands/celery.py b/lib/pipeline/management/commands/celery.py deleted file mode 100644 index 5f00772..0000000 --- a/lib/pipeline/management/commands/celery.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" -from __future__ import absolute_import, unicode_literals - -from optparse import make_option as Option - -from celery.bin import celery - -from pipeline.management.commands.app import app -from pipeline.management.commands.base import CeleryCommand - -base = celery.CeleryCommand(app=app) - - -class Command(CeleryCommand): - """The celery command.""" - - help = "celery commands, see celery help" - options = ( - Option("-A", "--app", default=None), - Option("--broker", default=None), - Option("--loader", default=None), - Option("--config", default=None), - Option("--workdir", default=None, dest="working_directory"), - Option("--result-backend", default=None), - Option("--no-color", "-C", action="store_true", default=None), - Option("--quiet", "-q", action="store_true"), - ) - if base.get_options() is not None: - options = options + CeleryCommand.options + base.get_options() - - def run_from_argv(self, argv): - argv = self.handle_default_options(argv) - base.execute_from_commandline(["{0[0]} {0[1]}".format(argv)] + argv[2:],) diff --git a/lib/pipeline/management/commands/celerybeat.py b/lib/pipeline/management/commands/celerybeat.py deleted file mode 100644 index 73e0cfe..0000000 --- a/lib/pipeline/management/commands/celerybeat.py +++ /dev/null @@ -1,36 +0,0 @@ -""" - -Start the celery clock service from the Django management command. - -""" -from __future__ import absolute_import, unicode_literals - -from optparse import make_option as Option - -from celery.bin import beat - -from pipeline.management.commands.app import app -from pipeline.management.commands.base import CeleryCommand - -beat = beat.beat(app=app) - - -class Command(CeleryCommand): - """Run the celery periodic task scheduler.""" - - help = 'Old alias to the "celery beat" command.' 
- options = ( - Option("-A", "--app", default=None), - Option("--broker", default=None), - Option("--loader", default=None), - Option("--config", default=None), - Option("--workdir", default=None, dest="working_directory"), - Option("--result-backend", default=None), - Option("--no-color", "-C", action="store_true", default=None), - Option("--quiet", "-q", action="store_true"), - ) - if beat.get_options() is not None: - options = options + CeleryCommand.options + beat.get_options() - - def handle(self, *args, **options): - beat.run(*args, **options) diff --git a/lib/pipeline/management/commands/create_atoms_app.py b/lib/pipeline/management/commands/create_atoms_app.py deleted file mode 100644 index d48d158..0000000 --- a/lib/pipeline/management/commands/create_atoms_app.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import os -import sys - -from django.core.management import base, call_command -from django.template import Template, Context - -from pipeline.templates.create_plugins_app import js_file, plugins, py_file - -PY_COPYRIGHT = '''# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. 
-Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" -''' - - -class Command(base.BaseCommand): - help = "Create an application for atoms development" - - def add_arguments(self, parser): - parser.add_argument("app_name", nargs=1, type=str) - - def handle(self, *args, **options): - - app_name = options["app_name"][0] - if os.path.isdir(app_name): - sys.stdout.write("the directory [%s] already exists, please try another name.\n") - return - - call_command("startapp", app_name) - - collection_path = "%s/components/collections" % app_name - tests_path = "%s/tests/components/collections/plugins_test" % app_name - static_collection_path = "{}/static/{}".format(app_name, app_name) - init_file_info = { - "%s/components/collections/__init__.py" % app_name: py_file.TEMPLATE, - "%s/components/__init__.py" % app_name: py_file.TEMPLATE, - "%s/components/collections/plugins.py" % app_name: plugins.TEMPLATE, - "%s/tests/__init__.py" % app_name: py_file.TEMPLATE, - "%s/tests/components/__init__.py" % app_name: py_file.TEMPLATE, - "%s/tests/components/collections/__init__.py" % app_name: py_file.TEMPLATE, - "%s/tests/components/collections/plugins_test/__init__.py" % app_name: py_file.TEMPLATE, - "{}/static/{}/plugins.js".format(app_name, app_name): js_file.TEMPLATE, - } - exist_file_path = [ - "%s/migrations/__init__.py" % app_name, - "%s/__init__.py" % app_name, - "%s/apps.py" % app_name, - ] - useless_file_path = [ - "%s/admin.py" % app_name, - 
"%s/models.py" % app_name, - "%s/tests.py" % app_name, - "%s/views.py" % app_name, - ] - os.makedirs(collection_path) - os.makedirs(tests_path) - os.makedirs(static_collection_path) - - empty_context = Context() - for p, tmpl in list(init_file_info.items()): - with open(p, "w+") as f: - f.write(Template(tmpl).render(empty_context)) - - for p in exist_file_path: - with open(p, "r") as f: - content = f.readlines() - - if content and content[0].startswith("# -*- coding: utf-8 -*-"): - content = content[1:] - - content.insert(0, PY_COPYRIGHT) - - with open(p, "w") as f: - f.writelines(content) - - for p in useless_file_path: - os.remove(p) diff --git a/lib/pipeline/management/commands/create_plugins_app.py b/lib/pipeline/management/commands/create_plugins_app.py deleted file mode 100644 index bcecf14..0000000 --- a/lib/pipeline/management/commands/create_plugins_app.py +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from .create_atoms_app import * # noqa diff --git a/lib/pipeline/management/commands/generate_config.py b/lib/pipeline/management/commands/generate_config.py deleted file mode 100644 index 777d340..0000000 --- a/lib/pipeline/management/commands/generate_config.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import os - -from django.core.management.base import BaseCommand -from django.template.loader import render_to_string - -from pipeline.conf import settings - - -class Command(BaseCommand): - help = "Generate Redis & Supervisor configuration file for pipeline" - - configs = { - os.path.join(settings.BASE_DIR, "etc/redis.conf"): "redis/redis.tmpl", - os.path.join(settings.BASE_DIR, "etc/supervisord.conf"): "supervisor/supervisor.tmpl", - } - - var_paths = [os.path.join(settings.BASE_DIR, "var/log/"), os.path.join(settings.BASE_DIR, "var/run/")] - - def add_arguments(self, parser): - parser.add_argument("-pc", dest="p_worker_num", default=2, help="Set number of worker bind with pipeline") - parser.add_argument("-sc", dest="s_worker_num", default=2, help="Set the number of worker bind with schedule") - parser.add_argument( - "--worker", - action="store_true", - dest="is_worker", - default=False, - help="is worker process group (default False)", - ) - parser.add_argument( - "--master", - action="store_true", - dest="is_master", - default=False, - help="is master process group (default False)", - ) - - def handle(self, *args, **options): - context = { - "settings": settings, - "is_master": options["is_master"], - "is_worker": options["is_worker"], - "p_worker_num": options["p_worker_num"], - "s_worker_num": options["s_worker_num"], - "uid": os.getuid(), - } - - for path in self.var_paths: - if not os.path.exists(path): - os.makedirs(path) - - for target_path, template_name in list(self.configs.items()): - dirname = os.path.dirname(target_path) - if not os.path.exists(dirname): - try: - os.makedirs(dirname) - except Exception: - pass - - with open(target_path, "wb+") as f: - f.write(render_to_string(template_name, context)) diff --git a/lib/pipeline/migrations/0001_initial.py b/lib/pipeline/migrations/0001_initial.py deleted file mode 100644 index e4df9f4..0000000 --- a/lib/pipeline/migrations/0001_initial.py +++ /dev/null @@ -1,167 +0,0 @@ -# -*- coding: utf-8 
-*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" -# Generated by Django 1.11.2 on 2017-11-24 10:43 - - -from django.db import migrations, models -import django.db.models.deletion -import pipeline.models - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [] - - operations = [ - migrations.CreateModel( - name="PipelineInstance", - fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("instance_id", models.CharField(max_length=32, unique=True, verbose_name="\u5b9e\u4f8bID")), - ( - "name", - models.CharField( - default="\u9ed8\u8ba4\u5b9e\u4f8b", max_length=64, verbose_name="\u5b9e\u4f8b\u540d\u79f0" - ), - ), - ("creator", models.CharField(max_length=32, verbose_name="\u521b\u5efa\u8005")), - ("create_time", models.DateTimeField(auto_now_add=True, verbose_name="\u521b\u5efa\u65f6\u95f4")), - ("executor", models.CharField(max_length=32, verbose_name="\u6267\u884c\u8005")), - ("start_time", models.DateTimeField(blank=True, null=True, verbose_name="\u542f\u52a8\u65f6\u95f4")), - ("finish_time", models.DateTimeField(blank=True, null=True, verbose_name="\u7ed3\u675f\u65f6\u95f4")), - ("description", models.TextField(blank=True, null=True, verbose_name="\u63cf\u8ff0")), - ("is_started", models.BooleanField(default=False, 
verbose_name="\u662f\u5426\u5df2\u7ecf\u542f\u52a8")), - ( - "is_finished", - models.BooleanField(default=False, verbose_name="\u662f\u5426\u5df2\u7ecf\u5b8c\u6210"), - ), - ( - "is_deleted", - models.BooleanField( - default=False, - help_text="\u8868\u793a\u5f53\u524d\u5b9e\u4f8b\u662f\u5426\u5220\u9664", - verbose_name="\u662f\u5426\u5df2\u7ecf\u5220\u9664", - ), - ), - ], - options={ - "ordering": ["-create_time"], - "verbose_name": "Pipeline\u5b9e\u4f8b", - "verbose_name_plural": "Pipeline\u5b9e\u4f8b", - }, - ), - migrations.CreateModel( - name="PipelineTemplate", - fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("template_id", models.CharField(max_length=32, unique=True, verbose_name="\u6a21\u677fID")), - ( - "name", - models.CharField( - default="\u9ed8\u8ba4\u6a21\u677f", max_length=64, verbose_name="\u6a21\u677f\u540d\u79f0" - ), - ), - ("create_time", models.DateTimeField(auto_now_add=True, verbose_name="\u521b\u5efa\u65f6\u95f4")), - ("creator", models.CharField(max_length=32, verbose_name="\u521b\u5efa\u8005")), - ("description", models.TextField(blank=True, null=True, verbose_name="\u63cf\u8ff0")), - ("editor", models.CharField(blank=True, max_length=32, null=True, verbose_name="\u4fee\u6539\u8005")), - ("edit_time", models.DateTimeField(auto_now=True, verbose_name="\u4fee\u6539\u65f6\u95f4")), - ( - "is_deleted", - models.BooleanField( - default=False, - help_text="\u8868\u793a\u5f53\u524d\u6a21\u677f\u662f\u5426\u5220\u9664", - verbose_name="\u662f\u5426\u5220\u9664", - ), - ), - ], - options={ - "ordering": ["-edit_time"], - "verbose_name": "Pipeline\u6a21\u677f", - "verbose_name_plural": "Pipeline\u6a21\u677f", - }, - ), - migrations.CreateModel( - name="Snapshot", - fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ( - "md5sum", - models.CharField( - max_length=32, unique=True, 
verbose_name="\u5feb\u7167\u5b57\u7b26\u4e32\u7684md5sum" - ), - ), - ("create_time", models.DateTimeField(auto_now_add=True, verbose_name="\u521b\u5efa\u65f6\u95f4")), - ("data", pipeline.models.CompressJSONField(blank=True, null=True)), - ], - options={ - "ordering": ["-id"], - "verbose_name": "\u6a21\u677f\u5feb\u7167", - "verbose_name_plural": "\u6a21\u677f\u5feb\u7167", - }, - ), - migrations.CreateModel( - name="VariableModel", - fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("code", models.CharField(max_length=255, unique=True, verbose_name="\u53d8\u91cf\u7f16\u7801")), - ("status", models.BooleanField(default=True, verbose_name="\u53d8\u91cf\u662f\u5426\u53ef\u7528")), - ], - options={ - "ordering": ["-id"], - "verbose_name": "lazy \u53d8\u91cf", - "verbose_name_plural": "lazy \u53d8\u91cf", - }, - ), - migrations.AddField( - model_name="pipelinetemplate", - name="snapshot", - field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="pipeline.Snapshot", - verbose_name="\u6a21\u677f\u7ed3\u6784\u6570\u636e", - ), - ), - migrations.AddField( - model_name="pipelineinstance", - name="execution_snapshot", - field=models.ForeignKey( - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name="execution_snapshot", - to="pipeline.Snapshot", - verbose_name="\u7528\u4e8e\u5b9e\u4f8b\u6267\u884c\u7684\u7ed3\u6784\u6570\u636e", - ), - ), - migrations.AddField( - model_name="pipelineinstance", - name="snapshot", - field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - related_name="snapshot", - to="pipeline.Snapshot", - verbose_name="\u5b9e\u4f8b\u7ed3\u6784\u6570\u636e", - ), - ), - migrations.AddField( - model_name="pipelineinstance", - name="template", - field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="pipeline.PipelineTemplate", - verbose_name="Pipeline\u6a21\u677f", - ), - ), - ] diff --git 
a/lib/pipeline/migrations/0002_auto_20180109_1825.py b/lib/pipeline/migrations/0002_auto_20180109_1825.py deleted file mode 100644 index c25e516..0000000 --- a/lib/pipeline/migrations/0002_auto_20180109_1825.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" -# Generated by Django 1.11.2 on 2018-01-09 18:25 - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0001_initial"), - ] - - operations = [ - migrations.AlterField( - model_name="pipelineinstance", - name="creator", - field=models.CharField(blank=True, max_length=32, verbose_name="\u521b\u5efa\u8005"), - ), - migrations.AlterField( - model_name="pipelineinstance", - name="description", - field=models.TextField(blank=True, default="", verbose_name="\u63cf\u8ff0"), - preserve_default=False, - ), - migrations.AlterField( - model_name="pipelineinstance", - name="executor", - field=models.CharField(blank=True, max_length=32, verbose_name="\u6267\u884c\u8005"), - ), - ] diff --git a/lib/pipeline/migrations/0003_auto_20180206_1955.py b/lib/pipeline/migrations/0003_auto_20180206_1955.py deleted file mode 100644 index 52d6c72..0000000 --- a/lib/pipeline/migrations/0003_auto_20180206_1955.py +++ /dev/null @@ -1,70 +0,0 @@ -# -*- 
coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" -# Generated by Django 1.11.2 on 2018-02-06 19:55 - - -from django.db import migrations, models -import django.db.models.deletion -import pipeline.models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0002_auto_20180109_1825"), - ] - - operations = [ - migrations.CreateModel( - name="TemplateScheme", - fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ( - "unique_id", - models.CharField( - blank=True, max_length=97, unique=True, verbose_name="\u552f\u4e00\u65b9\u6848\u540d\u79f0" - ), - ), - ("name", models.CharField(max_length=64, verbose_name="\u65b9\u6848\u540d\u79f0")), - ("edit_time", models.DateTimeField(auto_now=True, verbose_name="\u4fee\u6539\u65f6\u95f4")), - ("data", pipeline.models.CompressJSONField(verbose_name="\u65b9\u6848\u6570\u636e")), - ], - ), - migrations.AlterModelOptions( - name="variablemodel", - options={ - "ordering": ["-id"], - "verbose_name": "Variable\u53d8\u91cf", - "verbose_name_plural": "Variable\u53d8\u91cf", - }, - ), - migrations.AlterField( - model_name="pipelineinstance", - name="name", - field=models.CharField(default=b"default_instance", max_length=64, verbose_name="\u5b9e\u4f8b\u540d\u79f0"), - ), - 
migrations.AlterField( - model_name="pipelinetemplate", - name="name", - field=models.CharField(default=b"default_template", max_length=64, verbose_name="\u6a21\u677f\u540d\u79f0"), - ), - migrations.AddField( - model_name="templatescheme", - name="template", - field=models.ForeignKey( - on_delete=django.db.models.deletion.CASCADE, - to="pipeline.PipelineTemplate", - verbose_name="\u5bf9\u5e94\u6a21\u677f ID", - ), - ), - ] diff --git a/lib/pipeline/migrations/0004_auto_20180516_1708.py b/lib/pipeline/migrations/0004_auto_20180516_1708.py deleted file mode 100644 index d0f69c0..0000000 --- a/lib/pipeline/migrations/0004_auto_20180516_1708.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0003_auto_20180206_1955"), - ] - - operations = [ - migrations.AlterField( - model_name="templatescheme", - name="unique_id", - field=models.CharField(unique=True, max_length=97, verbose_name="\u65b9\u6848\u552f\u4e00ID", blank=True), - ), - ] diff --git a/lib/pipeline/migrations/0005_pipelineinstance_tree_info.py b/lib/pipeline/migrations/0005_pipelineinstance_tree_info.py deleted file mode 100644 index 64999d4..0000000 --- a/lib/pipeline/migrations/0005_pipelineinstance_tree_info.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0004_auto_20180516_1708"), - ] - - operations = [ - migrations.AddField( - model_name="pipelineinstance", - name="tree_info", - field=models.ForeignKey( - related_name="tree_info", - verbose_name="\u63d0\u524d\u8ba1\u7b97\u597d\u7684\u4e00\u4e9b\u6d41\u7a0b\u7ed3\u6784\u6570\u636e", - to="pipeline.Snapshot", - null=True, - on_delete=models.SET_NULL, - ), - ), - ] diff --git a/lib/pipeline/migrations/0006_auto_20180814_1622.py b/lib/pipeline/migrations/0006_auto_20180814_1622.py deleted file mode 100644 index 8548b8e..0000000 --- a/lib/pipeline/migrations/0006_auto_20180814_1622.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models -import pipeline.models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0005_pipelineinstance_tree_info"), - ] - - operations = [ - migrations.CreateModel( - name="TreeInfo", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), - ("data", pipeline.models.CompressJSONField(null=True, blank=True)), - ], - ), - migrations.AlterField( - model_name="pipelineinstance", - name="tree_info", - field=models.ForeignKey( - related_name="tree_info", - verbose_name="\u63d0\u524d\u8ba1\u7b97\u597d\u7684\u4e00\u4e9b\u6d41\u7a0b\u7ed3\u6784\u6570\u636e", - to="pipeline.TreeInfo", - null=True, - on_delete=models.SET_NULL, - ), - ), - ] diff --git a/lib/pipeline/migrations/0007_templaterelationship.py b/lib/pipeline/migrations/0007_templaterelationship.py deleted file mode 100644 index c5113be..0000000 --- a/lib/pipeline/migrations/0007_templaterelationship.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0006_auto_20180814_1622"), - ] - - operations = [ - migrations.CreateModel( - name="TemplateRelationship", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), - ("ancestor_template_id", models.CharField(max_length=32, verbose_name="\u6839\u6a21\u677fID")), - ( - "descendant_template_id", - models.CharField(max_length=32, verbose_name="\u5b50\u6d41\u7a0b\u6a21\u677fID"), - ), - ("refer_sum", models.IntegerField(verbose_name="\u5f15\u7528\u6b21\u6570")), - ], - ), - ] diff --git a/lib/pipeline/migrations/0007_templateversion.py b/lib/pipeline/migrations/0007_templateversion.py deleted file mode 100644 index a299b79..0000000 --- a/lib/pipeline/migrations/0007_templateversion.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0006_auto_20180814_1622"), - ] - - operations = [ - migrations.CreateModel( - name="TemplateVersion", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), - ( - "md5", - models.CharField( - max_length=32, db_index=True, verbose_name="\u5feb\u7167\u5b57\u7b26\u4e32\u7684md5" - ), - ), - ("date", models.DateTimeField(auto_now_add=True, verbose_name="\u6dfb\u52a0\u65e5\u671f")), - ( - "snapshot_id", - models.ForeignKey( - verbose_name="\u6a21\u677f\u6570\u636e ID", to="pipeline.Snapshot", on_delete=models.CASCADE - ), - ), - ( - "template_id", - models.ForeignKey( - to="pipeline.PipelineTemplate", - to_field="template_id", - verbose_name="\u6a21\u677f ID", - on_delete=models.CASCADE, - ), - ), - ], - ), - ] diff --git a/lib/pipeline/migrations/0008_auto_20180824_1115.py b/lib/pipeline/migrations/0008_auto_20180824_1115.py deleted file mode 100644 index 492669a..0000000 --- a/lib/pipeline/migrations/0008_auto_20180824_1115.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0007_templaterelationship"), - ("pipeline", "0007_templateversion"), - ] - - operations = [ - migrations.RenameField(model_name="templateversion", old_name="snapshot_id", new_name="snapshot",), - migrations.RemoveField(model_name="templateversion", name="template_id",), - migrations.AddField( - model_name="templateversion", - name="template", - field=models.ForeignKey( - default="", verbose_name="\u6a21\u677f ID", to="pipeline.PipelineTemplate", on_delete=models.CASCADE - ), - preserve_default=False, - ), - ] diff --git a/lib/pipeline/migrations/0011_auto_20180906_1045.py b/lib/pipeline/migrations/0011_auto_20180906_1045.py deleted file mode 100644 index 5bce064..0000000 --- a/lib/pipeline/migrations/0011_auto_20180906_1045.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0008_auto_20180824_1115"), - ] - - operations = [ - migrations.RemoveField(model_name="templaterelationship", name="refer_sum",), - migrations.AddField( - model_name="templaterelationship", - name="subprocess_node_id", - field=models.CharField(default="", max_length=32, verbose_name="\u5b50\u6d41\u7a0b\u8282\u70b9 ID"), - preserve_default=False, - ), - migrations.AddField( - model_name="templaterelationship", - name="version", - field=models.CharField(default="", max_length=32, verbose_name="\u5feb\u7167\u5b57\u7b26\u4e32\u7684md5"), - preserve_default=False, - ), - migrations.AlterField( - model_name="templaterelationship", - name="ancestor_template_id", - field=models.CharField(max_length=32, verbose_name="\u6839\u6a21\u677fID", db_index=True), - ), - ] diff --git a/lib/pipeline/migrations/0012_templatecurrentversion.py b/lib/pipeline/migrations/0012_templatecurrentversion.py deleted file mode 100644 index cd77511..0000000 --- a/lib/pipeline/migrations/0012_templatecurrentversion.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0011_auto_20180906_1045"), - ] - - operations = [ - migrations.CreateModel( - name="TemplateCurrentVersion", - fields=[ - ("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)), - ("template_id", models.CharField(max_length=32, verbose_name="\u6a21\u677fID", db_index=True)), - ( - "current_version", - models.CharField(max_length=32, verbose_name="\u5feb\u7167\u5b57\u7b26\u4e32\u7684md5"), - ), - ], - ), - ] diff --git a/lib/pipeline/migrations/0013_old_template_process.py b/lib/pipeline/migrations/0013_old_template_process.py deleted file mode 100644 index 732f303..0000000 --- a/lib/pipeline/migrations/0013_old_template_process.py +++ /dev/null @@ -1,64 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations -from django.db.models.signals import post_save - - -def reverse_func(apps, schema_editor): - pass - - -def forward_func(apps, schema_editor): - PipelineTemplate = apps.get_model("pipeline", "PipelineTemplate") - TemplateRelationship = apps.get_model("pipeline", "TemplateRelationship") - TemplateVersion = apps.get_model("pipeline", "TemplateVersion") - TemplateCurrentVersion = apps.get_model("pipeline", "TemplateCurrentVersion") - db_alias = schema_editor.connection.alias - template_list = PipelineTemplate.objects.using(db_alias).filter(is_deleted=False) - - for template in template_list: - TemplateRelationship.objects.using(db_alias).filter(ancestor_template_id=template.template_id).delete() - acts = list(template.snapshot.data["activities"].values()) - subprocess_nodes = [act for act in acts if act["type"] == "SubProcess"] - rs = [] - for sp in subprocess_nodes: - version = ( - sp.get("version") - or PipelineTemplate.objects.using(db_alias).get(template_id=sp["template_id"]).snapshot.md5sum - ) - rs.append( - TemplateRelationship( - ancestor_template_id=template.template_id, - descendant_template_id=sp["template_id"], - subprocess_node_id=sp["id"][:32], - version=version, - ) - ) - TemplateRelationship.objects.bulk_create(rs) - - versions = TemplateVersion.objects.using(db_alias).filter(template_id=template.id).order_by("-id") - if not (versions and versions[0].md5 == template.snapshot.md5sum): - TemplateVersion.objects.create(template=template, snapshot=template.snapshot, md5=template.snapshot.md5sum) - TemplateCurrentVersion.objects.update_or_create( - template_id=template.template_id, defaults={"current_version": template.snapshot.md5sum} - ) - - -class Migration(migrations.Migration): - dependencies = [ - ("pipeline", "0012_templatecurrentversion"), - ] - - operations = [migrations.RunPython(forward_func, reverse_func)] diff --git a/lib/pipeline/migrations/0014_auto_20181127_1053.py 
b/lib/pipeline/migrations/0014_auto_20181127_1053.py deleted file mode 100644 index c878dcf..0000000 --- a/lib/pipeline/migrations/0014_auto_20181127_1053.py +++ /dev/null @@ -1,39 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0013_old_template_process"), - ] - - operations = [ - migrations.AlterField( - model_name="pipelineinstance", - name="name", - field=models.CharField( - default=b"default_instance", max_length=128, verbose_name="\u5b9e\u4f8b\u540d\u79f0" - ), - ), - migrations.AlterField( - model_name="pipelinetemplate", - name="name", - field=models.CharField( - default=b"default_template", max_length=128, verbose_name="\u6a21\u677f\u540d\u79f0" - ), - ), - ] diff --git a/lib/pipeline/migrations/0015_auto_20181214_1453.py b/lib/pipeline/migrations/0015_auto_20181214_1453.py deleted file mode 100644 index 35da8fd..0000000 --- a/lib/pipeline/migrations/0015_auto_20181214_1453.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0014_auto_20181127_1053"), - ] - - operations = [ - migrations.AlterField( - model_name="pipelineinstance", - name="execution_snapshot", - field=models.ForeignKey( - related_name="execution_snapshot_instances", - verbose_name="\u7528\u4e8e\u5b9e\u4f8b\u6267\u884c\u7684\u7ed3\u6784\u6570\u636e", - to="pipeline.Snapshot", - null=True, - on_delete=models.SET_NULL, - ), - ), - migrations.AlterField( - model_name="pipelineinstance", - name="snapshot", - field=models.ForeignKey( - related_name="snapshot_instances", - verbose_name="\u5b9e\u4f8b\u7ed3\u6784\u6570\u636e\uff0c\u6307\u5411\u5b9e\u4f8b\u5bf9\u5e94\u7684\u6a21\u677f\u7684\u7ed3\u6784\u6570\u636e", - to="pipeline.Snapshot", - on_delete=models.SET_NULL, - ), - ), - migrations.AlterField( - model_name="pipelineinstance", - name="tree_info", - field=models.ForeignKey( - related_name="tree_info_instances", - verbose_name="\u63d0\u524d\u8ba1\u7b97\u597d\u7684\u4e00\u4e9b\u6d41\u7a0b\u7ed3\u6784\u6570\u636e", - to="pipeline.TreeInfo", - null=True, - on_delete=models.SET_NULL, - ), - ), - migrations.AlterField( - model_name="pipelinetemplate", - name="snapshot", - field=models.ForeignKey( - related_name="templates", - verbose_name="\u6a21\u677f\u7ed3\u6784\u6570\u636e", - to="pipeline.Snapshot", - on_delete=models.DO_NOTHING, - ), - ), - ] diff --git 
a/lib/pipeline/migrations/0016_auto_20181220_0958.py b/lib/pipeline/migrations/0016_auto_20181220_0958.py deleted file mode 100644 index 12c518c..0000000 --- a/lib/pipeline/migrations/0016_auto_20181220_0958.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0015_auto_20181214_1453"), - ] - - operations = [ - migrations.AlterField( - model_name="pipelinetemplate", - name="snapshot", - field=models.ForeignKey( - related_name="snapshot_templates", - verbose_name="\u6a21\u677f\u7ed3\u6784\u6570\u636e", - to="pipeline.Snapshot", - on_delete=models.DO_NOTHING, - ), - ), - ] diff --git a/lib/pipeline/migrations/0017_pipelinetemplate_has_subprocess.py b/lib/pipeline/migrations/0017_pipelinetemplate_has_subprocess.py deleted file mode 100644 index 1a73b38..0000000 --- a/lib/pipeline/migrations/0017_pipelinetemplate_has_subprocess.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0016_auto_20181220_0958"), - ] - - operations = [ - migrations.AddField( - model_name="pipelinetemplate", - name="has_subprocess", - field=models.BooleanField(default=False, verbose_name="\u662f\u5426\u542b\u6709\u5b50\u6d41\u7a0b"), - ), - ] diff --git a/lib/pipeline/migrations/0018_set_has_subprocess.py b/lib/pipeline/migrations/0018_set_has_subprocess.py deleted file mode 100644 index 42dd1ae..0000000 --- a/lib/pipeline/migrations/0018_set_has_subprocess.py +++ /dev/null @@ -1,39 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations - -from pipeline.core.constants import PE - - -def reverse_func(apps, schema_editor): - pass - - -def forward_func(apps, schema_editor): - PipelineTemplate = apps.get_model("pipeline", "PipelineTemplate") - - for template in PipelineTemplate.objects.all(): - if not template.is_deleted: - acts = list(template.snapshot.data[PE.activities].values()) - template.has_subprocess = any([act for act in acts if act["type"] == PE.SubProcess]) - template.save() - - -class Migration(migrations.Migration): - dependencies = [ - ("pipeline", "0017_pipelinetemplate_has_subprocess"), - ] - - operations = [migrations.RunPython(forward_func, reverse_func)] diff --git a/lib/pipeline/migrations/0019_delete_variablemodel.py b/lib/pipeline/migrations/0019_delete_variablemodel.py deleted file mode 100644 index d7e677b..0000000 --- a/lib/pipeline/migrations/0019_delete_variablemodel.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0018_set_has_subprocess"), - ] - - operations = [ - migrations.DeleteModel(name="VariableModel",), - ] diff --git a/lib/pipeline/migrations/0020_auto_20190906_1119.py b/lib/pipeline/migrations/0020_auto_20190906_1119.py deleted file mode 100644 index 9e4be34..0000000 --- a/lib/pipeline/migrations/0020_auto_20190906_1119.py +++ /dev/null @@ -1,37 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0019_delete_variablemodel"), - ] - - operations = [ - migrations.AlterField( - model_name="pipelineinstance", - name="template", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - to="pipeline.PipelineTemplate", - verbose_name="Pipeline\u6a21\u677f", - ), - ), - ] diff --git a/lib/pipeline/migrations/0021_auto_20190906_1143.py b/lib/pipeline/migrations/0021_auto_20190906_1143.py deleted file mode 100644 index 6ecd8ed..0000000 --- a/lib/pipeline/migrations/0021_auto_20190906_1143.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models -import django.db.models.deletion - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0020_auto_20190906_1119"), - ] - - operations = [ - migrations.AlterField( - model_name="pipelineinstance", - name="execution_snapshot", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name="execution_snapshot_instances", - to="pipeline.Snapshot", - verbose_name="\u7528\u4e8e\u5b9e\u4f8b\u6267\u884c\u7684\u7ed3\u6784\u6570\u636e", - ), - ), - migrations.AlterField( - model_name="pipelineinstance", - name="snapshot", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name="snapshot_instances", - to="pipeline.Snapshot", - verbose_name="\u5b9e\u4f8b\u7ed3\u6784\u6570\u636e\uff0c\u6307\u5411\u5b9e\u4f8b\u5bf9\u5e94\u7684\u6a21\u677f\u7684\u7ed3\u6784\u6570\u636e", - ), - ), - migrations.AlterField( - model_name="pipelineinstance", - name="tree_info", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.CASCADE, - related_name="tree_info_instances", - to="pipeline.TreeInfo", - verbose_name="\u63d0\u524d\u8ba1\u7b97\u597d\u7684\u4e00\u4e9b\u6d41\u7a0b\u7ed3\u6784\u6570\u636e", - ), - ), - ] diff --git a/lib/pipeline/migrations/0022_pipelineinstance_is_revoked.py b/lib/pipeline/migrations/0022_pipelineinstance_is_revoked.py deleted file mode 100644 index c261433..0000000 --- a/lib/pipeline/migrations/0022_pipelineinstance_is_revoked.py +++ /dev/null @@ -1,31 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from __future__ import unicode_literals - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0021_auto_20190906_1143"), - ] - - operations = [ - migrations.AddField( - model_name="pipelineinstance", - name="is_revoked", - field=models.BooleanField(default=False, verbose_name="\u662f\u5426\u5df2\u7ecf\u64a4\u9500"), - ), - ] diff --git a/lib/pipeline/migrations/0023_set_is_revoked.py b/lib/pipeline/migrations/0023_set_is_revoked.py deleted file mode 100644 index 21d85a8..0000000 --- a/lib/pipeline/migrations/0023_set_is_revoked.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from __future__ import unicode_literals - -from django.db import migrations - -from pipeline.engine import states - - -def reverse_func(apps, schema_editor): - pass - - -def forward_func(apps, schema_editor): - PipelineInstance = apps.get_model("pipeline", "PipelineInstance") - Status = apps.get_model("engine", "Status") - - revoked_status = Status.objects.filter(state=states.REVOKED).values("id", "archived_time") - id_to_time = {status["id"]: status["archived_time"] for status in revoked_status} - instances = PipelineInstance.objects.filter(instance_id__in=list(id_to_time.keys())) - for inst in instances: - inst.finish_time = id_to_time[inst.instance_id] - inst.is_revoked = True - inst.save() - - -class Migration(migrations.Migration): - dependencies = [ - ("pipeline", "0022_pipelineinstance_is_revoked"), - ] - - operations = [migrations.RunPython(forward_func, reverse_func)] diff --git a/lib/pipeline/migrations/0024_auto_20200213_0738.py b/lib/pipeline/migrations/0024_auto_20200213_0738.py deleted file mode 100644 index 814aac1..0000000 --- a/lib/pipeline/migrations/0024_auto_20200213_0738.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.23 on 2020-02-13 07:38 -from __future__ import unicode_literals - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0023_set_is_revoked"), - ] - - operations = [ - migrations.AlterField( - model_name="pipelineinstance", - name="execution_snapshot", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="execution_snapshot_instances", - to="pipeline.Snapshot", - verbose_name="用于实例执行的结构数据", - ), - ), - migrations.AlterField( - model_name="pipelineinstance", - name="name", - field=models.CharField(default="default_instance", max_length=128, verbose_name="实例名称"), - ), - migrations.AlterField( - 
model_name="pipelineinstance", - name="snapshot", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="snapshot_instances", - to="pipeline.Snapshot", - verbose_name="实例结构数据,指向实例对应的模板的结构数据", - ), - ), - migrations.AlterField( - model_name="pipelineinstance", - name="tree_info", - field=models.ForeignKey( - blank=True, - null=True, - on_delete=django.db.models.deletion.SET_NULL, - related_name="tree_info_instances", - to="pipeline.TreeInfo", - verbose_name="提前计算好的一些流程结构数据", - ), - ), - migrations.AlterField( - model_name="pipelinetemplate", - name="name", - field=models.CharField(default="default_template", max_length=128, verbose_name="模板名称"), - ), - ] diff --git a/lib/pipeline/migrations/0025_auto_20200813_1216.py b/lib/pipeline/migrations/0025_auto_20200813_1216.py deleted file mode 100644 index 0ad908a..0000000 --- a/lib/pipeline/migrations/0025_auto_20200813_1216.py +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.29 on 2020-08-13 04:16 -from __future__ import unicode_literals - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0024_auto_20200213_0738"), - ] - - operations = [ - migrations.AlterField( - model_name="snapshot", - name="md5sum", - field=models.CharField(db_index=True, max_length=32, verbose_name="快照字符串的md5sum"), - ), - ] diff --git a/lib/pipeline/migrations/0026_auto_20201028_1049.py b/lib/pipeline/migrations/0026_auto_20201028_1049.py deleted file mode 100644 index 284ed9b..0000000 --- a/lib/pipeline/migrations/0026_auto_20201028_1049.py +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.29 on 2020-10-28 02:49 -from __future__ import unicode_literals - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0025_auto_20200813_1216"), - ] - - operations = [ - 
migrations.AlterField( - model_name="pipelinetemplate", - name="name", - field=models.CharField(db_index=True, default="default_template", max_length=128, verbose_name="模板名称"), - ), - ] diff --git a/lib/pipeline/migrations/0027_auto_20201123_1552.py b/lib/pipeline/migrations/0027_auto_20201123_1552.py deleted file mode 100644 index a9d39f6..0000000 --- a/lib/pipeline/migrations/0027_auto_20201123_1552.py +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.29 on 2020-11-23 07:52 -from __future__ import unicode_literals - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0026_auto_20201028_1049"), - ] - - operations = [ - migrations.AlterField( - model_name="pipelineinstance", - name="instance_id", - field=models.CharField(db_index=True, max_length=32, unique=True, verbose_name="实例ID"), - ), - ] diff --git a/lib/pipeline/migrations/0028_auto_20201227_1952.py b/lib/pipeline/migrations/0028_auto_20201227_1952.py deleted file mode 100644 index 0294168..0000000 --- a/lib/pipeline/migrations/0028_auto_20201227_1952.py +++ /dev/null @@ -1,25 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by Django 1.11.29 on 2020-12-27 11:52 -from __future__ import unicode_literals - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0027_auto_20201123_1552"), - ] - - operations = [ - migrations.AlterField( - model_name="pipelinetemplate", - name="create_time", - field=models.DateTimeField(auto_now_add=True, db_index=True, verbose_name="创建时间"), - ), - migrations.AlterField( - model_name="pipelinetemplate", - name="edit_time", - field=models.DateTimeField(auto_now=True, db_index=True, verbose_name="修改时间"), - ), - ] diff --git a/lib/pipeline/migrations/0029_templaterelationship_always_use_latest.py b/lib/pipeline/migrations/0029_templaterelationship_always_use_latest.py deleted file mode 100644 index d4c665e..0000000 
--- a/lib/pipeline/migrations/0029_templaterelationship_always_use_latest.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 2.2.19 on 2021-06-07 09:23 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0028_auto_20201227_1952"), - ] - - operations = [ - migrations.AddField( - model_name="templaterelationship", - name="always_use_latest", - field=models.BooleanField(default=False, verbose_name="是否永远使用最新版本"), - ), - ] diff --git a/lib/pipeline/migrations/0030_auto_20210607_1210.py b/lib/pipeline/migrations/0030_auto_20210607_1210.py deleted file mode 100644 index 475af6c..0000000 --- a/lib/pipeline/migrations/0030_auto_20210607_1210.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 2.2.19 on 2021-06-07 12:10 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0029_templaterelationship_always_use_latest"), - ] - - operations = [ - migrations.AlterField( - model_name="templaterelationship", - name="descendant_template_id", - field=models.CharField(db_index=True, max_length=32, verbose_name="子流程模板ID"), - ), - ] diff --git a/lib/pipeline/migrations/0031_auto_20210624_2317.py b/lib/pipeline/migrations/0031_auto_20210624_2317.py deleted file mode 100644 index 012a605..0000000 --- a/lib/pipeline/migrations/0031_auto_20210624_2317.py +++ /dev/null @@ -1,23 +0,0 @@ -# Generated by Django 2.2.16 on 2021-06-24 15:17 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ("pipeline", "0030_auto_20210607_1210"), - ] - - operations = [ - migrations.AddField( - model_name="pipelineinstance", - name="is_expired", - field=models.BooleanField(default=False, help_text="运行时被定期清理即为过期", verbose_name="是否已经过期"), - ), - migrations.AlterField( - model_name="pipelineinstance", - name="create_time", - field=models.DateTimeField(auto_now_add=True, db_index=True, 
verbose_name="创建时间"), - ), - ] diff --git a/lib/pipeline/migrations/0032_templatescheme_subprocess_scheme_relation.py b/lib/pipeline/migrations/0032_templatescheme_subprocess_scheme_relation.py deleted file mode 100644 index 4fa1528..0000000 --- a/lib/pipeline/migrations/0032_templatescheme_subprocess_scheme_relation.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 2.2.24 on 2022-01-12 14:21 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('pipeline', '0031_auto_20210624_2317'), - ] - - operations = [ - migrations.AddField( - model_name='templatescheme', - name='subprocess_scheme_relation', - field=models.ManyToManyField(to='pipeline.TemplateRelationship', verbose_name='子流程节点引用执行方案的关系'), - ), - ] diff --git a/lib/pipeline/migrations/0033_logentry_signal.py b/lib/pipeline/migrations/0033_logentry_signal.py deleted file mode 100644 index 8c7b293..0000000 --- a/lib/pipeline/migrations/0033_logentry_signal.py +++ /dev/null @@ -1,36 +0,0 @@ -# Generated by Django 2.2.6 on 2022-01-27 08:30 - -from django.db import migrations, models -import pipeline.django_signal_valve.models - - -class Migration(migrations.Migration): - - dependencies = [ - ('pipeline', '0032_templatescheme_subprocess_scheme_relation'), - ] - - operations = [ - migrations.CreateModel( - name='LogEntry', - fields=[ - ('id', models.BigAutoField(primary_key=True, serialize=False, verbose_name='ID')), - ('logger_name', models.SlugField(max_length=128, verbose_name='logger 名称')), - ('level_name', models.SlugField(max_length=32, verbose_name='日志等级')), - ('message', models.TextField(null=True, verbose_name='日志内容')), - ('exception', models.TextField(null=True, verbose_name='异常信息')), - ('logged_at', models.DateTimeField(auto_now_add=True, db_index=True, verbose_name='输出时间')), - ('node_id', models.CharField(db_index=True, max_length=32, verbose_name='节点 ID')), - ('history_id', models.IntegerField(default=-1, verbose_name='节点执行历史 ID')), - ], - 
), - migrations.CreateModel( - name='Signal', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('module_path', models.TextField(verbose_name='信号模块名')), - ('name', models.CharField(max_length=64, verbose_name='信号属性名')), - ('kwargs', pipeline.django_signal_valve.models.IOField(verbose_name='信号参数')), - ], - ), - ] diff --git a/lib/pipeline/migrations/__init__.py b/lib/pipeline/migrations/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/migrations/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/models.py b/lib/pipeline/models.py deleted file mode 100644 index 5a5a388..0000000 --- a/lib/pipeline/models.py +++ /dev/null @@ -1,802 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import copy -import hashlib -import logging -import queue -import zlib - -import ujson as json -from django.db import models, transaction -from django.utils import timezone -from django.utils.module_loading import import_string -from django.utils.translation import ugettext_lazy as _ - -from pipeline.conf import settings -from pipeline.constants import PIPELINE_DEFAULT_PRIORITY -from pipeline.core.constants import PE -from pipeline.signals import post_pipeline_finish, post_pipeline_revoke -from pipeline.engine.utils import ActionResult, calculate_elapsed_time -from pipeline.exceptions import SubprocessRefError -from pipeline.parser.context import get_pipeline_context -from pipeline.parser.utils import replace_all_id -from pipeline.service import task_service -from pipeline.utils.graph import Graph -from pipeline.utils.uniqid import node_uniqid, uniqid - -MAX_LEN_OF_NAME = 128 -logger = logging.getLogger("root") - - -class CompressJSONField(models.BinaryField): - def __init__(self, compress_level=6, *args, **kwargs): - super(CompressJSONField, self).__init__(*args, **kwargs) - self.compress_level = compress_level - - def get_prep_value(self, value): - value = super(CompressJSONField, self).get_prep_value(value) - return zlib.compress(json.dumps(value).encode("utf-8"), self.compress_level) - - def to_python(self, value): - value = super(CompressJSONField, self).to_python(value) - return json.loads(zlib.decompress(value).decode("utf-8")) - - def from_db_value(self, value, expression, connection, context=None): - return self.to_python(value) - - -class 
SnapshotManager(models.Manager): - def create_snapshot(self, data): - h = hashlib.md5() - h.update(json.dumps(data).encode("utf-8")) - snapshot = self.create(md5sum=h.hexdigest(), data=data) - return snapshot - - def data_for_snapshot(self, snapshot_id): - return self.get(id=snapshot_id).data - - -class Snapshot(models.Model): - """ - 数据快照 - """ - - md5sum = models.CharField(_("快照字符串的md5sum"), max_length=32, db_index=True) - create_time = models.DateTimeField(_("创建时间"), auto_now_add=True) - data = CompressJSONField(null=True, blank=True) - - objects = SnapshotManager() - - class Meta: - verbose_name = _("模板快照") - verbose_name_plural = _("模板快照") - ordering = ["-id"] - app_label = "pipeline" - - def __unicode__(self): - return str(self.md5sum) - - def has_change(self, data): - """ - 检测 data 的 md5 是否和当前存储的不一致 - @param data: - @return: 新的 md5,md5 是否有变化 - """ - h = hashlib.md5() - h.update(json.dumps(data).encode("utf-8")) - md5 = h.hexdigest() - return md5, self.md5sum != md5 - - -class TreeInfo(models.Model): - """ - pipeline 数据信息 - """ - - data = CompressJSONField(null=True, blank=True) - - -def get_subprocess_act_list(pipeline_data): - """ - 获取 pipeline 结构中所有的子流程节点 - @param pipeline_data: 流程结构数据 - @return: 子流程节点 - """ - activities = pipeline_data[PE.activities] - act_ids = [act_id for act_id in activities if activities[act_id][PE.type] == PE.SubProcess] - return [activities[act_id] for act_id in act_ids] - - -def _act_id_in_graph(act): - """ - 获取子流程节点引用的模板 ID - @param act: 子流程节点 - @return: 模板 ID:版本 或 模板ID - """ - return "{}:{}".format(act["template_id"], act["version"]) if act.get("version") else act["template_id"] - - -class TemplateManager(models.Manager): - def subprocess_ref_validate(self, data, root_id=None, root_name=None): - """ - 验证子流程引用是否合法 - @param data: - @param root_id: - @param root_name: - @return: 引用是否合法,相关信息 - """ - try: - sub_refs, name_map = self.construct_subprocess_ref_graph(data, root_id=root_id, root_name=root_name) - except 
PipelineTemplate.DoesNotExist as e: - return False, str(e) - - nodes = list(sub_refs.keys()) - flows = [] - for node in nodes: - for ref in sub_refs[node]: - if ref in nodes: - flows.append([node, ref]) - graph = Graph(nodes, flows) - # circle reference check - trace = graph.get_cycle() - if trace: - name_trace = " → ".join([name_map[proc_id] for proc_id in trace]) - return False, _("子流程引用链中存在循环引用:%s") % name_trace - - return True, "" - - def create_model(self, structure_data, **kwargs): - """ - 创建流程模板对象 - @param structure_data: pipeline 结构数据 - @param kwargs: 其他参数 - @return: 流程模板 - """ - result, msg = self.subprocess_ref_validate(structure_data) - - if not result: - raise SubprocessRefError(msg) - - snapshot = Snapshot.objects.create_snapshot(structure_data) - kwargs["snapshot"] = snapshot - kwargs["template_id"] = node_uniqid() - obj = self.create(**kwargs) - # version track - # TemplateVersion.objects.track(obj) - - return obj - - def delete_model(self, template_ids): - """ - 删除模板对象 - @param template_ids: 模板对象 ID 列表或 ID - @return: - """ - if not isinstance(template_ids, list): - template_ids = [template_ids] - qs = self.filter(template_id__in=template_ids) - for template in qs: - template.is_deleted = True - template.name = uniqid() - template.save() - - def construct_subprocess_ref_graph(self, pipeline_data, root_id=None, root_name=None): - """ - 构造子流程引用图 - @param pipeline_data: pipeline 结构数据 - @param root_id: 所有引用开始的根流程 ID - @param root_name: 根流程名 - @return: 子流程引用图,模板 ID -> 模板姓名映射字典 - """ - subprocess_act = get_subprocess_act_list(pipeline_data) - tid_queue = queue.Queue() - graph = {} - version = {} - name_map = {} - - if root_id: - graph[root_id] = [_act_id_in_graph(act) for act in subprocess_act] - name_map[root_id] = root_name - - for act in subprocess_act: - tid_queue.put(_act_id_in_graph(act)) - version[_act_id_in_graph(act)] = act.get("version") - - while not tid_queue.empty(): - tid = tid_queue.get() - template = self.get(template_id=tid.split(":")[0]) 
- name_map[tid] = template.name - subprocess_act = get_subprocess_act_list(template.data_for_version(version[tid])) - - for act in subprocess_act: - ref_tid = _act_id_in_graph(act) - graph.setdefault(tid, []).append(ref_tid) - version[_act_id_in_graph(act)] = act.get("version") - if ref_tid not in graph: - tid_queue.put(ref_tid) - if not subprocess_act: - graph[tid] = [] - - return graph, name_map - - def unfold_subprocess(self, pipeline_data): - """ - 展开 pipeline 数据中所有的子流程 - @param pipeline_data: pipeline 数据 - @return: - """ - id_maps = replace_all_id(pipeline_data) - activities = pipeline_data[PE.activities] - for act_id, act in list(activities.items()): - if act[PE.type] == PE.SubProcess: - subproc_data = self.get(template_id=act[PE.template_id]).data_for_version(act.get(PE.version)) - - sub_id_maps = self.unfold_subprocess(subproc_data) - # act_id is new id - id_maps[PE.subprocess_detail].update({act_id: sub_id_maps}) - - subproc_data[PE.id] = act_id - act[PE.pipeline] = subproc_data - return id_maps - - def replace_id(self, pipeline_data): - """ - 替换 pipeline 中所有 ID - @param pipeline_data: pipeline 数据 - @return: - """ - id_maps = replace_all_id(pipeline_data) - activities = pipeline_data[PE.activities] - for act_id, act in list(activities.items()): - if act[PE.type] == PE.SubProcess: - subproc_data = act[PE.pipeline] - sub_id_maps = self.replace_id(subproc_data) - # act_id is new id - id_maps[PE.subprocess_detail].update({act_id: sub_id_maps}) - - subproc_data[PE.id] = act_id - act[PE.pipeline] = subproc_data - return id_maps - - -class PipelineTemplate(models.Model): - """ - 流程模板 - """ - - template_id = models.CharField(_("模板ID"), max_length=32, unique=True) - name = models.CharField(_("模板名称"), max_length=MAX_LEN_OF_NAME, default="default_template", db_index=True) - create_time = models.DateTimeField(_("创建时间"), auto_now_add=True, db_index=True) - creator = models.CharField(_("创建者"), max_length=32) - description = models.TextField(_("描述"), null=True, 
blank=True) - editor = models.CharField(_("修改者"), max_length=32, null=True, blank=True) - edit_time = models.DateTimeField(_("修改时间"), auto_now=True, db_index=True) - snapshot = models.ForeignKey( - Snapshot, verbose_name=_("模板结构数据"), related_name="snapshot_templates", on_delete=models.DO_NOTHING - ) - has_subprocess = models.BooleanField(_("是否含有子流程"), default=False) - is_deleted = models.BooleanField(_("是否删除"), default=False, help_text=_("表示当前模板是否删除")) - - objects = TemplateManager() - - class Meta: - verbose_name = _("Pipeline模板") - verbose_name_plural = _("Pipeline模板") - ordering = ["-edit_time"] - app_label = "pipeline" - - def __unicode__(self): - return "{}-{}".format(self.template_id, self.name) - - @property - def data(self): - return self.snapshot.data - - @property - def version(self): - return self.snapshot.md5sum - - @property - def subprocess_version_info(self): - # 1. get all subprocess - subprocess_info = TemplateRelationship.objects.get_subprocess_info(self.template_id).values( - "descendant_template_id", "subprocess_node_id", "version", "always_use_latest" - ) - info = {"subproc_has_update": False, "details": []} - if not subprocess_info: - return info - - # 2. check whether subprocess is expired - temp_current_versions = { - item.template_id: item - for item in TemplateCurrentVersion.objects.filter( - template_id__in=[item["descendant_template_id"] for item in subprocess_info] - ) - } - - expireds = [] - for item in subprocess_info: - item["expired"] = ( - False - if item["version"] is None - or item["descendant_template_id"] not in temp_current_versions - or item["always_use_latest"] - else (item["version"] != temp_current_versions[item["descendant_template_id"]].current_version) - ) - info["details"].append(item) - expireds.append(item["expired"]) - - info["subproc_has_update"] = any(expireds) - - # 3. 
return - return info - - @property - def subprocess_has_update(self): - return self.subprocess_version_info["subproc_has_update"] - - def data_for_version(self, version): - """ - 获取某个版本的模板数据 - @param version: 版本号 - @return: 模板数据 - """ - if not version: - return self.data - return Snapshot.objects.filter(md5sum=version).order_by("-id").first().data - - def referencer(self): - """ - 获取引用了该模板的其他模板 - @return: 引用了该模板的其他模板 ID 列表 - """ - referencer = TemplateRelationship.objects.referencer(self.template_id) - template_id = self.__class__.objects.filter(template_id__in=referencer, is_deleted=False).values_list( - "template_id", flat=True - ) - return list(template_id) - - def clone_data(self): - """ - 获取该模板数据的克隆 - @return: ID 替换过后的模板数据 - """ - data = self.data - replace_all_id(self.data) - return data - - def update_template(self, structure_data, **kwargs): - """ - 更新当前模板的模板数据 - @param structure_data: pipeline 结构数据 - @param kwargs: 其他参数 - @return: - """ - result, msg = PipelineTemplate.objects.subprocess_ref_validate(structure_data, self.template_id, self.name) - if not result: - raise SubprocessRefError(msg) - - snapshot = Snapshot.objects.create_snapshot(structure_data) - kwargs["snapshot"] = snapshot - kwargs["edit_time"] = timezone.now() - exclude_keys = ["template_id", "creator", "create_time", "is_deleted"] - for key in exclude_keys: - kwargs.pop(key, None) - for key, value in list(kwargs.items()): - setattr(self, key, value) - self.save() - - def gen_instance(self, inputs=None, **kwargs): - """ - 使用该模板创建实例 - @param inputs: 自定义输入 - @param kwargs: 其他参数 - @return: 实例对象 - """ - instance, _ = PipelineInstance.objects.create_instance( - template=self, exec_data=copy.deepcopy(self.data), inputs=inputs, **kwargs - ) - return instance - - def set_has_subprocess_bit(self): - acts = list(self.data[PE.activities].values()) - self.has_subprocess = any([act["type"] == PE.SubProcess for act in acts]) - - -class TemplateRelationShipManager(models.Manager): - def 
get_subprocess_info(self, template_id): - """ - 获取某个模板中所有的子流程信息 - @param template_id: 模板 ID - @return: 该模板所引用的子流程相关信息 - """ - return self.filter(ancestor_template_id=template_id) - - def referencer(self, template_id): - """ - 获取引用了某个模板的其他模板 - @param template_id: 被引用的模板 - @return: 引用了该模板的其他模板 ID 列表 - """ - return list(set(self.filter(descendant_template_id=template_id).values_list("ancestor_template_id", flat=True))) - - -class TemplateRelationship(models.Model): - """ - 流程模板引用关系:直接引用 - """ - - ancestor_template_id = models.CharField(_("根模板ID"), max_length=32, db_index=True) - descendant_template_id = models.CharField(_("子流程模板ID"), max_length=32, null=False, db_index=True) - subprocess_node_id = models.CharField(_("子流程节点 ID"), max_length=32, null=False) - version = models.CharField(_("快照字符串的md5"), max_length=32, null=False) - always_use_latest = models.BooleanField(_("是否永远使用最新版本"), default=False) - - objects = TemplateRelationShipManager() - - -class TemplateCurrentVersionManager(models.Manager): - def update_current_version(self, template): - """ - 更新某个模板的当前版本 - @param template: 模板对象 - @return: 记录模板当前版本的对象 - """ - obj, __ = self.update_or_create( - template_id=template.template_id, defaults={"current_version": template.version} - ) - return obj - - -class TemplateCurrentVersion(models.Model): - """ - 记录流程模板当前版本的表 - """ - - template_id = models.CharField(_("模板ID"), max_length=32, db_index=True) - current_version = models.CharField(_("快照字符串的md5"), max_length=32, null=False) - - objects = TemplateCurrentVersionManager() - - -class TemplateVersionManager(models.Manager): - def track(self, template): - """ - 记录模板的版本号 - @param template: 被记录模板 - @return: 版本跟踪对象 - """ - if not template.snapshot: - return None - - # don't track if latest version is same as current version - versions = self.filter(template_id=template.id).order_by("-id") - if versions and versions[0].md5 == template.snapshot.md5sum: - return versions[0] - - return self.create(template=template, 
snapshot=template.snapshot, md5=template.snapshot.md5sum) - - -class TemplateVersion(models.Model): - """ - 模板版本号记录节点 - """ - - template = models.ForeignKey(PipelineTemplate, verbose_name=_("模板 ID"), null=False, on_delete=models.CASCADE) - snapshot = models.ForeignKey(Snapshot, verbose_name=_("模板数据 ID"), null=False, on_delete=models.CASCADE) - md5 = models.CharField(_("快照字符串的md5"), max_length=32, db_index=True) - date = models.DateTimeField(_("添加日期"), auto_now_add=True) - - objects = TemplateVersionManager() - - -class TemplateScheme(models.Model): - """ - 模板执行方案 - """ - - template = models.ForeignKey( - PipelineTemplate, verbose_name=_("对应模板 ID"), null=False, blank=False, on_delete=models.CASCADE - ) - unique_id = models.CharField(_("方案唯一ID"), max_length=97, unique=True, null=False, blank=True) - name = models.CharField(_("方案名称"), max_length=64, null=False, blank=False) - edit_time = models.DateTimeField(_("修改时间"), auto_now=True) - data = CompressJSONField(verbose_name=_("方案数据")) - - subprocess_scheme_relation = models.ManyToManyField(verbose_name=_("子流程节点引用执行方案的关系"), to=TemplateRelationship) - - -class InstanceManager(models.Manager): - def create_instance(self, template, exec_data, spread=False, inputs=None, **kwargs): - """ - 创建流程实例对象 - @param template: 流程模板 - @param exec_data: 执行用流程数据 - @param spread: exec_data 是否已经展开 - @param kwargs: 其他参数 - @param inputs: 自定义输入 - @return: 实例对象 - """ - if not spread: - id_maps = PipelineTemplate.objects.unfold_subprocess(exec_data) - else: - id_maps = PipelineTemplate.objects.replace_id(exec_data) - - inputs = inputs or {} - - for key, val in list(inputs.items()): - if key in exec_data["data"]["inputs"]: - exec_data["data"]["inputs"][key]["value"] = val - - instance_id = node_uniqid() - exec_data["id"] = instance_id - exec_snapshot = Snapshot.objects.create_snapshot(exec_data) - TreeInfo.objects.create() - if template is not None: - kwargs["template"] = template - kwargs["snapshot_id"] = template.snapshot.id - 
kwargs["instance_id"] = instance_id - kwargs["execution_snapshot_id"] = exec_snapshot.id - return self.create(**kwargs), id_maps - - def delete_model(self, instance_ids): - """ - 删除流程实例对象 - @param instance_ids: 实例 ID 或 ID 列表 - @return: - """ - if not isinstance(instance_ids, list): - instance_ids = [instance_ids] - qs = self.filter(instance_id__in=instance_ids) - for instance in qs: - instance.is_deleted = True - instance.name = uniqid() - instance.save() - - def set_started(self, instance_id, executor): - """ - 将实例的状态设置为已开始 - @param instance_id: 实例 ID - @param executor: 执行者 - @return: - """ - self.filter(instance_id=instance_id).update(start_time=timezone.now(), is_started=True, executor=executor) - - def set_finished(self, instance_id): - """ - 将实例的状态设置为已完成 - @param instance_id: 实例 ID - @return: - """ - self.filter(instance_id=instance_id).update(finish_time=timezone.now(), is_finished=True) - post_pipeline_finish.send(sender=PipelineInstance, instance_id=instance_id) - - def set_revoked(self, instance_id): - """ - 将实例的状态设置为已撤销 - @param instance_id: 实例 ID - @return: - """ - self.filter(instance_id=instance_id).update(finish_time=timezone.now(), is_revoked=True) - post_pipeline_revoke.send(sender=PipelineInstance, instance_id=instance_id) - - -class PipelineInstance(models.Model): - """ - 流程实例对象 - """ - - instance_id = models.CharField(_("实例ID"), max_length=32, unique=True, db_index=True) - template = models.ForeignKey( - PipelineTemplate, verbose_name=_("Pipeline模板"), null=True, blank=True, on_delete=models.SET_NULL - ) - name = models.CharField(_("实例名称"), max_length=MAX_LEN_OF_NAME, default="default_instance") - creator = models.CharField(_("创建者"), max_length=32, blank=True) - create_time = models.DateTimeField(_("创建时间"), auto_now_add=True, db_index=True) - executor = models.CharField(_("执行者"), max_length=32, blank=True) - start_time = models.DateTimeField(_("启动时间"), null=True, blank=True) - finish_time = models.DateTimeField(_("结束时间"), null=True, blank=True) - 
description = models.TextField(_("描述"), blank=True) - is_started = models.BooleanField(_("是否已经启动"), default=False) - is_finished = models.BooleanField(_("是否已经完成"), default=False) - is_revoked = models.BooleanField(_("是否已经撤销"), default=False) - is_deleted = models.BooleanField(_("是否已经删除"), default=False, help_text=_("表示当前实例是否删除")) - is_expired = models.BooleanField(_("是否已经过期"), default=False, help_text=_("运行时被定期清理即为过期")) - snapshot = models.ForeignKey( - Snapshot, - blank=True, - null=True, - related_name="snapshot_instances", - verbose_name=_("实例结构数据,指向实例对应的模板的结构数据"), - on_delete=models.SET_NULL, - ) - execution_snapshot = models.ForeignKey( - Snapshot, - blank=True, - null=True, - related_name="execution_snapshot_instances", - verbose_name=_("用于实例执行的结构数据"), - on_delete=models.SET_NULL, - ) - tree_info = models.ForeignKey( - TreeInfo, - blank=True, - null=True, - related_name="tree_info_instances", - verbose_name=_("提前计算好的一些流程结构数据"), - on_delete=models.SET_NULL, - ) - - objects = InstanceManager() - - class Meta: - verbose_name = _("Pipeline实例") - verbose_name_plural = _("Pipeline实例") - ordering = ["-create_time"] - app_label = "pipeline" - - def __unicode__(self): - return "{}-{}".format(self.instance_id, self.name) - - @property - def data(self): - return self.snapshot.data - - @property - def execution_data(self): - return self.execution_snapshot.data - - @property - def node_id_set(self): - if not self.tree_info: - self.calculate_tree_info(save=True) - return set(self.tree_info.data["node_id_set"]) - - @property - def elapsed_time(self): - return calculate_elapsed_time(self.start_time, self.finish_time) - - def set_execution_data(self, data): - """ - 设置实例的执行用流程数据 - @param data: 执行用流程数据 - @return: - """ - self.execution_snapshot.data = data - self.execution_snapshot.save() - - def _replace_id(self, exec_data): - """ - 替换执行用流程数据中的所有 ID - @param exec_data: 执行用流程数据 - @return: - """ - replace_all_id(exec_data) - activities = exec_data[PE.activities] - for act_id, 
act in list(activities.items()): - if act[PE.type] == PE.SubProcess: - self._replace_id(act["pipeline"]) - act["pipeline"]["id"] = act_id - - def clone(self, creator, **kwargs): - """ - 返回当前实例对象的克隆 - @param creator: 创建者 - @param kwargs: 其他参数 - @return: 当前实例对象的克隆 - """ - name = kwargs.get("name") or timezone.localtime(timezone.now()).strftime("clone%Y%m%d%H%m%S") - instance_id = node_uniqid() - - exec_data = self.execution_data - self._replace_id(exec_data) - # replace root id - exec_data["id"] = instance_id - new_snapshot = Snapshot.objects.create_snapshot(exec_data) - - return self.__class__.objects.create( - template=self.template, - instance_id=instance_id, - name=name, - creator=creator, - description=self.description, - snapshot=self.snapshot, - execution_snapshot=new_snapshot, - ) - - def start(self, executor, check_workers=True, priority=PIPELINE_DEFAULT_PRIORITY, queue=""): - """ - 启动当前流程 - @param executor: 执行者 - @param check_workers: 是否检测 worker 的状态 - @return: 执行结果 - """ - - with transaction.atomic(): - instance = self.__class__.objects.select_for_update().get(id=self.id) - if instance.is_started: - return ActionResult(result=False, message="pipeline instance already started.") - - pipeline_data = instance.execution_data - - try: - parser_cls = import_string(settings.PIPELINE_PARSER_CLASS) - except ImportError: - return ActionResult(result=False, message="invalid parser class: %s" % settings.PIPELINE_PARSER_CLASS) - - instance.start_time = timezone.now() - instance.is_started = True - instance.executor = executor - - parser = parser_cls(pipeline_data) - pipeline = parser.parse( - root_pipeline_data=get_pipeline_context( - instance, obj_type="instance", data_type="data", username=executor - ), - root_pipeline_context=get_pipeline_context( - instance, obj_type="instance", data_type="context", username=executor - ), - ) - - # calculate tree info - instance.calculate_tree_info() - - instance.save() - - act_result = task_service.run_pipeline(pipeline, 
check_workers=check_workers, priority=priority, queue=queue) - - if not act_result.result: - with transaction.atomic(): - instance = self.__class__.objects.select_for_update().get(id=self.id) - instance.start_time = None - instance.is_started = False - instance.executor = "" - instance.save() - - return act_result - - def _get_node_id_set(self, node_id_set, data): - """ - 递归获取当前实例中所有节点的 ID(包括子流程中的节点) - @param node_id_set: 节点 ID 集合 - @param data: 流程数据 - @return: - """ - node_id_set.add(data[PE.start_event]["id"]) - node_id_set.add(data[PE.end_event]["id"]) - for gid in data[PE.gateways]: - node_id_set.add(gid) - for aid, act_data in list(data[PE.activities].items()): - node_id_set.add(aid) - if act_data[PE.type] == PE.SubProcess: - self._get_node_id_set(node_id_set, act_data["pipeline"]) - - def calculate_tree_info(self, save=False): - """ - 计算当前流程实例执行用流程数据中的一些基本信息 - @param save: 是否在计算完后保存实例对象 - @return: - """ - self.tree_info = TreeInfo.objects.create() - node_id_set = set({}) - - # get node id set - self._get_node_id_set(node_id_set, self.execution_data) - - tree_info = {"node_id_set": list(node_id_set)} - self.tree_info.data = tree_info - self.tree_info.save() - - if save: - self.save() diff --git a/lib/pipeline/parser/__init__.py b/lib/pipeline/parser/__init__.py deleted file mode 100644 index 33eaef7..0000000 --- a/lib/pipeline/parser/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from .pipeline_parser import PipelineParser # noqa diff --git a/lib/pipeline/parser/context.py b/lib/pipeline/parser/context.py deleted file mode 100644 index 2f6e6cb..0000000 --- a/lib/pipeline/parser/context.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from importlib import import_module - -from pipeline.conf import settings - - -def get_pipeline_context(obj, obj_type, data_type="data", username=""): - """ - @summary: pipeline context hook - @param obj: PipelineTemplete or PipelineInstance object - @param obj_type: template or instance - @param data_type: data(for component parent_data.inputs) or context(for pipeline root context) - @param username: - @return: - """ - context = {} - if obj_type == "template": - context_path = settings.PIPELINE_TEMPLATE_CONTEXT - elif obj_type == "instance": - context_path = settings.PIPELINE_INSTANCE_CONTEXT - else: - return context - if context_path: - mod, func = context_path.rsplit(".", 1) - mod = import_module(mod) - func = getattr(mod, func) - context = func(obj, data_type, username) - if not isinstance(context, dict): - context = {"data": context} - return context diff --git a/lib/pipeline/parser/pipeline_parser.py b/lib/pipeline/parser/pipeline_parser.py deleted file mode 100644 index 3a69598..0000000 --- a/lib/pipeline/parser/pipeline_parser.py +++ /dev/null @@ -1,289 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from copy import deepcopy - -from django.utils.module_loading import import_string - -from pipeline import exceptions -from pipeline.component_framework.library import ComponentLibrary -from pipeline.core.constants import PE -from pipeline.core.data.base import DataObject -from pipeline.core.data.context import Context -from pipeline.core.data.converter import get_variable -from pipeline.core.data.hydration import hydrate_node_data, hydrate_subprocess_context -from pipeline.core.flow import ( - Condition, - ConditionalParallelGateway, - ConvergeGateway, - ExclusiveGateway, - FlowNodeClsFactory, - ParallelGateway, - SequenceFlow, -) -from pipeline.core.pipeline import Pipeline, PipelineSpec -from pipeline.validators.base import validate_pipeline_tree - - -def classify_inputs(pipeline_inputs, params, is_subprocess, root_pipeline_params=None): - """ - @summary: classify pipeline inputs into different parts - @param pipeline_inputs: pipeline or subprocess inputs - @param params: pipeline or subprocess params, which can cover item whose is_param is True in inputs - @param is_subprocess: whether pipeline is root or subprocess - @param root_pipeline_params: root pipeline params which should deliver to all subprocess - @return: - """ - # data from activity outputs - act_outputs = {} - # params should deliver to son subprocess - subprocess_params = {} - # context scope to resolving inputs - scope_info = deepcopy(root_pipeline_params) - for key, info in list(pipeline_inputs.items()): - source_act = info.get(PE.source_act) - if isinstance(source_act, str): - act_outputs.setdefault(info[PE.source_act], {}).update({info[PE.source_key]: key}) - continue - elif isinstance(source_act, list): - for source_info in source_act: - act_outputs.setdefault(source_info[PE.source_act], {}).update({source_info[PE.source_key]: key}) - - is_param = info.get(PE.is_param, False) - info = params.get(key, info) if is_param else info - if is_subprocess and is_param: - 
subprocess_params.update({key: info}) - continue - - scope_info.update({key: info}) - result = {"act_outputs": act_outputs, "scope_info": scope_info, "subprocess_params": subprocess_params} - return result - - -class PipelineParser(object): - def __init__(self, pipeline_tree, cycle_tolerate=False): - validate_pipeline_tree(pipeline_tree, cycle_tolerate=cycle_tolerate) - self.pipeline_tree = deepcopy(pipeline_tree) - self.cycle_tolerate = cycle_tolerate - - def parse(self, root_pipeline_data=None, root_pipeline_context=None): - """ - @summary: parse pipeline json tree to object with root data - @param root_pipeline_data: like business info or operator, which can be accessed by parent_data in - Component.execute - @param root_pipeline_context: params for pipeline to resolving inputs data - @return: - """ - return self._parse(root_pipeline_data, root_pipeline_context) - - def _parse( - self, root_pipeline_data=None, root_pipeline_params=None, params=None, is_subprocess=False, parent_context=None - ): - """ - @summary: parse pipeline and subprocess recursively - @param root_pipeline_data: root data from root pipeline parsing, witch will be passed to subprocess recursively - @param root_pipeline_params: params from root pipeline for all subprocess - @param params: params from parent for son subprocess - @param is_subprocess: whither is subprocess - @param parent_context: parent context for activity of subprocess to resolving inputs - @return: Pipeline object - """ - if root_pipeline_data is None: - root_pipeline_data = {} - if root_pipeline_params is None: - root_pipeline_params = {} - if params is None: - params = {} - - pipeline_inputs = self.pipeline_tree[PE.data][PE.inputs] - classification = classify_inputs(pipeline_inputs, params, is_subprocess, root_pipeline_params) - - output_keys = self.pipeline_tree[PE.data][PE.outputs] - context = Context(classification["act_outputs"], output_keys) - for key, info in list(classification["scope_info"].items()): - var = 
get_variable(key, info, context, root_pipeline_data) - context.set_global_var(key, var) - - pipeline_data = deepcopy(root_pipeline_data) - if is_subprocess: - if parent_context is None: - raise exceptions.DataTypeErrorException("parent context of subprocess cannot be none") - for key, info in list(classification["subprocess_params"].items()): - var = get_variable(key, info, parent_context, pipeline_data) - pipeline_data.update({key: var}) - - start = self.pipeline_tree[PE.start_event] - start_cls = FlowNodeClsFactory.get_node_cls(start[PE.type]) - if "pre_render_keys" in self.pipeline_tree[PE.data]: - start_event = start_cls( - id=start[PE.id], - name=start[PE.name], - data=DataObject({"pre_render_keys": self.pipeline_tree[PE.data][PE.pre_render_keys]}), - ) - else: - start_event = start_cls(id=start[PE.id], name=start[PE.name]) - - end = self.pipeline_tree[PE.end_event] - end_cls = FlowNodeClsFactory.get_node_cls(end[PE.type]) - end_event = end_cls(id=end[PE.id], name=end[PE.name], data=DataObject({})) - - acts = self.pipeline_tree[PE.activities] - act_objs = [] - for act in list(acts.values()): - act_cls = FlowNodeClsFactory.get_node_cls(act[PE.type]) - if act[PE.type] == PE.ServiceActivity: - component = ComponentLibrary.get_component( - component_code=act[PE.component][PE.code], - data_dict=act[PE.component][PE.inputs], - version=act[PE.component].get(PE.version), - ) - service = component.service() - data = component.data_for_execution(context, pipeline_data) - handler_path = act.get("failure_handler") - failure_handler = import_string(handler_path) if handler_path else None - act_objs.append( - act_cls( - id=act[PE.id], - service=service, - name=act[PE.name], - data=data, - error_ignorable=act.get(PE.error_ignorable, False), - skippable=act[PE.skippable] if PE.skippable in act else act.get(PE.skippable_old, True), - retryable=act[PE.retryable] if PE.retryable in act else act.get(PE.retryable_old, True), - timeout=act.get(PE.timeout), - 
failure_handler=failure_handler, - ) - ) - elif act[PE.type] == PE.SubProcess: - sub_tree = act[PE.pipeline] - params = act[PE.params] - sub_parser = PipelineParser(pipeline_tree=sub_tree, cycle_tolerate=self.cycle_tolerate) - act_objs.append( - act_cls( - id=act[PE.id], - pipeline=sub_parser._parse( - root_pipeline_data=root_pipeline_data, - root_pipeline_params=root_pipeline_params, - params=params, - is_subprocess=True, - parent_context=context, - ), - name=act[PE.name], - ) - ) - else: - raise exceptions.FlowTypeError("Unknown Activity type: %s" % act[PE.type]) - - gateways = self.pipeline_tree[PE.gateways] - flows = self.pipeline_tree[PE.flows] - gateway_objs = [] - for gw in list(gateways.values()): - gw_cls = FlowNodeClsFactory.get_node_cls(gw[PE.type]) - if gw[PE.type] in {PE.ParallelGateway, PE.ConditionalParallelGateway}: - gateway_objs.append( - gw_cls(id=gw[PE.id], converge_gateway_id=gw[PE.converge_gateway_id], name=gw[PE.name]) - ) - elif gw[PE.type] in {PE.ExclusiveGateway, PE.ConvergeGateway}: - gateway_objs.append(gw_cls(id=gw[PE.id], name=gw[PE.name])) - else: - raise exceptions.FlowTypeError("Unknown Gateway type: %s" % gw[PE.type]) - - flow_objs_dict = {} - for fl in list(flows.values()): - flow_nodes = act_objs + gateway_objs - if fl[PE.source] == start[PE.id]: - source = start_event - else: - source = [x for x in flow_nodes if x.id == fl[PE.source]][0] - if fl[PE.target] == end[PE.id]: - target = end_event - else: - target = [x for x in flow_nodes if x.id == fl[PE.target]][0] - flow_objs_dict[fl[PE.id]] = SequenceFlow(fl[PE.id], source, target) - flow_objs = list(flow_objs_dict.values()) - - # add incoming and outgoing flow to acts - if not isinstance(start[PE.outgoing], list): - start[PE.outgoing] = [start[PE.outgoing]] - for outgoing_id in start[PE.outgoing]: - start_event.outgoing.add_flow(flow_objs_dict[outgoing_id]) - - if not isinstance(end[PE.incoming], list): - end[PE.incoming] = [end[PE.incoming]] - for incoming_id in 
end[PE.incoming]: - end_event.incoming.add_flow(flow_objs_dict[incoming_id]) - - for act in act_objs: - incoming = acts[act.id][PE.incoming] - if isinstance(incoming, list): - for s in incoming: - act.incoming.add_flow(flow_objs_dict[s]) - else: - act.incoming.add_flow(flow_objs_dict[incoming]) - - act.outgoing.add_flow(flow_objs_dict[acts[act.id][PE.outgoing]]) - - for gw in gateway_objs: - if isinstance(gw, ExclusiveGateway) or isinstance(gw, ConditionalParallelGateway): - for flow_id, con in list(gateways[gw.id][PE.conditions].items()): - con_obj = Condition(con[PE.evaluate], flow_objs_dict[flow_id]) - gw.add_condition(con_obj) - - if isinstance(gateways[gw.id][PE.incoming], list): - for incoming_id in gateways[gw.id][PE.incoming]: - gw.incoming.add_flow(flow_objs_dict[incoming_id]) - else: - gw.incoming.add_flow(flow_objs_dict[gateways[gw.id][PE.incoming]]) - - for outgoing_id in gateways[gw.id][PE.outgoing]: - gw.outgoing.add_flow(flow_objs_dict[outgoing_id]) - - elif isinstance(gw, ParallelGateway): - if isinstance(gateways[gw.id][PE.incoming], list): - for incoming_id in gateways[gw.id][PE.incoming]: - gw.incoming.add_flow(flow_objs_dict[incoming_id]) - else: - gw.incoming.add_flow(flow_objs_dict[gateways[gw.id][PE.incoming]]) - - for outgoing_id in gateways[gw.id][PE.outgoing]: - gw.outgoing.add_flow(flow_objs_dict[outgoing_id]) - - elif isinstance(gw, ConvergeGateway): - for incoming_id in gateways[gw.id][PE.incoming]: - gw.incoming.add_flow(flow_objs_dict[incoming_id]) - gw.outgoing.add_flow(flow_objs_dict[gateways[gw.id][PE.outgoing]]) - - else: - raise exceptions.FlowTypeError("Unknown Gateway type: %s" % type(gw)) - - context.duplicate_variables() - pipeline_data = DataObject(pipeline_data) - pipeline_spec = PipelineSpec(start_event, end_event, flow_objs, act_objs, gateway_objs, pipeline_data, context) - return Pipeline(self.pipeline_tree[PE.id], pipeline_spec) - - def get_act(self, act_id, subprocess_stack=None, root_pipeline_data=None, 
root_pipeline_context=None): - if subprocess_stack is None: - subprocess_stack = [] - pipeline = self.parse(root_pipeline_data, root_pipeline_context) - for sub_id in subprocess_stack: - subprocess_act = [x for x in pipeline.spec.activities if x.id == sub_id][0] - hydrate_subprocess_context(subprocess_act) - pipeline = subprocess_act.pipeline - act = [x for x in pipeline.spec.activities if x.id == act_id][0] - return act - - def get_act_inputs(self, act_id, subprocess_stack=None, root_pipeline_data=None, root_pipeline_context=None): - act = self.get_act(act_id, subprocess_stack, root_pipeline_data, root_pipeline_context) - hydrate_node_data(act) - inputs = act.data.inputs - return inputs diff --git a/lib/pipeline/parser/schemas.py b/lib/pipeline/parser/schemas.py deleted file mode 100644 index c806bba..0000000 --- a/lib/pipeline/parser/schemas.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -PIPELINE_TREE_PARSER = { - "type": "object", - "properties": { - "data": {"type": "object", "properties": {"inputs": {"type": "object"}, "outputs": {"type": "object"}}}, - "activities": {"type": "object"}, - "end_event": { - "type": "object", - "properties": { - "id": {"type": "string"}, - "incoming": {"type": "string"}, - "name": {"type": "string"}, - "outgoing": {"type": "string"}, - "type": {"type": "string"}, - }, - }, - "flows": {"type": "object"}, - "gateways": {"type": "object"}, - "id": {"type": "string"}, - "line": {"type": "array"}, - "location": {"type": "array"}, - "start_event": { - "type": "object", - "properties": { - "id": {"type": "string"}, - "incoming": {"type": "string"}, - "name": {"type": "string"}, - "outgoing": {"type": "string"}, - "type": {"type": "string"}, - }, - }, - }, -} diff --git a/lib/pipeline/parser/utils.py b/lib/pipeline/parser/utils.py deleted file mode 100644 index ca25e68..0000000 --- a/lib/pipeline/parser/utils.py +++ /dev/null @@ -1,241 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging - -from pipeline.utils.uniqid import node_uniqid, line_uniqid -from pipeline.core.constants import PE -from pipeline.exceptions import NodeNotExistException - -logger = logging.getLogger("root") - -BRANCH_SELECT_GATEWAYS = {PE.ExclusiveGateway, PE.ConditionalParallelGateway} - - -def recursive_replace_id(pipeline_data): - pipeline_data[PE.id] = node_uniqid() - replace_all_id(pipeline_data) - activities = pipeline_data[PE.activities] - for act_id, act in list(activities.items()): - if act[PE.type] == PE.SubProcess: - recursive_replace_id(act[PE.pipeline]) - act[PE.pipeline][PE.id] = act_id - - -def replace_all_id(pipeline_data): - flows = pipeline_data[PE.flows] - node_map = {} - flow_map = {} - - # step.1 replace nodes id - - # replace events id - start_event_id = node_uniqid() - end_event_id = node_uniqid() - node_map[pipeline_data[PE.start_event][PE.id]] = start_event_id - node_map[pipeline_data[PE.end_event][PE.id]] = end_event_id - - start_event_id_maps = _replace_event_id(flows, pipeline_data[PE.start_event], start_event_id) - end_event_id_maps = _replace_event_id(flows, pipeline_data[PE.end_event], end_event_id) - - # replace activities id - activity_id_maps = {} - activities = pipeline_data[PE.activities] - keys = list(activities.keys()) - for old_id in keys: - substituted_id = node_uniqid() - node_map[old_id] = substituted_id - _replace_activity_id(flows, activities, old_id, substituted_id) - activity_id_maps[old_id] = substituted_id - - # replace gateways id - gateway_id_maps = {} - gateways = pipeline_data[PE.gateways] - keys = list(gateways.keys()) - for old_id in keys: - substituted_id = node_uniqid() - node_map[old_id] = substituted_id - _replace_gateway_id(flows, gateways, old_id, substituted_id) - gateway_id_maps[old_id] = substituted_id - - # step.2 replace flows id - flow_id_maps = {} - keys = list(flows.keys()) - for old_id in keys: - substituted_id = line_uniqid() - flow_map[old_id] = substituted_id - 
_replace_flow_id(flows, old_id, substituted_id, pipeline_data) - flow_id_maps[old_id] = substituted_id - - # step.3 replace id in data - _replace_id_in_data(pipeline_data, node_map) - - # step.4 try to replace front end data - _replace_front_end_data_id(pipeline_data, node_map, flow_map) - - return { - PE.start_event: start_event_id_maps, - PE.end_event: end_event_id_maps, - PE.activities: activity_id_maps, - PE.gateways: gateway_id_maps, - PE.flows: flow_id_maps, - PE.subprocess_detail: {}, - } - - -def _replace_id_in_data(pipeline_data, node_map): - for _, var_info in list(pipeline_data.get(PE.data, {}).get(PE.inputs, {}).items()): - if PE.source_act in var_info: - if isinstance(var_info[PE.source_act], str): - var_info[PE.source_act] = node_map[var_info[PE.source_act]] - else: - for source_info in var_info[PE.source_act]: - source_info[PE.source_act] = node_map[var_info[PE.source_act]] - - -def _replace_front_end_data_id(pipeline_data, node_map, flow_map): - if "line" in pipeline_data: - for line in pipeline_data["line"]: - line[PE.id] = flow_map[line[PE.id]] - line[PE.source][PE.id] = node_map[line[PE.source][PE.id]] - line[PE.target][PE.id] = node_map[line[PE.target][PE.id]] - if "location" in pipeline_data: - for location in pipeline_data["location"]: - location[PE.id] = node_map[location[PE.id]] - if "constants" in pipeline_data: - for key, constant in list(pipeline_data[PE.constants].items()): - source_info = constant.get("source_info", None) - if source_info: - replaced_constant = {} - for source_step, source_keys in list(source_info.items()): - try: - replaced_constant[node_map[source_step]] = source_keys - except KeyError as e: - message = "replace pipeline template id error: %s" % e - logger.exception(message) - raise NodeNotExistException(message) - constant["source_info"] = replaced_constant - - -def _replace_flow_id(flows, flow_id, substituted_id, pipeline_data): - flow = flows[flow_id] - flow[PE.id] = substituted_id - - 
_replace_flow_in_node(flow[PE.source], pipeline_data, substituted_id, flow_id, PE.outgoing) - _replace_flow_in_node(flow[PE.target], pipeline_data, substituted_id, flow_id, PE.incoming) - - flows.pop(flow_id) - flows[substituted_id] = flow - - -def _replace_flow_in_node(node_id, pipeline_data, substituted_id, flow_id, field): - if node_id in pipeline_data[PE.activities]: - node = pipeline_data[PE.activities][node_id] - elif node_id in pipeline_data[PE.gateways]: - node = pipeline_data[PE.gateways][node_id] - if node[PE.type] in BRANCH_SELECT_GATEWAYS and field == PE.outgoing: - _replace_flow_in_exclusive_gateway_conditions(node, substituted_id, flow_id) - elif node_id == pipeline_data[PE.start_event][PE.id]: - node = pipeline_data[PE.start_event] - elif node_id == pipeline_data[PE.end_event][PE.id]: - node = pipeline_data[PE.end_event] - sequence = node[field] - if isinstance(sequence, list): - i = sequence.index(flow_id) - sequence.pop(i) - sequence.insert(i, substituted_id) - else: - node[field] = substituted_id - - -def _replace_flow_in_exclusive_gateway_conditions(gateway, substituted_id, flow_id): - conditions = gateway[PE.conditions] - conditions[substituted_id] = conditions[flow_id] - conditions.pop(flow_id) - - -def _replace_gateway_id(flows, gateways, gateway_id, substituted_id): - try: - gateway = gateways[gateway_id] - gateway[PE.id] = substituted_id - - if gateway[PE.type] == PE.ConvergeGateway: - flows[gateway[PE.outgoing]][PE.source] = substituted_id - for flow_id in gateway[PE.incoming]: - flows[flow_id][PE.target] = substituted_id - # replace converge_gateway_id - for g_id, gw in list(gateways.items()): - if PE.converge_gateway_id in gw and gw[PE.converge_gateway_id] == gateway_id: - gw[PE.converge_gateway_id] = substituted_id - else: - incoming = gateway[PE.incoming] - - if isinstance(incoming, list): - for flow_id in incoming: - flows[flow_id][PE.target] = substituted_id - else: - flows[gateway[PE.incoming]][PE.target] = substituted_id - - for 
flow_id in gateway[PE.outgoing]: - flows[flow_id][PE.source] = substituted_id - - gateways.pop(gateway_id) - gateways[substituted_id] = gateway - except KeyError as e: - message = "replace gateway id error: %s" % e - logger.exception(message) - raise NodeNotExistException(message) - - -def _replace_activity_id(flows, activities, act_id, substituted_id): - try: - activity = activities[act_id] - activity[PE.id] = substituted_id - - incoming = activity[PE.incoming] - - if isinstance(incoming, list): - for s in incoming: - flows[s][PE.target] = substituted_id - else: - flows[activity[PE.incoming]][PE.target] = substituted_id - - flows[activity[PE.outgoing]][PE.source] = substituted_id - - activities.pop(act_id) - activities[substituted_id] = activity - except KeyError as e: - message = "replace activity id error: %s" % e - logger.exception(message) - raise NodeNotExistException(message) - - -def _replace_event_id(flows, event, substituted_id): - replace_maps = {} - try: - replace_maps[event[PE.id]] = substituted_id - event[PE.id] = substituted_id - if event[PE.incoming]: - if isinstance(event[PE.incoming], list): - for incoming in event[PE.incoming]: - flows[incoming][PE.target] = substituted_id - else: - flows[event[PE.incoming]][PE.target] = substituted_id - else: - flows[event[PE.outgoing]][PE.source] = substituted_id - except KeyError as e: - message = "replace event id error: %s" % e - logger.exception(message) - raise NodeNotExistException(message) - - return replace_maps diff --git a/lib/pipeline/service/__init__.py b/lib/pipeline/service/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/service/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/service/pipeline_engine_adapter/__init__.py b/lib/pipeline/service/pipeline_engine_adapter/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/service/pipeline_engine_adapter/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/service/pipeline_engine_adapter/adapter_api.py b/lib/pipeline/service/pipeline_engine_adapter/adapter_api.py deleted file mode 100644 index ef4f563..0000000 --- a/lib/pipeline/service/pipeline_engine_adapter/adapter_api.py +++ /dev/null @@ -1,172 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. 
-Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from pipeline.constants import PIPELINE_DEFAULT_PRIORITY -from pipeline.engine import api -from pipeline.log.models import LogEntry - -STATE_MAP = { - "CREATED": "RUNNING", - "READY": "RUNNING", - "RUNNING": "RUNNING", - "BLOCKED": "BLOCKED", - "SUSPENDED": "SUSPENDED", - "FINISHED": "FINISHED", - "FAILED": "FAILED", - "REVOKED": "REVOKED", -} - - -def run_pipeline(pipeline_instance, instance_id=None, check_workers=True, priority=PIPELINE_DEFAULT_PRIORITY, queue=""): - return api.start_pipeline(pipeline_instance, check_workers=check_workers, priority=priority, queue=queue) - - -def pause_pipeline(pipeline_id): - return api.pause_pipeline(pipeline_id) - - -def revoke_pipeline(pipeline_id): - return api.revoke_pipeline(pipeline_id) - - -def resume_pipeline(pipeline_id): - return api.resume_pipeline(pipeline_id) - - -def pause_activity(act_id): - return api.pause_node_appointment(act_id) - - -def resume_activity(act_id): - return api.resume_node_appointment(act_id) - - -def retry_activity(act_id, inputs=None): - return api.retry_node(act_id, inputs=inputs) - - -def skip_activity(act_id): - return api.skip_node(act_id) - - -def pause_subprocess(subprocess_id): - return api.pause_subprocess(subprocess_id) - - -def skip_exclusive_gateway(gateway_id, flow_id): - return api.skip_exclusive_gateway(gateway_id, flow_id) - - -def skip_conditional_parallel_gateway(gateway_id, flow_ids, 
converge_gateway_id): - return api.skip_conditional_parallel_gateway(gateway_id, flow_ids, converge_gateway_id) - - -def forced_fail(node_id, ex_data=""): - return api.forced_fail(node_id, ex_data=ex_data) - - -def get_inputs(act_id): - return api.get_inputs(act_id) - - -def get_outputs(act_id): - return api.get_outputs(act_id) - - -def get_activity_histories(act_id): - histories = api.get_activity_histories(act_id) - for item in histories: - item["started_time"] = _better_time_or_none(item["started_time"]) - item["finished_time"] = _better_time_or_none(item.pop("archived_time")) - return histories - - -def callback(act_id, data=None): - return api.activity_callback(act_id, data) - - -def get_state(node_id): - tree = api.get_status_tree(node_id, max_depth=100) - - res = _map(tree) - - # collect all atom - descendants = {} - _collect_descendants(tree, descendants) - res["children"] = descendants - - # return - return res - - -def _get_node_state(tree): - status = [] - - # return state when meet leaf - if not tree.get("children", []): - return STATE_MAP[tree["state"]] - - # iterate children and get child state recursively - for identifier_code, child_tree in list(tree["children"].items()): - status.append(_get_node_state(child_tree)) - - # summary parent state - return STATE_MAP[_get_parent_state_from_children_state(tree["state"], status)] - - -def _get_parent_state_from_children_state(parent_state, children_state_list): - """ - @summary: 根据子任务状态计算父任务状态 - @param parent_state: - @param children_state_list: - @return: - """ - children_state_set = set(children_state_list) - if parent_state == "BLOCKED": - if "RUNNING" in children_state_set: - parent_state = "RUNNING" - if "FAILED" in children_state_set: - parent_state = "FAILED" - return parent_state - - -def _collect_descendants(tree, descendants): - # iterate children for tree - for identifier_code, child_tree in list(tree["children"].items()): - child_status = _map(child_tree) - descendants[identifier_code] = 
child_status - - # collect children - if child_tree["children"]: - _collect_descendants(child_tree, descendants) - - -def _better_time_or_none(time): - return time.strftime("%Y-%m-%d %H:%M:%S") if time else time - - -def _map(tree): - tree.setdefault("children", {}) - return { - "id": tree["id"], - "state": _get_node_state(tree), - "start_time": _better_time_or_none(tree["started_time"]), - "finish_time": _better_time_or_none(tree["archived_time"]), - "loop": tree["loop"], - "retry": tree["retry"], - "skip": tree["skip"], - } - - -def get_plain_log_for_node(node_id, history_id): - return LogEntry.objects.plain_log_for_node(node_id=node_id, history_id=history_id) diff --git a/lib/pipeline/service/task_service.py b/lib/pipeline/service/task_service.py deleted file mode 100644 index 0d03f8b..0000000 --- a/lib/pipeline/service/task_service.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import importlib - -from pipeline.conf import settings -from pipeline.constants import PIPELINE_DEFAULT_PRIORITY - -adapter_api = importlib.import_module(settings.PIPELINE_ENGINE_ADAPTER_API) - - -def run_pipeline(pipeline, instance_id=None, check_workers=True, priority=PIPELINE_DEFAULT_PRIORITY, queue=""): - return adapter_api.run_pipeline(pipeline, instance_id, check_workers=check_workers, priority=priority, queue=queue) - - -def pause_pipeline(pipeline_id): - return adapter_api.pause_pipeline(pipeline_id) - - -def revoke_pipeline(pipeline_id): - return adapter_api.revoke_pipeline(pipeline_id) - - -def resume_pipeline(pipeline_id): - return adapter_api.resume_pipeline(pipeline_id) - - -def pause_activity(act_id): - return adapter_api.pause_activity(act_id) - - -def resume_activity(act_id): - return adapter_api.resume_activity(act_id) - - -def retry_activity(act_id, inputs=None): - return adapter_api.retry_activity(act_id, inputs=inputs) - - -def skip_activity(act_id): - return adapter_api.skip_activity(act_id) - - -def skip_exclusive_gateway(gateway_id, flow_id): - return adapter_api.skip_exclusive_gateway(gateway_id, flow_id) - - -def skip_conditional_parallel_gateway(gateway_id, flow_ids, converge_gateway_id): - return adapter_api.skip_conditional_parallel_gateway(gateway_id, flow_ids, converge_gateway_id) - - -def forced_fail(act_id, ex_data=""): - return adapter_api.forced_fail(act_id, ex_data) - - -def get_state(node_id): - return adapter_api.get_state(node_id) - - -def get_topo_tree(pipeline_id): - return adapter_api.get_topo_tree(pipeline_id) - - -def get_inputs(act_id): - return adapter_api.get_inputs(act_id) - - -def get_outputs(act_id): - return adapter_api.get_outputs(act_id) - - -def get_activity_histories(act_id): - return adapter_api.get_activity_histories(act_id) - - -def callback(act_id, data=None): - return adapter_api.callback(act_id, data) - - -def get_plain_log_for_node(node_id, history_id=-1): - return 
adapter_api.get_plain_log_for_node(node_id, history_id) diff --git a/lib/pipeline/signals/__init__.py b/lib/pipeline/signals/__init__.py deleted file mode 100644 index 503402c..0000000 --- a/lib/pipeline/signals/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.dispatch import Signal - -post_pipeline_finish = Signal(providing_args=["instance_id"]) -post_pipeline_revoke = Signal(providing_args=["instance_id"]) diff --git a/lib/pipeline/signals/handlers.py b/lib/pipeline/signals/handlers.py deleted file mode 100644 index 60d8ad4..0000000 --- a/lib/pipeline/signals/handlers.py +++ /dev/null @@ -1,96 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.db import transaction -from django.db.models.signals import post_save, pre_save -from django.dispatch import receiver - -from pipeline.core.constants import PE -from pipeline.core.pipeline import Pipeline -from pipeline.engine.signals import pipeline_end, pipeline_revoke -from pipeline.models import ( - PipelineInstance, - PipelineTemplate, - TemplateCurrentVersion, - TemplateRelationship, - TemplateVersion, -) - - -@receiver(pre_save, sender=PipelineTemplate) -def pipeline_template_pre_save_handler(sender, instance, **kwargs): - template = instance - - if template.is_deleted: - return - - template.set_has_subprocess_bit() - - -@receiver(post_save, sender=PipelineTemplate) -def pipeline_template_post_save_handler(sender, instance, created, **kwargs): - template = instance - - if template.is_deleted: - TemplateRelationship.objects.filter(ancestor_template_id=template.template_id).delete() - return - - with transaction.atomic(): - TemplateRelationship.objects.filter(ancestor_template_id=template.template_id).delete() - acts = list(template.data[PE.activities].values()) - subprocess_nodes = [act for act in acts if act["type"] == PE.SubProcess] - rs = [] - template_scheme_dict = {} - for sp in subprocess_nodes: - version = sp.get("version") or PipelineTemplate.objects.get(template_id=sp["template_id"]).version - always_use_latest = sp.get("always_use_latest", False) - - template_scheme_dict.update({ - sp["template_id"]: sp.get("scheme_id_list", []) - }) - - rs.append( - TemplateRelationship( - ancestor_template_id=template.template_id, - 
descendant_template_id=sp["template_id"], - subprocess_node_id=sp["id"], - version=version, - always_use_latest=always_use_latest - ) - ) - if rs: - TemplateRelationship.objects.bulk_create(rs) - - relation_queryset = TemplateRelationship.objects.filter(ancestor_template_id=template.template_id) - for relation in relation_queryset: - scheme_id_list = template_scheme_dict[relation.descendant_template_id] - relation.templatescheme_set.add(*scheme_id_list) - - TemplateVersion.objects.track(template) - TemplateCurrentVersion.objects.update_current_version(template) - - -@receiver(pipeline_end, sender=Pipeline) -def pipeline_end_handler(sender, root_pipeline_id, **kwargs): - try: - PipelineInstance.objects.set_finished(root_pipeline_id) - except PipelineInstance.DoesNotExist: # task which do not belong to any instance - pass - - -@receiver(pipeline_revoke, sender=Pipeline) -def pipeline_revoke_handler(sender, root_pipeline_id, **kwargs): - try: - PipelineInstance.objects.set_revoked(root_pipeline_id) - except PipelineInstance.DoesNotExist: # task which do not belong to any instance - pass diff --git a/lib/pipeline/templates/__init__.py b/lib/pipeline/templates/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/templates/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/templates/create_plugins_app/__init__.py b/lib/pipeline/templates/create_plugins_app/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/templates/create_plugins_app/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/templates/create_plugins_app/js_file.py b/lib/pipeline/templates/create_plugins_app/js_file.py deleted file mode 100644 index b3490b8..0000000 --- a/lib/pipeline/templates/create_plugins_app/js_file.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -TEMPLATE = """ -/** -* Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -* Edition) available. -* Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -* You may obtain a copy of the License at -* http://opensource.org/licenses/MIT -* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -* specific language governing permissions and limitations under the License. -*/ -""" diff --git a/lib/pipeline/templates/create_plugins_app/plugins.py b/lib/pipeline/templates/create_plugins_app/plugins.py deleted file mode 100644 index 2b9e84c..0000000 --- a/lib/pipeline/templates/create_plugins_app/plugins.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -TEMPLATE = """ -# -*- coding: utf-8 -*- -\"\"\" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -\"\"\" - -from pipeline.core.flow.activity import Service -from pipeline.component_framework.component import Component -""" diff --git a/lib/pipeline/templates/create_plugins_app/py_file.py b/lib/pipeline/templates/create_plugins_app/py_file.py deleted file mode 100644 index bb7f275..0000000 --- a/lib/pipeline/templates/create_plugins_app/py_file.py +++ /dev/null @@ -1,35 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -TEMPLATE = """ -# -*- coding: utf-8 -*- -\"\"\" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -\"\"\" - -import logging - -from pipeline.conf import settings -from pipeline.core.flow.activity import Service -from pipeline.component_framework.component import Component - -logger = logging.getLogger('celery') -""" diff --git a/lib/pipeline/utils/__init__.py b/lib/pipeline/utils/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/utils/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/utils/boolrule/__init__.py b/lib/pipeline/utils/boolrule/__init__.py deleted file mode 100644 index 28fa00a..0000000 --- a/lib/pipeline/utils/boolrule/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -__author__ = "Steve Webster" -__email__ = "spjwebster@gmail.com" -__version__ = "0.2.1" - -from .boolrule import BoolRule, MissingVariableException, UnknownOperatorException # noqa diff --git a/lib/pipeline/utils/boolrule/boolrule.py b/lib/pipeline/utils/boolrule/boolrule.py deleted file mode 100644 index 0f346d1..0000000 --- a/lib/pipeline/utils/boolrule/boolrule.py +++ /dev/null @@ -1,291 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from pyparsing import ( - CaselessLiteral, - Combine, - Forward, - Group, - Keyword, - Optional, - ParseException, - ParseResults, - QuotedString, - Suppress, - Word, - ZeroOrMore, - alphanums, - alphas, - delimitedList, - nums, - oneOf, -) - -PATH_DELIMITER = "." - - -class SubstituteVal(object): - """ - Represents a token that will later be replaced by a context value. - """ - - def __init__(self, t): - self._path = t[0] - - def get_val(self, context): - if not context: - # raise MissingVariableException( - # 'context missing or empty' - # ) - return self._path - - val = context - - try: - for part in self._path.split(PATH_DELIMITER): - val = getattr(val, part) if hasattr(val, part) else val[part] - - except KeyError: - raise MissingVariableException("no value supplied for {}".format(self._path)) - - return val - - def __repr__(self): - return "SubstituteVal(%s)" % self._path - - -def get_bool_expression(): - - # Grammar definition - # match gcloud's variable - identifier = Combine(Optional("${") + Optional("_") + Word(alphas, alphanums + "_") + Optional("}")) - # identifier = Word(alphas, alphanums + "_") - propertyPath = delimitedList(identifier, PATH_DELIMITER, combine=True) - - and_ = Keyword("and", caseless=True) - or_ = Keyword("or", caseless=True) - - lparen = Suppress("(") - rparen = Suppress(")") - - binaryOp = oneOf("== != < > >= <= in notin issuperset notissuperset", caseless=True)("operator") - - E = CaselessLiteral("E") - numberSign = Word("+-", 
exact=1) - realNumber = Combine( - Optional(numberSign) - + (Word(nums) + "." + Optional(Word(nums)) | ("." + Word(nums))) - + Optional(E + Optional(numberSign) + Word(nums)) - ) - - integer = Combine(Optional(numberSign) + Word(nums) + Optional(E + Optional("+") + Word(nums))) - - # str_ = quotedString.addParseAction(removeQuotes) - str_ = QuotedString('"') | QuotedString("'") - bool_ = oneOf("true false", caseless=True) - - simpleVals = ( - realNumber.setParseAction(lambda toks: float(toks[0])) - | integer.setParseAction(lambda toks: int(toks[0])) - | str_ - | bool_.setParseAction(lambda toks: toks[0] == "true") - | propertyPath.setParseAction(lambda toks: SubstituteVal(toks)) - ) # need to add support for alg expressions - - propertyVal = simpleVals | (lparen + Group(delimitedList(simpleVals)) + rparen) - - boolExpression = Forward() - boolCondition = Group( - (Group(propertyVal)("lval") + binaryOp + Group(propertyVal)("rval")) | (lparen + boolExpression + rparen) - ) - boolExpression << boolCondition + ZeroOrMore((and_ | or_) + boolExpression) - - return boolExpression - - -def double_equals_trans(lval, rval, operator): - # double equals - if operator in ["in", "notin"]: - if isinstance(rval, list) and len(rval): - transed_rval = [] - if isinstance(lval, int): - for item in rval: - try: - transed_rval.append(int(item)) - except Exception: - pass - elif isinstance(lval, str): - for item in rval: - try: - transed_rval.append(str(item)) - except Exception: - pass - rval += transed_rval - - elif operator in ["issuperset", "notissuperset"]: - # avoid convert set('abc') to {a, b, c}, but keep {'abc'} - if isinstance(lval, str): - lval = [lval] - if isinstance(rval, str): - rval = [rval] - - else: - try: - if isinstance(lval, int): - rval = int(rval) - elif isinstance(rval, int): - lval = int(lval) - if isinstance(lval, str): - rval = str(rval) - elif isinstance(rval, str): - lval = str(lval) - except Exception: - pass - - return lval, rval - - -class 
BoolRule(object): - """ - Represents a boolean expression and provides a `test` method to evaluate - the expression and determine its truthiness. - - :param query: A string containing the query to be evaluated - :param lazy: If ``True``, parse the query the first time it's tested rather - than immediately. This can help with performance if you - instantiate a lot of rules and only end up evaluating a - small handful. - """ - - _compiled = False - _tokens = None - _query = None - - def __init__(self, query, lazy=False, strict=True): - self._query = query - self.strict = strict - if not lazy: - self._compile() - - def test(self, context=None): - """ - Test the expression against the given context and return the result. - - :param context: A dict context to evaluate the expression against. - :return: True if the expression succesfully evaluated against the - context, or False otherwise. - """ - if self._is_match_all(): - return True - - self._compile() - return self._test_tokens(self._tokens, context) - - def _is_match_all(self): - return True if self._query == "*" else False - - def _compile(self): - if not self._compiled: - - # special case match-all query - if self._is_match_all(): - return - - try: - self._tokens = get_bool_expression().parseString(self._query, parseAll=self.strict) - except ParseException: - raise - - self._compiled = True - - def _expand_val(self, val, context): - if type(val) == list: - val = [self._expand_val(v, context) for v in val] - - if isinstance(val, SubstituteVal): - ret = val.get_val(context) - return ret - - if isinstance(val, ParseResults): - return [self._expand_val(x, context) for x in val.asList()] - - return val - - def _test_tokens(self, tokens, context): - passed = False - - for token in tokens: - - if not isinstance(token, ParseResults): - if token == "or" and passed: - return True - elif token == "and" and not passed: - return False - continue - - if not token.getName(): - passed = self._test_tokens(token, context) - 
continue - - items = token.asDict() - - operator = items["operator"] - lval = self._expand_val(items["lval"][0], context) - rval = self._expand_val(items["rval"][0], context) - lval, rval = double_equals_trans(lval, rval, operator) - - if operator in ("=", "==", "eq"): - passed = lval == rval - elif operator in ("!=", "ne"): - passed = lval != rval - elif operator in (">", "gt"): - passed = lval > rval - elif operator in (">=", "ge"): - passed = lval >= rval - elif operator in ("<", "lt"): - passed = lval < rval - elif operator in ("<=", "le"): - passed = lval <= rval - elif operator == "in": - passed = lval in rval - elif operator == "notin": - passed = lval not in rval - elif operator == "issuperset": - passed = set(lval).issuperset(set(rval)) - elif operator == "notissuperset": - passed = not set(lval).issuperset(set(rval)) - else: - raise UnknownOperatorException("Unknown operator '{}'".format(operator)) - - return passed - - -class MissingVariableException(Exception): - """ - Raised when an expression contains a property path that's not supplied in - the context. - """ - - pass - - -class UnknownOperatorException(Exception): - """ - Raised when an expression uses an unknown operator. - - This should never be thrown since the operator won't be correctly parsed as - a token by pyparsing, but it's useful to have this hanging around for when - additional operators are being added. - """ - - pass diff --git a/lib/pipeline/utils/collections.py b/lib/pipeline/utils/collections.py deleted file mode 100644 index e45c26d..0000000 --- a/lib/pipeline/utils/collections.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -class FancyDict(dict): - def __getattr__(self, key): - try: - return self[key] - except KeyError as k: - raise AttributeError(k) - - def __setattr__(self, key, value): - # 内建属性不放入 key 中 - if key.startswith("__") and key.endswith("__"): - super().__setattr__(key, value) - else: - self[key] = value - - def __delattr__(self, key): - try: - del self[key] - except KeyError as k: - raise AttributeError(k) diff --git a/lib/pipeline/utils/crypt.py b/lib/pipeline/utils/crypt.py deleted file mode 100644 index 7322997..0000000 --- a/lib/pipeline/utils/crypt.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import base64 -import rsa - - -def rsa_decrypt_password(encrypted_password, private_key): - return rsa.decrypt( - base64.decodestring(encrypted_password.encode("utf-8")), rsa.PrivateKey.load_pkcs1(private_key) - ).decode("utf-8") diff --git a/lib/pipeline/utils/dj.py b/lib/pipeline/utils/dj.py deleted file mode 100644 index 15f7951..0000000 --- a/lib/pipeline/utils/dj.py +++ /dev/null @@ -1,18 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import sys - - -def in_test(): - return sys.argv[1:2] == ["test"] diff --git a/lib/pipeline/utils/env.py b/lib/pipeline/utils/env.py deleted file mode 100644 index 9d08c07..0000000 --- a/lib/pipeline/utils/env.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import sys -import traceback - -DJANGO_MANAGE_CMD = "manage.py" - - -def get_django_command(): - if sys.argv and sys.argv[0] == DJANGO_MANAGE_CMD: - try: - return sys.argv[1] - except Exception: - print( - "get django start up command error with argv: {argv}, traceback: {traceback}".format( - argv=sys.argv, traceback=traceback.format_exc() - ) - ) - - return None - - return None diff --git a/lib/pipeline/utils/graph.py b/lib/pipeline/utils/graph.py deleted file mode 100644 index fad1c14..0000000 --- a/lib/pipeline/utils/graph.py +++ /dev/null @@ -1,261 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -class Graph(object): - def __init__(self, nodes, flows): - self.nodes = nodes - self.flows = flows - self.path = [] - self.last_visited_node = "" - self.graph = {node: [] for node in self.nodes} - for flow in self.flows: - self.graph[flow[0]].append(flow[1]) - - def has_cycle(self): - self.path = [] - visited = {node: False for node in self.nodes} - visit_stack = {node: False for node in self.nodes} - - for node in self.nodes: - if self._has_cycle(node, visited, visit_stack): - return True - return False - - def _has_cycle(self, node, visited, visit_stack): - self.last_visited_node = node - self.path.append(node) - visited[node] = True - visit_stack[node] = True - - for neighbor in self.graph[node]: - if not visited[neighbor]: - if self._has_cycle(neighbor, visited, visit_stack): - return True - elif visit_stack[neighbor]: - self.path.append(neighbor) - return True - - self.path.remove(node) - visit_stack[node] = False - return False - - def get_cycle(self): - if self.has_cycle(): - cross_node = self.path[-1] - if self.path.count(cross_node) > 1: - return self.path[self.path.index(cross_node) :] - else: - return self.path - return [] - - -if __name__ == "__main__": - graph1 = Graph([1, 2, 3, 4], [[1, 2], [2, 3], [3, 4]]) - assert not graph1.has_cycle() - assert graph1.get_cycle() == [] - graph2 = Graph([1, 2, 3, 4], [[1, 2], [2, 3], [3, 4], [4, 1]]) - assert graph2.has_cycle() - assert graph2.get_cycle() == [1, 2, 3, 4, 1] - graph3 = Graph([1, 2, 3, 4], [[1, 2], [2, 3], [3, 4], [4, 2]]) - assert graph3.has_cycle() - assert graph3.get_cycle() == [2, 3, 4, 2] - graph4 = Graph( - [ - "n20c4a0601193f268bfa168f1192eacd", - "nef42d10350b3961b53df7af67e16d9b", - "n0ada7b4abe63771a43052eaf188dc4b", - "n0cd3b95c714388bacdf1a486ab432fc", - "n1430047af8537f88710c4bbf3cbfb0f", - "n383748fe27434d582f0ca17af9d968a", - "n51426abd4be3a4691c80a73c3f93b3c", - "n854753a77933562ae72ec87c365f23d", - "n89f083892a731d7b9d7edb0f372006d", - "n8d4568db0ad364692b0387e86a2f1e0", - 
"n8daedbb02273a0fbc94cc118c90649f", - "n90b7ef55fe839b181879e036b4f8ffe", - "n99817348b4a36a6931854c93eed8c5f", - "na02956eba6f3a36ab9b0af2f2350213", - "nc3d0d49adf530bbaffe53630c184c0a", - "nca50848d1aa340f8c2b4776ce81868d", - "ncab9a48e79d357195dcee68dad3a31f", - "ncb4e013a6a8348bab087cc8500a3876", - "ne1f86f902a23e7fa4a67192e8b38a05", - "ne26def77df1385caa206c64e7e3ea53", - "nf3ebee137c53da28091ad7d140ce00c", - "nfc1dcdd7476393b9a81a988c113e1cf", - "n0197f8f210b3a1b8a7fc2f90e94744e", - "n01fb40259ad3cf285bb11a8bbbe59f2", - "n03f39191e8a32629145ba6a677ed040", - "n03ffc3b9e12316d8be63261cb9dec71", - "n07982b8985139249bca3a046f3a4379", - "n0b9e36e6b633ddb906d2044f658f110", - "n136c4fedebe3eb0ba932495aff6a945", - "n17cdc62c5d43976a413bda8f35634eb", - "n1d48483d8023439ad98d61d156c85fb", - "n26725bdcc0931fab0bc73e7244545ca", - "n2890db24f6c3cd1bbcd6b7d8cf2c045", - "n2ad9caac5b737bd897d4c8844c85f12", - "n2c88d1c1d8b35aebf883cbf259fb6bc", - "n302d25dfc9c369ab13104d5208e7119", - "n31688b7ab44338e9e6cb8dcaf259eef", - "n374443fbdc1313d98ebbe19d535fec2", - "n38c3dd0344a3f86bc7511c454bcdf4c", - "n3934eef90463940a6a9cf4ba2e63b1c", - "n40d5f0ca4bc3dd99c0b264cb186f00f", - "n476ddcb6dd33e2abac43596b08c2bc1", - "n4790f8aa48e335aa712e2af757e180b", - "n48bbfdc912334fc89c4f48c05e8969e", - "n5bef4f4532a382eaf79a0af70b2396b", - "n5ced56bcc863060ac4977755f35a5f5", - "n66a0562670e37648a3e05c243335bff", - "n6dc118cd3f7341d9ef8c97c63e2e9d9", - "n6e9d52e1ea53958a93e5b34022e7037", - "n786694b5ed33295a885b5bcd8c7c1ce", - "n7dccd56c80233469a4609f684ebe457", - "n8492d92ab6a3da48c2b49d6fcb8a479", - "n86a8b1a56f9399f90c4c227594a9d03", - "n8a805c0cd02307bad9f7828880b53dc", - "n8c7e35b0457300d9d6a96a6b1d18329", - "n91fdaed36403d06a07f4afe85e2892c", - "n9335d0718a937f9a39ec5b36d5637fe", - "n9372fb07ad936cba31f3d4e440f395a", - "n9ab96f926d83a93a5d3ebe2888fd343", - "na2a8a54e68033d0a276eb88dbff91c3", - "na493a7b5d5b3cc29f4070a6c4589cb7", - "nadfa68cb2503a39aac6626d6c72484a", - 
"nae1218ddd2e3448b562bc79dc084401", - "nc012287be793377b975b0230b35d713", - "ncb2e01f0c5336fe82b0e0e496f2612b", - "ncb5843900903b4c8a0a8302474d8c51", - "ncbf4db2c48f3348b2c7081f9e3b363a", - "nd4ee6c3248935ce9239e4bb20a81ab8", - "ndb1cf7af0e2319c9868530d0df8fd93", - "ne36a6858a733430bffa4fec053dc1ab", - "ne7af4a7c3613b3d81fe9e6046425a36", - "ne8035dd8de732758c1cc623f80f2fc8", - "ned91fdb914c35f3a21f320f62d72ffd", - "nf5448b3c66430f4a299d08208d313a6", - "nfaa0756a06f300495fb2e2e45e05ed3", - ], - [ - ["n8d4568db0ad364692b0387e86a2f1e0", "n5bef4f4532a382eaf79a0af70b2396b"], - ["n8daedbb02273a0fbc94cc118c90649f", "nf5448b3c66430f4a299d08208d313a6"], - ["n01fb40259ad3cf285bb11a8bbbe59f2", "ne1f86f902a23e7fa4a67192e8b38a05"], - ["ncab9a48e79d357195dcee68dad3a31f", "n0197f8f210b3a1b8a7fc2f90e94744e"], - ["na493a7b5d5b3cc29f4070a6c4589cb7", "ne1f86f902a23e7fa4a67192e8b38a05"], - ["n89f083892a731d7b9d7edb0f372006d", "n136c4fedebe3eb0ba932495aff6a945"], - ["n51426abd4be3a4691c80a73c3f93b3c", "n9ab96f926d83a93a5d3ebe2888fd343"], - ["n89f083892a731d7b9d7edb0f372006d", "n8492d92ab6a3da48c2b49d6fcb8a479"], - ["n17cdc62c5d43976a413bda8f35634eb", "n6e9d52e1ea53958a93e5b34022e7037"], - ["n476ddcb6dd33e2abac43596b08c2bc1", "ne1f86f902a23e7fa4a67192e8b38a05"], - ["n6dc118cd3f7341d9ef8c97c63e2e9d9", "nfc1dcdd7476393b9a81a988c113e1cf"], - ["n91fdaed36403d06a07f4afe85e2892c", "ncb4e013a6a8348bab087cc8500a3876"], - ["n8a805c0cd02307bad9f7828880b53dc", "n3934eef90463940a6a9cf4ba2e63b1c"], - ["n2890db24f6c3cd1bbcd6b7d8cf2c045", "n0ada7b4abe63771a43052eaf188dc4b"], - ["ned91fdb914c35f3a21f320f62d72ffd", "n383748fe27434d582f0ca17af9d968a"], - ["n89f083892a731d7b9d7edb0f372006d", "n0b9e36e6b633ddb906d2044f658f110"], - ["nc3d0d49adf530bbaffe53630c184c0a", "na493a7b5d5b3cc29f4070a6c4589cb7"], - ["ncb2e01f0c5336fe82b0e0e496f2612b", "nc012287be793377b975b0230b35d713"], - ["n86a8b1a56f9399f90c4c227594a9d03", "nf3ebee137c53da28091ad7d140ce00c"], - ["nc3d0d49adf530bbaffe53630c184c0a", 
"nadfa68cb2503a39aac6626d6c72484a"], - ["na02956eba6f3a36ab9b0af2f2350213", "na2a8a54e68033d0a276eb88dbff91c3"], - ["n8daedbb02273a0fbc94cc118c90649f", "n07982b8985139249bca3a046f3a4379"], - ["n136c4fedebe3eb0ba932495aff6a945", "nfc1dcdd7476393b9a81a988c113e1cf"], - ["n9372fb07ad936cba31f3d4e440f395a", "n1430047af8537f88710c4bbf3cbfb0f"], - ["n8d4568db0ad364692b0387e86a2f1e0", "n91fdaed36403d06a07f4afe85e2892c"], - ["n854753a77933562ae72ec87c365f23d", "n40d5f0ca4bc3dd99c0b264cb186f00f"], - ["n854753a77933562ae72ec87c365f23d", "n1d48483d8023439ad98d61d156c85fb"], - ["n9ab96f926d83a93a5d3ebe2888fd343", "n383748fe27434d582f0ca17af9d968a"], - ["ne36a6858a733430bffa4fec053dc1ab", "n0cd3b95c714388bacdf1a486ab432fc"], - ["n03ffc3b9e12316d8be63261cb9dec71", "nca50848d1aa340f8c2b4776ce81868d"], - ["ne8035dd8de732758c1cc623f80f2fc8", "n0ada7b4abe63771a43052eaf188dc4b"], - ["n51426abd4be3a4691c80a73c3f93b3c", "ned91fdb914c35f3a21f320f62d72ffd"], - ["nd4ee6c3248935ce9239e4bb20a81ab8", "nfaa0756a06f300495fb2e2e45e05ed3"], - ["n5bef4f4532a382eaf79a0af70b2396b", "ncb4e013a6a8348bab087cc8500a3876"], - ["ne26def77df1385caa206c64e7e3ea53", "n786694b5ed33295a885b5bcd8c7c1ce"], - ["n854753a77933562ae72ec87c365f23d", "ne8035dd8de732758c1cc623f80f2fc8"], - ["n374443fbdc1313d98ebbe19d535fec2", "ndb1cf7af0e2319c9868530d0df8fd93"], - ["nfaa0756a06f300495fb2e2e45e05ed3", "n8c7e35b0457300d9d6a96a6b1d18329"], - ["n90b7ef55fe839b181879e036b4f8ffe", "n26725bdcc0931fab0bc73e7244545ca"], - ["n8d4568db0ad364692b0387e86a2f1e0", "ncb2e01f0c5336fe82b0e0e496f2612b"], - ["ncb5843900903b4c8a0a8302474d8c51", "ncb4e013a6a8348bab087cc8500a3876"], - ["nf5448b3c66430f4a299d08208d313a6", "nf3ebee137c53da28091ad7d140ce00c"], - ["n20c4a0601193f268bfa168f1192eacd", "nd4ee6c3248935ce9239e4bb20a81ab8"], - ["nca50848d1aa340f8c2b4776ce81868d", "nc3d0d49adf530bbaffe53630c184c0a"], - ["na02956eba6f3a36ab9b0af2f2350213", "n03ffc3b9e12316d8be63261cb9dec71"], - ["n7dccd56c80233469a4609f684ebe457", 
"n8daedbb02273a0fbc94cc118c90649f"], - ["n0ada7b4abe63771a43052eaf188dc4b", "na02956eba6f3a36ab9b0af2f2350213"], - ["n9335d0718a937f9a39ec5b36d5637fe", "n99817348b4a36a6931854c93eed8c5f"], - ["n90b7ef55fe839b181879e036b4f8ffe", "n5ced56bcc863060ac4977755f35a5f5"], - ["ncb4e013a6a8348bab087cc8500a3876", "ne26def77df1385caa206c64e7e3ea53"], - ["na02956eba6f3a36ab9b0af2f2350213", "n4790f8aa48e335aa712e2af757e180b"], - ["nc012287be793377b975b0230b35d713", "ncb4e013a6a8348bab087cc8500a3876"], - ["n8d4568db0ad364692b0387e86a2f1e0", "ncb5843900903b4c8a0a8302474d8c51"], - ["n40d5f0ca4bc3dd99c0b264cb186f00f", "n0ada7b4abe63771a43052eaf188dc4b"], - ["n38c3dd0344a3f86bc7511c454bcdf4c", "n17cdc62c5d43976a413bda8f35634eb"], - ["n6e9d52e1ea53958a93e5b34022e7037", "n90b7ef55fe839b181879e036b4f8ffe"], - ["nf3ebee137c53da28091ad7d140ce00c", "n51426abd4be3a4691c80a73c3f93b3c"], - ["n99817348b4a36a6931854c93eed8c5f", "n89f083892a731d7b9d7edb0f372006d"], - ["n89f083892a731d7b9d7edb0f372006d", "n6dc118cd3f7341d9ef8c97c63e2e9d9"], - ["n8daedbb02273a0fbc94cc118c90649f", "n66a0562670e37648a3e05c243335bff"], - ["nadfa68cb2503a39aac6626d6c72484a", "ne1f86f902a23e7fa4a67192e8b38a05"], - ["n383748fe27434d582f0ca17af9d968a", "nef42d10350b3961b53df7af67e16d9b"], - ["na02956eba6f3a36ab9b0af2f2350213", "n03f39191e8a32629145ba6a677ed040"], - ["nae1218ddd2e3448b562bc79dc084401", "n383748fe27434d582f0ca17af9d968a"], - ["n26725bdcc0931fab0bc73e7244545ca", "n1430047af8537f88710c4bbf3cbfb0f"], - ["n48bbfdc912334fc89c4f48c05e8969e", "n8a805c0cd02307bad9f7828880b53dc"], - ["ne7af4a7c3613b3d81fe9e6046425a36", "ncb4e013a6a8348bab087cc8500a3876"], - ["nfc1dcdd7476393b9a81a988c113e1cf", "n8d4568db0ad364692b0387e86a2f1e0"], - ["n0197f8f210b3a1b8a7fc2f90e94744e", "n99817348b4a36a6931854c93eed8c5f"], - ["n90b7ef55fe839b181879e036b4f8ffe", "n302d25dfc9c369ab13104d5208e7119"], - ["n1d48483d8023439ad98d61d156c85fb", "n0ada7b4abe63771a43052eaf188dc4b"], - ["na2a8a54e68033d0a276eb88dbff91c3", 
"nca50848d1aa340f8c2b4776ce81868d"], - ["n90b7ef55fe839b181879e036b4f8ffe", "n9372fb07ad936cba31f3d4e440f395a"], - ["ndb1cf7af0e2319c9868530d0df8fd93", "n2ad9caac5b737bd897d4c8844c85f12"], - ["n8492d92ab6a3da48c2b49d6fcb8a479", "nfc1dcdd7476393b9a81a988c113e1cf"], - ["n8d4568db0ad364692b0387e86a2f1e0", "ne7af4a7c3613b3d81fe9e6046425a36"], - ["n302d25dfc9c369ab13104d5208e7119", "n1430047af8537f88710c4bbf3cbfb0f"], - ["n51426abd4be3a4691c80a73c3f93b3c", "n2c88d1c1d8b35aebf883cbf259fb6bc"], - ["n786694b5ed33295a885b5bcd8c7c1ce", "n0cd3b95c714388bacdf1a486ab432fc"], - ["n854753a77933562ae72ec87c365f23d", "n2890db24f6c3cd1bbcd6b7d8cf2c045"], - ["nc3d0d49adf530bbaffe53630c184c0a", "n476ddcb6dd33e2abac43596b08c2bc1"], - ["n2c88d1c1d8b35aebf883cbf259fb6bc", "n383748fe27434d582f0ca17af9d968a"], - ["n0cd3b95c714388bacdf1a486ab432fc", "n854753a77933562ae72ec87c365f23d"], - ["n51426abd4be3a4691c80a73c3f93b3c", "nae1218ddd2e3448b562bc79dc084401"], - ["nc3d0d49adf530bbaffe53630c184c0a", "n01fb40259ad3cf285bb11a8bbbe59f2"], - ["ne1f86f902a23e7fa4a67192e8b38a05", "n374443fbdc1313d98ebbe19d535fec2"], - ["n0b9e36e6b633ddb906d2044f658f110", "nfc1dcdd7476393b9a81a988c113e1cf"], - ["ncab9a48e79d357195dcee68dad3a31f", "ncbf4db2c48f3348b2c7081f9e3b363a"], - ["n8daedbb02273a0fbc94cc118c90649f", "n86a8b1a56f9399f90c4c227594a9d03"], - ["ncbf4db2c48f3348b2c7081f9e3b363a", "n99817348b4a36a6931854c93eed8c5f"], - ["n1430047af8537f88710c4bbf3cbfb0f", "ncab9a48e79d357195dcee68dad3a31f"], - ["n4790f8aa48e335aa712e2af757e180b", "nca50848d1aa340f8c2b4776ce81868d"], - ["ne26def77df1385caa206c64e7e3ea53", "ne36a6858a733430bffa4fec053dc1ab"], - ["ncab9a48e79d357195dcee68dad3a31f", "n31688b7ab44338e9e6cb8dcaf259eef"], - ["n07982b8985139249bca3a046f3a4379", "nf3ebee137c53da28091ad7d140ce00c"], - ["n66a0562670e37648a3e05c243335bff", "nf3ebee137c53da28091ad7d140ce00c"], - ["n03f39191e8a32629145ba6a677ed040", "nca50848d1aa340f8c2b4776ce81868d"], - ["n8c7e35b0457300d9d6a96a6b1d18329", 
"n38c3dd0344a3f86bc7511c454bcdf4c"], - ["n5ced56bcc863060ac4977755f35a5f5", "n1430047af8537f88710c4bbf3cbfb0f"], - ["n2ad9caac5b737bd897d4c8844c85f12", "n48bbfdc912334fc89c4f48c05e8969e"], - ["n31688b7ab44338e9e6cb8dcaf259eef", "n99817348b4a36a6931854c93eed8c5f"], - ["n3934eef90463940a6a9cf4ba2e63b1c", "n7dccd56c80233469a4609f684ebe457"], - ["ncab9a48e79d357195dcee68dad3a31f", "n9335d0718a937f9a39ec5b36d5637fe"], - ], - ) - assert not graph4.has_cycle() - assert graph4.get_cycle() == [] - graph5 = Graph([1, 2, 3, 4, 5], [[1, 2], [2, 3], [2, 4], [4, 5], [5, 2]]) - assert graph5.has_cycle() - assert graph5.get_cycle() == [2, 4, 5, 2] diff --git a/lib/pipeline/utils/http.py b/lib/pipeline/utils/http.py deleted file mode 100644 index f175d8b..0000000 --- a/lib/pipeline/utils/http.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging - -import requests -import ujson as module_json - -logger = logging.getLogger("root") - - -def http_post_request(url, data=None, json=None, **kwargs): - response = requests.post(url, data=data, json=json, **kwargs) - if response.status_code == 200: - try: - content_dict = module_json.loads(response.content) - return content_dict - except Exception as e: - message = "the format of HTTP request result is valid: %s" % e - logger.exception(message) - return {"result": False, "code": 1, "message": message} - message = "HTTP request failed,Http status code is:%s" % response.status_code - logger.error(message) - return {"result": False, "code": response.status_code, "message": message} - - -def http_get_request(url, params=None, **kwargs): - response = requests.get(url, params=params, **kwargs) - if response.status_code == 200: - try: - content_dict = module_json.loads(response.content) - return content_dict - except Exception as e: - message = "the format of HTTP request result is valid: %s" % e - logger.exception(message) - return {"result": False, "code": 1, "message": message} - message = "HTTP request failed,Http status code is:%s" % response.status_code - logger.error(message) - return {"result": False, "code": response.status_code, "message": message} diff --git a/lib/pipeline/utils/imoports.py b/lib/pipeline/utils/imoports.py deleted file mode 100644 index efd4da4..0000000 --- a/lib/pipeline/utils/imoports.py +++ /dev/null @@ -1,21 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -def qualname(obj): - if not hasattr(obj, "__name__") and hasattr(obj, "__class__"): - obj = obj.__class__ - q = getattr(obj, "__name__") - if "." not in q: - q = ".".join((obj.__module__, q)) - return q diff --git a/lib/pipeline/utils/mako_utils/__init__.py b/lib/pipeline/utils/mako_utils/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/utils/mako_utils/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/utils/mako_utils/checker.py b/lib/pipeline/utils/mako_utils/checker.py deleted file mode 100644 index 232fb46..0000000 --- a/lib/pipeline/utils/mako_utils/checker.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. 
-Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -import ast -from typing import List - -from mako import parsetree -from mako.exceptions import MakoException -from mako.lexer import Lexer - -from .code_extract import MakoNodeCodeExtractor -from .exceptions import ForbiddenMakoTemplateException - - -def parse_template_nodes( - nodes: List[parsetree.Node], node_visitor: ast.NodeVisitor, code_extractor: MakoNodeCodeExtractor, -): - """ - 解析mako模板节点,逐个节点解析抽象语法树并检查安全性 - :param nodes: mako模板节点列表 - :param node_visitor: 节点访问类,用于遍历AST节点 - :param code_extractor: Mako 词法节点处理器,用于提取 python 代码 - """ - for node in nodes: - code = code_extractor.extract(node) - if code is None: - continue - - ast_node = ast.parse(code, "", "exec") - node_visitor.visit(ast_node) - if hasattr(node, "nodes"): - parse_template_nodes(node.nodes, node_visitor) - - -def check_mako_template_safety(text: str, node_visitor: ast.NodeVisitor, code_extractor: MakoNodeCodeExtractor) -> bool: - """ - 检查mako模板是否安全,若不安全直接抛出异常,安全则返回True - :param text: mako模板内容 - :param node_visitor: 节点访问器,用于遍历AST节点 - """ - try: - lexer_template = Lexer(text).parse() - except MakoException as mako_error: - raise ForbiddenMakoTemplateException("非mako模板,解析失败, {err_msg}".format(err_msg=mako_error.__class__.__name__)) - parse_template_nodes(lexer_template.nodes, node_visitor, code_extractor) - return True diff --git a/lib/pipeline/utils/mako_utils/code_extract.py 
b/lib/pipeline/utils/mako_utils/code_extract.py deleted file mode 100644 index 9c1ea52..0000000 --- a/lib/pipeline/utils/mako_utils/code_extract.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import abc - -from mako import parsetree -from mako.ast import PythonFragment - -from .exceptions import ForbiddenMakoTemplateException - - -class MakoNodeCodeExtractor(object): - @abc.abstractmethod - def extract(self, node): - """处理 Mako Lexer 分割出来的 code 对象,返回需要检测的 python 代码,返回 None 表示该节点不需要处理 - - :param node: mako parsetree node - :return: 需要处理的代码,或 None - """ - raise NotImplementedError() - - -class StrictMakoNodeCodeExtractor(MakoNodeCodeExtractor): - def extract(self, node): - if isinstance(node, parsetree.Code) or isinstance(node, parsetree.Expression): - return node.text - elif isinstance(node, parsetree.ControlLine): - if node.isend: - return None - return PythonFragment(node.text).code - elif isinstance(node, parsetree.Text): - return None - else: - raise ForbiddenMakoTemplateException("不支持[{}]节点".format(node.__class__.__name__)) diff --git a/lib/pipeline/utils/mako_utils/exceptions.py b/lib/pipeline/utils/mako_utils/exceptions.py deleted file mode 100644 index 384bbfb..0000000 --- a/lib/pipeline/utils/mako_utils/exceptions.py +++ /dev/null @@ 
-1,16 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - - -class ForbiddenMakoTemplateException(Exception): - pass diff --git a/lib/pipeline/utils/mako_utils/visitors.py b/lib/pipeline/utils/mako_utils/visitors.py deleted file mode 100644 index 45f82ec..0000000 --- a/lib/pipeline/utils/mako_utils/visitors.py +++ /dev/null @@ -1,115 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import _ast -import ast - -from django.utils.module_loading import import_string - -from .exceptions import ForbiddenMakoTemplateException - - -class StrictNodeVisitor(ast.NodeVisitor): - """ - 遍历语法树节点,遇到魔术方法使用或import时,抛出异常 - """ - - BLACK_LIST_MODULE_METHODS = { - "os": dir(__import__("os")), - "subprocess": dir(__import__("subprocess")), - "shutil": dir(__import__("shutil")), - "ctypes": dir(__import__("ctypes")), - "codecs": dir(__import__("codecs")), - "sys": dir(__import__("sys")), - "socket": dir(__import__("socket")), - "webbrowser": dir(__import__("webbrowser")), - "threading": dir(__import__("threading")), - "sqlite3": dir(__import__("threading")), - "signal": dir(__import__("signal")), - "imaplib": dir(__import__("imaplib")), - "fcntl": dir(__import__("fcntl")), - "pdb": dir(__import__("pdb")), - "pty": dir(__import__("pty")), - "glob": dir(__import__("glob")), - "tempfile": dir(__import__("tempfile")), - "types": dir(import_string("types.CodeType")) + dir(import_string("types.FrameType")), - "builtins": [ - "getattr", - "hasattr", - "breakpoint", - "compile", - "delattr", - "open", - "eval", - "exec", - "execfile", - "exit", - "dir", - "globals", - "locals", - "input", - "iter", - "next", - "quit", - "setattr", - "vars", - "memoryview", - "super", - "print", - ], - } - - BLACK_LIST_METHODS = [] - for module_name, methods in BLACK_LIST_MODULE_METHODS.items(): - BLACK_LIST_METHODS.append(module_name) - BLACK_LIST_METHODS.extend(methods) - BLACK_LIST_METHODS = set(BLACK_LIST_METHODS) - - WHITE_LIST_MODULES = ["datetime", "re", "random", "json", "math"] - - def __init__(self, black_list_methods=None, white_list_modules=None): - self.black_list_methods = black_list_methods or self.BLACK_LIST_METHODS - self.white_list_modules = white_list_modules or self.WHITE_LIST_MODULES - - @staticmethod - def is_white_list_ast_obj(ast_obj: _ast.AST) -> bool: - """ - 判断是否白名单对象,特殊豁免 - :param ast_obj: 抽象语法树节点 - :return: bool - """ - # re 正则表达式允许使用 compile - if 
isinstance(ast_obj, _ast.Attribute) and isinstance(ast_obj.value, _ast.Name): - if ast_obj.value.id == "re" and ast_obj.attr in ["compile"]: - return True - - return False - - def visit_Attribute(self, node): - if self.is_white_list_ast_obj(node): - return - - if node.attr in self.black_list_methods or node.attr.startswith("_"): - raise ForbiddenMakoTemplateException("Mako template forbidden.") - - def visit_Name(self, node): - if node.id in self.black_list_methods or node.id.startswith("_"): - raise ForbiddenMakoTemplateException("Mako template forbidden.") - - def visit_Import(self, node): - for name in node.names: - if name.name not in self.white_list_modules: - raise ForbiddenMakoTemplateException("Mako template forbidden.") - - def visit_ImportFrom(self, node): - self.visit_Import(node) diff --git a/lib/pipeline/utils/register.py b/lib/pipeline/utils/register.py deleted file mode 100644 index 9de35c9..0000000 --- a/lib/pipeline/utils/register.py +++ /dev/null @@ -1,69 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging -import pkgutil -import os -import sys -from importlib import import_module - - -logger = logging.getLogger("root") - - -def find_all_modules(module_dir, sub_dir=None): - modules = [] - for _, name, is_pkg in pkgutil.iter_modules([module_dir]): - if name.startswith("_"): - continue - module = name if sub_dir is None else "{}.{}".format(sub_dir, name) - if is_pkg: - modules += find_all_modules(os.path.join(module_dir, name), module) - else: - modules.append(module) - return modules - - -def autodiscover_items(module): - """ - Given a path to discover, auto register all items - """ - # Workaround for a Python 3.2 bug with pkgutil.iter_modules - module_dir = module.__path__[0] - sys.path_importer_cache.pop(module_dir, None) - modules = find_all_modules(module_dir) - for name in modules: - module_path = "{}.{}".format(module.__name__, name) - try: - __import__(module_path) - except Exception as e: - logger.error(f"[!] module({module_path}) import failed with err: {e}") - - -def autodiscover_collections(path): - """ - Auto-discover INSTALLED_APPS modules and fail silently when - not present. This forces an import on them to register any admin bits they - may want. - """ - from django.apps import apps - - for app_config in apps.get_app_configs(): - # Attempt to import the app's module. - try: - - _module = import_module("%s.%s" % (app_config.name, path)) - autodiscover_items(_module) - except ImportError as e: - if not str(e) == "No module named %s" % path: - pass diff --git a/lib/pipeline/utils/uniqid.py b/lib/pipeline/utils/uniqid.py deleted file mode 100644 index e42be64..0000000 --- a/lib/pipeline/utils/uniqid.py +++ /dev/null @@ -1,30 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import uuid - -from pipeline.conf import settings - - -def uniqid(): - return uuid.uuid3(uuid.uuid1(), uuid.uuid4().hex).hex - - -def node_uniqid(): - uid = uniqid() - return "n%s" % uid[1:] if settings.UUID_DIGIT_STARTS_SENSITIVE else uid - - -def line_uniqid(): - uid = uniqid() - return "l%s" % uid[1:] if settings.UUID_DIGIT_STARTS_SENSITIVE else uid diff --git a/lib/pipeline/utils/utils.py b/lib/pipeline/utils/utils.py deleted file mode 100644 index c39a55c..0000000 --- a/lib/pipeline/utils/utils.py +++ /dev/null @@ -1,116 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -ITERATED = 1 -NEW = 0 -ITERATING = -1 - - -def has_circle(graph): - # init marks - marks = {} - for node in graph: - # marks as not iterated - marks[node] = NEW - - # dfs every node - for cur_node in graph: - trace = [cur_node] - for node in graph[cur_node]: - if marks[node] == ITERATED: - continue - trace.append(node) - # return immediately when circle be detected - if _has_circle(graph, node, marks, trace): - return True, trace - trace.pop() - # mark as iterated - marks[cur_node] = ITERATED - - return False, [] - - -def _has_circle(graph, cur_node, marks, trace): - # detect circle when iterate to a node which been marked as -1 - if marks[cur_node] == ITERATING: - return True - # mark as iterating - marks[cur_node] = ITERATING - # dfs - for node in graph[cur_node]: - # return immediately when circle be detected - trace.append(node) - if _has_circle(graph, node, marks, trace): - return True - trace.pop() - # mark as iterated - marks[cur_node] = ITERATED - - return False - - -def convert_bytes_to_str(obj): - - converted = set() - - def _convert(obj, converted): - if isinstance(obj, dict): - new_dict = obj.__class__() - - for attr, value in obj.items(): - - if isinstance(attr, bytes): - attr = attr.decode("utf-8") - - value = _convert(value, converted) - - new_dict[attr] = value - - obj = new_dict - - if isinstance(obj, list): - new_list = obj.__class__() - - for item in obj: - new_list.append(_convert(item, converted)) - - obj = new_list - - elif isinstance(obj, bytes): - - try: - obj = obj.decode("utf-8") - except Exception: - pass - - elif hasattr(obj, "__dict__"): - - if id(obj) in converted: - return obj - else: - converted.add(id(obj)) - - new__dict__ = {} - - for attr, value in obj.__dict__.items(): - - if isinstance(attr, bytes): - attr = attr.decode("utf-8") - - new__dict__[attr] = _convert(value, converted) - - obj.__dict__ = new__dict__ - - return obj - - return _convert(obj, converted) diff --git a/lib/pipeline/validators/__init__.py 
b/lib/pipeline/validators/__init__.py deleted file mode 100644 index c529e9d..0000000 --- a/lib/pipeline/validators/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from .base import validate_pipeline_tree # noqa diff --git a/lib/pipeline/validators/base.py b/lib/pipeline/validators/base.py deleted file mode 100644 index 8399d27..0000000 --- a/lib/pipeline/validators/base.py +++ /dev/null @@ -1,41 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from pipeline import exceptions -from pipeline.validators.connection import ( - validate_graph_connection, - validate_graph_without_circle, -) -from pipeline.validators.gateway import validate_gateways, validate_stream -from pipeline.validators.utils import format_pipeline_tree_io_to_list - - -def validate_pipeline_tree(pipeline_tree, cycle_tolerate=False): - format_pipeline_tree_io_to_list(pipeline_tree) - # 1. connection validation - try: - validate_graph_connection(pipeline_tree) - except exceptions.ConnectionValidateError as e: - raise exceptions.ParserException(e.detail) - - # do not tolerate circle in flow - if not cycle_tolerate: - no_cycle = validate_graph_without_circle(pipeline_tree) - if not no_cycle["result"]: - raise exceptions.ParserException(no_cycle["message"]) - - # 2. gateway validation - validate_gateways(pipeline_tree) - - # 3. stream validation - validate_stream(pipeline_tree) diff --git a/lib/pipeline/validators/connection.py b/lib/pipeline/validators/connection.py deleted file mode 100644 index 922eda7..0000000 --- a/lib/pipeline/validators/connection.py +++ /dev/null @@ -1,67 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -from django.utils.translation import ugettext_lazy as _ - -from pipeline.exceptions import ConnectionValidateError -from pipeline.utils.graph import Graph -from pipeline.validators.rules import NODE_RULES -from pipeline.validators.utils import get_nodes_dict -from pipeline.core.constants import PE - - -def validate_graph_connection(data): - """ - 节点连接合法性校验 - """ - nodes = get_nodes_dict(data) - - result = {"result": True, "message": {}, "failed_nodes": []} - - for i in nodes: - node_type = nodes[i][PE.type] - rule = NODE_RULES[node_type] - message = "" - for j in nodes[i][PE.target]: - if nodes[j][PE.type] not in rule["allowed_out"]: - message += _("不能连接%s类型节点\n") % nodes[i][PE.type] - if rule["min_in"] > len(nodes[i][PE.source]) or len(nodes[i][PE.source]) > rule["max_in"]: - message += _("节点的入度最大为%s,最小为%s\n") % (rule["max_in"], rule["min_in"]) - if rule["min_out"] > len(nodes[i][PE.target]) or len(nodes[i][PE.target]) > rule["max_out"]: - message += _("节点的出度最大为%s,最小为%s\n") % (rule["max_out"], rule["min_out"]) - if message: - result["failed_nodes"].append(i) - result["message"][i] = message - - if result["failed_nodes"]: - raise ConnectionValidateError(failed_nodes=result["failed_nodes"], detail=result["message"]) - - -def validate_graph_without_circle(data): - """ - validate if a graph has not cycle - - return { - "result": False, - "message": "error message", - "error_data": ["node1_id", "node2_id", "node1_id"] - } - """ - - nodes = [data[PE.start_event][PE.id], data[PE.end_event][PE.id]] - nodes += list(data[PE.gateways].keys()) + list(data[PE.activities].keys()) - flows = [[flow[PE.source], flow[PE.target]] for _, flow in list(data[PE.flows].items())] - cycle = Graph(nodes, flows).get_cycle() - if cycle: - return {"result": False, "message": "pipeline graph has circle", "error_data": cycle} - return {"result": True, "data": []} diff --git a/lib/pipeline/validators/gateway.py b/lib/pipeline/validators/gateway.py deleted file mode 100644 index 
2627e51..0000000 --- a/lib/pipeline/validators/gateway.py +++ /dev/null @@ -1,507 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -import queue - -from django.utils.translation import ugettext_lazy as _ - -from pipeline import exceptions -from pipeline.core.constants import PE -from pipeline.engine.utils import Stack -from pipeline.validators.utils import get_node_for_sequence, get_nodes_dict - -STREAM = "stream" -P_STREAM = "p_stream" -P = "p" -MAIN_STREAM = "main" - -PARALLEL_GATEWAYS = {PE.ParallelGateway, PE.ConditionalParallelGateway} - - -def not_in_parallel_gateway(gateway_stack, start_from=None): - """ - check whether there is parallel gateway in stack from specific gateway - :param gateway_stack: - :param start_from: - :return: - """ - start = 0 - if start_from: - id_stack = [g[PE.id] for g in gateway_stack] - start = id_stack.index(start_from) - - for i in range(start, len(gateway_stack)): - gateway = gateway_stack[i] - if gateway[PE.type] in PARALLEL_GATEWAYS: - return False - return True - - -def matched_in_prev_blocks(gid, current_start, block_nodes): - """ - check whether gateway with gid is matched in previous block - :param gid: - :param current_start: - :param block_nodes: - :return: - """ - prev_nodes = set() - for prev_start, nodes in 
list(block_nodes.items()): - if prev_start == current_start: - continue - prev_nodes.update(nodes) - - return gid in prev_nodes - - -def match_converge( - converges, - gateways, - cur_index, - end_event_id, - block_start, - block_nodes, - converged, - dist_from_start, - converge_in_len, - stack=None, -): - """ - find converge for parallel and exclusive in blocks, and check sanity of gateway - :param converges: - :param gateways: - :param cur_index: - :param end_event_id: - :param block_start: - :param block_nodes: - :param converged: - :param dist_from_start: - :param stack: - :param converge_in_len: - :return: - """ - - if stack is None: - stack = Stack() - - if cur_index not in gateways: - return None, False - - # return if this node is already matched - if gateways[cur_index]["match"]: - return gateways[cur_index]["match"], gateways[cur_index]["share_converge"] - - current_gateway = gateways[cur_index] - target = gateways[cur_index][PE.target] - stack.push(gateways[cur_index]) - stack_id_set = {g[PE.id] for g in stack} - - # find closest converge recursively - for i in range(len(target)): - - # do not process prev blocks nodes - if matched_in_prev_blocks(target[i], block_start, block_nodes): - target[i] = None - continue - - block_nodes[block_start].add(target[i]) - - # do not find self's converge node again - while target[i] in gateways and target[i] != current_gateway[PE.id]: - - if target[i] in stack_id_set: - # return to previous gateway - - if not_in_parallel_gateway(stack, start_from=target[i]): - # do not trace back - target[i] = None - break - else: - raise exceptions.ConvergeMatchError(cur_index, _("并行网关中的分支网关必须将所有分支汇聚到一个汇聚网关")) - - converge_id, shared = match_converge( - converges=converges, - gateways=gateways, - cur_index=target[i], - end_event_id=end_event_id, - block_start=block_start, - block_nodes=block_nodes, - stack=stack, - converged=converged, - dist_from_start=dist_from_start, - converge_in_len=converge_in_len, - ) - if converge_id: - 
target[i] = converge_id - - if not shared: - # try to get next node fo converge which is not shared - target[i] = converges[converge_id][PE.target][0] - - else: - # can't find corresponding converge gateway, which means this gateway will reach end event directly - target[i] = end_event_id - - if target[i] in converges and dist_from_start[target[i]] < dist_from_start[cur_index]: - # do not match previous converge - target[i] = None - - stack.pop() - - is_exg = current_gateway[PE.type] == PE.ExclusiveGateway - converge_id = None - shared = False - cur_to_converge = len(target) - converge_end = False - - # gateway match validation - for i in range(len(target)): - - # mark first converge - if target[i] in converges and not converge_id: - converge_id = target[i] - - # same converge node - elif target[i] in converges and converge_id == target[i]: - pass - - # exclusive gateway point to end - elif is_exg and target[i] == end_event_id: - if not_in_parallel_gateway(stack): - converge_end = True - else: - raise exceptions.ConvergeMatchError(cur_index, _("并行网关中的分支网关必须将所有分支汇聚到一个汇聚网关")) - - # exclusive gateway point back to self - elif is_exg and target[i] == current_gateway[PE.id]: - # not converge behavior - cur_to_converge -= 1 - pass - - # exclusive gateway converge at different converge gateway - elif is_exg and target[i] in converges and converge_id != target[i]: - raise exceptions.ConvergeMatchError(cur_index, _("分支网关的所有分支第一个遇到的汇聚网关必须是同一个")) - - # meet previous node - elif is_exg and target[i] is None: - # not converge behavior - cur_to_converge -= 1 - pass - - # invalid cases - else: - raise exceptions.ConvergeMatchError(cur_index, _("非法网关,请检查其分支是否符合规则")) - - if is_exg: - if converge_id in converges: - # this converge is shared by multiple gateway - # only compare to the number of positive incoming - shared = converge_in_len[converge_id] > cur_to_converge or converge_id in converged - else: - # for parallel gateway - - converge_incoming = 
len(converges[converge_id][PE.incoming]) - gateway_outgoing = len(target) - - if converge_incoming > gateway_outgoing: - for gateway_id in converged.get(converge_id, []): - # find another parallel gateway - if gateways[gateway_id][PE.type] in PARALLEL_GATEWAYS: - raise exceptions.ConvergeMatchError(converge_id, _("汇聚网关只能汇聚来自同一个并行网关的分支")) - - shared = True - - elif converge_incoming < gateway_outgoing: - raise exceptions.ConvergeMatchError(converge_id, _("汇聚网关没有汇聚其对应的并行网关的所有分支")) - - current_gateway["match"] = converge_id - current_gateway["share_converge"] = shared - current_gateway["converge_end"] = converge_end - - converged.setdefault(converge_id, []).append(current_gateway[PE.id]) - block_nodes[block_start].add(current_gateway[PE.id]) - - return converge_id, shared - - -def distance_from(origin, node, tree, marked, visited=None): - """ - get max distance from origin to node - :param origin: - :param node: - :param tree: - :param marked: - :param visited: - :return: - """ - if visited is None: - visited = set() - - if node[PE.id] in marked: - return marked[node[PE.id]] - - if node[PE.id] == origin[PE.id]: - return 0 - - if node[PE.id] in visited: - # do not trace circle - return None - - visited.add(node[PE.id]) - - incoming_dist = [] - for incoming in node[PE.incoming]: - prev_node = get_node_for_sequence(incoming, tree, PE.source) - - # get incoming node's distance recursively - dist = distance_from(origin=origin, node=prev_node, tree=tree, marked=marked, visited=visited) - - # if this incoming do not trace back to current node - if dist is not None: - incoming_dist.append(dist + 1) - - if not incoming_dist: - return None - - # get max distance - res = max(incoming_dist) - marked[node[PE.id]] = res - return res - - -def validate_gateways(tree): - """ - check sanity of gateways and find their converge gateway - :param tree: - :return: - """ - converges = {} - gateways = {} - all = {} - distances = {} - converge_positive_in = {} - process_order = [] - - # data 
preparation - for i, item in list(tree[PE.gateways].items()): - node = { - PE.incoming: item[PE.incoming] if isinstance(item[PE.incoming], list) else [item[PE.incoming]], - PE.outgoing: item[PE.outgoing] if isinstance(item[PE.outgoing], list) else [item[PE.outgoing]], - PE.type: item[PE.type], - PE.target: [], - PE.source: [], - PE.id: item[PE.id], - "match": None, - } - - # find all first reach nodes(ConvergeGateway, ExclusiveGateway, ParallelGateway, EndEvent) - # which is not ServiceActivity for each gateway - for index in node[PE.outgoing]: - index = tree[PE.flows][index][PE.target] - while index in tree[PE.activities]: - index = tree[PE.flows][tree[PE.activities][index][PE.outgoing]][PE.target] - - # append this node's id to current gateway's target list - node[PE.target].append(index) - - # get current node's distance from start event - if not distance_from(node=node, origin=tree[PE.start_event], tree=tree, marked=distances): - raise exceptions.ConvergeMatchError(node[PE.id], _("无法获取该网关距离开始节点的距离")) - - if item[PE.type] == PE.ConvergeGateway: - converges[i] = node - else: - process_order.append(i) - gateways[i] = node - - all[i] = node - - # calculate positive incoming number for converge - for nid, node in list(all.items()): - for t in node[PE.target]: - if t in converges and distances[t] > distances[nid]: - converge_positive_in[t] = converge_positive_in.setdefault(t, 0) + 1 - - process_order.sort(key=lambda gid: distances[gid]) - end_event_id = tree[PE.end_event][PE.id] - converged = {} - block_nodes = {} - visited = set() - - # process in distance order - for gw in process_order: - if gw in visited or "match" in gw: - continue - visited.add(gw) - - block_nodes[gw] = set() - - match_converge( - converges=converges, - gateways=gateways, - cur_index=gw, - end_event_id=end_event_id, - converged=converged, - block_start=gw, - block_nodes=block_nodes, - dist_from_start=distances, - converge_in_len=converge_positive_in, - ) - - # set converge gateway - for i in 
gateways: - if gateways[i]["match"]: - tree[PE.gateways][i][PE.converge_gateway_id] = gateways[i]["match"] - - return converged - - -def blend(source, target, custom_stream=None): - """ - blend source and target streams - :param source: - :param target: - :param custom_stream: - :return: - """ - - if custom_stream: - # use custom stream instead of source's stream - if isinstance(custom_stream, set): - for stream in custom_stream: - target[STREAM].add(stream) - else: - target[STREAM].add(custom_stream) - - return - - if len(source[STREAM]) == 0: - raise exceptions.InvalidOperationException("stream validation error, node(%s) stream is empty" % source[PE.id]) - - # blend - for s in source[STREAM]: - target[STREAM].add(s) - - -def streams_for_parallel(p): - streams = set() - for i, target_id in enumerate(p[PE.target]): - streams.add("{}_{}".format(p[PE.id], i)) - - return streams - - -def flowing(where, to, parallel_converges): - """ - mark target's stream from target - :param where: - :param to: - :param parallel_converges: - :return: - """ - is_parallel = where[PE.type] in PARALLEL_GATEWAYS - - stream = None - if is_parallel: - # add parallel's stream to its converge - parallel_converge = to[where[PE.converge_gateway_id]] - blend(source=where, target=parallel_converge, custom_stream=stream) - - if len(parallel_converge[STREAM]) > 1: - raise exceptions.StreamValidateError(node_id=parallel_converge) - - # flow to target - for i, target_id in enumerate(where[PE.target]): - target = to[target_id] - fake = False - - # generate different stream - if is_parallel: - stream = "{}_{}".format(where[PE.id], i) - - if target_id in parallel_converges: - - is_valid_branch = where[STREAM].issubset(parallel_converges[target_id][P_STREAM]) - is_direct_connect = where.get(PE.converge_gateway_id) == target_id - - if is_valid_branch or is_direct_connect: - # do not flow when branch of parallel converge to its converge gateway - fake = True - - if not fake: - blend(source=where, 
target=target, custom_stream=stream) - - # sanity check - if len(target[STREAM]) != 1: - raise exceptions.StreamValidateError(node_id=target_id) - - -def validate_stream(tree): - """ - validate flow stream - :param tree: pipeline tree - :return: - """ - # data preparation - start_event_id = tree[PE.start_event][PE.id] - end_event_id = tree[PE.end_event][PE.id] - nodes = get_nodes_dict(tree) - nodes[start_event_id][STREAM] = {MAIN_STREAM} - nodes[end_event_id][STREAM] = {MAIN_STREAM} - parallel_converges = {} - visited = set({}) - - for nid, node in list(nodes.items()): - node.setdefault(STREAM, set()) - - # set allow streams for parallel's converge - if node[PE.type] in PARALLEL_GATEWAYS: - parallel_converges[node[PE.converge_gateway_id]] = {P_STREAM: streams_for_parallel(node), P: nid} - - # build stream from start - node_queue = queue.Queue() - node_queue.put(nodes[start_event_id]) - while not node_queue.empty(): - - # get node - node = node_queue.get() - - if node[PE.id] in visited: - # flow again to validate stream, but do not add target to queue - flowing(where=node, to=nodes, parallel_converges=parallel_converges) - continue - - # add to queue - for target_id in node[PE.target]: - node_queue.put(nodes[target_id]) - - # mark as visited - visited.add(node[PE.id]) - - # flow - flowing(where=node, to=nodes, parallel_converges=parallel_converges) - - # data clean - for nid, n in list(nodes.items()): - if len(n[STREAM]) != 1: - raise exceptions.StreamValidateError(node_id=nid) - - # replace set to str - n[STREAM] = n[STREAM].pop() - - # isolate node check - for __, node in list(nodes.items()): - if not node[STREAM]: - raise exceptions.IsolateNodeError() - - return nodes diff --git a/lib/pipeline/validators/handlers.py b/lib/pipeline/validators/handlers.py deleted file mode 100644 index 52af526..0000000 --- a/lib/pipeline/validators/handlers.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by 
making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.dispatch import receiver - -from pipeline.core.flow.event import EndEvent -from pipeline.core.flow.signals import post_new_end_event_register -from pipeline.validators import rules - - -@receiver(post_new_end_event_register, sender=EndEvent) -def post_new_end_event_register_handler(sender, node_type, node_cls, **kwargs): - rules.NODE_RULES[node_type] = rules.SINK_RULE - rules.FLOW_NODES_WITHOUT_STARTEVENT.append(node_type) diff --git a/lib/pipeline/validators/rules.py b/lib/pipeline/validators/rules.py deleted file mode 100644 index 5b3b814..0000000 --- a/lib/pipeline/validators/rules.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations under the License. -""" - -from pipeline.core.flow import FlowNodeClsFactory - -MAX_IN = 1000 -MAX_OUT = 1000 -FLOW_NODES_WITHOUT_STARTEVENT = FlowNodeClsFactory.node_types_without_start_event() - -FLOW_NODES_WITHOUT_START_AND_END = FlowNodeClsFactory.node_types_without_start_end_event() - -SOURCE_RULE = {"min_in": 0, "max_in": 0, "min_out": 1, "max_out": 1, "allowed_out": FLOW_NODES_WITHOUT_START_AND_END} - -SINK_RULE = {"min_in": 1, "max_in": MAX_IN, "min_out": 0, "max_out": 0, "allowed_out": []} - -ACTIVITY_RULE = { - "min_in": 1, - "max_in": MAX_IN, - "min_out": 1, - "max_out": 1, - "allowed_out": FLOW_NODES_WITHOUT_STARTEVENT, -} - -EMIT_RULE = { - "min_in": 1, - "max_in": MAX_IN, - "min_out": 1, - "max_out": MAX_OUT, - "allowed_out": FLOW_NODES_WITHOUT_STARTEVENT, -} - -CONVERGE_RULE = { - "min_in": 1, - "max_in": MAX_IN, - "min_out": 1, - "max_out": 1, - "allowed_out": FLOW_NODES_WITHOUT_STARTEVENT, -} - -# rules of activity graph -NODE_RULES = { - "EmptyStartEvent": SOURCE_RULE, - "EmptyEndEvent": SINK_RULE, - "ServiceActivity": ACTIVITY_RULE, - "ExclusiveGateway": EMIT_RULE, - "ParallelGateway": EMIT_RULE, - "ConditionalParallelGateway": EMIT_RULE, - "ConvergeGateway": CONVERGE_RULE, - "SubProcess": ACTIVITY_RULE, -} diff --git a/lib/pipeline/validators/utils.py b/lib/pipeline/validators/utils.py deleted file mode 100644 index 044d618..0000000 --- a/lib/pipeline/validators/utils.py +++ /dev/null @@ -1,91 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from copy import deepcopy - -from pipeline import exceptions -from pipeline.core.constants import PE - - -def format_to_list(notype): - """ - format a data to list - :return: - """ - if isinstance(notype, list): - return notype - if not notype: - return [] - return [notype] - - -def format_node_io_to_list(node, i=True, o=True): - if i: - node["incoming"] = format_to_list(node["incoming"]) - - if o: - node["outgoing"] = format_to_list(node["outgoing"]) - - -def format_pipeline_tree_io_to_list(pipeline_tree): - """ - :summary: format incoming and outgoing to list - :param pipeline_tree: - :return: - """ - for act in list(pipeline_tree[PE.activities].values()): - format_node_io_to_list(act, o=False) - - for gateway in list(pipeline_tree[PE.gateways].values()): - format_node_io_to_list(gateway, o=False) - - format_node_io_to_list(pipeline_tree[PE.end_event], o=False) - - -def get_node_for_sequence(sid, tree, node_type): - target_id = tree[PE.flows][sid][node_type] - - if target_id in tree[PE.activities]: - return tree[PE.activities][target_id] - elif target_id in tree[PE.gateways]: - return tree[PE.gateways][target_id] - elif target_id == tree[PE.end_event][PE.id]: - return tree[PE.end_event] - elif target_id == tree[PE.start_event][PE.id]: - return tree[PE.start_event] - - raise exceptions.InvalidOperationException("node(%s) not in data" % target_id) - - -def get_nodes_dict(data): - """ - get all FlowNodes of a pipeline - """ - data = deepcopy(data) - start = data[PE.start_event][PE.id] - end = data[PE.end_event][PE.id] - - nodes = {start: data[PE.start_event], end: 
data[PE.end_event]} - - nodes.update(data[PE.activities]) - nodes.update(data[PE.gateways]) - - for node in list(nodes.values()): - # format to list - node[PE.incoming] = format_to_list(node[PE.incoming]) - node[PE.outgoing] = format_to_list(node[PE.outgoing]) - - node[PE.source] = [data[PE.flows][incoming][PE.source] for incoming in node[PE.incoming]] - node[PE.target] = [data[PE.flows][outgoing][PE.target] for outgoing in node[PE.outgoing]] - - return nodes diff --git a/lib/pipeline/variable_framework/__init__.py b/lib/pipeline/variable_framework/__init__.py deleted file mode 100644 index 7ea9a1b..0000000 --- a/lib/pipeline/variable_framework/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -default_app_config = "pipeline.variable_framework.apps.VariableFrameworkConfig" diff --git a/lib/pipeline/variable_framework/admin.py b/lib/pipeline/variable_framework/admin.py deleted file mode 100644 index 853cfe9..0000000 --- a/lib/pipeline/variable_framework/admin.py +++ /dev/null @@ -1,23 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. 
-Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.contrib import admin - -from pipeline.variable_framework import models - - -@admin.register(models.VariableModel) -class VariableModelAdmin(admin.ModelAdmin): - list_display = ["id", "code", "status"] - search_fields = ["code"] - list_filter = ["status"] diff --git a/lib/pipeline/variable_framework/apps.py b/lib/pipeline/variable_framework/apps.py deleted file mode 100644 index a60a5e6..0000000 --- a/lib/pipeline/variable_framework/apps.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging - -from django.apps import AppConfig -from django.db.utils import OperationalError, ProgrammingError - -from pipeline.conf import settings -from pipeline.utils.register import autodiscover_collections -from pipeline.variable_framework import context - -logger = logging.getLogger("root") - - -class VariableFrameworkConfig(AppConfig): - name = "pipeline.variable_framework" - verbose_name = "PipelineVariableFramework" - - def ready(self): - """ - @summary: 注册公共部分和RUN_VER下的变量到数据库 - @return: - """ - from pipeline.variable_framework.signals.handlers import pre_variable_register_handler # noqa - - for path in settings.VARIABLE_AUTO_DISCOVER_PATH: - autodiscover_collections(path) - - if context.skip_update_var_models(): - return - - from pipeline.variable_framework.models import VariableModel - from pipeline.core.data.library import VariableLibrary - - try: - print("update variable models") - VariableModel.objects.exclude(code__in=list(VariableLibrary.variables.keys())).update(status=False) - print("update variable models finish") - except (ProgrammingError, OperationalError) as e: - # first migrate - logger.exception(e) diff --git a/lib/pipeline/variable_framework/context.py b/lib/pipeline/variable_framework/context.py deleted file mode 100644 index 3f26946..0000000 --- a/lib/pipeline/variable_framework/context.py +++ /dev/null @@ -1,28 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from pipeline.conf import settings -from pipeline.utils import env - -UPDATE_TRIGGER = "update_variable_models" - - -def skip_update_var_models(): - if settings.AUTO_UPDATE_VARIABLE_MODELS: - return False - - django_command = env.get_django_command() - if django_command is None: - return True - - return django_command != UPDATE_TRIGGER diff --git a/lib/pipeline/variable_framework/management/__init__.py b/lib/pipeline/variable_framework/management/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/variable_framework/management/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" diff --git a/lib/pipeline/variable_framework/management/commands/__init__.py b/lib/pipeline/variable_framework/management/commands/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/variable_framework/management/commands/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/variable_framework/management/commands/update_variable_models.py b/lib/pipeline/variable_framework/management/commands/update_variable_models.py deleted file mode 100644 index 525d010..0000000 --- a/lib/pipeline/variable_framework/management/commands/update_variable_models.py +++ /dev/null @@ -1,20 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.core.management import BaseCommand - - -class Command(BaseCommand): - def handle(self, *args, **options): - # do not need to do anything, the app ready will handle model update work - print("variable models update finished.") diff --git a/lib/pipeline/variable_framework/migrations/0001_initial.py b/lib/pipeline/variable_framework/migrations/0001_initial.py deleted file mode 100644 index bf00d16..0000000 --- a/lib/pipeline/variable_framework/migrations/0001_initial.py +++ /dev/null @@ -1,34 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [] - - operations = [ - migrations.CreateModel( - name="VariableModel", - fields=[ - ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), - ("code", models.CharField(max_length=255, unique=True, verbose_name="\u53d8\u91cf\u7f16\u7801")), - ("status", models.BooleanField(default=True, verbose_name="\u53d8\u91cf\u662f\u5426\u53ef\u7528")), - ], - options={"verbose_name": "Variable\u53d8\u91cf", "verbose_name_plural": "Variable\u53d8\u91cf"}, - ), - ] diff --git a/lib/pipeline/variable_framework/migrations/__init__.py b/lib/pipeline/variable_framework/migrations/__init__.py deleted file mode 100644 index 4009729..0000000 --- a/lib/pipeline/variable_framework/migrations/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" diff --git a/lib/pipeline/variable_framework/models.py b/lib/pipeline/variable_framework/models.py deleted file mode 100644 index 84b49a5..0000000 --- a/lib/pipeline/variable_framework/models.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" - -from django.db import models -from django.utils.translation import ugettext_lazy as _ - -from pipeline.core.data.library import VariableLibrary - - -class VariableModel(models.Model): - """ - 注册的变量 - """ - - code = models.CharField(_("变量编码"), max_length=255, unique=True) - status = models.BooleanField(_("变量是否可用"), default=True) - - class Meta: - verbose_name = _("Variable变量") - verbose_name_plural = _("Variable变量") - - def __unicode__(self): - return self.code - - def get_class(self): - return VariableLibrary.get_var_class(self.code) - - @property - def name(self): - return self.get_class().name - - @property - def form(self): - return self.get_class().form - - @property - def type(self): - return self.get_class().type - - @property - def tag(self): - return self.get_class().tag - - @property - def meta_tag(self): - return getattr(self.get_class(), "meta_tag") diff --git a/lib/pipeline/variable_framework/signals/__init__.py b/lib/pipeline/variable_framework/signals/__init__.py deleted file mode 100644 
index 4009729..0000000 --- a/lib/pipeline/variable_framework/signals/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. -""" diff --git a/lib/pipeline/variable_framework/signals/handlers.py b/lib/pipeline/variable_framework/signals/handlers.py deleted file mode 100644 index e971a7b..0000000 --- a/lib/pipeline/variable_framework/signals/handlers.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community -Edition) available. -Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved. -Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. -You may obtain a copy of the License at -http://opensource.org/licenses/MIT -Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on -an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
-""" - -import logging - -from django.db.utils import ProgrammingError, OperationalError -from django.dispatch import receiver - -from pipeline.core.data.var import LazyVariable -from pipeline.core.signals import pre_variable_register -from pipeline.variable_framework.models import VariableModel -from pipeline.variable_framework import context - -logger = logging.getLogger("root") - - -@receiver(pre_variable_register, sender=LazyVariable) -def pre_variable_register_handler(sender, variable_cls, **kwargs): - if context.skip_update_var_models(): - return - - try: - print("update {} variable model".format(variable_cls.code)) - obj, created = VariableModel.objects.get_or_create(code=variable_cls.code, defaults={"status": __debug__}) - if not created and not obj.status: - obj.status = True - obj.save() - except (ProgrammingError, OperationalError): - # first migrate - logger.exception("update variable model fail") diff --git a/manage.py b/manage.py index 62f5adf..976fa7d 100644 --- a/manage.py +++ b/manage.py @@ -6,7 +6,7 @@ import sys def main(): """Run administrative tasks.""" - os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dj_flow.settings') + os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django_vue_cli.settings') try: from django.core.management import execute_from_command_line except ImportError as exc: diff --git a/templates/index.html b/templates/index.html index d0d4851..e4a0227 100644 --- a/templates/index.html +++ b/templates/index.html @@ -3,7 +3,7 @@ - 任务调度平台 + django-vue diff --git a/web/index.html b/web/index.html index 977a8fd..da759f7 100644 --- a/web/index.html +++ b/web/index.html @@ -3,13 +3,13 @@ - 任务调度平台 + django-vue
diff --git a/web/index.prod.html b/web/index.prod.html index 61c9f8d..98124c8 100644 --- a/web/index.prod.html +++ b/web/index.prod.html @@ -1,3 +1,3 @@ -任务调度平台
\ No newline at end of file + window.CSRF_COOKIE_NAME = 'dj-flow_csrftoken'
diff --git a/web/package.json b/web/package.json index 9d3132c..ebaa317 100644 --- a/web/package.json +++ b/web/package.json @@ -12,8 +12,6 @@ "build": "node build/build.js" }, "dependencies": { - "@antv/g2": "^3.4.10", - "@antv/g6": "^4.1.7", "axios": "^0.16.2", "bk-magic-vue": "^2.3.0", "brace": "^0.11.1", @@ -24,8 +22,6 @@ "jquery": "^2.2.4", "lodash": "^4.17.15", "moment": "^2.24.0", - "monaco-editor": "^0.19.0", - "monaco-editor-webpack-plugin": "^1.8.0", "screenfull": "^5.1.0", "stylelint": "^13.5.0", "stylelint-scss": "^3.19.0", diff --git a/web/src/api/apiUrl/history/node_history.js b/web/src/api/apiUrl/history/node_history.js deleted file mode 100644 index 2e2a4f3..0000000 --- a/web/src/api/apiUrl/history/node_history.js +++ /dev/null @@ -1,10 +0,0 @@ -import {GET, reUrl} from '../../axiosconfig/axiosconfig' - -export default { - list: function(params) { - return GET(reUrl + '/node_history/', params) - }, - retrieve: function(id, params) { - return GET(reUrl + '/node_history/' + JSON.stringify(id) + '/', params) - } -} diff --git a/web/src/api/apiUrl/history/process_history.js b/web/src/api/apiUrl/history/process_history.js deleted file mode 100644 index c197a94..0000000 --- a/web/src/api/apiUrl/history/process_history.js +++ /dev/null @@ -1,10 +0,0 @@ -import {GET, reUrl} from '../../axiosconfig/axiosconfig' - -export default { - list: function(params) { - return GET(reUrl + '/process_history/', params) - }, - retrieve: function(id, params) { - return GET(reUrl + '/process_history/' + JSON.stringify(id) + '/', params) - } -} diff --git a/web/src/api/apiUrl/monitor/node_run.js b/web/src/api/apiUrl/monitor/node_run.js deleted file mode 100644 index 38f11f9..0000000 --- a/web/src/api/apiUrl/monitor/node_run.js +++ /dev/null @@ -1,19 +0,0 @@ -import {GET, POST, reUrl} from '../../axiosconfig/axiosconfig' - -export default { - list: function(params) { - return GET(reUrl + '/process/sub_run/', params) - }, - retrieve: function(id, params) { - return GET(reUrl + 
'/process/sub_run/' + JSON.stringify(id) + '/', params) - }, - control: function(params) { - return POST(reUrl + '/node_run/control/', params) - }, - node_snapshot: function(params) { - return GET(reUrl + '/node_snapshot/', params) - }, - node_snapshot_id: function(id, params) { - return GET(reUrl + '/node_snapshot/' + JSON.stringify(id) + '/', params) - } -} diff --git a/web/src/api/apiUrl/monitor/process_run.js b/web/src/api/apiUrl/monitor/process_run.js deleted file mode 100644 index 77372a2..0000000 --- a/web/src/api/apiUrl/monitor/process_run.js +++ /dev/null @@ -1,29 +0,0 @@ -import {GET, POST, PUT, DELETE, reUrl} from '../../axiosconfig/axiosconfig' - -export default { - // 作业台 - list: function(params) { - return GET(reUrl + '/process/run/', params) - }, - create: function(params) { - return POST(reUrl + '/process/run/', params) - }, - retrieve: function(id, params) { - return GET(reUrl + '/process/run/' + JSON.stringify(id) + '/', params) - }, - update: function(id, params) { - return PUT(reUrl + '/process/run/' + JSON.stringify(id) + '/', params) - }, - delete: function(id) { - return DELETE(reUrl + '/process/run/' + JSON.stringify(id) + '/') - }, - control: function(params) { - return POST(reUrl + '/process/run/control/', params) - }, - process_snapshot: function(params) { - return GET(reUrl + '/process_snapshot/', params) - }, - process_snapshot_id: function(id, params) { - return GET(reUrl + '/process_snapshot/' + JSON.stringify(id) + '/', params) - } -} diff --git a/web/src/api/apiUrl/report/process_report.js b/web/src/api/apiUrl/report/process_report.js deleted file mode 100644 index 8767281..0000000 --- a/web/src/api/apiUrl/report/process_report.js +++ /dev/null @@ -1,16 +0,0 @@ -import { GET, reUrl } from '../../axiosconfig/axiosconfig' - -export default { - getJobFlowReport: function(params) { - return GET(reUrl + '/process_run/data_analyze/', params) - }, - getJobFlowName: function(params) { - return GET(reUrl + '/process/process_name/', params) - 
}, - getJobReport: function(params) { - return GET(reUrl + '/node_run/data_analyze/', params) - }, - getJobName: function(params) { - return GET(reUrl + '/node/node_name/', params) - } -} diff --git a/web/src/api/apiUrl/system/alarm_center.js b/web/src/api/apiUrl/system/alarm_center.js deleted file mode 100644 index d46f70d..0000000 --- a/web/src/api/apiUrl/system/alarm_center.js +++ /dev/null @@ -1,7 +0,0 @@ -import {GET, reUrl} from '../../axiosconfig/axiosconfig' - -export default { - list: function(params) { - return GET(reUrl + '/alarm_center/', params) - } -} diff --git a/web/src/api/apiUrl/system/audit_log.js b/web/src/api/apiUrl/system/audit_log.js deleted file mode 100644 index 1d666ec..0000000 --- a/web/src/api/apiUrl/system/audit_log.js +++ /dev/null @@ -1,11 +0,0 @@ -import {GET, reUrl} from '../../axiosconfig/axiosconfig' - -export default { - // 系统审计日志 - list: function(params) { - return GET(reUrl + '/audit_log/', params) - }, - retrieve: function(id) { - return GET(reUrl + '/audit_log/' + JSON.stringify(id) + '/') - } -} diff --git a/web/src/api/apiUrl/system/category.js b/web/src/api/apiUrl/system/category.js deleted file mode 100644 index d81b943..0000000 --- a/web/src/api/apiUrl/system/category.js +++ /dev/null @@ -1,26 +0,0 @@ -import {GET, POST, PUT, DELETE, reUrl} from '../../axiosconfig/axiosconfig' - -export default { - list: function(params) { - return GET(reUrl + '/category/', params) - }, - create: function(params) { - return POST(reUrl + '/category/', params) - }, - retrieve: function(id, params) { - return GET(reUrl + '/category/' + JSON.stringify(id) + '/', params) - }, - update: function(id, params) { - return PUT(reUrl + '/category/' + JSON.stringify(id) + '/', params) - }, - delete: function(id) { - return DELETE(reUrl + '/category/' + JSON.stringify(id) + '/') - }, - get_topology: function(params) { - return GET(reUrl + '/category/get_topology/', params) - }, - set_topology: function(params) { - // PARAMS: {'nodes': array} - return 
POST(reUrl + '/category/set_topology/', params) - } -} diff --git a/web/src/api/apiUrl/system/home.js b/web/src/api/apiUrl/system/home.js deleted file mode 100644 index a8379b2..0000000 --- a/web/src/api/apiUrl/system/home.js +++ /dev/null @@ -1,19 +0,0 @@ -import {GET, reUrl} from '../../axiosconfig/axiosconfig' - -export default { - overview: function(params) { - return GET(reUrl + '/home/overview/', params) - }, - weekly_job: function(params) { - return GET(reUrl + '/home/weekly_job/', params) - }, - today_job: function(params) { - return GET(reUrl + '/home/today_job/', params) - }, - top5_agent: function(params) { - return GET(reUrl + '/home/top5_agent/', params) - }, - job_dynamic: function(params) { - return GET(reUrl + '/home/job_dynamic/', params) - } -} diff --git a/web/src/api/apiUrl/system/setting.js b/web/src/api/apiUrl/system/setting.js deleted file mode 100644 index 7f1b5d5..0000000 --- a/web/src/api/apiUrl/system/setting.js +++ /dev/null @@ -1,23 +0,0 @@ -import {GET, POST, PUT, reUrl} from '../../axiosconfig/axiosconfig' - -export default { - // 系统设置 - list: function(params) { - return GET(reUrl + '/setting/', params) - }, - update: function(id, params) { - return PUT(reUrl + '/setting/' + JSON.stringify(id) + '/', params) - }, - get_logo: function(params) { - return GET(reUrl + '/setting/get_logo/', params) - }, - reset_logo: function(params) { - return GET(reUrl + '/setting/reset_logo/', params) - }, - update_logo: function(params) { - return POST(reUrl + '/setting/update_logo/', params) - }, - batch_update: function(params) { - return POST(reUrl + '/setting/batch_update/', params) - } -} diff --git a/web/src/api/apiUrl/system/show_table.js b/web/src/api/apiUrl/system/show_table.js deleted file mode 100644 index 5c3973b..0000000 --- a/web/src/api/apiUrl/system/show_table.js +++ /dev/null @@ -1,11 +0,0 @@ -import {GET, PUT, reUrl} from '../../axiosconfig/axiosconfig' - -export default { - // 获取表格要显示的列 - show_table: function(token, params) { - 
return GET(reUrl + '/show_table/' + token + '/', params) - }, - update_table: function(token, params) { - return PUT(reUrl + '/show_table/' + token + '/', params) - } -} diff --git a/web/src/api/apiUrl/system/user.js b/web/src/api/apiUrl/system/user.js deleted file mode 100644 index 038dd48..0000000 --- a/web/src/api/apiUrl/system/user.js +++ /dev/null @@ -1,29 +0,0 @@ -import {GET, POST, PUT, DELETE, reUrl} from '../../axiosconfig/axiosconfig' - -export default { - // 系统用户 - login: function(params) { - return GET(reUrl + '/user/login/', params) - }, - list: function(params) { - return GET(reUrl + '/user/', params) - }, - create: function(params) { - return POST(reUrl + '/user/', params) - }, - retrieve: function(id, params) { - return GET(reUrl + '/user/' + JSON.stringify(id) + '/', params) - }, - update: function(id, params) { - return PUT(reUrl + '/user/' + JSON.stringify(id) + '/', params) - }, - delete: function(id) { - return DELETE(reUrl + '/user/' + JSON.stringify(id) + '/') - }, - init_privilege: function(params) { - return GET(reUrl + '/user/init_privilege/', params) - }, - get_uncreated_users: function(params) { - return GET(reUrl + '/user/get_uncreated_users/', params) - } -} diff --git a/web/src/api/apiUrl/template/calendar.js b/web/src/api/apiUrl/template/calendar.js deleted file mode 100644 index b987ca0..0000000 --- a/web/src/api/apiUrl/template/calendar.js +++ /dev/null @@ -1,23 +0,0 @@ -import {GET, POST, PUT, DELETE, reUrl} from '../../axiosconfig/axiosconfig' - -export default { - // 作业台 - list: function(params) { - return GET(reUrl + '/calendar/', params) - }, - create: function(params) { - return POST(reUrl + '/calendar/', params) - }, - retrieve: function(id, params) { - return GET(reUrl + '/calendar/' + JSON.stringify(id) + '/', params) - }, - update: function(id, params) { - return PUT(reUrl + '/calendar/' + JSON.stringify(id) + '/', params) - }, - delete: function(id) { - return DELETE(reUrl + '/calendar/' + JSON.stringify(id) + '/') - }, 
- post_calendar_file: function(params, config) { - return POST(reUrl + '/calendar/post_calendar_file/', params) - } -} diff --git a/web/src/api/apiUrl/template/content.js b/web/src/api/apiUrl/template/content.js deleted file mode 100644 index c1aa4f3..0000000 --- a/web/src/api/apiUrl/template/content.js +++ /dev/null @@ -1,35 +0,0 @@ -import {GET, POST, PUT, DELETE, reUrl} from '../../axiosconfig/axiosconfig' - -export default { - // 作业台 - list: function(params) { - return GET(reUrl + '/node/template/', params) - }, - create: function(params) { - return POST(reUrl + '/node/template/', params) - }, - retrieve: function(id, params) { - return GET(reUrl + '/node/template/' + JSON.stringify(id) + '/', params) - }, - update: function(id, params) { - return PUT(reUrl + '/node/template/' + JSON.stringify(id) + '/', params) - }, - clone: function(params) { - return POST(reUrl + '/node/template/clone/', params) - }, - delete: function(id) { - return DELETE(reUrl + '/node/template/' + JSON.stringify(id) + '/') - }, - execute: function(params) { - return POST(reUrl + '/content/execute/', params) - }, - upload_job: function(params) { - return POST(reUrl + '/content/upload_contents/', params) - }, - check_job: function(params) { - return POST(reUrl + '/content/check_job/', params) - }, - save_job_data: function(params) { - return POST(reUrl + '/content/save_job_data/', params) - } -} diff --git a/web/src/api/apiUrl/template/node.js b/web/src/api/apiUrl/template/node.js deleted file mode 100644 index 6a7c073..0000000 --- a/web/src/api/apiUrl/template/node.js +++ /dev/null @@ -1,7 +0,0 @@ -import {GET, reUrl} from '../../axiosconfig/axiosconfig' - -export default { - list: function(params) { - return GET(reUrl + '/node/', params) - } -} diff --git a/web/src/api/apiUrl/template/process.js b/web/src/api/apiUrl/template/process.js deleted file mode 100644 index f93e1b9..0000000 --- a/web/src/api/apiUrl/template/process.js +++ /dev/null @@ -1,45 +0,0 @@ -import {GET, POST, PUT, 
DELETE, reUrl} from '../../axiosconfig/axiosconfig' - -export default { - // 作业台 - list: function(params) { - return GET(reUrl + '/process/flow/', params) - }, - create: function(params) { - return POST(reUrl + '/process/flow/', params) - }, - retrieve: function(id) { - return GET(reUrl + '/process/flow/' + JSON.stringify(id) + '/') - }, - update: function(id, params) { - return PUT(reUrl + '/process/flow/' + JSON.stringify(id) + '/', params) - }, - clone: function(params) { - return POST(reUrl + '/process/clone/', params) - }, - delete: function(id) { - return DELETE(reUrl + '/process/flow/' + JSON.stringify(id) + '/') - }, - execute: function(params) { - return POST(reUrl + '/process/flow/execute/', params) - }, - get_process_node: function(params) { - return GET(reUrl + '/process/get_process_node/', params) - }, - get_topology: function(params) { - return GET(reUrl + '/process/get_topology/', params) - }, - set_topology: function(params) { - // PARAMS: {'is_global': bool, 'nodes': array} - return POST(reUrl + '/process/set_topology/', params) - }, - upload_process: function(params, config) { - return POST(reUrl + '/process/upload_process/', params, config) - }, - serialize_process: function(params, config) { - return POST(reUrl + '/process/serialize_process/', params, config) - }, - save_process: function(params, config) { - return POST(reUrl + '/process/save_process/', params, config) - } -} diff --git a/web/src/api/apiUrl/template/station.js b/web/src/api/apiUrl/template/station.js deleted file mode 100644 index b2cad79..0000000 --- a/web/src/api/apiUrl/template/station.js +++ /dev/null @@ -1,29 +0,0 @@ -import {GET, POST, PUT, DELETE, reUrl} from '../../axiosconfig/axiosconfig' - -export default { - // 作业台 - list: function(params) { - return GET(reUrl + '/station/', params) - }, - create: function(params) { - return POST(reUrl + '/station/', params) - }, - retrieve: function(id, params) { - return GET(reUrl + '/station/' + JSON.stringify(id) + '/', params) - 
}, - update: function(id, params) { - return PUT(reUrl + '/station/' + JSON.stringify(id) + '/', params) - }, - delete: function(id) { - return DELETE(reUrl + '/station/' + JSON.stringify(id) + '/') - }, - get_biz: function(params) { - return GET(reUrl + '/station/get_biz/', params) - }, - search_host: function(params) { - return POST(reUrl + '/station/search_host/', params) - }, - get_os_account: function(params) { - return POST(reUrl + '/station/get_os_account/', params) - } -} diff --git a/web/src/api/apiUrl/template/station_state.js b/web/src/api/apiUrl/template/station_state.js deleted file mode 100644 index 4c7f5c7..0000000 --- a/web/src/api/apiUrl/template/station_state.js +++ /dev/null @@ -1,8 +0,0 @@ -import {GET, reUrl} from '../../axiosconfig/axiosconfig' - -export default { - // 作业台 - list: function(params) { - return GET(reUrl + '/station_state/', params) - } -} diff --git a/web/src/api/apiUrl/template/var_table.js b/web/src/api/apiUrl/template/var_table.js deleted file mode 100644 index 7efe0a0..0000000 --- a/web/src/api/apiUrl/template/var_table.js +++ /dev/null @@ -1,20 +0,0 @@ -import {GET, POST, PUT, DELETE, reUrl} from '../../axiosconfig/axiosconfig' - -export default { - // 作业台 - list: function(params) { - return GET(reUrl + '/var_table/', params) - }, - create: function(params) { - return POST(reUrl + '/var_table/', params) - }, - retrieve: function(id, params) { - return GET(reUrl + '/var_table/' + JSON.stringify(id) + '/', params) - }, - update: function(id, params) { - return PUT(reUrl + '/var_table/' + JSON.stringify(id) + '/', params) - }, - delete: function(id) { - return DELETE(reUrl + '/var_table/' + JSON.stringify(id) + '/') - } -} diff --git a/web/src/api/axiosconfig/axiosconfig.js b/web/src/api/axiosconfig/axiosconfig.js index 0185384..cb32ba4 100644 --- a/web/src/api/axiosconfig/axiosconfig.js +++ b/web/src/api/axiosconfig/axiosconfig.js @@ -4,14 +4,7 @@ axios.defaults.baseURL = window.siteUrl axios.defaults.withCredentials = true 
axios.defaults.timeout = 200000 axios.defaults.crossDomain = true -// const VueEnv = process.env.NODE_ENV -// let ApiUrl = 'http://127.0.0.1:8000' -// if (VueEnv === 'production') { -// ApiUrl = '.' -// } else { -// ApiUrl = 'http://127.0.0.1:8000' -// } -// // axios.defaults.baseURL = ApiUrl; + axios.interceptors.request.use((config) => { config.headers['X-Requested-With'] = 'XMLHttpRequest' const name = window.CSRF_COOKIE_NAME || 'csrftoken' diff --git a/web/src/api/index.js b/web/src/api/index.js index e85e31e..21fd507 100644 --- a/web/src/api/index.js +++ b/web/src/api/index.js @@ -1,48 +1,6 @@ // 统一引入api模块 -import alarmCenter from './apiUrl/system/alarm_center' -import auditLog from './apiUrl/system/audit_log' -import category from './apiUrl/system/category' -import home from './apiUrl/system/home' -import setting from './apiUrl/system/setting' -import user from './apiUrl/system/user' -import showTable from './apiUrl/system/show_table' - -import calendar from './apiUrl/template/calendar' -import content from './apiUrl/template/content' -import process from './apiUrl/template/process' -import node from './apiUrl/template/node' -import station from './apiUrl/template/station' -import stationState from './apiUrl/template/station_state' -import varTable from './apiUrl/template/var_table' - -import nodeRun from './apiUrl/monitor/node_run' -import processRun from './apiUrl/monitor/process_run' -import processReport from './apiUrl/report/process_report' - -import nodeHistory from './apiUrl/history/node_history' -import processHistory from './apiUrl/history/process_history' - import Task from './apiUrl/task/task' export default { - alarmCenter, - auditLog, - category, - home, - setting, - user, - calendar, - content, - process, - node, - station, - stationState, - varTable, - nodeRun, - processRun, - nodeHistory, - processHistory, - processReport, - showTable, Task } diff --git a/web/src/assets/js/convert-expression/asterisk-to-range-conversion.js 
b/web/src/assets/js/convert-expression/asterisk-to-range-conversion.js deleted file mode 100644 index 9d1ef61..0000000 --- a/web/src/assets/js/convert-expression/asterisk-to-range-conversion.js +++ /dev/null @@ -1,22 +0,0 @@ -/* eslint-disable */ -'use strict'; -export default (() => { - function convertAsterisk(expression, replecement){ - if(expression.indexOf('*') !== -1){ - return expression.replace('*', replecement); - } - return expression; - } - - function convertAsterisksToRanges(expressions){ - expressions[0] = convertAsterisk(expressions[0], '0-59'); - expressions[1] = convertAsterisk(expressions[1], '0-59'); - expressions[2] = convertAsterisk(expressions[2], '0-23'); - expressions[3] = convertAsterisk(expressions[3], '1-31'); - expressions[4] = convertAsterisk(expressions[4], '1-12'); - expressions[5] = convertAsterisk(expressions[5], '0-6'); - return expressions; - } - - return convertAsterisksToRanges; -})(); diff --git a/web/src/assets/js/convert-expression/index.js b/web/src/assets/js/convert-expression/index.js deleted file mode 100644 index 47e1eea..0000000 --- a/web/src/assets/js/convert-expression/index.js +++ /dev/null @@ -1,64 +0,0 @@ -/* eslint-disable */ -'use strict'; -import monthNamesConversion from './month-names-conversion' -import weekDayNamesConversion from './week-day-names-conversion' -import convertAsterisksToRanges from './asterisk-to-range-conversion' -import convertRanges from './range-conversion' -import convertSteps from './step-values-conversion' - -export default (() => { - - function appendSeccondExpression(expressions){ - if(expressions.length === 5){ - return ['0'].concat(expressions); - } - return expressions; - } - - function removeSpaces(str) { - return str.replace(/\s{2,}/g, ' ').trim(); - } - - // Function that takes care of normalization. 
- function normalizeIntegers(expressions) { - for (const i in expressions){ - var numbers = expressions[i].split(','); - for (const j in numbers){ - numbers[j] = parseInt(numbers[j]); - } - expressions[i] = numbers; - } - return expressions; - } - - /* - * The node-cron core allows only numbers (including multiple numbers e.g 1,2). - * This module is going to translate the month names, week day names and ranges - * to integers relatives. - * - * Month names example: - * - expression 0 1 1 January,Sep * - * - Will be translated to 0 1 1 1,9 * - * - * Week day names example: - * - expression 0 1 1 2 Monday,Sat - * - Will be translated to 0 1 1 1,5 * - * - * Ranges example: - * - expression 1-5 * * * * - * - Will be translated to 1,2,3,4,5 * * * * - */ - function interprete(expression){ - var expressions = removeSpaces(expression).split(' '); - expressions = appendSeccondExpression(expressions); - expressions[4] = monthNamesConversion(expressions[4]); - expressions[5] = weekDayNamesConversion(expressions[5]); - expressions = convertAsterisksToRanges(expressions); - expressions = convertRanges(expressions); - expressions = convertSteps(expressions); - expressions = normalizeIntegers(expressions); - return expressions.join(' '); - } - - return interprete; -})(); diff --git a/web/src/assets/js/convert-expression/month-names-conversion.js b/web/src/assets/js/convert-expression/month-names-conversion.js deleted file mode 100644 index d02339e..0000000 --- a/web/src/assets/js/convert-expression/month-names-conversion.js +++ /dev/null @@ -1,23 +0,0 @@ -/* eslint-disable */ -'use strict'; -export default (() => { - var months = ['january','february','march','april','may','june','july', - 'august','september','october','november','december']; - var shortMonths = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', - 'sep', 'oct', 'nov', 'dec']; - - function convertMonthName(expression, items){ - for(const i in expression){ - expression = expression.replace(new 
RegExp(items[i], 'gi'), parseInt(i, 10) + 1); - } - return expression; - } - - function interprete(monthExpression){ - monthExpression = convertMonthName(monthExpression, months); - monthExpression = convertMonthName(monthExpression, shortMonths); - return monthExpression; - } - - return interprete; -})(); diff --git a/web/src/assets/js/convert-expression/range-conversion.js b/web/src/assets/js/convert-expression/range-conversion.js deleted file mode 100644 index b6a83ae..0000000 --- a/web/src/assets/js/convert-expression/range-conversion.js +++ /dev/null @@ -1,42 +0,0 @@ -/* eslint-disable */ -'use strict'; -export default ( () => { - function replaceWithRange(expression, text, init, end) { - var numbers = []; - var last = parseInt(end); - var first = parseInt(init); - - if(first > last){ - last = parseInt(init); - first = parseInt(end); - } - - for(let i = first; i <= last; i++) { - numbers.push(i); - } - - return expression.replace(new RegExp(text, 'gi'), numbers.join()); - } - - function convertRange(expression){ - var rangeRegEx = /(\d+)\-(\d+)/; - var match = rangeRegEx.exec(expression); - while(match !== null && match.length > 0){ - expression = replaceWithRange(expression, match[0], match[1], match[2]); - match = rangeRegEx.exec(expression); - } - return expression; - } - - function convertAllRanges(expressions){ - for(let i in expressions){ - expressions[i] = convertRange(expressions[i]); - } - return expressions; - } - - return convertAllRanges; -})(); - - - diff --git a/web/src/assets/js/convert-expression/step-values-conversion.js b/web/src/assets/js/convert-expression/step-values-conversion.js deleted file mode 100644 index 61d964c..0000000 --- a/web/src/assets/js/convert-expression/step-values-conversion.js +++ /dev/null @@ -1,27 +0,0 @@ -/* eslint-disable */ -'use strict'; -export default (() => { - function convertSteps(expressions){ - var stepValuePattern = /^(.+)\/(\d+)$/; - for(const i in expressions){ - var match = 
stepValuePattern.exec(expressions[i]); - var isStepValue = match !== null && match.length > 0; - if(isStepValue){ - var values = match[1].split(','); - var setpValues = []; - var divider = parseInt(match[2], 10); - for(const j in values){ - var value = parseInt(values[j], 10); - if(value % divider === 0){ - setpValues.push(value); - } - } - expressions[i] = setpValues.join(','); - } - } - return expressions; - } - - return convertSteps; -})(); - diff --git a/web/src/assets/js/convert-expression/week-day-names-conversion.js b/web/src/assets/js/convert-expression/week-day-names-conversion.js deleted file mode 100644 index 0f9af57..0000000 --- a/web/src/assets/js/convert-expression/week-day-names-conversion.js +++ /dev/null @@ -1,22 +0,0 @@ -/* eslint-disable */ -'use strict'; -export default (() => { - var weekDays = ['sunday', 'monday', 'tuesday', 'wednesday', 'thursday', - 'friday', 'saturday']; - var shortWeekDays = ['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat']; - - function convertWeekDayName(expression, items){ - for(const i in items){ - expression = expression.replace(new RegExp(items[i], 'gi'), parseInt(i, 10)); - } - return expression; - } - - function convertWeekDays(expression){ - expression = expression.replace('6', '0'); - expression = convertWeekDayName(expression, weekDays); - return convertWeekDayName(expression, shortWeekDays); - } - - return convertWeekDays; -})(); diff --git a/web/src/assets/js/cron-validator.js b/web/src/assets/js/cron-validator.js deleted file mode 100644 index eb78d66..0000000 --- a/web/src/assets/js/cron-validator.js +++ /dev/null @@ -1,31 +0,0 @@ -/* eslint-disable */ -'use strict'; -import validation from './pattern-validation' -function ErrorException (value) { - this.value = value; - this.message = "是一个非法表达式,请校验"; - this.toString = function() { - return this.value + this.message; - }; -} -export default (() => { - - function validate(expression, config) { - var common_config = Object.assign({ language: 'en'}, config || 
{}) - try { - validation(expression, common_config, ErrorException); - } catch(e) { - if (e instanceof ErrorException) { - return {status: false, msg: e.toString()}; - } - return { status: false, msg: common_config.language === 'en' ? 'this is a invalid expression' : '非法表达式,请校验'} - - } - - return {status: true, msg: ''}; - } - - return { - validate: validate - }; -})(); diff --git a/web/src/assets/js/pattern-validation.js b/web/src/assets/js/pattern-validation.js deleted file mode 100644 index d23acae..0000000 --- a/web/src/assets/js/pattern-validation.js +++ /dev/null @@ -1,162 +0,0 @@ -/* eslint-disable */ -'use strict'; -import convertExpression from './convert-expression' -var commonConfig = {} -var KEYSMAP = [ - { - key: 'second', - error_en: 'is a invalid expression for second', - error_ch: '在 “秒” ' - }, - { - key: 'minute', - error_en: 'is a invalid expression for minute', - error_ch: '在 “分” ' - }, - { - key: 'hour', - error_en: 'is a invalid expression for hour', - error_ch: '在 “小时” ' - }, - { - key: 'day', - error_en: 'is a invalid expression for day of month', - error_ch: '在 “天” ' - }, - { - key: 'month', - error_en: 'is a invalid expression for month', - error_ch: '在 “月” ' - }, - { - key: 'week', - error_en: 'is a invalid expression for second', - error_ch: '在 “周” ' - } -] - -export default ( () => { - function isValidExpression(expression, min, max){ - var options = expression.split(','); - var regexValidation = /^\d+$|^\*$|^\*\/\d+$/; - for(const i in options){ - var option = options[i]; - var optionAsInt = parseInt(options[i], 10); - if(optionAsInt < min || optionAsInt > max || !regexValidation.test(option)) { - return false; - } - } - return true; - } - function isIncludeDecimals(patterns) { - for(var i = 0; i < patterns.length; i++){ - if(patterns[i].indexOf('.') > 0) { - return {valid: true, index: i}; - } - } - return {valid: false}; - } - function isInvalidSecond(expression){ - return !isValidExpression(expression, 0, 59); - } - - function 
isInvalidMinute(expression){ - return !isValidExpression(expression, 0, 59); - } - - function isInvalidHour(expression){ - return !isValidExpression(expression, 0, 23); - } - - function isInvalidDayOfMonth(expression){ - return !isValidExpression(expression, 1, 31); - } - - function isInvalidMonth(expression){ - return !isValidExpression(expression, 1, 12); - } - - function isInvalidWeekDay(expression){ - return !isValidExpression(expression, 0, 6); - } - - function validateFields(patterns, executablePatterns, ErrorException){ - var errorKey = 'error_ch' - if (isIncludeDecimals(patterns).valid) { - var currIndex = isIncludeDecimals(patterns).index - throw new ErrorException(patterns[currIndex] + KEYSMAP[currIndex][errorKey]); - } - if (isInvalidSecond(executablePatterns[0])) { - throw new ErrorException(patterns[0] + KEYSMAP[0][errorKey]); - } - - if (isInvalidMinute(executablePatterns[1])) { - throw new ErrorException(patterns[1] + KEYSMAP[1][errorKey]); - } - - if (isInvalidHour(executablePatterns[2])) { - throw new ErrorException(patterns[2] + KEYSMAP[2][errorKey]); - } - - if (isInvalidDayOfMonth(executablePatterns[3])) { - - throw new ErrorException(patterns[3] + KEYSMAP[3][errorKey]); - } - - if (isInvalidMonth(executablePatterns[4])) { - throw new ErrorException(patterns[4] + KEYSMAP[4][errorKey]); - } - - if (isInvalidWeekDay(executablePatterns[5])) { - throw new ErrorException(patterns[5] + KEYSMAP[5][errorKey]); - } - } - /** - * 接受: - * [a-z] , - * / \d - * 排除: - * [A-Z] - * \d[a-z] - * *[^\/] - */ - function basicCheck (patterns) { - var allowValue = /[^\,|\-|\*|\/|\w]|\d[a-z]|[A-Z]|\*[^\/]/ - for (const pattern in patterns) { - if (allowValue.test(patterns[pattern])) { - throw '表达式非法,请校验' - } - } - } - function WeekExchangeDay (pattern) { - var patterns = pattern.split(' '); - var week = patterns[2] - var day = patterns[3] - var moth = patterns[4] - patterns[2] = day - patterns[3] = moth - patterns[4] = week - return patterns.join(' ') - } - function 
validate(pattern, common_config, ErrorException){ - commonConfig = common_config - if (typeof pattern !== 'string'){ - throw new ErrorException('pattern must be a string!'); - } - if (pattern.split(' ').length !== 5) { - throw '表达式非法,请校验' - } - pattern = WeekExchangeDay(pattern); - var patterns = pattern.split(' '); - // 先基础验证下 - basicCheck(patterns, ErrorException) - // 对应的表达式解析成数字 - var executablePattern = convertExpression(pattern); - var executablePatterns = executablePattern.split(' '); - if(patterns.length === 5){ - patterns = ['0'].concat(patterns); - } - validateFields(patterns, executablePatterns, ErrorException); - } - - return validate; -})(); \ No newline at end of file diff --git a/web/src/components/FullYearCalendar/calendar.vue b/web/src/components/FullYearCalendar/calendar.vue deleted file mode 100644 index 14340ab..0000000 --- a/web/src/components/FullYearCalendar/calendar.vue +++ /dev/null @@ -1,181 +0,0 @@ - - - - - diff --git a/web/src/components/FullYearCalendar/css/datePacker.css b/web/src/components/FullYearCalendar/css/datePacker.css deleted file mode 100644 index 812b1fa..0000000 --- a/web/src/components/FullYearCalendar/css/datePacker.css +++ /dev/null @@ -1,88 +0,0 @@ -.fullYearPicker, .fullYearPicker table { - font-size: 12px; - -moz-user-select: none; - -webkit-user-select: none; - user-select: none -} - -.fullYearPicker div.year { - text-align: center -} - -.fullYearPicker div.year a { - margin-right: 30px -} - -.fullYearPicker div.year a.next { - margin-right: 0; - margin-left: 30px -} - -.fullYearPicker table { - border: 1px solid #3a84ff; - margin-top: 5px; - float: left; - margin-right: 10px -} - -.fullYearPicker table.right { - margin-right: 0 -} - -.fullYearPicker table th.head { - text-align: center; - line-height: 13px; - cursor: default; - background: #fff -} - -.fullYearPicker table td { - background: #fff; - text-align: center; - line-height: 13px; - cursor: pointer -} - -.fullYearPicker table th { - color: #63656e -} - 
-.fullYearPicker table td.weekend, .fullYearPicker table th.weekend { -} - -.fullYearPicker table td.disabled { - color: #2b2929; - cursor: not-allowed -} - -.fullYearPicker table td.selected { - background: #3a84ff; - color: #fff -} - -.fullYearPicker table td.empty { - cursor: default -} - -.fullYearPicker br { - clear: both -} - -.year { - display: none -} - -.arrow_box { - animation: glow 800ms ease-out infinite alternate -} - -@keyframes glow { - 0% { - border-color: red; - box-shadow: 0 0 5px red, inset 0 0 5px red, 0 1px red - } - 100% { - border-color: red; - box-shadow: 0 0 20px red, inset 0 0 10px red, 0 1px 0 red - } -} diff --git a/web/src/components/FullYearCalendar/js/fullYearPicker.js b/web/src/components/FullYearCalendar/js/fullYearPicker.js deleted file mode 100644 index 6fa8198..0000000 --- a/web/src/components/FullYearCalendar/js/fullYearPicker.js +++ /dev/null @@ -1,339 +0,0 @@ -(function () { - var fullYearPicker_nowSelect = null; - var fullYearPicker_last = null; - var _viewer_ = this; - var lastdate = '' - var hasshift = false - - function tdClass(i, disabledDay, sameMonth, values, dateStr) { - var cls = i == 0 || i == 6 ? 'weekend' : ''; - if (disabledDay && disabledDay.indexOf(i) != -1) cls += (cls ? ' ' : '') + 'disabled'; - if (!sameMonth) cls += (cls ? ' ' : '') + 'empty'; - if (sameMonth && values && cls.indexOf('disabled') == -1 && values.indexOf(',' + dateStr + ',') != -1) cls += (cls ? ' ' : '') + 'selected'; - return cls == '' ? 
'' : ' class="' + cls + '"'; - } - - function renderMonth(year, month, clear, disabledDay, values) { - var d = new Date(year, month - 1, 1) - let s = '' + '' + ''; - var dMonth = month - 1; - var firstDay = d.getDay() - let hit = false; - s += ''; - for (let i = 0; i < 7; i++) { - if (firstDay == i || hit) { - s += ''; - d.setDate(d.getDate() + 1); - hit = true; - } else { - s += ''; - } - } - s += ''; - for (let i = 0; i < 5; i++) { - s += ''; - for (var j = 0; j < 7; j++) { - var dateStr = d.getMonth() == dMonth ? year + '-' + month + '-' + d.getDate() : ''; - s += ''; - d.setDate(d.getDate() + 1); - } - s += ''; - } - return s + '
' + year + '年' + month + '月
' + d.getDate() + ' 
' + (d.getMonth() == dMonth ? d.getDate() : ' ') + '
' + (clear ? '
' : ''); - } - - function getDateStr(td) { - return td.parentNode.parentNode.rows[0].cells[0].innerHTML.replace(/[年月]/g, '-') + td.innerHTML - } - - //将进来的值进行分割 例:如:2020-1-2 转换成 2020-01-02 - function splicdate(val) { - var data = val.split('-') - var mouth = data[1] < 10 ? (0 + data[1]) : data[1] - var day = data[2] < 10 ? (0 + data[2]) : data[2] - return data[0] + '-' + mouth + '-' + day - } - - function renderYear(year, el, disabledDay, value) { - el.find('td').unbind(); - let s = '' - let values = ',' + value.join(',') + ','; - for (var i = 1; i <= 12; i++) s += renderMonth(year, i, i % 4 == 0, disabledDay, values); - el.find('div.picker').html(s).find('td').click(function () { - if (!/disabled|empty/g.test(this.className)) $(this).toggleClass('selected'); - if (this.className.indexOf('empty') == -1 && typeof el.data('config').cellClick == 'function') { - el.data('config').cellClick(getDateStr(this), this.className.indexOf('disabled') != -1); - $('.fullYearPicker td').removeClass('arrow_box'); - $(this).addClass('arrow_box'); - fullYearPicker_nowSelect = getDateStr(this); - _viewer_.data('config').choose(_viewer_.fullYearPicker('getSelected')); - } - }); - } - - $.fn.selectDates = function (dateArray) { - dateArray.forEach(function (item) { - $("[date='" + item + "']").addClass('selected'); - }); - } - var format = Date - //范围选择方法 - format.prototype.format = function () { - var s = ''; - var mouth = (this.getMonth() + 1) >= 10 ? (this.getMonth() + 1) : ('0' + (this.getMonth() + 1)); - var day = this.getDate() >= 10 ? 
this.getDate() : ('0' + this.getDate()); - s += this.getFullYear() + '-'; // 获取年份。 - s += mouth + '-'; // 获取月份。 - s += day; // 获取日。 - return (s); // 返回日期。 - }; - - //范围选择方法 获取中间范围日期 - function getAll(begin, end) { - var arr = []; - var ab = begin.split('-'); - var ae = end.split('-'); - var db = new Date(); - db.setUTCFullYear(ab[0], ab[1] - 1, ab[2]); - var de = new Date(); - de.setUTCFullYear(ae[0], ae[1] - 1, ae[2]); - var unixDb = db.getTime() - 24 * 60 * 60 * 1000; - var unixDe = de.getTime() - 24 * 60 * 60 * 1000; - for (var k = unixDb; k <= unixDe;) { - k = k + 24 * 60 * 60 * 1000; - arr.push((new Date(parseInt(k))).format()); - } - return arr; - } - - $.fn.fullYearPicker = function (config, param) { - if (hasshift == true) { //如果摁过shift键 就进来 - //取中间时间 - var centerData = '' - var lastdatas = splicdate(lastdate) - var fullYearPicker_nowSelects = splicdate(fullYearPicker_nowSelect) - //将两个数放入去中间值得方法里面 - if (lastdatas > fullYearPicker_nowSelects) { - centerData = getAll(fullYearPicker_nowSelect, lastdate) - } else { - centerData = getAll(lastdate, fullYearPicker_nowSelect) - } - //开始循环中间的数 - for (let item in centerData) { - var center = centerData[item].split('-') - var mouth = center[1].split('') - var mouths = mouth[0] === '0' ? mouth[1] : mouth[0] + mouth[1] - var day = center[2].split('') - var days = day[0] === '0' ? 
day[1] : day[0] + day[1] - var datetimes = center[0] + '-' + mouths + '-' + days - var $td = $("[date='" + datetimes + "']"); - //将2020-01-02 转换成 2020-1-2 - $td.addClass('selected').addClass('arrow_box'); //添加当前选中项 - $('.fullYearPicker td').removeClass('arrow_box'); //删除当前选中项 - lastdate = '' - } - } else { - } - if (config === 'setDisabledDay' || config === 'setYear' || config === 'getSelected' || config === 'acceptChange' || config === 'setColors' || config === 'initDate') { - var me = $(this); - if (config == 'setYear') { - me.data('config').year = param; - me.find('div.year a:first').trigger('click', true); - } else if (config == 'getSelected') { - return me.find('td.selected').map(function () { - var selectStr = getDateStr(this); - if (_viewer_.data('config').format === 'YYYY-MM-DD') { - var selects = selectStr.split('-'); - var yy = selects[0]; - var mm = selects[1]; - if (Number(mm) < 10) { - mm = '0' + mm; - } - var dd = selects[2]; - if (Number(dd) < 10) { - dd = '0' + dd; - } - selectStr = yy + '-' + mm + '-' + dd; - } - return selectStr; - }).get(); - } else if (config == 'acceptChange') { - me.data('config').value = me.fullYearPicker('getSelected'); - } else if (config == 'setColors') { - return me.find('td').each(function () { - var d = getDateStr(this); - for (var i = 0; i < param.dc.length; i++) if (d == param.dc[i].d) this.style.backgroundColor = param.dc[i].c || param.defaultColor || '#f00'; - }); - } else { - me.find('td.disabled').removeClass('disabled'); - me.data('config').disabledDay = param; - if (param) { - me.find('table tr:gt(1)').find('td').each(function () { - if (param.indexOf(this.cellIndex) != -1) { - this.className = (this.className || '').replace('selected', '') + (this.className ? 
' ' : '') + 'disabled'; - } - }); - } - } - return this; - } - config = $.extend({ - year: new Date().getFullYear(), - disabledDay: '', - value: [], - initDate: [], - format: '', - disable: false - }, config); - return this.addClass('fullYearPicker').each(function () { - _viewer_ = $(this); - _viewer_.html(''); - var me = $(this) - let year = config.year || new Date().getFullYear(); - let newConifg = { - cellClick: config.cellClick, - disabledDay: config.disabledDay, - year: year, - value: config.value, - yearScale: config.yearScale, - choose: config.choose, - initDate: config.initDate, - format: config.format, - disable: config.disable - }; - me.data('config', newConifg); - var selYear = ''; - if (newConifg.yearScale) { - selYear = ''; - } - selYear = selYear || year; - me.append('
上一年' + selYear + '年
').find('a').click(function (e, setYear) { - if (setYear) year = me.data('config').year; else this.innerHTML == '上一年' ? year-- : year++; - me.find('select').val(year); - renderYear(year, $(this).closest('div.fullYearPicker'), newConifg.disabledDay, newConifg.value); - this.parentNode.firstChild.nextSibling.data = year + '年'; - return false; - }).end().find('select').change(function () { - me.fullYearPicker('setYear', this.value); - }); - if (_viewer_.data('config').disable === true) { - _viewer_.data('config').disabledDay = '0,1,2,3,4,5,6'; - } - renderYear(year, me, newConifg.disabledDay, newConifg.value); - if (newConifg.initDate.length > 0) { - newConifg.initDate.forEach(function (p1, p2, p3) { - if (newConifg.format === 'YYYY-MM-DD') { - var items = p1.split('-'); - var mm = items[1]; - if (mm[0] === '0') { - mm = mm[1]; - } - var dd = items[2]; - if (dd[0] === '0') { - dd = dd[1]; - } - var item = items[0] + '-' + mm + '-' + dd; - } - $("[date='" + item + "']").addClass('selected') - }) - } - }); - }; - - function getMaxDay(year, month) { - var thisDate = new Date(year, month, 0); - //返回了某个月的某一天 - return thisDate.getDate(); - } - - function selectDay(type, del) { - var day = Number(fullYearPicker_nowSelect.split('-')[2]); - var year = fullYearPicker_nowSelect.split('-')[0]; - var month = fullYearPicker_nowSelect.split('-')[1]; - var maxDay = Number(getMaxDay(year, month)) + 1; - if (maxDay) { - switch (type) { - case 38: - if (day < 7 || day === 7) { - return - } - day -= 7; - break; - case 37: - if (day === 1) { - return - } - day -= 1; - break; - case 40: - if ((day + 7) > Number(maxDay) || (day + 7) === Number(maxDay)) { - return - } - day += 7; - break; - case 39: - if (day === Number(maxDay) - 1) { - return - } - day += 1; - break; - default: - break; - } - fullYearPicker_nowSelect = year + '-' + month + '-' + day; - var $td = $("[date='" + fullYearPicker_nowSelect + "']"); - if (del) { - if (!$td.hasClass('empty') && !$td.hasClass('selected')) { - 
$('.fullYearPicker td').removeClass('arrow_box'); - $td.addClass('selected').addClass('arrow_box'); - _viewer_.data('config').choose(_viewer_.fullYearPicker('getSelected')); - } else if (!$td.hasClass('empty') && $td.hasClass('selected')) { - $('.fullYearPicker td').removeClass('arrow_box'); - $td.removeClass('selected').addClass('arrow_box'); - _viewer_.data('config').choose(_viewer_.fullYearPicker('getSelected')); - } - } else { - if (!$td.hasClass('empty')) { - $('.fullYearPicker td').removeClass('arrow_box'); - $td.addClass('selected').addClass('arrow_box'); - _viewer_.data('config').choose(_viewer_.fullYearPicker('getSelected')); - } - } - } - } - - //键盘事件(上下左右) - document.onkeydown = function (event) { - if (fullYearPicker_nowSelect === null) { - return - } - var e = event || window.event - //如果摁住SHift - if (e && e.keyCode === 16) { - hasshift = true - lastdate = fullYearPicker_nowSelect - } - if ((e && e.keyCode === 38) || (e && e.keyCode === 37)) { - if (e.keyCode === 38) { - selectDay(38, true); - } else if (e && e.keyCode === 37) { - selectDay(37, true); - } - } - if ((e && e.keyCode === 40) || (e && e.keyCode === 39)) { - if (e.keyCode === 40) { - selectDay(40, true); - } else if (e && e.keyCode === 39) { - selectDay(39, true); - } - } - }; - document.onkeyup = function(event) { - var e = event || window.event - if (e && e.keyCode === 16) { - hasshift = false - } - } -})(); diff --git a/web/src/components/FullYearCalendar/new_calendar.vue b/web/src/components/FullYearCalendar/new_calendar.vue deleted file mode 100644 index 14340ab..0000000 --- a/web/src/components/FullYearCalendar/new_calendar.vue +++ /dev/null @@ -1,181 +0,0 @@ - - - - - diff --git a/web/src/components/FullYearCalendar/old_calendar.vue b/web/src/components/FullYearCalendar/old_calendar.vue deleted file mode 100644 index e419e6f..0000000 --- a/web/src/components/FullYearCalendar/old_calendar.vue +++ /dev/null @@ -1,189 +0,0 @@ - - - diff --git 
a/web/src/components/FullYearCalendar/work_holiday_compute.js b/web/src/components/FullYearCalendar/work_holiday_compute.js deleted file mode 100644 index 9206662..0000000 --- a/web/src/components/FullYearCalendar/work_holiday_compute.js +++ /dev/null @@ -1,368 +0,0 @@ -/* -* 获取工作日和节假日的脚本函数 -* # -*- coding: UTF-8 -*- -import datetime - - -def find(): - list_data = [] - start_time = datetime.datetime(2024, 1, 1) - end_time = datetime.datetime(2025, 1, 1) - while start_time < end_time: - if start_time.weekday() > 4: - list_data.append(datetime.datetime.strftime(start_time, '%Y-%m-%d')) - start_time = start_time + datetime.timedelta(days=1) - print(list_data) - - -if __name__ == '__main__': - find() -* */ - - -const workDays = { - 2020: [ - '2020-01-02', '2020-01-03', '2020-01-06', '2020-01-07', '2020-01-08', '2020-01-09', - '2020-01-10', '2020-01-13', '2020-01-14', '2020-01-15', '2020-01-16', '2020-01-17', - '2020-01-19', '2020-01-20', '2020-01-21', '2020-01-22', '2020-01-23', - '2020-02-03', '2020-02-04', '2020-02-05', '2020-02-06', '2020-02-07', '2020-02-10', - '2020-02-11', '2020-02-12', '2020-02-13', '2020-02-14', - '2020-02-17', '2020-02-18', '2020-02-19', '2020-02-20', '2020-02-21', - '2020-02-24', '2020-02-25', '2020-02-26', '2020-02-27', '2020-02-28', - '2020-03-02', '2020-03-03', '2020-03-04', '2020-03-05', '2020-03-06', - '2020-03-09', '2020-03-10', '2020-03-11', '2020-03-12', '2020-03-13', - '2020-03-16', '2020-03-17', '2020-03-18', '2020-03-19', '2020-03-20', - '2020-03-23', '2020-03-24', '2020-03-25', '2020-03-26', '2020-03-27', - '2020-03-30', '2020-03-31', - '2020-04-01', '2020-04-02', '2020-04-03', '2020-04-07', '2020-04-08', '2020-04-09', - '2020-04-10', '2020-04-13', '2020-04-14', '2020-04-15', '2020-04-16', '2020-04-17', - '2020-04-20', '2020-04-21', '2020-04-22', '2020-04-23', '2020-04-24', '2020-04-27', - '2020-04-28', '2020-04-29', '2020-04-30', - '2020-05-06', '2020-05-07', '2020-05-08', '2020-05-09', '2020-05-11', '2020-05-12', - '2020-05-13', 
'2020-05-14', '2020-05-15', '2020-05-18', '2020-05-19', '2020-05-20', - '2020-05-21', '2020-05-22', '2020-05-25', '2020-05-26', '2020-05-27', '2020-05-28', - '2020-05-29', - '2020-06-01', '2020-06-02', '2020-06-03', '2020-06-04', '2020-06-05', '2020-06-08', - '2020-06-09', '2020-06-10', '2020-06-11', '2020-06-12', '2020-06-15', '2020-06-16', - '2020-06-17', '2020-06-18', '2020-06-19', '2020-06-22', '2020-06-23', '2020-06-24', - '2020-06-28', '2020-06-29', '2020-06-30', - '2020-07-01', '2020-07-02', '2020-07-03', '2020-07-06', '2020-07-07', '2020-07-08', - '2020-07-09', '2020-07-10', '2020-07-13', '2020-07-14', '2020-07-15', '2020-07-16', - '2020-07-17', '2020-07-20', '2020-07-21', '2020-07-22', '2020-07-23', '2020-07-24', - '2020-07-27', '2020-07-28', '2020-07-29', '2020-07-30', '2020-07-31', - '2020-08-03', '2020-08-04', '2020-08-05', '2020-08-06', '2020-08-07', '2020-08-08', - '2020-08-10', '2020-08-11', '2020-08-12', '2020-08-13', '2020-08-14', '2020-08-17', - '2020-08-18', '2020-08-19', '2020-08-20', '2020-08-21', '2020-08-24', '2020-08-25', - '2020-08-26', '2020-08-27', '2020-08-28', '2020-08-31', - '2020-09-01', '2020-09-02', '2020-09-03', '2020-09-04', '2020-09-07', '2020-09-08', - '2020-09-09', '2020-09-10', '2020-09-11', '2020-09-14', '2020-09-15', '2020-09-16', - '2020-09-17', '2020-09-18', '2020-09-21', '2020-09-22', '2020-09-23', '2020-09-24', - '2020-09-25', '2020-09-27', '2020-09-28', '2020-09-29', '2020-09-30', - '2020-10-09', '2020-10-10', '2020-10-12', '2020-10-13', '2020-10-14', '2020-10-15', - '2020-10-16', '2020-10-19', '2020-10-20', '2020-10-21', '2020-10-22', '2020-10-23', - '2020-10-26', '2020-10-27', '2020-10-28', '2020-10-29', '2020-10-30', - '2020-11-02', '2020-11-03', '2020-11-04', '2020-11-05', '2020-11-06', '2020-11-09', - '2020-11-10', '2020-11-11', '2020-11-12', '2020-11-13', '2020-11-16', '2020-11-17', - '2020-11-18', '2020-11-19', '2020-11-20', - '2020-11-23', '2020-11-24', '2020-11-25', '2020-11-26', '2020-11-27', '2020-11-30', - 
'2020-12-05', '2020-12-06', '2020-12-12', '2020-12-13', '2020-12-19', '2020-12-20', - '2020-12-26', '2020-12-27', - '2020-12-01', '2020-12-02', '2020-12-03', '2020-12-04', '2020-12-07', '2020-12-08', - '2020-12-09', '2020-12-10', - '2020-12-11', '2020-12-14', '2020-12-15', '2020-12-16', '2020-12-17', '2020-12-18', - '2020-12-21', '2020-12-22', '2020-12-23', '2020-12-24', '2020-12-25', '2020-12-28', - '2020-12-29', '2020-12-30', '2020-12-31', - ], - 2021: [ - '2021-01-01', '2021-01-04', '2021-01-05', '2021-01-06', '2021-01-07', '2021-01-08', - '2021-01-11', '2021-01-12', '2021-01-13', '2021-01-14', '2021-01-15', '2021-01-18', - '2021-01-19', '2021-01-20', '2021-01-21', '2021-01-22', '2021-01-25', '2021-01-26', - '2021-01-27', '2021-01-28', '2021-01-29', '2021-02-01', '2021-02-02', '2021-02-03', - '2021-02-04', '2021-02-05', '2021-02-08', '2021-02-09', '2021-02-10', '2021-02-11', - '2021-02-12', '2021-02-15', '2021-02-16', '2021-02-17', '2021-02-18', '2021-02-19', - '2021-02-22', '2021-02-23', '2021-02-24', '2021-02-25', '2021-02-26', '2021-03-01', - '2021-03-02', '2021-03-03', '2021-03-04', '2021-03-05', '2021-03-08', '2021-03-09', - '2021-03-10', '2021-03-11', '2021-03-12', '2021-03-15', '2021-03-16', '2021-03-17', - '2021-03-18', '2021-03-19', '2021-03-22', '2021-03-23', '2021-03-24', '2021-03-25', - '2021-03-26', '2021-03-29', '2021-03-30', '2021-03-31', '2021-04-01', '2021-04-02', - '2021-04-05', '2021-04-06', '2021-04-07', '2021-04-08', '2021-04-09', '2021-04-12', - '2021-04-13', '2021-04-14', '2021-04-15', '2021-04-16', '2021-04-19', '2021-04-20', - '2021-04-21', '2021-04-22', '2021-04-23', '2021-04-26', '2021-04-27', '2021-04-28', - '2021-04-29', '2021-04-30', '2021-05-03', '2021-05-04', '2021-05-05', '2021-05-06', - '2021-05-07', '2021-05-10', '2021-05-11', '2021-05-12', '2021-05-13', '2021-05-14', - '2021-05-17', '2021-05-18', '2021-05-19', '2021-05-20', '2021-05-21', '2021-05-24', - '2021-05-25', '2021-05-26', '2021-05-27', '2021-05-28', '2021-05-31', 
'2021-06-01', - '2021-06-02', '2021-06-03', '2021-06-04', '2021-06-07', '2021-06-08', '2021-06-09', - '2021-06-10', '2021-06-11', '2021-06-14', '2021-06-15', '2021-06-16', '2021-06-17', - '2021-06-18', '2021-06-21', '2021-06-22', '2021-06-23', '2021-06-24', '2021-06-25', - '2021-06-28', '2021-06-29', '2021-06-30', '2021-07-01', '2021-07-02', '2021-07-05', - '2021-07-06', '2021-07-07', '2021-07-08', '2021-07-09', '2021-07-12', '2021-07-13', - '2021-07-14', '2021-07-15', '2021-07-16', '2021-07-19', '2021-07-20', '2021-07-21', - '2021-07-22', '2021-07-23', '2021-07-26', '2021-07-27', '2021-07-28', '2021-07-29', - '2021-07-30', '2021-08-02', '2021-08-03', '2021-08-04', '2021-08-05', '2021-08-06', - '2021-08-09', '2021-08-10', '2021-08-11', '2021-08-12', '2021-08-13', '2021-08-16', - '2021-08-17', '2021-08-18', '2021-08-19', '2021-08-20', '2021-08-23', '2021-08-24', - '2021-08-25', '2021-08-26', '2021-08-27', '2021-08-30', '2021-08-31', '2021-09-01', - '2021-09-02', '2021-09-03', '2021-09-06', '2021-09-07', '2021-09-08', '2021-09-09', - '2021-09-10', '2021-09-13', '2021-09-14', '2021-09-15', '2021-09-16', '2021-09-17', - '2021-09-20', '2021-09-21', '2021-09-22', '2021-09-23', '2021-09-24', '2021-09-27', - '2021-09-28', '2021-09-29', '2021-09-30', '2021-10-01', '2021-10-04', '2021-10-05', - '2021-10-06', '2021-10-07', '2021-10-08', '2021-10-11', '2021-10-12', '2021-10-13', - '2021-10-14', '2021-10-15', '2021-10-18', '2021-10-19', '2021-10-20', '2021-10-21', - '2021-10-22', '2021-10-25', '2021-10-26', '2021-10-27', '2021-10-28', '2021-10-29', - '2021-11-01', '2021-11-02', '2021-11-03', '2021-11-04', '2021-11-05', '2021-11-08', - '2021-11-09', '2021-11-10', '2021-11-11', '2021-11-12', '2021-11-15', '2021-11-16', - '2021-11-17', '2021-11-18', '2021-11-19', '2021-11-22', '2021-11-23', '2021-11-24', - '2021-11-25', '2021-11-26', '2021-11-29', '2021-11-30', '2021-12-01', '2021-12-02', - '2021-12-03', '2021-12-06', '2021-12-07', '2021-12-08', '2021-12-09', '2021-12-10', - 
'2021-12-13', '2021-12-14', '2021-12-15', '2021-12-16', '2021-12-17', '2021-12-20', - '2021-12-21', '2021-12-22', '2021-12-23', '2021-12-24', '2021-12-27', '2021-12-28', - '2021-12-29', '2021-12-30', '2021-12-31' - ], - 2022: [ - '2022-01-03', '2022-01-04', '2022-01-05', '2022-01-06', '2022-01-07', '2022-01-10', - '2022-01-11', '2022-01-12', '2022-01-13', '2022-01-14', '2022-01-17', '2022-01-18', - '2022-01-19', '2022-01-20', '2022-01-21', '2022-01-24', '2022-01-25', '2022-01-26', - '2022-01-27', '2022-01-28', '2022-01-31', '2022-02-01', '2022-02-02', '2022-02-03', - '2022-02-04', '2022-02-07', '2022-02-08', '2022-02-09', '2022-02-10', '2022-02-11', - '2022-02-14', '2022-02-15', '2022-02-16', '2022-02-17', '2022-02-18', '2022-02-21', - '2022-02-22', '2022-02-23', '2022-02-24', '2022-02-25', '2022-02-28', '2022-03-01', - '2022-03-02', '2022-03-03', '2022-03-04', '2022-03-07', '2022-03-08', '2022-03-09', - '2022-03-10', '2022-03-11', '2022-03-14', '2022-03-15', '2022-03-16', '2022-03-17', - '2022-03-18', '2022-03-21', '2022-03-22', '2022-03-23', '2022-03-24', '2022-03-25', - '2022-03-28', '2022-03-29', '2022-03-30', '2022-03-31', '2022-04-01', '2022-04-04', - '2022-04-05', '2022-04-06', '2022-04-07', '2022-04-08', '2022-04-11', '2022-04-12', - '2022-04-13', '2022-04-14', '2022-04-15', '2022-04-18', '2022-04-19', '2022-04-20', - '2022-04-21', '2022-04-22', '2022-04-25', '2022-04-26', '2022-04-27', '2022-04-28', - '2022-04-29', '2022-05-02', '2022-05-03', '2022-05-04', '2022-05-05', '2022-05-06', - '2022-05-09', '2022-05-10', '2022-05-11', '2022-05-12', '2022-05-13', '2022-05-16', - '2022-05-17', '2022-05-18', '2022-05-19', '2022-05-20', '2022-05-23', '2022-05-24', - '2022-05-25', '2022-05-26', '2022-05-27', '2022-05-30', '2022-05-31', '2022-06-01', - '2022-06-02', '2022-06-03', '2022-06-06', '2022-06-07', '2022-06-08', '2022-06-09', - '2022-06-10', '2022-06-13', '2022-06-14', '2022-06-15', '2022-06-16', '2022-06-17', - '2022-06-20', '2022-06-21', '2022-06-22', 
'2022-06-23', '2022-06-24', '2022-06-27', - '2022-06-28', '2022-06-29', '2022-06-30', '2022-07-01', '2022-07-04', '2022-07-05', - '2022-07-06', '2022-07-07', '2022-07-08', '2022-07-11', '2022-07-12', '2022-07-13', - '2022-07-14', '2022-07-15', '2022-07-18', '2022-07-19', - '2022-07-20', '2022-07-21', '2022-07-22', '2022-07-25', '2022-07-26', '2022-07-27', - '2022-07-28', '2022-07-29', '2022-08-01', '2022-08-02', '2022-08-03', '2022-08-04', - '2022-08-05', '2022-08-08', '2022-08-09', '2022-08-10', '2022-08-11', '2022-08-12', - '2022-08-15', '2022-08-16', '2022-08-17', '2022-08-18', '2022-08-19', '2022-08-22', - '2022-08-23', '2022-08-24', '2022-08-25', '2022-08-26', '2022-08-29', '2022-08-30', - '2022-08-31', '2022-09-01', '2022-09-02', '2022-09-05', '2022-09-06', '2022-09-07', - '2022-09-08', '2022-09-09', '2022-09-12', '2022-09-13', '2022-09-14', '2022-09-15', - '2022-09-16', '2022-09-19', '2022-09-20', '2022-09-21', '2022-09-22', '2022-09-23', - '2022-09-26', '2022-09-27', '2022-09-28', '2022-09-29', '2022-09-30', '2022-10-03', - '2022-10-04', '2022-10-05', '2022-10-06', '2022-10-07', '2022-10-10', '2022-10-11', - '2022-10-12', '2022-10-13', '2022-10-14', '2022-10-17', '2022-10-18', '2022-10-19', - '2022-10-20', '2022-10-21', '2022-10-24', '2022-10-25', '2022-10-26', '2022-10-27', - '2022-10-28', '2022-10-31', '2022-11-01', '2022-11-02', '2022-11-03', '2022-11-04', - '2022-11-07', '2022-11-08', '2022-11-09', '2022-11-10', '2022-11-11', '2022-11-14', - '2022-11-15', '2022-11-16', '2022-11-17', '2022-11-18', '2022-11-21', '2022-11-22', - '2022-11-23', '2022-11-24', '2022-11-25', '2022-11-28', '2022-11-29', '2022-11-30', - '2022-12-01', '2022-12-02', '2022-12-05', '2022-12-06', '2022-12-07', '2022-12-08', - '2022-12-09', '2022-12-12', '2022-12-13', '2022-12-14', '2022-12-15', '2022-12-16', - '2022-12-19', '2022-12-20', '2022-12-21', '2022-12-22', '2022-12-23', '2022-12-26', - '2022-12-27', '2022-12-28', '2022-12-29', '2022-12-30' - ], - 2023: [ - '2023-01-02', 
'2023-01-03', '2023-01-04', '2023-01-05', '2023-01-06', '2023-01-09', - '2023-01-10', '2023-01-11', '2023-01-12', '2023-01-13', '2023-01-16', '2023-01-17', - '2023-01-18', '2023-01-19', '2023-01-20', '2023-01-23', '2023-01-24', '2023-01-25', - '2023-01-26', '2023-01-27', '2023-01-30', '2023-01-31', '2023-02-01', '2023-02-02', - '2023-02-03', '2023-02-06', '2023-02-07', '2023-02-08', '2023-02-09', '2023-02-10', - '2023-02-13', '2023-02-14', '2023-02-15', '2023-02-16', '2023-02-17', '2023-02-20', - '2023-02-21', '2023-02-22', '2023-02-23', '2023-02-24', '2023-02-27', '2023-02-28', - '2023-03-01', '2023-03-02', '2023-03-03', '2023-03-06', '2023-03-07', '2023-03-08', - '2023-03-09', '2023-03-10', '2023-03-13', '2023-03-14', '2023-03-15', '2023-03-16', - '2023-03-17', '2023-03-20', '2023-03-21', '2023-03-22', '2023-03-23', '2023-03-24', - '2023-03-27', '2023-03-28', '2023-03-29', '2023-03-30', '2023-03-31', '2023-04-03', - '2023-04-04', '2023-04-05', '2023-04-06', '2023-04-07', '2023-04-10', '2023-04-11', - '2023-04-12', '2023-04-13', '2023-04-14', '2023-04-17', '2023-04-18', '2023-04-19', - '2023-04-20', '2023-04-21', '2023-04-24', '2023-04-25', '2023-04-26', '2023-04-27', - '2023-04-28', '2023-05-01', '2023-05-02', '2023-05-03', '2023-05-04', '2023-05-05', - '2023-05-08', '2023-05-09', '2023-05-10', '2023-05-11', '2023-05-12', '2023-05-15', - '2023-05-16', '2023-05-17', '2023-05-18', '2023-05-19', '2023-05-22', '2023-05-23', - '2023-05-24', '2023-05-25', '2023-05-26', '2023-05-29', '2023-05-30', '2023-05-31', - '2023-06-01', '2023-06-02', '2023-06-05', '2023-06-06', '2023-06-07', '2023-06-08', - '2023-06-09', '2023-06-12', '2023-06-13', '2023-06-14', '2023-06-15', '2023-06-16', - '2023-06-19', '2023-06-20', '2023-06-21', '2023-06-22', '2023-06-23', '2023-06-26', - '2023-06-27', '2023-06-28', '2023-06-29', '2023-06-30', '2023-07-03', '2023-07-04', - '2023-07-05', '2023-07-06', '2023-07-07', '2023-07-10', '2023-07-11', '2023-07-12', - '2023-07-13', '2023-07-14', 
'2023-07-17', '2023-07-18', '2023-07-19', '2023-07-20', - '2023-07-21', '2023-07-24', '2023-07-25', '2023-07-26', '2023-07-27', '2023-07-28', - '2023-07-31', '2023-08-01', '2023-08-02', '2023-08-03', '2023-08-04', '2023-08-07', - '2023-08-08', '2023-08-09', '2023-08-10', '2023-08-11', '2023-08-14', '2023-08-15', - '2023-08-16', '2023-08-17', '2023-08-18', '2023-08-21', '2023-08-22', '2023-08-23', - '2023-08-24', '2023-08-25', '2023-08-28', '2023-08-29', '2023-08-30', '2023-08-31', - '2023-09-01', '2023-09-04', '2023-09-05', '2023-09-06', '2023-09-07', '2023-09-08', - '2023-09-11', '2023-09-12', '2023-09-13', '2023-09-14', '2023-09-15', '2023-09-18', - '2023-09-19', '2023-09-20', '2023-09-21', '2023-09-22', '2023-09-25', '2023-09-26', - '2023-09-27', '2023-09-28', '2023-09-29', '2023-10-02', '2023-10-03', '2023-10-04', - '2023-10-05', '2023-10-06', '2023-10-09', '2023-10-10', '2023-10-11', '2023-10-12', - '2023-10-13', '2023-10-16', '2023-10-17', '2023-10-18', '2023-10-19', '2023-10-20', - '2023-10-23', '2023-10-24', '2023-10-25', '2023-10-26', '2023-10-27', '2023-10-30', - '2023-10-31', '2023-11-01', '2023-11-02', '2023-11-03', '2023-11-06', '2023-11-07', - '2023-11-08', '2023-11-09', '2023-11-10', '2023-11-13', '2023-11-14', '2023-11-15', - '2023-11-16', '2023-11-17', '2023-11-20', '2023-11-21', '2023-11-22', '2023-11-23', - '2023-11-24', '2023-11-27', '2023-11-28', '2023-11-29', '2023-11-30', '2023-12-01', - '2023-12-04', '2023-12-05', '2023-12-06', '2023-12-07', '2023-12-08', '2023-12-11', - '2023-12-12', '2023-12-13', '2023-12-14', '2023-12-15', '2023-12-18', '2023-12-19', - '2023-12-20', '2023-12-21', '2023-12-22', '2023-12-25', '2023-12-26', '2023-12-27', - '2023-12-28', '2023-12-29' - ], - 2024: [ - '2024-01-01', '2024-01-02', '2024-01-03', '2024-01-04', '2024-01-05', '2024-01-08', - '2024-01-09', '2024-01-10', '2024-01-11', '2024-01-12', '2024-01-15', '2024-01-16', - '2024-01-17', '2024-01-18', '2024-01-19', '2024-01-22', '2024-01-23', '2024-01-24', - 
'2024-01-25', '2024-01-26', '2024-01-29', '2024-01-30', '2024-01-31', '2024-02-01', - '2024-02-02', '2024-02-05', '2024-02-06', '2024-02-07', '2024-02-08', '2024-02-09', - '2024-02-12', '2024-02-13', '2024-02-14', '2024-02-15', '2024-02-16', '2024-02-19', - '2024-02-20', '2024-02-21', '2024-02-22', '2024-02-23', '2024-02-26', '2024-02-27', - '2024-02-28', '2024-02-29', '2024-03-01', '2024-03-04', '2024-03-05', '2024-03-06', - '2024-03-07', '2024-03-08', '2024-03-11', '2024-03-12', '2024-03-13', '2024-03-14', - '2024-03-15', '2024-03-18', '2024-03-19', '2024-03-20', '2024-03-21', '2024-03-22', - '2024-03-25', '2024-03-26', '2024-03-27', '2024-03-28', '2024-03-29', '2024-04-01', - '2024-04-02', '2024-04-03', '2024-04-04', '2024-04-05', '2024-04-08', '2024-04-09', - '2024-04-10', '2024-04-11', '2024-04-12', '2024-04-15', '2024-04-16', '2024-04-17', - '2024-04-18', '2024-04-19', '2024-04-22', '2024-04-23', '2024-04-24', '2024-04-25', - '2024-04-26', '2024-04-29', '2024-04-30', '2024-05-01', '2024-05-02', '2024-05-03', - '2024-05-06', '2024-05-07', '2024-05-08', '2024-05-09', '2024-05-10', '2024-05-13', - '2024-05-14', '2024-05-15', '2024-05-16', '2024-05-17', '2024-05-20', '2024-05-21', - '2024-05-22', '2024-05-23', '2024-05-24', '2024-05-27', '2024-05-28', '2024-05-29', - '2024-05-30', '2024-05-31', '2024-06-03', '2024-06-04', '2024-06-05', '2024-06-06', - '2024-06-07', '2024-06-10', '2024-06-11', '2024-06-12', '2024-06-13', '2024-06-14', - '2024-06-17', '2024-06-18', '2024-06-19', '2024-06-20', '2024-06-21', '2024-06-24', - '2024-06-25', '2024-06-26', '2024-06-27', '2024-06-28', '2024-07-01', '2024-07-02', - '2024-07-03', '2024-07-04', '2024-07-05', '2024-07-08', '2024-07-09', '2024-07-10', - '2024-07-11', '2024-07-12', '2024-07-15', '2024-07-16', '2024-07-17', '2024-07-18', - '2024-07-19', '2024-07-22', '2024-07-23', '2024-07-24', '2024-07-25', '2024-07-26', - '2024-07-29', '2024-07-30', '2024-07-31', '2024-08-01', '2024-08-02', '2024-08-05', - '2024-08-06', 
'2024-08-07', '2024-08-08', '2024-08-09', '2024-08-12', '2024-08-13', - '2024-08-14', '2024-08-15', '2024-08-16', '2024-08-19', '2024-08-20', '2024-08-21', - '2024-08-22', '2024-08-23', '2024-08-26', '2024-08-27', '2024-08-28', '2024-08-29', - '2024-08-30', '2024-09-02', '2024-09-03', '2024-09-04', '2024-09-05', '2024-09-06', - '2024-09-09', '2024-09-10', '2024-09-11', '2024-09-12', '2024-09-13', '2024-09-16', - '2024-09-17', '2024-09-18', '2024-09-19', '2024-09-20', '2024-09-23', '2024-09-24', - '2024-09-25', '2024-09-26', '2024-09-27', '2024-09-30', '2024-10-01', '2024-10-02', - '2024-10-03', '2024-10-04', '2024-10-07', '2024-10-08', '2024-10-09', '2024-10-10', - '2024-10-11', '2024-10-14', '2024-10-15', '2024-10-16', '2024-10-17', '2024-10-18', - '2024-10-21', '2024-10-22', '2024-10-23', '2024-10-24', '2024-10-25', '2024-10-28', - '2024-10-29', '2024-10-30', '2024-10-31', '2024-11-01', '2024-11-04', '2024-11-05', - '2024-11-06', '2024-11-07', '2024-11-08', '2024-11-11', '2024-11-12', '2024-11-13', - '2024-11-14', '2024-11-15', '2024-11-18', '2024-11-19', '2024-11-20', '2024-11-21', - '2024-11-22', '2024-11-25', '2024-11-26', '2024-11-27', '2024-11-28', '2024-11-29', - '2024-12-02', '2024-12-03', '2024-12-04', '2024-12-05', '2024-12-06', '2024-12-09', - '2024-12-10', '2024-12-11', '2024-12-12', '2024-12-13', '2024-12-16', '2024-12-17', - '2024-12-18', '2024-12-19', '2024-12-20', '2024-12-23', '2024-12-24', '2024-12-25', - '2024-12-26', '2024-12-27', '2024-12-30', '2024-12-31' - ], -} -const holidaysAndFestivals = { - 2020: [ - '2020-01-01', '2020-01-04', '2020-01-05', '2020-01-11', '2020-01-12', '2020-01-19', - '2020-01-24', '2020-01-25', '2020-01-26', '2020-01-27', '2020-01-28', '2020-01-29', - '2020-01-30', '2020-01-31', '2020-02-01', '2020-02-02', '2020-02-08', '2020-02-09', - '2020-02-15', '2020-02-16', '2020-02-22', '2020-02-23', '2020-02-29', '2020-03-01', - '2020-03-07', '2020-03-08', '2020-03-14', '2020-03-15', '2020-03-21', '2020-03-22', - '2020-03-28', 
'2020-03-29', '2020-04-04', '2020-04-05', '2020-04-06', '2020-04-11', - '2020-04-12', '2020-04-18', '2020-04-19', '2020-04-25', '2020-05-01', '2020-05-02', - '2020-05-03', '2020-05-04', '2020-05-05', '2020-05-10', '2020-05-16', '2020-05-17', - '2020-05-23', '2020-05-24', '2020-05-30', '2020-05-31', '2020-06-06', '2020-06-07', - '2020-06-13', '2020-06-14', '2020-06-20', '2020-06-21', '2020-06-25', '2020-06-26', - '2020-06-27', '2020-07-04', '2020-07-05', '2020-07-11', '2020-07-12', '2020-07-18', - '2020-07-19', '2020-07-25', '2020-07-26', '2020-08-01', '2020-08-02', '2020-08-08', - '2020-08-09', '2020-08-15', '2020-08-16', '2020-08-22', '2020-08-23', '2020-08-29', - '2020-08-30', '2020-09-05', '2020-09-06', '2020-09-12', '2020-09-13', '2020-09-19', - '2020-09-20', '2020-09-26', '2020-10-01', '2020-10-02', '2020-10-03', '2020-10-04', - '2020-10-05', '2020-10-06', '2020-10-07', '2020-10-08', '2020-10-11', '2020-10-17', - '2020-10-18', '2020-10-24', '2020-10-25', '2020-10-31', '2020-11-01', '2020-11-07', - '2020-11-08', '2020-11-14', '2020-11-15', '2020-11-21', '2020-11-22', '2020-11-28', - '2020-11-29', '2020-12-05', '2020-12-06', '2020-12-12', '2020-12-13', '2020-12-19', - '2020-12-20', '2020-12-26', '2020-12-27', - ], - 2021: [ - '2021-01-02', '2021-01-03', '2021-01-09', '2021-01-10', '2021-01-16', '2021-01-17', - '2021-01-23', '2021-01-24', '2021-01-30', '2021-01-31', '2021-02-06', '2021-02-07', - '2021-02-13', '2021-02-14', '2021-02-20', '2021-02-21', '2021-02-27', '2021-02-28', - '2021-03-06', '2021-03-07', '2021-03-13', '2021-03-14', '2021-03-20', '2021-03-21', - '2021-03-27', '2021-03-28', '2021-04-03', '2021-04-04', '2021-04-10', '2021-04-11', - '2021-04-17', '2021-04-18', '2021-04-24', '2021-04-25', '2021-05-01', '2021-05-02', - '2021-05-08', '2021-05-09', '2021-05-15', '2021-05-16', '2021-05-22', '2021-05-23', - '2021-05-29', '2021-05-30', '2021-06-05', '2021-06-06', '2021-06-12', '2021-06-13', - '2021-06-19', '2021-06-20', '2021-06-26', '2021-06-27', 
'2021-07-03', '2021-07-04', - '2021-07-10', '2021-07-11', '2021-07-17', '2021-07-18', '2021-07-24', '2021-07-25', - '2021-07-31', '2021-08-01', '2021-08-07', '2021-08-08', '2021-08-14', '2021-08-15', - '2021-08-21', '2021-08-22', '2021-08-28', '2021-08-29', '2021-09-04', '2021-09-05', - '2021-09-11', '2021-09-12', '2021-09-18', '2021-09-19', '2021-09-25', '2021-09-26', - '2021-10-02', '2021-10-03', '2021-10-09', '2021-10-10', '2021-10-16', '2021-10-17', - '2021-10-23', '2021-10-24', '2021-10-30', '2021-10-31', '2021-11-06', '2021-11-07', - '2021-11-13', '2021-11-14', '2021-11-20', '2021-11-21', '2021-11-27', '2021-11-28', - '2021-12-04', '2021-12-05', '2021-12-11', '2021-12-12', '2021-12-18', '2021-12-19', - '2021-12-25', '2021-12-26' - ], - 2022: [ - '2022-01-01', '2022-01-02', '2022-01-08', '2022-01-09', '2022-01-15', '2022-01-16', - '2022-01-22', '2022-01-23', '2022-01-29', '2022-01-30', '2022-02-05', '2022-02-06', - '2022-02-12', '2022-02-13', '2022-02-19', '2022-02-20', '2022-02-26', '2022-02-27', - '2022-03-05', '2022-03-06', '2022-03-12', '2022-03-13', '2022-03-19', '2022-03-20', - '2022-03-26', '2022-03-27', '2022-04-02', '2022-04-03', '2022-04-09', '2022-04-10', - '2022-04-16', '2022-04-17', '2022-04-23', '2022-04-24', '2022-04-30', '2022-05-01', - '2022-05-07', '2022-05-08', '2022-05-14', '2022-05-15', '2022-05-21', '2022-05-22', - '2022-05-28', '2022-05-29', '2022-06-04', '2022-06-05', '2022-06-11', '2022-06-12', - '2022-06-18', '2022-06-19', '2022-06-25', '2022-06-26', '2022-07-02', '2022-07-03', - '2022-07-09', '2022-07-10', '2022-07-16', '2022-07-17', '2022-07-23', '2022-07-24', - '2022-07-30', '2022-07-31', '2022-08-06', '2022-08-07', '2022-08-13', '2022-08-14', - '2022-08-20', '2022-08-21', '2022-08-27', '2022-08-28', '2022-09-03', '2022-09-04', - '2022-09-10', '2022-09-11', '2022-09-17', '2022-09-18', '2022-09-24', '2022-09-25', - '2022-10-01', '2022-10-02', '2022-10-08', '2022-10-09', '2022-10-15', '2022-10-16', - '2022-10-22', '2022-10-23', 
'2022-10-29', '2022-10-30', '2022-11-05', '2022-11-06', - '2022-11-12', '2022-11-13', '2022-11-19', '2022-11-20', '2022-11-26', '2022-11-27', - '2022-12-03', '2022-12-04', '2022-12-10', '2022-12-11', '2022-12-17', '2022-12-18', - '2022-12-24', '2022-12-25', '2022-12-31' - ], - 2023: [ - '2023-01-01', '2023-01-07', '2023-01-08', '2023-01-14', '2023-01-15', '2023-01-21', - '2023-01-22', '2023-01-28', '2023-01-29', '2023-02-04', '2023-02-05', '2023-02-11', - '2023-02-12', '2023-02-18', '2023-02-19', '2023-02-25', '2023-02-26', '2023-03-04', - '2023-03-05', '2023-03-11', '2023-03-12', '2023-03-18', '2023-03-19', '2023-03-25', - '2023-03-26', '2023-04-01', '2023-04-02', '2023-04-08', '2023-04-09', '2023-04-15', - '2023-04-16', '2023-04-22', '2023-04-23', '2023-04-29', '2023-04-30', '2023-05-06', - '2023-05-07', '2023-05-13', '2023-05-14', '2023-05-20', '2023-05-21', '2023-05-27', - '2023-05-28', '2023-06-03', '2023-06-04', '2023-06-10', '2023-06-11', '2023-06-17', - '2023-06-18', '2023-06-24', '2023-06-25', '2023-07-01', '2023-07-02', '2023-07-08', - '2023-07-09', '2023-07-15', '2023-07-16', '2023-07-22', '2023-07-23', '2023-07-29', - '2023-07-30', '2023-08-05', '2023-08-06', '2023-08-12', '2023-08-13', '2023-08-19', - '2023-08-20', '2023-08-26', '2023-08-27', '2023-09-02', '2023-09-03', '2023-09-09', - '2023-09-10', '2023-09-16', '2023-09-17', '2023-09-23', '2023-09-24', '2023-09-30', - '2023-10-01', '2023-10-07', '2023-10-08', '2023-10-14', '2023-10-15', '2023-10-21', - '2023-10-22', '2023-10-28', '2023-10-29', '2023-11-04', '2023-11-05', '2023-11-11', - '2023-11-12', '2023-11-18', '2023-11-19', '2023-11-25', '2023-11-26', '2023-12-02', - '2023-12-03', '2023-12-09', '2023-12-10', '2023-12-16', '2023-12-17', '2023-12-23', - '2023-12-24', '2023-12-30', '2023-12-31' - ], - 2024: [ - '2024-01-06', '2024-01-07', '2024-01-13', '2024-01-14', '2024-01-20', '2024-01-21', - '2024-01-27', '2024-01-28', '2024-02-03', '2024-02-04', '2024-02-10', '2024-02-11', - '2024-02-17', 
'2024-02-18', '2024-02-24', '2024-02-25', '2024-03-02', '2024-03-03', - '2024-03-09', '2024-03-10', '2024-03-16', '2024-03-17', '2024-03-23', '2024-03-24', - '2024-03-30', '2024-03-31', '2024-04-06', '2024-04-07', '2024-04-13', '2024-04-14', - '2024-04-20', '2024-04-21', '2024-04-27', '2024-04-28', '2024-05-04', '2024-05-05', - '2024-05-11', '2024-05-12', '2024-05-18', '2024-05-19', '2024-05-25', '2024-05-26', - '2024-06-01', '2024-06-02', '2024-06-08', '2024-06-09', '2024-06-15', '2024-06-16', - '2024-06-22', '2024-06-23', '2024-06-29', '2024-06-30', '2024-07-06', '2024-07-07', - '2024-07-13', '2024-07-14', '2024-07-20', '2024-07-21', '2024-07-27', '2024-07-28', - '2024-08-03', '2024-08-04', '2024-08-10', '2024-08-11', '2024-08-17', '2024-08-18', - '2024-08-24', '2024-08-25', '2024-08-31', '2024-09-01', '2024-09-07', '2024-09-08', - '2024-09-14', '2024-09-15', '2024-09-21', '2024-09-22', '2024-09-28', '2024-09-29', - '2024-10-05', '2024-10-06', '2024-10-12', '2024-10-13', '2024-10-19', '2024-10-20', - '2024-10-26', '2024-10-27', '2024-11-02', '2024-11-03', '2024-11-09', '2024-11-10', - '2024-11-16', '2024-11-17', '2024-11-23', '2024-11-24', '2024-11-30', '2024-12-01', - '2024-12-07', '2024-12-08', '2024-12-14', '2024-12-15', '2024-12-21', '2024-12-22', - '2024-12-28', '2024-12-29' - ], -} - -export function acquireWorkAndHolidays() { - return {workDays: workDays, holidaysAndFestivals: holidaysAndFestivals} -} diff --git a/web/src/components/base/magicMenu/container.vue b/web/src/components/base/magicMenu/container.vue index 138bc71..dbba56e 100644 --- a/web/src/components/base/magicMenu/container.vue +++ b/web/src/components/base/magicMenu/container.vue @@ -1,9 +1,6 @@ @@ -12,19 +9,6 @@ export default { data() { return { - keepAliveShow: true - } - }, - watch: { - $route: function(val, oldval) { - if (val.name === 'LargeScreen' && oldval.name !== 'ViewDetail') { - // 表明从别的页面进入作业监视大屏 - // 刷新keepAlive - this.keepAliveShow = false - setTimeout(() => { - 
this.keepAliveShow = true - }, 0) - } } } } diff --git a/web/src/components/base/magicMenu/header.vue b/web/src/components/base/magicMenu/header.vue index 5d28558..a590090 100644 --- a/web/src/components/base/magicMenu/header.vue +++ b/web/src/components/base/magicMenu/header.vue @@ -26,10 +26,9 @@ - - diff --git a/web/src/components/time_crontab/crontab.vue b/web/src/components/time_crontab/crontab.vue deleted file mode 100644 index a38b732..0000000 --- a/web/src/components/time_crontab/crontab.vue +++ /dev/null @@ -1,635 +0,0 @@ - - - diff --git a/web/src/constants/index.js b/web/src/constants/index.js deleted file mode 100644 index 8049f2c..0000000 --- a/web/src/constants/index.js +++ /dev/null @@ -1,5 +0,0 @@ -const PERIODIC_REG = /^((\*\/)?(([0-5]?\d[,-/])*([0-5]?\d))|\*)[ ]((\*\/)?(([0]?[0-9]|1\d|2[0-3])[,-/])*(([0]?[0-9]|1\d|2[0-3]))|\*)[ ]((\*\/)?((([0-6][,-/])*[0-6])|((mon|tue|wed|thu|fri|sat|sun)[,-/])*(mon|tue|wed|thu|fri|sat|sun))|\*)[ ]((\*\/)?((0?[1-9]|[12]\d|3[01])[,-/])*((0?[1-9]|[12]\d|3[01]))|\*)[ ]((\*\/)?((0?[1-9]|1[0-2])[,-/])*(0?[1-9]|1[0-2])|\*)$/ - -export { - PERIODIC_REG -} diff --git a/web/src/main.js b/web/src/main.js index 9a9fa75..a507a2e 100644 --- a/web/src/main.js +++ b/web/src/main.js @@ -16,7 +16,6 @@ import 'view-design/dist/styles/iview.css' import './components/iview/index' // 几何图 import * as Echarts from 'echarts' -import G6 from '@antv/g6' // 引用API文件 import api from './api/index' // 时间格式化插件 @@ -43,7 +42,6 @@ import './assets/custom_icon/iconfont.css' // import '../static/cw-icon/iconfont.css' import 'echarts/dist/extension/dataTool' import VeeValidate, {Validator} from 'vee-validate' -import cron from '@/assets/js/cron-validator.js' const config = { errorBagName: 'veeErrors', @@ -52,11 +50,9 @@ const config = { Vue.use(VeeValidate, config) Vue.use(bkMagic) Vue.use(Echarts) -Vue.use(G6) Vue.use(Component) Vue.use(axios) Vue.prototype.$echarts = Echarts -Vue.prototype.$G6 = G6 Vue.prototype.$moment = moment 
Vue.prototype.$cwMessage = cwMessage // 将API方法绑定到全局 @@ -70,10 +66,7 @@ Vue.config.productionTip = false Vue.prototype.cloneDeep = function(data) { return lodash.cloneDeep(data) } -Validator.extend('cronRlue', { - getMessage: (field, args) => args + '输入定时表达式非法,请校验', - validate: value => cron.validate(value).status -}) + Validator.extend('integer', { getMessage: (field, args) => args + '间隔时间必须是正整数', validate: value => Number(value) >= 1 && Number(value) % 1 === 0 diff --git a/web/src/promission.js b/web/src/promission.js index ee34e32..8a34309 100644 --- a/web/src/promission.js +++ b/web/src/promission.js @@ -1,42 +1,6 @@ import router from './router' import store from '@/vuex/index' import Home from '@/views/home/home' -import AgentList from '@/views/agent_mgmt/agent_list' -import AgentMonitor from '@/views/agent_mgmt/agent_monitor' -import CalendarMgmt from '@/views/job_flow_mgmt/calendar_mgmt' -import JobFlowList from '@/views/job_flow_mgmt/job_flow_list' -import NewJobFlow from '@/views/job_flow_mgmt/new_job_flow' -import VariableMgmt from '@/views/job_flow_mgmt/variable_mgmt' -import JobList from '@/views/job_mgmt/job_list' -import NewJob from '@/views/job_mgmt/new_job' -import ScanFile from '@/views/job_mgmt/scan_file' -import SingleJob from '@/views/job_mgmt/single_job' -import JobViewHistory from '@/views/job_monitor/history/job_view_history' -import JobFlowViewHistory from '@/views/job_monitor/history/job_flow_view_history' -import MultipleJob from '@/views/job_mgmt/multiple_job' -import JobHistory from '@/views/job_monitor/history/job_history' -import JobMonitor from '@/views/job_monitor/monitor/job_monitor' -import SysSetup from '@/views/system/sys_setup' -import UserAndPermissions from '@/views/system/user_and_permissions' -import AlarmList from '@/views/alarm_center/alarm_list' -import JobView from '@/views/job_monitor/monitor/job_view' -import JobFlowView from '@/views/job_monitor/monitor/job_flow_view' -import ViewDetail from 
'@/views/job_monitor/monitor/view_detail' -import SingleJobFlow from '@/views/job_flow_mgmt/single_job_flow' -import MultipleJobFlow from '@/views/job_flow_mgmt/multiple_job_flow' -import ImportFile from '@/views/job_flow_mgmt/import_file' -import JobDetail from '@/views/job_monitor/monitor/job_detail' -import Log from '@/views/system/log' -import LogMange from '@/views/system/log_mange' -import Report from '@/views/report/report' -import SystemClassManage from '@/views/system/system_class_manage' -import JobFlowDetail from '@/views/job_monitor/history/job_flow_detail' -import JobViewDetail from '@/views/job_monitor/history/job_view_detail' -import variableChange from '@/views/job_flow_mgmt/variable_change' -import AddCalendarMgmt from '@/views/job_flow_mgmt/add_calendar_mgmt' -import LargeScreen from '@/views/job_monitor_large_screen/large_screen' -import TaskList from '@/views/task_mgmt/task_list' -import TaskCreate from '@/views/task_mgmt/task_create' // const _import = require('./router/_import_' + process.env.NODE_ENV) // 获取组件的方法 @@ -58,102 +22,6 @@ router.beforeEach((to, from, next) => { 'title': '首页' } }, - { - 'path': '/log', - 'name': 'Log', - 'component': 'Log', - 'meta': { - 'title': '操作审计' - } - }, - { - 'path': '/addcalendarmgmt', - 'name': 'AddCalendarMgmt', - 'component': 'AddCalendarMgmt', - 'meta': { - 'title': '操作日历', - 'back': 'true', - 'fatherName': 'CalendarMgmt' - } - }, - { - 'path': '/variablechange', - 'name': 'variableChange', - 'component': 'variableChange', - 'meta': { - 'title': '变量表', - 'back': 'true', - 'fatherName': 'VariableMgmt' - } - }, - { - 'path': '/singlejob', - 'name': 'SingleJob', - 'component': 'SingleJob', - 'meta': { - 'title': '单个作业', - 'back': 'true', - 'fatherName': 'NewJob' - } - }, - { - 'path': '/singleJobdetail', - 'name': 'singleJobDetail', - 'component': 'SingleJob', - 'meta': { - 'title': '作业管理 > 修改作业 > 单个作业' - } - }, - { - 'path': '/viewdetail', - 'name': 'ViewDetail', - 'component': 'ViewDetail', - 'meta': { - 
'title': '作业流视图详情', - 'back': 'true', - 'fatherName': 'JobMonitor' - } - }, - { - 'path': '/jobflowdetail', - 'name': 'JobFlowDetail', - 'component': 'JobFlowDetail', - 'meta': { - 'title': '作业流视图历史详情', - 'back': 'true', - 'fatherName': 'JobHistory' - } - }, - { - 'path': '/jobviewdetail', - 'name': 'JobViewDetail', - 'component': 'JobViewDetail', - 'meta': { - 'title': '作业视图历史详情', - 'back': 'true', - 'fatherName': 'JobHistory' - } - }, - { - 'path': '/multiplejob', - 'name': 'MultipleJob', - 'component': 'MultipleJob', - 'meta': { - 'title': '批量作业导入', - 'back': 'true', - 'fatherName': 'NewJob' - } - }, - { - 'path': '/scanfile', - 'name': 'ScanFile', - 'component': 'ScanFile', - 'meta': { - 'title': '导入详情', - 'back': 'true', - 'fatherName': 'NewJob' - } - }, { 'path': '/home', 'name': 'home', @@ -161,389 +29,9 @@ router.beforeEach((to, from, next) => { 'meta': { 'title': '首页' } - }, - { - 'path': '/agentlist', - 'name': 'AgentList', - 'component': 'AgentList', - 'meta': { - 'title': 'Agent列表' - } - }, - { - 'path': '/agentmonitor', - 'name': 'AgentMonitor', - 'component': 'AgentMonitor', - 'meta': { - 'title': 'Agent监视' - } - }, - { - 'path': '/calendarmgmt', - 'name': 'CalendarMgmt', - 'component': 'CalendarMgmt', - 'meta': { - 'title': '日历管理' - } - }, - { - 'path': '/jobflowlist', - 'name': 'JobFlowList', - 'component': 'JobFlowList', - 'meta': { - 'title': '作业流列表' - } - }, - { - 'path': '/newjobflow', - 'name': 'NewJobFlow', - 'component': 'NewJobFlow', - 'meta': { - 'title': '新建作业流' - } - }, - { - 'path': '/singlejobflow', - 'name': 'SingleJobFlow', - 'component': 'SingleJobFlow', - 'meta': { - 'title': '单个作业流', - 'back': 'true', - 'fatherName': 'NewJobFlow' - } - }, - { - 'path': '/multiplejobflow', - 'name': 'MultipleJobFlow', - 'component': 'MultipleJobFlow', - 'meta': { - 'title': '批量导入', - 'back': 'true', - 'fatherName': 'NewJobFlow' - } - }, - { - 'path': '/importfile', - 'name': 'importFile', - 'component': 'ImportFile', - 'meta': { - 'title': '导入详情', - 
'back': 'true', - 'fatherName': 'NewJobFlow' - } - }, - { - 'path': '/variablemgmt', - 'name': 'VariableMgmt', - 'component': 'VariableMgmt', - 'meta': { - 'title': '变量管理' - } - }, - { - 'path': '/joblist', - 'name': 'JobList', - 'component': 'JobList', - 'meta': { - 'title': '作业列表' - } - }, - { - 'path': '/newjob', - 'name': 'NewJob', - 'component': 'NewJob', - 'meta': { - 'title': '新建作业' - } - }, - { - 'path': '/jobhistory', - 'name': 'JobHistory', - 'component': 'JobHistory', - 'meta': { - 'title': '作业历史' - }, - 'children': [ - { - 'path': '/jobflowviewhistory', - 'name': 'JobFlowViewHistory', - 'component': 'JobFlowViewHistory', - 'meta': { - 'title': '作业历史', - 'fatherName': 'JobHistory' - } - }, - { - 'path': '/jobviewhistory', - 'name': 'JobViewHistory', - 'component': 'JobViewHistory', - 'meta': { - 'title': '作业历史', - 'fatherName': 'JobHistory' - } - } - ] - }, - { - 'path': '/report', - 'name': 'Report', - 'component': 'Report', - 'meta': { - 'title': '报表分析' - } - }, - { - 'path': '/largescreen', - 'name': 'LargeScreen', - 'component': 'LargeScreen', - 'meta': { - 'title': '作业监视大屏' - } - }, - { - 'path': '/jobmonitor', - 'name': 'JobMonitor', - 'component': 'JobMonitor', - 'meta': { - 'title': '作业监视' - }, - 'children': [ - { - 'path': '/jobview', - 'name': 'JobView', - 'component': 'JobView', - 'meta': { - 'title': '作业监视', - 'fatherName': 'JobMonitor' - } - }, - { - 'path': '/jobflowview', - 'name': 'JobFlowView', - 'component': 'JobFlowView', - 'meta': { - 'title': '作业监视', - 'fatherName': 'JobMonitor' - } - } - ] - }, - { - 'path': '/jobdetail', - 'name': 'jobDetail', - 'component': 'JobDetail', - 'meta': { - 'title': '作业视图详情', - 'back': 'true', - 'fatherName': 'JobMonitor' - } - }, - { - 'path': '/syssetup', - 'name': 'SysSetup', - 'component': 'SysSetup', - 'meta': { - 'title': '系统设置' - } - }, - { - 'path': '/userandpermissions', - 'name': 'UserAndPermissions', - 'component': 'UserAndPermissions', - 'meta': { - 'title': '用户与权限' - } - }, - { - 'path': 
'/systemclassmanage', - 'name': 'SystemClassManage', - 'component': 'SystemClassManage', - 'meta': { - 'title': '系统类别管理' - } - }, - { - 'path': '/logmange', - 'name': 'LogMange', - 'component': 'LogMange', - 'meta': { - 'title': '日志管理' - } - }, - { - 'path': '/alarmlist', - 'name': 'AlarmList', - 'component': 'AlarmList', - 'meta': { - 'title': '告警中心' - } - }, - { - 'path': '/taskList', - 'name': 'TaskList', - 'component': 'TaskList', - 'meta': { - 'title': '任务管理' - } - }, - { - 'path': '/taskCreate', - 'name': 'taskCreate', - 'component': 'TaskCreate', - 'meta': { - 'title': '新建任务', - 'fatherName': 'TaskList', - 'back': 'true' - } - } - ] - getButton = [ - { - 'url': '/agentlist', - 'auth': { - 'search': true, - 'create': true, - 'modify': true, - 'del': true - } - }, - { - 'url': '/agentmonitor', - 'auth': { - 'search': true - } - }, - { - 'url': '/newjob', - 'auth': { - 'create': true - } - }, - { - 'url': '/joblist', - 'auth': { - 'search': true, - 'operate': true, - 'modify': true, - 'del': true - } - }, - { - 'url': '/newjobflow', - 'auth': { - 'create': true - } - }, - { - 'url': '/jobflowlist', - 'auth': { - 'search': true, - 'operate': true, - 'modify': true, - 'del': true - } - }, - { - 'url': '/calendarmgmt', - 'auth': { - 'search': true, - 'create': true, - 'modify': true, - 'del': true - } - }, - { - 'url': '/variablemgmt', - 'auth': { - 'search': true, - 'create': true, - 'modify': true, - 'del': true - } - }, - { - 'url': '/jobflowview', - 'auth': { - 'search': true, - 'operate': true - } - }, - { - 'url': '/jobflowviewhistory', - 'auth': { - 'search': true - } - }, - { - 'url': '/jobview', - 'auth': { - 'search': true, - 'operate': true - } - }, - { - 'url': '/jobviewhistory', - 'auth': { - 'search': true - } - }, - { - 'url': '/alarmlist', - 'auth': { - 'search': true - } - }, - { - 'url': '/syssetup', - 'auth': { - 'operate': true, - 'modify': true - } - }, - { - 'url': '/userandpermissions', - 'auth': { - 'search': true, - 'create': true - } - }, 
- { - 'url': '/systemclassmanage', - 'auth': { - 'search': true, - 'create': true, - 'modify': true, - 'del': true - } - }, - { - 'url': '/log', - 'auth': { - 'search': true - } - }, - { - 'url': '/viewdetail', - 'auth': { - 'search': true, - 'operate': true - } - }, - { - 'url': '/taskList', - 'auth': { - 'search': true, - 'operate': true - } - }, - { - 'url': '/taskCreate', - 'auth': { - 'search': true, - 'operate': true - } } ] + getButton = [] saveObjArr('router', getRouter) // 存储路由到localStorage routerGo(to, next) // 执行路由跳转方法 } else { // 从localStorage拿到了路由 @@ -577,44 +65,7 @@ function getObjArr(name) { // localStorage 获取数组对象的方法 } const ROUTER_MAP = { - 'Home': Home, - 'AgentList': AgentList, - 'AgentMonitor': AgentMonitor, - 'CalendarMgmt': CalendarMgmt, - 'JobFlowList': JobFlowList, - 'NewJobFlow': NewJobFlow, - 'VariableMgmt': VariableMgmt, - 'JobList': JobList, - 'NewJob': NewJob, - 'AddCalendarMgmt': AddCalendarMgmt, - 'variableChange': variableChange, - 'ScanFile': ScanFile, - 'SingleJob': SingleJob, - 'SingleJobDetail': SingleJob, - 'JobViewHistory': JobViewHistory, - 'JobFlowViewHistory': JobFlowViewHistory, - 'MultipleJob': MultipleJob, - 'JobHistory': JobHistory, - 'JobMonitor': JobMonitor, - 'SysSetup': SysSetup, - 'UserAndPermissions': UserAndPermissions, - 'AlarmList': AlarmList, - 'JobView': JobView, - 'JobFlowView': JobFlowView, - 'ViewDetail': ViewDetail, - 'SingleJobFlow': SingleJobFlow, - 'MultipleJobFlow': MultipleJobFlow, - 'ImportFile': ImportFile, - 'JobDetail': JobDetail, - 'Log': Log, - 'LogMange': LogMange, - 'Report': Report, - 'SystemClassManage': SystemClassManage, - 'JobFlowDetail': JobFlowDetail, - 'JobViewDetail': JobViewDetail, - 'LargeScreen': LargeScreen, - 'TaskList': TaskList, - 'TaskCreate': TaskCreate + 'Home': Home } function filterAsyncRouter(asyncRouterMap) { // 遍历后台传来的路由字符串,转换为组件对象 diff --git a/web/src/views/agent_mgmt/agent_dialog.vue b/web/src/views/agent_mgmt/agent_dialog.vue deleted file mode 100644 index 
87f6461..0000000 --- a/web/src/views/agent_mgmt/agent_dialog.vue +++ /dev/null @@ -1,215 +0,0 @@ - - - - - diff --git a/web/src/views/agent_mgmt/agent_list.vue b/web/src/views/agent_mgmt/agent_list.vue deleted file mode 100644 index 1e8975d..0000000 --- a/web/src/views/agent_mgmt/agent_list.vue +++ /dev/null @@ -1,308 +0,0 @@ - - - - - diff --git a/web/src/views/agent_mgmt/agent_monitor.vue b/web/src/views/agent_mgmt/agent_monitor.vue deleted file mode 100644 index 48f8d7f..0000000 --- a/web/src/views/agent_mgmt/agent_monitor.vue +++ /dev/null @@ -1,247 +0,0 @@ - - - - - diff --git a/web/src/views/alarm_center/alarm_list.vue b/web/src/views/alarm_center/alarm_list.vue deleted file mode 100644 index 3e0123d..0000000 --- a/web/src/views/alarm_center/alarm_list.vue +++ /dev/null @@ -1,228 +0,0 @@ - - - - - diff --git a/web/src/views/home/home.vue b/web/src/views/home/home.vue index 6801036..6cd1486 100644 --- a/web/src/views/home/home.vue +++ b/web/src/views/home/home.vue @@ -1,90 +1,6 @@ @@ -92,606 +8,18 @@ export default { data() { return { - statusList1: [{ - tag: 'pony审批通过,并附“同意”', - content: '2020-03-06 11:23', - color: 'green', - filled: true - }, - { - tag: 'tony审批通过,并附“同意”', - content: '2020-03-07 10:20', - color: 'green', - filled: true - }, - { - tag: 'allen正在审批', - color: 'green', - filled: true, - content: '2020-03-06 11:23' - }, - { - tag: '等待mark审批', - color: 'green', - filled: true, - content: '2020-03-06 11:23' - }, - { - tag: '等待mark审批', - color: 'green', - filled: true, - content: '2020-03-06 11:23' - } - ], - overViewLoading: false, - todayJobLoading: false, - top5AgentLoading: false, - weeklyJobLoading: false, - jobDynamicLoading: false, - jobDynamicState: [], - weeklyJobChart: null, - top5AgentChart: null, - todayJobChart: null, - weeklyJob: { - weekly_time: [], - weekly_job_num: [], - weekly_error_job_num: [] - }, - todayJob: { - finished_job_num: [], - error_job_num: [], - unfinished_job_num: [], - time_line: [] - }, - top5Agent: { - 
top5_agent_name: [], - top5_agent_num: [] - }, - overview_data: { - today_wait_job_num: 0, // 当日为执行作业数 - today_job_num: 0, // 当日作业数 - today_job_flow_num: 0, // 当日作业流数 - today_error_job_num: 0, // 当日异常作业数 - jobDynamicState: [] - } } }, computed: { - weeklyJobId() { - return 'weeklyJobId' + this._uid - }, - top5AgentId() { - return 'top5AgentId' + this._uid - }, - todayJobId() { - return 'todayJobId' + this._uid - } }, mounted() { - this.getWeeklyJob() - this.getTop5Agent() - this.getTodayJob() - this.getJobtTrend() - const _this = this - const elementResizeDetectorMaker = require('element-resize-detector') // 导入element-resize-detector,为了使线图饼图自适应左侧菜单栏缩放后的大小 - // 创建实例 - const erd = elementResizeDetectorMaker() - // 监听id为home的元素 大小变化 - this.$nextTick(() => { - erd.listenTo(document.getElementById('home'), function(element) { - _this.weeklyJobChart.resize() - _this.top5AgentChart.resize() - _this.todayJobChart.resize() - }) - }) }, created() { - this.getOverViewData() }, methods: { - // 处理查看更多 - handleCheckMore() { - this.$router.push({ - path: '/log', - query: { - object_repr: '作业', - log: 'fromHome' - } - }) - }, - // 获取头部预览数据 - getOverViewData() { - this.overViewLoading = true - this.$api.home.overview().then(res => { - if (res.result) { - this.overview_data = res.data - } else { - this.$cwMessage(res.message, 'error') - } - this.overViewLoading = false - }) - }, - // 获取近七天作业执行情况 - getWeeklyJob() { - this.weeklyJobLoading = true - this.weeklyJobChart = this.$echarts.init(document.getElementById(this.weeklyJobId)) - const option = { - color: ['#3A84FF', '#FF5656'], - tooltip: { - trigger: 'axis', - axisPointer: { - type: 'shadow' - } - }, - title: { - text: '近七天作业执行情况' - }, - legend: { - x: '15', - y: 'top', - top: '13%', - data: ['作业总数', '异常作业数'], - textStyle: { - color: 'rgba(0, 0, 0, 0.45)' - } - }, - grid: { - height: 210, - width: '98%', - left: '20px', - right: '0px', - bottom: '40px', - containLabel: true - }, - toolbox: { - feature: { - saveAsImage: { - show: 
true, - // icon: 'M158.496 503.584c3.712 0 6.816 2.592 7.648 6.08l0.192 1.792v334.368h691.328v-334.4c0-3.68 2.56-6.816 6.08-7.616l1.76-0.224h53.376c3.744 0 6.848 2.592 7.68 6.08l0.192 1.792v372.032a31.392 31.392 0 0 1-27.744 31.2l-3.68 0.192H128.672a31.392 31.392 0 0 1-31.2-27.712l-0.224-3.68V511.456c0-3.712 2.592-6.848 6.08-7.68l1.792-0.192h53.376zM537.888 109.12c3.712 0 6.816 2.56 7.648 6.048l0.192 1.792V695.04l175.776-146.88a7.872 7.872 0 0 1 9.504 0.48l1.408 1.536 30.176 44.032a7.84 7.84 0 0 1-0.48 9.504l-1.568 1.408-244.96 194.304a7.84 7.84 0 0 1-7.2 0.896l-1.728-0.896-243.2-194.336a7.84 7.84 0 0 1-2.976-9.056l0.96-1.856 30.272-43.936a7.872 7.872 0 0 1 9.056-2.944l1.888 0.96 174.016 146.56V116.928c0-3.712 2.56-6.816 6.016-7.648l1.824-0.192h53.376z', - // iconStyle: { - // color: '#bfbfbf' - // }, - emphasis: { - iconStyle: { - textFill: '#fff' - } - } - } - } - }, - calculable: true, - xAxis: [{ - type: 'category', - axisTick: { - show: false - }, - axisLine: { - lineStyle: { - color: '#DCDEE5' - } - }, - axisLabel: { - color: 'rgba(0, 0, 0, 0.45)' - }, - data: this.weeklyJob.weekly_time - }], - yAxis: [{ - type: 'value', - axisLine: { // y轴 - show: false - }, - axisTick: { - show: false - }, - splitLine: { - lineStyle: { - color: '#F0F1F5' - } - }, - min: 0, - minInterval: 1, - axisLabel: { - formatter: '{value}', - color: 'rgba(0, 0, 0, 0.45)' - } - }], - series: [{ - name: '作业总数', - type: 'bar', - barWidth: 24, - label: '1222', - data: this.weeklyJob.weekly_job_num - // data: [2, 10, 30, 7, 6, 0] - }, - { - name: '异常作业数', - type: 'bar', - barGap: '20%', - barWidth: 24, - label: '2333', - data: this.weeklyJob.weekly_error_job_num - } - ] - } - this.$api.home.weekly_job().then(res => { - if (res.result) { - this.weeklyJob = res.data - option.xAxis[0].data = this.weeklyJob.weekly_time - option.series[0].data = this.weeklyJob.weekly_job_num - option.series[1].data = this.weeklyJob.weekly_error_job_num - this.drawline(this.weeklyJobChart, option) - } else { - 
this.$cwMessage(res.message, 'error') - this.drawline(this.weeklyJobChart, option) - } - this.weeklyJobLoading = false - }) - }, - // 获取日均作业top5的agent - getTop5Agent() { - this.top5AgentLoading = true - this.top5AgentChart = this.$echarts.init(document.getElementById(this.top5AgentId)) - const option = { - color: ['#3A84FF'], - xAxis: { - type: 'category', - data: this.top5Agent.top5_agent_name, - axisTick: { - show: false - }, - axisLine: { - lineStyle: { - color: '#DCDEE5' - } - }, - axisLabel: { - color: 'rgba(0, 0, 0, 0.45)' - } - }, - legend: { - data: [] - }, - title: { - text: '日均作业Top5的Agent' - }, - grid: { - height: 210, - width: '100%', - left: '20px', - bottom: '30px', - containLabel: true - }, - toolbox: { - feature: { - saveAsImage: { - show: true, - emphasis: { - iconStyle: { - textFill: '#fff' - } - } - } - } - }, - tooltip: { - trigger: 'axis', - axisPointer: { - type: 'shadow' - } - }, - yAxis: { - type: 'value', - axisLine: { // y轴 - show: false - }, - axisTick: { - show: false - }, - splitLine: { - lineStyle: { - color: '#F0F1F5' - } - }, - name: '日均作业数', - nameTextStyle: { - color: '#63656E' - }, - min: 0, - minInterval: 1, - axisLabel: { - formatter: '{value}', - color: 'rgba(0, 0, 0, 0.45)' - } - }, - series: [{ - data: this.top5Agent.top5_agent_num, - barWidth: 24, - type: 'bar' - }] - } - this.$api.home.top5_agent().then(res => { - if (res.result) { - this.top5Agent.top5_agent_name = res.data.top5_agent_name - this.top5Agent.top5_agent_num = res.data.top5_agent_num - // option.xAxis[0].data = this.top5Agent.top5_agent_name - option.xAxis.data = this.top5Agent.top5_agent_name - option.series[0].data = this.top5Agent.top5_agent_num - this.drawline(this.top5AgentChart, option) - } else { - this.$cwMessage(res.message, 'error') - this.drawline(this.top5AgentChart, option) - } - this.top5AgentLoading = false - }) - }, - // 获取当日作业执行情况 - getTodayJob() { - this.todayJobLoading = true - this.todayJobChart = 
this.$echarts.init(document.getElementById(this.todayJobId)) - // 当日作业执行情况 - const option = { - color: ['#45E35F', '#FF9C01', '#FF5656'], - tooltip: { - trigger: 'axis', - // formatter: '{b0}
{a0}: {c0}
{a1}: {c1}
{a2}: {c2}', - axisPointer: { - type: 'shadow' - } - }, - title: { - text: '当日作业执行情况' - }, - grid: { - height: 190, - width: '100%', - left: '40px', - bottom: '60px' - }, - toolbox: { - feature: { - saveAsImage: { - show: true, - emphasis: { - iconStyle: { - textFill: '#fff' - } - } - } - } - }, - legend: { - x: '10', - y: 'top', - top: '15%', - data: ['已完成的作业总数', '待完成的作业总数', '异常作业总数'], - textStyle: { - color: 'rgba(0, 0, 0, 0.45)' - } - }, - xAxis: [{ - type: 'category', - axisTick: { - show: false - }, - axisLine: { - lineStyle: { - color: '#DCDEE5' - } - }, - axisLabel: { - color: 'rgba(0, 0, 0, 0.45)' - }, - data: this.todayJob.time_line - }], - yAxis: [{ - type: 'value', - axisLine: { // y轴 - show: false - }, - axisTick: { - show: false - }, - splitLine: { - lineStyle: { - color: '#F0F1F5' - } - }, - min: 0, - axisLabel: { - formatter: '{value}', - color: 'rgba(0, 0, 0, 0.45)' - } - }], - series: [{ - name: '已完成的作业总数', - type: 'line', - data: this.todayJob.finished_job_num - }, - { - name: '待完成的作业总数', - type: 'line', - barWidth: 30, - data: this.todayJob.unfinished_job_num - }, - { - name: '异常作业总数', - type: 'line', - data: this.todayJob.error_job_num - } - ] - } - this.$api.home.today_job().then(res => { - if (res.result) { - this.todayJob.finished_job_num = res.data.finished_job_num - this.todayJob.error_job_num = res.data.error_job_num - this.todayJob.unfinished_job_num = res.data.unfinished_job_num - this.todayJob.time_line = res.data.time_line - option.series[0].data = this.todayJob.finished_job_num - option.series[1].data = this.todayJob.unfinished_job_num - option.series[2].data = this.todayJob.error_job_num - option.xAxis[0].data = this.todayJob.time_line - this.drawline(this.todayJobChart, option) - } else { - this.$cwMessage(res.message, 'error') - this.drawline(this.todayJobChart, option) - } - this.todayJobLoading = false - }) - }, - // 获取作业管理动态 - getJobtTrend() { - this.jobDynamicLoading = true - this.$api.home.job_dynamic().then(res => { - if 
(res.result) { - const data = res.data.slice(0, 4) - this.jobDynamicState = data.map(item => { - return { - tag: item.condition, - color: 'green', - filled: true, - content: `${item.finish_time}` - - } - }) - } else { - this.$cwMessage(res.message, 'error') - } - this.jobDynamicLoading = false - }) - }, - drawline(obj, option) { - obj.setOption(option) - } } } diff --git a/web/src/views/job_flow_mgmt/add_calendar_mgmt.vue b/web/src/views/job_flow_mgmt/add_calendar_mgmt.vue deleted file mode 100644 index a146cd5..0000000 --- a/web/src/views/job_flow_mgmt/add_calendar_mgmt.vue +++ /dev/null @@ -1,439 +0,0 @@ - - - - - diff --git a/web/src/views/job_flow_mgmt/calendar_mgmt.vue b/web/src/views/job_flow_mgmt/calendar_mgmt.vue deleted file mode 100644 index 07d519f..0000000 --- a/web/src/views/job_flow_mgmt/calendar_mgmt.vue +++ /dev/null @@ -1,346 +0,0 @@ - - - - - diff --git a/web/src/views/job_flow_mgmt/import_file.vue b/web/src/views/job_flow_mgmt/import_file.vue deleted file mode 100644 index ce1a1fa..0000000 --- a/web/src/views/job_flow_mgmt/import_file.vue +++ /dev/null @@ -1,143 +0,0 @@ - - - - - diff --git a/web/src/views/job_flow_mgmt/job_flow_list.vue b/web/src/views/job_flow_mgmt/job_flow_list.vue deleted file mode 100644 index e238cb4..0000000 --- a/web/src/views/job_flow_mgmt/job_flow_list.vue +++ /dev/null @@ -1,469 +0,0 @@ - - - - - - diff --git a/web/src/views/job_flow_mgmt/multiple_job_flow.vue b/web/src/views/job_flow_mgmt/multiple_job_flow.vue deleted file mode 100644 index abce80c..0000000 --- a/web/src/views/job_flow_mgmt/multiple_job_flow.vue +++ /dev/null @@ -1,140 +0,0 @@ - - - - - diff --git a/web/src/views/job_flow_mgmt/new_add_calendar_mgmt.vue b/web/src/views/job_flow_mgmt/new_add_calendar_mgmt.vue deleted file mode 100644 index 5316013..0000000 --- a/web/src/views/job_flow_mgmt/new_add_calendar_mgmt.vue +++ /dev/null @@ -1,392 +0,0 @@ - - - - - diff --git a/web/src/views/job_flow_mgmt/new_job_flow.vue 
b/web/src/views/job_flow_mgmt/new_job_flow.vue deleted file mode 100644 index 25d17b0..0000000 --- a/web/src/views/job_flow_mgmt/new_job_flow.vue +++ /dev/null @@ -1,105 +0,0 @@ - - - - - diff --git a/web/src/views/job_flow_mgmt/old_add_calendar_mgmt.vue b/web/src/views/job_flow_mgmt/old_add_calendar_mgmt.vue deleted file mode 100644 index 4272fe5..0000000 --- a/web/src/views/job_flow_mgmt/old_add_calendar_mgmt.vue +++ /dev/null @@ -1,596 +0,0 @@ - - - - - diff --git a/web/src/views/job_flow_mgmt/single_job_flow.vue b/web/src/views/job_flow_mgmt/single_job_flow.vue deleted file mode 100644 index e82f75f..0000000 --- a/web/src/views/job_flow_mgmt/single_job_flow.vue +++ /dev/null @@ -1,870 +0,0 @@ - - - - - diff --git a/web/src/views/job_flow_mgmt/single_job_flow/addModeDialog.vue b/web/src/views/job_flow_mgmt/single_job_flow/addModeDialog.vue deleted file mode 100644 index 63a39e3..0000000 --- a/web/src/views/job_flow_mgmt/single_job_flow/addModeDialog.vue +++ /dev/null @@ -1,21 +0,0 @@ - - - - - diff --git a/web/src/views/job_flow_mgmt/single_job_flow/baseInfo.vue b/web/src/views/job_flow_mgmt/single_job_flow/baseInfo.vue deleted file mode 100644 index 7517e5b..0000000 --- a/web/src/views/job_flow_mgmt/single_job_flow/baseInfo.vue +++ /dev/null @@ -1,512 +0,0 @@ - - - - - diff --git a/web/src/views/job_flow_mgmt/single_job_flow/baseNode.js b/web/src/views/job_flow_mgmt/single_job_flow/baseNode.js deleted file mode 100644 index fe068a5..0000000 --- a/web/src/views/job_flow_mgmt/single_job_flow/baseNode.js +++ /dev/null @@ -1,69 +0,0 @@ -const baseNodes = [{ - detail: false, - // id: getUUID(32, 16), - label: '开始', - name: '开始', - type: 'circle-node', - icon: 'dian', - content: null, - // state: 'wait', - // newState: '等待', - node_data: { - node_name: '开始', - description: '我是开始', - fail_retry_count: 0, - node_type: 0, - is_skip_fail: false, - is_timeout_alarm: false, - inputs: {}, - outputs: {} - }, - nodeType: 0, - endUuid: '', - labelCfg: { - style: { - textAlign: 
'center' - } - }, - style: { - fill: '#fff', - r: 24 - }, - x: 600, - y: 300 -}, { - detail: false, - // id: getUUID(32, 16), - label: '结束', - name: '结束', - nodeType: 1, - endUuid: '', - icon: 'dian', - content: null, - // state: 'wait', - // newState: '等待', - node_data: { - node_name: '结束', - description: '我是结束', - fail_retry_count: 0, - node_type: 0, - is_skip_fail: false, - is_timeout_alarm: false, - inputs: {}, - outputs: {} - }, - type: 'circle-node', - labelCfg: { - style: { - textAlign: 'center' - } - }, - style: { - fill: '#fff', - r: 24 - }, - x: 800, - y: 300 -}] - -export default baseNodes diff --git a/web/src/views/job_flow_mgmt/single_job_flow/edgeInfo.vue b/web/src/views/job_flow_mgmt/single_job_flow/edgeInfo.vue deleted file mode 100644 index 31d8788..0000000 --- a/web/src/views/job_flow_mgmt/single_job_flow/edgeInfo.vue +++ /dev/null @@ -1,144 +0,0 @@ - - - - - diff --git a/web/src/views/job_flow_mgmt/single_job_flow/headerPanel.vue b/web/src/views/job_flow_mgmt/single_job_flow/headerPanel.vue deleted file mode 100644 index 1de493e..0000000 --- a/web/src/views/job_flow_mgmt/single_job_flow/headerPanel.vue +++ /dev/null @@ -1,526 +0,0 @@ - - - - - diff --git a/web/src/views/job_flow_mgmt/single_job_flow/nodeInfo.vue b/web/src/views/job_flow_mgmt/single_job_flow/nodeInfo.vue deleted file mode 100644 index 13cf2d5..0000000 --- a/web/src/views/job_flow_mgmt/single_job_flow/nodeInfo.vue +++ /dev/null @@ -1,368 +0,0 @@ - - - - - diff --git a/web/src/views/job_flow_mgmt/single_job_flow/options.js b/web/src/views/job_flow_mgmt/single_job_flow/options.js deleted file mode 100644 index 10be7d9..0000000 --- a/web/src/views/job_flow_mgmt/single_job_flow/options.js +++ /dev/null @@ -1,91 +0,0 @@ -const options = { - defaultNode: { - type: 'rect-node', - style: { - radius: 10 - }, - labelCfg: { - fontSize: 20 - } - }, - defaultEdge: { - type: 'polyline-edge', // 扩展了内置边, 有边的事件 - // type: 'cubic-vertical-edge', // 扩展了内置边, 有边的事件 - labelCfg: { - refY: -15, - style: { 
- fill: '#1890ff', - fontSize: 14, - cursor: 'pointer', - background: { - fill: '#ffffff', - stroke: '#9EC9FF', - padding: [4, 4, 4, 4], - radius: 2 - } - } - }, - style: { - radius: 0, // 拐弯弧度 - offset: 15, // 拐弯处距离节点的最小距离 - stroke: '#aab7c3', - lineAppendWidth: 30, // 防止线太细没法点中 - // endArrow: true, - endArrow: { - path: 'M 0,0 L 4,3 L 3,0 L 4,-3 Z', - fill: '#aab7c3', - stroke: '#aab7c3' - }, - zIndex: 999999 - } - }, - // 覆盖全局样式 - nodeStateStyles: { - 'nodeState:default': { - opacity: 1, - fill: '#fff', - stroke: '#DCDEE5', - labelCfg: { - style: { - fill: '#333333' - } - } - }, - 'nodeState:hover': { - opacity: 0.8 - }, - 'nodeState:selected': { - opacity: 0.9, - stroke: 'rgb(58,132,255)', - labelCfg: { - style: { - fill: 'rgb(58,132,255)' - } - } - } - }, - // 默认边不同状态下的样式集合 - edgeStateStyles: { - 'edgeState:default': { - stroke: '#aab7c3' - }, - 'edgeState:selected': { - stroke: '#1890FF' - }, - 'edgeState:hover': { - animate: true, - animationType: 'dash', - stroke: '#1890FF' - } - }, - modes: [ - 'drag-canvas', // 官方内置的行为 - 'zoom-canvas', - 'select-node', - 'hover-node', - 'active-edge', - 'hover-edge' - ] -} - -export default options diff --git a/web/src/views/job_flow_mgmt/single_job_flow/preFlowCanvas.vue b/web/src/views/job_flow_mgmt/single_job_flow/preFlowCanvas.vue deleted file mode 100644 index 8b5393c..0000000 --- a/web/src/views/job_flow_mgmt/single_job_flow/preFlowCanvas.vue +++ /dev/null @@ -1,339 +0,0 @@ - - - - - diff --git a/web/src/views/job_flow_mgmt/single_job_flow/taskMake.vue b/web/src/views/job_flow_mgmt/single_job_flow/taskMake.vue deleted file mode 100644 index 7029962..0000000 --- a/web/src/views/job_flow_mgmt/single_job_flow/taskMake.vue +++ /dev/null @@ -1,462 +0,0 @@ - - - - - diff --git a/web/src/views/job_flow_mgmt/variable_change.vue b/web/src/views/job_flow_mgmt/variable_change.vue deleted file mode 100644 index 2dadee4..0000000 --- a/web/src/views/job_flow_mgmt/variable_change.vue +++ /dev/null @@ -1,243 +0,0 @@ - - - - - diff 
--git a/web/src/views/job_flow_mgmt/variable_mgmt.vue b/web/src/views/job_flow_mgmt/variable_mgmt.vue deleted file mode 100644 index 579b46d..0000000 --- a/web/src/views/job_flow_mgmt/variable_mgmt.vue +++ /dev/null @@ -1,230 +0,0 @@ - - - - - diff --git a/web/src/views/job_mgmt/job_dialog.vue b/web/src/views/job_mgmt/job_dialog.vue deleted file mode 100644 index ad0d811..0000000 --- a/web/src/views/job_mgmt/job_dialog.vue +++ /dev/null @@ -1,197 +0,0 @@ - - - - - diff --git a/web/src/views/job_mgmt/job_list.vue b/web/src/views/job_mgmt/job_list.vue deleted file mode 100644 index bf902c9..0000000 --- a/web/src/views/job_mgmt/job_list.vue +++ /dev/null @@ -1,473 +0,0 @@ - - - - - - diff --git a/web/src/views/job_mgmt/multiple_job.vue b/web/src/views/job_mgmt/multiple_job.vue deleted file mode 100644 index 485106e..0000000 --- a/web/src/views/job_mgmt/multiple_job.vue +++ /dev/null @@ -1,140 +0,0 @@ - - - - - diff --git a/web/src/views/job_mgmt/new_job.vue b/web/src/views/job_mgmt/new_job.vue deleted file mode 100644 index 7d51033..0000000 --- a/web/src/views/job_mgmt/new_job.vue +++ /dev/null @@ -1,104 +0,0 @@ - - - - - diff --git a/web/src/views/job_mgmt/scan_file.vue b/web/src/views/job_mgmt/scan_file.vue deleted file mode 100644 index 4710a91..0000000 --- a/web/src/views/job_mgmt/scan_file.vue +++ /dev/null @@ -1,152 +0,0 @@ - - - - - diff --git a/web/src/views/job_mgmt/single_job.vue b/web/src/views/job_mgmt/single_job.vue deleted file mode 100644 index 1d60373..0000000 --- a/web/src/views/job_mgmt/single_job.vue +++ /dev/null @@ -1,711 +0,0 @@ - - - - - diff --git a/web/src/views/job_monitor/history/job_flow_detail.vue b/web/src/views/job_monitor/history/job_flow_detail.vue deleted file mode 100644 index af154f6..0000000 --- a/web/src/views/job_monitor/history/job_flow_detail.vue +++ /dev/null @@ -1,497 +0,0 @@ - - - - - diff --git a/web/src/views/job_monitor/history/job_flow_detail/nodeInfo.vue b/web/src/views/job_monitor/history/job_flow_detail/nodeInfo.vue 
deleted file mode 100644 index 914dcfb..0000000 --- a/web/src/views/job_monitor/history/job_flow_detail/nodeInfo.vue +++ /dev/null @@ -1,244 +0,0 @@ - - - - - diff --git a/web/src/views/job_monitor/history/job_flow_detail/statusList.js b/web/src/views/job_monitor/history/job_flow_detail/statusList.js deleted file mode 100644 index 989318b..0000000 --- a/web/src/views/job_monitor/history/job_flow_detail/statusList.js +++ /dev/null @@ -1,72 +0,0 @@ -const statusList = [{ - label: '成功', - key: 'success', - fill: '#2DCB56' -}, { - label: '正在执行', - key: 'run', - fill: '#3A84FF' -}, { - label: '失败', - key: 'fail', - fill: '#FF9C01' -}, { - label: '终止', - key: 'stop', - fill: '#A60505' -}, { - label: '错误', - key: 'error', - fill: '#EA3636' -}, { - label: '等待', - key: 'wait', - fill: '#FFFFFF' -}, { - label: '就绪', - key: 'positive', - fill: '#94F5A4' -}, -{ - label: '忽略', - key: 'ignore', - fill: '#aa557f' -}, -{ - label: '挂起', - key: 'pause', - fill: '#FD9C9C' -}, -{ - label: '取消', - key: 'cancel', - fill: '#C4C6CC' -}, -{ - label: '待复核', - key: 'need_confirm', - fill: '#aa55ff' -}, { - label: '正在执行(存在审核)', - key: 'exists_need_confirm', - fill: '#ffaaff' -}, { - label: '正在执行(存在错误)', - key: 'exists_error', - fill: '#ff0000' -}, { - label: '正在执行(存在失败)', - key: 'exists_fail', - fill: '#c30d0d' -}, { - label: '正在执行(存在终止)', - key: 'exists_stop', - fill: '#5500ff' -}, { - label: '正在执行(存在挂起)', - key: 'exists_pause', - fill: '#00557f' -} -] - -export default statusList diff --git a/web/src/views/job_monitor/history/job_flow_detail/statusList.vue b/web/src/views/job_monitor/history/job_flow_detail/statusList.vue deleted file mode 100644 index 239d7fa..0000000 --- a/web/src/views/job_monitor/history/job_flow_detail/statusList.vue +++ /dev/null @@ -1,55 +0,0 @@ - - - - - diff --git a/web/src/views/job_monitor/history/job_flow_view_history.vue b/web/src/views/job_monitor/history/job_flow_view_history.vue deleted file mode 100644 index 1cbd949..0000000 --- 
a/web/src/views/job_monitor/history/job_flow_view_history.vue +++ /dev/null @@ -1,262 +0,0 @@ - - - - - diff --git a/web/src/views/job_monitor/history/job_history.vue b/web/src/views/job_monitor/history/job_history.vue deleted file mode 100644 index c347282..0000000 --- a/web/src/views/job_monitor/history/job_history.vue +++ /dev/null @@ -1,54 +0,0 @@ - - - - - diff --git a/web/src/views/job_monitor/history/job_view_detail.vue b/web/src/views/job_monitor/history/job_view_detail.vue deleted file mode 100644 index 6a13f3c..0000000 --- a/web/src/views/job_monitor/history/job_view_detail.vue +++ /dev/null @@ -1,215 +0,0 @@ - - - - - diff --git a/web/src/views/job_monitor/history/job_view_history.vue b/web/src/views/job_monitor/history/job_view_history.vue deleted file mode 100644 index d747cbd..0000000 --- a/web/src/views/job_monitor/history/job_view_history.vue +++ /dev/null @@ -1,236 +0,0 @@ - - - - - diff --git a/web/src/views/job_monitor/monitor/job_detail.vue b/web/src/views/job_monitor/monitor/job_detail.vue deleted file mode 100644 index be328a0..0000000 --- a/web/src/views/job_monitor/monitor/job_detail.vue +++ /dev/null @@ -1,797 +0,0 @@ - - - - - diff --git a/web/src/views/job_monitor/monitor/job_flow_view.vue b/web/src/views/job_monitor/monitor/job_flow_view.vue deleted file mode 100644 index c10dfbc..0000000 --- a/web/src/views/job_monitor/monitor/job_flow_view.vue +++ /dev/null @@ -1,663 +0,0 @@ - - - - - diff --git a/web/src/views/job_monitor/monitor/job_flow_view_detail/nodeInfo.vue b/web/src/views/job_monitor/monitor/job_flow_view_detail/nodeInfo.vue deleted file mode 100644 index 0175a99..0000000 --- a/web/src/views/job_monitor/monitor/job_flow_view_detail/nodeInfo.vue +++ /dev/null @@ -1,276 +0,0 @@ - - - - - diff --git a/web/src/views/job_monitor/monitor/job_flow_view_detail/statusList.js b/web/src/views/job_monitor/monitor/job_flow_view_detail/statusList.js deleted file mode 100644 index f88bab8..0000000 --- 
a/web/src/views/job_monitor/monitor/job_flow_view_detail/statusList.js +++ /dev/null @@ -1,71 +0,0 @@ -const statusList = [{ - label: '成功', - key: 'success', - fill: '#2DCB56' -}, { - label: '正在执行', - key: 'run', - fill: '#3A84FF' -}, { - label: '失败', - key: 'fail', - fill: '#FF9C01' -}, { - label: '终止', - key: 'stop', - fill: '#A60505' -}, { - label: '错误', - key: 'error', - fill: '#EA3636' -}, { - label: '等待', - key: 'wait', - fill: '#FFFFFF' -}, { - label: '就绪', - key: 'positive', - fill: '#94F5A4' -}, { - label: '挂起', - key: 'pause', - fill: '#FD9C9C' -}, -{ - label: '忽略', - key: 'ignore', - fill: '#aa557f' -}, -{ - label: '取消', - key: 'cancel', - fill: '#C4C6CC' -}, -{ - label: '待复核', - key: 'need_confirm', - fill: '#aa55ff' -}, { - label: '正在执行(存在审核)', - key: 'exists_need_confirm', - fill: '#ffaaff' -}, { - label: '正在执行(存在错误)', - key: 'exists_error', - fill: '#ff0000' -}, { - label: '正在执行(存在失败)', - key: 'exists_fail', - fill: '#c30d0d' -}, { - label: '正在执行(存在终止)', - key: 'exists_stop', - fill: '#5500ff' -}, { - label: '正在执行(存在挂起)', - key: 'exists_pause', - fill: '#00557f' -} -] - -export default statusList diff --git a/web/src/views/job_monitor/monitor/job_flow_view_detail/statusList.vue b/web/src/views/job_monitor/monitor/job_flow_view_detail/statusList.vue deleted file mode 100644 index 9c05f8d..0000000 --- a/web/src/views/job_monitor/monitor/job_flow_view_detail/statusList.vue +++ /dev/null @@ -1,56 +0,0 @@ - - - - - diff --git a/web/src/views/job_monitor/monitor/job_monitor.vue b/web/src/views/job_monitor/monitor/job_monitor.vue deleted file mode 100644 index a25d9e2..0000000 --- a/web/src/views/job_monitor/monitor/job_monitor.vue +++ /dev/null @@ -1,56 +0,0 @@ - - - - - diff --git a/web/src/views/job_monitor/monitor/job_view.vue b/web/src/views/job_monitor/monitor/job_view.vue deleted file mode 100644 index 33759bb..0000000 --- a/web/src/views/job_monitor/monitor/job_view.vue +++ /dev/null @@ -1,635 +0,0 @@ - - - - - diff --git 
a/web/src/views/job_monitor/monitor/view_detail.vue b/web/src/views/job_monitor/monitor/view_detail.vue deleted file mode 100644 index f92a1d1..0000000 --- a/web/src/views/job_monitor/monitor/view_detail.vue +++ /dev/null @@ -1,795 +0,0 @@ - - - - - diff --git a/web/src/views/job_monitor_large_screen/large_screen.vue b/web/src/views/job_monitor_large_screen/large_screen.vue deleted file mode 100644 index cbf1583..0000000 --- a/web/src/views/job_monitor_large_screen/large_screen.vue +++ /dev/null @@ -1,430 +0,0 @@ - - - - - diff --git a/web/src/views/job_monitor_large_screen/statusList.js b/web/src/views/job_monitor_large_screen/statusList.js deleted file mode 100644 index e62fd84..0000000 --- a/web/src/views/job_monitor_large_screen/statusList.js +++ /dev/null @@ -1,53 +0,0 @@ -const statusList = [{ - label: '成功', - key: 'success', - fill: '#2DCB56' -}, { - label: '正在执行', - key: 'run', - fill: '#3A84FF' -}, { - label: '失败', - key: 'fail', - fill: '#FF9C01' -}, { - label: '终止', - key: 'stop', - fill: '#A60505' -}, { - label: '错误', - key: 'error', - fill: '#EA3636' -}, { - label: '等待', - key: 'wait', - fill: '#FFFFFF' -}, { - label: '就绪', - key: 'positive', - fill: '#94F5A4' -}, { - label: '挂起', - key: 'pause', - fill: '#FD9C9C' -}, -{ - label: '忽略', - key: 'ignore', - fill: '#aa557f' -}, { - label: '正在执行(存在阻塞)', - key: 'exists_error', - fill: '#699DF4' -}, { - label: '取消', - key: 'cancel', - fill: '#C4C6CC' -}, { - label: '尚未实例化', - key: 'no_instance', - fill: '#A3C5FD' -} -] - -export default statusList diff --git a/web/src/views/job_monitor_large_screen/statusList.vue b/web/src/views/job_monitor_large_screen/statusList.vue deleted file mode 100644 index c5216f8..0000000 --- a/web/src/views/job_monitor_large_screen/statusList.vue +++ /dev/null @@ -1,56 +0,0 @@ - - - - - diff --git a/web/src/views/job_monitor_large_screen/topMenu.vue b/web/src/views/job_monitor_large_screen/topMenu.vue deleted file mode 100644 index f1705b7..0000000 --- 
a/web/src/views/job_monitor_large_screen/topMenu.vue +++ /dev/null @@ -1,79 +0,0 @@ - - - - - diff --git a/web/src/views/job_monitor_large_screen/tree.vue b/web/src/views/job_monitor_large_screen/tree.vue deleted file mode 100644 index 5d4b6f1..0000000 --- a/web/src/views/job_monitor_large_screen/tree.vue +++ /dev/null @@ -1,73 +0,0 @@ - - - - - diff --git a/web/src/views/report/job_flow_view_report.vue b/web/src/views/report/job_flow_view_report.vue deleted file mode 100644 index 3a587bd..0000000 --- a/web/src/views/report/job_flow_view_report.vue +++ /dev/null @@ -1,580 +0,0 @@ - - - - - diff --git a/web/src/views/report/job_view_report.vue b/web/src/views/report/job_view_report.vue deleted file mode 100644 index d14e556..0000000 --- a/web/src/views/report/job_view_report.vue +++ /dev/null @@ -1,595 +0,0 @@ - - - - - diff --git a/web/src/views/report/report.vue b/web/src/views/report/report.vue deleted file mode 100644 index 797a79f..0000000 --- a/web/src/views/report/report.vue +++ /dev/null @@ -1,76 +0,0 @@ - - - - - diff --git a/web/src/views/system/log.vue b/web/src/views/system/log.vue deleted file mode 100644 index 83e13e9..0000000 --- a/web/src/views/system/log.vue +++ /dev/null @@ -1,226 +0,0 @@ - - - - - diff --git a/web/src/views/system/log_mange.vue b/web/src/views/system/log_mange.vue deleted file mode 100644 index 9c42350..0000000 --- a/web/src/views/system/log_mange.vue +++ /dev/null @@ -1,9 +0,0 @@ - - - - - diff --git a/web/src/views/system/sys_setup.vue b/web/src/views/system/sys_setup.vue deleted file mode 100644 index 7bb5101..0000000 --- a/web/src/views/system/sys_setup.vue +++ /dev/null @@ -1,268 +0,0 @@ - - - - - diff --git a/web/src/views/system/system_class_manage.vue b/web/src/views/system/system_class_manage.vue deleted file mode 100644 index efbb5cd..0000000 --- a/web/src/views/system/system_class_manage.vue +++ /dev/null @@ -1,220 +0,0 @@ - - - - - diff --git a/web/src/views/system/system_class_manage/systemDialog.vue 
b/web/src/views/system/system_class_manage/systemDialog.vue deleted file mode 100644 index 89ecb55..0000000 --- a/web/src/views/system/system_class_manage/systemDialog.vue +++ /dev/null @@ -1,51 +0,0 @@ - - - - - diff --git a/web/src/views/system/user_and_permissions.vue b/web/src/views/system/user_and_permissions.vue deleted file mode 100644 index bf89ab9..0000000 --- a/web/src/views/system/user_and_permissions.vue +++ /dev/null @@ -1,290 +0,0 @@ - - - - - diff --git a/web/src/views/system/user_and_permissions/addUserDialog.vue b/web/src/views/system/user_and_permissions/addUserDialog.vue deleted file mode 100644 index 64f4e78..0000000 --- a/web/src/views/system/user_and_permissions/addUserDialog.vue +++ /dev/null @@ -1,160 +0,0 @@ - - - - - diff --git a/web/src/views/system/user_and_permissions/userInfo.vue b/web/src/views/system/user_and_permissions/userInfo.vue deleted file mode 100644 index 83c691e..0000000 --- a/web/src/views/system/user_and_permissions/userInfo.vue +++ /dev/null @@ -1,130 +0,0 @@ - - - - - diff --git a/web/src/views/task_mgmt/task_create.vue b/web/src/views/task_mgmt/task_create.vue deleted file mode 100644 index 30c0d5c..0000000 --- a/web/src/views/task_mgmt/task_create.vue +++ /dev/null @@ -1,259 +0,0 @@ - - - - - diff --git a/web/src/views/task_mgmt/task_list.vue b/web/src/views/task_mgmt/task_list.vue deleted file mode 100644 index a2b9d70..0000000 --- a/web/src/views/task_mgmt/task_list.vue +++ /dev/null @@ -1,471 +0,0 @@ - - - - - - diff --git a/web/src/vuex/actions.js b/web/src/vuex/actions.js index 1887548..e2fe963 100644 --- a/web/src/vuex/actions.js +++ b/web/src/vuex/actions.js @@ -1,4 +1,4 @@ -//给action注册事件处理函数 当函数被触发的时候,将该状态提交到mutations中处理好多 +// 给action注册事件处理函数 当函数被触发的时候,将该状态提交到mutations中处理好多 export function modifyAName({commit}, name) { return commit('modifyName', name) } diff --git a/web/static/img/flow_1x.png b/web/static/img/flow_1x.png deleted file mode 100644 index 4668e53..0000000 Binary files a/web/static/img/flow_1x.png 
and /dev/null differ diff --git a/web/static/img/full_screen_16.png b/web/static/img/full_screen_16.png deleted file mode 100644 index c9d79ed..0000000 Binary files a/web/static/img/full_screen_16.png and /dev/null differ diff --git a/web/static/img/job.png b/web/static/img/job.png deleted file mode 100644 index 3c59ce3..0000000 Binary files a/web/static/img/job.png and /dev/null differ diff --git a/web/static/img/jobFlow.png b/web/static/img/jobFlow.png deleted file mode 100644 index fd27c55..0000000 Binary files a/web/static/img/jobFlow.png and /dev/null differ diff --git a/web/static/img/jobFlow_16.png b/web/static/img/jobFlow_16.png deleted file mode 100644 index 4a2bf0e..0000000 Binary files a/web/static/img/jobFlow_16.png and /dev/null differ diff --git a/web/static/img/jobFlow_16_dark.png b/web/static/img/jobFlow_16_dark.png deleted file mode 100644 index 7bb476d..0000000 Binary files a/web/static/img/jobFlow_16_dark.png and /dev/null differ diff --git a/web/static/img/jobFlow_22.png b/web/static/img/jobFlow_22.png deleted file mode 100644 index fd29b4f..0000000 Binary files a/web/static/img/jobFlow_22.png and /dev/null differ diff --git a/web/static/img/jobFlow_22_dark.png b/web/static/img/jobFlow_22_dark.png deleted file mode 100644 index c9cd74c..0000000 Binary files a/web/static/img/jobFlow_22_dark.png and /dev/null differ diff --git a/web/static/img/jobFlow_28.png b/web/static/img/jobFlow_28.png deleted file mode 100644 index 322ecab..0000000 Binary files a/web/static/img/jobFlow_28.png and /dev/null differ diff --git a/web/static/img/jobFlow_28_dark.png b/web/static/img/jobFlow_28_dark.png deleted file mode 100644 index 4301c53..0000000 Binary files a/web/static/img/jobFlow_28_dark.png and /dev/null differ diff --git a/web/static/img/jobFlow_32.png b/web/static/img/jobFlow_32.png deleted file mode 100644 index 998b635..0000000 Binary files a/web/static/img/jobFlow_32.png and /dev/null differ diff --git a/web/static/img/jobFlow_32_dark.png 
b/web/static/img/jobFlow_32_dark.png deleted file mode 100644 index 691f07c..0000000 Binary files a/web/static/img/jobFlow_32_dark.png and /dev/null differ diff --git a/web/static/img/jobFlow_dark.png b/web/static/img/jobFlow_dark.png deleted file mode 100644 index c347a23..0000000 Binary files a/web/static/img/jobFlow_dark.png and /dev/null differ diff --git a/web/static/img/job_2x.png b/web/static/img/job_2x.png deleted file mode 100644 index 50d1f63..0000000 Binary files a/web/static/img/job_2x.png and /dev/null differ diff --git a/web/static/img/red_cross.png b/web/static/img/red_cross.png deleted file mode 100644 index 0825e4e..0000000 Binary files a/web/static/img/red_cross.png and /dev/null differ diff --git a/web/static/img/system.png b/web/static/img/system.png deleted file mode 100644 index c85ce5e..0000000 Binary files a/web/static/img/system.png and /dev/null differ diff --git a/web/static/img/system_16.png b/web/static/img/system_16.png deleted file mode 100644 index 988dec4..0000000 Binary files a/web/static/img/system_16.png and /dev/null differ diff --git a/web/static/img/system_16_dark.png b/web/static/img/system_16_dark.png deleted file mode 100644 index 33d01b3..0000000 Binary files a/web/static/img/system_16_dark.png and /dev/null differ diff --git a/web/static/img/system_28.png b/web/static/img/system_28.png deleted file mode 100644 index a5f8154..0000000 Binary files a/web/static/img/system_28.png and /dev/null differ diff --git a/web/static/img/system_28_dark.png b/web/static/img/system_28_dark.png deleted file mode 100644 index 1e8cde3..0000000 Binary files a/web/static/img/system_28_dark.png and /dev/null differ diff --git a/web/static/img/system_blue.svg b/web/static/img/system_blue.svg deleted file mode 100644 index fceb6fd..0000000 --- a/web/static/img/system_blue.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/web/static/img/system_design.png b/web/static/img/system_design.png deleted file mode 100644 index 
806148b..0000000 Binary files a/web/static/img/system_design.png and /dev/null differ diff --git a/web/static/img/system_new.png b/web/static/img/system_new.png deleted file mode 100644 index 79b194f..0000000 Binary files a/web/static/img/system_new.png and /dev/null differ