Merge pull request #1388 from goodrain/merageto2306
fix: some issues merge to main
yangkaa committed Aug 23, 2023
2 parents 0d84ba3 + 6d64ac3 commit 5716570
Showing 31 changed files with 673 additions and 89 deletions.
Binary file added bin/linux/promql-parser-arm64
2 changes: 1 addition & 1 deletion console/models/main.py
@@ -107,7 +107,7 @@ class Meta:
install_number = models.IntegerField(default=0, help_text='安装次数')
is_official = models.BooleanField(default=False, help_text='是否官方认证')
details = models.TextField(null=True, blank=True, help_text="应用详情")
arch = models.CharField(max_length=32, default="amd64", help_text="架构")
arch = models.CharField(max_length=32, help_text="架构")


class RainbondCenterAppVersion(BaseModel):
6 changes: 5 additions & 1 deletion console/services/app.py
@@ -32,6 +32,7 @@
from console.repositories.service_group_relation_repo import \
service_group_relation_repo
from console.services.app_config import label_service
from console.services.app_config.arch_service import arch_service
from console.services.app_config.port_service import AppPortService
from console.services.app_config.probe_service import ProbeService
from console.services.app_config.service_monitor import service_monitor_repo
@@ -236,7 +237,8 @@ def __init_package_build_app(self, region):
tenant_service.create_status = "creating"
return tenant_service

def create_package_upload_info(self, region, tenant, user, service_cname, k8s_component_name, event_id, pkg_create_time):
def create_package_upload_info(self, region, tenant, user, service_cname, k8s_component_name, event_id, pkg_create_time,
arch):
service_cname = service_cname.rstrip().lstrip()
is_pass, msg = self.check_service_cname(tenant, service_cname, region)
if not is_pass:
@@ -253,6 +255,7 @@ def create_package_upload_info(self, region, tenant, user, service_cname, k8s_co
new_service.k8s_component_name = k8s_component_name if k8s_component_name else service_alias
new_service.git_url = "/grdata/package_build/components/" + service_id + "/events/" + event_id
new_service.code_version = pkg_create_time
new_service.arch = arch
new_service.save()
ts = TenantServiceInfo.objects.get(service_id=new_service.service_id, tenant_id=new_service.tenant_id)
return ts
@@ -716,6 +719,7 @@ def create_region_service(self, tenant, service, user_name, do_deploy=True, dep_
# component install complete
service.create_status = "complete"
service.save()
arch_service.update_affinity_by_arch(service.arch, tenant, service.service_region, service)
return service

def __init_stream_rule_for_region(self, tenant, service, rule, user_name):
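
The upload-build path now carries the component architecture end to end: create_package_upload_info accepts an arch argument and stores it on the new component, and create_region_service applies architecture-aware node affinity via arch_service.update_affinity_by_arch once the component exists in the region. A minimal caller sketch; the app_service instance name and the request fields are assumptions for illustration, not part of this diff:

```python
# Hypothetical caller showing how the new `arch` argument might be threaded
# from an upload request into the service layer. Field names are assumed.
def handle_package_upload(request, region, tenant, user):
    arch = request.data.get("arch", "amd64")  # assumed field; amd64 as a fallback
    component = app_service.create_package_upload_info(
        region=region,
        tenant=tenant,
        user=user,
        service_cname=request.data["service_cname"],
        k8s_component_name=request.data.get("k8s_component_name", ""),
        event_id=request.data["event_id"],
        pkg_create_time=request.data["pkg_create_time"],
        arch=arch,
    )
    return component
```
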
2 changes: 1 addition & 1 deletion console/services/app_actions/app_manage.py
@@ -966,7 +966,7 @@ def truncate_service(self, tenant, service, user=None, app=None):
# If this component belongs to an app installed from an application model, also delete the installed-app relation when the last component is removed.
if service.tenant_service_group_id > 0:
count = service_repo.get_services_by_service_group_id(service.tenant_service_group_id).count()
if count <= 1:
if not count:
tenant_service_group_repo.delete_tenant_service_group_by_pk(service.tenant_service_group_id)

return 200, "success"
2 changes: 1 addition & 1 deletion console/services/app_actions/properties_changes.py
@@ -153,7 +153,7 @@ def component_graph_changes(self, component_graphs):
add = []
for graph in component_graphs:
try:
new_promql = promql_service.add_or_update_label(self.service.service_id, graph.get("promql"))
new_promql = promql_service.add_or_update_label(self.service.service_id, graph.get("promql"), self.service.arch)
except AbortRequest as e:
logger.warning("promql: {}, {}".format(graph.get("promql"), e))
continue
16 changes: 8 additions & 8 deletions console/services/app_config/component_graph.py
@@ -42,7 +42,7 @@ def list_internal_graphs(self):
graphs, _ = self._load_internal_graphs()
return graphs

def create_internal_graphs(self, component_id, graph_name):
def create_internal_graphs(self, component_id, graph_name, component_arch):
_, internal_graphs = self._load_internal_graphs()
if not internal_graphs or not internal_graphs.get(graph_name):
raise ErrInternalGraphsNotFound
@@ -57,7 +57,7 @@ def create_internal_graphs(self, component_id, graph_name):
pass

try:
promql = promql_service.add_or_update_label(component_id, graph["promql"])
promql = promql_service.add_or_update_label(component_id, graph["promql"], component_arch)
except AbortRequest as e:
logger.warning("promql {}: {}".format(graph["promql"], e))
continue
@@ -74,8 +74,8 @@ def create_internal_graphs(self, component_id, graph_name):
ComponentGraph.objects.bulk_create(graphs)

@transaction.atomic
def create_component_graph(self, component_id, title, promql):
promql = promql_service.add_or_update_label(component_id, promql)
def create_component_graph(self, component_id, title, promql, component_arch):
promql = promql_service.add_or_update_label(component_id, promql, component_arch)
graph_id = make_uuid()
sequence = self._next_sequence(component_id)
if sequence > 10000:
@@ -108,18 +108,18 @@ def delete_by_component_id(self, component_id):
return component_graph_repo.delete_by_component_id(component_id)

@transaction.atomic()
def update_component_graph(self, graph, title, promql, sequence):
def update_component_graph(self, graph, title, promql, sequence, arch):
data = {
"title": title,
"promql": promql_service.add_or_update_label(graph.component_id, promql),
"promql": promql_service.add_or_update_label(graph.component_id, promql, arch),
}
if sequence != graph.sequence:
data["sequence"] = sequence
self._sequence_move_back(graph.component_id, sequence, graph.sequence)
component_graph_repo.update(graph.component_id, graph.graph_id, **data)
return component_graph_repo.get(graph.component_id, graph.graph_id).to_dict()

def bulk_create(self, component_id, graphs):
def bulk_create(self, component_id, graphs, arch):
if not graphs:
return
cgs = []
@@ -131,7 +131,7 @@ def bulk_create(self, component_id, graphs):
pass

try:
promql = promql_service.add_or_update_label(component_id, graph.get("promql"))
promql = promql_service.add_or_update_label(component_id, graph.get("promql"), arch)
except AbortRequest as e:
logger.warning("promql: {}, {}".format(graph.get("promql"), e))
continue
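
Every graph entry point (create_internal_graphs, create_component_graph, update_component_graph, bulk_create) now takes the component architecture and forwards it to promql_service.add_or_update_label, so label injection runs through the parser binary that matches the component. A minimal usage sketch; the component_graph_service instance name and the argument values are assumed for illustration:

```python
# Hedged usage sketch: create a custom graph for an arm64 component so that
# the arm64 promql-parser binary is used when the service_id label is injected.
graph = component_graph_service.create_component_graph(
    component_id="abc123def456",                 # hypothetical component id
    title="HTTP request rate",
    promql="sum(rate(http_requests_total[5m]))",
    component_arch="arm64",
)
```
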
7 changes: 5 additions & 2 deletions console/services/app_config/promql_service.py
@@ -12,11 +12,14 @@

class PromQLService(object):
@staticmethod
def add_or_update_label(component_id, promql):
def add_or_update_label(component_id, promql, component_arch=""):
"""
Add service_id label, or replace illegal service_id label
"""
promql_parser = BASE_DIR + "/bin/" + platform.system().lower() + "/promql-parser"
promql_name = "/promql-parser"
if component_arch == "arm64":
promql_name = "/promql-parser-arm64"
promql_parser = BASE_DIR + "/bin/" + platform.system().lower() + promql_name
c = subprocess.Popen([os.getenv("PROMQL_PARSER", promql_parser), "--component_id", component_id],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
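
With the arm64 parser binary now shipped under bin/linux/, add_or_update_label picks the binary from the component architecture before shelling out, and the PROMQL_PARSER environment variable still overrides the computed path. A standalone sketch of just that selection step, mirroring the diff (the base directory value is illustrative):

```python
import os
import platform


def resolve_promql_parser(base_dir, component_arch=""):
    # arm64 components use promql-parser-arm64; everything else falls back to
    # the default promql-parser under bin/<os>/. PROMQL_PARSER, if set, wins.
    name = "/promql-parser-arm64" if component_arch == "arm64" else "/promql-parser"
    default_path = base_dir + "/bin/" + platform.system().lower() + name
    return os.getenv("PROMQL_PARSER", default_path)


# e.g. on Linux: resolve_promql_parser("/app/ui", "arm64")
# -> "/app/ui/bin/linux/promql-parser-arm64"
```
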
85 changes: 78 additions & 7 deletions console/services/app_import_and_export_service.py
@@ -278,7 +278,43 @@ def start_import_apps(self, scope, event_id, file_names, team_name=None, enterpr
import_record.status = "importing"
import_record.save()

def get_and_update_import_by_event_id(self, event_id):
def openapi_deploy_import_apps(self, region, scope, event_id, file_names, team_name=None, enterprise_id=None):
service_image = app_store.get_app_hub_info(enterprise_id=enterprise_id)
data = {"service_image": service_image, "event_id": event_id, "apps": file_names}
if scope == "enterprise":
region_api.import_app_2_enterprise(region, enterprise_id, data)
else:
region_api.import_app(region, team_name, data)

def get_helm_yaml_info(self,
region_name,
tenant,
event_id,
file_name,
region_app_id,
name,
version,
enterprise_id=None,
region_id=None):
data = {
"event_id": event_id,
"file_name": file_name,
"namespace": tenant.namespace,
"name": name,
"version": version,
}
res, body = region_api.get_yaml_by_chart(region_name, enterprise_id, data)
yaml_resource_detailed_data = {
"event_id": "",
"region_app_id": region_app_id,
"tenant_id": tenant.tenant_id,
"namespace": tenant.namespace,
"yaml": body["bean"]["yaml"]
}
_, body = region_api.yaml_resource_detailed(enterprise_id, region_id, yaml_resource_detailed_data)
return body["bean"]

def get_and_update_import_by_event_id(self, event_id, arch):
import_record = app_import_record_repo.get_import_record_by_event_id(event_id)
if not import_record:
raise RecordNotFound("import_record not found")
@@ -288,7 +324,7 @@ def get_and_update_import_by_event_id(self, event_id):
if import_record.status != "success":
if status == "success":
logger.debug("app import success !")
self.__save_enterprise_import_info(import_record, body["bean"]["metadata"])
self.__save_enterprise_import_info(import_record, body["bean"]["metadata"], arch)
import_record.source_dir = body["bean"]["source_dir"]
import_record.format = body["bean"]["format"]
import_record.status = "success"
@@ -324,6 +360,34 @@ def get_and_update_import_by_event_id(self, event_id):

return import_record, apps_status

def openapi_deploy_app_get_import_by_event_id(self, event_id):
import_record = app_import_record_repo.get_import_record_by_event_id(event_id)
if not import_record:
raise RecordNotFound("import_record not found")
# get import status from region
res, body = region_api.get_enterprise_app_import_status(import_record.region, import_record.enterprise_id, event_id)
metadata = []
status = body["bean"]["status"]
if import_record.status != "success":
if status == "success":
logger.debug("app import success !")
import_record.scope = "enterprise"
self.__save_enterprise_import_info(import_record, body["bean"]["metadata"], "")
import_record.source_dir = body["bean"]["source_dir"]
import_record.format = body["bean"]["format"]
import_record.status = "success"
import_record.save()
metadata = json.loads(body["bean"]["metadata"])
# delete the data center directory data after a successful import
try:
region_api.delete_enterprise_import_file_dir(import_record.region, import_record.enterprise_id, event_id)
except Exception as e:
logger.exception(e)
else:
import_record.status = status
import_record.save()
return import_record, metadata

def get_and_update_import_status(self, tenant, region, event_id):
"""获取并更新导入状态"""
import_record = app_import_record_repo.get_import_record_by_event_id(event_id)
@@ -427,7 +491,7 @@ def delete_import_app_dir(self, tenant, region, event_id):

app_import_record_repo.delete_by_event_id(event_id)

def __save_enterprise_import_info(self, import_record, metadata):
def __save_enterprise_import_info(self, import_record, metadata, arch):
rainbond_apps = []
rainbond_app_versions = []
metadata = json.loads(metadata)
@@ -437,13 +501,18 @@ def __save_enterprise_import_info(self, import_record, metadata):
for app_template in metadata:
annotations = app_template.get("annotations", {})
app_describe = app_template.pop("describe", "")
apps = app_template.get("apps")
if annotations.get("describe", ""):
app_describe = annotations.pop("describe", "")
app = rainbond_app_repo.get_rainbond_app_by_app_id(import_record.enterprise_id, app_template["group_key"])
if not arch:
arch_map = {a.get("arch", "amd64"): 1 for a in apps}
arch = "&".join(list(arch_map.keys()))
# if app exists, update it
if app:
app.scope = import_record.scope
app.describe = app_describe
app.arch = app.arch if arch in app.arch.split(",") else app.arch + "," + arch
app.save()
app_version = rainbond_app_repo.get_rainbond_app_version_by_app_id_and_version(
app.app_id, app_template["group_version"])
@@ -460,10 +529,11 @@ def __save_enterprise_import_info(self, import_record, metadata):
app_version.template_version = app_template["template_version"]
app_version.app_version_info = version_info
app_version.version_alias = version_alias
app_version.arch = arch
app_version.save()
else:
# create a new version
rainbond_app_versions.append(self.create_app_version(app, import_record, app_template))
rainbond_app_versions.append(self.create_app_version(app, import_record, app_template, arch))
else:
image_base64_string = app_template.pop("image_base64_string", "")
if annotations.get("image_base64_string"):
@@ -487,15 +557,15 @@ def __save_enterprise_import_info(self, import_record, metadata):
scope=import_record.scope,
describe=app_describe,
pic=pic_url,
)
arch=arch)
rainbond_apps.append(rainbond_app)
# create a new app version
rainbond_app_versions.append(self.create_app_version(rainbond_app, import_record, app_template))
rainbond_app_versions.append(self.create_app_version(rainbond_app, import_record, app_template, arch))
rainbond_app_repo.bulk_create_rainbond_app_versions(rainbond_app_versions)
rainbond_app_repo.bulk_create_rainbond_apps(rainbond_apps)

@staticmethod
def create_app_version(app, import_record, app_template):
def create_app_version(app, import_record, app_template, arch):
version = RainbondCenterAppVersion(
scope=import_record.scope,
enterprise_id=import_record.enterprise_id,
@@ -508,6 +578,7 @@ def create_app_version(app, import_record, app_template):
is_complete=1,
app_version_info=app_template.get("annotations", {}).get("version_info", ""),
version_alias=app_template.get("annotations", {}).get("version_alias", ""),
arch=arch,
)
if app_store.is_no_multiple_region_hub(import_record.enterprise_id):
version.region_name = import_record.region
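
When the importer is not given an architecture explicitly, __save_enterprise_import_info derives one from the app template: each component's arch (treated as amd64 when unset) is deduplicated and the distinct values are joined with "&"; an existing app's comma-separated arch field is only extended when the derived value is not already present. A standalone sketch of the aggregation step:

```python
def derive_template_arch(apps):
    # Mirrors the aggregation in __save_enterprise_import_info: collect each
    # component's arch (amd64 when unset), dedupe via dict keys (first-seen
    # order preserved), and join the distinct values with "&".
    arch_map = {a.get("arch", "amd64"): 1 for a in apps}
    return "&".join(list(arch_map.keys()))


# A mixed template yields "amd64&arm64"; an all-amd64 template yields "amd64".
print(derive_template_arch([{"arch": "amd64"}, {"arch": "arm64"}, {}]))
```
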
4 changes: 4 additions & 0 deletions console/services/compose_service.py
@@ -120,6 +120,10 @@ def save_compose_services(self, tenant, user, region, group_compose, data):
group_service.add_service_to_group(tenant, region, group_compose.group_id, service.service_id)

app_check_service.save_service_info(tenant, service, service_info)
res, body = region_api.get_cluster_nodes_arch(region)
chaos_arch = list(set(body.get("list")))
arch = chaos_arch[0] if chaos_arch else "amd64"
service.arch = arch
# save service info
service.save()
# create the component build source record and store the account credentials
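
Compose-built components now inherit an architecture from the cluster: get_cluster_nodes_arch returns the node architectures, the list is deduplicated, and the first entry is used, with amd64 as the fallback when nothing is reported. A standalone sketch of that fallback; note that set() ordering is unspecified, so a mixed cluster yields an arbitrary one of its architectures:

```python
def pick_cluster_arch(node_archs):
    # Deduplicate the architectures reported by the cluster nodes and take one;
    # default to amd64 when the region returns an empty list.
    distinct = list(set(node_archs or []))
    return distinct[0] if distinct else "amd64"


print(pick_cluster_arch(["amd64", "amd64", "arm64"]))  # "amd64" or "arm64" (unordered)
print(pick_cluster_arch([]))                           # "amd64"
```
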
1 change: 1 addition & 0 deletions console/services/group_service.py
@@ -113,6 +113,7 @@ def create_app(self,
res["group_id"] = app.ID
res['app_id'] = app.ID
res['app_name'] = app.group_name
res['k8s_app'] = app.k8s_app
return res

def create_default_app(self, tenant, region_name):
