#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2024 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
import sys
import stat
import os
import argparse
import shutil
import json
import time
import re
import urllib.request


def _get_args():
    parser = argparse.ArgumentParser(add_help=True)
    parser.add_argument("-op", "--out_path", default=r"./", type=str,
                        help="path of the out directory. default: ./", )
    parser.add_argument("-rp", "--root_path", default=r"./", type=str,
                        help="path of the source root. default: ./", )
    parser.add_argument("-cl", "--components_list", default="", type=str,
                        help="components list: "
                             "pass the component names separated by commas, "
                             "for example: A,B,C. "
                             "default: none", )
    parser.add_argument("-bt", "--build_type", default=0, type=int,
                        help="build type. default: 0", )
    parser.add_argument("-on", "--organization_name", default='ohos', type=str,
                        help="organization name. default: 'ohos'", )
    parser.add_argument("-os", "--os_arg", default=r"linux", type=str,
                        help="target OS. default: linux", )
    parser.add_argument("-ba", "--build_arch", default=r"x86", type=str,
                        help="build arch. default: x86", )
    parser.add_argument("-lt", "--local_test", default=0, type=int,
                        help="local test. default: 0 (not local)", )
    args = parser.parse_args()
    return args

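# Illustrative CLI invocation (a sketch; the script file name, out path and
# component names below are placeholders, not values mandated by this script):
#   python3 this_script.py -op out/rk3568 -rp . -cl c_utils,ipc -bt 0 -on ohos -os linux -ba x86 -lt 1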

def _check_label(public_deps, value):
    innerapis = value["innerapis"]
    for _innerapi in innerapis:
        if _innerapi:
            label = _innerapi.get("label")
            if public_deps == label:
                return label.split(':')[-1]
    return ""


def _get_public_external_deps(data, public_deps):
    if not isinstance(data, dict):
        return ""
    for key, value in data.items():
        if not isinstance(value, dict):
            continue
        _data = _check_label(public_deps, value)
        if _data:
            return f"{key}:{_data}"
    return ""

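# A minimal sketch of the components.json shape these helpers assume (the part
# and label names below are invented for illustration):
#   {
#     "c_utils": {
#       "innerapis": [{"name": "utils", "label": "//commonlibrary/c_utils/base:utils"}],
#       "subsystem": "commonlibrary",
#       "path": "commonlibrary/c_utils"
#     }
#   }
# With that data, _get_public_external_deps(data, "//commonlibrary/c_utils/base:utils")
# would return "c_utils:utils", the "part:module" form used for external deps.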

def _is_innerkit(data, part, module):
    if not isinstance(data, dict):
        return False

    part_data = data.get(part)
    if not isinstance(part_data, dict):
        return False
    module_list = []
    for i in part_data["innerapis"]:
        if i:
            module_list.append(i["name"])
    if module in module_list:
        return True
    return False


def _get_components_json(out_path):
    jsondata = ""
    json_path = os.path.join(out_path, "build_configs", "parts_info", "components.json")
    with os.fdopen(os.open(json_path, os.O_RDWR | os.O_CREAT, stat.S_IWUSR | stat.S_IRUSR),
            'r', encoding='utf-8') as f:
        try:
            jsondata = json.load(f)
        except Exception as e:
            print('--_get_components_json parse json error--', e)
    return jsondata


def _handle_one_layer_json(json_key, json_data, desc_list):
    data_list = json_data.get(json_key)
    if isinstance(data_list, list) and len(data_list) >= 1:
        desc_list.extend(data_list)
    else:
        desc_list.append(json_data.get(json_key))


def _handle_two_layer_json(json_key, json_data, desc_list):
    for item in json_data.get(json_key):
        _include_dirs = item.get('include_dirs')
        if _include_dirs:
            desc_list.extend(_include_dirs)


def _get_json_data(args, module):
    jsondata = {}
    json_path = os.path.join(args.get("out_path"),
                             args.get("subsystem_name"), args.get("part_name"), "publicinfo", module + ".json")
    with os.fdopen(os.open(json_path, os.O_RDWR | os.O_CREAT, stat.S_IWUSR | stat.S_IRUSR),
            'r', encoding='utf-8') as f:
        try:
            jsondata = json.load(f)
        except Exception as e:
            print(json_path)
            print('--_get_json_data parse json error--', e)
    return jsondata

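# Rough sketch of a publicinfo/<module>.json file as consumed below; the key names
# come from the accesses in this script, the values are invented examples:
#   {
#     "label": "//commonlibrary/c_utils/base:utils",
#     "type": "shared_library",
#     "out_name": "libutils.z.so",
#     "public_deps": ["//third_party/bounds_checking_function:libsec_shared"],
#     "public_configs": [{"include_dirs": ["//commonlibrary/c_utils/base/include"]}]
#   }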

def _handle_deps_data(json_data):
    dep_list = []
    if json_data.get('public_deps'):
        _handle_one_layer_json('public_deps', json_data, dep_list)
    return dep_list


def _handle_includes_data(json_data):
    include_list = []
    if json_data.get('public_configs'):
        _handle_two_layer_json('public_configs', json_data, include_list)
    if json_data.get('all_dependent_configs'):
        _handle_two_layer_json('all_dependent_configs', json_data, include_list)
    return include_list


def _get_static_lib_path(args, json_data):
    label = json_data.get('label')
    split_label = label.split("//")[1].split(":")[0]
    real_static_lib_path = os.path.join(args.get("out_path"), "obj",
                                        split_label, json_data.get('out_name'))
    return real_static_lib_path


def _copy_dir(src_path, target_path):
    if not os.path.isdir(src_path):
        return False
    filelist_src = os.listdir(src_path)
    suffix_list = [".h", ".hpp", ".in", ".inc"]
    for file in filelist_src:
        path = os.path.join(os.path.abspath(src_path), file)
        if os.path.isdir(path):
            if file.startswith("."):
                continue
            path1 = os.path.join(target_path, file)
            _copy_dir(path, path1)
        else:
            if os.path.splitext(path)[-1] not in suffix_list:
                continue
            with open(path, 'rb') as read_stream:
                contents = read_stream.read()
            if not os.path.exists(target_path):
                os.makedirs(target_path)
            path1 = os.path.join(target_path, file)
            with os.fdopen(os.open(path1, os.O_WRONLY | os.O_CREAT, mode=0o640), "wb") as write_stream:
                write_stream.write(contents)
    return True


def _copy_includes(args, module, includes: list):
    if module == 'ipc_single':
        includes = [
            "//foundation/communication/ipc/interfaces/innerkits/ipc_core/include",
            "//foundation/communication/ipc/ipc/native/src/core/include",
            "//foundation/communication/ipc/ipc/native/src/mock/include",
        ]
    includes_out_dir = os.path.join(args.get("out_path"), "component_package",
                                    args.get("part_path"), "innerapis", module, "includes")
    for i in args.get("toolchain_info").keys():
        toolchain_includes_out_dir = os.path.join(args.get("out_path"), "component_package",
                                                  args.get("part_path"), "innerapis", module, i, "includes")
        toolchain_lib_out_dir = os.path.join(args.get("out_path"), "component_package",
                                             args.get("part_path"), "innerapis", module, i, "libs")
        if not os.path.exists(toolchain_includes_out_dir) and os.path.exists(toolchain_lib_out_dir):
            os.makedirs(toolchain_includes_out_dir)
        else:
            continue
        for include in includes:
            part_path = args.get("part_path")
            _sub_include = include.split(f"{part_path}/")[-1]
            split_include = include.split("//")[1]
            real_include_path = os.path.join(args.get("root_path"), split_include)
            if args.get('part_name') == 'libunwind':
                _out_dir = os.path.join(toolchain_includes_out_dir, _sub_include)
                _copy_dir(real_include_path, _out_dir)
                continue
            _copy_dir(real_include_path, toolchain_includes_out_dir)
    if not os.path.exists(includes_out_dir):
        os.makedirs(includes_out_dir)
    for include in includes:
        part_path = args.get("part_path")
        _sub_include = include.split(f"{part_path}/")[-1]
        split_include = include.split("//")[1]
        real_include_path = os.path.join(args.get("root_path"), split_include)
        if args.get('part_name') == 'libunwind':
            _out_dir = os.path.join(includes_out_dir, _sub_include)
            _copy_dir(real_include_path, _out_dir)
            continue
        _copy_dir(real_include_path, includes_out_dir)
    print("_copy_includes done")


def _copy_toolchain_lib(file_name, root, _name, lib_out_dir):
    if not file_name.startswith('.') and file_name.startswith(_name):
        if not os.path.exists(lib_out_dir):
            os.makedirs(lib_out_dir)
        file = os.path.join(root, file_name)
        shutil.copy(file, lib_out_dir)


def _toolchain_lib_handler(args, toolchain_path, _name, module, toolchain_name):
    for root, dirs, files in os.walk(toolchain_path):
        for file_name in files:
            lib_out_dir = os.path.join(args.get("out_path"), "component_package",
                                       args.get("part_path"), "innerapis", module, toolchain_name, "libs")
            _copy_toolchain_lib(file_name, root, _name, lib_out_dir)


def _toolchain_static_file_path_mapping(subsystem_name, args, i):
    if subsystem_name == "thirdparty":
        subsystem_name = "third_party"
    toolchain_path = os.path.join(args.get("out_path"), i, 'obj', subsystem_name,
                                  args.get("part_name"))
    return toolchain_path


def _copy_lib(args, json_data, module):
    so_path = ""
    lib_status = False
    subsystem_name = args.get("subsystem_name")
    if json_data.get('type') == 'static_library':
        so_path = _get_static_lib_path(args, json_data)
    else:
        so_path = os.path.join(args.get("out_path"), subsystem_name,
                               args.get("part_name"), json_data.get('out_name'))
    if args.get("toolchain_info").keys():
        for i in args.get("toolchain_info").keys():
            toolchain_path = os.path.join(args.get("out_path"), i, subsystem_name,
                                          args.get("part_name"))
            _name = json_data.get('out_name').split('.')[0]
            if json_data.get('type') == 'static_library':
                _name = json_data.get('out_name')
                toolchain_path = _toolchain_static_file_path_mapping(subsystem_name, args, i)
            _toolchain_lib_handler(args, toolchain_path, _name, module, i)
            lib_status = True
    if os.path.isfile(so_path):
        lib_out_dir = os.path.join(args.get("out_path"), "component_package",
                                   args.get("part_path"), "innerapis", module, "libs")
        if not os.path.exists(lib_out_dir):
            os.makedirs(lib_out_dir)
        shutil.copy(so_path, lib_out_dir)
        lib_status = True
    return lib_status


def _dirs_handler(bundlejson_out):
    dirs = dict()
    dirs['./'] = []
    directory = bundlejson_out
    for filename in os.listdir(directory):
        filepath = os.path.join(directory, filename)
        if os.path.isfile(filepath):
            dirs['./'].append(filename)
        else:
            dirs[filename] = [f"{filename}/*"]
    delete_list = ['LICENSE', 'README.md', 'README_zh.md', 'README_en.md', 'bundle.json']
    for delete_txt in delete_list:
        if delete_txt in dirs['./']:
            dirs['./'].remove(delete_txt)
    if not dirs['./']:
        del dirs['./']
    return dirs

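# As a sketch: for a package directory containing an "innerapis/" sub-directory
# plus only the files in delete_list, _dirs_handler would return
# {"innerapis": ["innerapis/*"]}; remaining top-level files are listed under "./"
# and every sub-directory maps to "<dir>/*".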

def _copy_bundlejson(args, public_deps_list):
    bundlejson_out = os.path.join(args.get("out_path"), "component_package", args.get("part_path"))
    print("bundlejson_out : ", bundlejson_out)
    if not os.path.exists(bundlejson_out):
        os.makedirs(bundlejson_out)
    bundlejson = os.path.join(args.get("root_path"), args.get("part_path"), "bundle.json")
    dependencies_dict = {}
    for public_deps in public_deps_list:
        _public_dep_part_name = public_deps.split(':')[0]
        if _public_dep_part_name != args.get("part_name"):
            _public_dep = f"@{args.get('organization_name')}/{_public_dep_part_name}"
            dependencies_dict.update({_public_dep: "*"})
    if os.path.isfile(bundlejson):
        with open(bundlejson, 'r') as f:
            bundle_data = json.load(f)
            bundle_data['publishAs'] = 'binary'
            bundle_data.update({'os': args.get('os')})
            bundle_data.update({'buildArch': args.get('buildArch')})
            dirs = _dirs_handler(bundlejson_out)
            bundle_data['dirs'] = dirs
            bundle_data['version'] = str(bundle_data['version'])
            if bundle_data['version'] == '':
                bundle_data['version'] = '1.0.0'
            pattern = r'^(\d+)\.(\d+)(-[a-zA-Z]+)?$'  # matches version strings of the form a.b[-suffix]
            match = re.match(pattern, bundle_data['version'])
            if match:
                a = match.group(1)
                b = match.group(2)
                suffix = match.group(3) if match.group(3) else ""
                bundle_data['version'] = f"{a}.{b}.0{suffix}"
            if args.get('build_type') in [0, 1]:
                bundle_data['version'] += '-snapshot'
            if args.get('organization_name'):
                _name_pattern = r'@(.*.)/'
                bundle_data['name'] = re.sub(_name_pattern, '@' + args.get('organization_name') + '/',
                                             bundle_data['name'])
            if bundle_data.get('scripts'):
                bundle_data.update({'scripts': {}})
            if bundle_data.get('licensePath'):
                del bundle_data['licensePath']
            if bundle_data.get('readmePath'):
                del bundle_data['readmePath']
            bundle_data['dependencies'] = dependencies_dict
            if os.path.isfile(os.path.join(bundlejson_out, "bundle.json")):
                os.remove(os.path.join(bundlejson_out, "bundle.json"))
            with os.fdopen(os.open(os.path.join(bundlejson_out, "bundle.json"), os.O_WRONLY | os.O_CREAT, mode=0o640),
                           "w",
                           encoding='utf-8') as fd:
                json.dump(bundle_data, fd, indent=4, ensure_ascii=False)

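# Sketch of the fields this rewrite produces in the packaged bundle.json (the
# values are illustrative, not taken from a real component):
#   "publishAs": "binary", "os": "linux", "buildArch": "x86",
#   "version": "1.2.0-snapshot"  (an input of "1.2" gains ".0" and, for
#                                 build_type 0/1, a "-snapshot" suffix),
#   "dependencies": {"@ohos/ability_base": "*"}, "dirs": {...}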

def _copy_license(args):
    license_out = os.path.join(args.get("out_path"), "component_package", args.get("part_path"))
    print("license_out : ", license_out)
    if not os.path.exists(license_out):
        os.makedirs(license_out)
    license_file = os.path.join(args.get("root_path"), args.get("part_path"), "LICENSE")
    if os.path.isfile(license_file):
        shutil.copy(license_file, license_out)
    else:
        license_default = os.path.join(args.get("root_path"), "build", "LICENSE")
        shutil.copy(license_default, license_out)
        bundlejson_out = os.path.join(args.get("out_path"), "component_package", args.get("part_path"), 'bundle.json')
        with open(bundlejson_out, 'r') as f:
            bundle_data = json.load(f)
            bundle_data.update({"license": "Apache License 2.0"})
        if os.path.isfile(bundlejson_out):
            os.remove(bundlejson_out)
        with os.fdopen(os.open(bundlejson_out, os.O_WRONLY | os.O_CREAT, mode=0o640), "w",
                       encoding='utf-8') as fd:
            json.dump(bundle_data, fd, indent=4, ensure_ascii=False)


def _copy_readme(args):
    readme_out = os.path.join(args.get("out_path"), "component_package", args.get("part_path"))
    print("readme_out : ", readme_out)
    if not os.path.exists(readme_out):
        os.makedirs(readme_out)
    readme = os.path.join(args.get("root_path"), args.get("part_path"), "README.md")
    readme_zh = os.path.join(args.get("root_path"), args.get("part_path"), "README_zh.md")
    readme_en = os.path.join(args.get("root_path"), args.get("part_path"), "README_en.md")
    readme_out_file = os.path.join(readme_out, "README.md")
    if os.path.isfile(readme):
        shutil.copy(readme, readme_out)
    elif os.path.isfile(readme_zh):
        shutil.copy(readme_zh, readme_out_file)
    elif os.path.isfile(readme_en):
        shutil.copy(readme_en, readme_out_file)
    else:
        try:
            with os.fdopen(os.open(readme_out_file, os.O_WRONLY | os.O_CREAT, mode=0o640), 'w') as fp:
                fp.write('READ.ME')
        except FileExistsError:
            pass


def _generate_import(fp):
    fp.write('import("//build/ohos.gni")\n')


def _generate_configs(fp, module):
    fp.write('\nconfig("' + module + '_configs") {\n')
    fp.write('  visibility = [ ":*" ]\n')
    fp.write('  include_dirs = [\n')
    fp.write('    "includes",\n')
    if module == 'libunwind':
        fp.write('    "includes/libunwind/src",\n')
        fp.write('    "includes/libunwind/include",\n')
        fp.write('    "includes/libunwind/include/tdep-arm",\n')
    if module == 'ability_runtime':
        fp.write('    "includes/context",\n')
        fp.write('    "includes/app",\n')
    fp.write('  ]\n')
    if module == 'libunwind':
        fp.write('  cflags = [\n')
        fp.write("""    "-D_GNU_SOURCE",
    "-DHAVE_CONFIG_H",
    "-DNDEBUG",
    "-DCC_IS_CLANG",
    "-fcommon",
    "-Werror",
    "-Wno-absolute-value",
    "-Wno-header-guard",
    "-Wno-unused-parameter",
    "-Wno-unused-variable",
    "-Wno-int-to-pointer-cast",
    "-Wno-pointer-to-int-cast",
    "-Wno-inline-asm",
    "-Wno-shift-count-overflow",
    "-Wno-tautological-constant-out-of-range-compare",
    "-Wno-unused-function",\n""")
        fp.write('  ]\n')
    fp.write('  }\n')


def _generate_prebuilt_shared_library(fp, lib_type, module):
    if lib_type == 'static_library':
        fp.write('ohos_prebuilt_static_library("' + module + '") {\n')
    elif lib_type == 'executable':
        fp.write('ohos_prebuilt_executable("' + module + '") {\n')
    elif lib_type == 'etc':
        fp.write('ohos_prebuilt_etc("' + module + '") {\n')
    else:
        fp.write('ohos_prebuilt_shared_library("' + module + '") {\n')


def _generate_public_configs(fp, module):
    fp.write(f'  public_configs = [":{module}_configs"]\n')


def _public_deps_special_handler(module):
    if module == 'appexecfwk_core':
        return ["ability_base:want"]
    return []


def _generate_public_deps(fp, module, deps: list, components_json, public_deps_list: list):
    if not deps:
        return public_deps_list
    fp.write('  public_external_deps = [\n')
    for dep in deps:
        public_external_deps = _get_public_external_deps(components_json, dep)
        if len(public_external_deps) > 0:
            fp.write(f"""    "{public_external_deps}",\n""")
            public_deps_list.append(public_external_deps)
    for _public_external_deps in _public_deps_special_handler(module):
        fp.write(f"""    "{_public_external_deps}",\n""")
        public_deps_list.append(_public_external_deps)
    fp.write('  ]\n')

    return public_deps_list


def _generate_other(fp, args, json_data, module):
    so_name = json_data.get('out_name')
    fp.write('  source = "libs/' + so_name + '"\n')
    fp.write('  part_name = "' + args.get("part_name") + '"\n')
    fp.write('  subsystem_name = "' + args.get("subsystem_name") + '"\n')


def _generate_end(fp):
    fp.write('}')


def _generate_build_gn(args, module, json_data, deps: list, components_json, public_deps_list):
    gn_path = os.path.join(args.get("out_path"), "component_package", args.get("part_path"),
                           "innerapis", module, "BUILD.gn")
    fd = os.open(gn_path, os.O_WRONLY | os.O_CREAT, mode=0o640)
    fp = os.fdopen(fd, 'w')
    _generate_import(fp)
    _generate_configs(fp, module)
    _generate_prebuilt_shared_library(fp, json_data.get('type'), module)
    _generate_public_configs(fp, module)
    _list = _generate_public_deps(fp, module, deps, components_json, public_deps_list)
    _generate_other(fp, args, json_data, module)
    _generate_end(fp)
    print("_generate_build_gn done")
    fp.close()
    return _list

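# Putting the generators above together, the emitted innerapis/<module>/BUILD.gn
# looks roughly like this (module name, library name and deps are illustrative):
#   import("//build/ohos.gni")
#
#   config("utils_configs") {
#     visibility = [ ":*" ]
#     include_dirs = [ "includes" ]
#   }
#   ohos_prebuilt_shared_library("utils") {
#     public_configs = [":utils_configs"]
#     public_external_deps = [ "bounds_checking_function:libsec_shared" ]
#     source = "libs/libutils.z.so"
#     part_name = "c_utils"
#     subsystem_name = "commonlibrary"
#   }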

def _toolchain_gn_modify(gn_path, file_name, toolchain_gn_file):
    if os.path.isfile(gn_path) and file_name:
        with open(gn_path, 'r') as f:
            _gn = f.read()
            pattern = r"libs/(.*.)"
            toolchain_gn = re.sub(pattern, 'libs/' + file_name + '\"', _gn)
        fd = os.open(toolchain_gn_file, os.O_WRONLY | os.O_CREAT, mode=0o640)
        fp = os.fdopen(fd, 'w')
        fp.write(toolchain_gn)
        fp.close()


def _get_toolchain_gn_file(lib_out_dir):
    file_name = ''
    try:
        file_list = os.scandir(lib_out_dir)
    except FileNotFoundError:
        return file_name
    for file in file_list:
        if not file.name.startswith('.') and file.is_file():
            file_name = file.name
    return file_name


def _toolchain_gn_copy(args, module):
    gn_path = os.path.join(args.get("out_path"), "component_package", args.get("part_path"),
                           "innerapis", module, "BUILD.gn")
    for i in args.get("toolchain_info").keys():
        lib_out_dir = os.path.join(args.get("out_path"), "component_package",
                                   args.get("part_path"), "innerapis", module, i, "libs")
        file_name = _get_toolchain_gn_file(lib_out_dir)
        if not file_name:
            continue
        toolchain_gn_file = os.path.join(args.get("out_path"), "component_package",
                                         args.get("part_path"), "innerapis", module, i, "BUILD.gn")
        if not os.path.exists(toolchain_gn_file):
            os.mknod(toolchain_gn_file)
        _toolchain_gn_modify(gn_path, file_name, toolchain_gn_file)


def _parse_module_list(args):
    module_list = []
    publicinfo_path = os.path.join(args.get("out_path"),
                                   args.get("subsystem_name"), args.get("part_name"), "publicinfo")
    print('publicinfo_path', publicinfo_path)
    if not os.path.exists(publicinfo_path):
        return module_list
    publicinfo_dir = os.listdir(publicinfo_path)
    for filename in publicinfo_dir:
        if filename.endswith(".json"):
            module_name = filename.split(".json")[0]
            module_list.append(module_name)
            print('filename', filename)
    print('module_list', module_list)
    return module_list


def _lib_special_handler(part_name, module, args):
    if part_name == 'mksh':
        mksh_file_path = os.path.join(args.get('out_path'), 'startup', 'init', 'sh')
        sh_out = os.path.join(args.get("out_path"), "thirdparty", "mksh")
        if os.path.isfile(mksh_file_path):
            shutil.copy(mksh_file_path, sh_out)
    if module == 'blkid':
        blkid_file_path = os.path.join(args.get('out_path'), 'filemanagement', 'storage_service', 'blkid')
        blkid_out = os.path.join(args.get("out_path"), "thirdparty", "e2fsprogs")
        if os.path.isfile(blkid_file_path):
            shutil.copy(blkid_file_path, blkid_out)
    if module == 'grpc_cpp_plugin':
        grpc_file_path = os.path.join(args.get('out_path'), 'clang_x64', 'thirdparty', 'grpc', 'grpc_cpp_plugin')
        grpc_out = os.path.join(args.get("out_path"), "thirdparty", "grpc")
        if os.path.isfile(grpc_file_path):
            shutil.copy(grpc_file_path, grpc_out)


def _generate_component_package(args, components_json):
    part_name = args.get("part_name")
    modules = _parse_module_list(args)
    print('modules', modules)
    if len(modules) == 0:
        return
    is_component_build = False
    _public_deps_list = []
    for module in modules:
        public_deps_list = []
        if not _is_innerkit(components_json, args.get("part_name"), module):
            continue
        json_data = _get_json_data(args, module)
        _lib_special_handler(part_name, module, args)
        lib_exists = _copy_lib(args, json_data, module)
        if not lib_exists:
            continue
        is_component_build = True
        includes = _handle_includes_data(json_data)
        deps = _handle_deps_data(json_data)
        _copy_includes(args, module, includes)
        _list = _generate_build_gn(args, module, json_data, deps, components_json, public_deps_list)
        if _list:
            _public_deps_list.extend(_list)
        _toolchain_gn_copy(args, module)
    if is_component_build:
        _copy_bundlejson(args, _public_deps_list)
        _copy_license(args)
        _copy_readme(args)
        if args.get("build_type") in [0, 1]:
            _hpm_status = _hpm_pack(args)
            if _hpm_status:
                _copy_hpm_pack(args)


def _get_part_subsystem(components_json: dict):
    jsondata = dict()
    try:
        for component, v in components_json.items():
            jsondata[component] = v.get('subsystem')
    except Exception as e:
        print('--_get_part_subsystem parse json error--', e)
    return jsondata


def _get_parts_path_info(components_json):
    jsondata = dict()
    try:
        for component, v in components_json.items():
            jsondata[component] = v.get('path')
    except Exception as e:
        print('--_get_parts_path_info parse json error--', e)
    return jsondata


def _get_toolchain_info(root_path):
    jsondata = ""
    json_path = os.path.join(root_path, "build", "indep_configs", "variants", "common", "toolchain.json")
    with os.fdopen(os.open(json_path, os.O_RDWR | os.O_CREAT, stat.S_IWUSR | stat.S_IRUSR),
            'r', encoding='utf-8') as f:
        try:
            jsondata = json.load(f)
        except Exception as e:
            print('--_get_toolchain_info parse json error--', e)
    return jsondata


def _get_parts_path(json_data, part_name):
    parts_path = None
    if json_data.get(part_name) is not None:
        parts_path = json_data[part_name]
    return parts_path


def _hpm_pack(args):
    part_path = os.path.join(args.get("out_path"), "component_package", args.get("part_path"))
    cmd = ['hpm', 'pack']
    try:
        subprocess.run(cmd, shell=False, cwd=part_path)
    except Exception as e:
        print("{} pack failed: {}".format(args.get("part_name"), e))
        return 0
    print("{} pack succeeded".format(args.get("part_name")))
    return 1

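# The packaging step above just shells out to the hpm CLI, which is assumed to be
# on PATH. It is roughly equivalent to running, from the generated package dir:
#   cd <out_path>/component_package/<part_path> && hpm pack
# (the exact paths depend on the build; this is only an illustration).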

def _copy_hpm_pack(args):
    hpm_packages_path = args.get('hpm_packages_path')
    part_path = os.path.join(args.get("out_path"), "component_package", args.get("part_path"))
    dirs = os.listdir(part_path)
    tgz_file_name = ''
    for file in dirs:
        if file.endswith(".tgz"):
            tgz_file_name = file
    tgz_file_out = os.path.join(part_path, tgz_file_name)
    if tgz_file_name:
        shutil.copy(tgz_file_out, hpm_packages_path)


def _make_hpm_packages_dir(root_path):
    _out_path = os.path.join(root_path, 'out')
    hpm_packages_path = os.path.join(_out_path, 'hpm_packages')
    os.makedirs(hpm_packages_path, exist_ok=True)
    return hpm_packages_path


def _del_exist_component_package(out_path):
    _component_package_path = os.path.join(out_path, 'component_package')
    if os.path.isdir(_component_package_path):
        try:
            print('del dir component_package start..')
            shutil.rmtree(_component_package_path)
            print('del dir component_package end..')
        except Exception as e:
            print('del dir component_package FAILED', e)


def _get_component_check(local_test) -> list:
    check_list = []
    if local_test == 0:
        contents = urllib.request.urlopen(
            "https://ci.openharmony.cn/api/daily_build/component/check/list").read().decode(
            encoding="utf-8")
        _check_json = json.loads(contents)
        try:
            check_list.extend(_check_json["data"]["dep_list"])
            check_list.extend(_check_json["data"]["indep_list"])
        except Exception as e:
            print("Component check API call returned unexpected data, please check the API response:", e)
    check_list = list(set(check_list))
    check_list = sorted(check_list)
    return check_list

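# The check API above is assumed to respond with JSON shaped roughly like
#   {"data": {"dep_list": ["partA", ...], "indep_list": ["partB", ...]}}
# and only those two lists are consumed here.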

def _package_interface(args, parts_path_info, part_name, subsystem_name, components_json):
    part_path = _get_parts_path(parts_path_info, part_name)
    if part_path is None:
        return
    args.update({"subsystem_name": subsystem_name, "part_name": part_name,
                 "part_path": part_path})
    _generate_component_package(args, components_json)


def generate_component_package(out_path, root_path, components_list=None, build_type=0, organization_name='ohos',
                               os_arg='linux', build_arch_arg='x86', local_test=0):
    """Generate binary component packages for the given components.

    Args:
        out_path: output path of the build, default: out/rk3568
        root_path: root path of the source code, default: oh/
        components_list: comma-separated names of all components that need to be built
        build_type: build type
            0: default pack, do not change organization_name
            1: pack, change organization_name
            2: do not pack, do not change organization_name
        organization_name: default 'ohos', changed if a different name is passed in
        os_arg: target OS, default: linux
        build_arch_arg: build arch, default: x86
        local_test: 1 to enable local test, 0 to disable, 2 to pack init and init deps
    Returns:
        None
    """
    start_time = time.time()
    _check_list = _get_component_check(local_test)
    if local_test == 1 and not components_list:
        components_list = []
    elif local_test == 1 and components_list:
        components_list = components_list.split(",")
    elif local_test == 2:
        components_list = ["init", "appspawn", "safwk", "c_utils",
                           "napi", "ipc", "config_policy", "hilog", "hilog_lite", "samgr", "access_token", "common",
                           "dsoftbus", "hvb", "hisysevent", "hiprofiler", "bounds_checking_function",
                           "bundle_framework", "selinux", "selinux_adapter", "storage_service",
                           "mbedtls", "zlib", "libuv", "cJSON", "mksh", "libunwind", "toybox",
                           "bounds_checking_function",
                           "selinux", "libunwind", "mbedtls", "zlib", "cJSON", "mksh", "toybox", "config_policy",
                           "e2fsprogs", "f2fs-tools", "selinux_adapter", "storage_service"
                           ]
    else:
        components_list = [component for component in components_list.split(",") if component in _check_list]
        if not components_list:
            sys.exit("stop: no target to pack..")
    print('components_list', components_list)
    components_json = _get_components_json(out_path)
    part_subsystem = _get_part_subsystem(components_json)
    parts_path_info = _get_parts_path_info(components_json)
    hpm_packages_path = _make_hpm_packages_dir(root_path)
    toolchain_info = _get_toolchain_info(root_path)
    # delete any existing component_package output
    _del_exist_component_package(out_path)
    args = {"out_path": out_path, "root_path": root_path,
            "os": os_arg, "buildArch": build_arch_arg, "hpm_packages_path": hpm_packages_path,
            "build_type": build_type, "organization_name": organization_name,
            "toolchain_info": toolchain_info
            }
    for key, value in part_subsystem.items():
        part_name = key
        subsystem_name = value
        # components_list is empty (pack everything) or part_name is in components_list
        if not components_list:
            _package_interface(args, parts_path_info, part_name, subsystem_name, components_json)
        for component in components_list:
            if part_name == component:
                _package_interface(args, parts_path_info, part_name, subsystem_name, components_json)

    end_time = time.time()
    run_time = end_time - start_time
    print("generate_component_package out_path", out_path)
    print(f"Generating the binary product package took {run_time} seconds")


def main():
    py_args = _get_args()
    generate_component_package(py_args.out_path,
                               py_args.root_path,
                               components_list=py_args.components_list,
                               build_type=py_args.build_type,
                               organization_name=py_args.organization_name,
                               os_arg=py_args.os_arg,
                               build_arch_arg=py_args.build_arch,
                               local_test=py_args.local_test)


if __name__ == '__main__':
    main()